Dataset schema (as shown in the dataset viewer):

    Unnamed: 0    int64     (row index, 0 to 10k)
    function      string    (lengths 79 to 138k)
    label         string    (20 classes)
    info          string    (lengths 42 to 261)
6,600
def save_as(self, event):
    filename = self.asksavefile()
    if filename:
        if self.writefile(filename):
            self.set_filename(filename)
            self.set_saved(1)
            try:
                self.editwin.store_file_breaks()
            except __HOLE__:
                pass
    self.text.focus_set()
    self.updaterecentfileslist(filename)
    return "break"
AttributeError
dataset/ETHPy150Open francelabs/datafari/windows/python/Lib/idlelib/IOBinding.py/IOBinding.save_as
6,601
def writefile(self, filename):
    self.fixlastline()
    chars = self.encode(self.text.get("1.0", "end-1c"))
    if self.eol_convention != "\n":
        chars = chars.replace("\n", self.eol_convention)
    try:
        with open(filename, "wb") as f:
            f.write(chars)
        return True
    except __HOLE__ as msg:
        tkMessageBox.showerror("I/O Error", str(msg), master=self.text)
        return False
IOError
dataset/ETHPy150Open francelabs/datafari/windows/python/Lib/idlelib/IOBinding.py/IOBinding.writefile
6,602
def try_unlink(fname):
    try:
        os.unlink(fname)
    except __HOLE__ as e:
        if "No such file" not in str(e):
            raise
OSError
dataset/ETHPy150Open fp7-ofelia/ocf/expedient/src/python/expedient_geni/tests.py/try_unlink
6,603
def test_getprotocol():
    try:
        getprotocol('invalid')
        assert False, "ValueError was not raised"
    except __HOLE__:
        pass
ValueError
dataset/ETHPy150Open openstack/wsme/wsme/tests/test_protocols.py/test_getprotocol
6,604
def test_main():
    suite = unittest.TestSuite()
    suite.addTest(DocTestSuite('_threading_local'))
    if test_support.is_jython:
        del ThreadingLocalTest.test_local_refs
    suite.addTest(unittest.makeSuite(ThreadingLocalTest))
    try:
        from thread import _local
    except __HOLE__:
        pass
    else:
        import _threading_local
        local_orig = _threading_local.local

        def setUp(test):
            _threading_local.local = _local

        def tearDown(test):
            _threading_local.local = local_orig

        suite.addTest(DocTestSuite('_threading_local',
                                   setUp=setUp, tearDown=tearDown))
    test_support.run_unittest(suite)
ImportError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/test/test_threading_local.py/test_main
6,605
def run(self, edit):
    settings = self.view.settings()
    history = settings.get("git_savvy.help.history") or []
    try:
        history.pop()
        page, anchor = history[-1]
    except __HOLE__:
        print("sorry, no can do!")
        return
    settings.set("git_savvy.help.history", history)
    self.view.run_command("gs_help_browse", {"page": page,
                                             "anchor": anchor,
                                             "add_to_history": False})
IndexError
dataset/ETHPy150Open divmain/GitSavvy/common/commands/help.py/GsHelpGotoPrevious.run
6,606
def symlink(src, dst):
    print "Creating symlink to %s from %s" % (src, dst)
    try:
        os.symlink(src, dst)
    except __HOLE__, e:
        "Already exists!"
OSError
dataset/ETHPy150Open fp7-ofelia/ocf/ofam/src/install.py/symlink
6,607
def addDir(path, owner):
    try:
        print "Making Directory: %s" % (path)
        os.makedirs(path)
    except __HOLE__, e:
        pass
    call('chown %s %s' % (owner, path))
OSError
dataset/ETHPy150Open fp7-ofelia/ocf/ofam/src/install.py/addDir
6,608
def aliasSub(requestContext, seriesList, search, replace):
    """
    Runs series names through a regex search/replace.

    Example::

        &target=aliasSub(ip.*TCP*,"^.*TCP(\d+)","\\1")
    """
    try:
        seriesList.name = re.sub(search, replace, seriesList.name)
    except __HOLE__:
        for series in seriesList:
            series.name = re.sub(search, replace, series.name)
    return seriesList
AttributeError
dataset/ETHPy150Open brutasse/graphite-api/graphite_api/functions.py/aliasSub
6,609
def alias(requestContext, seriesList, newName):
    """
    Takes one metric or a wildcard seriesList and a string in quotes.
    Prints the string instead of the metric name in the legend.

    Example::

        &target=alias(Sales.widgets.largeBlue,"Large Blue Widgets")
    """
    try:
        seriesList.name = newName
    except __HOLE__:
        for series in seriesList:
            series.name = newName
    return seriesList
AttributeError
dataset/ETHPy150Open brutasse/graphite-api/graphite_api/functions.py/alias
6,610
def removeAbovePercentile(requestContext, seriesList, n):
    """
    Removes data above the nth percentile from the series or list of series
    provided. Values above this percentile are assigned a value of None.
    """
    for s in seriesList:
        s.name = 'removeAbovePercentile(%s, %d)' % (s.name, n)
        s.pathExpression = s.name
        try:
            percentile = nPercentile(requestContext, [s], n)[0][0]
        except __HOLE__:
            continue
        for index, val in enumerate(s):
            if val is None:
                continue
            if val > percentile:
                s[index] = None
    return seriesList
IndexError
dataset/ETHPy150Open brutasse/graphite-api/graphite_api/functions.py/removeAbovePercentile
6,611
def removeBelowPercentile(requestContext, seriesList, n):
    """
    Removes data below the nth percentile from the series or list of series
    provided. Values below this percentile are assigned a value of None.
    """
    for s in seriesList:
        s.name = 'removeBelowPercentile(%s, %d)' % (s.name, n)
        s.pathExpression = s.name
        try:
            percentile = nPercentile(requestContext, [s], n)[0][0]
        except __HOLE__:
            continue
        for (index, val) in enumerate(s):
            if val is None:
                continue
            if val < percentile:
                s[index] = None
    return seriesList
IndexError
dataset/ETHPy150Open brutasse/graphite-api/graphite_api/functions.py/removeBelowPercentile
6,612
def branch_stats(self):
    """Get stats about branches.

    Returns a dict mapping line numbers to a tuple:
    (total_exits, taken_exits).
    """
    exit_counts = self.parser.exit_counts()
    missing_arcs = self.missing_branch_arcs()
    stats = {}
    for lnum in self.branch_lines():
        exits = exit_counts[lnum]
        try:
            missing = len(missing_arcs[lnum])
        except __HOLE__:
            missing = 0
        stats[lnum] = (exits, exits - missing)
    return stats
KeyError
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/coverage/results.py/Analysis.branch_stats
6,613
def DownloadActivityList(self, serviceRecord, exhaustive=False):
    oauthSession = self._oauthSession(serviceRecord)
    activities = []
    exclusions = []
    page_url = "https://api.endomondo.com/api/1/workouts"
    while True:
        resp = oauthSession.get(page_url)
        try:
            respList = resp.json()["data"]
        except __HOLE__:
            self._rateLimitBailout(resp)
            raise APIException("Error decoding activity list resp %s %s" % (resp.status_code, resp.text))
        for actInfo in respList:
            activity = UploadedActivity()
            activity.StartTime = self._parseDate(actInfo["start_time"])
            logger.debug("Activity s/t %s" % activity.StartTime)
            if "is_tracking" in actInfo and actInfo["is_tracking"]:
                exclusions.append(APIExcludeActivity("Not complete", activity_id=actInfo["id"],
                                                     permanent=False,
                                                     user_exception=UserException(UserExceptionType.LiveTracking)))
                continue
            if "end_time" in actInfo:
                activity.EndTime = self._parseDate(actInfo["end_time"])
            if actInfo["sport"] in self._activityMappings:
                activity.Type = self._activityMappings[actInfo["sport"]]
            # "duration" is timer time
            if "duration_total" in actInfo:
                activity.Stats.TimerTime = ActivityStatistic(ActivityStatisticUnit.Seconds, value=float(actInfo["duration_total"]))
            if "distance_total" in actInfo:
                activity.Stats.Distance = ActivityStatistic(ActivityStatisticUnit.Kilometers, value=float(actInfo["distance_total"]))
            if "calories_total" in actInfo:
                activity.Stats.Energy = ActivityStatistic(ActivityStatisticUnit.Kilocalories, value=float(actInfo["calories_total"]))
            activity.Stats.Elevation = ActivityStatistic(ActivityStatisticUnit.Meters)
            if "altitude_max" in actInfo:
                activity.Stats.Elevation.Max = float(actInfo["altitude_max"])
            if "altitude_min" in actInfo:
                activity.Stats.Elevation.Min = float(actInfo["altitude_min"])
            if "total_ascent" in actInfo:
                activity.Stats.Elevation.Gain = float(actInfo["total_ascent"])
            if "total_descent" in actInfo:
                activity.Stats.Elevation.Loss = float(actInfo["total_descent"])
            activity.Stats.Speed = ActivityStatistic(ActivityStatisticUnit.KilometersPerHour)
            if "speed_max" in actInfo:
                activity.Stats.Speed.Max = float(actInfo["speed_max"])
            if "heart_rate_avg" in actInfo:
                activity.Stats.HR = ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, avg=float(actInfo["heart_rate_avg"]))
            if "heart_rate_max" in actInfo:
                activity.Stats.HR.update(ActivityStatistic(ActivityStatisticUnit.BeatsPerMinute, max=float(actInfo["heart_rate_max"])))
            if "cadence_avg" in actInfo:
                activity.Stats.Cadence = ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, avg=int(actInfo["cadence_avg"]))
            if "cadence_max" in actInfo:
                activity.Stats.Cadence.update(ActivityStatistic(ActivityStatisticUnit.RevolutionsPerMinute, max=int(actInfo["cadence_max"])))
            if "title" in actInfo:
                activity.Name = actInfo["title"]
            activity.ServiceData = {"WorkoutID": int(actInfo["id"])}
            activity.CalculateUID()
            activities.append(activity)
        paging = resp.json()["paging"]
        if "next" not in paging or not paging["next"] or not exhaustive:
            break
        else:
            page_url = paging["next"]
    return activities, exclusions
ValueError
dataset/ETHPy150Open cpfair/tapiriik/tapiriik/services/Endomondo/endomondo.py/EndomondoService.DownloadActivityList
6,614
def DownloadActivity(self, serviceRecord, activity):
    resp = self._oauthSession(serviceRecord).get("https://api.endomondo.com/api/1/workouts/%d" % activity.ServiceData["WorkoutID"],
                                                 params={"fields": "points"})
    try:
        resp = resp.json()
    except __HOLE__:
        self._rateLimitBailout(resp)
        res_txt = resp.text
        raise APIException("Parse failure in Endomondo activity download: %s" % resp.status_code)
    lap = Lap(stats=activity.Stats, startTime=activity.StartTime, endTime=activity.EndTime)
    activity.Laps = [lap]
    activity.GPS = False
    old_location = None
    in_pause = False
    for pt in resp["points"]:
        wp = Waypoint()
        if "time" not in pt:
            # Manually-entered activities with a course attached to them have date-less waypoints
            # It'd be nice to transfer those courses, but it's a concept few other sites support AFAIK
            # So, ignore the points entirely
            continue
        wp.Timestamp = self._parseDate(pt["time"])
        if ("lat" in pt and "lng" in pt) or "alt" in pt:
            wp.Location = Location()
            if "lat" in pt and "lng" in pt:
                wp.Location.Latitude = pt["lat"]
                wp.Location.Longitude = pt["lng"]
                activity.GPS = True
            if "alt" in pt:
                wp.Location.Altitude = pt["alt"]
            if wp.Location == old_location:
                # We have seen the point with the same coordinates
                # before. This causes other services (e.g Strava) to
                # interpret this as if we were standing for a while,
                # which causes us having wrong activity time when
                # importing. We mark the point as paused in hopes this
                # fixes the issue.
                in_pause = True
                wp.Type = WaypointType.Pause
            elif in_pause:
                in_pause = False
                wp.Type = WaypointType.Resume
            old_location = wp.Location
        if "hr" in pt:
            wp.HR = pt["hr"]
        if "cad" in pt:
            wp.Cadence = pt["cad"]
        lap.Waypoints.append(wp)
    activity.Stationary = len(lap.Waypoints) == 0
    return activity
ValueError
dataset/ETHPy150Open cpfair/tapiriik/tapiriik/services/Endomondo/endomondo.py/EndomondoService.DownloadActivity
6,615
def _run_exitfuncs():
    """run any registered exit functions

    _exithandlers is traversed in reverse order so functions are executed
    last in, first out.
    """
    exc_info = None
    while _exithandlers:
        func, targs, kargs = _exithandlers.pop()
        try:
            func(*targs, **kargs)
        except __HOLE__:
            exc_info = sys.exc_info()
        except:
            import traceback
            sys.stderr.write("Error in atexit._run_exitfuncs:\n")
            traceback.print_exc()
            exc_info = sys.exc_info()
    if exc_info is not None:
        raise exc_info[0](exc_info[1])
SystemExit
dataset/ETHPy150Open python-zk/kazoo/kazoo/python2atexit.py/_run_exitfuncs
6,616
def get_page_size(request, default=20):
    session = request.session
    cookies = request.COOKIES
    try:
        page_size = int(session.get(
            'horizon_pagesize',
            cookies.get('horizon_pagesize',
                        getattr(settings, 'API_RESULT_PAGE_SIZE', default))))
    except __HOLE__:
        page_size = session['horizon_pagesize'] = int(default)
    return page_size
ValueError
dataset/ETHPy150Open CiscoSystems/avos/horizon/utils/functions.py/get_page_size
6,617
def next_key(tuple_of_tuples, key):
    """Processes a tuple of 2-element tuples and returns the key which comes
    after the given key.
    """
    for i, t in enumerate(tuple_of_tuples):
        if t[0] == key:
            try:
                return tuple_of_tuples[i + 1][0]
            except __HOLE__:
                return None
IndexError
dataset/ETHPy150Open CiscoSystems/avos/horizon/utils/functions.py/next_key
6,618
def previous_key(tuple_of_tuples, key):
    """Processes a tuple of 2-element tuples and returns the key which comes
    before the given key.
    """
    for i, t in enumerate(tuple_of_tuples):
        if t[0] == key:
            try:
                return tuple_of_tuples[i - 1][0]
            except __HOLE__:
                return None
IndexError
dataset/ETHPy150Open CiscoSystems/avos/horizon/utils/functions.py/previous_key
6,619
def __new__(cls, start=None, stop=None, step=None, name=None, dtype=None,
            fastpath=False, copy=False, **kwargs):
    if fastpath:
        return cls._simple_new(start, stop, step, name=name)

    cls._validate_dtype(dtype)

    # RangeIndex
    if isinstance(start, RangeIndex):
        if name is None:
            name = start.name
        return cls._simple_new(name=name,
                               **dict(start._get_data_as_items()))

    # validate the arguments
    def _ensure_int(value, field):
        try:
            new_value = int(value)
            assert(new_value == value)
        except (__HOLE__, AssertionError):
            raise TypeError("RangeIndex(...) must be called with integers,"
                            " {value} was passed for {field}".format(
                                value=type(value).__name__, field=field))
        return new_value

    if start is None:
        start = 0
    else:
        start = _ensure_int(start, 'start')
    if stop is None:
        stop = start
        start = 0
    else:
        stop = _ensure_int(stop, 'stop')
    if step is None:
        step = 1
    elif step == 0:
        raise ValueError("Step must not be zero")
    else:
        step = _ensure_int(step, 'step')

    return cls._simple_new(start, stop, step, name)
ValueError
dataset/ETHPy150Open pydata/pandas/pandas/indexes/range.py/RangeIndex.__new__
6,620
@classmethod
def _simple_new(cls, start, stop=None, step=None, name=None,
                dtype=None, **kwargs):
    result = object.__new__(cls)

    # handle passed None, non-integers
    if start is None or not com.is_integer(start):
        try:
            return RangeIndex(start, stop, step, name=name, **kwargs)
        except __HOLE__:
            return Index(start, stop, step, name=name, **kwargs)

    result._start = start
    result._stop = stop or 0
    result._step = step or 1
    result.name = name
    for k, v in compat.iteritems(kwargs):
        setattr(result, k, v)
    result._reset_identity()
    return result
TypeError
dataset/ETHPy150Open pydata/pandas/pandas/indexes/range.py/RangeIndex._simple_new
6,621
@classmethod
def _add_numeric_methods_binary(cls):
    """ add in numeric methods, specialized to RangeIndex """

    def _make_evaluate_binop(op, opstr, reversed=False, step=False):
        """
        Parameters
        ----------
        op : callable that accepts 2 parms
            perform the binary op
        opstr : string
            string name of ops
        reversed : boolean, default False
            if this is a reversed op, e.g. radd
        step : callable, optional, default to False
            op to apply to the step parm if not None
            if False, use the existing step
        """

        def _evaluate_numeric_binop(self, other):
            other = self._validate_for_numeric_binop(other, op, opstr)
            attrs = self._get_attributes_dict()
            attrs = self._maybe_update_attributes(attrs)

            if reversed:
                self, other = other, self

            try:
                # apply if we have an override
                if step:
                    rstep = step(self._step, other)

                    # we don't have a representable op
                    # so return a base index
                    if not com.is_integer(rstep) or not rstep:
                        raise ValueError
                else:
                    rstep = self._step

                rstart = op(self._start, other)
                rstop = op(self._stop, other)

                result = RangeIndex(rstart, rstop, rstep, **attrs)

                # for compat with numpy / Int64Index
                # even if we can represent as a RangeIndex, return
                # as a Float64Index if we have float-like descriptors
                if not all([com.is_integer(x) for x in
                            [rstart, rstop, rstep]]):
                    result = result.astype('float64')

                return result

            except (__HOLE__, TypeError, AttributeError):
                pass

            # convert to Int64Index ops
            if isinstance(self, RangeIndex):
                self = self.values
            if isinstance(other, RangeIndex):
                other = other.values

            return Index(op(self, other), **attrs)

        return _evaluate_numeric_binop

    cls.__add__ = cls.__radd__ = _make_evaluate_binop(operator.add, '__add__')
    cls.__sub__ = _make_evaluate_binop(operator.sub, '__sub__')
    cls.__rsub__ = _make_evaluate_binop(operator.sub, '__sub__', reversed=True)
    cls.__mul__ = cls.__rmul__ = _make_evaluate_binop(operator.mul, '__mul__',
                                                      step=operator.mul)
    cls.__truediv__ = _make_evaluate_binop(operator.truediv, '__truediv__',
                                           step=operator.truediv)
    cls.__rtruediv__ = _make_evaluate_binop(operator.truediv, '__truediv__',
                                            reversed=True, step=operator.truediv)
    if not compat.PY3:
        cls.__div__ = _make_evaluate_binop(operator.div, '__div__',
                                           step=operator.div)
        cls.__rdiv__ = _make_evaluate_binop(operator.div, '__div__',
                                            reversed=True, step=operator.div)
ValueError
dataset/ETHPy150Open pydata/pandas/pandas/indexes/range.py/RangeIndex._add_numeric_methods_binary
6,622
def run_subprocess(cmd, data=None):
    """
    Execute the command C{cmd} in a subprocess.

    @param cmd: The command to execute, specified as a list of string.
    @param data: A string containing data to send to the subprocess.
    @return: A tuple C{(out, err)}.
    @raise OSError: If there is any problem executing the command, or if
        its exitval is not 0.
    """
    if isinstance(cmd, basestring):
        cmd = cmd.split()

    # Under Python 2.4+, use subprocess
    try:
        from subprocess import Popen, PIPE
        pipe = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
        out, err = pipe.communicate(data)
        if hasattr(pipe, 'returncode'):
            if pipe.returncode == 0:
                return out, err
            else:
                raise RunSubprocessError(cmd, out, err)
        else:
            # Assume that there was an error iff anything was written
            # to the child's stderr.
            if err == '':
                return out, err
            else:
                raise RunSubprocessError(cmd, out, err)
    except ImportError:
        pass

    # Under Python 2.3 or earlier, on unix, use popen2.Popen3 so we
    # can access the return value.
    import popen2
    if hasattr(popen2, 'Popen3'):
        pipe = popen2.Popen3(' '.join(cmd), True)
        to_child = pipe.tochild
        from_child = pipe.fromchild
        child_err = pipe.childerr
        if data:
            to_child.write(data)
        to_child.close()
        out = err = ''
        while pipe.poll() is None:
            out += from_child.read()
            err += child_err.read()
        out += from_child.read()
        err += child_err.read()
        if pipe.wait() == 0:
            return out, err
        else:
            raise RunSubprocessError(cmd, out, err)

    # Under Python 2.3 or earlier, on non-unix, use os.popen3
    else:
        to_child, from_child, child_err = os.popen3(' '.join(cmd), 'b')
        if data:
            try:
                to_child.write(data)
            # Guard for a broken pipe error
            except __HOLE__, e:
                raise OSError(e)
        to_child.close()
        err = child_err.read()
        out = from_child.read()
        # Assume that there was an error iff anything was written
        # to the child's stderr.
        if err == '':
            return out, err
        else:
            raise RunSubprocessError(cmd, out, err)
IOError
dataset/ETHPy150Open ardekantur/pyglet/tools/epydoc/epydoc/util.py/run_subprocess
6,623
def services(b):
    logging.info('searching for service dependencies')

    # Command fragments for listing the files in a package.
    commands = {'apt': ['dpkg-query', '-L'],
                'yum': ['rpm', '-ql']}

    # Build a map of the directory that contains each file in the
    # blueprint to the pathname of that file.
    dirs = defaultdict(list)
    for pathname in b.files:
        dirname = os.path.dirname(pathname)
        if dirname not in ('/etc', '/etc/init', '/etc/init.d'):
            dirs[dirname].append(pathname)

    def service_file(manager, service, pathname):
        """
        Add dependencies for every pathname extracted from init scripts
        and other dependent files.
        """
        content = open(pathname).read()
        for match in pattern.finditer(content):
            if match.group(1) in b.files:
                b.add_service_file(manager, service, match.group(1))
        for dirname in b.sources.iterkeys():
            content = util.unicodeme(content)
            if dirname in content:
                b.add_service_source(manager, service, dirname)

    def service_package(manager, service, package_manager, package):
        """
        Add dependencies for every file in the blueprint that's also in
        this service's package or in a directory in this service's
        package.
        """
        try:
            p = subprocess.Popen(commands[package_manager] + [package],
                                 close_fds=True, stdout=subprocess.PIPE)
        except __HOLE__:
            return
        for line in p.stdout:
            pathname = line.rstrip()
            if pathname in b.files:
                b.add_service_file(manager, service, pathname)
            elif pathname in dirs:
                b.add_service_file(manager, service, *dirs[pathname])

    def service(manager, service):
        """
        Add extra file dependencies found in packages.  Then add extra
        file dependencies found by searching file content for pathnames.
        """
        walk.walk_service_packages(b, manager, service,
                                   service_package=service_package)
        if 'sysvinit' == manager:
            service_file(manager, service, '/etc/init.d/{0}'.format(service))
        elif 'upstart' == manager:
            service_file(manager, service,
                         '/etc/init/{0}.conf'.format(service))
        walk.walk_service_files(b, manager, service,
                                service_file=service_file)

    b.walk(service=service)
KeyError
dataset/ETHPy150Open devstructure/blueprint/blueprint/services.py/services
6,624
def getByReference(self, ref):
    """
    Returns an object based on the supplied reference. The C{ref} should
    be an C{int}.

    If the reference is not found, C{None} will be returned.
    """
    try:
        return self.list[ref]
    except __HOLE__:
        return None
IndexError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/PyAMF-0.6.1/pyamf/codec.py/IndexedCollection.getByReference
6,625
def getClassAlias(self, klass):
    """
    Gets a class alias based on the supplied C{klass}. If one is not found
    in the global context, one is created locally.

    If you supply a string alias and the class is not registered,
    L{pyamf.UnknownClassAlias} will be raised.

    @param klass: A class object or string alias.
    @return: The L{pyamf.ClassAlias} instance that describes C{klass}
    """
    try:
        return self._class_aliases[klass]
    except __HOLE__:
        pass

    try:
        alias = self._class_aliases[klass] = pyamf.get_class_alias(klass)
    except pyamf.UnknownClassAlias:
        if isinstance(klass, python.str_types):
            raise

        # no alias has been found yet .. check subclasses
        alias = util.get_class_alias(klass) or pyamf.ClassAlias
        meta = util.get_class_meta(klass)
        alias = alias(klass, defer=True, **meta)
        self._class_aliases[klass] = alias

    return alias
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/PyAMF-0.6.1/pyamf/codec.py/Context.getClassAlias
6,626
def readElement(self):
    """
    Reads an AMF3 element from the data stream.

    @raise DecodeError: The ActionScript type is unsupported.
    @raise EOStream: No more data left to decode.
    """
    pos = self.stream.tell()

    try:
        t = self.stream.read(1)
    except IOError:
        raise pyamf.EOStream

    try:
        func = self._func_cache[t]
    except KeyError:
        func = self.getTypeFunc(t)
        if not func:
            raise pyamf.DecodeError("Unsupported ActionScript type %s" % (
                hex(ord(t)),))
        self._func_cache[t] = func

    try:
        return func()
    except __HOLE__:
        self.stream.seek(pos)
        raise
IOError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/PyAMF-0.6.1/pyamf/codec.py/Decoder.readElement
6,627
def writeSequence(self, iterable):
    """
    Encodes an iterable. The default is to write If the iterable has an al
    """
    try:
        alias = self.context.getClassAlias(iterable.__class__)
    except (__HOLE__, pyamf.UnknownClassAlias):
        self.writeList(iterable)
        return

    if alias.external:
        # a is a subclassed list with a registered alias - push to the
        # correct method
        self.writeObject(iterable)
        return

    self.writeList(iterable)
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/PyAMF-0.6.1/pyamf/codec.py/Encoder.writeSequence
6,628
def writeGenerator(self, gen):
    """
    Iterates over a generator object and encodes all that is returned.
    """
    n = getattr(gen, 'next')

    while True:
        try:
            self.writeElement(n())
        except __HOLE__:
            break
StopIteration
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/PyAMF-0.6.1/pyamf/codec.py/Encoder.writeGenerator
6,629
def getTypeFunc(self, data):
    """
    Returns a callable that will encode C{data} to C{self.stream}. If
    C{data} is unencodable, then C{None} is returned.
    """
    if data is None:
        return self.writeNull

    t = type(data)

    # try types that we know will work
    if t is str or issubclass(t, str):
        return self.writeBytes
    if t is unicode or issubclass(t, unicode):
        return self.writeString
    elif t is bool:
        return self.writeBoolean
    elif t is float:
        return self.writeNumber
    elif t in python.int_types:
        return self.writeNumber
    elif t in (list, tuple):
        return self.writeList
    elif isinstance(data, (list, tuple)):
        return self.writeSequence
    elif t is types.GeneratorType:
        return self.writeGenerator
    elif t is pyamf.UndefinedType:
        return self.writeUndefined
    elif t in (datetime.date, datetime.datetime, datetime.time):
        return self.writeDate
    elif xml.is_xml(data):
        return self.writeXML

    # check for any overridden types
    for type_, func in pyamf.TYPE_MAP.iteritems():
        try:
            if isinstance(data, type_):
                return _CustomTypeFunc(self, func)
        except __HOLE__:
            if python.callable(type_) and type_(data):
                return _CustomTypeFunc(self, func)

    # now try some types that won't encode
    if t in python.class_types:
        # can't encode classes
        return None
    elif isinstance(data, python.func_types):
        # can't encode code objects
        return None
    elif isinstance(t, types.ModuleType):
        # cannot encode module objects
        return None

    # well, we tried ..
    return self.writeObject
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/PyAMF-0.6.1/pyamf/codec.py/Encoder.getTypeFunc
6,630
def writeElement(self, data):
    """
    Encodes C{data} to AMF. If the data is not able to be matched to an AMF
    type, then L{pyamf.EncodeError} will be raised.
    """
    key = type(data)
    func = None

    try:
        func = self._func_cache[key]
    except __HOLE__:
        func = self.getTypeFunc(data)
        if func is None:
            raise pyamf.EncodeError('Unable to encode %r (type %r)' % (
                data, key))
        self._func_cache[key] = func

    func(data)
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/PyAMF-0.6.1/pyamf/codec.py/Encoder.writeElement
6,631
def next(self):
    try:
        element = self.bucket.pop(0)
    except __HOLE__:
        raise StopIteration

    start_pos = self.stream.tell()
    self.writeElement(element)
    end_pos = self.stream.tell()
    self.stream.seek(start_pos)

    return self.stream.read(end_pos - start_pos)
IndexError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/PyAMF-0.6.1/pyamf/codec.py/Encoder.next
6,632
def assert_course(p0, p1, expected):
    try:
        len(expected)
        array = True
    except __HOLE__:
        array = False

    result = geog.course(p0, p1)
    if not array:
        with pytest.raises(TypeError):
            len(result)
    assert np.allclose(result, expected)

    result = geog.course(p0, p1, bearing=True)
    # Constrain between -180 and 180
    bearing = 90 - expected
    bearing = np.where(bearing > 180, bearing - 360, bearing)
    assert np.allclose(result, bearing)
TypeError
dataset/ETHPy150Open jwass/geog/tests/test_course.py/assert_course
6,633
def DownloadLogs(self):
    """Download the requested logs.

    This will write the logs to the file designated by self.output_file,
    or to stdout if the filename is '-'. Multiple roundtrips to the
    server may be made.
    """
    if self.server:
        StatusUpdate('Downloading request logs for app %s server %s version %s.' %
                     (self.app_id, self.server, self.version_id))
    else:
        StatusUpdate('Downloading request logs for app %s version %s.' %
                     (self.app_id, self.version_id))
    tf = tempfile.TemporaryFile()
    last_offset = None
    try:
        while True:
            try:
                new_offset = self.RequestLogLines(tf, last_offset)
                if not new_offset or new_offset == last_offset:
                    break
                last_offset = new_offset
            except KeyboardInterrupt:
                StatusUpdate('Keyboard interrupt; saving data downloaded so far.')
                break
        StatusUpdate('Copying request logs to %r.' % self.output_file)
        if self.output_file == '-':
            of = sys.stdout
        else:
            try:
                of = open(self.output_file, self.write_mode)
            except __HOLE__, err:
                StatusUpdate('Can\'t write %r: %s.' % (self.output_file, err))
                sys.exit(1)
        try:
            line_count = CopyReversedLines(tf, of)
        finally:
            of.flush()
            if of is not sys.stdout:
                of.close()
    finally:
        tf.close()
    StatusUpdate('Copied %d records.' % line_count)
IOError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/appcfg.py/LogsRequester.DownloadLogs
6,634
def DateOfLogLine(line):
    """Returns a date object representing the log line's timestamp.

    Args:
        line: a log line string.

    Returns:
        A date object representing the timestamp or None if parsing fails.
    """
    m = re.compile(r'[^[]+\[(\d+/[A-Za-z]+/\d+):[^\d]*').match(line)
    if not m:
        return None
    try:
        return datetime.date(*time.strptime(m.group(1), '%d/%b/%Y')[:3])
    except __HOLE__:
        return None
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/appcfg.py/DateOfLogLine
6,635
def FindSentinel(filename, blocksize=2**16):
    """Return the sentinel line from the output file.

    Args:
        filename: The filename of the output file. (We'll read this file.)
        blocksize: Optional block size for buffering, for unit testing.

    Returns:
        The contents of the last line in the file that doesn't start with
        a tab, with its trailing newline stripped; or None if the file
        couldn't be opened or no such line could be found by inspecting
        the last 'blocksize' bytes of the file.
    """
    if filename == '-':
        StatusUpdate('Can\'t combine --append with output to stdout.')
        sys.exit(2)
    try:
        fp = open(filename, 'rb')
    except __HOLE__, err:
        StatusUpdate('Append mode disabled: can\'t read %r: %s.' % (filename, err))
        return None
    try:
        fp.seek(0, 2)
        fp.seek(max(0, fp.tell() - blocksize))
        lines = fp.readlines()
        del lines[:1]
        sentinel = None
        for line in lines:
            if not line.startswith('\t'):
                sentinel = line
        if not sentinel:
            StatusUpdate('Append mode disabled: can\'t find sentinel in %r.' %
                         filename)
            return None
        return sentinel.rstrip('\n')
    finally:
        fp.close()
IOError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/appcfg.py/FindSentinel
6,636
def EnsureDir(path):
    """Makes sure that a directory exists at the given path.

    If a directory already exists at that path, nothing is done.
    Otherwise, try to create a directory at that path with os.makedirs.
    If that fails, propagate the resulting OSError exception.

    Args:
        path: The path that you want to refer to a directory.
    """
    try:
        os.makedirs(path)
    except __HOLE__, exc:
        if not (exc.errno == errno.EEXIST and os.path.isdir(path)):
            raise
OSError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/appcfg.py/EnsureDir
6,637
def DoDownloadApp(rpcserver, out_dir, app_id, server, app_version):
    """Downloads the files associated with a particular app version.

    Args:
        rpcserver: The RPC server to use to download.
        out_dir: The directory the files should be downloaded to.
        app_id: The app ID of the app whose files we want to download.
        server: The server we want to download from. Can be:
            - None: We'll download from the default server.
            - <server>: We'll download from the specified server.
        app_version: The version number we want to download. Can be:
            - None: We'll download the latest default version.
            - <major>: We'll download the latest minor version.
            - <major>/<minor>: We'll download that exact version.
    """
    StatusUpdate('Fetching file list...')

    url_args = {'app_id': app_id}
    if server:
        url_args['server'] = server
    if app_version is not None:
        url_args['version_match'] = app_version

    result = rpcserver.Send('/api/files/list', **url_args)

    StatusUpdate('Fetching files...')

    lines = result.splitlines()
    if len(lines) < 1:
        logging.error('Invalid response from server: empty')
        return

    full_version = lines[0]
    file_lines = lines[1:]

    current_file_number = 0
    num_files = len(file_lines)
    num_errors = 0

    for line in file_lines:
        parts = line.split('|', 2)
        if len(parts) != 3:
            logging.error('Invalid response from server: expecting '
                          '"<id>|<size>|<path>", found: "%s"\n', line)
            return

        current_file_number += 1

        file_id, size_str, path = parts
        try:
            size = int(size_str)
        except __HOLE__:
            logging.error('Invalid file list entry from server: invalid size: '
                          '"%s"', size_str)
            return

        StatusUpdate('[%d/%d] %s' % (current_file_number, num_files, path))

        def TryGet():
            """A request to /api/files/get which works with the RetryWithBackoff."""
            try:
                contents = rpcserver.Send('/api/files/get', app_id=app_id,
                                          version=full_version, id=file_id)
                return True, contents
            except urllib2.HTTPError, exc:
                if exc.code == 503:
                    return False, exc
                else:
                    raise

        def PrintRetryMessage(_, delay):
            StatusUpdate('Server busy. Will try again in %d seconds.' % delay)

        success, contents = RetryWithBackoff(TryGet, PrintRetryMessage)
        if not success:
            logging.error('Unable to download file "%s".', path)
            num_errors += 1
            continue

        if len(contents) != size:
            logging.error('File "%s": server listed as %d bytes but served '
                          '%d bytes.', path, size, len(contents))
            num_errors += 1

        full_path = os.path.join(out_dir, path)

        if os.path.exists(full_path):
            logging.error('Unable to create file "%s": path conflicts with '
                          'an existing file or directory', path)
            num_errors += 1
            continue

        full_dir = os.path.dirname(full_path)
        try:
            EnsureDir(full_dir)
        except OSError, exc:
            logging.error('Couldn\'t create directory "%s": %s', full_dir, exc)
            num_errors += 1
            continue

        try:
            out_file = open(full_path, 'wb')
        except IOError, exc:
            logging.error('Couldn\'t open file "%s": %s', full_path, exc)
            num_errors += 1
            continue

        try:
            try:
                out_file.write(contents)
            except IOError, exc:
                logging.error('Couldn\'t write to file "%s": %s', full_path, exc)
                num_errors += 1
                continue
        finally:
            out_file.close()

    if num_errors > 0:
        logging.error('Number of errors: %d. See output for details.', num_errors)
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/appcfg.py/DoDownloadApp
6,638
def DoUpload(self, paths, openfunc):
    """Uploads a new appversion with the given config and files to the server.

    Args:
        paths: An iterator that yields the relative paths of the files to
            upload.
        openfunc: A function that takes a path and returns a file-like object.

    Returns:
        An appinfo.AppInfoSummary if one was returned from the server, None
        otherwise.
    """
    logging.info('Reading app configuration.')

    StatusUpdate('\nStarting update of %s' % self.Describe())

    path = ''
    try:
        self.resource_limits = GetResourceLimits(self.rpcserver, self.config)

        StatusUpdate('Scanning files on local disk.')
        num_files = 0
        for path in paths:
            file_handle = openfunc(path)
            file_classification = FileClassification(self.config, path)
            try:
                file_length = GetFileLength(file_handle)
                if file_classification.IsApplicationFile():
                    max_size = self.resource_limits['max_file_size']
                else:
                    max_size = self.resource_limits['max_blob_size']
                if file_length > max_size:
                    logging.error('Ignoring file \'%s\': Too long '
                                  '(max %d bytes, file is %d bytes)',
                                  path, max_size, file_length)
                else:
                    logging.info('Processing file \'%s\'', path)
                    self.AddFile(path, file_handle)
            finally:
                file_handle.close()
            num_files += 1
            if num_files % 500 == 0:
                StatusUpdate('Scanned %d files.' % num_files)
    except KeyboardInterrupt:
        logging.info('User interrupted. Aborting.')
        raise
    except EnvironmentError, e:
        logging.error('An error occurred processing file \'%s\': %s. Aborting.',
                      path, e)
        raise

    app_summary = None
    try:
        missing_files = self.Begin()
        if missing_files:
            StatusUpdate('Uploading %d files and blobs.' % len(missing_files))
            num_files = 0
            for missing_file in missing_files:
                file_handle = openfunc(missing_file)
                try:
                    self.UploadFile(missing_file, file_handle)
                finally:
                    file_handle.close()
                num_files += 1
                if num_files % 500 == 0:
                    StatusUpdate('Processed %d out of %s.' %
                                 (num_files, len(missing_files)))

            self.file_batcher.Flush()
            self.blob_batcher.Flush()
            self.errorblob_batcher.Flush()
            StatusUpdate('Uploaded %d files and blobs' % num_files)

        if (self.config.derived_file_type and
                appinfo.PYTHON_PRECOMPILED in self.config.derived_file_type):
            try:
                self.Precompile()
            except urllib2.HTTPError, e:
                ErrorUpdate('Error %d: --- begin server output ---\n'
                            '%s\n--- end server output ---' %
                            (e.code, e.read().rstrip('\n')))
                if e.code == 422 or self.config.runtime == 'go':
                    raise
                print >>self.error_fh, (
                    'Precompilation failed. Your app can still serve but may '
                    'have reduced startup performance. You can retry the update '
                    'later to retry the precompilation step.')

        app_summary = self.Commit()
        StatusUpdate('Completed update of %s' % self.Describe())
    except __HOLE__:
        logging.info('User interrupted. Aborting.')
        self.Rollback()
        raise
    except urllib2.HTTPError, err:
        logging.info('HTTP Error (%s)', err)
        self.Rollback()
        raise
    except:
        logging.exception('An unexpected error occurred. Aborting.')
        self.Rollback()
        raise

    logging.info('Done!')
    return app_summary
KeyboardInterrupt
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/appcfg.py/AppVersionUpload.DoUpload
6,639
def RequestLogs(self):
    """Write request logs to a file."""
    args_length = len(self.args)
    server = ''
    if args_length == 2:
        appyaml = self._ParseAppInfoFromYaml(self.args.pop(0))
        app_id = appyaml.application
        server = appyaml.server or ''
        version = appyaml.version
    elif args_length == 1:
        if not (self.options.app_id and self.options.version):
            self.parser.error(
                ('Expected the --application and --version flags if <directory> '
                 'argument is not specified.'))
    else:
        self._PrintHelpAndExit()

    if self.options.app_id:
        app_id = self.options.app_id
    if self.options.server_id:
        server = self.options.server_id
    if self.options.version:
        version = self.options.version

    if (self.options.severity is not None and
            not 0 <= self.options.severity <= MAX_LOG_LEVEL):
        self.parser.error(
            'Severity range is 0 (DEBUG) through %s (CRITICAL).' % MAX_LOG_LEVEL)

    if self.options.num_days is None:
        self.options.num_days = int(not self.options.append)

    try:
        end_date = self._ParseEndDate(self.options.end_date)
    except (TypeError, __HOLE__):
        self.parser.error('End date must be in the format YYYY-MM-DD.')

    rpcserver = self._GetRpcServer()
    logs_requester = LogsRequester(rpcserver, app_id, server, version,
                                   self.args[0], self.options.num_days,
                                   self.options.append, self.options.severity,
                                   end_date, self.options.vhost,
                                   self.options.include_vhost,
                                   self.options.include_all,
                                   time_func=self.time_func)
    logs_requester.DownloadLogs()
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/appcfg.py/AppCfgApp.RequestLogs
6,640
def RunBulkloader(self, arg_dict):
    """Invokes the bulkloader with the given keyword arguments.

    Args:
        arg_dict: Dictionary of arguments to pass to bulkloader.Run().
    """
    try:
        import sqlite3
    except __HOLE__:
        logging.error('upload_data action requires SQLite3 and the python '
                      'sqlite3 module (included in python since 2.5).')
        sys.exit(1)

    sys.exit(bulkloader.Run(arg_dict))
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/appcfg.py/AppCfgApp.RunBulkloader
6,641
def main(argv):
    logging.basicConfig(format=('%(asctime)s %(levelname)s %(filename)s:'
                                '%(lineno)s %(message)s '))
    try:
        result = AppCfgApp(argv).Run()
        if result:
            sys.exit(result)
    except __HOLE__:
        StatusUpdate('Interrupted.')
        sys.exit(1)
KeyboardInterrupt
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/tools/appcfg.py/main
6,642
def on_select_remote(self, remote_index):
    """
    After the user selects a remote, display a panel of branches that are
    present on that remote, then proceed to `on_select_branch`.
    """
    # If the user pressed `esc` or otherwise cancelled.
    if remote_index == -1:
        return
    self.selected_remote = self.remotes[remote_index]
    # Save the selected remote for automatic selection on next palette command.
    self.last_remote_used = self.selected_remote

    self.branches_on_selected_remote = self.list_remote_branches(self.selected_remote)
    current_local_branch = self.get_current_branch_name()

    try:
        pre_selected_idx = self.branches_on_selected_remote.index(
            self.selected_remote + "/" + current_local_branch)
    except __HOLE__:
        pre_selected_idx = 0

    def deferred_panel():
        self.window.show_quick_panel(
            self.branches_on_selected_remote,
            self.on_select_branch,
            flags=sublime.MONOSPACE_FONT,
            selected_index=pre_selected_idx
        )

    sublime.set_timeout(deferred_panel)
ValueError
dataset/ETHPy150Open divmain/GitSavvy/core/commands/pull.py/GsPullCommand.on_select_remote
6,643
def __lt__(self, other):
    # convenience
    obj = self.obj
    if isinstance(other, Comparable):
        other = other.obj
    # None < everything else
    if other is None:
        return False
    if obj is None:
        return True
    # numbers < everything else (except None)
    if isinstance(obj, numeric_types) \
            and not isinstance(other, numeric_types):
        return True
    if not isinstance(obj, numeric_types) \
            and isinstance(other, numeric_types):
        return False
    # binary < unicode
    if isinstance(obj, text_type) and isinstance(other, binary_type):
        return False
    if isinstance(obj, binary_type) and isinstance(other, text_type):
        return True
    try:
        # attempt native comparison
        return obj < other
    except __HOLE__:
        # fall back to comparing type names
        return _typestr(obj) < _typestr(other)
TypeError
dataset/ETHPy150Open alimanfoo/petl/petl/comparison.py/Comparable.__lt__
6,644
def __getitem__(self, key):
    if self.is_remote:  # pragma: no cover
        getitem = partial(robust_getitem, catch=RuntimeError)
    else:
        getitem = operator.getitem

    try:
        data = getitem(self.array, key)
    except __HOLE__:
        # Catch IndexError in netCDF4 and return a more informative error
        # message. This is most often called when an unsorted indexer is
        # used before the data is loaded from disk.
        msg = ('The indexing operation you are attempting to perform is '
               'not valid on netCDF4.Variable object. Try loading your '
               'data into memory first by calling .load().')
        if not PY3:
            import traceback
            msg += '\n\nOriginal traceback:\n' + traceback.format_exc()
        raise IndexError(msg)

    if self.ndim == 0:
        # work around for netCDF4-python's broken handling of 0-d
        # arrays (slicing them always returns a 1-dimensional array):
        # https://github.com/Unidata/netcdf4-python/pull/220
        data = np.asscalar(data)
    return data
IndexError
dataset/ETHPy150Open pydata/xarray/xarray/backends/netCDF4_.py/NetCDF4ArrayWrapper.__getitem__
6,645
def _nc4_group(ds, group, mode):
    if group in set([None, '', '/']):
        # use the root group
        return ds
    else:
        # make sure it's a string
        if not isinstance(group, basestring):
            raise ValueError('group must be a string or None')
        # support path-like syntax
        path = group.strip('/').split('/')
        for key in path:
            try:
                ds = ds.groups[key]
            except __HOLE__ as e:
                if mode != 'r':
                    ds = ds.createGroup(key)
                else:
                    # wrap error to provide slightly more helpful message
                    raise IOError('group not found: %s' % key, e)
        return ds
KeyError
dataset/ETHPy150Open pydata/xarray/xarray/backends/netCDF4_.py/_nc4_group
6,646
def get_process_list():
    procs = dict()
    for proc in psutil.process_iter():
        try:
            pinfo = proc.as_dict(attrs=['pid', 'cmdline'])
            procs[pinfo['pid']] = pinfo['cmdline'][0]
        except (psutil.NoSuchProcess, IndexError, __HOLE__):
            pass
    return procs
TypeError
dataset/ETHPy150Open gooddata/smoker/tests/server/test_plugins.py/get_process_list
6,647
def get_TestSuite_from_module(mod, config):
    """Get an existing suite from a module."""
    for methname in ("get_suite", "GetSuite"):
        try:
            meth = getattr(mod, methname)
            return meth(config)
        except __HOLE__:
            continue
    raise module.ObjectImportError("Module %r does not have a get_suite() function." % (module,))
AttributeError
dataset/ETHPy150Open kdart/pycopia/QA/pycopia/QA/testloader.py/get_TestSuite_from_module
6,648
@staticmethod
def _get_mode(mode_arg, i_var_count, d_var_count):
    """
    Tries to return an appropriate mode class.
    Intended to be called only by __new__.

    mode_arg
        Can be a string or a class. If it is a
        PlotMode subclass, it is simply returned.
        If it is a string, it can an alias for
        a mode or an empty string. In the latter
        case, we try to find a default mode for
        the i_var_count and d_var_count.

    i_var_count
        The number of independent variables
        needed to evaluate the d_vars.

    d_var_count
        The number of dependent variables;
        usually the number of functions to
        be evaluated in plotting.

    For example, a Cartesian function y = f(x) has
    one i_var (x) and one d_var (y). A parametric
    form x,y,z = f(u,v), f(u,v), f(u,v) has two
    two i_vars (u,v) and three d_vars (x,y,z).
    """
    # if the mode_arg is simply a PlotMode class,
    # check that the mode supports the numbers
    # of independent and dependent vars, then
    # return it
    try:
        m = None
        if issubclass(mode_arg, PlotMode):
            m = mode_arg
    except __HOLE__:
        pass
    if m:
        if not m._was_initialized:
            raise ValueError(("To use unregistered plot mode %s "
                              "you must first call %s._init_mode().")
                             % (m.__name__, m.__name__))
        if d_var_count != m.d_var_count:
            raise ValueError(("%s can only plot functions "
                              "with %i dependent variables.")
                             % (m.__name__, m.d_var_count))
        if i_var_count > m.i_var_count:
            raise ValueError(("%s cannot plot functions "
                              "with more than %i independent "
                              "variables.")
                             % (m.__name__, m.i_var_count))
        return m
    # If it is a string, there are two possibilities.
    if isinstance(mode_arg, str):
        i, d = i_var_count, d_var_count
        if i > PlotMode._i_var_max:
            raise ValueError(var_count_error(True, True))
        if d > PlotMode._d_var_max:
            raise ValueError(var_count_error(False, True))
        # If the string is '', try to find a suitable
        # default mode
        if not mode_arg:
            return PlotMode._get_default_mode(i, d)
        # Otherwise, interpret the string as a mode
        # alias (e.g. 'cartesian', 'parametric', etc)
        else:
            return PlotMode._get_aliased_mode(mode_arg, i, d)
    else:
        raise ValueError("PlotMode argument must be "
                         "a class or a string")
TypeError
dataset/ETHPy150Open sympy/sympy/sympy/plotting/pygletplot/plot_mode.py/PlotMode._get_mode
6,649
@staticmethod
def _get_default_mode(i, d, i_vars=-1):
    if i_vars == -1:
        i_vars = i
    try:
        return PlotMode._mode_default_map[d][i]
    except __HOLE__:
        # Keep looking for modes in higher i var counts
        # which support the given d var count until we
        # reach the max i_var count.
        if i < PlotMode._i_var_max:
            return PlotMode._get_default_mode(i + 1, d, i_vars)
        else:
            raise ValueError(("Couldn't find a default mode "
                              "for %i independent and %i "
                              "dependent variables.") % (i_vars, d))
TypeError
dataset/ETHPy150Open sympy/sympy/sympy/plotting/pygletplot/plot_mode.py/PlotMode._get_default_mode
6,650
@staticmethod
def _get_aliased_mode(alias, i, d, i_vars=-1):
    if i_vars == -1:
        i_vars = i
    if alias not in PlotMode._mode_alias_list:
        raise ValueError(("Couldn't find a mode called"
                          " %s. Known modes: %s.")
                         % (alias, ", ".join(PlotMode._mode_alias_list)))
    try:
        return PlotMode._mode_map[d][i][alias]
    except __HOLE__:
        # Keep looking for modes in higher i var counts
        # which support the given d var count and alias
        # until we reach the max i_var count.
        if i < PlotMode._i_var_max:
            return PlotMode._get_aliased_mode(alias, i + 1, d, i_vars)
        else:
            raise ValueError(("Couldn't find a %s mode "
                              "for %i independent and %i "
                              "dependent variables.")
                             % (alias, i_vars, d))
TypeError
dataset/ETHPy150Open sympy/sympy/sympy/plotting/pygletplot/plot_mode.py/PlotMode._get_aliased_mode
6,651
@staticmethod
def _interpret_args(args):
    interval_wrong_order = "PlotInterval %s was given before any function(s)."
    interpret_error = "Could not interpret %s as a function or interval."

    functions, intervals = [], []
    if isinstance(args[0], GeometryEntity):
        for coords in list(args[0].arbitrary_point()):
            functions.append(coords)
        intervals.append(PlotInterval.try_parse(args[0].plot_interval()))
    else:
        for a in args:
            i = PlotInterval.try_parse(a)
            if i is not None:
                if len(functions) == 0:
                    raise ValueError(interval_wrong_order % (str(i)))
                else:
                    intervals.append(i)
            else:
                if is_sequence(a, include=str):
                    raise ValueError(interpret_error % (str(a)))
                try:
                    f = sympify(a)
                    functions.append(f)
                except __HOLE__:
                    raise ValueError(interpret_error % str(a))

    return functions, intervals
TypeError
dataset/ETHPy150Open sympy/sympy/sympy/plotting/pygletplot/plot_mode.py/PlotMode._interpret_args
6,652
def deftgt(self, forme=None):
    if forme is None:
        forme = self
    try:
        tgtview = self.tgtview
    except __HOLE__:
        self.env.deftgt(forme)
    else:
        if forme.tgtfullname in tgtview:
            self.error('Duplicate definition of %r' % forme.tgtfullname,
                       forme.src.node)
        tgtview[forme.tgtfullname] = forme
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/guppy-0.1.10/guppy/gsl/Main.py/Description.deftgt
6,653
def get_descr_by_name(self, name, context=None):
    if name.startswith(self.mod.tgt_prefix):
        return self.get_descr_by_tgt_name(name, context)
    e = self
    parts = name.split('.')
    for part in parts:
        try:
            e = e.localview[part]
        except __HOLE__:
            assert context
            self.env.error('Undefined: %r in %r.' % (part, e.get_id_name()),
                           context, exception=UndefinedError)
    e.resolve_lookuped()
    return e
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/guppy-0.1.10/guppy/gsl/Main.py/Package.get_descr_by_name
6,654
def __repr__(self):
    try:
        return self.cond_expr
    except __HOLE__:
        return Description.__repr__(self)
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/guppy-0.1.10/guppy/gsl/Main.py/ConditionRef.__repr__
6,655
def get_name(self):
    try:
        return self.get_arg_name()
    except __HOLE__:
        return '?'
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/guppy-0.1.10/guppy/gsl/Main.py/Arg.get_name
6,656
def _open(fullpath):
    try:
        size = os.stat(fullpath).st_size
    except OSError, err:
        # Permission denied - ignore the file
        print_debug("%s: permission denied: %s" % (fullpath, err))
        return None
    if size > 1024*1024:  # too big
        print_debug("%s: the file is too big: %d bytes" % (fullpath, size))
        return None
    try:
        return open(fullpath, 'rU')
    except __HOLE__, err:
        # Access denied, or a special file - ignore it
        print_debug("%s: access denied: %s" % (fullpath, err))
        return None
IOError
dataset/ETHPy150Open Southpaw-TACTIC/TACTIC/src/context/client/tactic-api-python-4.0.api04/Tools/Scripts/pysource.py/_open
6,657
def test_cache_options():
    try:
        from chest import Chest
    except __HOLE__:
        return
    cache = Chest()

    def inc2(x):
        assert 'y' in cache
        return x + 1

    with dask.set_options(cache=cache):
        get_sync({'x': (inc2, 'y'), 'y': 1}, 'x')
ImportError
dataset/ETHPy150Open dask/dask/dask/tests/test_async.py/test_cache_options
6,658
def precedence(item):
    """
    Returns the precedence of a given object.
    """
    if hasattr(item, "precedence"):
        return item.precedence
    try:
        mro = item.__class__.__mro__
    except __HOLE__:
        return PRECEDENCE["Atom"]
    for i in mro:
        n = i.__name__
        if n in PRECEDENCE_FUNCTIONS:
            return PRECEDENCE_FUNCTIONS[n](item)
        elif n in PRECEDENCE_VALUES:
            return PRECEDENCE_VALUES[n]
    return PRECEDENCE["Atom"]
AttributeError
dataset/ETHPy150Open sympy/sympy/sympy/printing/precedence.py/precedence
6,659
def adb_shell(device, command, timeout=None, check_exit_code=False, as_root=False):  # NOQA
    # pylint: disable=too-many-branches, too-many-locals, too-many-statements
    _check_env()
    if as_root:
        command = 'echo \'{}\' | su'.format(escape_single_quotes(command))
    device_string = '-s {}'.format(device) if device else ''
    full_command = 'adb {} shell "{}"'.format(device_string,
                                              escape_double_quotes(command))
    logger.debug(full_command)
    if check_exit_code:
        actual_command = "adb {} shell '({}); echo; echo $?'".format(
            device_string, escape_single_quotes(command))
        try:
            raw_output, error = check_output(actual_command, timeout, shell=True)
        except CalledProcessErrorWithStderr as e:
            raw_output = e.output
            error = e.error
            exit_code = e.returncode
            if exit_code == 1:
                logger.debug("Exit code 1 could be either the return code of the command or mean ADB failed")

        if raw_output:
            if raw_output.endswith('\r\n'):
                newline = '\r\n'
            elif raw_output.endswith('\n'):
                newline = '\n'
            else:
                raise WAError("Unknown new line separator in: {}".format(raw_output))
            try:
                output, exit_code, _ = raw_output.rsplit(newline, 2)
            except __HOLE__:
                exit_code, _ = raw_output.rsplit(newline, 1)
                output = ''
        else:  # raw_output is empty
            exit_code = '969696'  # just because
            output = ''

        exit_code = exit_code.strip()
        if exit_code.isdigit():
            if int(exit_code):
                message = 'Got exit code {}\nfrom: {}\nSTDOUT: {}\nSTDERR: {}'.format(
                    exit_code, full_command, output, error)
                raise DeviceError(message)
            elif am_start_error.findall(output):
                message = 'Could not start activity; got the following:'
                message += '\n{}'.format(am_start_error.findall(output)[0])
                raise DeviceError(message)
        else:  # not all digits
            if am_start_error.findall(output):
                message = 'Could not start activity; got the following:'
                message += '\n{}'.format(am_start_error.findall(output)[0])
                raise DeviceError(message)
            else:
                raise DeviceError('adb has returned early; did not get an exit code. Was kill-server invoked?')
    else:  # do not check exit code
        try:
            output, _ = check_output(full_command, timeout, shell=True)
        except CalledProcessErrorWithStderr as e:
            output = e.output
            error = e.error
            exit_code = e.returncode
            if e.returncode == 1:
                logger.debug("Got Exit code 1, could be either the return code of the command or mean ADB failed")
    return output
ValueError
dataset/ETHPy150Open ARM-software/workload-automation/wlauto/utils/android.py/adb_shell
6,660
def filer_staticmedia_prefix():
    """
    Returns the string contained in the setting FILER_STATICMEDIA_PREFIX.
    """
    try:
        from .. import settings
    except __HOLE__:
        return ''
    return settings.FILER_STATICMEDIA_PREFIX
ImportError
dataset/ETHPy150Open django-leonardo/django-leonardo/leonardo/module/media/templatetags/filermedia.py/filer_staticmedia_prefix
6,661
def biblio(self, aliases, provider_url_template=None, cache_enabled=True):
    aliases_dict = provider.alias_dict_from_tuples(aliases)
    if "blog" in aliases_dict:
        id = aliases_dict["blog"][0]

    # Only lookup biblio for items with appropriate ids
    if not id:
        #self.logger.debug(u"%s not checking biblio, no relevant alias" % (self.provider_name))
        return None

    if not provider_url_template:
        provider_url_template = self.biblio_url_template
    self.logger.debug(u"%s getting biblio for %s" % (self.provider_name, id))

    # set up stuff that is true for all blogs, wordpress and not
    biblio_dict = {}
    biblio_dict["url"] = id
    biblio_dict["account"] = provider.strip_leading_http(id)
    biblio_dict["is_account"] = True  # special key to tell webapp to render as genre heading

    # now add things that are true just for wordpress blogs
    if not provider_url_template:
        provider_url_template = self.biblio_url_template
    url = self._get_templated_url(provider_url_template, id, "biblio")

    # try to get a response from the data provider
    response = self.http_get(url, cache_enabled=cache_enabled)
    if (response.status_code == 200) and ("name" in response.text):
        biblio_dict["hosting_platform"] = "wordpress.com"
        try:
            biblio_dict.update(self._extract_biblio(response.text, id))
        except (__HOLE__, TypeError):
            pass

    return biblio_dict
AttributeError
dataset/ETHPy150Open Impactstory/total-impact-core/totalimpact/providers/wordpresscom.py/Wordpresscom.biblio
6,662
def wordpress_post_id_from_nid(self, nid):
    try:
        return json.loads(nid)["wordpress_post_id"]
    except (KeyError, __HOLE__):
        return None
ValueError
dataset/ETHPy150Open Impactstory/total-impact-core/totalimpact/providers/wordpresscom.py/Wordpresscom.wordpress_post_id_from_nid
6,663
def blog_url_from_nid(self, nid):
    try:
        return json.loads(nid)["blog_url"]
    except (__HOLE__, ValueError):
        return None

# default method; providers can override
KeyError
dataset/ETHPy150Open Impactstory/total-impact-core/totalimpact/providers/wordpresscom.py/Wordpresscom.blog_url_from_nid
6,664
def get_aliases(self, query):
    query_url = "{api_url}/provider/{provider_name}/memberitems/{query}?method=sync".format(
        api_url=api_url,
        provider_name=self.provider_name,
        query=query
    )
    start = time.time()
    logger.info(u"getting aliases from the {provider} importer, using url '{url}'".format(
        provider=self.provider_name, url=query_url))
    r = requests.get(query_url)
    try:
        response = json.loads(r.text)
        aliases = response["memberitems"]
        logger.debug(u"got some aliases from the http call: " + str(aliases))
    except __HOLE__:
        logger.warning(u"{provider} importer returned no json for {query}".format(
            provider=self.provider_name, query="query"))
        aliases = []

    # annoyingly, some providers return lists-as-IDs, which must be joined with a comma
    aliases = [(namespace, id) if isinstance(id, str) else (namespace, ",".join(id))
               for namespace, id in aliases]

    logger.info(u"{provider} importer got {num_aliases} aliases with username '{q}' in {elapsed} seconds.".format(
        provider=self.provider_name,
        num_aliases=len(aliases),
        q=query,
        elapsed=round(time.time() - start, 2)))

    return aliases
ValueError
dataset/ETHPy150Open Impactstory/total-impact-core/totalimpact/fakes.py/Importer.get_aliases
6,665
def poll(self, max_time=60):
    logger.info(u"polling collection '{collection_id}'".format(
        collection_id=self.collection_id))
    still_updating = True
    tries = 0
    start = time.time()
    while still_updating:
        url = api_url + "/collection/" + self.collectionId
        resp = requests.get(url, config={'verbose': None})
        try:
            items = json.loads(resp.text)["items"]
        except __HOLE__:
            items = []
            logger.warning(u"get '{url}' returned no json, only '{resp}') ".format(
                url=url, resp=resp.text))
        tries += 1

        currently_updating_flags = [True for item in items if item["currently_updating"]]
        num_currently_updating = len(currently_updating_flags)
        num_finished_updating = len(items) - num_currently_updating

        logger.info(u"{num_done} of {num_total} items done updating after {tries} requests.".format(
            num_done=num_finished_updating, num_total=len(items), tries=tries))
        logger.debug(u"got these items back: " + str(items))

        elapsed = time.time() - start
        if resp.status_code == 200:
            logger.info(u"collection '{id}' with {num_items} items finished updating in {elapsed} seconds.".format(
                id=self.collection_id, num_items=len(items), elapsed=round(elapsed, 2)))
            return True
        elif elapsed > max_time:
            raise Exception(
                "max polling time ({max} secs) exceeded for collection {id}. These items didn't update: {item_ids}".format(
                    max=max_time, id=self.collection_id,
                    item_ids=", ".join([item["_id"] for item in items if item["currently_updating"]])))
            return False
        sleep(0.5)
ValueError
dataset/ETHPy150Open Impactstory/total-impact-core/totalimpact/fakes.py/ReportPage.poll
6,666
def get_dois(self, num=1):
    start = time.time()
    dois = []
    url = "http://random.labs.crossref.org/dois?from=2000&count=" + str(num)
    logger.info(u"getting {num} random dois with IdSampler, using {url}".format(
        num=num, url=url))
    try:
        r = requests.get(url, timeout=10)
    except Timeout:
        logger.warning(u"the random doi service isn't working right now (timed out); sending back an empty list.")
        return dois

    if r.status_code == 200:
        try:
            dois = json.loads(r.text)
            logger.info(u"IdSampler got {count} random dois back in {elapsed} seconds".format(
                count=len(dois), elapsed=round(time.time() - start, 2)))
            logger.debug(u"IdSampler got these dois back: " + str(dois))
        except __HOLE__:
            pass

    if not dois:
        logger.warning(u"the random doi service isn't working right now (got error code); sending back an empty list.")

    return dois
ValueError
dataset/ETHPy150Open Impactstory/total-impact-core/totalimpact/fakes.py/IdSampler.get_dois
6,667
def parse_args():
    description = 'Silly Server for mocking real http servers'
    options = [
        {
            "dest": "root_dir",
            "required": False,
            "metavar": "/dir/somedir",
            "help": """Directory where your fake responses are waiting for me.
                       If not provided - default response will be used everywhere.""",
            "type": str,
            "key": "-d",
        },
        {
            "dest": "port",
            "required": False,
            "metavar": "port",
            "help": "Port to listen on. Default is 8000.",
            "type": int,
            "key": "-p"
        }
    ]
    try:
        import argparse
        parser = argparse.ArgumentParser(description=description)
        for o in options:
            parser.add_argument(o["key"], dest=o["dest"], required=o["required"],
                                metavar=o["metavar"], help=o["help"], type=o["type"])
        return vars(parser.parse_args())
    except __HOLE__:
        import optparse
        parser = optparse.OptionParser(description=description)
        for o in options:
            parser.add_option(o["key"], dest=o["dest"], metavar=o["metavar"],
                              help=o["help"], type=o["type"])
        return vars(parser.parse_args()[0])
ImportError
dataset/ETHPy150Open bak1an/silly-server/ss.py/parse_args
6,668
def log(self):
    """ """
    for log in self.logs:
        log()

    try:
        if (self.store.stamp - self.flushStamp) >= self.flushPeriod:
            console.profuse("Logger {0} Flushed at {1}, previous flush at {2}\n".format(
                self.name, self.store.stamp, self.flushStamp))
            self.flush()
            self.flushStamp = self.store.stamp
    except __HOLE__:
        self.flushStamp = self.store.stamp  #forces flushStamp to be a number once store.stamp is
TypeError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/logging.py/Logger.log
6,669
def createPath(self, prefix='./'):
    """creates log directory path
       creates physical directories on disk
    """
    try:
        #if reopened too quickly could be same so we make a do until kludge
        path = self.path
        i = 0
        while path == self.path:  #do until keep trying until different
            dt = datetime.datetime.now()
            path = "{0}_{1}_{2:04d}{3:02d}{4:02d}_{5:02d}{6:02d}{7:02d}".format(
                prefix, self.name, dt.year, dt.month, dt.day,
                dt.hour, dt.minute, dt.second + i)
            path = os.path.abspath(path)  #convert to proper absolute path
            i += 1

        if not os.path.exists(path):
            os.makedirs(path)

    except __HOLE__ as ex:
        console.terse("Error: creating log directory '{0}'\n".format(ex))
        return False

    self.path = path
    console.concise(" Created Logger {0} Directory= '{1}'\n".format(
        self.name, self.path))

    return True
OSError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/logging.py/Logger.createPath
6,670
def reopen(self):
    """closes if open then reopens
    """
    self.close()  #innocuous to call close() on unopened file

    try:
        self.file = open(self.path, 'a+')
    except __HOLE__ as ex:
        console.terse("Error: creating log file '{0}'\n".format(ex))
        self.file = None
        return False

    console.concise(" Created Log file '{0}'\n".format(self.path))

    return True
IOError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/logging.py/Log.reopen
6,671
def log(self):
    """called by conditional actions
    """
    self.stamp = self.store.stamp  #should be different if binary kind

    cf = io.StringIO()  #use string io faster than concatenation

    try:
        text = self.formats['_time'] % self.stamp
    except __HOLE__:
        text = '%s' % self.stamp
    cf.write(ns2u(text))

    for tag, loggee in self.loggees.items():
        if loggee:  #len non zero
            for field, value in loggee.items():
                try:
                    text = self.formats[tag][field] % value
                except TypeError:
                    text = '%s' % value
                cf.write(ns2u(text))
        else:  #no items so just write tab
            cf.write(u'\t')

    cf.write(u'\n')

    try:
        self.file.write(cf.getvalue())
    except ValueError as ex:  #if self.file already closed then ValueError
        console.terse("{0}\n".format(ex))

    cf.close()
TypeError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/logging.py/Log.log
6,672
def logSequence(self, fifo=False):
    """ called by conditional actions
        Log and remove all elements of sequence
        Default is lifo order
        If fifo Then log in fifo order
        head is left tail is right
        lifo is log tail to head
        fifo is log head to tail
    """
    self.stamp = self.store.stamp  #should be different if binary kind

    cf = io.StringIO()  #use string io faster than concatenation

    try:
        stamp = self.formats['_time'] % self.stamp
    except __HOLE__:
        stamp = '%s' % self.stamp

    if self.loggees:
        tag, loggee = self.loggees.items()[0]  # only works for one loggee
        if loggee:  # not empty
            field, value = loggee.items()[0]  # only first item
            d = deque()
            if isinstance(value, MutableSequence):  #has pop method
                while value:  # not empty
                    d.appendleft(value.pop())  #remove and copy in order
            elif isinstance(value, MutableMapping):  # has popitem method
                while value:  # not empty
                    d.appendleft(value.popitem())  #remove and copy in order
            else:  #not mutable sequence or mapping so log normally
                d.appendleft(value)

            while d:  # not empty
                if fifo:
                    element = d.popleft()
                else:  #lifo
                    element = d.pop()

                try:
                    text = self.formats[tag][field] % (element, )
                except TypeError:
                    text = '%s' % element
                cf.write(u"%s\t%s\n" % (stamp, text))

    try:
        self.file.write(cf.getvalue())
    except ValueError as ex:  #if self.file already closed then ValueError
        console.terse("{0}\n".format(ex))

    cf.close()
TypeError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/logging.py/Log.logSequence
6,673
def change(self):
    """log if changed
       logs once and then only if changed
       requires that self.prepare has been called otherwise
       fields in self.lasts won't match fields in log
    """
    if self.stamp is None:  #Always log at least once even if not updated
        self.log()
        return

    change = False
    for tag, loggee in self.loggees.items():
        last = self.lasts[tag]  #get last Data object for each loggee
        for field, value in loggee.items():
            try:
                if getattr(last, field) != value:
                    change = True
                    setattr(last, field, value)
            except __HOLE__ as ex:  #
                console.terse("Warning: Log {0}, new runtime field"
                              " '{1}' for loggee {2}\n".format(
                                  self.name, field, loggee.name))

    if change:
        self.log()
AttributeError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/logging.py/Log.change
6,674
def _checkFilePath(self, path):
    try:
        if not os.path.exists(path):
            os.makedirs(path)
    except __HOLE__:
        log = "Could not create " + path
        self.logger.writeLog(syslog.LOG_ERR, "%s" % (str(log)))
OSError
dataset/ETHPy150Open lmco/laikaboss/laikamilter.py/LaikaMilter._checkFilePath
6,675
def _checkOKToContinueWithOpenFiles(self):
    okToContinue = True
    try:
        pid = os.getpid()
        try:
            fd_dir = os.path.join('/proc/', str(pid), 'fd/')
        except:
            self.logger.writeLog(syslog.LOG_DEBUG, "Open Files: Problem With PID: " + str(pid))
        numOpenFilesOutput = 0
        for file in os.listdir(fd_dir):
            numOpenFilesOutput += 1
        if (int(numOpenFilesOutput) > int(self.milterConfig.maxFiles)):
            self.logger.writeLog(syslog.LOG_ERR, "Open Files: " + str(numOpenFilesOutput) + ", Returning " + str(self.milterConfig.dispositionModes["OverLimit".lower()]) + " to avoid shutdown at " + str(self.milterConfig.maxFiles))
            okToContinue = False
        else:
            self.logger.writeLog(syslog.LOG_DEBUG, self.milterConfig.milterInstance + " Open Files: " + str(numOpenFilesOutput) + " of " + str(self.milterConfig.maxFiles))
    except __HOLE__:
        self.logger.writeLog(syslog.LOG_ERR, "Value Error in checkOpenFiles")
    except Exception as e:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        print "ERROR EOM %s" % (repr(traceback.format_exception(exc_type, exc_value, exc_traceback)))
        self.logger.writeLog(syslog.LOG_ERR, "Error in checkOpenFiles")
    return okToContinue

#_dispositionMessage main helper function to open dispositioner class to disposition message.
ValueError
dataset/ETHPy150Open lmco/laikaboss/laikamilter.py/LaikaMilter._checkOKToContinueWithOpenFiles
6,676
def _writeFileToDisk(self):
    if self.archiveFileName:
        try:
            fp = open(self.archiveFileName, "wb")
            fp.write(self.fileBuffer)
            fp.flush()
            fp.close()
        except __HOLE__:
            log = self.uuid + " Could not open " + self.archiveFileName + " for writing"
            self.logger.writeLog(syslog.LOG_ERR, "%s" % (str(log)))

#Write Custom header to the file pointer to be written to disk
IOError
dataset/ETHPy150Open lmco/laikaboss/laikamilter.py/LaikaMilter._writeFileToDisk
6,677
def deserialize(data):
    try:
        if not isinstance(data, str):
            data = data.decode('utf-8')
        data = json.loads(data)
        if 'key' not in data or 'uri' not in data:
            raise ValueError("Missing 'key' or 'uri' fields.")
        return Account(key=load_private_key(data['key'].encode('utf8')),
                       uri=data['uri'])
    except (__HOLE__, ValueError, AttributeError) as e:
        raise IOError("Invalid account structure: {}".format(e))
TypeError
dataset/ETHPy150Open veeti/manuale/manuale/account.py/deserialize
6,678
def update_hash_dict(filehash, filename):
    """
    Opens the pickled hash dictionary, adds an entry, and dumps it back.
    """
    try:
        with open(file_path + '/hash_dict.pickle', 'rb') as f:
            hash_dict = cPickle.load(f)
    except __HOLE__:
        hash_dict = {}
    hash_dict.update({filename: filehash})
    with open(os.path.join(file_path, 'hash_dict.pickle'), 'wb') as f:
        cPickle.dump(hash_dict, f)
IOError
dataset/ETHPy150Open statsmodels/statsmodels/tools/hash_funcs.py/update_hash_dict
6,679
def check_hash(rawfile, filename):
    """
    Returns True if hash does not match the previous one.
    """
    try:
        with open(file_path + '/hash_dict.pickle', 'rb') as f:
            hash_dict = cPickle.load(f)
    except __HOLE__:
        hash_dict = {}
    try:
        checkhash = hash_dict[filename]
    except:
        checkhash = None

    filehash = get_hash(rawfile)
    if filehash == checkhash:
        return False, None
    return True, filehash
IOError
dataset/ETHPy150Open statsmodels/statsmodels/tools/hash_funcs.py/check_hash
6,680
def parse_content(self, text):
    """parse section to formal format

    raw_content: {title: section(with title)}. For `help` access.
    formal_content: {title: section} but the section has been dedented
    without title. For parse instance
    """
    raw_content = self.raw_content
    raw_content.clear()
    formal_collect = {}

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        try:
            split = self.visible_empty_line_re.split(text)
        except __HOLE__:  # python >= 3.5
            split = [text]

    option_split_re = self.option_split_re
    name = re.compile(re.escape(self.option_name), re.IGNORECASE)

    for text in filter(lambda x: x and x.strip(), split):
        # logger.warning('get options group:\n%r', text)
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            try:
                split_options = option_split_re.split(text)
            except ValueError:  # python >= 3.5
                continue

        split_options.pop(0)

        for title, section in zip(split_options[::2], split_options[1::2]):
            prefix, end = name.split(title)
            prefix = prefix.strip()
            section = section.rstrip()
            if end.endswith('\n'):
                formal = section
            else:
                formal = ' ' * len(title) + section
            formal_collect.setdefault(prefix, []).append(formal)

            # logger.error((title, section))
            if prefix in raw_content:
                # TODO: better handling way?
                if self.namedoptions:
                    log = logger.warning
                else:
                    log = logger.debug
                log('duplicated options section %s', prefix)
                raw_content[prefix] += '\n%s%s' % (title, section)
            else:
                raw_content[prefix] = title + section

    if formal_collect:
        for each_title, values in formal_collect.items():
            value = '\n'.join(map(textwrap.dedent, values))
            formal_collect[each_title] = value

    self.formal_content = formal_collect
ValueError
dataset/ETHPy150Open TylerTemp/docpie/docpie/parser.py/OptionParser.parse_content
6,681
def parse_line_to_lis(self, line, name=None):
    if name is not None:
        _, find_name, line = line.partition(name)
        if not find_name:
            raise DocpieError(
                '%s is not in usage pattern %s' % (name, _))

    # wrapped_space = self.wrap_symbol_re.sub(r' \1 ', line.strip())
    # logger.debug(wrapped_space)
    # result = [x for x in self.split_re.split(wrapped_space) if x]

    angle_bracket_re = self.angle_bracket_re
    wrap_symbol_re = self.wrap_symbol_re

    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        try:
            sep_by_angle = angle_bracket_re.split(line)
        except __HOLE__:
            sep_by_angle = [line]

    wrap_space = []
    for index, each_block in enumerate(sep_by_angle):
        if index % 2:
            wrap_space.append(each_block)
            continue

        if not each_block:
            continue

        warped_space = wrap_symbol_re.sub(r' \1 ', each_block)
        wrap_space.append(warped_space)

    wraped = ''.join(wrap_space)

    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        try:
            sep = self.split_re.split(wraped)
        except ValueError:
            sep = [wraped]

    result = list(filter(None, sep))

    # drop name
    if name is None:
        result.pop(0)

    return result
ValueError
dataset/ETHPy150Open TylerTemp/docpie/docpie/parser.py/UsageParser.parse_line_to_lis
6,682
def __init__(self, methodName='runTest'):
    unittest.TestCase.__init__(self, methodName)
    self.host = "localhost:%d" % self.SERVER_PORT
    self.connected = False
    self.handle = -1
    logdir = os.environ.get("ZKPY_LOG_DIR")
    logfile = os.path.join(logdir, self.__class__.__name__ + ".log")
    try:
        f = open(logfile, "w")
        zookeeper.set_log_stream(f)
    except __HOLE__:
        print("Couldn't open " + logfile + " for writing")
IOError
dataset/ETHPy150Open francelabs/datafari/debian7/zookeeper/contrib/zkpython/src/test/zktestbase.py/TestBase.__init__
6,683
def unregister(self, model=None):
    '''Unregister a ``model`` if provided, otherwise it unregister all
    registered models. Return a list of unregistered model managers or
    ``None`` if no managers were removed.'''
    if model is not None:
        try:
            manager = self._registered_models.pop(model)
        except __HOLE__:
            return
        if self._registered_names.get(manager._meta.name) == manager:
            self._registered_names.pop(manager._meta.name)
        return [manager]
    else:
        managers = list(self._registered_models.values())
        self._registered_models.clear()
        return managers
KeyError
dataset/ETHPy150Open lsbardel/python-stdnet/stdnet/odm/mapper.py/Router.unregister
6,684
def model_iterator(application, include_related=True, exclude=None):
    '''A generator of :class:`StdModel` classes found in *application*.

    :parameter application: A python dotted path or an iterable over
        python dotted-paths where models are defined.

    Only models defined in these paths are considered. For example::

        from stdnet.odm import model_iterator

        APPS = ('stdnet.contrib.searchengine',
                'stdnet.contrib.timeseries')

        for model in model_iterator(APPS):
            ...

    '''
    if exclude is None:
        exclude = set()
    application = native_str(application)
    if ismodule(application) or isinstance(application, str):
        if ismodule(application):
            mod, application = application, application.__name__
        else:
            try:
                mod = import_module(application)
            except ImportError:
                # the module is not there
                mod = None
        if mod:
            label = application.split('.')[-1]
            try:
                mod_models = import_module('.models', application)
            except __HOLE__:
                mod_models = mod
            label = getattr(mod_models, 'app_label', label)
            models = set()
            for name in dir(mod_models):
                value = getattr(mod_models, name)
                meta = getattr(value, '_meta', None)
                if isinstance(value, ModelType) and meta:
                    for model in models_from_model(
                            value, include_related=include_related,
                            exclude=exclude):
                        if (model._meta.app_label == label
                                and model not in models):
                            models.add(model)
                            yield model
    else:
        for app in application:
            for m in model_iterator(app):
                yield m
ImportError
dataset/ETHPy150Open lsbardel/python-stdnet/stdnet/odm/mapper.py/model_iterator
6,685
@property
def isAbstract(self):
    if self.subtreeRollUp:
        return self.subtreeRollUp == CHILDREN_BUT_NO_ROLLUP
    try:
        try:
            return self.abstract  # ordinate may have an abstract attribute
        except __HOLE__:  # if none use axis object
            return self.definitionNode.isAbstract
    except AttributeError:  # axis may never be abstract
        return False
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelRenderingObject.py/StructuralNode.isAbstract
6,686
@property
def tagSelectors(self):
    try:
        return self._tagSelectors
    except __HOLE__:
        if self.parentStructuralNode is None:
            self._tagSelectors = set()
        else:
            self._tagSelectors = self.parentStructuralNode.tagSelectors
        if self.tagSelector:
            self._tagSelectors.add(self.tagSelector)
        return self._tagSelectors
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelRenderingObject.py/StructuralNode.tagSelectors
6,687
@property
def parentDefinitionNode(self):
    try:
        return self._parentDefinitionNode
    except __HOLE__:
        parentDefinitionNode = None
        for rel in self.modelXbrl.relationshipSet(XbrlConst.euAxisMember).toModelObject(self):
            parentDefinitionNode = rel.fromModelObject
            break
        self._parentDefinitionNode = parentDefinitionNode
        return parentDefinitionNode
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelRenderingObject.py/ModelEuAxisCoord.parentDefinitionNode
6,688
@property
def filterRelationships(self):
    try:
        return self._filterRelationships
    except __HOLE__:
        rels = []  # order so conceptName filter is first (if any) (may want more sorting in future)
        for rel in self.modelXbrl.relationshipSet(
                (XbrlConst.tableFilter, XbrlConst.tableFilterMMDD,
                 XbrlConst.tableFilter201305, XbrlConst.tableFilter201301,
                 XbrlConst.tableFilter2011)).fromModelObject(self):
            if isinstance(rel.toModelObject, ModelConceptName):
                rels.insert(0, rel)  # put conceptName filters first
            else:
                rels.append(rel)
        self._filterRelationships = rels
        return rels
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelRenderingObject.py/ModelTable.filterRelationships
6,689
@property
def renderingXPathContext(self):
    try:
        return self._rendrCntx
    except __HOLE__:
        xpCtx = getattr(self.modelXbrl, "rendrCntx", None)  # none for EU 2010 tables
        if xpCtx is not None:
            self._rendrCntx = xpCtx.copy()
            for tblParamRel in self.modelXbrl.relationshipSet(
                    (XbrlConst.tableParameter, XbrlConst.tableParameterMMDD)).fromModelObject(self):
                varQname = tblParamRel.variableQname
                parameter = tblParamRel.toModelObject
                if isinstance(parameter, ModelParameter):
                    self._rendrCntx.inScopeVars[varQname] = xpCtx.inScopeVars.get(parameter.parameterQname)
        else:
            self._rendrCntx = None
        return self._rendrCntx
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelRenderingObject.py/ModelTable.renderingXPathContext
6,690
def aspectValue(self, xpCtx, aspect, inherit=None):
    try:
        # if xpCtx is None: xpCtx = self.modelXbrl.rendrCntx (must have xpCtx of calling table)
        if aspect == Aspect.LOCATION and self._locationSourceVar in xpCtx.inScopeVars:
            return xpCtx.inScopeVars[self._locationSourceVar]
        return self.evaluateRule(xpCtx, aspect)
    except __HOLE__:
        return '(unavailable)'  # table defective or not initialized
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelRenderingObject.py/ModelConstraintSet.aspectValue
6,691
@property
def constraintSets(self):
    try:
        return self._constraintSets
    except __HOLE__:
        self._constraintSets = dict((ruleSet.tagName, ruleSet)
                                    for ruleSet in XmlUtil.children(self, self.namespaceURI, "ruleSet"))
        if self.aspectsCovered():  # any local rule?
            self._constraintSets[None] = self
        return self._constraintSets
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelRenderingObject.py/ModelRuleDefinitionNode.constraintSets
6,692
@property
def aspectsInTaggedConstraintSet(self):
    try:
        return self._aspectsInTaggedConstraintSet
    except __HOLE__:
        self._aspectsInTaggedConstraintSet = set()
        for tag, constraintSet in self.constraitSets().items():
            if tag is not None:
                for aspect in constraintSet.aspectsCovered():
                    if aspect != Aspect.DIMENSIONS:
                        self._aspectsInTaggedConstraintSet.add(aspect)
        return self._aspectsInTaggedConstraintSet
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelRenderingObject.py/ModelRuleDefinitionNode.aspectsInTaggedConstraintSet
6,693
@property
def generations(self):
    try:
        return _INT(XmlUtil.childText(self,
                                      (XbrlConst.table, XbrlConst.tableMMDD,
                                       XbrlConst.table201305, XbrlConst.table201301,
                                       XbrlConst.table2011),
                                      "generations"))
    except (__HOLE__, ValueError):
        if self.axis in ('sibling', 'child', 'parent'):
            return 1
        return 0
TypeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelRenderingObject.py/ModelRelationshipDefinitionNode.generations
6,694
def coveredAspect(self, structuralNode=None):
    try:
        return self._coveredAspect
    except __HOLE__:
        self._coveredAspect = self.dimRelationships(structuralNode, getDimQname=True)
        return self._coveredAspect
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelRenderingObject.py/ModelDimensionRelationshipDefinitionNode.coveredAspect
6,695
def coveredAspect(self, structuralNode=None):
    try:
        return self._coveredAspect
    except __HOLE__:
        coveredAspect = self.get("coveredAspect")
        if coveredAspect in coveredAspectToken:
            self._coveredAspect = coveredAspectToken[coveredAspect]
        else:  # must be a qname
            self._coveredAspect = qname(self, coveredAspect)
        return self._coveredAspect
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelRenderingObject.py/ModelSelectionDefinitionNode.coveredAspect
6,696
@property
def filterRelationships(self):
    try:
        return self._filterRelationships
    except __HOLE__:
        rels = []  # order so conceptName filter is first (if any) (may want more sorting in future)
        for rel in self.modelXbrl.relationshipSet(
                (XbrlConst.tableAspectNodeFilter, XbrlConst.tableAspectNodeFilterMMDD,
                 XbrlConst.tableAspectNodeFilter201305, XbrlConst.tableFilterNodeFilter2011,
                 XbrlConst.tableAxisFilter2011, XbrlConst.tableAxisFilter201205)).fromModelObject(self):
            if isinstance(rel.toModelObject, ModelConceptName):
                rels.insert(0, rel)  # put conceptName filters first
            else:
                rels.append(rel)
        self._filterRelationships = rels
        return rels
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelRenderingObject.py/ModelFilterDefinitionNode.filterRelationships
6,697
def aspectsCovered(self, varBinding=None):
    try:
        return self._aspectsCovered
    except __HOLE__:
        self._aspectsCovered = set()
        self._dimensionsCovered = set()
        self.includeUnreportedValue = False
        if self.localName == "aspectNode":  # after 2013-05-17
            aspectElt = XmlUtil.child(self, self.namespaceURI,
                                      ("conceptAspect", "unitAspect", "entityIdentifierAspect",
                                       "periodAspect", "dimensionAspect"))
            if aspectElt is not None:
                if aspectElt.localName == "dimensionAspect":
                    dimQname = qname(aspectElt, aspectElt.textValue)
                    self._aspectsCovered.add(dimQname)
                    self._aspectsCovered.add(Aspect.DIMENSIONS)
                    self._dimensionsCovered.add(dimQname)
                    self.includeUnreportedValue = aspectElt.get("includeUnreportedValue") in ("true", "1")
                else:
                    self._aspectsCovered.add(aspectNodeAspectCovered[aspectElt.localName])
        else:
            # filter node (prior to 2013-05-17)
            for rel in self.filterRelationships:
                if rel.isCovered:
                    _filter = rel.toModelObject
                    self._aspectsCovered |= _filter.aspectsCovered(varBinding)
            self._dimensionsCovered = set(aspect for aspect in self._aspectsCovered
                                          if isinstance(aspect, QName))
            if self._dimensionsCovered:
                self._aspectsCovered.add(Aspect.DIMENSIONS)
        return self._aspectsCovered
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelRenderingObject.py/ModelFilterDefinitionNode.aspectsCovered
6,698
def _kill_app(self, method, process_count):
    """ Confirms that a number of test apps are terminated after the
        provided method is executed.

        `method`
            Callable to execute when testing app terminate functionality.
            This method will be passed the filename for the test app binary
            that will be launched to test against.

        `process_count`
            Number of test apps to launch that are supposed to be killed.

        Returns ``True`` if all launched apps were successfully terminated.
        """
    # range check
    if process_count > 20:
        process_count = 20
    if process_count < 1:
        process_count = 1

    # make a copy of 'sh' shell binary and alter it so it has a unique
    # checksum for this binary, so we don't kill other running instances of
    # bin/sh
    bin_file = self.make_file()
    shutil.copyfile('/bin/sh', bin_file)

    # change the checksum of the binary
    open(bin_file, 'a+b', 0).write('A' * 100)
    os.chmod(bin_file, 0700)  # set exec.. silly macosx

    # launch copied shell binary in background
    processes = {}
    for i in range(process_count):
        proc = subprocess.Popen([bin_file, '-c', 'while true; do sleep 1; done'],
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                stdin=subprocess.PIPE)
        processes[proc.pid] = proc

    # call provided method
    method(bin_file)

    i = 0
    while i < 1500:
        i += 1
        try:
            # wait for the processes to die; reclaim process entries
            pid, status = os.waitpid(-1, os.P_NOWAIT)
            time.sleep(0.05)
        except __HOLE__:
            break
        if pid in processes:
            del processes[pid]

    fail = False
    if processes:
        fail = True

        # kill what's leftover
        for pid, proc in processes.iteritems():
            try:
                proc.terminate()
                proc.wait()
            except OSError:
                pass

    return (not fail)
OSError
dataset/ETHPy150Open xtrementl/focus/tests/unit/plugin/modules/test_apps.py/CloseAppCase._kill_app
6,699
@defer.inlineCallbacks
def register(
    self,
    localpart=None,
    password=None,
    generate_token=True,
    guest_access_token=None,
    make_guest=False
):
    """Registers a new client on the server.

    Args:
        localpart : The local part of the user ID to register. If None,
          one will be generated.
        password (str) : The password to assign to this user so they can
          login again. This can be None which means they cannot login again
          via a password (e.g. the user is an application service user).
    Returns:
        A tuple of (user_id, access_token).
    Raises:
        RegistrationError if there was a problem registering.
    """
    yield run_on_reactor()
    password_hash = None
    if password:
        password_hash = self.auth_handler().hash(password)

    if localpart:
        yield self.check_username(localpart, guest_access_token=guest_access_token)

        was_guest = guest_access_token is not None

        if not was_guest:
            try:
                int(localpart)
                raise RegistrationError(
                    400,
                    "Numeric user IDs are reserved for guest users."
                )
            except __HOLE__:
                pass

        user = UserID(localpart, self.hs.hostname)
        user_id = user.to_string()

        token = None
        if generate_token:
            token = self.auth_handler().generate_access_token(user_id)
        yield self.store.register(
            user_id=user_id,
            token=token,
            password_hash=password_hash,
            was_guest=was_guest,
            make_guest=make_guest,
        )

        yield registered_user(self.distributor, user)
    else:
        # autogen a sequential user ID
        attempts = 0
        token = None
        user = None
        while not user:
            localpart = yield self._generate_user_id(attempts > 0)
            user = UserID(localpart, self.hs.hostname)
            user_id = user.to_string()
            yield self.check_user_id_not_appservice_exclusive(user_id)
            if generate_token:
                token = self.auth_handler().generate_access_token(user_id)
            try:
                yield self.store.register(
                    user_id=user_id,
                    token=token,
                    password_hash=password_hash,
                    make_guest=make_guest
                )
            except SynapseError:
                # if user id is taken, just generate another
                user = None
                user_id = None
                token = None
                attempts += 1
        yield registered_user(self.distributor, user)

    # We used to generate default identicons here, but nowadays
    # we want clients to generate their own as part of their branding
    # rather than there being consistent matrix-wide ones, so we don't.

    defer.returnValue((user_id, token))
ValueError
dataset/ETHPy150Open matrix-org/synapse/synapse/handlers/register.py/RegistrationHandler.register