Column summary (from the dataset preview):

  Unnamed: 0: int64, values 0 to 10k (integer row id)
  function: string, lengths 79 to 138k characters (Python function source in which one token has been masked as __HOLE__)
  label: string, 20 distinct classes (the exception class name that fills __HOLE__)
  info: string, lengths 42 to 261 characters (provenance path inside the ETHPy150Open corpus, ending with the qualified function name)

The rows below cover indices 5,300 through 5,367 of this split.
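Each row therefore pairs a masked Python function with the exception name that belongs in the mask. As a minimal sketch of how the fields relate (assuming the split has been exported to a local CSV with these column names; the file name below is hypothetical), the original source can be restored by substituting the label back into the hole:

import pandas as pd

# Hypothetical local export of this split; adjust the path to your own copy.
df = pd.read_csv("ethpy150_exception_split.csv")

row = df.iloc[0]
masked_source = row["function"]   # Python source with the exception name replaced by __HOLE__
target_label = row["label"]       # e.g. "SystemExit", "KeyError"

# Reconstruct the unmasked function and note where it came from.
restored_source = masked_source.replace("__HOLE__", target_label)
print(row["info"])                # provenance path inside the ETHPy150Open corpus
print(restored_source)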
5,300
def emit(self, record): """ Emit a record. Pickles the record and writes it to the socket in binary format. If there is an error with the socket, silently drop the packet. If there was a problem with the socket, re-establishes the socket. """ try: s = self.makePickle(record) self.send(s) except (KeyboardInterrupt, __HOLE__): raise except: self.handleError(record)
SystemExit
dataset/ETHPy150Open babble/babble/include/jython/Lib/logging/handlers.py/SocketHandler.emit
5,301
def emit(self, record): """ Emit a record. The record is formatted, and then sent to the syslog server. If exception information is present, it is NOT sent to the server. """ msg = self.format(record) """ We need to convert record level to lowercase, maybe this will change in the future. """ msg = self.log_format_string % ( self.encodePriority(self.facility, self.mapPriority(record.levelname)), msg) try: if self.unixsocket: try: self.socket.send(msg) except socket.error: self._connect_unixsocket(self.address) self.socket.send(msg) else: self.socket.sendto(msg, self.address) except (__HOLE__, SystemExit): raise except: self.handleError(record)
KeyboardInterrupt
dataset/ETHPy150Open babble/babble/include/jython/Lib/logging/handlers.py/SysLogHandler.emit
5,302
def emit(self, record): """ Emit a record. Format the record and send it to the specified addressees. """ try: import smtplib try: from email.Utils import formatdate except: formatdate = self.date_time port = self.mailport if not port: port = smtplib.SMTP_PORT smtp = smtplib.SMTP(self.mailhost, port) msg = self.format(record) msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s" % ( self.fromaddr, string.join(self.toaddrs, ","), self.getSubject(record), formatdate(), msg) smtp.sendmail(self.fromaddr, self.toaddrs, msg) smtp.quit() except (KeyboardInterrupt, __HOLE__): raise except: self.handleError(record)
SystemExit
dataset/ETHPy150Open babble/babble/include/jython/Lib/logging/handlers.py/SMTPHandler.emit
5,303
def __init__(self, appname, dllname=None, logtype="Application"): logging.Handler.__init__(self) try: import win32evtlogutil, win32evtlog self.appname = appname self._welu = win32evtlogutil if not dllname: dllname = os.path.split(self._welu.__file__) dllname = os.path.split(dllname[0]) dllname = os.path.join(dllname[0], r'win32service.pyd') self.dllname = dllname self.logtype = logtype self._welu.AddSourceToRegistry(appname, dllname, logtype) self.deftype = win32evtlog.EVENTLOG_ERROR_TYPE self.typemap = { logging.DEBUG : win32evtlog.EVENTLOG_INFORMATION_TYPE, logging.INFO : win32evtlog.EVENTLOG_INFORMATION_TYPE, logging.WARNING : win32evtlog.EVENTLOG_WARNING_TYPE, logging.ERROR : win32evtlog.EVENTLOG_ERROR_TYPE, logging.CRITICAL: win32evtlog.EVENTLOG_ERROR_TYPE, } except __HOLE__: print "The Python Win32 extensions for NT (service, event "\ "logging) appear not to be available." self._welu = None
ImportError
dataset/ETHPy150Open babble/babble/include/jython/Lib/logging/handlers.py/NTEventLogHandler.__init__
5,304
def emit(self, record): """ Emit a record. Determine the message ID, event category and event type. Then log the message in the NT event log. """ if self._welu: try: id = self.getMessageID(record) cat = self.getEventCategory(record) type = self.getEventType(record) msg = self.format(record) self._welu.ReportEvent(self.appname, id, cat, type, [msg]) except (__HOLE__, SystemExit): raise except: self.handleError(record)
KeyboardInterrupt
dataset/ETHPy150Open babble/babble/include/jython/Lib/logging/handlers.py/NTEventLogHandler.emit
5,305
def emit(self, record): """ Emit a record. Send the record to the Web server as an URL-encoded dictionary """ try: import httplib, urllib host = self.host h = httplib.HTTP(host) url = self.url data = urllib.urlencode(self.mapLogRecord(record)) if self.method == "GET": if (string.find(url, '?') >= 0): sep = '&' else: sep = '?' url = url + "%c%s" % (sep, data) h.putrequest(self.method, url) # support multiple hosts on one IP address... # need to strip optional :port from host, if present i = string.find(host, ":") if i >= 0: host = host[:i] h.putheader("Host", host) if self.method == "POST": h.putheader("Content-type", "application/x-www-form-urlencoded") h.putheader("Content-length", str(len(data))) h.endheaders() if self.method == "POST": h.send(data) h.getreply() #can't do anything with the result except (KeyboardInterrupt, __HOLE__): raise except: self.handleError(record)
SystemExit
dataset/ETHPy150Open babble/babble/include/jython/Lib/logging/handlers.py/HTTPHandler.emit
5,306
def handle_service_message(self): """Handles incoming service messages from supervisor socket""" try: serialized_request = self.remote_control_socket.recv_multipart(flags=zmq.NOBLOCK)[0] except zmq.ZMQError as e: if e.errno == zmq.EAGAIN: return instruction = ServiceMessage.loads(serialized_request)[0] try: response = self.instructions[instruction]() except __HOLE__: errors_logger.exception("%s instruction not recognized by worker" % instruction) return self.remote_control_socket.send_multipart([ServiceMessage.dumps(response)], flags=zmq.NOBLOCK) # If halt instruction succedded, raise HaltException # so the worker event loop knows it has to stop if instruction == WORKER_HALT and int(response) == SUCCESS_STATUS: raise HaltException() return
KeyError
dataset/ETHPy150Open oleiade/Elevator/elevator/backend/worker.py/Worker.handle_service_message
5,307
def test_iteration(self): nsm = message.NamespaceMap() uripat = 'http://example.com/foo%r' nsm.add(uripat%0) for n in range(1,23): self.failUnless(uripat%(n-1) in nsm) self.failUnless(nsm.isDefined(uripat%(n-1))) nsm.add(uripat%n) for (uri, alias) in nsm.iteritems(): self.failUnless(uri[22:]==alias[3:]) i=0 it = nsm.iterAliases() try: while True: it.next() i += 1 except StopIteration: self.failUnless(i == 23) i=0 it = nsm.iterNamespaceURIs() try: while True: it.next() i += 1 except __HOLE__: self.failUnless(i == 23)
StopIteration
dataset/ETHPy150Open adieu/python-openid/openid/test/test_message.py/NamespaceMapTest.test_iteration
5,308
def Decode(self, encoded_data): """Decode the encoded data. Args: encoded_data: a byte string containing the encoded data. Returns: A tuple containing a byte string of the decoded data and the remaining encoded data. Raises: BackEndError: if the base32 stream cannot be decoded. """ try: decoded_data = base64.b32decode(encoded_data, casefold=False) except (__HOLE__, binascii.Error) as exception: raise errors.BackEndError( u'Unable to decode base32 stream with error: {0!s}.'.format( exception)) return decoded_data, b'' # Register the decoder with the encoding manager.
TypeError
dataset/ETHPy150Open log2timeline/dfvfs/dfvfs/encoding/base32_decoder.py/Base32Decoder.Decode
5,309
def get_indexes(self): #from hyperadmin.resources.indexes import Index from hyperadmin.resources.models.filters import FieldFilter, SearchFilter from django.db import models from django.contrib.admin.util import get_fields_from_path try: from django.contrib.admin.util import lookup_needs_distinct except __HOLE__: from hyperadmin.resources.models.util import lookup_needs_distinct indexes = {'primary': ModelIndex('primary', self)} index = ModelIndex('filter', self) indexes['filter'] = index if self.list_filter: for list_filter in self.list_filter: use_distinct = False if callable(list_filter): # This is simply a custom list filter class. spec = list_filter(index=index) else: field_path = None if isinstance(list_filter, (tuple, list)): # This is a custom FieldListFilter class for a given field. field, field_list_filter_class = list_filter else: # This is simply a field name, so use the default # FieldListFilter class that has been registered for # the type of the given field. field, field_list_filter_class = list_filter, FieldFilter.create if not isinstance(field, models.Field): field_path = field field = get_fields_from_path(self.model, field_path)[-1] spec = field_list_filter_class(field, field_path=field_path, index=index) # Check if we need to use distinct() use_distinct = (use_distinct or lookup_needs_distinct(self.opts, field_path)) if spec: index.filters.append(spec) if self.search_fields: index.register_filter(SearchFilter, search_fields=self.search_fields) ''' date_section = self.register_section('date', FilterSection) if self.date_hierarchy: pass ''' return indexes
ImportError
dataset/ETHPy150Open zbyte64/django-hyperadmin/hyperadmin/resources/models/resources.py/BaseModelResource.get_indexes
5,310
def test_axes(): try: import matplotlib version = matplotlib.__version__.split("-")[0] version = version.split(".")[:2] if [int(_) for _ in version] < [0,99]: raise ImportError import pylab except __HOLE__: print("\nSkipping test (pylab not available or too old version)\n") return fig = pylab.figure() axes = fig.add_subplot(111) for ctx in [mp, fp]: ctx.plot(lambda x: x**2, [0, 3], axes=axes) assert axes.get_xlabel() == 'x' assert axes.get_ylabel() == 'f(x)' fig = pylab.figure() axes = fig.add_subplot(111) for ctx in [mp, fp]: ctx.cplot(lambda z: z, [-2, 2], [-10, 10], axes=axes) assert axes.get_xlabel() == 'Re(z)' assert axes.get_ylabel() == 'Im(z)'
ImportError
dataset/ETHPy150Open fredrik-johansson/mpmath/mpmath/tests/test_visualization.py/test_axes
5,311
def run(self): data = {'state': 'unknown'} try: proc = subprocess.Popen( ['sudo', '/opt/MegaRAID/MegaCli/MegaCli64', '-LDInfo', '-Lall', '-aALL'], stdout=subprocess.PIPE, close_fds=True) output = proc.communicate()[0] except __HOLE__ as exception: self.checks_logger.error( 'Unable to find /opt/MegaRAID/MegaCli/MegaCli64.' ' Error: {0}'.format(exception.message)) return data for line in output.split("\n"): print line if line.startswith('State'): data['state'] = line.split(':')[1].replace(' ', '') return data
OSError
dataset/ETHPy150Open serverdensity/sd-agent-plugins/MegaRAID/MegaRAID.py/MegaRAID.run
5,312
def load(self): try: context = self.fmt_module.load(open(self.configfile, 'r')) except __HOLE__, error: raise ValueError( 'loading configuration file: {} ({})'.format( error, self.configfile)) return context
IOError
dataset/ETHPy150Open intuition-io/insights/insights/contexts/file.py/FileContext.load
5,313
def buffer_to_value(self, obj, buffer, offset, default_endianness=DEFAULT_ENDIANNESS): """ Converts the bytes in ``buffer`` at ``offset`` to a native Python value. Returns that value and the number of bytes consumed to create it. :param obj: The parent :class:`.PebblePacket` of this field :type obj: .PebblePacket :param buffer: The buffer from which to extract a value. :type buffer: bytes :param offset: The offset in the buffer to start at. :type offset: int :param default_endianness: The default endianness of the value. Used if ``endianness`` was not passed to the :class:`Field` constructor. :type default_endianness: str :return: (value, length) :rtype: (:class:`object`, :any:`int`) """ try: value, length = struct.unpack_from(str(self.endianness or default_endianness) + self.struct_format, buffer, offset)[0], struct.calcsize(self.struct_format) if self._enum is not None: try: return self._enum(value), length except __HOLE__ as e: raise PacketDecodeError("{}: {}".format(self.type, e)) else: return value, length except struct.error as e: raise PacketDecodeError("{}: {}".format(self.type, e))
ValueError
dataset/ETHPy150Open pebble/libpebble2/libpebble2/protocol/base/types.py/Field.buffer_to_value
5,314
def buffer_to_value(self, obj, buffer, offset, default_endianness=DEFAULT_ENDIANNESS): try: return uuid.UUID(bytes=buffer[offset:offset+16]), 16 except __HOLE__ as e: raise PacketDecodeError("{}: failed to decode UUID: {}".format(self.type, e))
ValueError
dataset/ETHPy150Open pebble/libpebble2/libpebble2/protocol/base/types.py/UUID.buffer_to_value
5,315
def prepare(self, obj, value): try: setattr(obj, self.determinant._name, self.type_map[type(value)]) except __HOLE__: if not self.accept_missing: raise if isinstance(self.length, Field): setattr(obj, self.length._name, len(value.serialise()))
KeyError
dataset/ETHPy150Open pebble/libpebble2/libpebble2/protocol/base/types.py/Union.prepare
5,316
def buffer_to_value(self, obj, buffer, offset, default_endianness=DEFAULT_ENDIANNESS): if isinstance(self.length, Field): length = getattr(obj, self.length._name) else: length = len(buffer) - offset k = getattr(obj, self.determinant._name) try: return self.contents[k].parse(buffer[offset:offset+length], default_endianness=default_endianness) except __HOLE__: if not self.accept_missing: raise PacketDecodeError("{}: unrecognised value for union: {}".format(self.type, k)) else: return None, length
KeyError
dataset/ETHPy150Open pebble/libpebble2/libpebble2/protocol/base/types.py/Union.buffer_to_value
5,317
def split_and_deserialize(string): """Split and try to JSON deserialize a string. Gets a string with the KEY=VALUE format, split it (using '=' as the separator) and try to JSON deserialize the VALUE. :returns: A tuple of (key, value). """ try: key, value = string.split("=", 1) except ValueError: raise exc.CommandError(_('Attributes must be a list of ' 'PATH=VALUE not "%s"') % string) try: value = json.loads(value) except __HOLE__: pass return (key, value)
ValueError
dataset/ETHPy150Open openstack/python-ironicclient/ironicclient/common/utils.py/split_and_deserialize
5,318
def common_params_for_list(args, fields, field_labels): """Generate 'params' dict that is common for every 'list' command. :param args: arguments from command line. :param fields: possible fields for sorting. :param field_labels: possible field labels for sorting. :returns: a dict with params to pass to the client method. """ params = {} if args.marker is not None: params['marker'] = args.marker if args.limit is not None: if args.limit < 0: raise exc.CommandError( _('Expected non-negative --limit, got %s') % args.limit) params['limit'] = args.limit if args.sort_key is not None: # Support using both heading and field name for sort_key fields_map = dict(zip(field_labels, fields)) fields_map.update(zip(fields, fields)) try: sort_key = fields_map[args.sort_key] except __HOLE__: raise exc.CommandError( _("%(sort_key)s is an invalid field for sorting, " "valid values for --sort-key are: %(valid)s") % {'sort_key': args.sort_key, 'valid': list(fields_map)}) params['sort_key'] = sort_key if args.sort_dir is not None: if args.sort_dir not in ('asc', 'desc'): raise exc.CommandError( _("%s is an invalid value for sort direction, " "valid values for --sort-dir are: 'asc', 'desc'") % args.sort_dir) params['sort_dir'] = args.sort_dir params['detail'] = args.detail requested_fields = args.fields[0] if args.fields else None if requested_fields is not None: params['fields'] = requested_fields return params
KeyError
dataset/ETHPy150Open openstack/python-ironicclient/ironicclient/common/utils.py/common_params_for_list
5,319
def make_configdrive(path): """Make the config drive file. :param path: The directory containing the config drive files. :returns: A gzipped and base64 encoded configdrive string. """ # Make sure path it's readable if not os.access(path, os.R_OK): raise exc.CommandError(_('The directory "%s" is not readable') % path) with tempfile.NamedTemporaryFile() as tmpfile: with tempfile.NamedTemporaryFile() as tmpzipfile: publisher = 'ironicclient-configdrive 0.1' try: p = subprocess.Popen(['genisoimage', '-o', tmpfile.name, '-ldots', '-allow-lowercase', '-allow-multidot', '-l', '-publisher', publisher, '-quiet', '-J', '-r', '-V', 'config-2', path], stdout=subprocess.PIPE, stderr=subprocess.PIPE) except __HOLE__ as e: raise exc.CommandError( _('Error generating the config drive. Make sure the ' '"genisoimage" tool is installed. Error: %s') % e) stdout, stderr = p.communicate() if p.returncode != 0: raise exc.CommandError( _('Error generating the config drive.' 'Stdout: "%(stdout)s". Stderr: %(stderr)s') % {'stdout': stdout, 'stderr': stderr}) # Compress file tmpfile.seek(0) g = gzip.GzipFile(fileobj=tmpzipfile, mode='wb') shutil.copyfileobj(tmpfile, g) g.close() tmpzipfile.seek(0) return base64.b64encode(tmpzipfile.read())
OSError
dataset/ETHPy150Open openstack/python-ironicclient/ironicclient/common/utils.py/make_configdrive
5,320
def bool_argument_value(arg_name, bool_str, strict=True, default=False): """Returns the Boolean represented by bool_str. Returns the Boolean value for the argument named arg_name. The value is represented by the string bool_str. If the string is an invalid Boolean string: if strict is True, a CommandError exception is raised; otherwise the default value is returned. :param arg_name: The name of the argument :param bool_str: The string representing a Boolean value :param strict: Used if the string is invalid. If True, raises an exception. If False, returns the default value. :param default: The default value to return if the string is invalid and not strict :returns: the Boolean value represented by bool_str or the default value if bool_str is invalid and strict is False :raises CommandError: if bool_str is an invalid Boolean string """ try: val = strutils.bool_from_string(bool_str, strict, default) except __HOLE__ as e: raise exc.CommandError(_("argument %(arg)s: %(err)s.") % {'arg': arg_name, 'err': e}) return val
ValueError
dataset/ETHPy150Open openstack/python-ironicclient/ironicclient/common/utils.py/bool_argument_value
5,321
def __new__(cls, name, bases, attrs): try: parents = [b for b in bases if issubclass(b, EntityForm)] except __HOLE__: # We are defining EntityForm itself parents = None sup = super(EntityFormMetaclass, cls) if not parents: # Then there's no business trying to use proxy fields. return sup.__new__(cls, name, bases, attrs) # Fake a declaration of all proxy fields so they'll be handled correctly. opts = ModelFormOptions(attrs.get('Meta', None)) if opts.model: formfield_callback = attrs.get('formfield_callback', None) proxy_fields = proxy_fields_for_entity_model(opts.model, opts.fields, opts.exclude, opts.widgets, formfield_callback) else: proxy_fields = {} new_attrs = proxy_fields.copy() new_attrs.update(attrs) new_class = sup.__new__(cls, name, bases, new_attrs) new_class.proxy_fields = proxy_fields return new_class
NameError
dataset/ETHPy150Open ithinksw/philo/philo/forms/entities.py/EntityFormMetaclass.__new__
5,322
def action_on_a_page_single_doc(page): docs = [row.doc for row in page] for doc in docs: doc["tiid"] = doc["_id"] try: doc["last_update_run"] except __HOLE__: doc["last_update_run"] = None print "try" try: print doc["tiid"] cur.execute("""INSERT INTO items(tiid, created, last_modified, last_update_run) VALUES (%(tiid)s, %(created)s, %(last_modified)s, %(last_update_run)s)""", doc) #conn.commit() except psycopg2.IntegrityError: print "row already exists" mypostgresdao.conn.rollback() except: mypostgresdao.conn.rollback() finally: pass
KeyError
dataset/ETHPy150Open Impactstory/total-impact-core/extras/db_housekeeping/postgres_mirror.py/action_on_a_page_single_doc
5,323
def build_items_save_list(items): items_save_list = [] for item in items: item["tiid"] = item["_id"] try: item["last_update_run"] except __HOLE__: item["last_update_run"] = None items_save_list += [item] return items_save_list
KeyError
dataset/ETHPy150Open Impactstory/total-impact-core/extras/db_housekeeping/postgres_mirror.py/build_items_save_list
5,324
def __new__(cls, meth, callback=None): try: obj = meth.__self__ func = meth.__func__ except __HOLE__: raise TypeError("argument should be a bound method, not {}" .format(type(meth))) def _cb(arg): # The self-weakref trick is needed to avoid creating a reference # cycle. self = self_wr() if self._alive: self._alive = False if callback is not None: callback(self) self = ref.__new__(cls, obj, _cb) self._func_ref = ref(func, _cb) self._meth_type = type(meth) self._alive = True self_wr = ref(self) return self
AttributeError
dataset/ETHPy150Open django/django/django/dispatch/weakref_backports.py/WeakMethod.__new__
5,325
def gdb(fips_dir, proj_dir, cfg_name, target=None, target_args=None) : """debug a single target with gdb""" # prepare proj_name = util.get_project_name_from_dir(proj_dir) util.ensure_valid_project_dir(proj_dir) # load the config(s) configs = config.load(fips_dir, proj_dir, cfg_name) if configs : for cfg in configs : # check if config is valid config_valid, _ = config.check_config_valid(fips_dir, proj_dir, cfg, print_errors = True) if config_valid : deploy_dir = util.get_deploy_dir(fips_dir, proj_name, cfg) log.colored(log.YELLOW, "=== gdb: {}".format(cfg['name'])) cmdLine = ['gdb', "-ex", "run", "--args", target] if target_args : cmdLine.extend(target_args) try: subprocess.call(args = cmdLine, cwd = deploy_dir) except __HOLE__ : log.error("Failed to execute gdb (not installed?)") else : log.error("Config '{}' not valid in this environment".format(cfg['name'])) else : log.error("No valid configs found for '{}'".format(cfg_name)) return True #-------------------------------------------------------------------------------
OSError
dataset/ETHPy150Open floooh/fips/verbs/gdb.py/gdb
5,326
def _run_tcp(self, listener, socket): """Start a raw TCP server in a new green thread.""" while True: try: new_sock, address = socket.accept() self._pool.spawn_n(listener, new_sock) except (SystemExit, __HOLE__): pass
KeyboardInterrupt
dataset/ETHPy150Open nii-cloud/dodai-compute/nova/wsgi.py/Server._run_tcp
5,327
def get_user(request): """authorize user based on API key if it was passed, otherwise just use the request's user. :param request: :return: django.contrib.au th.User """ from tastypie.models import ApiKey if 'json' in request.META['CONTENT_TYPE']: try: req = json.loads(request.body) if 'api_key' in req: api_key = ApiKey.objects.get(key=req['api_key']) return api_key.user except __HOLE__: pass if 'api_key' in request.REQUEST: api_key = ApiKey.objects.get(key=request.REQUEST['api_key']) return api_key.user elif request.user.is_authenticated(): return User.objects.get(pk=request.user.pk) else: return request.user
ValueError
dataset/ETHPy150Open hydroshare/hydroshare2/ga_resources/utils.py/get_user
5,328
def to_python(self, value): value = super(BBoxField, self).to_python(value) if not value: return -180.0,-90.0,180.0,90.0 try: lx, ly, ux, uy = value.split(',') if self.localize: lx = float(sanitize_separators(lx)) ly = float(sanitize_separators(ly)) ux = float(sanitize_separators(ux)) uy = float(sanitize_separators(uy)) if uy < ly or ux < lx: raise ValidationError("BBoxes must be in lower-left(x,y), upper-right(x,y) order") except (__HOLE__, TypeError): raise ValidationError("BBoxes must be four floating point values separated by commas") lx = float(sanitize_separators(lx)) ly = float(sanitize_separators(ly)) ux = float(sanitize_separators(ux)) uy = float(sanitize_separators(uy)) return lx, ly, ux, uy
ValueError
dataset/ETHPy150Open hydroshare/hydroshare2/ga_resources/utils.py/BBoxField.to_python
5,329
def get_object(self, *args, **kwargs): obj = super(OwnedObjectMixin, self).get_object(*args, **kwargs) try: if not obj.owner == self.request.user: raise Http404() except __HOLE__: pass return obj
AttributeError
dataset/ETHPy150Open matagus/django-planet/planet/views.py/OwnedObjectMixin.get_object
5,330
def _target(self): try: timer = Timer() if self.show_time else None with Spinner(label=self.label, timer=timer) as spinner: while not self.shutdown_event.is_set(): spinner.step() spinner.sleep() except __HOLE__: # Swallow Control-C signals without producing a nasty traceback that # won't make any sense to the average user. pass
KeyboardInterrupt
dataset/ETHPy150Open xolox/python-humanfriendly/humanfriendly/__init__.py/AutomaticSpinner._target
5,331
def __getitem__(self, pos): print('__getitem__:', pos) try: return self.items.__getitem__(pos) except __HOLE__ as e: print('ERROR:', e)
TypeError
dataset/ETHPy150Open fluentpython/example-code/attic/sequences/slice_test.py/SliceDemo.__getitem__
5,332
def build_activation(act=None): def compose(a, b): c = lambda z: b(a(z)) c.__theanets_name__ = '%s(%s)' % (b.__theanets_name__, a.__theanets_name__) return c if '+' in act: return functools.reduce( compose, (build_activation(a) for a in act.split('+'))) options = { 'tanh': T.tanh, 'linear': lambda z: z, 'logistic': T.nnet.sigmoid, 'sigmoid': T.nnet.sigmoid, 'hard_sigmoid': T.nnet.hard_sigmoid, 'softplus': T.nnet.softplus, 'softmax': softmax, 'theano_softmax': T.nnet.softmax, # shorthands 'relu': lambda z: z * (z > 0), 'trel': lambda z: z * (z > 0) * (z < 1), 'trec': lambda z: z * (z > 1), 'tlin': lambda z: z * (abs(z) > 1), # modifiers 'rect:max': lambda z: T.minimum(1, z), 'rect:min': lambda z: T.maximum(0, z), # normalization 'norm:dc': lambda z: (z.T - z.mean(axis=1)).T, 'norm:max': lambda z: (z.T / T.maximum(1e-10, abs(z).max(axis=1))).T, 'norm:std': lambda z: (z.T / T.maximum(1e-10, T.std(z, axis=1))).T, } for k, v in options.items(): v.__theanets_name__ = k try: return options[act] except __HOLE__: raise KeyError('unknown activation %r' % act)
KeyError
dataset/ETHPy150Open zomux/deepy/deepy/utils/activations.py/build_activation
5,333
def execute(self, response, autofixture): """Generates fixture objects from the given response and stores them in the application-specific cache. :param response: the recorded :class:`Response` :param autofixture: the active :class:`AutoFixture` """ if not has_request_context: return try: app = autofixture.app # Create response fixture fixture = Fixture.from_response(response, app, self.request_name) autofixture.add_fixture(fixture) # Create request fixture if request.data: fixture = Fixture.from_request(request, app, self.request_name) autofixture.add_fixture(fixture) except __HOLE__: # pragma: no cover warnings.warn("Could not create fixture for unsupported mime type") return response
TypeError
dataset/ETHPy150Open janukobytsch/flask-autofixture/flask_autofixture/command.py/CreateFixtureCommand.execute
5,334
def get_test_module(app_name): ''' Import tests module ''' module_name = '.'.join([app_name, 'tests']) try: return import_module(module_name) except __HOLE__, exception: if exception.message == 'No module named tests': raise ImportError('No module named {0}'.format(module_name))
ImportError
dataset/ETHPy150Open plus500s/django-test-tools/test_tools/test_runner.py/get_test_module
5,335
def load_custom_test_package(self, module, app_name): ''' Load custom test package from module and app ''' for importer, module_name, ispkg in pkgutil.iter_modules( [os.path.dirname(module.__file__)]): try: import_module('.'.join([app_name, 'tests'])) except __HOLE__, e: pass else: module = import_module('.'.join([app_name, 'tests', module_name])) yield defaultTestLoader.loadTestsFromModule(module)
ImportError
dataset/ETHPy150Open plus500s/django-test-tools/test_tools/test_runner.py/DiscoveryDjangoTestSuiteRunner.load_custom_test_package
5,336
def get_apps(self): try: return settings.PROJECT_APPS except __HOLE__: return settings.INSTALLED_APPS
AttributeError
dataset/ETHPy150Open plus500s/django-test-tools/test_tools/test_runner.py/DiscoveryDjangoTestSuiteRunner.get_apps
5,337
def parse_kwarg(string_): ''' Parses the string and looks for the following kwarg format: "{argument name}={argument value}" For example: "my_message=Hello world" Returns the kwarg name and value, or (None, None) if the regex was not matched. ''' try: return KWARG_REGEX.match(string_).groups() except __HOLE__: return None, None
AttributeError
dataset/ETHPy150Open saltstack/salt/salt/utils/args.py/parse_kwarg
5,338
def register_adapters(): global adapters_registered if adapters_registered is True: return try: import pkg_resources packageDir = pkg_resources.resource_filename('pyamf', 'adapters') except: packageDir = os.path.dirname(__file__) for f in glob.glob(os.path.join(packageDir, '*.py')): mod = os.path.basename(f).split(os.path.extsep, 1)[0] if mod == '__init__' or not mod.startswith('_'): continue try: register_adapter(mod[1:].replace('_', '.'), PackageImporter(mod)) except __HOLE__: pass adapters_registered = True
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/PyAMF-0.6.1/pyamf/adapters/__init__.py/register_adapters
5,339
def __str__(self): try: line = 'line %d: ' % (self.args[1].coord.line,) except (AttributeError, __HOLE__, IndexError): line = '' return '%s%s' % (line, self.args[0])
TypeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/cffi-1.5.2/cffi/api.py/CDefError.__str__
5,340
def _typeof(self, cdecl, consider_function_as_funcptr=False): # string -> ctype object try: result = self._parsed_types[cdecl] except __HOLE__: with self._lock: result = self._typeof_locked(cdecl) # btype, really_a_function_type = result if really_a_function_type and not consider_function_as_funcptr: raise CDefError("the type %r is a function type, not a " "pointer-to-function type" % (cdecl,)) return btype
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/cffi-1.5.2/cffi/api.py/FFI._typeof
5,341
def gc(self, cdata, destructor): """Return a new cdata object that points to the same data. Later, when this new cdata object is garbage-collected, 'destructor(old_cdata_object)' will be called. """ try: gcp = self._backend.gcp except __HOLE__: pass else: return gcp(cdata, destructor) # with self._lock: try: gc_weakrefs = self.gc_weakrefs except AttributeError: from .gc_weakref import GcWeakrefs gc_weakrefs = self.gc_weakrefs = GcWeakrefs(self) return gc_weakrefs.build(cdata, destructor)
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/cffi-1.5.2/cffi/api.py/FFI.gc
5,342
def _get_cached_btype(self, type): assert self._lock.acquire(False) is False # call me with the lock! try: BType = self._cached_btypes[type] except __HOLE__: finishlist = [] BType = type.get_cached_btype(self, finishlist) for type in finishlist: type.finish_backend_type(self, finishlist) return BType
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/cffi-1.5.2/cffi/api.py/FFI._get_cached_btype
5,343
def _apply_embedding_fix(self, kwds): # must include an argument like "-lpython2.7" for the compiler def ensure(key, value): lst = kwds.setdefault(key, []) if value not in lst: lst.append(value) # if '__pypy__' in sys.builtin_module_names: if sys.platform == "win32": # we need 'libpypy-c.lib'. Right now, distributions of # pypy contain it as 'include/python27.lib'. You need # to manually copy it back to 'libpypy-c.lib'. XXX Will # be fixed in the next pypy release. pythonlib = "libpypy-c" if hasattr(sys, 'prefix'): ensure('library_dirs', sys.prefix) else: # we need 'libpypy-c.{so,dylib}', which should be by # default located in 'sys.prefix/bin' pythonlib = "pypy-c" if hasattr(sys, 'prefix'): import os ensure('library_dirs', os.path.join(sys.prefix, 'bin')) else: if sys.platform == "win32": template = "python%d%d" if hasattr(sys, 'gettotalrefcount'): template += '_d' else: try: import sysconfig except __HOLE__: # 2.6 from distutils import sysconfig template = "python%d.%d" if sysconfig.get_config_var('DEBUG_EXT'): template += sysconfig.get_config_var('DEBUG_EXT') pythonlib = (template % (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) if hasattr(sys, 'abiflags'): pythonlib += sys.abiflags ensure('libraries', pythonlib) if sys.platform == "win32": ensure('extra_link_args', '/MANIFEST')
ImportError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/cffi-1.5.2/cffi/api.py/FFI._apply_embedding_fix
5,344
def init_once(self, func, tag): # Read _init_once_cache[tag], which is either (False, lock) if # we're calling the function now in some thread, or (True, result). # Don't call setdefault() in most cases, to avoid allocating and # immediately freeing a lock; but still use setdefaut() to avoid # races. try: x = self._init_once_cache[tag] except __HOLE__: x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) # Common case: we got (True, result), so we return the result. if x[0]: return x[1] # Else, it's a lock. Acquire it to serialize the following tests. with x[1]: # Read again from _init_once_cache the current status. x = self._init_once_cache[tag] if x[0]: return x[1] # Call the function and store the result back. result = func() self._init_once_cache[tag] = (True, result) return result
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/cffi-1.5.2/cffi/api.py/FFI.init_once
5,345
def _load_backend_lib(backend, name, flags): if name is None: if sys.platform != "win32": return backend.load_library(None, flags) name = "c" # Windows: load_library(None) fails, but this works # (backward compatibility hack only) try: if '.' not in name and '/' not in name: raise OSError("library not found: %r" % (name,)) return backend.load_library(name, flags) except __HOLE__: import ctypes.util path = ctypes.util.find_library(name) if path is None: raise # propagate the original OSError return backend.load_library(path, flags)
OSError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/cffi-1.5.2/cffi/api.py/_load_backend_lib
5,346
def _make_ffi_library(ffi, libname, flags): import os backend = ffi._backend backendlib = _load_backend_lib(backend, libname, flags) # def accessor_function(name): key = 'function ' + name tp, _ = ffi._parser._declarations[key] BType = ffi._get_cached_btype(tp) try: value = backendlib.load_function(BType, name) except __HOLE__ as e: raise AttributeError('%s: %s' % (name, e)) library.__dict__[name] = value # def accessor_variable(name): key = 'variable ' + name tp, _ = ffi._parser._declarations[key] BType = ffi._get_cached_btype(tp) read_variable = backendlib.read_variable write_variable = backendlib.write_variable setattr(FFILibrary, name, property( lambda self: read_variable(BType, name), lambda self, value: write_variable(BType, name, value))) # def accessor_constant(name): raise NotImplementedError("non-integer constant '%s' cannot be " "accessed from a dlopen() library" % (name,)) # def accessor_int_constant(name): library.__dict__[name] = ffi._parser._int_constants[name] # accessors = {} accessors_version = [False] # def update_accessors(): if accessors_version[0] is ffi._cdef_version: return # from . import model for key, (tp, _) in ffi._parser._declarations.items(): if not isinstance(tp, model.EnumType): tag, name = key.split(' ', 1) if tag == 'function': accessors[name] = accessor_function elif tag == 'variable': accessors[name] = accessor_variable elif tag == 'constant': accessors[name] = accessor_constant else: for i, enumname in enumerate(tp.enumerators): def accessor_enum(name, tp=tp, i=i): tp.check_not_partial() library.__dict__[name] = tp.enumvalues[i] accessors[enumname] = accessor_enum for name in ffi._parser._int_constants: accessors.setdefault(name, accessor_int_constant) accessors_version[0] = ffi._cdef_version # def make_accessor(name): with ffi._lock: if name in library.__dict__ or name in FFILibrary.__dict__: return # added by another thread while waiting for the lock if name not in accessors: update_accessors() if name not in accessors: raise AttributeError(name) accessors[name](name) # class FFILibrary(object): def __getattr__(self, name): make_accessor(name) return getattr(self, name) def __setattr__(self, name, value): try: property = getattr(self.__class__, name) except AttributeError: make_accessor(name) setattr(self, name, value) else: property.__set__(self, value) def __dir__(self): with ffi._lock: update_accessors() return accessors.keys() # if libname is not None: try: if not isinstance(libname, str): # unicode, on Python 2 libname = libname.encode('utf-8') FFILibrary.__name__ = 'FFILibrary_%s' % libname except UnicodeError: pass library = FFILibrary() return library, library.__dict__
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/cffi-1.5.2/cffi/api.py/_make_ffi_library
5,347
def _builtin_function_type(func): # a hack to make at least ffi.typeof(builtin_function) work, # if the builtin function was obtained by 'vengine_cpy'. import sys try: module = sys.modules[func.__module__] ffi = module._cffi_original_ffi types_of_builtin_funcs = module._cffi_types_of_builtin_funcs tp = types_of_builtin_funcs[func] except (__HOLE__, AttributeError, TypeError): return None else: with ffi._lock: return ffi._get_cached_btype(tp)
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/cffi-1.5.2/cffi/api.py/_builtin_function_type
5,348
def makePath(path): # Try creating a directory try: os.makedirs(path) except __HOLE__, err: if err.errno != 17: raise
OSError
dataset/ETHPy150Open skyostil/tracy/src/analyzer/Report.py/makePath
5,349
def _generateNode(self, output, node): dispatchTable = { Section: self._generateSection, Paragraph: self._generateParagraph, Image: self._generateImage, Table: self._generateTable, Link: self._generateLink, LinkTarget: self._generateLinkTarget, UnorderedList: self._generateUnorderedList, } self._depth += 1 self._sectionCounter.append(0) try: dispatchTable[node.__class__](output, node) except __HOLE__: print >>output, cgi.escape(str(node)), self._sectionCounter.pop() self._depth -= 1
KeyError
dataset/ETHPy150Open skyostil/tracy/src/analyzer/Report.py/HtmlCompiler._generateNode
5,350
def read_file(filename): """Read a file into a string""" path = os.path.abspath(os.path.dirname(__file__)) filepath = os.path.join(path, filename) try: return open(filepath).read() except __HOLE__: return '' # Use the docstring of the __init__ file to be the description
IOError
dataset/ETHPy150Open antoinemartin/django-windows-tools/setup.py/read_file
5,351
def testWithRaise(self): counter = 0 try: counter += 1 with mock_contextmanager_generator(): counter += 10 raise RuntimeError counter += 100 # Not reached except __HOLE__: self.assertEqual(counter, 11) else: self.fail("Didn't raise RuntimeError")
RuntimeError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_with.py/NonLocalFlowControlTestCase.testWithRaise
5,352
def testExceptionInEnter(self): try: with self.Dummy() as a, self.EnterRaises(): self.fail('body of bad with executed') except __HOLE__: pass else: self.fail('RuntimeError not reraised') self.assertTrue(a.enter_called) self.assertTrue(a.exit_called)
RuntimeError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/test/test_with.py/NestedWith.testExceptionInEnter
5,353
@staticmethod def parsetime(time_str): """ Try to parse any generalised time to standard format. For now used by Codechef """ try: dt = datetime.datetime.strptime(time_str, "%I:%M %p %d/%m/%y") return dt except __HOLE__: cal = pdt.Calendar() dt, flags = cal.parseDT(time_str) assert flags return dt # -------------------------------------------------------------------------
ValueError
dataset/ETHPy150Open stopstalk/stopstalk-deployment/modules/sites/codechef.py/Profile.parsetime
5,354
@staticmethod def get_tags(problem_link): url = problem_link.split("/") url = url[2:] url.insert(1, "api/contests") if len(url) == 4: url.insert(2, "PRACTICE") url = "https://" + "/".join(url) response = get_request(url) if response == -1 or response == {}: return ["-"] t = response.json() all_tags = [] try: tags = t["tags"] all_as = BeautifulSoup(str(tags)).find_all("a") for i in all_as: all_tags.append(i.contents[0].strip()) return all_tags except __HOLE__: return all_tags # -------------------------------------------------------------------------
KeyError
dataset/ETHPy150Open stopstalk/stopstalk-deployment/modules/sites/codechef.py/Profile.get_tags
5,355
def parallelize_codechef(self, handle, page, last_retrieved): """ Helper function for retrieving codechef submissions parallely """ if self.retrieve_failed: return url = "https://www.codechef.com/recent/user?user_handle=" + \ handle + \ "&page=" + \ str(page) tmp = get_request(url, headers={"User-Agent": user_agent}) # GET request failed if tmp == -1 or tmp == {}: self.retrieve_failed = True return d = ast.literal_eval(tmp.text)["content"] it = 1 self.submissions[handle][page] = {} x = bs4.BeautifulSoup(d) for i in x.find_all("tr"): try: if i["class"][0] == "kol": self.submissions[handle][page][it] = [] submission = self.submissions[handle][page][it] append = submission.append # tos = time_of_submission tos = i.contents[0].contents[0] tos = str(ast.literal_eval(repr(tos).replace("\\", ""))) tos = self.parsetime(tos) curr = time.strptime(str(tos), "%Y-%m-%d %H:%M:%S") # So cool optimization! if curr <= last_retrieved: return append(str(tos)) # Problem name/url prob = i.contents[1].contents[0] prob["href"] = "http://www.codechef.com" + prob["href"] problem_link = eval(repr(prob["href"]).replace("\\", "")) append(problem_link) try: append(prob.contents[0]) except __HOLE__: append("") # Submission status stat = i.contents[2].contents[0] stat = stat.find("img")["src"] stat = repr(stat).replace("\\", "") stat = stat[7:-5] st = "AC" if stat == "tick-icon": st = "AC" elif stat == "cross-icon": st = "WA" elif stat == "alert-icon": st = "CE" elif stat == "runtime-error": st = "RE" elif stat == "clock_error": st = "TLE" else: st = "OTH" append(st) # Question points pts = i.contents[2].contents[0].contents try: if len(pts) >= 5: points = pts[2] + " " + pts[4] else: points = pts[2] except IndexError: if st == "AC": points = "100" else: points = "0" append(points) # Language append(i.contents[3].contents[0].strip()) # View code link # @ToDo: Find a way to get the code link view_link = "" append(view_link) it += 1 except KeyError: pass # -------------------------------------------------------------------------
IndexError
dataset/ETHPy150Open stopstalk/stopstalk-deployment/modules/sites/codechef.py/Profile.parallelize_codechef
5,356
def get_submissions(self, last_retrieved): if self.retrieve_failed: return -1 if self.handle: handle = self.handle else: return -1 user_url = "http://www.codechef.com/recent/user?user_handle=" + handle tmp = get_request(user_url, headers={"User-Agent": user_agent}) if tmp == -1 or tmp == {}: return tmp d = ast.literal_eval(tmp.text) max_page = d["max_page"] submissions = {handle: {}} it = 1 # Apply parallel processing only if retrieving from the INITIAL_DATE tmp_const = time.strptime(current.INITIAL_DATE, "%Y-%m-%d %H:%M:%S") if tmp_const == last_retrieved: threads = [] for i in xrange(max_page): threads.append(gevent.spawn(self.parallelize_codechef, handle, i, last_retrieved)) gevent.joinall(threads) return self.submissions else: for page in xrange(0, max_page): user_url = "http://www.codechef.com/recent/user?user_handle=" + \ handle + \ "&page=" + \ str(page) tmp = get_request(user_url, headers={"User-Agent": user_agent}) if tmp == -1: return -1 d = ast.literal_eval(tmp.text)["content"] submissions[handle][page] = {} x = bs4.BeautifulSoup(d) for i in x.find_all("tr"): try: if i["class"][0] == "kol": submissions[handle][page][it] = [] submission = submissions[handle][page][it] append = submission.append # tos = time_of_submission tos = i.contents[0].contents[0] tos = str(ast.literal_eval(repr(tos).replace("\\", ""))) # Do not retrieve any further because this leads to ambiguity # If 2 hours ago => 2 hours 20 mins or 2 hours 14 mins ... # Let the user come back later when the datetime is exact # This prevents from redundant addition into database # @ToDo: For now we are allowing redundant submissions # for codechef :/ . Find a way to change it. #if tos.__contains__("hours"): # continue tos = self.parsetime(tos) curr = time.strptime(str(tos), "%Y-%m-%d %H:%M:%S") if curr <= last_retrieved: return submissions append(str(tos)) # Problem name/url prob = i.contents[1].contents[0] prob["href"] = "http://www.codechef.com" + prob["href"] problem_link = eval(repr(prob["href"]).replace("\\", "")) append(problem_link) try: append(prob.contents[0]) except IndexError: append("") # Submission status stat = i.contents[2].contents[0] stat = stat.find("img")["src"] stat = repr(stat).replace("\\", "") stat = stat[7:-5] st = "AC" if stat == "tick-icon": st = "AC" elif stat == "cross-icon": st = "WA" elif stat == "alert-icon": st = "CE" elif stat == "runtime-error": st = "RE" elif stat == "clock_error": st = "TLE" else: st = "OTH" append(st) # Question points pts = i.contents[2].contents[0].contents try: if len(pts) >= 5: points = pts[2] + " " + pts[4] else: points = pts[2] except __HOLE__: if st == "AC": points = "100" else: points = "0" append(points) # Language append(i.contents[3].contents[0].strip()) # View code link view_link = "" append(view_link) it += 1 except KeyError: pass return submissions
IndexError
dataset/ETHPy150Open stopstalk/stopstalk-deployment/modules/sites/codechef.py/Profile.get_submissions
5,357
@given(u'A working Radamsa installation') def step_impl(context): """Check for a working Radamsa installation.""" if context.radamsa_location is None: assert False, "The feature file requires Radamsa, but the path is " \ "undefined." try: subprocess.check_output([context.radamsa_location, "--help"], stderr=subprocess.STDOUT) except (subprocess.CalledProcessError, __HOLE__) as error: assert False, "Could not execute Radamsa from %s: %s" % (context.radamsa_location, error) assert True
OSError
dataset/ETHPy150Open F-Secure/mittn/mittn/httpfuzzer/steps.py/step_impl
5,358
def args2body(self, parsed_args): params = {} if parsed_args.name: params['name'] = parsed_args.name if not isinstance(parsed_args.start, datetime.datetime): try: parsed_args.start = datetime.datetime.strptime( parsed_args.start, '%Y-%m-%d %H:%M') except ValueError: raise exception.IncorrectLease if not isinstance(parsed_args.end, datetime.datetime): try: parsed_args.end = datetime.datetime.strptime( parsed_args.end, '%Y-%m-%d %H:%M') except __HOLE__: raise exception.IncorrectLease if parsed_args.start > parsed_args.end: raise exception.IncorrectLease params['start'] = datetime.datetime.strftime(parsed_args.start, '%Y-%m-%d %H:%M') params['end'] = datetime.datetime.strftime(parsed_args.end, '%Y-%m-%d %H:%M') params['reservations'] = [] params['events'] = [] physical_reservations = [] for phys_res_str in parsed_args.physical_reservations: err_msg = ("Invalid physical-reservation argument '%s'. " "Reservation arguments must be of the " "form --physical-reservation <min=int,max=int," "hypervisor_properties=str,resource_properties=str>" % phys_res_str) phys_res_info = {"min": "", "max": "", "hypervisor_properties": "", "resource_properties": ""} prog = re.compile('^(\w+)=(\w+|\[[^]]+\])(?:,(.+))?$') def parse_params(params): match = prog.search(params) if match: self.log.info("Matches: %s", match.groups()) k, v = match.group(1, 2) if k in phys_res_info: phys_res_info[k] = v else: raise exception.IncorrectLease(err_msg) if len(match.groups()) == 3 and match.group(3) is not None: parse_params(match.group(3)) parse_params(phys_res_str) if not phys_res_info['min'] and not phys_res_info['max']: raise exception.IncorrectLease(err_msg) # NOTE(sbauza): The resource type should be conf-driven mapped with # climate.conf file but that's potentially on another # host phys_res_info['resource_type'] = 'physical:host' physical_reservations.append(phys_res_info) if physical_reservations: params['reservations'] += physical_reservations reservations = [] for res_str in parsed_args.reservations: err_msg = ("Invalid reservation argument '%s'. " "Reservation arguments must be of the " "form --reservation <key=value>" % res_str) res_info = {} for kv_str in res_str.split(","): try: k, v = kv_str.split("=", 1) except ValueError: raise exception.IncorrectLease(err_msg) res_info[k] = v reservations.append(res_info) if reservations: params['reservations'] += reservations if not params['reservations']: raise exception.IncorrectLease events = [] for event_str in parsed_args.events: err_msg = ("Invalid event argument '%s'. " "Event arguments must be of the " "form --event <event_type=str,event_date=time>" % event_str) event_info = {"event_type": "", "event_date": ""} for kv_str in event_str.split(","): try: k, v = kv_str.split("=", 1) except ValueError: raise exception.IncorrectLease(err_msg) if k in event_info: event_info[k] = v else: raise exception.IncorrectLease(err_msg) if not event_info['event_type'] and not event_info['event_date']: raise exception.IncorrectLease(err_msg) event_date = event_info['event_date'] try: date = datetime.datetime.strptime(event_date, '%Y-%m-%d %H:%M') event_date = datetime.datetime.strftime(date, '%Y-%m-%d %H:%M') event_info['event_date'] = event_date except ValueError: raise exception.IncorrectLease events.append(event_info) if events: params['events'] = events return params
ValueError
dataset/ETHPy150Open openstack/python-blazarclient/climateclient/v1/shell_commands/leases.py/CreateLease.args2body
5,359
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, download_delay=15): to_dir = os.path.abspath(to_dir) rep_modules = 'pkg_resources', 'setuptools' imported = set(sys.modules).intersection(rep_modules) try: import pkg_resources except __HOLE__: return _do_download(version, download_base, to_dir, download_delay) try: pkg_resources.require("setuptools>=" + version) return except pkg_resources.DistributionNotFound: return _do_download(version, download_base, to_dir, download_delay) except pkg_resources.VersionConflict as VC_err: if imported: msg = textwrap.dedent(""" The required version of setuptools (>={version}) is not available, and can't be installed while this script is running. Please install a more recent version first, using 'easy_install -U setuptools'. (Currently using {VC_err.args[0]!r}) """).format(VC_err=VC_err, version=version) sys.stderr.write(msg) sys.exit(2) # otherwise, reload ok del pkg_resources, sys.modules['pkg_resources'] return _do_download(version, download_base, to_dir, download_delay)
ImportError
dataset/ETHPy150Open akamai-open/api-kickstart/examples/python/tools/ez_setup.py/use_setuptools
5,360
@classmethod def _get_val(cls, item, path): """ Return the answer in the given submitted form (item) to the question specified by path. Return empty tuple if no answer was given to the given question. """ if path: try: v = item['form'] for key in path: v = v[key] return v except __HOLE__: return ()
KeyError
dataset/ETHPy150Open dimagi/commcare-hq/custom/abt/reports/expressions.py/AbtSupervisorExpressionSpec._get_val
5,361
def get(self, request, repo, slug): condition = Q() for name in get_search_names(slug): condition |= Q(name__iexact=name) try: package = self.repository.packages.get(condition) except __HOLE__: if not self.repository.enable_auto_mirroring: raise Http404("Auto mirroring is not enabled") enqueue(fetch_package, self.repository.pk, slug) return redirect(self.repository.upstream_pypi_url + '/' + slug) # Redirect if slug is not an exact match if slug != package.name: url = reverse('packages:simple_detail', kwargs={ 'repo': self.repository.slug, 'slug': package.name }) return redirect(url) self.object = package context = self.get_context_data( object=self.object, releases=list(package.releases.all())) return self.render_to_response(context)
ObjectDoesNotExist
dataset/ETHPy150Open mvantellingen/localshop/localshop/apps/packages/views.py/SimpleDetail.get
5,362
def get(self, request, repo, name): try: package = self.repository.packages.get(name__iexact=name) except __HOLE__: package = None enqueue(fetch_package, self.repository.pk, name) return redirect(package)
ObjectDoesNotExist
dataset/ETHPy150Open mvantellingen/localshop/localshop/apps/packages/views.py/PackageRefreshView.get
5,363
def handle_register_or_upload(post_data, files, user, repository): """Process a `register` or `upload` comment issued via distutils. This method is called with the authenticated user. """ name = post_data.get('name') version = post_data.get('version') if settings.LOCALSHOP_VERSIONING_TYPE: scheme = get_versio_versioning_scheme(settings.LOCALSHOP_VERSIONING_TYPE) try: Version(version, scheme=scheme) except __HOLE__: response = HttpResponseBadRequest( reason="Invalid version supplied '{!s}' for '{!s}' scheme.".format( version, settings.LOCALSHOP_VERSIONING_TYPE)) return response if not name or not version: logger.info("Missing name or version for package") return HttpResponseBadRequest('No name or version given') try: condition = Q() for search_name in get_search_names(name): condition |= Q(name__iexact=search_name) package = repository.packages.get(condition) # Error out when we try to override a mirror'ed package for now # not sure what the best thing is if not package.is_local: return HttpResponseBadRequest( '%s is a pypi package!' % package.name) try: release = package.releases.get(version=version) except ObjectDoesNotExist: release = None except ObjectDoesNotExist: package = None release = None # Validate the data form = forms.ReleaseForm(post_data, instance=release) if not form.is_valid(): return HttpResponseBadRequest(reason=form.errors.values()[0][0]) if not package: pkg_form = forms.PackageForm(post_data, repository=repository) if not pkg_form.is_valid(): return HttpResponseBadRequest( reason=six.next(six.itervalues(pkg_form.errors))[0]) package = pkg_form.save() release = form.save(commit=False) release.package = package release.save() # If this is an upload action then process the uploaded file if files: filename = files['distribution']._name try: release_file = release.files.get(filename=filename) if settings.LOCALSHOP_RELEASE_OVERWRITE is False: message = 'That it already released, please bump version.' return HttpResponseBadRequest(message) except ObjectDoesNotExist: release_file = models.ReleaseFile( release=release, filename=filename) form_file = forms.ReleaseFileForm( post_data, files, instance=release_file) if not form_file.is_valid(): return HttpResponseBadRequest('ERRORS %s' % form_file.errors) release_file = form_file.save(commit=False) release_file.save() return HttpResponse()
AttributeError
dataset/ETHPy150Open mvantellingen/localshop/localshop/apps/packages/views.py/handle_register_or_upload
5,364
def instance_to_dict(obj): """Recursively convert a class instance into a dict args: obj: a class instance returns: dict representation """ if isinstance(obj, (int, float, complex, bool, str)): return obj if isinstance(obj, dict): new = {} for k in obj: new[k] = instance_to_dict(obj[k]) return new if isinstance(obj, (list, tuple)): new = [] for val in obj: new.append(instance_to_dict(val)) return new new = {} try: for k in obj.__dict__: new[k] = instance_to_dict(obj.__dict__[k]) except __HOLE__: return str(obj) else: return new
AttributeError
dataset/ETHPy150Open polaris-gslb/polaris-gslb/polaris_health/util/__init__.py/instance_to_dict
5,365
def __init__(self, path, budget, device=None): """ Load YNAB budget from the specified path. Parameters ---------- path : str Path to the budget root, e.g. ~/Documents/YNAB for local budgets or ~/Dropbox/YNAB for cloud-synced budgets. budget : str Budget name. device : str (optional) Device name -- this corresponds to the .ydevice files in the "devices" folder. Can be either A, B, C, ... or a full device name (hostname for desktops). The full name can be found in .ydevice files in the "devices" folder. If this parameter is not specified, the device with the latest modification time of the budget file will be selected. """ root = os.path.abspath(os.path.expanduser(path)) pattern = re.compile('^' + re.escape(budget) + r'~[A-F0-9]{8}\.ynab4$') folders = list(filter(pattern.match, os.listdir(root))) if not folders: raise RuntimeError('Budget {!r} not found at: {}'.format(budget, path)) if len(folders) > 1: raise RuntimeError('Multiple budgets {!r} found at: {}'.format(budget, path)) budget_folder = os.path.join(root, folders.pop()) meta = os.path.join(budget_folder, 'Budget.ymeta') with open(meta, 'r') as f: data_folder = os.path.join(budget_folder, json.load(f)['relativeDataFolderName']) devices_folder = os.path.join(data_folder, 'devices') device_files = filter(re.compile(r'^[A-Z]\.ydevice$').match, os.listdir(devices_folder)) devices = [] for device_file in device_files: with open(os.path.join(devices_folder, device_file), 'r') as f: device_data = Device(json.load(f), strict=False) guid = device_data.deviceGUID budget_file = os.path.join(data_folder, guid, 'Budget.yfull') if os.path.isfile(budget_file): devices.append({ 'id': device_data.shortDeviceId, 'name': device_data.friendlyName, 'file': budget_file, 'mtime': os.stat(budget_file).st_mtime, 'full': device_data.hasFullKnowledge }) if not devices: raise RuntimeError('No valid devices found for {!r} at: {}'.format(budget, path)) if device is None: devices = [d for d in devices if d['full']] if not devices: raise RuntimeError('No devices with full knowledge found') device = max(devices, key=lambda d: d['mtime']) else: try: if device in string.ascii_uppercase: device = [d for d in devices if d['id'] == device].pop() else: device = [d for d in devices if d['name'] == device].pop() if not device['full']: warnings.warn('Device {!r} does not have full knowledge'.format(d['name'])) except __HOLE__: raise RuntimeError('No device {!r} for {!r} at: {}'.format(device, budget, path)) self._path = device['file'] self._device = device['name'] with open(self._path, 'r') as f: self._init_data(json.load(f))
IndexError
dataset/ETHPy150Open aldanor/pynab/ynab/ynab.py/YNAB.__init__
5,366
def make_syntax(): # note that the key is a tuple of the number of arguments, # and the name of the reference before the first space. # for example [refer year name] would be (2, u"refer") # and [absurd] would be (0, u"absurd") # the value is a function that accepts # entry, str, and then N additional parameters where # N is equal to the number of args specified in the # tuple # [this is my author bio][author] def author(entry, str): authors = entry.authors.all() if len(authors) == 1: return str % authors[0].get_absolute_url() else: return str % u"/author/" # [this is the lifeflow tag ][tag lifeflow] def tag(entry, str, slug): t = lifeflow.models.Tag.objects.get(slug=slug) return str % t.get_absolute_url() # [this is the comment with primary key 123][comment 123] def comment(entry, str, pk): c = lifeflow.models.Comment.objects.get(pk=int(pk)) return str % c.get_absolute_url() # [this is the project with slug magic-wand][project magic-wand] def project(entry, str, slug): p = lifeflow.models.Project.objects.get(slug=slug) return str % p.get_absolute_url() # [remember my previous entry?][previous] def previous(entry, str): if entry.__class__.__name__ == "Entry": prev = entry.get_previous_article() if prev is None: return None return str % prev.get_absolute_url() # [Update: I clarified this in the next entry!][next] def next(entry, str): if entry.__class__.__name__ == "Entry": nxt = entry.get_next_article() if nxt is None: return None return str % nxt.get_absolute_url() # [Check out the first entry in this series][series 1] # [or the second entry!][series 2] def series_number(entry, str, nth): try: nth = int(nth) if nth > 0: nth = nth - 1 except ValueError: return None series = entry.series.all()[0] if series: try: e = series.entry_set.all().order_by('pub_date')[nth] return str % e.get_absolute_url() except __HOLE__: return None # [Remember the Two-Faced Django series?][series two_faced 1] # [Well, I wrote that too! Go me.][series jet-survival 3] def series_slug_number(entry, str, slug, nth): try: nth = int(nth) if nth > 0: nth = nth - 1 except ValueError: return None try: series = lifeflow.models.Series.objects.get(slug=slug) except lifeflow.models.Series.DoesNotExist: return None try: e = series.entry_set.all()[nth] return str % e.get_absolute_url() except IndexError: return None # [and check out this code!][file the_name] # ![ a picture that I really like][file my_pic] # ![ and you can abreviate it][f my_pic] # [this way too][f my_code] def file(entry, str, name): try: resource = lifeflow.models.Resource.objects.get(markdown_id=name) return str % resource.get_relative_url() except lifeflow.models.Resource.DoesNotExist: return None # [I like markdown][history md] # [and talk about why the lucky stiff occasionally][history why] # [but history is long... so...][h why] # [and a link to my svn][h svn_lethain] def history(entry, str, name): pass syntax = {} syntax[(0, u"previous")] = previous syntax[(0, u"next")] = next syntax[(0, u"author")] = author syntax[(1, u"file")] = file syntax[(1, u"f")] = file syntax[(1, u"tag")] = tag syntax[(1, u"comment")] = comment syntax[(1, u"project")] = project syntax[(1, u"series")] = series_number syntax[(2, u"series")] = series_slug_number return syntax
IndexError
dataset/ETHPy150Open lethain/lifeflow/markdown/mdx_lifeflow.py/make_syntax
5,367
def process_dynamic(self, ref):
    # if tag has already been built, ignore
    if self.tags.has_key(ref):
        return None
    parts = ref.split(u" ")
    name = parts[0]
    args = parts[1:]
    length = len(args)
    format = u"[%s]: %s" % (ref, u"%s")
    try:
        func = self.syntax[(length, name)]
        result = func(self.entry, format, *args)
        self.tags[ref] = True
        return result
    except __HOLE__:
        self.tags[ref] = False
        to_return = None
KeyError
dataset/ETHPy150Open lethain/lifeflow/markdown/mdx_lifeflow.py/LifeflowPreprocessor.process_dynamic
5,368
@if_connected
def on_pre_save(self, view, agent):
    if view.is_scratch():
        return
    p = view.name()
    if view.file_name():
        try:
            p = utils.to_rel_path(view.file_name())
        except __HOLE__:
            p = view.file_name()
    i = self.between_save_events[view.buffer_id()]
    i[0] += 1
    i[1] = p
ValueError
dataset/ETHPy150Open Floobits/floobits-sublime/floo/listener.py/Listener.on_pre_save
5,369
@if_connected def on_post_save(self, view, agent): view_buf_id = view.buffer_id() def cleanup(): i = self.between_save_events[view_buf_id] i[0] -= 1 if view.is_scratch(): return i = self.between_save_events[view_buf_id] if agent.ignored_saves[view_buf_id] > 0: agent.ignored_saves[view_buf_id] -= 1 return cleanup() old_name = i[1] i = self.between_save_events[view_buf_id] if i[0] > 1: return cleanup() old_name = i[1] event = None buf = get_buf(view) try: name = utils.to_rel_path(view.file_name()) except __HOLE__: name = view.file_name() is_shared = utils.is_shared(view.file_name()) if buf is None: if not is_shared: return cleanup() if G.IGNORE and G.IGNORE.is_ignored(view.file_name(), log=True): msg.log(view.file_name(), ' is ignored. Not creating buffer.') return cleanup() msg.log('Creating new buffer ', name, ' ', view.file_name()) event = { 'name': 'create_buf', 'buf': get_text(view), 'path': name } elif name != old_name: if is_shared: msg.log('renamed buffer ', old_name, ' to ', name) event = { 'name': 'rename_buf', 'id': buf['id'], 'path': name } else: msg.log('deleting buffer from shared: ', name) event = { 'name': 'delete_buf', 'id': buf['id'], } if event: agent.send(event) if is_shared and buf: agent.views_changed.append(('saved', view, buf)) cleanup()
ValueError
dataset/ETHPy150Open Floobits/floobits-sublime/floo/listener.py/Listener.on_post_save
5,370
def copyfile(src, dest, symlink=True):
    if not os.path.exists(src):
        # Some bad symlink in the src
        logger.warn('Cannot find file %s (bad symlink)', src)
        return
    if os.path.exists(dest):
        logger.debug('File %s already exists', dest)
        return
    if not os.path.exists(os.path.dirname(dest)):
        logger.info('Creating parent directories for %s', os.path.dirname(dest))
        os.makedirs(os.path.dirname(dest))
    if not os.path.islink(src):
        srcpath = os.path.abspath(src)
    else:
        srcpath = os.readlink(src)
    if symlink and hasattr(os, 'symlink') and not is_win:
        logger.info('Symlinking %s', dest)
        try:
            os.symlink(srcpath, dest)
        except (OSError, __HOLE__):
            logger.info('Symlinking failed, copying to %s', dest)
            copyfileordir(src, dest, symlink)
    else:
        logger.info('Copying to %s', dest)
        copyfileordir(src, dest, symlink)
NotImplementedError
dataset/ETHPy150Open femmerling/backyard/virtualenv.py/copyfile
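The symlink-or-copy fallback exercised above can be shown on its own: creating a symlink may fail with OSError (for instance, insufficient privileges on Windows) or the call may be unavailable altogether, and both cases end in a plain copy. A small sketch under those assumptions, using only the standard library:

import os
import shutil

def link_or_copy(src, dest):
    """Try to symlink src at dest; fall back to copying on any failure."""
    try:
        os.symlink(os.path.abspath(src), dest)
    except (OSError, NotImplementedError, AttributeError):
        # AttributeError covers interpreters that lack os.symlink entirely
        shutil.copy2(src, dest)

# link_or_copy('requirements.txt', '/tmp/requirements.txt')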
5,371
def file_search_dirs():
    here = os.path.dirname(os.path.abspath(__file__))
    dirs = ['.', here, join(here, 'virtualenv_support')]
    if os.path.splitext(os.path.dirname(__file__))[0] != 'virtualenv':
        # Probably some boot script; just in case virtualenv is installed...
        try:
            import virtualenv
        except __HOLE__:
            pass
        else:
            dirs.append(os.path.join(os.path.dirname(virtualenv.__file__), 'virtualenv_support'))
    return [d for d in dirs if os.path.isdir(d)]
ImportError
dataset/ETHPy150Open femmerling/backyard/virtualenv.py/file_search_dirs
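The try/except/else import probe above is a common way to extend a search path only when an optional package is importable. The same idiom in isolation; the module name is just an example:

import os

search_dirs = ['.']
try:
    import simplejson as optional_pkg    # any optional dependency works here
except ImportError:
    pass
else:
    # only reached when the import succeeded
    search_dirs.append(os.path.dirname(optional_pkg.__file__))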
5,372
def call_subprocess(cmd, show_stdout=True, filter_stdout=None, cwd=None, raise_on_returncode=True, extra_env=None, remove_from_env=None): cmd_parts = [] for part in cmd: if len(part) > 45: part = part[:20]+"..."+part[-20:] if ' ' in part or '\n' in part or '"' in part or "'" in part: part = '"%s"' % part.replace('"', '\\"') if hasattr(part, 'decode'): try: part = part.decode(sys.getdefaultencoding()) except __HOLE__: part = part.decode(sys.getfilesystemencoding()) cmd_parts.append(part) cmd_desc = ' '.join(cmd_parts) if show_stdout: stdout = None else: stdout = subprocess.PIPE logger.debug("Running command %s" % cmd_desc) if extra_env or remove_from_env: env = os.environ.copy() if extra_env: env.update(extra_env) if remove_from_env: for varname in remove_from_env: env.pop(varname, None) else: env = None try: proc = subprocess.Popen( cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout, cwd=cwd, env=env) except Exception: e = sys.exc_info()[1] logger.fatal( "Error %s while executing command %s" % (e, cmd_desc)) raise all_output = [] if stdout is not None: stdout = proc.stdout encoding = sys.getdefaultencoding() fs_encoding = sys.getfilesystemencoding() while 1: line = stdout.readline() try: line = line.decode(encoding) except UnicodeDecodeError: line = line.decode(fs_encoding) if not line: break line = line.rstrip() all_output.append(line) if filter_stdout: level = filter_stdout(line) if isinstance(level, tuple): level, line = level logger.log(level, line) if not logger.stdout_level_matches(level): logger.show_progress() else: logger.info(line) else: proc.communicate() proc.wait() if proc.returncode: if raise_on_returncode: if all_output: logger.notify('Complete output from command %s:' % cmd_desc) logger.notify('\n'.join(all_output) + '\n----------------------------------------') raise OSError( "Command %s failed with error code %s" % (cmd_desc, proc.returncode)) else: logger.warn( "Command %s had error code %s" % (cmd_desc, proc.returncode))
UnicodeDecodeError
dataset/ETHPy150Open femmerling/backyard/virtualenv.py/call_subprocess
5,373
def path_locations(home_dir): """Return the path locations for the environment (where libraries are, where scripts go, etc)""" # XXX: We'd use distutils.sysconfig.get_python_inc/lib but its # prefix arg is broken: http://bugs.python.org/issue3386 if is_win: # Windows has lots of problems with executables with spaces in # the name; this function will remove them (using the ~1 # format): mkdir(home_dir) if ' ' in home_dir: import ctypes GetShortPathName = ctypes.windll.kernel32.GetShortPathNameW size = max(len(home_dir)+1, 256) buf = ctypes.create_unicode_buffer(size) try: u = unicode except __HOLE__: u = str ret = GetShortPathName(u(home_dir), buf, size) if not ret: print('Error: the path "%s" has a space in it' % home_dir) print('We could not determine the short pathname for it.') print('Exiting.') sys.exit(3) home_dir = str(buf.value) lib_dir = join(home_dir, 'Lib') inc_dir = join(home_dir, 'Include') bin_dir = join(home_dir, 'Scripts') if is_jython: lib_dir = join(home_dir, 'Lib') inc_dir = join(home_dir, 'Include') bin_dir = join(home_dir, 'bin') elif is_pypy: lib_dir = home_dir inc_dir = join(home_dir, 'include') bin_dir = join(home_dir, 'bin') elif not is_win: lib_dir = join(home_dir, 'lib', py_version) multiarch_exec = '/usr/bin/multiarch-platform' if is_executable_file(multiarch_exec): # In Mageia (2) and Mandriva distros the include dir must be like: # virtualenv/include/multiarch-x86_64-linux/python2.7 # instead of being virtualenv/include/python2.7 p = subprocess.Popen(multiarch_exec, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = p.communicate() # stdout.strip is needed to remove newline character inc_dir = join(home_dir, 'include', stdout.strip(), py_version + abiflags) else: inc_dir = join(home_dir, 'include', py_version + abiflags) bin_dir = join(home_dir, 'bin') return home_dir, lib_dir, inc_dir, bin_dir
NameError
dataset/ETHPy150Open femmerling/backyard/virtualenv.py/path_locations
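The hole in the path_locations record is the classic Python 2/3 shim: referencing the name unicode raises NameError on Python 3, so the code falls back to str. The same probe on its own:

try:
    text_type = unicode      # exists on Python 2
except NameError:            # Python 3: the name is gone
    text_type = str

print(text_type("home dir"))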
5,374
def copy_required_modules(dst_prefix, symlink): import imp # If we are running under -p, we need to remove the current # directory from sys.path temporarily here, so that we # definitely get the modules from the site directory of # the interpreter we are running under, not the one # virtualenv.py is installed under (which might lead to py2/py3 # incompatibility issues) _prev_sys_path = sys.path if os.environ.get('VIRTUALENV_INTERPRETER_RUNNING'): sys.path = sys.path[1:] try: for modname in REQUIRED_MODULES: if modname in sys.builtin_module_names: logger.info("Ignoring built-in bootstrap module: %s" % modname) continue try: f, filename, _ = imp.find_module(modname) except __HOLE__: logger.info("Cannot import bootstrap module: %s" % modname) else: if f is not None: f.close() # special-case custom readline.so on OS X, but not for pypy: if modname == 'readline' and sys.platform == 'darwin' and not ( is_pypy or filename.endswith(join('lib-dynload', 'readline.so'))): dst_filename = join(dst_prefix, 'lib', 'python%s' % sys.version[:3], 'readline.so') elif modname == 'readline' and sys.platform == 'win32': # special-case for Windows, where readline is not a # standard module, though it may have been installed in # site-packages by a third-party package pass else: dst_filename = change_prefix(filename, dst_prefix) copyfile(filename, dst_filename, symlink) if filename.endswith('.pyc'): pyfile = filename[:-1] if os.path.exists(pyfile): copyfile(pyfile, dst_filename[:-1], symlink) finally: sys.path = _prev_sys_path
ImportError
dataset/ETHPy150Open femmerling/backyard/virtualenv.py/copy_required_modules
5,375
def install_python(home_dir, lib_dir, inc_dir, bin_dir, site_packages, clear, symlink=True): """Install just the base environment, no distutils patches etc""" if sys.executable.startswith(bin_dir): print('Please use the *system* python to run this script') return if clear: rmtree(lib_dir) ## FIXME: why not delete it? ## Maybe it should delete everything with #!/path/to/venv/python in it logger.notify('Not deleting %s', bin_dir) if hasattr(sys, 'real_prefix'): logger.notify('Using real prefix %r' % sys.real_prefix) prefix = sys.real_prefix elif hasattr(sys, 'base_prefix'): logger.notify('Using base prefix %r' % sys.base_prefix) prefix = sys.base_prefix else: prefix = sys.prefix mkdir(lib_dir) fix_lib64(lib_dir, symlink) stdlib_dirs = [os.path.dirname(os.__file__)] if is_win: stdlib_dirs.append(join(os.path.dirname(stdlib_dirs[0]), 'DLLs')) elif is_darwin: stdlib_dirs.append(join(stdlib_dirs[0], 'site-packages')) if hasattr(os, 'symlink'): logger.info('Symlinking Python bootstrap modules') else: logger.info('Copying Python bootstrap modules') logger.indent += 2 try: # copy required files... for stdlib_dir in stdlib_dirs: if not os.path.isdir(stdlib_dir): continue for fn in os.listdir(stdlib_dir): bn = os.path.splitext(fn)[0] if fn != 'site-packages' and bn in REQUIRED_FILES: copyfile(join(stdlib_dir, fn), join(lib_dir, fn), symlink) # ...and modules copy_required_modules(home_dir, symlink) finally: logger.indent -= 2 mkdir(join(lib_dir, 'site-packages')) import site site_filename = site.__file__ if site_filename.endswith('.pyc'): site_filename = site_filename[:-1] elif site_filename.endswith('$py.class'): site_filename = site_filename.replace('$py.class', '.py') site_filename_dst = change_prefix(site_filename, home_dir) site_dir = os.path.dirname(site_filename_dst) writefile(site_filename_dst, SITE_PY) writefile(join(site_dir, 'orig-prefix.txt'), prefix) site_packages_filename = join(site_dir, 'no-global-site-packages.txt') if not site_packages: writefile(site_packages_filename, '') if is_pypy or is_win: stdinc_dir = join(prefix, 'include') else: stdinc_dir = join(prefix, 'include', py_version + abiflags) if os.path.exists(stdinc_dir): copyfile(stdinc_dir, inc_dir, symlink) else: logger.debug('No include dir %s' % stdinc_dir) platinc_dir = distutils.sysconfig.get_python_inc(plat_specific=1) if platinc_dir != stdinc_dir: platinc_dest = distutils.sysconfig.get_python_inc( plat_specific=1, prefix=home_dir) if platinc_dir == platinc_dest: # Do platinc_dest manually due to a CPython bug; # not http://bugs.python.org/issue3386 but a close cousin platinc_dest = subst_path(platinc_dir, prefix, home_dir) if platinc_dest: # PyPy's stdinc_dir and prefix are relative to the original binary # (traversing virtualenvs), whereas the platinc_dir is relative to # the inner virtualenv and ignores the prefix argument. # This seems more evolved than designed. 
copyfile(platinc_dir, platinc_dest, symlink) # pypy never uses exec_prefix, just ignore it if sys.exec_prefix != prefix and not is_pypy: if is_win: exec_dir = join(sys.exec_prefix, 'lib') elif is_jython: exec_dir = join(sys.exec_prefix, 'Lib') else: exec_dir = join(sys.exec_prefix, 'lib', py_version) for fn in os.listdir(exec_dir): copyfile(join(exec_dir, fn), join(lib_dir, fn), symlink) if is_jython: # Jython has either jython-dev.jar and javalib/ dir, or just # jython.jar for name in 'jython-dev.jar', 'javalib', 'jython.jar': src = join(prefix, name) if os.path.exists(src): copyfile(src, join(home_dir, name), symlink) # XXX: registry should always exist after Jython 2.5rc1 src = join(prefix, 'registry') if os.path.exists(src): copyfile(src, join(home_dir, 'registry'), symlink=False) copyfile(join(prefix, 'cachedir'), join(home_dir, 'cachedir'), symlink=False) mkdir(bin_dir) py_executable = join(bin_dir, os.path.basename(sys.executable)) if 'Python.framework' in prefix: # OS X framework builds cause validation to break # https://github.com/pypa/virtualenv/issues/322 if os.environ.get('__PYVENV_LAUNCHER__'): del os.environ["__PYVENV_LAUNCHER__"] if re.search(r'/Python(?:-32|-64)*$', py_executable): # The name of the python executable is not quite what # we want, rename it. py_executable = os.path.join( os.path.dirname(py_executable), 'python') logger.notify('New %s executable in %s', expected_exe, py_executable) pcbuild_dir = os.path.dirname(sys.executable) pyd_pth = os.path.join(lib_dir, 'site-packages', 'virtualenv_builddir_pyd.pth') if is_win and os.path.exists(os.path.join(pcbuild_dir, 'build.bat')): logger.notify('Detected python running from build directory %s', pcbuild_dir) logger.notify('Writing .pth file linking to build directory for *.pyd files') writefile(pyd_pth, pcbuild_dir) else: pcbuild_dir = None if os.path.exists(pyd_pth): logger.info('Deleting %s (not Windows env or not build directory python)' % pyd_pth) os.unlink(pyd_pth) if sys.executable != py_executable: ## FIXME: could I just hard link? executable = sys.executable shutil.copyfile(executable, py_executable) make_exe(py_executable) if is_win or is_cygwin: pythonw = os.path.join(os.path.dirname(sys.executable), 'pythonw.exe') if os.path.exists(pythonw): logger.info('Also created pythonw.exe') shutil.copyfile(pythonw, os.path.join(os.path.dirname(py_executable), 'pythonw.exe')) python_d = os.path.join(os.path.dirname(sys.executable), 'python_d.exe') python_d_dest = os.path.join(os.path.dirname(py_executable), 'python_d.exe') if os.path.exists(python_d): logger.info('Also created python_d.exe') shutil.copyfile(python_d, python_d_dest) elif os.path.exists(python_d_dest): logger.info('Removed python_d.exe as it is no longer at the source') os.unlink(python_d_dest) # we need to copy the DLL to enforce that windows will load the correct one. # may not exist if we are cygwin. 
py_executable_dll = 'python%s%s.dll' % ( sys.version_info[0], sys.version_info[1]) py_executable_dll_d = 'python%s%s_d.dll' % ( sys.version_info[0], sys.version_info[1]) pythondll = os.path.join(os.path.dirname(sys.executable), py_executable_dll) pythondll_d = os.path.join(os.path.dirname(sys.executable), py_executable_dll_d) pythondll_d_dest = os.path.join(os.path.dirname(py_executable), py_executable_dll_d) if os.path.exists(pythondll): logger.info('Also created %s' % py_executable_dll) shutil.copyfile(pythondll, os.path.join(os.path.dirname(py_executable), py_executable_dll)) if os.path.exists(pythondll_d): logger.info('Also created %s' % py_executable_dll_d) shutil.copyfile(pythondll_d, pythondll_d_dest) elif os.path.exists(pythondll_d_dest): logger.info('Removed %s as the source does not exist' % pythondll_d_dest) os.unlink(pythondll_d_dest) if is_pypy: # make a symlink python --> pypy-c python_executable = os.path.join(os.path.dirname(py_executable), 'python') if sys.platform in ('win32', 'cygwin'): python_executable += '.exe' logger.info('Also created executable %s' % python_executable) copyfile(py_executable, python_executable, symlink) if is_win: for name in ['libexpat.dll', 'libpypy.dll', 'libpypy-c.dll', 'libeay32.dll', 'ssleay32.dll', 'sqlite3.dll', 'tcl85.dll', 'tk85.dll']: src = join(prefix, name) if os.path.exists(src): copyfile(src, join(bin_dir, name), symlink) for d in sys.path: if d.endswith('lib_pypy'): break else: logger.fatal('Could not find lib_pypy in sys.path') raise SystemExit(3) logger.info('Copying lib_pypy') copyfile(d, os.path.join(home_dir, 'lib_pypy'), symlink) if os.path.splitext(os.path.basename(py_executable))[0] != expected_exe: secondary_exe = os.path.join(os.path.dirname(py_executable), expected_exe) py_executable_ext = os.path.splitext(py_executable)[1] if py_executable_ext.lower() == '.exe': # python2.4 gives an extension of '.4' :P secondary_exe += py_executable_ext if os.path.exists(secondary_exe): logger.warn('Not overwriting existing %s script %s (you must use %s)' % (expected_exe, secondary_exe, py_executable)) else: logger.notify('Also creating executable in %s' % secondary_exe) shutil.copyfile(sys.executable, secondary_exe) make_exe(secondary_exe) if '.framework' in prefix: if 'Python.framework' in prefix: logger.debug('MacOSX Python framework detected') # Make sure we use the embedded interpreter inside # the framework, even if sys.executable points to # the stub executable in ${sys.prefix}/bin # See http://groups.google.com/group/python-virtualenv/ # browse_thread/thread/17cab2f85da75951 original_python = os.path.join( prefix, 'Resources/Python.app/Contents/MacOS/Python') if 'EPD' in prefix: logger.debug('EPD framework detected') original_python = os.path.join(prefix, 'bin/python') shutil.copy(original_python, py_executable) # Copy the framework's dylib into the virtual # environment virtual_lib = os.path.join(home_dir, '.Python') if os.path.exists(virtual_lib): os.unlink(virtual_lib) copyfile( os.path.join(prefix, 'Python'), virtual_lib, symlink) # And then change the install_name of the copied python executable try: mach_o_change(py_executable, os.path.join(prefix, 'Python'), '@executable_path/../.Python') except: e = sys.exc_info()[1] logger.warn("Could not call mach_o_change: %s. " "Trying to call install_name_tool instead." 
% e) try: call_subprocess( ["install_name_tool", "-change", os.path.join(prefix, 'Python'), '@executable_path/../.Python', py_executable]) except: logger.fatal("Could not call install_name_tool -- you must " "have Apple's development tools installed") raise if not is_win: # Ensure that 'python', 'pythonX' and 'pythonX.Y' all exist py_exe_version_major = 'python%s' % sys.version_info[0] py_exe_version_major_minor = 'python%s.%s' % ( sys.version_info[0], sys.version_info[1]) py_exe_no_version = 'python' required_symlinks = [ py_exe_no_version, py_exe_version_major, py_exe_version_major_minor ] py_executable_base = os.path.basename(py_executable) if py_executable_base in required_symlinks: # Don't try to symlink to yourself. required_symlinks.remove(py_executable_base) for pth in required_symlinks: full_pth = join(bin_dir, pth) if os.path.exists(full_pth): os.unlink(full_pth) if symlink: os.symlink(py_executable_base, full_pth) else: copyfile(py_executable, full_pth, symlink) if is_win and ' ' in py_executable: # There's a bug with subprocess on Windows when using a first # argument that has a space in it. Instead we have to quote # the value: py_executable = '"%s"' % py_executable # NOTE: keep this check as one line, cmd.exe doesn't cope with line breaks cmd = [py_executable, '-c', 'import sys;out=sys.stdout;' 'getattr(out, "buffer", out).write(sys.prefix.encode("utf-8"))'] logger.info('Testing executable with %s %s "%s"' % tuple(cmd)) try: proc = subprocess.Popen(cmd, stdout=subprocess.PIPE) proc_stdout, proc_stderr = proc.communicate() except __HOLE__: e = sys.exc_info()[1] if e.errno == errno.EACCES: logger.fatal('ERROR: The executable %s could not be run: %s' % (py_executable, e)) sys.exit(100) else: raise e proc_stdout = proc_stdout.strip().decode("utf-8") proc_stdout = os.path.normcase(os.path.abspath(proc_stdout)) norm_home_dir = os.path.normcase(os.path.abspath(home_dir)) if hasattr(norm_home_dir, 'decode'): norm_home_dir = norm_home_dir.decode(sys.getfilesystemencoding()) if proc_stdout != norm_home_dir: logger.fatal( 'ERROR: The executable %s is not functioning' % py_executable) logger.fatal( 'ERROR: It thinks sys.prefix is %r (should be %r)' % (proc_stdout, norm_home_dir)) logger.fatal( 'ERROR: virtualenv is not compatible with this system or executable') if is_win: logger.fatal( 'Note: some Windows users have reported this error when they ' 'installed Python for "Only this user" or have multiple ' 'versions of Python installed. Copying the appropriate ' 'PythonXX.dll to the virtualenv Scripts/ directory may fix ' 'this problem.') sys.exit(100) else: logger.info('Got sys.prefix result: %r' % proc_stdout) pydistutils = os.path.expanduser('~/.pydistutils.cfg') if os.path.exists(pydistutils): logger.notify('Please make sure you remove any previous custom paths from ' 'your %s file.' % pydistutils) ## FIXME: really this should be calculated earlier fix_local_scheme(home_dir, symlink) if site_packages: if os.path.exists(site_packages_filename): logger.info('Deleting %s' % site_packages_filename) os.unlink(site_packages_filename) return py_executable
OSError
dataset/ETHPy150Open femmerling/backyard/virtualenv.py/install_python
5,376
def fix_local_scheme(home_dir, symlink=True):
    """
    Platforms that use the "posix_local" install scheme (like Ubuntu with
    Python 2.7) need to be given an additional "local" location, sigh.
    """
    try:
        import sysconfig
    except __HOLE__:
        pass
    else:
        if sysconfig._get_default_scheme() == 'posix_local':
            local_path = os.path.join(home_dir, 'local')
            if not os.path.exists(local_path):
                os.mkdir(local_path)
            for subdir_name in os.listdir(home_dir):
                if subdir_name == 'local':
                    continue
                copyfile(os.path.abspath(os.path.join(home_dir, subdir_name)), \
                    os.path.join(local_path, subdir_name), symlink)
ImportError
dataset/ETHPy150Open femmerling/backyard/virtualenv.py/fix_local_scheme
5,377
def fixup_scripts(home_dir, bin_dir): if is_win: new_shebang_args = ( '%s /c' % os.path.normcase(os.environ.get('COMSPEC', 'cmd.exe')), '', '.exe') else: new_shebang_args = ('/usr/bin/env', sys.version[:3], '') # This is what we expect at the top of scripts: shebang = '#!%s' % os.path.normcase(os.path.join( os.path.abspath(bin_dir), 'python%s' % new_shebang_args[2])) # This is what we'll put: new_shebang = '#!%s python%s%s' % new_shebang_args for filename in os.listdir(bin_dir): filename = os.path.join(bin_dir, filename) if not os.path.isfile(filename): # ignore subdirs, e.g. .svn ones. continue f = open(filename, 'rb') try: try: lines = f.read().decode('utf-8').splitlines() except __HOLE__: # This is probably a binary program instead # of a script, so just ignore it. continue finally: f.close() if not lines: logger.warn('Script %s is an empty file' % filename) continue old_shebang = lines[0].strip() old_shebang = old_shebang[0:2] + os.path.normcase(old_shebang[2:]) if not old_shebang.startswith(shebang): if os.path.basename(filename) in OK_ABS_SCRIPTS: logger.debug('Cannot make script %s relative' % filename) elif lines[0].strip() == new_shebang: logger.info('Script %s has already been made relative' % filename) else: logger.warn('Script %s cannot be made relative (it\'s not a normal script that starts with %s)' % (filename, shebang)) continue logger.notify('Making script %s relative' % filename) script = relative_script([new_shebang] + lines[1:]) f = open(filename, 'wb') f.write('\n'.join(script).encode('utf-8')) f.close()
UnicodeDecodeError
dataset/ETHPy150Open femmerling/backyard/virtualenv.py/fixup_scripts
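fixup_scripts above tells text scripts apart from binary executables simply by attempting a UTF-8 decode. A standalone sketch of that probe; the file name in the comment is illustrative:

def looks_like_text(path):
    with open(path, 'rb') as f:
        data = f.read()
    try:
        data.decode('utf-8')
    except UnicodeDecodeError:
        return False            # most likely a compiled binary, skip it
    return True

# looks_like_text('/usr/bin/env') is expected to be False for a native binary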
5,378
def GetRawResponse(self, with_status=True):
    try:
        return self.GetStatus() + "\r\n" + str(self.ResponseHeaders) + \
            "\n\n" + self.ResponseContents
    except __HOLE__:
        return self.GetStatus() + "\r\n" + str(self.ResponseHeaders) + \
            "\n\n" + "[Binary Content]"
UnicodeDecodeError
dataset/ETHPy150Open owtf/owtf/framework/http/transaction.py/HTTP_Transaction.GetRawResponse
5,379
def ImportProxyRequestResponse(self, request, response):
    self.IsInScope = request.in_scope
    self.URL = request.url
    self.InitData(request.body)
    self.Method = request.method
    try:
        self.Status = str(response.code) + " " + \
            response_messages[int(response.code)]
    except __HOLE__:
        self.Status = str(response.code) + " " + "Unknown Error"
    self.RawRequest = request.raw_request
    self.ResponseHeaders = response.header_string
    self.ResponseContents = response.body
    self.ResponseSize = len(self.ResponseContents)
    self.Time = str(response.request_time)
    self.TimeHuman = self.Timer.get_time_human(self.Time)
    self.LocalTimestamp = request.local_timestamp
    self.Found = (self.Status == "200 OK")
    self.Cookies_list = response.cookies
    self.New = True
    self.ID = ''
    self.HTMLLinkToID = ''
KeyError
dataset/ETHPy150Open owtf/owtf/framework/http/transaction.py/HTTP_Transaction.ImportProxyRequestResponse
5,380
def load_divs(f, specs, Ks):
    specs = np.asarray(strict_map(normalize_div_name, specs))
    Ks = np.ravel(Ks)

    n_bags = next(itervalues(next(itervalues(f)))).shape[0]
    divs = np.empty((n_bags, n_bags, specs.size, Ks.size), dtype=np.float32)
    divs.fill(np.nan)

    for i, spec in enumerate(specs):
        try:
            group = f[spec]
        except __HOLE__:
            msg = "missing div func {} in {}"
            raise KeyError(msg.format(spec, f.filename))

        for j, K in enumerate(Ks):
            try:
                vals = group[str(K)]
            except KeyError:
                msg = "{} is missing K={} in {}"
                raise KeyError(msg.format(spec, K, f.filename))

            divs[:, :, i, j] = vals

    return divs
KeyError
dataset/ETHPy150Open dougalsutherland/py-sdm/sdm/tests/test_divs.py/load_divs
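The lookups above re-raise KeyError with a message naming the missing group and K value. The same pattern with plain dicts standing in for the HDF5-style groups the record appears to index:

divs_by_name = {'kl': {'1': 0.37, '3': 0.12}}

def get_divs(name, K):
    try:
        group = divs_by_name[name]
    except KeyError:
        raise KeyError("missing div func {} in cache".format(name))
    try:
        return group[str(K)]
    except KeyError:
        raise KeyError("{} is missing K={} in cache".format(name, K))

print(get_divs('kl', 3))        # 0.12
# get_divs('js', 3)             # KeyError: missing div func js in cache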
5,381
def setup_os_root(self, root):
    LOG.debug("Inspecting guest OS root filesystem %s", root)
    mounts = self.handle.inspect_get_mountpoints(root)

    if len(mounts) == 0:
        raise exception.NovaException(
            _("No mount points found in %(root)s of %(image)s") %
            {'root': root, 'image': self.image})

    # the root directory must be mounted first
    mounts.sort(key=lambda mount: mount[0])

    root_mounted = False
    for mount in mounts:
        LOG.debug("Mounting %(dev)s at %(dir)s",
                  {'dev': mount[1], 'dir': mount[0]})
        try:
            self.handle.mount_options("", mount[1], mount[0])
            root_mounted = True
        except __HOLE__ as e:
            msg = _("Error mounting %(device)s to %(dir)s in image"
                    " %(image)s with libguestfs (%(e)s)") % \
                {'image': self.image, 'device': mount[1],
                 'dir': mount[0], 'e': e}
            if root_mounted:
                LOG.debug(msg)
            else:
                raise exception.NovaException(msg)
RuntimeError
dataset/ETHPy150Open openstack/nova/nova/virt/disk/vfs/guestfs.py/VFSGuestFS.setup_os_root
5,382
def setup(self, mount=True): LOG.debug("Setting up appliance for %(image)s", {'image': self.image}) try: self.handle = tpool.Proxy( guestfs.GuestFS(python_return_dict=False, close_on_exit=False)) except __HOLE__ as e: if ('close_on_exit' in six.text_type(e) or 'python_return_dict' in six.text_type(e)): # NOTE(russellb) In case we're not using a version of # libguestfs new enough to support parameters close_on_exit # and python_return_dict which were added in libguestfs 1.20. self.handle = tpool.Proxy(guestfs.GuestFS()) else: raise if CONF.guestfs.debug: self.configure_debug() try: if forceTCG: self.handle.set_backend_settings("force_tcg") except AttributeError as ex: # set_backend_settings method doesn't exist in older # libguestfs versions, so nothing we can do but ignore LOG.warning(_LW("Unable to force TCG mode, " "libguestfs too old? %s"), ex) pass try: if isinstance(self.image, imgmodel.LocalImage): self.handle.add_drive_opts(self.image.path, format=self.image.format) elif isinstance(self.image, imgmodel.RBDImage): self.handle.add_drive_opts("%s/%s" % (self.image.pool, self.image.name), protocol="rbd", format=imgmodel.FORMAT_RAW, server=self.image.servers, username=self.image.user, secret=self.image.password) else: raise exception.UnsupportedImageModel( self.image.__class__.__name__) self.handle.launch() if mount: self.setup_os() self.handle.aug_init("/", 0) self.mount = True except RuntimeError as e: # explicitly teardown instead of implicit close() # to prevent orphaned VMs in cases when an implicit # close() is not enough self.teardown() raise exception.NovaException( _("Error mounting %(image)s with libguestfs (%(e)s)") % {'image': self.image, 'e': e}) except Exception: # explicitly teardown instead of implicit close() # to prevent orphaned VMs in cases when an implicit # close() is not enough self.teardown() raise
TypeError
dataset/ETHPy150Open openstack/nova/nova/virt/disk/vfs/guestfs.py/VFSGuestFS.setup
5,383
def teardown(self):
    LOG.debug("Tearing down appliance")
    try:
        try:
            if self.mount:
                self.handle.aug_close()
        except __HOLE__ as e:
            LOG.warning(_LW("Failed to close augeas %s"), e)

        try:
            self.handle.shutdown()
        except AttributeError:
            # Older libguestfs versions haven't an explicit shutdown
            pass
        except RuntimeError as e:
            LOG.warning(_LW("Failed to shutdown appliance %s"), e)

        try:
            self.handle.close()
        except AttributeError:
            # Older libguestfs versions haven't an explicit close
            pass
        except RuntimeError as e:
            LOG.warning(_LW("Failed to close guest handle %s"), e)
    finally:
        # dereference object and implicitly close()
        self.handle = None
RuntimeError
dataset/ETHPy150Open openstack/nova/nova/virt/disk/vfs/guestfs.py/VFSGuestFS.teardown
5,384
def has_file(self, path):
    LOG.debug("Has file path=%s", path)
    path = self._canonicalize_path(path)
    try:
        self.handle.stat(path)
        return True
    except __HOLE__:
        return False
RuntimeError
dataset/ETHPy150Open openstack/nova/nova/virt/disk/vfs/guestfs.py/VFSGuestFS.has_file
5,385
def _ExpandArchs(self, archs, sdkroot):
    """Expands variables references in ARCHS, and remove duplicates."""
    variable_mapping = self._VariableMapping(sdkroot)
    expanded_archs = []
    for arch in archs:
        if self.variable_pattern.match(arch):
            variable = arch
            try:
                variable_expansion = variable_mapping[variable]
                for arch in variable_expansion:
                    if arch not in expanded_archs:
                        expanded_archs.append(arch)
            except __HOLE__ as e:
                print 'Warning: Ignoring unsupported variable "%s".' % variable
        elif arch not in expanded_archs:
            expanded_archs.append(arch)
    return expanded_archs
KeyError
dataset/ETHPy150Open adblockplus/gyp/pylib/gyp/xcode_emulation.py/XcodeArchsDefault._ExpandArchs
5,386
def _pick_idp(self, query, end_point_index): """ If more than one idp and if none is selected, I have to do wayf or disco """ query_dict = {} if isinstance(query, six.string_types): query_dict = dict(parse_qs(query)) else: for key, value in six.iteritems(query): if isinstance(value, list): query_dict[key] = value[0] else: query_dict[key] = value query = urlencode(query_dict) _cli = self.sp # Find all IdPs idps = self.sp.metadata.with_descriptor("idpsso") idp_entity_id = None if len(idps) == 1: # idps is a dictionary idp_entity_id = list(idps.keys())[0] if not idp_entity_id and query: try: _idp_entity_id = query_dict[self.idp_query_param][0] if _idp_entity_id in idps: idp_entity_id = _idp_entity_id except __HOLE__: logger.debug("No IdP entity ID in query: %s" % query) pass if not idp_entity_id: cookie = self.create_cookie( '{"' + self.CONST_QUERY + '": "' + base64.b64encode(query) + '" , "' + self.CONST_HASIDP + '": "False" }', self.CONST_SAML_COOKIE, self.CONST_SAML_COOKIE) if self.sp_conf.WAYF: if query: try: wayf_selected = query_dict["wayf_selected"][0] except KeyError: return self._wayf_redirect(cookie) idp_entity_id = wayf_selected else: return self._wayf_redirect(cookie) elif self.sp_conf.DISCOSRV: if query: idp_entity_id = _cli.parse_discovery_service_response(query=query) if not idp_entity_id: sid_ = sid() self.cache_outstanding_queries[sid_] = self.verification_endpoint eid = _cli.config.entityid disco_end_point_index = end_point_index["disco_end_point_index"] ret = _cli.config.getattr("endpoints", "sp")[ "discovery_response"][disco_end_point_index][0] ret += "?sid=%s" % sid_ loc = _cli.create_discovery_service_request( self.sp_conf.DISCOSRV, eid, **{"return": ret}) return -1, SeeOther(loc, headers=[cookie]) elif not len(idps): raise ServiceErrorException( 'Misconfiguration for the SAML Service Provider!') else: return -1, NotImplemented("No WAYF or DS present!") return 0, idp_entity_id
KeyError
dataset/ETHPy150Open rohe/pyoidc/src/oic/utils/authn/saml.py/SAMLAuthnMethod._pick_idp
5,387
def get_user(self, user_id):
    """Return a User by their UserID.

    Raises KeyError if the User is not available.
    """
    try:
        return self._user_dict[user_id]
    except __HOLE__:
        logger.warning('UserList returning unknown User for UserID {}'
                       .format(user_id))
        return User(user_id, DEFAULT_NAME, None, None, [], False)
KeyError
dataset/ETHPy150Open tdryer/hangups/hangups/user.py/UserList.get_user
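Returning a placeholder instead of propagating KeyError keeps callers simple. A reduced sketch of the record's lookup-with-default pattern; the placeholder fields are illustrative, not the real User signature:

import logging

logger = logging.getLogger(__name__)
PLACEHOLDER = {'name': 'Unknown', 'emails': []}

def get_user(users, user_id):
    try:
        return users[user_id]
    except KeyError:
        logger.warning('unknown user id %s, returning placeholder', user_id)
        return dict(PLACEHOLDER, id=user_id)

print(get_user({'u1': {'id': 'u1', 'name': 'Ada'}}, 'u2'))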
5,388
@lib.api_call
def transmit_block(self, array):
    try:
        self.bus.write_i2c_block_data(self.addr, self.reg, array)
    except __HOLE__ as err:
        self.logger.debug(err)
        return err
IOError
dataset/ETHPy150Open IEEERobotics/bot/bot/hardware/servo_cape.py/ServoCape.transmit_block
5,389
def test_03_Files_List_GetList_Iterate(self):
    drive = GoogleDrive(self.ga)
    flist = drive.ListFile({'q': "title = '%s' and trashed = false"%self.title, 'maxResults': 2})
    files = []
    while True:
        try:
            x = flist.GetList()
            self.assertTrue(len(x) <= 2)
            files.extend(x)
        except __HOLE__:
            break
    for file1 in self.file_list:
        found = False
        for file2 in files:
            if file1['id'] == file2['id']:
                found = True
        self.assertEqual(found, True)
StopIteration
dataset/ETHPy150Open googledrive/PyDrive/pydrive/test/test_filelist.py/GoogleDriveFileListTest.test_03_Files_List_GetList_Iterate
5,390
def DeleteOldFile(self, file_name):
    try:
        os.remove(file_name)
    except __HOLE__:
        pass
OSError
dataset/ETHPy150Open googledrive/PyDrive/pydrive/test/test_filelist.py/GoogleDriveFileListTest.DeleteOldFile
5,391
def split_to_jip_args(args):
    """Check the <args> and search for '--'. If found,
    everything after '--' is put into 'Jip_args'"""
    if args and "<args>" in args:
        try:
            i = args["<args>"].index("--")
            args["<args>"], args["<jip_args>"] = args["<args>"][:i], \
                args["<args>"][i + 1:]
        except __HOLE__:
            pass
ValueError
dataset/ETHPy150Open thasso/pyjip/jip/cli/jip_interpreter.py/split_to_jip_args
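list.index raises ValueError when the separator is absent, which is exactly the hole in the record above; swallowing it leaves the arguments untouched. In isolation:

def split_on_separator(argv):
    try:
        i = argv.index("--")
        return argv[:i], argv[i + 1:]
    except ValueError:          # no "--" present, nothing to split
        return argv, []

print(split_on_separator(['run', '-v', '--', '--threads', '4']))
# (['run', '-v'], ['--threads', '4'])
print(split_on_separator(['run', '-v']))
# (['run', '-v'], [])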
5,392
def add_git_segment(powerline):
    try:
        p = subprocess.Popen(['git', 'status', '--porcelain', '-b'],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             env=git_subprocess_env())
    except __HOLE__:
        # Popen will throw an OSError if git is not found
        return

    pdata = p.communicate()
    if p.returncode != 0:
        return

    status = pdata[0].decode("utf-8").splitlines()
    stats = parse_git_stats(status)
    branch_info = parse_git_branch_info(status)

    if branch_info:
        stats.ahead = branch_info["ahead"]
        stats.behind = branch_info["behind"]
        branch = branch_info['local']
    else:
        branch = _get_git_detached_branch()

    bg = Color.REPO_CLEAN_BG
    fg = Color.REPO_CLEAN_FG
    if stats.dirty:
        bg = Color.REPO_DIRTY_BG
        fg = Color.REPO_DIRTY_FG

    powerline.append(' %s ' % branch, fg, bg)
    stats.add_to_powerline(powerline, Color)
OSError
dataset/ETHPy150Open milkbikis/powerline-shell/segments/git.py/add_git_segment
5,393
def test_issue_7754(self): old_cwd = os.getcwd() config_dir = os.path.join(integration.TMP, 'issue-7754') if not os.path.isdir(config_dir): os.makedirs(config_dir) os.chdir(config_dir) config_file_name = 'master' with salt.utils.fopen(self.get_config_file_path(config_file_name), 'r') as fhr: config = yaml.load(fhr.read()) config['log_file'] = 'file:///dev/log/LOG_LOCAL3' with salt.utils.fopen(os.path.join(config_dir, config_file_name), 'w') as fhw: fhw.write( yaml.dump(config, default_flow_style=False) ) ret = self.run_script( self._call_binary_, '--config-dir {0} -d'.format( config_dir ), timeout=15, catch_stderr=True, with_retcode=True ) try: self.assertIn("'doc.runner:'", ret[0]) self.assertFalse(os.path.isdir(os.path.join(config_dir, 'file:'))) except __HOLE__: if os.path.exists('/dev/log') and ret[2] != 2: # If there's a syslog device and the exit code was not 2, # 'No such file or directory', raise the error raise self.assertIn( 'Failed to setup the Syslog logging handler', '\n'.join(ret[1]) ) self.assertEqual(ret[2], 2) finally: self.chdir(old_cwd) if os.path.isdir(config_dir): shutil.rmtree(config_dir)
AssertionError
dataset/ETHPy150Open saltstack/salt/tests/integration/shell/runner.py/RunTest.test_issue_7754
5,394
def coerce_key_types(doc, keys):
    """
    Given a document and a list of keys such as ['rows', '123', 'edit'],
    return a list of keys, such as ['rows', 123, 'edit'].
    """
    ret = []
    active = doc
    for idx, key in enumerate(keys):
        # Coerce array lookups to integers.
        if isinstance(active, coreapi.Array):
            try:
                key = int(key)
            except:
                pass

        # Descend through the document, so we can correctly identify
        # any nested array lookups.
        ret.append(key)
        try:
            active = active[key]
        except (KeyError, IndexError, __HOLE__, TypeError):
            ret += keys[idx + 1:]
            break

    return ret
ValueError
dataset/ETHPy150Open core-api/python-client/coreapi/commandline.py/coerce_key_types
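Descending through a mixed document means active[key] can fail in several ways: KeyError for dicts, IndexError for lists, and TypeError or ValueError for mismatched key types, which is why the record's except clause lists all four. A compact sketch of the descent over plain containers, without the coreapi types:

def descend(doc, keys):
    """Walk doc by keys, coercing numeric-looking keys for list lookups."""
    active = doc
    for key in keys:
        if isinstance(active, list):
            try:
                key = int(key)
            except ValueError:
                pass
        try:
            active = active[key]
        except (KeyError, IndexError, ValueError, TypeError):
            return None
    return active

doc = {'rows': [{'edit': 'PATCH'}, {'edit': 'PUT'}]}
print(descend(doc, ['rows', '1', 'edit']))   # PUT
print(descend(doc, ['rows', '9', 'edit']))   # None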
5,395
@click.command(help='Display the current document.\n\nOptionally display just the element at the given PATH.')
@click.argument('path', nargs=-1)
def show(path):
    doc = get_document()
    if doc is None:
        click.echo('No current document. Use `coreapi get` to fetch a document first.')
        sys.exit(1)

    if path:
        keys = coerce_key_types(doc, path)
        for key in keys:
            try:
                doc = doc[key]
            except (__HOLE__, IndexError):
                click.echo('Key %s not found.' % repr(key).strip('u'))
                sys.exit(1)

    click.echo(display(doc))
KeyError
dataset/ETHPy150Open core-api/python-client/coreapi/commandline.py/show
5,396
@click.command(help='Display description for link at given PATH.')
@click.argument('path', nargs=-1)
def describe(path):
    doc = get_document()
    if doc is None:
        click.echo('No current document. Use `coreapi get` to fetch a document first.')
        sys.exit(1)

    if not path:
        click.echo('Missing PATH to a link in the document.')
        sys.exit(1)

    node = doc
    keys = coerce_key_types(doc, path)
    for key in keys:
        try:
            node = node[key]
        except (__HOLE__, IndexError):
            click.echo('Key %s not found.' % repr(key).strip('u'))
            sys.exit(1)

    if not isinstance(node, coreapi.Link):
        click.echo('Given PATH must index a link, not a %s.' % doc.__class__.__name__)
        sys.exit(1)

    fields_description = any([field.description for field in node.fields])
    if not (node.description or fields_description):
        click.echo('Link has no description.')
        sys.exit(1)

    if node.description:
        click.echo(node.description)
        click.echo()

    for field in node.fields:
        name = field.name if field.required else '[%s]' % field.name
        if field.description:
            click.echo('* %s - %s' % (name, field.description))
        else:
            click.echo('* %s' % name)
KeyError
dataset/ETHPy150Open core-api/python-client/coreapi/commandline.py/describe
5,397
@staticmethod
def last_mod():
    try:
        last_mod = Post.objects\
            .published()\
            .order_by('-created')[0]
        return last_mod.created
    except __HOLE__:
        return None
IndexError
dataset/ETHPy150Open jamiecurle/django-omblog/omblog/ccsitemap.py/PostSiteMap.last_mod
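Indexing [0] on an empty queryset, or any empty sequence, raises IndexError, which the record turns into None. The same shape with a plain list standing in for the queryset:

def newest(timestamps):
    try:
        return sorted(timestamps, reverse=True)[0]
    except IndexError:          # nothing published yet
        return None

print(newest([3, 1, 2]))        # 3
print(newest([]))               # None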
5,398
def testCreate(self):
    # log.info("Schematic from indev")

    size = (64, 64, 64)
    temp = mktemp("testcreate.schematic")
    schematic = MCSchematic(shape=size, filename=temp, mats='Classic')
    level = self.indevLevel.level

    schematic.copyBlocksFrom(level, BoundingBox((0, 0, 0), (64, 64, 64,)), (0, 0, 0))
    assert((schematic.Blocks[0:64, 0:64, 0:64] == level.Blocks[0:64, 0:64, 0:64]).all())

    schematic.copyBlocksFrom(level, BoundingBox((0, 0, 0), (64, 64, 64,)), (-32, -32, -32))
    assert((schematic.Blocks[0:32, 0:32, 0:32] == level.Blocks[32:64, 32:64, 32:64]).all())

    schematic.saveInPlace()

    schem = mclevel.fromFile("schematics/CreativeInABox.schematic")
    tempSchematic = MCSchematic(shape=(1, 1, 3))
    tempSchematic.copyBlocksFrom(schem, BoundingBox((0, 0, 0), (1, 1, 3)), (0, 0, 0))

    level = self.anvilLevel.level
    for cx, cz in itertools.product(xrange(0, 4), xrange(0, 4)):
        try:
            level.createChunk(cx, cz)
        except __HOLE__:
            pass
    schematic.copyBlocksFrom(level, BoundingBox((0, 0, 0), (64, 64, 64,)), (0, 0, 0))

    schematic.close()
    os.remove(temp)
ValueError
dataset/ETHPy150Open mcedit/pymclevel/test/schematic_test.py/TestSchematics.testCreate
5,399
def _get_raw_post_data(self):
    try:
        return self._raw_post_data
    except __HOLE__:
        self._raw_post_data = self._req.read()
        return self._raw_post_data
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-0.96/django/core/handlers/modpython.py/ModPythonRequest._get_raw_post_data
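The last record is the lazy-attribute idiom: read the cached attribute and, only on AttributeError, do the expensive work and store the result. A self-contained sketch in which the expensive read is simulated:

class Request(object):
    def __init__(self, payload):
        self._payload = payload

    def _read_body(self):
        print('reading body once')       # stands in for the expensive read
        return self._payload

    @property
    def raw_post_data(self):
        try:
            return self._raw_post_data
        except AttributeError:           # first access: compute and cache
            self._raw_post_data = self._read_body()
            return self._raw_post_data

r = Request(b'a=1&b=2')
r.raw_post_data                          # prints 'reading body once'
r.raw_post_data                          # cached, no second read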