Dataset columns (schema recovered from the viewer's column statistics):

  Unnamed: 0   int64    row index, 0 to 10k
  function     string   lengths 79 to 138k
  label        string   20 classes
  info         string   lengths 42 to 261
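Each record below carries four fields: the row index, a Python function in which one exception name has been masked with the __HOLE__ placeholder, a label naming the masked exception class, and an info path giving the provenance of the function within the ETHPy150Open corpus. A minimal sketch of loading and inspecting the dump with pandas, assuming it has been exported to CSV (the file name holes.csv is hypothetical):

    import pandas as pd

    # Hypothetical export of this dump; adjust the path to the real file.
    df = pd.read_csv("holes.csv")

    # Four columns per record: row index, masked function source,
    # exception-class label, and provenance path.
    print(df.columns.tolist())    # ['Unnamed: 0', 'function', 'label', 'info']
    print(df["label"].nunique())  # 20 exception classes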
4,800
def getData(self): """ Get the data as a list. """ data = [] # Get data for rnd in range(0, (self.num_rounds + 1) * self.msg_blocks): tmp_row = [] for word in self.print_format: try: # Add word to table if word == 'w': weight = self.characteristic_data[word+str(rnd)] # Print hw(weight) or weight depending on the cipher if self.cipher.name == "keccakdiff" or \ self.cipher.name == "ketje" or \ self.cipher.name == "ascon": tmp_row.append("-" + str(int(weight, 16))) else: tmp_row.append("-" + str(bin(int(weight, 16)).count('1'))) else: tmp_row.append(self.characteristic_data[word+str(rnd)]) except __HOLE__: tmp_row.append("none") if tmp_row: data.append(tmp_row) return data
KeyError
dataset/ETHPy150Open kste/cryptosmt/cryptanalysis/diffchars.py/DifferentialCharacteristic.getData
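A record's label is the exception name that was masked out of its function; substituting it back into the __HOLE__ placeholder recovers the original except clause. A minimal sketch (the unmask helper is illustrative, not part of the dataset):

    def unmask(function_src: str, label: str) -> str:
        """Restore the masked exception name in a record's function source."""
        return function_src.replace("__HOLE__", label)

Applied to record 4,800 above, unmask turns "except __HOLE__:" back into "except KeyError:", matching its KeyError label.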
4,801
def read_json_or_die(path): try: with open(path, 'rb') as f: string = f.read() return json.loads(string) except __HOLE__ as e: print "Invalid JSON in %s:\n%s" % (path, string) print e sys.exit(1)
ValueError
dataset/ETHPy150Open codalab/codalab-cli/codalab/lib/codalab_manager.py/read_json_or_die
4,802
def prompt_bool(prompt, default=None): if default is None: prompt = "%s [yn] " % prompt elif default is True: prompt = "%s [Yn] " % prompt elif default is False: prompt = "%s [yN] " % prompt else: raise ValueError("default must be None, True, or False") while True: response = raw_input(prompt).strip() if default is not None and len(response) == 0: return default try: return bool(strtobool(response)) except __HOLE__: print "Please enter y(es) or n(o)." continue
ValueError
dataset/ETHPy150Open codalab/codalab-cli/codalab/lib/codalab_manager.py/prompt_bool
4,803
def get_dummy(self, context, id, fields): try: return self.dummys[id] except __HOLE__: raise exceptions.NotFound()
KeyError
dataset/ETHPy150Open openstack/neutron/neutron/tests/unit/dummy_plugin.py/DummyServicePlugin.get_dummy
4,804
def delete_dummy(self, context, id): try: svc_type_id = self.dummys[id]['service_type'] del self.dummys[id] self.svctype_mgr.decrease_service_type_refcount(context, svc_type_id) except __HOLE__: raise exceptions.NotFound()
KeyError
dataset/ETHPy150Open openstack/neutron/neutron/tests/unit/dummy_plugin.py/DummyServicePlugin.delete_dummy
4,805
def __init__(self, *args, **kw): def remove(wr, selfref=ref(self)): self = selfref() if self is not None: try: del self.data[wr.key] except __HOLE__: pass self._remove = remove UserDict.UserDict.__init__(self, *args, **kw)
KeyError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/weakref.py/WeakValueDictionary.__init__
4,806
def __contains__(self, key): try: o = self.data[key]() except __HOLE__: return False return o is not None
KeyError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/weakref.py/WeakValueDictionary.__contains__
4,807
def has_key(self, key): try: o = self.data[key]() except __HOLE__: return False return o is not None
KeyError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/weakref.py/WeakValueDictionary.has_key
4,808
def get(self, key, default=None): try: wr = self.data[key] except __HOLE__: return default else: o = wr() if o is None: # This should only happen return default else: return o
KeyError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/weakref.py/WeakValueDictionary.get
4,809
def pop(self, key, *args): try: o = self.data.pop(key)() except __HOLE__: if args: return args[0] raise if o is None: raise KeyError, key else: return o
KeyError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/weakref.py/WeakValueDictionary.pop
4,810
def setdefault(self, key, default=None): try: wr = self.data[key] except __HOLE__: self.data[key] = KeyedRef(default, self._remove, key) return default else: return wr()
KeyError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/weakref.py/WeakValueDictionary.setdefault
4,811
def __init__(self, dict=None): self.data = {} def remove(k, selfref=ref(self)): self = selfref() if self is not None: try: del self.data[k] except __HOLE__: pass self._remove = remove if dict is not None: self.update(dict)
KeyError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/weakref.py/WeakKeyDictionary.__init__
4,812
def has_key(self, key): try: wr = ref(key) except __HOLE__: return 0 return wr in self.data
TypeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/weakref.py/WeakKeyDictionary.has_key
4,813
def __contains__(self, key): try: wr = ref(key) except __HOLE__: return 0 return wr in self.data
TypeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/weakref.py/WeakKeyDictionary.__contains__
4,814
@methodtrace(utils.logger) def test_write_read(self): altsettings = [devinfo.INTF_BULK, devinfo.INTF_INTR] eps = [devinfo.EP_BULK, devinfo.EP_INTR] data_len = [8, 8] if utils.is_iso_test_allowed(): altsettings.append(devinfo.INTF_ISO) eps.append(devinfo.EP_ISO) data_len.append(64) def delay(alt): # Hack to avoid two consecutive isochronous transfers to fail if alt == devinfo.INTF_ISO and utils.is_windows(): time.sleep(0.5) for alt, length in zip(altsettings, data_len): self.dev.set_interface_altsetting(0, alt) for data in make_data_list(length): adata = utils.to_array(data) length = utils.data_len(data) buff = usb.util.create_buffer(length) try: ret = self.dev.write(eps[alt], data) except NotImplementedError: continue self.assertEqual(ret, length) self.assertEqual( ret, length, 'Failed to write data: ' + \ str(data) + ', in interface = ' + \ str(alt)) try: ret = self.dev.read(eps[alt] | usb.util.ENDPOINT_IN, length) except __HOLE__: continue self.assertTrue( utils.array_equals(ret, adata), str(ret) + ' != ' + \ str(adata) + ', in interface = ' + \ str(alt)) delay(alt) try: ret = self.dev.write(eps[alt], data) except NotImplementedError: continue self.assertEqual(ret, length) self.assertEqual( ret, length, 'Failed to write data: ' + \ str(data) + ', in interface = ' + \ str(alt)) try: ret = self.dev.read(eps[alt] | usb.util.ENDPOINT_IN, buff) except NotImplementedError: continue self.assertEqual(ret, length) self.assertTrue( utils.array_equals(buff, adata), str(buff) + ' != ' + \ str(adata) + ', in interface = ' + \ str(alt)) delay(alt)
NotImplementedError
dataset/ETHPy150Open walac/pyusb/tests/test_integration.py/DeviceTest.test_write_read
4,815
def _load_file(parser, filename): if not os.path.exists(filename): logger.debug( 'Loading config from %s failed; it does not exist', filename) return if not os.access(filename, os.R_OK): logger.warning( 'Loading config from %s failed; read permission missing', filename) return try: logger.info('Loading config from %s', filename) with io.open(filename, 'rb') as filehandle: parser.readfp(filehandle) except configparser.MissingSectionHeaderError as e: logger.warning('%s does not have a config section, not loaded.', filename) except configparser.ParsingError as e: linenos = ', '.join(str(lineno) for lineno, line in e.errors) logger.warning( '%s has errors, line %s has been ignored.', filename, linenos) except __HOLE__: # TODO: if this is the initial load of logging config we might not # have a logger at this point, we might want to handle this better. logger.debug('Config file %s not found; skipping', filename)
IOError
dataset/ETHPy150Open mopidy/mopidy/mopidy/config/__init__.py/_load_file
4,816
def list_apps(): try: # django >= 1.7, to support AppConfig from django.apps import apps return [app.name for app in apps.get_app_configs()] except __HOLE__: # old way return list(settings.INSTALLED_APPS)
ImportError
dataset/ETHPy150Open django-extensions/django-extensions/django_extensions/compat.py/list_apps
4,817
def list_app_labels(): try: # django >= 1.7, to support AppConfig from django.apps import apps return [app.label for app in apps.get_app_configs()] except __HOLE__: # old way return [app.rsplit(".")[-1] for app in settings.INSTALLED_APPS]
ImportError
dataset/ETHPy150Open django-extensions/django-extensions/django_extensions/compat.py/list_app_labels
4,818
def get_app(app_label): try: # django >= 1.7 from django.apps import apps return apps.get_app_config(app_label).models_module except __HOLE__: from django.db import models return models.get_app(app_label)
ImportError
dataset/ETHPy150Open django-extensions/django-extensions/django_extensions/compat.py/get_app
4,819
def get_apps(): try: # django >= 1.7, to support AppConfig from django.apps import apps return [app.models_module for app in apps.get_app_configs() if app.models_module] except __HOLE__: from django.db import models return models.get_apps()
ImportError
dataset/ETHPy150Open django-extensions/django-extensions/django_extensions/compat.py/get_apps
4,820
def get_apps_from_cache(): try: from django.apps import apps return [app.models_module for app in apps.get_app_configs() if app.models_module] except __HOLE__: from django.db.models.loading import cache return cache.get_apps()
ImportError
dataset/ETHPy150Open django-extensions/django-extensions/django_extensions/compat.py/get_apps_from_cache
4,821
def get_models_from_cache(app): try: from django.apps import apps return apps.get_models(app) except __HOLE__: from django.db.models.loading import cache return cache.get_models(app)
ImportError
dataset/ETHPy150Open django-extensions/django-extensions/django_extensions/compat.py/get_models_from_cache
4,822
def get_app_models(app_labels=None): if app_labels is None: try: # django >= 1.7, to support AppConfig from django.apps import apps return apps.get_models(include_auto_created=True) except ImportError: from django.db import models return models.get_models(include_auto_created=True) if not isinstance(app_labels, (list, tuple, set)): app_labels = [app_labels] app_models = [] try: # django >= 1.7, to support AppConfig from django.apps import apps for app_label in app_labels: app_config = apps.get_app_config(app_label) app_models.extend(app_config.get_models(include_auto_created=True)) except ImportError: from django.db import models try: app_list = [models.get_app(app_label) for app_label in app_labels] except (models.ImproperlyConfigured, __HOLE__) as e: raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e) for app in app_list: app_models.extend(models.get_models(app, include_auto_created=True)) return app_models
ImportError
dataset/ETHPy150Open django-extensions/django-extensions/django_extensions/compat.py/get_app_models
4,823
def get_model_compat(app_label, model_name): """Get a model on multiple Django versions.""" try: # django >= 1.7 from django.apps import apps return apps.get_model(app_label, model_name) except __HOLE__: from django.db.models import get_model return get_model(app_label, model_name)
ImportError
dataset/ETHPy150Open django-extensions/django-extensions/django_extensions/compat.py/get_model_compat
4,824
def get_models_compat(app_label): """Get models on multiple Django versions.""" try: # django >= 1.7 from django.apps import apps return apps.get_app_config(app_label).get_models() except __HOLE__: from django.db.models import get_models return get_models(app_label)
ImportError
dataset/ETHPy150Open django-extensions/django-extensions/django_extensions/compat.py/get_models_compat
4,825
def get_models_for_app(app_label): """Returns the models in the given app.""" try: # django >= 1.7 from django.apps import apps return apps.get_app_config(app_label).get_models() except __HOLE__: from django.db.models import get_app, get_models return get_models(get_app(app_label))
ImportError
dataset/ETHPy150Open django-extensions/django-extensions/django_extensions/compat.py/get_models_for_app
4,826
def load_tag_library(libname): """Load a templatetag library on multiple Django versions. Returns None if the library isn't loaded. """ if django.VERSION < (1, 9): from django.template.base import get_library, InvalidTemplateLibrary try: lib = get_library(libname) return lib except InvalidTemplateLibrary: return None else: from django.template.backends.django import get_installed_libraries from django.template.library import InvalidTemplateLibrary try: lib = get_installed_libraries()[libname] lib = importlib.import_module(lib).register return lib except (InvalidTemplateLibrary, __HOLE__): return None
KeyError
dataset/ETHPy150Open django-extensions/django-extensions/django_extensions/compat.py/load_tag_library
4,827
def element_defs(tags): def _padding(type, element): return ' ' * (type.max_tagname_len - len(element.name)) s = '' for tag in tags: element = tg.elements[tag] # no def needed for built-in types if element.typename in ('xsd:string'): continue try: type = tg.types[element.typename] except __HOLE__: continue # e.g. blipFill = ElementDef('p:blipFill', 'CT_BlipFillProperties') s += ("\n%s = ElementDef('%s', '%s')\n" % (element.name, element.tag, element.typename)) # e.g. blipFill.add_child('a:blip', cardinality='?') for child in type.elements: s += ("%s.add_child('%s'%s, cardinality='%s')\n" % (element.name, child.tag, _padding(type, child), child.cardinality)) # e.g. ext.add_attribute('x', required=True, default="0") for a in type.required_attributes: default = a.default if a.default else '' s += ("%s.add_attribute('%s', required=True, default='%s')\n" % (element.name, a.name, default)) # e.g. xfrm.add_attributes('rot', 'flipH', 'flipV') if type.optional_attributes: params = "', '".join([a.name for a in type.optional_attributes]) s += "%s.add_attributes('%s')\n" % (element.name, params) return s # ============================================================================ # main # ============================================================================
KeyError
dataset/ETHPy150Open scanny/python-pptx/lab/parse_xsd/parse_xsd.py/element_defs
4,828
def process(self): self.config = self._find_config(self.path) self._project_name = None self._project_branch = None if self.config: try: with open(self.config, 'r', encoding='utf-8') as fh: self._project_name = u(fh.readline().strip()) self._project_branch = u(fh.readline().strip()) except UnicodeDecodeError: # pragma: nocover try: with open(self.config, 'r', encoding=sys.getfilesystemencoding()) as fh: self._project_name = u(fh.readline().strip()) self._project_branch = u(fh.readline().strip()) except: log.traceback('warn') except __HOLE__: # pragma: nocover log.traceback('warn') return True return False
IOError
dataset/ETHPy150Open wakatime/sublime-wakatime/packages/wakatime/projects/wakatime_project_file.py/WakaTimeProjectFile.process
4,829
def next(self): try: result = self.folds[self.fold_idx] self.fold_idx += 1 return result except __HOLE__: self.fold_idx = 0 raise StopIteration
IndexError
dataset/ETHPy150Open HIPS/Kayak/kayak/crossval.py/CrossValidator.next
4,830
def test_now_to_1_year(self, run_in_various_years): # The following two tests test the span from "now" to "a year from # now". Depending on when the test is run, the interval may include a # leap year. The 'try' assumes it's not a leap year, the 'except' # tries it again as a leap year. try: assert dtw(0, 31536000) == "1 year" assert dtw(1, 31536001) == "1 year" except __HOLE__: # If the intervening year contains February 29th assert dtw(0, 31536000) == "11 months and 30 days" assert dtw(1, 31536001) == "11 months and 30 days"
AssertionError
dataset/ETHPy150Open mikeorr/WebHelpers2/webhelpers2/tests/test_date.py/TestDistanceOfTimeInWords.test_now_to_1_year
4,831
def serve(self, interface='0.0.0.0', port=3000): self.log.info('ZenQueue Native Server v%s', zenqueue.__version__) if interface == '0.0.0.0': self.log.info('Serving on %s:%d (all interfaces)', interface, port) else: self.log.info('Serving on %s:%d', interface, port) self.socket = api.tcp_listener((interface, port)) # A lot of the code below was copied or adapted from eventlet's # implementation of an asynchronous WSGI server. try: while True: try: try: client_socket, client_addr = self.socket.accept() except socket.error, exc: # EPIPE (Broken Pipe) and EBADF (Bad File Descriptor) # errors are common for clients that suddenly quit. We # shouldn't worry so much about them. if exc[0] not in [errno.EPIPE, errno.EBADF]: raise # Throughout the logging output, we use the client's ID in # hexadecimal to identify a particular client in the logs. self.log.info('Client %x connected: %r', id(client_socket), client_addr) # Handle this client on the pool, sleeping for 0 time to # allow the handler (or other coroutines) to run. self.client_pool.execute_async(self.handle, client_socket) api.sleep(0) except __HOLE__: # It's a fatal error because it kills the program. self.log.fatal('Received keyboard interrupt.') # This removes the socket from the current hub's list of # sockets to check for clients (i.e. the select() call). # select() is a key component of asynchronous networking. api.get_hub().remove_descriptor(self.socket.fileno()) break finally: try: self.log.info('Shutting down server.') self.socket.close() except socket.error, exc: # See above for why we shouldn't worry about Broken Pipe or Bad # File Descriptor errors. if exc[0] not in [errno.EPIPE, errno.EBADF]: raise finally: self.socket = None
KeyboardInterrupt
dataset/ETHPy150Open zacharyvoase/zenqueue/zenqueue/server/native.py/NativeQueueServer.serve
4,832
def handle(self, client): reader, writer = client.makefile('r'), client.makefile('w') try: while True: try: # If the client sends an empty line, ignore it. line = reader.readline() stripped_line = line.rstrip('\r\n') if not line: break elif not stripped_line: api.sleep(0) continue # Try to parse the request, failing if it is invalid. try: action, args, kwargs = self.parse_command(stripped_line) except ValueError: # Request was malformed. ValueError is raised by # simplejson when the passed string is not valid JSON. self.log.error('Received malformed request from client %x', id(client)) write_json(writer, ['error:request', 'malformed request']) continue # Find the method corresponding to the requested action. try: method = getattr(self, 'do_' + action) except __HOLE__: self.log.error('Missing action requested by client %x', id(client)) write_json(writer, ['error:request', 'action not found']) continue # Run the method, dealing with exceptions or success. try: self.log.debug('Action %r requested by client %x', action, id(client)) # All actions get the client socket as an additional # argument. This means they can do cool things with the # client object that might not be possible otherwise. output = method(client, *args, **kwargs) except Break: # The Break error propagates up the call chain and # causes the server to disconnect the client. break except self.queue.Timeout: # The client will pick this up. It's not so much a # serious error, which is why we don't log it: timeouts # are more often than not specified for very useful # reasons. write_json(writer, ['error:timeout', None]) except Exception, exc: self.log.error( 'Action %r raised error %r for client %x', action, exc, id(client)) write_json(writer, ['error:action', repr(exc)]) # Chances are that if an error occurred, we'll need to # raise it properly. This will trigger the closing of # the client socket via the finally clause below. raise ActionError(exc) else: # I guess debug is overkill. self.log.debug('Action %r successful for client %x', action, id(client)) write_json(writer, ['success', output]) except ActionError, exc: # Raise the inner action error. This will prevent the # catch-all except statement below from logging action # errors as 'unknown' errors. The exception has already been # written to the client. raise ActionError.args[0] except Exception, exc: self.log.error('Unknown error occurred for client %x: %r', id(client), exc) # If we really don't know what happened, then write_json(writer, ['error:unknown', repr(exc)]) raise # Raises the last exception, in this case exc. except: # If any exception has been raised at this point, it will show up as # an error in the logging output. self.log.error('Forcing disconnection of client %x', id(client)) finally: # If code reaches this point simply by non-error means (i.e. an # actual call to the quit, exit or shutdown actions), then it will # not include an error-level logging event. self.log.info('Client %x disconnected', id(client)) client.close() # Most of these methods are pure wrappers around the underlying queue # object.
AttributeError
dataset/ETHPy150Open zacharyvoase/zenqueue/zenqueue/server/native.py/NativeQueueServer.handle
4,833
def init_app(self, app): """Initialize this class with the given :class:`flask.Flask` application or :class:`flask.Blueprint` object. :param app: the Flask application or blueprint object :type app: flask.Flask :type app: flask.Blueprint Examples:: api = Api() api.add_resource(...) api.init_app(app) """ # If app is a blueprint, defer the initialization try: app.record(self._deferred_blueprint_init) # Flask.Blueprint has a 'record' attribute, Flask.Api does not except __HOLE__: self._init_app(app) else: self.blueprint = app
AttributeError
dataset/ETHPy150Open flask-restful/flask-restful/flask_restful/__init__.py/Api.init_app
4,834
def __init__(self, tag=None, api_username=None, api_password=None, api_key=None, hostname=None, port=None, serial=None, use_http=False, use_get=False, timeout=None, ssl_context=None): self._log = logging.getLogger(__name__).log self.tag = tag self.api_username = None self.api_password = None self.api_key = None self.hostname = None self.port = port self.serial = serial self.use_get = use_get self.timeout = timeout self.ssl_context = ssl_context self._log(DEBUG3, 'Python version: %s', sys.version) self._log(DEBUG3, 'xml.etree.ElementTree version: %s', etree.VERSION) self._log(DEBUG3, 'pan-python version: %s', __version__) if self.port is not None: try: self.port = int(self.port) if self.port < 1 or self.port > 65535: raise ValueError except ValueError: raise PanXapiError('Invalid port: %s' % self.port) if self.timeout is not None: try: self.timeout = int(self.timeout) if not self.timeout > 0: raise ValueError except ValueError: raise PanXapiError('Invalid timeout: %s' % self.timeout) if self.ssl_context is not None: try: ssl.SSLContext(ssl.PROTOCOL_SSLv23) except __HOLE__: raise PanXapiError('SSL module has no SSLContext()') init_panrc = {} # .panrc args from constructor if api_username is not None: init_panrc['api_username'] = api_username if api_password is not None: init_panrc['api_password'] = api_password if api_key is not None: init_panrc['api_key'] = api_key if hostname is not None: init_panrc['hostname'] = hostname if port is not None: init_panrc['port'] = port if serial is not None: init_panrc['serial'] = serial try: panrc = pan.rc.PanRc(tag=self.tag, init_panrc=init_panrc) except pan.rc.PanRcError as msg: raise PanXapiError(str(msg)) # If we get a api_username and api_password in the constructor # and no api_key, delete api_key inherited from .panrc if any. # Prevent confusion when you specify a api_username and # api_password but they are not used due to existence of # api_key in .panrc. if ('api_key' in panrc.panrc and api_username is not None and api_password is not None and api_key is None): del panrc.panrc['api_key'] self._log(DEBUG1, 'ignoring .panrc inherited api_key') if 'api_username' in panrc.panrc: self.api_username = panrc.panrc['api_username'] if 'api_password' in panrc.panrc: self.api_password = panrc.panrc['api_password'] if 'api_key' in panrc.panrc: self.api_key = panrc.panrc['api_key'] if 'hostname' in panrc.panrc: self.hostname = panrc.panrc['hostname'] if 'port' in panrc.panrc: self.port = panrc.panrc['port'] try: self.port = int(self.port) if self.port < 1 or self.port > 65535: raise ValueError except ValueError: raise PanXapiError('Invalid port from .panrc: %s' % self.port) if 'serial' in panrc.panrc: self.serial = panrc.panrc['serial'] if self.hostname is None: raise PanXapiError('hostname argument required') if self.api_key is None and (self.api_username is None or self.api_password is None): raise PanXapiError('api_key or api_username and ' + 'api_password arguments required') if use_http: scheme = 'http' else: scheme = 'https' self.uri = '%s://%s' % (scheme, self.hostname) if self.port is not None: self.uri += ':%s' % self.port self.uri += '/api/' if _legacy_urllib: self._log(DEBUG2, 'using legacy urllib')
AttributeError
dataset/ETHPy150Open PaloAltoNetworks/SplunkforPaloAltoNetworks/bin/lib/pan-python/lib/pan/xapi.py/PanXapi.__init__
4,835
@staticmethod def __qs_to_dict(qs): if isinstance(qs, dict): return qs d = {} try: pairs = qs.split('&') for pair in pairs: key, value = pair.split('=', 1) d[key] = value except __HOLE__: return None return d
ValueError
dataset/ETHPy150Open PaloAltoNetworks/SplunkforPaloAltoNetworks/bin/lib/pan-python/lib/pan/xapi.py/PanXapi.__qs_to_dict
4,836
def commit(self, cmd=None, action=None, sync=False, interval=None, timeout=None, extra_qs=None): self.__set_api_key() self.__clear_response() if interval is not None: try: interval = float(interval) if interval < 0: raise ValueError except ValueError: raise PanXapiError('Invalid interval: %s' % interval) else: interval = _job_query_interval if timeout is not None: try: timeout = int(timeout) if timeout < 0: raise ValueError except __HOLE__: raise PanXapiError('Invalid timeout: %s' % timeout) query = {} query['type'] = 'commit' query['key'] = self.api_key if self.serial is not None: query['target'] = self.serial if cmd is not None: query['cmd'] = cmd if action is not None: query['action'] = action if extra_qs is not None: query = self.__merge_extra_qs(query, extra_qs) response = self.__api_request(query) if not response: raise PanXapiError(self.status_detail) if not self.__set_response(response): raise PanXapiError(self.status_detail) if sync is not True: return job = self.element_root.find('./result/job') if job is None: return self._log(DEBUG2, 'commit job: %s', job.text) cmd = 'show jobs id "%s"' % job.text start_time = time.time() while True: try: self.op(cmd=cmd, cmd_xml=True) except PanXapiError as msg: raise PanXapiError('commit %s: %s' % (cmd, msg)) path = './result/job/status' status = self.element_root.find(path) if status is None: raise PanXapiError('no status element in ' + "'%s' response" % cmd) if status.text == 'FIN': # XXX commit vs. commit-all job status return self._log(DEBUG2, 'job %s status %s', job.text, status.text) if (timeout is not None and timeout != 0 and time.time() > start_time + timeout): raise PanXapiError('timeout waiting for ' + 'job %s completion' % job.text) self._log(DEBUG2, 'sleep %.2f seconds', interval) time.sleep(interval)
ValueError
dataset/ETHPy150Open PaloAltoNetworks/SplunkforPaloAltoNetworks/bin/lib/pan-python/lib/pan/xapi.py/PanXapi.commit
4,837
def export(self, category=None, from_name=None, to_name=None, pcapid=None, search_time=None, serialno=None, extra_qs=None): self.__set_api_key() self.__clear_response() query = {} query['type'] = 'export' query['key'] = self.api_key if category is not None: query['category'] = category if from_name is not None: query['from'] = from_name if to_name is not None: query['to'] = to_name if pcapid is not None: query['pcapid'] = pcapid if search_time is not None: query['search-time'] = search_time elif pcapid is not None: if isinstance(pcapid, str): try: n = int(pcapid) except __HOLE__: raise PanXapiError('Invalid pcapid: %s' % pcapid) pcap_time = self.pcapid_time(n) panos_time = self.panos_time(pcap_time) query['search-time'] = panos_time self._log(DEBUG1, 'pcapid time: %s %s', pcap_time, panos_time) if serialno is not None: query['serialno'] = serialno if extra_qs is not None: query = self.__merge_extra_qs(query, extra_qs) response = self.__api_request(query) if not response: raise PanXapiError(self.status_detail) if not self.__set_response(response): raise PanXapiError(self.status_detail) if self.export_result: self.export_result['category'] = category
ValueError
dataset/ETHPy150Open PaloAltoNetworks/SplunkforPaloAltoNetworks/bin/lib/pan-python/lib/pan/xapi.py/PanXapi.export
4,838
def log(self, log_type=None, nlogs=None, skip=None, filter=None, interval=None, timeout=None, extra_qs=None): self.__set_api_key() self.__clear_response() if interval is None: interval = _job_query_interval try: interval = float(interval) if interval < 0: raise ValueError except ValueError: raise PanXapiError('Invalid interval: %s' % interval) if timeout is not None: try: timeout = int(timeout) if timeout < 0: raise ValueError except __HOLE__: raise PanXapiError('Invalid timeout: %s' % timeout) query = {} query['type'] = 'log' query['key'] = self.api_key if log_type is not None: query['log-type'] = log_type if nlogs is not None: query['nlogs'] = nlogs if skip is not None: query['skip'] = skip if filter is not None: query['query'] = filter if extra_qs is not None: query = self.__merge_extra_qs(query, extra_qs) response = self.__api_request(query) if not response: raise PanXapiError(self.status_detail) if not self.__set_response(response): raise PanXapiError(self.status_detail) job = self.element_root.find('./result/job') if job is None: raise PanXapiError('no job element in type=log response') query = {} query['type'] = 'log' query['action'] = 'get' query['key'] = self.api_key query['job-id'] = job.text self._log(DEBUG2, 'log job: %s', job.text) start_time = time.time() while True: response = self.__api_request(query) if not response: raise PanXapiError(self.status_detail) if not self.__set_response(response): raise PanXapiError(self.status_detail) status = self.element_root.find('./result/job/status') if status is None: raise PanXapiError('no status element in ' + 'type=log&action=get response') if status.text == 'FIN': return self._log(DEBUG2, 'job %s status %s', job.text, status.text) if (timeout is not None and timeout != 0 and time.time() > start_time + timeout): raise PanXapiError('timeout waiting for ' + 'job %s completion' % job.text) self._log(DEBUG2, 'sleep %.2f seconds', interval) time.sleep(interval)
ValueError
dataset/ETHPy150Open PaloAltoNetworks/SplunkforPaloAltoNetworks/bin/lib/pan-python/lib/pan/xapi.py/PanXapi.log
4,839
def __exit__(self, exc_type, exc_value, tb): if exc_type is None: try: exc_name = self.expected.__name__ except __HOLE__: exc_name = str(self.expected) raise self.failureException( "{0} not raised".format(exc_name)) if not issubclass(exc_type, self.expected): # let unexpected exceptions pass through return False self.exception = exc_value # store for later retrieval if self.expected_regexp is None: return True expected_regexp = self.expected_regexp if isinstance(expected_regexp, basestring): expected_regexp = re.compile(expected_regexp) if not expected_regexp.search(str(exc_value)): raise self.failureException('"%s" does not match "%s"' % (expected_regexp.pattern, str(exc_value))) return True
AttributeError
dataset/ETHPy150Open openstack/stacktach/tests/unit/__init__.py/_AssertRaisesContext.__exit__
4,840
def __getitem__(self, item): if isinstance(item, int): try: return next(islice(self, item, item+1)) except __HOLE__: raise IndexError('index out of range') elif isinstance(item, slice): return islice(self, item.start, item.stop, item.step)
StopIteration
dataset/ETHPy150Open alimanfoo/petl/petl/util/base.py/IterContainer.__getitem__
4,841
def itervalues(table, field, **kwargs): missing = kwargs.get('missing', None) it = iter(table) hdr = next(it) indices = asindices(hdr, field) assert len(indices) > 0, 'no field selected' getvalue = operator.itemgetter(*indices) for row in it: try: value = getvalue(row) yield value except __HOLE__: if len(indices) > 1: # try one at a time value = list() for i in indices: if i < len(row): value.append(row[i]) else: value.append(missing) yield tuple(value) else: yield missing
IndexError
dataset/ETHPy150Open alimanfoo/petl/petl/util/base.py/itervalues
4,842
def asdict(hdr, row, missing=None): flds = [text_type(f) for f in hdr] try: # list comprehension should be faster items = [(flds[i], row[i]) for i in range(len(flds))] except IndexError: # short row, fall back to slower for loop items = list() for i, f in enumerate(flds): try: v = row[i] except __HOLE__: v = missing items.append((f, v)) return dict(items)
IndexError
dataset/ETHPy150Open alimanfoo/petl/petl/util/base.py/asdict
4,843
def asnamedtuple(nt, row, missing=None): try: return nt(*row) except __HOLE__: # row may be long or short # expected number of fields ne = len(nt._fields) # actual number of values na = len(row) if ne > na: # pad short rows padded = tuple(row) + (missing,) * (ne-na) return nt(*padded) elif ne < na: # truncate long rows return nt(*row[:ne]) else: raise
TypeError
dataset/ETHPy150Open alimanfoo/petl/petl/util/base.py/asnamedtuple
4,844
def __getitem__(self, f): if isinstance(f, int): idx = f elif f in self.flds: idx = self.flds.index(f) else: raise ArgumentError('item ' + repr(f) + ' not in fields ' + repr(self.flds)) try: return super(Record, self).__getitem__(idx) except __HOLE__: # handle short rows return self.missing
IndexError
dataset/ETHPy150Open alimanfoo/petl/petl/util/base.py/Record.__getitem__
4,845
def __getattr__(self, f): if f in self.flds: try: return super(Record, self).__getitem__(self.flds.index(f)) except __HOLE__: # handle short rows return self.missing else: raise ArgumentError('item ' + repr(f) + ' not in fields ' + repr(self.flds))
IndexError
dataset/ETHPy150Open alimanfoo/petl/petl/util/base.py/Record.__getattr__
4,846
def main(argv=None): """script main. parses command line options in sys.argv, unless *argv* is given. """ if not argv: argv = sys.argv # setup command line parser parser = E.OptionParser( version="%prog version: $Id", usage=globals()["__doc__"]) parser.add_option("-u", "--ucsc-genome", dest="ucsc_genome", type="string", help="UCSC genome identifier [default=%default].") parser.add_option("-g", "--genome-file", dest="genome_file", type="string", help="filename with genome [default=%default].") parser.add_option("--extend", dest="extension", type="int", help="extend tags by this number of bases " "[default=%default].") parser.add_option("--shift-size", dest="shift", type="int", help="shift tags by this number of bases " "[default=%default].") parser.add_option("--window-size", dest="window_size", type="int", help="window size to be used in the analysis" "[default=%default].") parser.add_option("--saturation-iterations", dest="saturation_iterations", type="int", help="iterations for saturation analysis " "[default=%default].") parser.add_option("-t", "--toolset", dest="toolset", type="choice", action="append", choices=("saturation", "coverage", "enrichment", "dmr", "rms", "rpm", "all", "convert"), help="actions to perform [default=%default].") parser.add_option("-w", "--bigwig-file", dest="bigwig", action="store_true", help="store wig files as bigwig files - requires a " "genome file [default=%default]") parser.add_option("--treatment", dest="treatment_files", type="string", action="append", help="BAM files for treatment. At least one is required " "[%default]") parser.add_option("--control", dest="control_files", type="string", action="append", help="BAM files for control for differential " "methylation analysis. Optional [%default].") parser.add_option("--input", dest="input_files", type="string", action="append", help="BAM files for input correction. " "Optional [%default].") parser.add_option("--is-not-medip", dest="is_medip", action="store_false", help="data is not MeDIP data and is not expected " "to fit the calibration model. No CpG " "density normalized rms data is computed" "[default=%default].") parser.add_option("--output-rdata", dest="output_rdata", action="store_true", help="in dmr analysis, write R session to file. " "The file name " "is given by --ouptut-filename-pattern [%default].") parser.add_option("--rdata-file", dest="input_rdata", type="string", help="in dmr analysis, read saved R session from " "file. This can be used to apply different " "filters [%default]") parser.add_option("--fdr-threshold", dest="fdr_threshold", type="float", help="FDR threshold to apply for selecting DMR " "[default=%default].") parser.add_option("--fdr-method", dest="fdr_method", type="choice", choices=("bonferroni", "BH", "holm", "hochberg", "hommel", "BY", "fdr", "none"), help="FDR method to apply for selecting DMR " "[default=%default].") parser.set_defaults( input_format="bam", ucsc_genome="Hsapiens.UCSC.hg19", genome_file=None, extend=0, shift=0, window_size=300, saturation_iterations=10, toolset=[], bigwig=False, treatment_files=[], control_files=[], input_files=[], output_rdata=False, input_rdata=None, is_medip=True, fdr_threshold=0.1, fdr_method="BH", ) # add common options (-h/--help, ...) and parse command line (options, args) = E.Start(parser, argv=argv, add_output_options=True) if "convert" in options.toolset: results = [] for line in CSV.DictReader(options.stdin, dialect="excel-tab"): if line['edgeR.p.value'] == "NA": continue # assumes only a single treatment/control treatment_name = options.treatment_files[0] control_name = options.control_files[0] status = "OK" try: results.append( Expression.GeneExpressionResult._make(( "%s:%i-%i" % (line['chr'], int(line['start']), int(line['stop'])), treatment_name, float(line['MSets1.rpkm.mean']), 0, control_name, float(line['MSets2.rpkm.mean']), 0, float(line['edgeR.p.value']), float(line['edgeR.adj.p.value']), float(line['edgeR.logFC']), math.pow(2.0, float(line['edgeR.logFC'])), float(line['edgeR.logFC']), # no transform ["0", "1"][float(line['edgeR.adj.p.value']) < options.fdr_threshold], status))) except __HOLE__, msg: raise ValueError("parsing error %s in line: %s" % (msg, line)) Expression.writeExpressionResults(options.stdout, results) return if len(options.treatment_files) < 1: raise ValueError("please specify a filename with sample data") if options.bigwig and not options.genome_file: raise ValueError("please provide a genome file when outputting bigwig") if options.genome_file: fasta = IndexedFasta.IndexedFasta(options.genome_file) contig_sizes = fasta.getContigSizes() if len(options.toolset) == 0: options.toolset = ["all"] do_all = "all" in options.toolset # load MEDIPS R.library('MEDIPS') genome_file = 'BSgenome.%s' % options.ucsc_genome R.library(genome_file) window_size = options.window_size extend = options.extend shift = options.shift saturation_iterations = options.saturation_iterations # TRUE is the default in MEDIPS uniq = "TRUE" if "saturation" in options.toolset or do_all: E.info("saturation analysis") for fn in options.treatment_files + options.control_files: paired = isPaired(fn) R('''sr = MEDIPS.saturation( file='%(fn)s', BSgenome='%(genome_file)s', shift=%(shift)i, extend=%(extend)i, window_size=%(window_size)i, uniq=%(uniq)s, nit = %(saturation_iterations)i, paired = %(paired)s, nrit = 1)''' % locals()) R.png(E.getOutputFile("%s_saturation.png" % fn)) R('''MEDIPS.plotSaturation(sr)''') R('''dev.off()''') R('''write.table(sr$estimation, file ='%s', sep='\t')''' % E.getOutputFile("%s_saturation_estimation.tsv" % fn)) outfile = IOTools.openFile( E.getOutputFile("%s_saturation.tsv" % fn, "w")) outfile.write("category\tvalues\n") outfile.write( "estimated_correlation\t%s\n" % ",".join(["%f" % x for x in R('''sr$maxEstCor''')])) outfile.write( "true_correlation\t%s\n" % ",".join(["%f" % x for x in R('''sr$maxTruCor''')])) outfile.write( "nreads\t%s\n" % ",".join(["%i" % x for x in R('''sr$numberReads''')])) outfile.close() if "coverage" in options.toolset or do_all: E.info("CpG coverage analysis") for fn in options.treatment_files + options.control_files: paired = isPaired(fn) R('''cr = MEDIPS.seqCoverage( file='%(fn)s', BSgenome='%(genome_file)s', pattern='CG', shift=%(shift)i, extend=%(extend)i, paired=%(paired)s, uniq=%(uniq)s)''' % locals()) R.png(E.getOutputFile("%s_cpg_coverage_pie.png" % fn)) R('''MEDIPS.plotSeqCoverage(seqCoverageObj=cr, type = "pie", cov.level = c(0, 1, 2, 3, 4, 5))''') R('''dev.off()''') R.png(E.getOutputFile("%s_cpg_coverage_hist.png" % fn)) R('''MEDIPS.plotSeqCoverage(seqCoverageObj=cr, type = "hist", t=15)''') R('''dev.off()''') # note: this file is large R('''write.table(cr$cov.res, file=gzfile('%s','w'), sep='\t')''' % E.getOutputFile("%s_saturation_coveredpos.tsv.gz" % fn)) if 'enrichment' in options.toolset or do_all: E.info("CpG enrichment analysis") outfile = IOTools.openFile(E.getOutputFile("enrichment.tsv.gz"), "w") slotnames = (("regions.CG", "regions_CG", "%i"), ("regions.C", "regions_C", "%s"), ("regions.G", "regions_G", "%f"), ("regions.relH", "regions_relH", "%i"), ("regions.GoGe", "regions_GoGe", "%i"), ("genome.CG", "genome_CG", "%s"), ("genome.C", "genome_C", "%s"), ("genome.G", "genome_G", "%i"), ("genome.relH", "genome_relH", "%i"), ("enrichment.score.relH", "enrichment_relH", "%s"), ("enrichment.score.GoGe", "enrichment_GoGe", "%s")) outfile.write("\t".join(['sample'] + [x[1] for x in slotnames]) + "\n") for fn in options.treatment_files + options.control_files: paired = isPaired(fn) R('''ce = MEDIPS.CpGenrich( file='%(fn)s', BSgenome='%(genome_file)s', shift=%(shift)i, extend=%(extend)i, paired=%(paired)s, uniq=%(uniq)s)''' % locals()) outfile.write("%s" % fn) for slotname, label, pattern in slotnames: value = tuple(R('''ce$%s''' % slotname)) if len(value) == 0: value = "" outfile.write("\t%s" % pattern % value[0]) outfile.write("\n") outfile.close() if options.input_rdata: E.info("reading R session info from '%s'" % options.input_rdata) R('''load('%s')''' % options.input_rdata) else: if "dmr" in options.toolset or "correlation" in options.toolset \ or do_all: # build four sets for x, fn in enumerate(options.treatment_files): paired = isPaired(fn) E.info("loading '%s'" % fn) R('''treatment_R%(x)i = MEDIPS.createSet( file='%(fn)s', BSgenome='%(genome_file)s', shift=%(shift)i, extend=%(extend)i, window_size=%(window_size)i, paired=%(paired)s, uniq=%(uniq)s)''' % locals()) R('''treatment_set = c(%s)''' % ",".join(["treatment_R%i" % x for x in range(len(options.treatment_files))])) if options.control_files: for x, fn in enumerate(options.control_files): paired = isPaired(fn) E.info("loading '%s'" % fn) R('''control_R%(x)i = MEDIPS.createSet( file='%(fn)s', BSgenome='%(genome_file)s', shift=%(shift)i, extend=%(extend)i, window_size=%(window_size)i, paired=%(paired)s, uniq=%(uniq)s)''' % locals()) R('''control_set = c(%s)''' % ",".join(["control_R%i" % x for x in range(len(options.control_files))])) # build coupling vector R('''CS = MEDIPS.couplingVector(pattern="CG", refObj = treatment_set[[1]])''') if "correlation" in options.toolset or do_all: R('''cor.matrix = MEDIPS.correlation( c(treatment_set, control_set))''') R('''write.table(cor.matrix, file='%s', sep="\t")''' % E.getOutputFile("correlation")) if "dmr" in options.toolset or do_all: # Data that does not fit the model causes # "Error in 1:max_signal_index : argument of length 0" # The advice is to set MeDIP=FALSE # See: http://comments.gmane.org/ # gmane.science.biology.informatics.conductor/52319 if options.is_medip: medip = "TRUE" else: medip = "FALSE" fdr_method = options.fdr_method E.info("applying test for differential methylation") R('''meth = MEDIPS.meth( MSet1 = treatment_set, MSet2 = control_set, CSet = CS, ISet1 = NULL, ISet2 = NULL, p.adj = "%(fdr_method)s", diff.method = "edgeR", prob.method = "poisson", MeDIP = %(medip)s, CNV = F, type = "rpkm", minRowSum = 1)''' % locals()) # Note: several Gb in size # Output full methylation data table R('''write.table(meth, file=gzfile('%s', 'w'), sep="\t", row.names=F, quote=F)''' % E.getOutputFile("data.tsv.gz")) # save R session if options.output_rdata: R('''save.image(file='%s', safe=FALSE)''' % E.getOutputFile("session.RData")) # DMR analysis - test for windows and output if "dmr" in options.toolset: E.info("selecting differentially methylated windows") # test windows for differential methylation fdr_threshold = options.fdr_threshold R('''tested = MEDIPS.selectSig(meth, adj=T, ratio=NULL, p.value=%(fdr_threshold)f, bg.counts=NULL, CNV=F)''' % locals()) R('''write.table(tested, file=gzfile('%s', 'w'), sep="\t", quote=F)''' % E.getOutputFile("significant_windows.gz")) # select gain and merge adjacent windows try: R('''gain = tested[which(tested[, grep("logFC", colnames(tested))] > 0),]; gain_merged = MEDIPS.mergeFrames(frames=gain, distance=1)''') E.info('gain output: %s, merged: %s' % (str(R('''dim(gain)''')), str(R('''dim(gain_merged)''')))) R('''of=gzfile('%s', 'w'); write.table(gain_merged, file=of, sep="\t", quote=F, row.names=FALSE, col.names=FALSE); close(of)''' % E.getOutputFile("gain.bed.gz")) except rpy2.rinterface.RRuntimeError, msg: E.warn("could not compute gain windows: msg=%s" % msg) # select loss and merge adjacent windows try: R('''loss = tested[which(tested[, grep("logFC", colnames(tested))] < 0),]; loss_merged = MEDIPS.mergeFrames(frames=loss, distance=1)''') E.info('loss output: %s, merged: %s' % (str(R('''dim(loss)''')), str(R('''dim(loss_merged)''')))) R('''of=gzfile('%s', 'w'); write.table(loss_merged, file=of, sep="\t", quote=F, row.names=F, col.names=F); close(of)''' % E.getOutputFile("loss.bed.gz")) except rpy2.rinterface.RRuntimeError, msg: E.warn("could not compute loss windows: msg=%s" % msg) # if "rpm" in options.toolset or do_all: # outputfile = E.getOutputFile("rpm.wig") # R('''MEDIPS.exportWIG(file = '%(outputfile)s', # data = CONTROL.SET, raw = T, descr = "rpm")''' % # locals()) # if options.bigwig: # bigwig(outputfile, contig_sizes) # else: # compress(outputfile) # if "rms" in options.toolset or do_all: # outputfile = E.getOutputFile("rms.wig") # R('''MEDIPS.exportWIG(file = '%(outputfile)s', # data = CONTROL.SET, raw = F, descr = "rms")''' % # locals()) # if options.bigwig: # bigwig(outputfile, contig_sizes) # else: # compress(outputfile) # write footer and output benchmark information. E.Stop()
ValueError
dataset/ETHPy150Open CGATOxford/cgat/scripts/runMEDIPS.py/main
4,847
def load(self, source): """ Load a snapshot from a file path or a file object """ # Try to open, else assume it's a file object try: fp = open(source, "r") data = fp.read() except TypeError: data = source.read() try: # Load JSON into an OrderedDict parsed = json.loads(data, object_pairs_hook=collections.OrderedDict) self._load_json(parsed) except ValueError: self._load_deprecated(data) try: source.close() except __HOLE__: pass
AttributeError
dataset/ETHPy150Open aldebaran/qibuild/python/qisrc/snapshot.py/Snapshot.load
4,848
def _load_deprecated(self, source): for line in source.splitlines(): try: (src, sha1) = line.split(":") except __HOLE__: ui.error("could not parse", line) continue src = src.strip() sha1 = sha1.strip() self.refs[src] = sha1
ValueError
dataset/ETHPy150Open aldebaran/qibuild/python/qisrc/snapshot.py/Snapshot._load_deprecated
4,849
def probe(self, value): storage_type = value.__class__ self.storage_types.add(storage_type.__name__) self.value_count += 1 # FIXME: check for existence in field.empty_values if value is None: self.null_count += 1 if value == '': self.empty_string_count += 1 try: l = len(value) self.min_len = min(self.min_len, l) self.max_len = max(self.max_len, l) except __HOLE__: pass self._probe_distinct(value) for probe in self.probes: probe.probe(value)
TypeError
dataset/ETHPy150Open Stiivi/bubbles/bubbles/ops/audit.py/BasicAuditProbe.probe
4,850
def __getattr__(self, name): self.__lock__.acquire() try: try: return self.__storage__[get_ident()][name] except __HOLE__: raise AttributeError(name) finally: self.__lock__.release()
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/webapp2-2.3/webapp2_extras/local.py/Local.__getattr__
4,851
def __delattr__(self, name): self.__lock__.acquire() try: try: del self.__storage__[get_ident()][name] except __HOLE__: raise AttributeError(name) finally: self.__lock__.release()
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/webapp2-2.3/webapp2_extras/local.py/Local.__delattr__
4,852
def _get_current_object(self): """Return the current object. This is useful if you want the real object behind the proxy at a time for performance reasons or because you want to pass the object into a different context. """ if not hasattr(self.__local, '__release_local__'): return self.__local() try: return getattr(self.__local, self.__name__) except __HOLE__: raise RuntimeError('no object bound to %s' % self.__name__)
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/webapp2-2.3/webapp2_extras/local.py/LocalProxy._get_current_object
4,853
@property def __dict__(self): try: return self._get_current_object().__dict__ except __HOLE__: return AttributeError('__dict__')
RuntimeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/webapp2-2.3/webapp2_extras/local.py/LocalProxy.__dict__
4,854
def __repr__(self): try: obj = self._get_current_object() except __HOLE__: return '<%s unbound>' % self.__class__.__name__ return repr(obj)
RuntimeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/webapp2-2.3/webapp2_extras/local.py/LocalProxy.__repr__
4,855
def __nonzero__(self): try: return bool(self._get_current_object()) except __HOLE__: return False
RuntimeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/webapp2-2.3/webapp2_extras/local.py/LocalProxy.__nonzero__
4,856
def __unicode__(self): try: return unicode(self._get_current_object()) except __HOLE__: return repr(self)
RuntimeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/webapp2-2.3/webapp2_extras/local.py/LocalProxy.__unicode__
4,857
def __dir__(self): try: return dir(self._get_current_object()) except __HOLE__: return []
RuntimeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/webapp2-2.3/webapp2_extras/local.py/LocalProxy.__dir__
4,858
def assign_variable_types(variables,local_dict={}, global_dict={}, auto_downcast=1, type_converters=converters.default): incoming_vars = {} incoming_vars.update(global_dict) incoming_vars.update(local_dict) variable_specs = [] errors = {} for var in variables: try: example_type = incoming_vars[var] # look through possible type specs to find which one # should be used to for example_type spec = None for factory in type_converters: if factory.type_match(example_type): spec = factory.type_spec(var,example_type) break if not spec: # should really define our own type. raise IndexError else: variable_specs.append(spec) except __HOLE__: errors[var] = ("The type and dimensionality specifications" + "for variable '" + var + "' are missing.") except IndexError: errors[var] = ("Unable to convert variable '" + var + "' to a C++ type.") if errors: raise TypeError(format_error_msg(errors)) if auto_downcast: variable_specs = downcast(variable_specs) return variable_specs
KeyError
dataset/ETHPy150Open scipy/scipy/scipy/weave/ext_tools.py/assign_variable_types
4,859
def getYCoord(self, value, side=None): if "left" == side: yLabelValues = self.yLabelValuesL yTop = self.yTopL yBottom = self.yBottomL elif "right" == side: yLabelValues = self.yLabelValuesR yTop = self.yTopR yBottom = self.yBottomR else: yLabelValues = self.yLabelValues yTop = self.yTop yBottom = self.yBottom try: highestValue = max(yLabelValues) lowestValue = min(yLabelValues) except __HOLE__: highestValue = yTop lowestValue = yBottom pixelRange = self.area['ymax'] - self.area['ymin'] relativeValue = value - lowestValue valueRange = highestValue - lowestValue if self.logBase: if value <= 0: return None relativeValue = ( math.log(value, self.logBase) - math.log(lowestValue, self.logBase)) valueRange = math.log(highestValue, self.logBase) - math.log( lowestValue, self.logBase) pixelToValueRatio = pixelRange / valueRange valueInPixels = pixelToValueRatio * relativeValue return self.area['ymax'] - valueInPixels
ValueError
dataset/ETHPy150Open brutasse/graphite-api/graphite_api/render/glyph.py/LineGraph.getYCoord
4,860
def drawLines(self, width=None, dash=None, linecap='butt', linejoin='miter'): if not width: width = self.lineWidth self.ctx.set_line_width(width) originalWidth = width width = float(int(width) % 2) / 2 if dash: self.ctx.set_dash(dash, 1) else: self.ctx.set_dash([], 0) self.ctx.set_line_cap({ 'butt': cairo.LINE_CAP_BUTT, 'round': cairo.LINE_CAP_ROUND, 'square': cairo.LINE_CAP_SQUARE, }[linecap]) self.ctx.set_line_join({ 'miter': cairo.LINE_JOIN_MITER, 'round': cairo.LINE_JOIN_ROUND, 'bevel': cairo.LINE_JOIN_BEVEL, }[linejoin]) # check whether there is an stacked metric singleStacked = False for series in self.data: if 'stacked' in series.options: singleStacked = True if singleStacked: self.data = sort_stacked(self.data) # stack the values if self.areaMode == 'stacked' and not self.secondYAxis: # TODO Allow stacked area mode with secondYAxis total = [] for series in self.data: if 'drawAsInfinite' in series.options: continue series.options['stacked'] = True for i in range(len(series)): if len(total) <= i: total.append(0) if series[i] is not None: original = series[i] series[i] += total[i] total[i] += original elif self.areaMode == 'first': self.data[0].options['stacked'] = True elif self.areaMode == 'all': for series in self.data: if 'drawAsInfinite' not in series.options: series.options['stacked'] = True # apply alpha channel and create separate stroke series if self.params.get('areaAlpha'): try: alpha = float(self.params['areaAlpha']) except __HOLE__: alpha = 0.5 strokeSeries = [] for series in self.data: if 'stacked' in series.options: series.options['alpha'] = alpha newSeries = TimeSeries( series.name, series.start, series.end, series.step * series.valuesPerPoint, [x for x in series]) newSeries.xStep = series.xStep newSeries.color = series.color if 'secondYAxis' in series.options: newSeries.options['secondYAxis'] = True strokeSeries.append(newSeries) self.data += strokeSeries # setup the clip region self.ctx.set_line_width(1.0) self.ctx.rectangle(self.area['xmin'], self.area['ymin'], self.area['xmax'] - self.area['xmin'], self.area['ymax'] - self.area['ymin']) self.ctx.clip() self.ctx.set_line_width(originalWidth) # save clip to restore once stacked areas are drawn self.ctx.save() clipRestored = False for series in self.data: if 'stacked' not in series.options: # stacked areas are always drawn first. if this series is not # stacked, we finished stacking. reset the clip region so # lines can show up on top of the stacked areas. if not clipRestored: clipRestored = True self.ctx.restore() if 'lineWidth' in series.options: self.ctx.set_line_width(series.options['lineWidth']) if 'dashed' in series.options: self.ctx.set_dash([series.options['dashed']], 1) else: self.ctx.set_dash([], 0) # Shift the beginning of drawing area to the start of the series # if the graph itself has a larger range missingPoints = (series.start - self.startTime) / series.step startShift = series.xStep * (missingPoints / series.valuesPerPoint) x = float(self.area['xmin']) + startShift + (self.lineWidth / 2.0) y = float(self.area['ymin']) startX = x if series.options.get('invisible'): self.setColor(series.color, 0, True) else: self.setColor(series.color, series.options.get('alpha') or 1.0) # The number of preceeding datapoints that had a None value. consecutiveNones = 0 for index, value in enumerate(series): if value != value: # convert NaN to None value = None if value is None and self.params.get('drawNullAsZero'): value = 0.0 if value is None: if consecutiveNones == 0: self.ctx.line_to(x, y) if 'stacked' in series.options: # Close off and fill area before unknown interval if self.secondYAxis: if 'secondYAxis' in series.options: self.fillAreaAndClip( x, y, startX, self.getYCoord(0, "right")) else: self.fillAreaAndClip( x, y, startX, self.getYCoord(0, "left")) else: self.fillAreaAndClip(x, y, startX, self.getYCoord(0)) x += series.xStep consecutiveNones += 1 else: if self.secondYAxis: if 'secondYAxis' in series.options: y = self.getYCoord(value, "right") else: y = self.getYCoord(value, "left") else: y = self.getYCoord(value) if y is None: value = None elif y < 0: y = 0 if 'drawAsInfinite' in series.options and value > 0: self.ctx.move_to(x, self.area['ymax']) self.ctx.line_to(x, self.area['ymin']) self.ctx.stroke() x += series.xStep continue if consecutiveNones > 0: startX = x if self.lineMode == 'staircase': if consecutiveNones > 0: self.ctx.move_to(x, y) else: self.ctx.line_to(x, y) x += series.xStep self.ctx.line_to(x, y) elif self.lineMode == 'slope': if consecutiveNones > 0: self.ctx.move_to(x, y) self.ctx.line_to(x, y) x += series.xStep elif self.lineMode == 'connected': # If if the gap is larger than the connectedLimit or # if this is the first non-None datapoint in the # series, start drawing from that datapoint. if ( consecutiveNones > self.connectedLimit or consecutiveNones == index ): self.ctx.move_to(x, y) self.ctx.line_to(x, y) x += series.xStep consecutiveNones = 0 if 'stacked' in series.options: if self.lineMode == 'staircase': xPos = x else: xPos = x-series.xStep if self.secondYAxis: if 'secondYAxis' in series.options: areaYFrom = self.getYCoord(0, "right") else: areaYFrom = self.getYCoord(0, "left") else: areaYFrom = self.getYCoord(0) self.fillAreaAndClip(xPos, y, startX, areaYFrom) else: self.ctx.stroke() # return to the original line width self.ctx.set_line_width(originalWidth) if 'dash' in series.options: # if we changed the dash setting before, change it back now if dash: self.ctx.set_dash(dash, 1) else: self.ctx.set_dash([], 0)
ValueError
dataset/ETHPy150Open brutasse/graphite-api/graphite_api/render/glyph.py/LineGraph.drawLines
4,861
def wait_until_shutdown():
  while not _shutting_down:
    try:
      time.sleep(1)
    except __HOLE__:
      # On Windows time.sleep raises IOError when interrupted.
      pass
IOError
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/google/appengine/tools/devappserver2/shutdown.py/wait_until_shutdown
4,862
def setupdomain(self, data):

    #for debug purpose
    # for key, value in data.items() :
    #     print (key, value)

    ee_domain_name = data['site_name']
    ee_site_webroot = data['webroot'] if 'webroot' in data.keys() else ''

    # Check if nginx configuration already exists
    # if os.path.isfile('/etc/nginx/sites-available/{0}'
    #                   .format(ee_domain_name)):
    #     raise SiteError("nginx configuration already exists for site")

    Log.info(self, "Setting up NGINX configuration \t", end='')
    # write nginx config for file
    try:
        ee_site_nginx_conf = open('/etc/nginx/sites-available/{0}'
                                  .format(ee_domain_name),
                                  encoding='utf-8', mode='w')

        if not data['php7']:
            self.app.render((data), 'virtualconf.mustache',
                            out=ee_site_nginx_conf)
        else:
            self.app.render((data), 'virtualconf-php7.mustache',
                            out=ee_site_nginx_conf)
        ee_site_nginx_conf.close()
    except __HOLE__ as e:
        Log.debug(self, "{0}".format(e))
        raise SiteError("create nginx configuration failed for site")
    except Exception as e:
        Log.debug(self, "{0}".format(e))
        raise SiteError("create nginx configuration failed for site")
    finally:
        # Check nginx -t and return status over it
        try:
            Log.debug(self, "Checking generated nginx conf, please wait...")
            FNULL = open('/dev/null', 'w')
            ret = subprocess.check_call(["nginx", "-t"], stdout=FNULL,
                                        stderr=subprocess.STDOUT)
            Log.info(self, "[" + Log.ENDC + "Done" + Log.OKBLUE + "]")
        except CalledProcessError as e:
            Log.debug(self, "{0}".format(str(e)))
            Log.info(self, "[" + Log.ENDC + Log.FAIL + "Fail"
                     + Log.OKBLUE + "]")
            raise SiteError("created nginx configuration failed for site."
                            " check with `nginx -t`")

    # create symbolic link for
    EEFileUtils.create_symlink(self, ['/etc/nginx/sites-available/{0}'
                                      .format(ee_domain_name),
                                      '/etc/nginx/sites-enabled/{0}'
                                      .format(ee_domain_name)])

    if 'proxy' in data.keys() and data['proxy']:
        return

    # Creating htdocs & logs directory
    Log.info(self, "Setting up webroot \t\t", end='')
    try:
        if not os.path.exists('{0}/htdocs'.format(ee_site_webroot)):
            os.makedirs('{0}/htdocs'.format(ee_site_webroot))
        if not os.path.exists('{0}/logs'.format(ee_site_webroot)):
            os.makedirs('{0}/logs'.format(ee_site_webroot))
        if not os.path.exists('{0}/conf/nginx'.format(ee_site_webroot)):
            os.makedirs('{0}/conf/nginx'.format(ee_site_webroot))

        EEFileUtils.create_symlink(self, ['/var/log/nginx/{0}.access.log'
                                          .format(ee_domain_name),
                                          '{0}/logs/access.log'
                                          .format(ee_site_webroot)])
        EEFileUtils.create_symlink(self, ['/var/log/nginx/{0}.error.log'
                                          .format(ee_domain_name),
                                          '{0}/logs/error.log'
                                          .format(ee_site_webroot)])
    except Exception as e:
        Log.debug(self, "{0}".format(e))
        raise SiteError("setup webroot failed for site")
    finally:
        # TODO Check if directories are setup
        if (os.path.exists('{0}/htdocs'.format(ee_site_webroot)) and
                os.path.exists('{0}/logs'.format(ee_site_webroot))):
            Log.info(self, "[" + Log.ENDC + "Done" + Log.OKBLUE + "]")
        else:
            Log.info(self, "[" + Log.ENDC + "Fail" + Log.OKBLUE + "]")
            raise SiteError("setup webroot failed for site")
IOError
dataset/ETHPy150Open EasyEngine/easyengine/ee/cli/plugins/site_functions.py/setupdomain
4,863
def setupLetsEncrypt(self, ee_domain_name):
    ee_wp_email = EEVariables.ee_email
    while not ee_wp_email:
        try:
            ee_wp_email = input('Enter WordPress email: ')
        except EOFError as e:
            Log.debug(self, "{0}".format(e))
            raise SiteError("input WordPress username failed")

    if not os.path.isdir("/opt/letsencrypt"):
        cloneLetsEncrypt(self)
    EEFileUtils.chdir(self, '/opt/letsencrypt')
    EEShellExec.cmd_exec(self, "git pull")

    if os.path.isfile("/etc/letsencrypt/renewal/{0}.conf"
                      .format(ee_domain_name)):
        Log.debug(self, "LetsEncrypt SSL Certificate found for the domain {0}"
                  .format(ee_domain_name))
        ssl = archivedCertificateHandle(self, ee_domain_name, ee_wp_email)
    else:
        Log.warn(self, "Please Wait while we fetch SSL Certificate for your "
                 "site.\nIt may take time depending upon network.")
        ssl = EEShellExec.cmd_exec(self, "./letsencrypt-auto certonly "
                                   "--webroot -w /var/www/{0}/htdocs/ -d {0} "
                                   "-d www.{0} ".format(ee_domain_name)
                                   + "--email {0} --text --agree-tos"
                                   .format(ee_wp_email))
    if ssl:
        Log.info(self, "Let's Encrypt successfully setup for your site")
        Log.info(self, "Your certificate and chain have been saved at "
                 "/etc/letsencrypt/live/{0}/fullchain.pem"
                 .format(ee_domain_name))
        Log.info(self, "Configuring Nginx SSL configuration")

        try:
            Log.info(self, "Adding /var/www/{0}/conf/nginx/ssl.conf"
                     .format(ee_domain_name))

            sslconf = open("/var/www/{0}/conf/nginx/ssl.conf"
                           .format(ee_domain_name),
                           encoding='utf-8', mode='w')
            sslconf.write("listen 443 ssl {http2};\n"
                          .format(http2=("http2" if
                                         EEAptGet.is_installed(self, 'nginx-mainline')
                                         else "spdy")) +
                          "ssl on;\n"
                          "ssl_certificate /etc/letsencrypt/live/{0}/fullchain.pem;\n"
                          "ssl_certificate_key /etc/letsencrypt/live/{0}/privkey.pem;\n"
                          .format(ee_domain_name))
            sslconf.close()

            # updateSiteInfo(self, ee_domain_name, ssl=True)

            EEGit.add(self, ["/etc/letsencrypt"],
                      msg="Adding letsencrypt folder")
        except __HOLE__ as e:
            Log.debug(self, str(e))
            Log.debug(self, "Error occured while generating "
                      "ssl.conf")
    else:
        Log.error(self, "Unable to setup, Let\'s Encrypt", False)
        Log.error(self, "Please make sure that your site is pointed to \n"
                  "same server on which you are running Let\'s Encrypt "
                  "Client \n to allow it to verify the site automatically.")
IOError
dataset/ETHPy150Open EasyEngine/easyengine/ee/cli/plugins/site_functions.py/setupLetsEncrypt
4,864
def httpsRedirect(self, ee_domain_name, redirect=True):
    if redirect:
        if os.path.isfile("/etc/nginx/conf.d/force-ssl-{0}.conf.disabled"
                          .format(ee_domain_name)):
            EEFileUtils.mvfile(self,
                               "/etc/nginx/conf.d/force-ssl-{0}.conf.disabled"
                               .format(ee_domain_name),
                               "/etc/nginx/conf.d/force-ssl-{0}.conf"
                               .format(ee_domain_name))
        else:
            try:
                Log.info(self, "Adding /etc/nginx/conf.d/force-ssl-{0}.conf"
                         .format(ee_domain_name))
                sslconf = open("/etc/nginx/conf.d/force-ssl-{0}.conf"
                               .format(ee_domain_name),
                               encoding='utf-8', mode='w')
                sslconf.write("server {\n"
                              "\tlisten 80;\n" +
                              "\tserver_name www.{0} {0};\n"
                              .format(ee_domain_name) +
                              "\treturn 301 https://{0}"
                              .format(ee_domain_name) + "$request_uri;\n}")
                sslconf.close()
                # Nginx Configation into GIT
            except __HOLE__ as e:
                Log.debug(self, str(e))
                Log.debug(self, "Error occured while generating "
                          "/etc/nginx/conf.d/force-ssl-{0}.conf"
                          .format(ee_domain_name))

        Log.info(self, "Added HTTPS Force Redirection for Site "
                 " http://{0}".format(ee_domain_name))
        EEGit.add(self, ["/etc/nginx"],
                  msg="Adding /etc/nginx/conf.d/force-ssl-{0}.conf"
                  .format(ee_domain_name))
    else:
        if os.path.isfile("/etc/nginx/conf.d/force-ssl-{0}.conf"
                          .format(ee_domain_name)):
            EEFileUtils.mvfile(self,
                               "/etc/nginx/conf.d/force-ssl-{0}.conf"
                               .format(ee_domain_name),
                               "/etc/nginx/conf.d/force-ssl-{0}.conf.disabled"
                               .format(ee_domain_name))
        Log.info(self, "Disabled HTTPS Force Redirection for Site "
                 " http://{0}".format(ee_domain_name))
IOError
dataset/ETHPy150Open EasyEngine/easyengine/ee/cli/plugins/site_functions.py/httpsRedirect
4,865
def _get_events_from_cache(self, events, check_redacted, get_prev_content,
                               allow_rejected):
        event_map = {}

        for event_id in events:
            try:
                ret = self._get_event_cache.get(
                    (event_id, check_redacted, get_prev_content,)
                )

                if allow_rejected or not ret.rejected_reason:
                    event_map[event_id] = ret
                else:
                    event_map[event_id] = None
            except __HOLE__:
                pass

        return event_map
KeyError
dataset/ETHPy150Open matrix-org/synapse/synapse/storage/events.py/EventsStore._get_events_from_cache
4,866
@defer.inlineCallbacks
def _background_reindex_origin_server_ts(self, progress, batch_size):
        target_min_stream_id = progress["target_min_stream_id_inclusive"]
        max_stream_id = progress["max_stream_id_exclusive"]
        rows_inserted = progress.get("rows_inserted", 0)

        INSERT_CLUMP_SIZE = 1000

        def reindex_search_txn(txn):
            sql = (
                "SELECT stream_ordering, event_id FROM events"
                " WHERE ? <= stream_ordering AND stream_ordering < ?"
                " ORDER BY stream_ordering DESC"
                " LIMIT ?"
            )

            txn.execute(sql, (target_min_stream_id, max_stream_id, batch_size))

            rows = txn.fetchall()
            if not rows:
                return 0

            min_stream_id = rows[-1][0]
            event_ids = [row[1] for row in rows]

            events = self._get_events_txn(txn, event_ids)

            rows = []
            for event in events:
                try:
                    event_id = event.event_id
                    origin_server_ts = event.origin_server_ts
                except (KeyError, __HOLE__):
                    # If the event is missing a necessary field then
                    # skip over it.
                    continue

                rows.append((origin_server_ts, event_id))

            sql = (
                "UPDATE events SET origin_server_ts = ? WHERE event_id = ?"
            )

            for index in range(0, len(rows), INSERT_CLUMP_SIZE):
                clump = rows[index:index + INSERT_CLUMP_SIZE]
                txn.executemany(sql, clump)

            progress = {
                "target_min_stream_id_inclusive": target_min_stream_id,
                "max_stream_id_exclusive": min_stream_id,
                "rows_inserted": rows_inserted + len(rows)
            }

            self._background_update_progress_txn(
                txn, self.EVENT_ORIGIN_SERVER_TS_NAME, progress
            )

            return len(rows)

        result = yield self.runInteraction(
            self.EVENT_ORIGIN_SERVER_TS_NAME, reindex_search_txn
        )

        if not result:
            yield self._end_background_update(self.EVENT_ORIGIN_SERVER_TS_NAME)

        defer.returnValue(result)
AttributeError
dataset/ETHPy150Open matrix-org/synapse/synapse/storage/events.py/EventsStore._background_reindex_origin_server_ts
4,867
def _set_special_baudrate(self, baudrate):
        # right size is 44 on x86_64, allow for some growth
        buf = array.array('i', [0] * 64)
        try:
            # get serial_struct
            fcntl.ioctl(self.fd, TCGETS2, buf)
            # set custom speed
            buf[2] &= ~termios.CBAUD
            buf[2] |= BOTHER
            buf[9] = buf[10] = baudrate

            # set serial_struct
            fcntl.ioctl(self.fd, TCSETS2, buf)
        except __HOLE__ as e:
            raise ValueError('Failed to set custom baud rate ({}): {}'.format(baudrate, e))
IOError
dataset/ETHPy150Open pyserial/pyserial/serial/serialposix.py/PlatformSpecific._set_special_baudrate
4,868
def _set_rs485_mode(self, rs485_settings):
        buf = array.array('i', [0] * 8)  # flags, delaytx, delayrx, padding
        try:
            fcntl.ioctl(self.fd, TIOCGRS485, buf)
            buf[0] |= SER_RS485_ENABLED
            if rs485_settings is not None:
                if rs485_settings.loopback:
                    buf[0] |= SER_RS485_RX_DURING_TX
                else:
                    buf[0] &= ~SER_RS485_RX_DURING_TX
                if rs485_settings.rts_level_for_tx:
                    buf[0] |= SER_RS485_RTS_ON_SEND
                else:
                    buf[0] &= ~SER_RS485_RTS_ON_SEND
                if rs485_settings.rts_level_for_rx:
                    buf[0] |= SER_RS485_RTS_AFTER_SEND
                else:
                    buf[0] &= ~SER_RS485_RTS_AFTER_SEND
                buf[1] = int(rs485_settings.delay_before_tx * 1000)
                buf[2] = int(rs485_settings.delay_before_rx * 1000)
            else:
                buf[0] = 0  # clear SER_RS485_ENABLED
            fcntl.ioctl(self.fd, TIOCSRS485, buf)
        except __HOLE__ as e:
            raise ValueError('Failed to set RS485 mode: {}'.format(e))
IOError
dataset/ETHPy150Open pyserial/pyserial/serial/serialposix.py/PlatformSpecific._set_rs485_mode
4,869
def open(self):
        """\
        Open port with current settings. This may throw a SerialException
        if the port cannot be opened."""
        if self._port is None:
            raise SerialException("Port must be configured before it can be used.")
        if self.is_open:
            raise SerialException("Port is already open.")
        self.fd = None
        # open
        try:
            self.fd = os.open(self.portstr, os.O_RDWR | os.O_NOCTTY | os.O_NONBLOCK)
        except __HOLE__ as msg:
            self.fd = None
            raise SerialException(msg.errno, "could not open port {}: {}".format(self._port, msg))
        #~ fcntl.fcntl(self.fd, fcntl.F_SETFL, 0)  # set blocking

        try:
            self._reconfigure_port(force_update=True)
        except:
            try:
                os.close(self.fd)
            except:
                # ignore any exception when closing the port
                # also to keep original exception that happened when setting up
                pass
            self.fd = None
            raise
        else:
            self.is_open = True
        if not self._dsrdtr:
            self._update_dtr_state()
        if not self._rtscts:
            self._update_rts_state()
        self.reset_input_buffer()
OSError
dataset/ETHPy150Open pyserial/pyserial/serial/serialposix.py/Serial.open
4,870
def _reconfigure_port(self, force_update=False):
        """Set communication parameters on opened port."""
        if self.fd is None:
            raise SerialException("Can only operate on a valid file descriptor")
        custom_baud = None

        vmin = vtime = 0                # timeout is done via select
        if self._inter_byte_timeout is not None:
            vmin = 1
            vtime = int(self._inter_byte_timeout * 10)
        try:
            orig_attr = termios.tcgetattr(self.fd)
            iflag, oflag, cflag, lflag, ispeed, ospeed, cc = orig_attr
        except termios.error as msg:      # if a port is nonexistent but has a /dev file, it'll fail here
            raise SerialException("Could not configure port: {}".format(msg))
        # set up raw mode / no echo / binary
        cflag |= (termios.CLOCAL | termios.CREAD)
        lflag &= ~(termios.ICANON | termios.ECHO | termios.ECHOE |
                   termios.ECHOK | termios.ECHONL |
                   termios.ISIG | termios.IEXTEN)  # |termios.ECHOPRT
        for flag in ('ECHOCTL', 'ECHOKE'):  # netbsd workaround for Erk
            if hasattr(termios, flag):
                lflag &= ~getattr(termios, flag)

        oflag &= ~(termios.OPOST | termios.ONLCR | termios.OCRNL)
        iflag &= ~(termios.INLCR | termios.IGNCR | termios.ICRNL | termios.IGNBRK)
        if hasattr(termios, 'IUCLC'):
            iflag &= ~termios.IUCLC
        if hasattr(termios, 'PARMRK'):
            iflag &= ~termios.PARMRK

        # setup baud rate
        try:
            ispeed = ospeed = getattr(termios, 'B{}'.format(self._baudrate))
        except AttributeError:
            try:
                ispeed = ospeed = self.BAUDRATE_CONSTANTS[self._baudrate]
            except KeyError:
                #~ raise ValueError('Invalid baud rate: %r' % self._baudrate)
                # may need custom baud rate, it isn't in our list.
                ispeed = ospeed = getattr(termios, 'B38400')
                try:
                    custom_baud = int(self._baudrate)  # store for later
                except __HOLE__:
                    raise ValueError('Invalid baud rate: {!r}'.format(self._baudrate))
                else:
                    if custom_baud < 0:
                        raise ValueError('Invalid baud rate: {!r}'.format(self._baudrate))

        # setup char len
        cflag &= ~termios.CSIZE
        if self._bytesize == 8:
            cflag |= termios.CS8
        elif self._bytesize == 7:
            cflag |= termios.CS7
        elif self._bytesize == 6:
            cflag |= termios.CS6
        elif self._bytesize == 5:
            cflag |= termios.CS5
        else:
            raise ValueError('Invalid char len: {!r}'.format(self._bytesize))
        # setup stop bits
        if self._stopbits == serial.STOPBITS_ONE:
            cflag &= ~(termios.CSTOPB)
        elif self._stopbits == serial.STOPBITS_ONE_POINT_FIVE:
            cflag |= (termios.CSTOPB)  # XXX same as TWO.. there is no POSIX support for 1.5
        elif self._stopbits == serial.STOPBITS_TWO:
            cflag |= (termios.CSTOPB)
        else:
            raise ValueError('Invalid stop bit specification: {!r}'.format(self._stopbits))
        # setup parity
        iflag &= ~(termios.INPCK | termios.ISTRIP)
        if self._parity == serial.PARITY_NONE:
            cflag &= ~(termios.PARENB | termios.PARODD)
        elif self._parity == serial.PARITY_EVEN:
            cflag &= ~(termios.PARODD)
            cflag |= (termios.PARENB)
        elif self._parity == serial.PARITY_ODD:
            cflag |= (termios.PARENB | termios.PARODD)
        elif self._parity == serial.PARITY_MARK and plat[:5] == 'linux':
            cflag |= (termios.PARENB | CMSPAR | termios.PARODD)
        elif self._parity == serial.PARITY_SPACE and plat[:5] == 'linux':
            cflag |= (termios.PARENB | CMSPAR)
            cflag &= ~(termios.PARODD)
        else:
            raise ValueError('Invalid parity: {!r}'.format(self._parity))
        # setup flow control
        # xonxoff
        if hasattr(termios, 'IXANY'):
            if self._xonxoff:
                iflag |= (termios.IXON | termios.IXOFF)  # |termios.IXANY)
            else:
                iflag &= ~(termios.IXON | termios.IXOFF | termios.IXANY)
        else:
            if self._xonxoff:
                iflag |= (termios.IXON | termios.IXOFF)
            else:
                iflag &= ~(termios.IXON | termios.IXOFF)
        # rtscts
        if hasattr(termios, 'CRTSCTS'):
            if self._rtscts:
                cflag |= (termios.CRTSCTS)
            else:
                cflag &= ~(termios.CRTSCTS)
        elif hasattr(termios, 'CNEW_RTSCTS'):   # try it with alternate constant name
            if self._rtscts:
                cflag |= (termios.CNEW_RTSCTS)
            else:
                cflag &= ~(termios.CNEW_RTSCTS)
        # XXX should there be a warning if setting up rtscts (and xonxoff etc) fails??

        # buffer
        # vmin "minimal number of characters to be read. 0 for non blocking"
        if vmin < 0 or vmin > 255:
            raise ValueError('Invalid vmin: {!r}'.format(vmin))
        cc[termios.VMIN] = vmin
        # vtime
        if vtime < 0 or vtime > 255:
            raise ValueError('Invalid vtime: {!r}'.format(vtime))
        cc[termios.VTIME] = vtime
        # activate settings
        if force_update or [iflag, oflag, cflag, lflag, ispeed, ospeed, cc] != orig_attr:
            termios.tcsetattr(
                self.fd,
                termios.TCSANOW,
                [iflag, oflag, cflag, lflag, ispeed, ospeed, cc])

        # apply custom baud rate, if any
        if custom_baud is not None:
            self._set_special_baudrate(custom_baud)

        if self._rs485_mode is not None:
            self._set_rs485_mode(self._rs485_mode)
ValueError
dataset/ETHPy150Open pyserial/pyserial/serial/serialposix.py/Serial._reconfigure_port
4,871
def read(self, size=1):
        """\
        Read size bytes from the serial port. If a timeout is set it may
        return less characters as requested. With no timeout it will block
        until the requested number of bytes is read.
        """
        if not self.is_open:
            raise portNotOpenError
        read = bytearray()
        timeout = self._timeout
        while len(read) < size:
            try:
                start_time = time.time()
                ready, _, _ = select.select([self.fd], [], [], timeout)
                # If select was used with a timeout, and the timeout occurs, it
                # returns with empty lists -> thus abort read operation.
                # For timeout == 0 (non-blocking operation) also abort when
                # there is nothing to read.
                if not ready:
                    break   # timeout
                buf = os.read(self.fd, size - len(read))
                # read should always return some data as select reported it was
                # ready to read when we get to this point.
                if not buf:
                    # Disconnected devices, at least on Linux, show the
                    # behavior that they are always ready to read immediately
                    # but reading returns nothing.
                    raise SerialException(
                        'device reports readiness to read but returned no data '
                        '(device disconnected or multiple access on port?)')
                read.extend(buf)
                if timeout is not None:
                    timeout -= time.time() - start_time
                    if timeout <= 0:
                        break
            except __HOLE__ as e:
                # this is for Python 3.x where select.error is a subclass of
                # OSError ignore EAGAIN errors. all other errors are shown
                if e.errno != errno.EAGAIN:
                    raise SerialException('read failed: {}'.format(e))
            except select.error as e:
                # this is for Python 2.x
                # ignore EAGAIN errors. all other errors are shown
                # see also http://www.python.org/dev/peps/pep-3151/#select
                if e[0] != errno.EAGAIN:
                    raise SerialException('read failed: {}'.format(e))
        return bytes(read)
OSError
dataset/ETHPy150Open pyserial/pyserial/serial/serialposix.py/Serial.read
4,872
def write(self, data):
        """Output the given byte string over the serial port."""
        if not self.is_open:
            raise portNotOpenError
        d = to_bytes(data)
        tx_len = len(d)
        timeout = self._write_timeout
        if timeout and timeout > 0:  # Avoid comparing None with zero
            timeout += time.time()
        while tx_len > 0:
            try:
                n = os.write(self.fd, d)
                if timeout == 0:
                    # Zero timeout indicates non-blocking - simply return the
                    # number of bytes of data actually written
                    return n
                elif timeout and timeout > 0:  # Avoid comparing None with zero
                    # when timeout is set, use select to wait for being ready
                    # with the time left as timeout
                    timeleft = timeout - time.time()
                    if timeleft < 0:
                        raise writeTimeoutError
                    _, ready, _ = select.select([], [self.fd], [], timeleft)
                    if not ready:
                        raise writeTimeoutError
                else:
                    assert timeout is None
                    # wait for write operation
                    _, ready, _ = select.select([], [self.fd], [], None)
                    if not ready:
                        raise SerialException('write failed (select)')
                d = d[n:]
                tx_len -= n
            except SerialException:
                raise
            except __HOLE__ as v:
                if v.errno != errno.EAGAIN:
                    raise SerialException('write failed: {}'.format(v))
                # still calculate and check timeout
                if timeout and timeout - time.time() < 0:
                    raise writeTimeoutError
        return len(data)
OSError
dataset/ETHPy150Open pyserial/pyserial/serial/serialposix.py/Serial.write
4,873
@register.filter
def restructuredparts(value, **overrides):
    """return the restructured text parts"""
    try:
        from docutils.core import publish_parts
    except __HOLE__:
        if settings.DEBUG:
            raise template.TemplateSyntaxError("Error in {% restructuredtext %} filter: The Python docutils library isn't installed.")
        return value
    else:
        docutils_settings = dict(getattr(settings, "RESTRUCTUREDTEXT_FILTER_SETTINGS", {}))
        docutils_settings.update(overrides)
        if "halt_level" not in docutils_settings:
            docutils_settings["halt_level"] = 6
        return publish_parts(source=value, writer_name="html4css1", settings_overrides=docutils_settings)
ImportError
dataset/ETHPy150Open amarandon/smeuhsocial/apps/blog/templatetags/restructuredtext.py/restructuredparts
4,874
def tearDown(self):
        """clear tests each time
        """
        try:
            shutil.rmtree(self.anima_path)
        except __HOLE__:
            pass

        try:
            os.environ.pop('ANIMAPATH')
        except KeyError:
            pass
OSError
dataset/ETHPy150Open eoyilmaz/anima/tests/env/test_environment_variables.py/EnvironmentVariableSetupTestCase.tearDown
4,875
def test_anima_path_env_variable_does_not_exists(self):
        """testing if a KeyError will be raised if ANIMAPATH env variable
        does not exist
        """
        try:
            os.environ.pop("ANIMAPATH")
        except __HOLE__:
            pass

        self.assertRaises(KeyError, discover_env_vars)
KeyError
dataset/ETHPy150Open eoyilmaz/anima/tests/env/test_environment_variables.py/EnvironmentVariableSetupTestCase.test_anima_path_env_variable_does_not_exists
4,876
def test_env_variables_is_working_properly_in_linux(self):
        """testing if environment variables are properly defined by using the
        os name
        """
        global platform_name
        platform_name = "Linux"

        try:
            os.environ.pop('ENV1')
        except __HOLE__:
            pass

        os.environ['ENV1'] = '/Test/Value'

        discover_env_vars('test_env')

        self.assertEqual(
            '/Test/Value:/mnt/Z/some/path1:/mnt/Z/some/path2:'
            '/mnt/Z/some/other/path1:/mnt/Z/some/other/path2',
            os.environ['ENV1']
        )
KeyError
dataset/ETHPy150Open eoyilmaz/anima/tests/env/test_environment_variables.py/EnvironmentVariableSetupTestCase.test_env_variables_is_working_properly_in_linux
4,877
def test_env_variables_is_working_properly_in_windows(self):
        """testing if environment variables are properly defined by using the
        os name
        """
        global platform_name
        platform_name = "Windows"

        try:
            os.environ.pop('ENV1')
        except __HOLE__:
            pass

        os.environ['ENV1'] = 'Z:/Test/Value'

        discover_env_vars('test_env')

        self.assertEqual(
            'Z:/Test/Value:Z:/some/path1:Z:/some/path2:'
            'Z:/some/other/path1:Z:/some/other/path2',
            os.environ['ENV1']
        )
KeyError
dataset/ETHPy150Open eoyilmaz/anima/tests/env/test_environment_variables.py/EnvironmentVariableSetupTestCase.test_env_variables_is_working_properly_in_windows
4,878
def test_env_variables_is_working_properly_in_osx(self):
        """testing if environment variables are properly defined by using the
        os name
        """
        global platform_name
        platform_name = "Darwin"

        self.assertEqual(
            "Darwin",
            platform.system()
        )

        try:
            os.environ.pop('ENV1')
        except __HOLE__:
            pass

        os.environ['ENV1'] = '/Volumes/Z/Test/Value'

        discover_env_vars('test_env')

        self.assertEqual(
            '/Volumes/Z/Test/Value:/Volumes/Z/some/path1:/Volumes/Z/some/path2:'
            '/Volumes/Z/some/other/path1:/Volumes/Z/some/other/path2',
            os.environ['ENV1']
        )

        '/Volumes/Z/Test/Value:/Volumes/Z/some/path1:/Volumes/Z/some/path2:/Volumes/Z/some/other/path1:/Volumes/Z/some/other/path2'
        '/Volumes/Z/Test/Value:/mnt/Z/some/path1:/mnt/Z/some/path2:/mnt/Z/some/other/path1:/mnt/Z/some/other/path2'
KeyError
dataset/ETHPy150Open eoyilmaz/anima/tests/env/test_environment_variables.py/EnvironmentVariableSetupTestCase.test_env_variables_is_working_properly_in_osx
4,879
def search_userid(username):
    headers = {'X-Requested-With': 'XMLHttpRequest',
               'Origin': FORUM_URL,
               'Referer': FORUM_URL + '/search.php'
               }
    data = {'securitytoken': 'guest',
            'do': 'usersearch',
            'fragment': username
            }
    response = requests.post(AJAX_USERSEARCH_URL, data, headers=headers)
    root = etree.fromstring(response.content)

    try:
        found_name = root[0].text
    except __HOLE__:
        raise UserNotFoundError(username)

    # The request is basically a search, can return multiple userids
    # for users starting with username. Make sure we got the right one!
    if found_name.upper() != username.upper():
        exc = AmbiguousUserNameError(username)
        # attach the extra users to the exception
        ExtraUser = namedtuple('ExtraUser', 'name, id')
        exc.users = tuple(ExtraUser(name=child.text, id=child.attrib['userid'])
                          for child in root)
        raise exc

    userid = root[0].attrib['userid']
    # userid is str on Python2, we need to decode to make it unicode
    return userid.decode('utf-8')
IndexError
dataset/ETHPy150Open pokerregion/poker/poker/website/twoplustwo.py/search_userid
4,880
def _parse_attributes(self, root):
        for attname, xpath, type_ in self._attributes:
            if type_ != tuple:
                try:
                    setattr(self, attname, type_(root.xpath(xpath)[0]))
                except __HOLE__:
                    setattr(self, attname, None)
            else:
                setattr(self, attname, type_(root.xpath(xpath)))
IndexError
dataset/ETHPy150Open pokerregion/poker/poker/website/twoplustwo.py/ForumMember._parse_attributes
4,881
def _parse_last_activity(self, root, tz):
        try:
            li = root.xpath('//div[@id="collapseobj_stats"]/div/fieldset[2]/ul/li[1]')[0]
            date_str = li[0].tail.strip()
            time_str = li[1].text.strip()
            self.last_activity = self._parse_date(date_str + ' ' + time_str, tz)
        except __HOLE__:
            self.last_activity = None
IndexError
dataset/ETHPy150Open pokerregion/poker/poker/website/twoplustwo.py/ForumMember._parse_last_activity
4,882
def _parse_join_date(self, root):
        ul = root.xpath('//div[@id="collapseobj_stats"]/div/fieldset[2]/ul')[0]
        try:
            join_date = ul.xpath('li[2]/text()')[0]
        except __HOLE__:
            # not everybody has a last activity field.
            # in this case, it's the first li element, not the second
            join_date = ul.xpath('li[1]/text()')[0]
        join_date = join_date.strip()
        self.join_date = datetime.strptime(join_date, '%m-%d-%Y').date()
IndexError
dataset/ETHPy150Open pokerregion/poker/poker/website/twoplustwo.py/ForumMember._parse_join_date
4,883
@staticmethod
def _parse_date(date_str, tz):
        try:
            dt = datetime.strptime(date_str.strip(), '%m-%d-%Y %I:%M %p')
            return dt.replace(tzinfo=tz).astimezone(UTC)
        except __HOLE__:
            # in case like "Yesterday 3:30 PM" or dates like that.
            # calculates based on sourceTime. tz is 2p2 forum timezone
            source = datetime.now(UTC).astimezone(tz)
            dt, pt = parsedatetime.Calendar().parseDT(date_str, tzinfo=tz,
                                                      sourceTime=source)
            # parsed as a C{datetime}, means that parsing was successful
            if pt == 3:
                return dt.astimezone(UTC)
            raise ValueError('Could not parse date: {}'.format(date_str))
ValueError
dataset/ETHPy150Open pokerregion/poker/poker/website/twoplustwo.py/ForumMember._parse_date
4,884
def write(self, target, message):
        self.counter += 1
        if target == '':
            target = 'app'
        try:
            target_idx = self.targets.index(target)
        except __HOLE__:
            target_idx = -1

        # add new target
        if target_idx < 0:
            target_idx = self.addTarget(target)

        target_row = target_idx + 1
        old_text = self.getHTML(target_row, 1)
        log_line = "%d: " % self.counter + message
        if old_text == '&nbsp;':
            new_text = log_line
        else:
            new_text = old_text + "<br>" + log_line
        self.setHTML(target_row, 1, new_text)
ValueError
dataset/ETHPy150Open anandology/pyjamas/examples/mail/Logger.py/LoggerCls.write
4,885
@qc(1)
def get_host_cpu_mhz_exception():
    cpu_mhz = 1
    total = 1.
    prev_total = 0.
    busy = 1.
    prev_busy = 2.
    with MockTransaction:
        expect(collector).get_host_cpu_time(). \
            and_return((total, busy)).once()
        try:
            collector.get_host_cpu_mhz(cpu_mhz, prev_total, prev_busy)
            assert False
        except __HOLE__:
            assert True
ValueError
dataset/ETHPy150Open beloglazov/openstack-neat/tests/locals/test_collector.py/Collector.get_host_cpu_mhz_exception
4,886
def tearDown(self):
        from djangocms_installer.config.settings import MIGRATIONS_CHECK_MODULES

        if self.verbose:
            print('deactivating virtualenv', self.virtualenv_dir)
        if os.path.exists(SYSTEM_ACTIVATE):
            try:
                execfile(SYSTEM_ACTIVATE, dict(__file__=SYSTEM_ACTIVATE))
            except __HOLE__:
                with open(SYSTEM_ACTIVATE) as f:
                    code = compile(f.read(), SYSTEM_ACTIVATE, 'exec')
                    exec(code, dict(__file__=SYSTEM_ACTIVATE))
            sys.executable = os.path.join(os.path.dirname(SYSTEM_ACTIVATE), 'python')
        super(IsolatedTestClass, self).tearDown()
        modules = copy(sys.modules)
        for module in modules:
            if 'django' in module:
                del sys.modules[module]
NameError
dataset/ETHPy150Open nephila/djangocms-installer/tests/base.py/IsolatedTestClass.tearDown
4,887
def setUp(self):
        super(IsolatedTestClass, self).setUp()
        if os.path.exists(SYSTEM_ACTIVATE):
            subprocess.check_call(['virtualenv', '-q',
                                   '--python=%s' % sys.executable,
                                   self.virtualenv_dir])
            activate_temp = os.path.join(self.virtualenv_dir, 'bin', 'activate_this.py')
            try:
                execfile(activate_temp, dict(__file__=activate_temp))
            except __HOLE__:
                with open(activate_temp) as f:
                    code = compile(f.read(), activate_temp, 'exec')
                    exec(code, dict(__file__=activate_temp))
            if self.verbose:
                print('activating virtualenv', self.virtualenv_dir)
            sys.executable = os.path.join(self.virtualenv_dir, 'bin', 'python')
NameError
dataset/ETHPy150Open nephila/djangocms-installer/tests/base.py/IsolatedTestClass.setUp
4,888
def mkdir_p(path):
    '''Make directory and all subdirectories if they do not exist'''
    try:
        os.makedirs(os.path.abspath(path))
    except __HOLE__ as exc:
        if exc.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            raise
OSError
dataset/ETHPy150Open blitzrk/sublime_libsass/libsass/pathutils.py/mkdir_p
4,889
def is_json(self, myjson):
        try:
            json.loads(myjson)
        except __HOLE__ as e:
            print(e)
            return False
        return True
ValueError
dataset/ETHPy150Open twilio/twilio-python/tests/task_router/test_workflow_config.py/WorkflowConfigTest.is_json
4,890
def do_oauth2_authorization(self, session):
        """ OAuth2. More info: https://vk.com/dev/auth_mobile
        """
        logger.info('Doing oauth2')
        auth_data = {
            'client_id': self.app_id,
            'display': 'mobile',
            'response_type': 'token',
            'scope': self.scope,
            'v': self.api_version
        }
        response = session.post(url=self.AUTHORIZE_URL,
                                data=stringify_values(auth_data))
        url_query_params = parse_url_query_params(response.url)

        if 'expires_in' in url_query_params:
            logger.info('Token will be expired in %s sec.' %
                        url_query_params['expires_in'])
        if 'access_token' in url_query_params:
            return url_query_params

        # Permissions are needed
        logger.info('Getting permissions')
        action_url = parse_form_action_url(response.text)
        logger.debug('Response form action: %s', action_url)

        if action_url:
            response = session.get(action_url)
            url_query_params = parse_url_query_params(response.url)
            return url_query_params

        try:
            response_json = response.json()
        except __HOLE__:  # not JSON in response
            error_message = 'OAuth2 grant access error'
            logger.error(response.text)
        else:
            error_message = 'VK error: [{}] {}'.format(
                response_json['error'],
                response_json['error_description'])
        logger.error('Permissions obtained')
        raise VkAuthError(error_message)
ValueError
dataset/ETHPy150Open prawn-cake/vk-requests/vk_requests/auth.py/AuthAPI.do_oauth2_authorization
4,891
def memoize(call):
    """Automatically memoize a callable."""

    results = {}

    def wrapper(*args, **kwargs):
        key = (tuple(args), tuple(sorted(kwargs.iteritems())))

        try:
            return results[key]
        except __HOLE__:
            results[key] = result = call(*args, **kwargs)
            return result

    return wrapper
KeyError
dataset/ETHPy150Open borg-project/borg/borg/util.py/memoize
4,892
def get_file_cache():
    try:
        f = open(file_path, 'r')
        data = json.load(f)
        f.close()
        return data if data else {}
    except __HOLE__:
        return {}
IOError
dataset/ETHPy150Open ringcentral/ringcentral-python/demo.py/get_file_cache
4,893
def main():
    cache = get_file_cache()

    # Create SDK instance
    sdk = SDK(APP_KEY, APP_SECRET, SERVER)
    platform = sdk.platform()

    # Set cached authentication data
    platform.auth().set_data(cache)

    # Check authentication
    try:
        platform.is_authorized()
        print('Authorized already by cached data')
    except Exception as e:
        platform.login(USERNAME, EXTENSION, PASSWORD)
        print('Authorized by credentials')

    # Perform refresh by force
    platform.refresh()
    print('Refreshed')

    # Simple GET
    response = platform.get('/account/~/extension/~')
    user = response.json()
    user_id = str(user.id)
    print('User loaded ' + user.name + ' (' + user_id + ')')
    print('Headers ' + str(response.response().headers))

    # Multipart response
    try:
        multipart_response = platform.get('/account/~/extension/' + user_id + ',' + user_id + '/presence').multipart()
        print 'Multipart 1\n' + str(multipart_response[0].json_dict())
        print 'Multipart 2\n' + str(multipart_response[1].json_dict())
    except ApiException as e:
        print 'Cannot load multipart'
        print 'URL ' + e.api_response().request().url
        print 'Response' + str(e.api_response().json())

    # Pubnub notifications example
    def on_message(msg):
        print(msg)

    def pubnub():
        try:
            s = sdk.create_subscription()
            s.add_events(['/account/~/extension/~/message-store'])
            s.on(Events.notification, on_message)
            s.register()
            while True:
                sleep(0.1)
        except __HOLE__:
            print("Pubnub listener stopped...")

    p = Process(target=pubnub)
    try:
        p.start()
    except KeyboardInterrupt:
        p.terminate()
        print("Stopped by User")

    set_file_cache(platform.auth().data())
    print("Authentication data has been cached")
    print("Wait for notification...")
KeyboardInterrupt
dataset/ETHPy150Open ringcentral/ringcentral-python/demo.py/main
4,894
def findall(self, element):
        nodeset = [element]
        index = 0
        while 1:
            try:
                path = self.path[index]
                index = index + 1
            except IndexError:
                return nodeset
            set = []
            if isinstance(path, xpath_descendant_or_self):
                try:
                    tag = self.path[index]
                    if not isinstance(tag, type("")):
                        tag = None
                    else:
                        index = index + 1
                except __HOLE__:
                    tag = None # invalid path
                for node in nodeset:
                    new = list(node.getiterator(tag))
                    if new and new[0] is node:
                        set.extend(new[1:])
                    else:
                        set.extend(new)
            else:
                for node in nodeset:
                    for node in node:
                        if path == "*" or node.tag == path:
                            set.append(node)
            if not set:
                return []
            nodeset = set
IndexError
dataset/ETHPy150Open babble/babble/include/jython/Lib/xml/etree/ElementPath.py/Path.findall
4,895
def preinit():
    "Load standard file format drivers."

    global _initialized
    if _initialized >= 1:
        return

    try:
        import BmpImagePlugin
    except __HOLE__:
        pass
    try:
        import GifImagePlugin
    except ImportError:
        pass
    try:
        import JpegImagePlugin
    except ImportError:
        pass
    try:
        import PpmImagePlugin
    except ImportError:
        pass
    try:
        import PngImagePlugin
    except ImportError:
        pass
#   try:
#       import TiffImagePlugin
#   except ImportError:
#       pass

    _initialized = 1

##
# Explicitly initializes the Python Imaging Library. This function
# loads all available file format drivers.
ImportError
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/Image.py/preinit
4,896
def init():
    "Load all file format drivers."

    global _initialized
    if _initialized >= 2:
        return 0

    visited = {}

    directories = sys.path

    try:
        directories = directories + [os.path.dirname(__file__)]
    except __HOLE__:
        pass

    # only check directories (including current, if present in the path)
    for directory in filter(isDirectory, directories):
        fullpath = os.path.abspath(directory)
        if visited.has_key(fullpath):
            continue
        for file in os.listdir(directory):
            if file[-14:] == "ImagePlugin.py":
                f, e = os.path.splitext(file)
                try:
                    sys.path.insert(0, directory)
                    try:
                        __import__(f, globals(), locals(), [])
                    finally:
                        del sys.path[0]
                except ImportError:
                    if DEBUG:
                        print "Image: failed to import",
                        print f, ":", sys.exc_value
        visited[fullpath] = None

    if OPEN or SAVE:
        _initialized = 2
        return 1

# --------------------------------------------------------------------
# Codec factories (used by tostring/fromstring and ImageFile.load)
NameError
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/Image.py/init
4,897
def _getdecoder(mode, decoder_name, args, extra=()):

    # tweak arguments
    if args is None:
        args = ()
    elif not isTupleType(args):
        args = (args,)

    try:
        # get decoder
        decoder = getattr(core, decoder_name + "_decoder")
        # print decoder, (mode,) + args + extra
        return apply(decoder, (mode,) + args + extra)
    except __HOLE__:
        raise IOError("decoder %s not available" % decoder_name)
AttributeError
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/Image.py/_getdecoder
4,898
def _getencoder(mode, encoder_name, args, extra=()):

    # tweak arguments
    if args is None:
        args = ()
    elif not isTupleType(args):
        args = (args,)

    try:
        # get encoder
        encoder = getattr(core, encoder_name + "_encoder")
        # print encoder, (mode,) + args + extra
        return apply(encoder, (mode,) + args + extra)
    except __HOLE__:
        raise IOError("encoder %s not available" % encoder_name)

# --------------------------------------------------------------------
# Simple expression analyzer
AttributeError
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/Image.py/_getencoder
4,899
def _getscaleoffset(expr):
    stub = ["stub"]
    data = expr(_E(stub)).data
    try:
        (a, b, c) = data # simplified syntax
        if (a is stub and b == "__mul__" and isNumberType(c)):
            return c, 0.0
        if (a is stub and b == "__add__" and isNumberType(c)):
            return 1.0, c
    except TypeError: pass
    try:
        ((a, b, c), d, e) = data # full syntax
        if (a is stub and b == "__mul__" and isNumberType(c) and
            d == "__add__" and isNumberType(e)):
            return c, e
    except __HOLE__: pass
    raise ValueError("illegal expression")

# --------------------------------------------------------------------
# Implementation wrapper

##
# This class represents an image object. To create Image objects, use
# the appropriate factory functions. There's hardly ever any reason
# to call the Image constructor directly.
#
# @see #open
# @see #new
# @see #fromstring
TypeError
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/Image.py/_getscaleoffset