Column        Type            Range
Unnamed: 0    int64           0 .. 10k
function      stringlengths   79 .. 138k
label         stringclasses   20 values
info          stringlengths   42 .. 261
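Each record below is one dataset row in four parts: the row index, the function body with the masked exception type shown as __HOLE__, the ground-truth label, and the info path of the source file. As a minimal sketch of how a dump with this schema might be loaded and sanity-checked with pandas (the file name 'exceptions.csv' is a hypothetical placeholder; only the four columns described above come from the dataset):

    # Minimal loading sketch. Assumption: the dump is saved as 'exceptions.csv'
    # (hypothetical name); the four columns match the schema described above.
    import pandas as pd

    df = pd.read_csv("exceptions.csv")

    # Each function body should contain the mask token exactly once, and the
    # label column should hold at most the 20 exception classes noted above.
    assert (df["function"].str.count("__HOLE__") == 1).all()
    assert df["label"].nunique() <= 20

    # Inspect one record: the label names the exception hidden behind __HOLE__.
    row = df.iloc[0]
    print(row["info"])
    print(row["label"])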
5,100
def create_connect_args(self, url):
    opts = url.translate_connect_args(database='db', username='user',
                                      password='passwd')
    opts.update(url.query)

    util.coerce_kw_type(opts, 'compress', bool)
    util.coerce_kw_type(opts, 'connect_timeout', int)
    util.coerce_kw_type(opts, 'read_timeout', int)
    util.coerce_kw_type(opts, 'client_flag', int)
    util.coerce_kw_type(opts, 'local_infile', int)
    # Note: using either of the below will cause all strings to be
    # returned as Unicode, both in raw SQL operations and with column
    # types like String and MSString.
    util.coerce_kw_type(opts, 'use_unicode', bool)
    util.coerce_kw_type(opts, 'charset', str)

    # Rich values 'cursorclass' and 'conv' are not supported via
    # query string.

    ssl = {}
    keys = ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher']
    for key in keys:
        if key in opts:
            ssl[key[4:]] = opts[key]
            util.coerce_kw_type(ssl, key[4:], str)
            del opts[key]
    if ssl:
        opts['ssl'] = ssl

    # FOUND_ROWS must be set in CLIENT_FLAGS to enable
    # supports_sane_rowcount.
    client_flag = opts.get('client_flag', 0)
    if self.dbapi is not None:
        try:
            CLIENT_FLAGS = __import__(
                self.dbapi.__name__ + '.constants.CLIENT'
            ).constants.CLIENT
            client_flag |= CLIENT_FLAGS.FOUND_ROWS
        except (__HOLE__, ImportError):
            self.supports_sane_rowcount = False
    opts['client_flag'] = client_flag
    return [[], opts]
AttributeError
dataset/ETHPy150Open zzzeek/sqlalchemy/lib/sqlalchemy/dialects/mysql/mysqldb.py/MySQLDialect_mysqldb.create_connect_args
5,101
def _get_server_version_info(self, connection):
    dbapi_con = connection.connection
    version = []
    r = re.compile('[.\-]')
    for n in r.split(dbapi_con.get_server_info()):
        try:
            version.append(int(n))
        except __HOLE__:
            version.append(n)
    return tuple(version)
ValueError
dataset/ETHPy150Open zzzeek/sqlalchemy/lib/sqlalchemy/dialects/mysql/mysqldb.py/MySQLDialect_mysqldb._get_server_version_info
5,102
def _detect_charset(self, connection):
    """Sniff out the character set in use for connection results."""
    try:
        # note: the SQL here would be
        # "SHOW VARIABLES LIKE 'character_set%%'"
        cset_name = connection.connection.character_set_name
    except __HOLE__:
        util.warn(
            "No 'character_set_name' can be detected with "
            "this MySQL-Python version; "
            "please upgrade to a recent version of MySQL-Python. "
            "Assuming latin1.")
        return 'latin1'
    else:
        return cset_name()
AttributeError
dataset/ETHPy150Open zzzeek/sqlalchemy/lib/sqlalchemy/dialects/mysql/mysqldb.py/MySQLDialect_mysqldb._detect_charset
5,103
def get_spatial_scale(wcs, assert_square=True):
    # Code adapted from APLpy
    wcs = wcs.sub([WCSSUB_CELESTIAL])
    cdelt = np.matrix(wcs.wcs.get_cdelt())
    pc = np.matrix(wcs.wcs.get_pc())
    scale = np.array(cdelt * pc)

    if assert_square:
        try:
            np.testing.assert_almost_equal(abs(cdelt[0,0]), abs(cdelt[0,1]))
            np.testing.assert_almost_equal(abs(pc[0,0]), abs(pc[1,1]))
            np.testing.assert_almost_equal(abs(scale[0,0]), abs(scale[0,1]))
        except __HOLE__:
            raise ValueError("Non-square pixels. Please resample data.")

    return abs(scale[0,0]) * u.Unit(wcs.wcs.cunit[0])
AssertionError
dataset/ETHPy150Open glue-viz/glue/glue/external/pvextractor/utils/wcs_utils.py/get_spatial_scale
5,104
def GetThreadId(thread):
    try:
        return thread.__pydevd_id__
    except __HOLE__:
        _nextThreadIdLock.acquire()
        try:
            #We do a new check with the lock in place just to be sure that nothing changed
            if not hasattr(thread, '__pydevd_id__'):
                try:
                    pid = os.getpid()
                except AttributeError:
                    try:
                        #Jython does not have it!
                        import java.lang.management.ManagementFactory  #@UnresolvedImport -- just for jython
                        pid = java.lang.management.ManagementFactory.getRuntimeMXBean().getName()
                        pid = pid.replace('@', '_')
                    except:
                        #ok, no pid available (will be unable to debug multiple processes)
                        pid = '000001'

                thread.__pydevd_id__ = 'pid%s_seq%s' % (pid, _nextThreadId())
        finally:
            _nextThreadIdLock.release()

    return thread.__pydevd_id__

#===============================================================================
# Null
#===============================================================================
AttributeError
dataset/ETHPy150Open CountZer0/PipelineConstructionSet/python/common/diagnostic/pydevDebug/pydevd_constants.py/GetThreadId
5,105
def hsts(self, name):
    """Test for HTTP Strict Transport Security header"""
    headers = requests.get("https://" + name).headers
    hsts_header = headers.get("strict-transport-security")
    if not hsts_header:
        return False

    # Split directives following RFC6797, section 6.1
    directives = [d.split("=") for d in hsts_header.split(";")]
    max_age = [d for d in directives if d[0] == "max-age"]

    if not max_age:
        logger.error("Server responded with invalid HSTS header field")
        return False

    try:
        _, max_age_value = max_age[0]
        max_age_value = int(max_age_value)
    except __HOLE__:
        logger.error("Server responded with invalid HSTS header field")
        return False

    # Test whether HSTS does not expire for at least two weeks.
    if max_age_value <= (2 * 7 * 24 * 3600):
        logger.error("HSTS should not expire in less than two weeks")
        return False

    return True
ValueError
dataset/ETHPy150Open letsencrypt/letsencrypt/certbot-compatibility-test/certbot_compatibility_test/validator.py/Validator.hsts
5,106
def get_all(type_obj, include_subtypes=True):
    """Get a list containing all instances of a given type. This will
    work for the vast majority of types out there.

    >>> class Ratking(object): pass
    >>> wiki, hak, sport = Ratking(), Ratking(), Ratking()
    >>> len(get_all(Ratking))
    3

    However, there are some exceptions. For example, ``get_all(bool)``
    returns an empty list because ``True`` and ``False`` are
    themselves built-in and not tracked.

    >>> get_all(bool)
    []

    Still, it's not hard to see how this functionality can be used to
    find all instances of a leaking type and track them down further
    using :func:`gc.get_referrers` and :func:`gc.get_referents`.

    ``get_all()`` is optimized such that getting instances of
    user-created types is quite fast. Setting *include_subtypes* to
    ``False`` will further increase performance in cases where
    instances of subtypes aren't required.

    .. note::

        There are no guarantees about the state of objects returned by
        ``get_all()``, especially in concurrent environments. For
        instance, it is possible for an object to be in the middle of
        executing its ``__init__()`` and be only partially constructed.
    """
    # TODO: old-style classes
    if not isinstance(type_obj, type):
        raise TypeError('expected a type, not %r' % type_obj)
    try:
        type_is_tracked = gc.is_tracked(type_obj)
    except __HOLE__:
        type_is_tracked = False  # Python 2.6 and below don't get the speedup
    if type_is_tracked:
        to_check = gc.get_referrers(type_obj)
    else:
        to_check = gc.get_objects()

    if include_subtypes:
        ret = [x for x in to_check if isinstance(x, type_obj)]
    else:
        ret = [x for x in to_check if type(x) is type_obj]
    return ret
AttributeError
dataset/ETHPy150Open mahmoud/boltons/boltons/gcutils.py/get_all
5,107
def import_dicts():
    data = []
    with open('DICTLINE.GEN', encoding="ISO-8859-1") as f:
        for i, line in enumerate(f):
            orth = line[0:19].replace("zzz", "").strip()
            parts = [orth]
            if len(line[19:38].strip()) > 0:
                parts.append(line[19:38].replace("zzz", "").strip())
            if len(line[38:57].strip()) > 0:
                parts.append(line[38:57].replace("zzz", "").strip())
            if len(line[57:76].strip()) > 0:
                parts.append(line[57:76].replace("zzz", "").strip())
            if len(line[83:87].strip()) > 0:
                n = line[83:87].strip().split(" ")
                for n_i, v in enumerate(n):
                    try:
                        n[n_i] = int(v)
                    except __HOLE__:
                        pass
            senses = line[109:].strip().split(";")
            new_senses = []
            for sense in senses:
                sense = sense.strip()
                if len(sense):
                    new_senses.append(sense)
            data.append({
                'id': i + 1,
                'orth': orth,
                'parts': parts,
                'pos': line[76:83].strip(),
                'form': line[83:100].strip(),
                'n': n,
                'senses': new_senses
            })
    with open('data.json', 'w') as out:
        json.dump(data, out)
    return
ValueError
dataset/ETHPy150Open segetes/open_words/open_words/format_data.py/import_dicts
5,108
def import_stems():
    data = []
    with open('STEMLIST.GEN') as f:
        for line in f:
            if len(line[26:30].strip()) > 0:
                n = line[26:30].strip().split(" ")
                for i, v in enumerate(n):
                    try:
                        n[i] = int(v)
                    except __HOLE__:
                        pass
            data.append({
                'orth': line[0:19].strip(),
                'pos': line[19:26].strip(),
                'form': line[26:45].strip(),
                'n': n,
                'wid': int(line[50:].strip())
            })
    with open('data.json', 'w') as out:
        json.dump(data, out)
    return
ValueError
dataset/ETHPy150Open segetes/open_words/open_words/format_data.py/import_stems
5,109
def parse_infl_type(s):
    if len(s.strip()) > 0:
        n = s.strip().split(" ")
        for i, v in enumerate(n):
            try:
                n[i] = int(v)
            except __HOLE__:
                pass
    return n
ValueError
dataset/ETHPy150Open segetes/open_words/open_words/format_data.py/parse_infl_type
5,110
def extract_eliot_from_twisted_log(twisted_log_line):
    """
    Given a line from a Twisted log message, return the text of the Eliot log
    message that is on that line.

    If there is no Eliot message on that line, return ``None``.

    :param str twisted_log_line: A line from a Twisted test.log.
    :return: A logged eliot message without Twisted logging preamble, or
        ``None``.
    :rtype: unicode or ``NoneType``.
    """
    open_brace = twisted_log_line.find('{')
    close_brace = twisted_log_line.rfind('}')
    if open_brace == -1 or close_brace == -1:
        return None
    candidate = twisted_log_line[open_brace:close_brace + 1]
    try:
        fields = json.loads(candidate)
    except (ValueError, __HOLE__):
        return None
    # Eliot lines always have these two keys.
    if {"task_uuid", "timestamp"}.difference(fields):
        return None
    return candidate
TypeError
dataset/ETHPy150Open ClusterHQ/flocker/flocker/testtools/_base.py/extract_eliot_from_twisted_log
5,111
def check_data_classes(test, classes):
    import inspect
    for data_class in classes:
        test.assert_(data_class.__doc__ is not None,
                     'The class %s should have a docstring' % data_class)
        if hasattr(data_class, '_qname'):
            qname_versions = None
            if isinstance(data_class._qname, tuple):
                qname_versions = data_class._qname
            else:
                qname_versions = (data_class._qname,)
            for versioned_qname in qname_versions:
                test.assert_(isinstance(versioned_qname, str),
                             'The class %s has a non-string _qname' % data_class)
                test.assert_(not versioned_qname.endswith('}'),
                             'The _qname for class %s is only a namespace' % (
                                 data_class))
        for attribute_name, value in data_class.__dict__.items():
            # Ignore all elements that start with _ (private members)
            if not attribute_name.startswith('_'):
                try:
                    if not (isinstance(value, str)
                            or inspect.isfunction(value)
                            or (isinstance(value, list)
                                and issubclass(value[0], atom.core.XmlElement))
                            or type(value) == property  # Allow properties.
                            or inspect.ismethod(value)  # Allow methods.
                            or inspect.ismethoddescriptor(value)  # Allow method descriptors.
                                                                  # staticmethod et al.
                            or issubclass(value, atom.core.XmlElement)):
                        test.fail(
                            'XmlElement member should have an attribute, XML class,'
                            ' or list of XML classes as attributes.')
                except __HOLE__:
                    test.fail('Element %s in %s was of type %s' % (
                        attribute_name, data_class._qname, type(value)))
TypeError
dataset/ETHPy150Open kuri65536/python-for-android/python3-alpha/python-libs/gdata/test_config.py/check_data_classes
5,112
def _import_identity(import_str):
    try:
        import_str = _id_type(import_str)
        full_str = "pyrax.identity.%s" % import_str
        return utils.import_class(full_str)
    except __HOLE__:
        pass
    return utils.import_class(import_str)
ImportError
dataset/ETHPy150Open rackspace/pyrax/pyrax/__init__.py/_import_identity
5,113
def get(self, key, env=None):
    """
    Returns the config setting for the specified environment. If no
    environment is specified, the value for the current environment is
    returned. If an unknown key or environment is passed, None is
    returned.
    """
    if env is None:
        env = self.environment
    try:
        ret = self._settings[env][key]
    except KeyError:
        ret = None
    if ret is None:
        # See if it's set in the environment
        if key == "identity_class":
            # This is defined via the identity_type
            env_var = self.env_dct.get("identity_type")
            ityp = os.environ.get(env_var)
            if ityp:
                return _import_identity(ityp)
        else:
            env_var = self.env_dct.get(key)
            try:
                ret = os.environ[env_var]
            except __HOLE__:
                ret = None
    return ret
KeyError
dataset/ETHPy150Open rackspace/pyrax/pyrax/__init__.py/Settings.get
5,114
def _safe_region(region=None, context=None):
    """Value to use when no region is specified."""
    ret = region or settings.get("region")
    context = context or identity
    if not ret:
        # Nothing specified; get the default from the identity object.
        if not context:
            _create_identity()
            context = identity
        ret = context.get_default_region()
    if not ret:
        # Use the first available region
        try:
            ret = regions[0]
        except __HOLE__:
            ret = ""
    return ret
IndexError
dataset/ETHPy150Open rackspace/pyrax/pyrax/__init__.py/_safe_region
5,115
def connect_to_cloudservers(region=None, context=None, verify_ssl=None,
                            **kwargs):
    """Creates a client for working with cloud servers."""
    context = context or identity
    _cs_auth_plugin.discover_auth_systems()
    id_type = get_setting("identity_type")
    if id_type != "keystone":
        auth_plugin = _cs_auth_plugin.load_plugin(id_type)
    else:
        auth_plugin = None
    region = _safe_region(region, context=context)
    mgt_url = _get_service_endpoint(context, "compute", region)
    cloudservers = None
    if not mgt_url:
        # Service is not available
        return
    if verify_ssl is None:
        insecure = not get_setting("verify_ssl")
    else:
        insecure = not verify_ssl
    try:
        extensions = nc.discover_extensions(_cs_max_version)
    except __HOLE__:
        extensions = None
    clt_class = _cs_client.get_client_class(_cs_max_version)
    cloudservers = clt_class(context.username, context.password,
                             project_id=context.tenant_id,
                             auth_url=context.auth_endpoint,
                             auth_system=id_type,
                             region_name=region,
                             service_type="compute",
                             auth_plugin=auth_plugin,
                             insecure=insecure,
                             extensions=extensions,
                             http_log_debug=_http_debug,
                             **kwargs)
    agt = cloudservers.client.USER_AGENT
    cloudservers.client.USER_AGENT = _make_agent_name(agt)
    cloudservers.client.management_url = mgt_url
    cloudservers.client.auth_token = context.token
    cloudservers.exceptions = _cs_exceptions
    # Add some convenience methods
    cloudservers.list_images = cloudservers.images.list
    cloudservers.list_flavors = cloudservers.flavors.list
    cloudservers.list = cloudservers.servers.list

    def list_base_images():
        """
        Returns a list of all base images; excludes any images created
        by this account.
        """
        return [image for image in cloudservers.images.list()
                if not hasattr(image, "server")]

    def list_snapshots():
        """
        Returns a list of all images created by this account; in other
        words, it excludes all the base images.
        """
        return [image for image in cloudservers.images.list()
                if hasattr(image, "server")]

    def find_images_by_name(expr):
        """
        Returns a list of images whose name contains the specified
        expression. The value passed is treated as a regular expression,
        allowing for more specific searches than simple wildcards. The
        matching is done in a case-insensitive manner.
        """
        return [image for image in cloudservers.images.list()
                if re.search(expr, image.name, re.I)]

    cloudservers.list_base_images = list_base_images
    cloudservers.list_snapshots = list_snapshots
    cloudservers.find_images_by_name = find_images_by_name
    cloudservers.identity = identity
    return cloudservers
AttributeError
dataset/ETHPy150Open rackspace/pyrax/pyrax/__init__.py/connect_to_cloudservers
5,116
@transaction.atomic
def import_pages_from_json(modeladmin, request, queryset,
        template_name='admin/pages/page/import_pages.html'):
    try:
        j = request.FILES['json']
    except __HOLE__:
        return render(request, template_name, {
            'nofile': True,
            'app_label': 'pages',
            'opts': Page._meta,
        }, RequestContext(request))
    errors, pages_created = json_to_pages(j.read(), request.user,
        get_language_from_request(request))
    return render(request, template_name, {
        'errors': errors,
        'pages_created': pages_created,
        'app_label': 'pages',
        'opts': Page._meta,
    }, RequestContext(request))
KeyError
dataset/ETHPy150Open batiste/django-page-cms/pages/plugins/jsonexport/actions.py/import_pages_from_json
5,117
def handle(self, *args, **options):
    skip = options.get('skip')
    resource_urlhandlers = []
    if not args:
        resource_urlhandlers = URLInfo.objects.filter(auto_update=True)
    else:
        resources = []
        for arg in args:
            try:
                prj, res = arg.split('.')
                resources.extend(Resource.objects.filter(project__slug=prj,
                    slug=res) or None)
            except (__HOLE__, TypeError), e:
                sys.stderr.write((u"No matching resource was found for %s\n" %
                    arg).encode('UTF-8'))
        resource_urlhandlers = URLInfo.objects.filter(resource__in=resources)
    num = resource_urlhandlers.count()
    if num == 0:
        sys.stderr.write("No resources suitable for updating found. Exiting...\n")
        sys.exit()
    sys.stdout.write("A total of %s resources are listed for updating.\n" % num)
    for seq, handler in enumerate(resource_urlhandlers):
        sys.stdout.write((u"Updating resource %s.%s (%s of %s)\n" % (
            handler.resource.project.slug, handler.resource.slug,
            seq+1, num)).encode('UTF-8'))
        try:
            handler.update_source_file()
        except Exception, e:
            sys.stderr.write((u"Error updating source file for resource %s.%s\n" % (
                handler.resource.project.slug,
                handler.resource.slug)).encode('UTF-8'))
            sys.stderr.write("Exception was: %s\n" % e)
            if skip:
                continue
            sys.stderr.write("Aborting...\n")
            sys.exit(1)
        else:
            sys.stdout.write((u"Updated source file for resource %s.%s\n" % (
                handler.resource.project.slug,
                handler.resource.slug)).encode('UTF-8'))
ValueError
dataset/ETHPy150Open rvanlaar/easy-transifex/src/transifex/transifex/addons/autofetch/management/commands/txfetch.py/Command.handle
5,118
def login(request, **credentials):
    """
    If the given credentials are valid, return a User object.
    """
    backend = local.app.auth_backend
    try:
        user = backend.login(request, **credentials)
    except __HOLE__:
        # This backend doesn't accept these credentials as arguments.
        # Try the next one.
        pass
    return user or False
TypeError
dataset/ETHPy150Open IanLewis/kay/kay/auth/__init__.py/login
5,119
def create_new_user(user_name, password=None, **kwargs):
    try:
        auth_model = import_string(settings.AUTH_USER_MODEL)
    except (ImportError, __HOLE__), e:
        logging.warn("Failed importing auth user model: %s." %
                     settings.AUTH_USER_MODEL)
        return
    if password:
        kwargs['password'] = auth_model.hash_password(password)
    else:
        kwargs['password'] = auth_model.get_unusable_password()

    def txn():
        user = auth_model.get_by_key_name(auth_model.get_key_name(user_name))
        if user:
            raise DuplicateKeyError("An user: %s is already registered." %
                                    user_name)
        new_user = auth_model(key_name=auth_model.get_key_name(user_name),
                              user_name=user_name, **kwargs)
        new_user.put()
        return new_user
    return db.run_in_transaction(txn)
AttributeError
dataset/ETHPy150Open IanLewis/kay/kay/auth/__init__.py/create_new_user
5,120
def test_idxmapping_key_len_check(self):
    try:
        MultiDimensionalMapping(initial_items=self.init_item_odict)
        raise AssertionError('Invalid key length check failed.')
    except __HOLE__:
        pass
KeyError
dataset/ETHPy150Open ioam/holoviews/tests/testndmapping.py/NdIndexableMappingTest.test_idxmapping_key_len_check
5,121
def __init__(
    self,
    model,
    csv_path,
    mapping,
    using=None,
    delimiter=',',
    null=None,
    encoding=None,
    static_mapping=None
):
    self.model = model
    self.mapping = mapping
    if os.path.exists(csv_path):
        self.csv_path = csv_path
    else:
        raise ValueError("csv_path does not exist")
    if using is not None:
        self.using = using
    else:
        self.using = router.db_for_write(model)
    self.conn = connections[self.using]
    if self.conn.vendor != 'postgresql':
        raise TypeError("Only PostgreSQL backends supported")
    self.backend = self.conn.ops
    self.delimiter = delimiter
    self.null = null
    self.encoding = encoding
    if static_mapping is not None:
        self.static_mapping = OrderedDict(static_mapping)
    else:
        self.static_mapping = {}

    # Connect the headers from the CSV with the fields on the model
    self.field_header_crosswalk = []
    inverse_mapping = {v: k for k, v in self.mapping.items()}
    for h in self.get_headers():
        try:
            f_name = inverse_mapping[h]
        except KeyError:
            raise ValueError("Map does not include %s field" % h)
        try:
            f = [f for f in self.model._meta.fields if f.name == f_name][0]
        except IndexError:
            raise ValueError("Model does not include %s field" % f_name)
        self.field_header_crosswalk.append((f, h))

    # Validate that the static mapping columns exist
    for f_name in self.static_mapping.keys():
        try:
            [s for s in self.model._meta.fields if s.name == f_name][0]
        except __HOLE__:
            raise ValueError("Model does not include %s field" % f_name)

    self.temp_table_name = "temp_%s" % self.model._meta.db_table
IndexError
dataset/ETHPy150Open california-civic-data-coalition/django-postgres-copy/postgres_copy/__init__.py/CopyMapping.__init__
5,122
def compare_versions(version1, version2):
    try:
        return cmp(StrictVersion(version1), StrictVersion(version2))
    # in case of abnormal version number, fall back to LooseVersion
    except __HOLE__:
        pass
    try:
        return cmp(LooseVersion(version1), LooseVersion(version2))
    except TypeError:
        # certain LooseVersion comparions raise due to unorderable types,
        # fallback to string comparison
        return cmp([str(v) for v in LooseVersion(version1).version],
                   [str(v) for v in LooseVersion(version2).version])
ValueError
dataset/ETHPy150Open cloudaice/simple-data/misc/virtenv/lib/python2.7/site-packages/pip-1.2.1-py2.7.egg/pip/commands/search.py/compare_versions
5,123
def run(self):
    try:
        algo_config = config_loader.load(os.path.join(
            os.path.dirname(__file__), '../../config/services.json'))
        algo_config = algo_config.get(self.plugin_name)['worker_options']
    except __HOLE__:
        return None
    for service, options in algo_config.iteritems():
        if service and options:
            params = {'params': options, 'service': service}
            app.task_runner.delay(self.plugin, params)
    return True
AttributeError
dataset/ETHPy150Open trademob/anna-molly/lib/plugins/poll_task.py/PollTask.run
5,124
def __init__(self, timeout, scaling=1):
    super(Timeout, self).__init__()
    try:
        self.test_timeout = int(timeout)
    except __HOLE__:
        # If timeout value is invalid do not set a timeout.
        self.test_timeout = 0
    if scaling >= 1:
        self.test_timeout *= scaling
    else:
        raise ValueError('scaling value must be >= 1')
ValueError
dataset/ETHPy150Open openstack/nova/nova/tests/fixtures.py/Timeout.__init__
5,125
def get_context_data(self, **kwargs):
    kwargs = super(DiffView, self).get_context_data(**kwargs)
    try:
        before_url = self.request.GET['before']
        after_url = self.request.GET['after']
    except __HOLE__:
        raise Http404

    before = self.call_view_from_url(before_url)
    after = self.call_view_from_url(after_url)

    kwargs['diff'] = daisydiff(before, after)
    return kwargs
KeyError
dataset/ETHPy150Open fusionbox/django-widgy/widgy/views/versioning.py/DiffView.get_context_data
5,126
def create_subscription(self, credit_card, amount, start, days=None,
        months=None, occurrences=None, trial_amount=None,
        trial_occurrences=None):
    """
    Creates a recurring subscription payment on the CreditCard provided.

    ``credit_card``
        The CreditCard instance to create the subscription for.
        Subscriptions require that you provide a first and last name with
        the credit card.
    ``amount``
        The amount to charge every occurrence, either as an int, float,
        or Decimal.
    ``start``
        The date to start the subscription, as a date object.
    ``days``
        Provide either the days or the months argument to indicate the
        interval at which the subscription should recur.
    ``months``
        Provide either the days or the months argument to indicate the
        interval at which the subscription should recur.
    ``occurrences``
        If provided, this is the number of times to charge the credit
        card before ending. If not provided, will last until canceled.
    ``trial_amount``
        If you want to have a trial period at a lower amount for this
        subscription, provide the amount. (Either both trial arguments
        should be provided, or neither.)
    ``trial_occurrences``
        If you want to have a trial period at a lower amount for this
        subscription, provide the number of occurences the trial period
        should last for. (Either both trial arguments should be provided,
        or neither.)
    """
    subscription = self.client.factory.create('ARBSubscriptionType')

    # Add the basic amount and payment fields
    amount = Decimal(str(amount)).quantize(Decimal('0.01'))
    subscription.amount = str(amount)
    payment_type = self.client.factory.create('PaymentType')
    credit_card_type = self.client.factory.create('CreditCardType')
    credit_card_type.cardNumber = credit_card.card_number
    credit_card_type.expirationDate = '{0}-{1:0>2}'.format(
        credit_card.exp_year, credit_card.exp_month)
    credit_card_type.cardCode = credit_card.cvv
    payment_type.creditCard = credit_card_type
    subscription.payment = payment_type
    if not (credit_card.first_name and credit_card.last_name):
        raise AuthorizeInvalidError('Subscriptions require first name '
            'and last name to be provided with the credit card.')
    subscription.billTo.firstName = credit_card.first_name
    subscription.billTo.lastName = credit_card.last_name

    # Add the fields for the payment schedule
    if (days and months) or not (days or months):
        raise AuthorizeInvalidError('Please provide either the months or '
            'days argument to define the subscription interval.')
    if days:
        try:
            days = int(days)
            assert days >= 7 and days <= 365
        except (AssertionError, __HOLE__):
            raise AuthorizeInvalidError('The interval days must be an '
                'integer value between 7 and 365.')
        subscription.paymentSchedule.interval.unit = \
            self.client.factory.create('ARBSubscriptionUnitEnum').days
        subscription.paymentSchedule.interval.length = days
    elif months:
        try:
            months = int(months)
            assert months >= 1 and months <= 12
        except (AssertionError, ValueError):
            raise AuthorizeInvalidError('The interval months must be an '
                'integer value between 1 and 12.')
        subscription.paymentSchedule.interval.unit = \
            self.client.factory.create('ARBSubscriptionUnitEnum').months
        subscription.paymentSchedule.interval.length = months
    if start < date.today():
        raise AuthorizeInvalidError('The start date for the subscription '
            'may not be in the past.')
    subscription.paymentSchedule.startDate = start.strftime('%Y-%m-%d')
    if occurrences is None:
        occurrences = 9999  # That's what they say to do in the docs
    subscription.paymentSchedule.totalOccurrences = occurrences

    # If a trial period has been specified, add those fields
    if trial_amount and trial_occurrences:
        subscription.paymentSchedule.trialOccurrences = trial_occurrences
        trial_amount = Decimal(str(trial_amount))
        trial_amount = trial_amount.quantize(Decimal('0.01'))
        subscription.trialAmount = str(trial_amount)
    elif trial_amount or trial_occurrences:
        raise AuthorizeInvalidError('To indicate a trial period, you '
            'must provide both a trial amount and occurrences.')

    # Make the API call to create the subscription
    response = self._make_call('ARBCreateSubscription', subscription)
    return response.subscriptionId
ValueError
dataset/ETHPy150Open drewisme/authorizesauce/authorize/apis/recurring.py/RecurringAPI.create_subscription
5,127
def GetTestGroupsFromFile(self, file_path):
    # This needs to be a list instead of a dictionary to preserve order in python < 2.7
    TestGroups = []
    ConfigFile = FileOperations.open(file_path, 'r').read().splitlines()
    for line in ConfigFile:
        if '#' == line[0]:
            continue  # Skip comments
        try:
            Code, Priority, Descrip, Hint, URL = line.strip().split(' | ')
        except __HOLE__:
            self.error_handler.FrameworkAbort(
                "Problem in Test Groups file: '" + file_path +
                "' -> Cannot parse line: " + line)
        if len(Descrip) < 2:
            Descrip = Hint
        if len(Hint) < 2:
            Hint = ""
        TestGroups.append({
            'code': Code,
            'priority': Priority,
            'descrip': Descrip,
            'hint': Hint,
            'url': URL})
    return TestGroups
ValueError
dataset/ETHPy150Open owtf/owtf/framework/db/plugin_manager.py/PluginDB.GetTestGroupsFromFile
5,128
def LoadFromFileSystem(self):
    """Loads the plugins from the filesystem and updates their info.

    Walks through each sub-directory of `PLUGINS_DIR`.
    For each file, loads it thanks to the imp module.
    Updates the database with the information for each plugin:
        + 'title': the title of the plugin
        + 'name': the name of the plugin
        + 'code': the internal code of the plugin
        + 'group': the group of the plugin (ex: web)
        + 'type': the type of the plugin (ex: active, passive, ...)
        + 'descrip': the description of the plugin
        + 'file': the filename of the plugin
        + 'internet_res': does the plugin use internet resources?
    """
    # TODO: When the -t, -e or -o is given to OWTF command line, only load
    # the specific plugins (and not all of them like below).
    # Retrieve the list of the plugins (sorted) from the directory given by
    # 'PLUGIN_DIR'.
    plugins = []
    for root, _, files in os.walk(self.config.FrameworkConfigGet('PLUGINS_DIR')):
        plugins.extend([
            os.path.join(root, filename) for filename in files
            if filename.endswith('py')])
    plugins = sorted(plugins)
    # Retrieve the information of the plugin.
    for plugin_path in plugins:
        # Only keep the relative path to the plugin
        plugin = plugin_path.replace(
            self.config.FrameworkConfigGet('PLUGINS_DIR'), '')
        # TODO: Using os.path.sep might not be portable especially on
        # Windows platform since it allows '/' and '\' in the path.
        # Retrieve the group, the type and the file of the plugin.
        chunks = plugin.split(os.path.sep)
        # TODO: Ensure that the variables group, type and file exist when
        # the length of chunks is less than 3.
        if len(chunks) == 3:
            group, type, file = chunks
        # Retrieve the internal name and code of the plugin.
        name, code = os.path.splitext(file)[0].split('@')
        # Only load the plugin if in XXX_TEST_GROUPS configuration (e.g. web_testgroups.cfg)
        if self.db.session.query(models.TestGroup).get(code) is None:
            continue
        # Load the plugin as a module.
        filename, pathname, desc = imp.find_module(
            os.path.splitext(os.path.basename(plugin_path))[0],
            [os.path.dirname(plugin_path)])
        plugin_module = imp.load_module(
            os.path.splitext(file)[0], filename, pathname, desc)
        # Try te retrieve the `attr` dictionary from the module and convert
        # it to json in order to save it into the database.
        attr = None
        try:
            attr = json.dumps(plugin_module.ATTR)
        except __HOLE__:  # The plugin didn't define an attr dict.
            pass
        # Save the plugin into the database.
        self.db.session.merge(
            models.Plugin(
                key=type + '@' + code,
                group=group,
                type=type,
                title=name.title().replace('_', ' '),
                name=name,
                code=code,
                file=file,
                descrip=plugin_module.DESCRIPTION,
                attr=attr
            )
        )
    self.db.session.commit()
AttributeError
dataset/ETHPy150Open owtf/owtf/framework/db/plugin_manager.py/PluginDB.LoadFromFileSystem
5,129
def paginate(wrapped=None, page_size=PAGE_SIZE):
    """
    Decorate a view function, providing basic pagination facilities.

    Wraps a view function that returns a :py:class:`sqlalchemy.orm.query.Query`
    object in order to enable basic pagination. Returns a dictionary containing
    the results for the current page and page metadata. For example, the simple
    view function

        @paginate
        def my_view(context, request):
            return User.query

    will, when wrapped, return a dictionary like the following:

        {
            "results": [<user1>, <user2>, ..., <user20>],
            "total": 135,
            "page": {
                "cur": 1,
                "max": 7,
                "next": 2,
                "prev": None,
            }
        }

    You can also call :py:func:`paginate` as a function which returns a
    decorator, if you wish to modify the options used by the function:

        paginate = paginator.paginate(page_size=10)

        @paginate
        def my_view(...):
            ...

    N.B. The wrapped view function must accept two arguments: the request
    context and the current request. This decorator does not support view
    functions which accept only a single argument.
    """
    if wrapped is None:
        def decorator(wrap):
            return paginate(wrap, page_size=page_size)
        return decorator

    @functools.wraps(wrapped)
    def wrapper(context, request):
        result = wrapped(context, request)
        total = result.count()

        page_max = int(math.ceil(total / page_size))
        page_max = max(1, page_max)  # there's always at least one page

        try:
            page = int(request.params['page'])
        except (__HOLE__, ValueError):
            page = 1

        page = max(1, page)
        page = min(page, page_max)

        offset = (page - 1) * page_size
        limit = page_size

        out = {
            'results': result.offset(offset).limit(limit).all(),
            'total': total,
            'page': {
                'cur': page,
                'max': page_max,
                'next': page + 1 if page < page_max else None,
                'prev': page - 1 if page > 1 else None,
            }
        }
        return out
    return wrapper
KeyError
dataset/ETHPy150Open hypothesis/h/h/paginator.py/paginate
5,130
def make_skipper(module, label=None, version=None):
    label = label or module
    try:
        mod = __import__(module)
        if version:
            assert LooseVersion(mod.__version__) >= LooseVersion(version)
        installed = True
    except (__HOLE__, AssertionError):
        installed = False
    return installed, pytest.mark.skipif(str(not installed),
                                         reason='Requires %s' % label)
ImportError
dataset/ETHPy150Open glue-viz/glue/glue/tests/helpers.py/make_skipper
5,131
def parse_config(self, config_file_path):
    try:
        config = ConfigObj(infile=config_file_path,
                           configspec=CONFIG_GRAMMAR.split("\n"),
                           file_error=True)
    except (ConfigObjError, __HOLE__), error:
        raise TelesphorusConfigError("Config file parsing failed (%s)" % error)
    validator = Validator()
    results = config.validate(validator)
    print "Validating configuration file '%s'" % config_file_path
    if not results:
        for section_list, key, _ in flatten_errors(config, results):
            section_string = '.'.join(section_list)
            if key is not None:
                raise TelesphorusConfigError("Missing key %s section %s" %
                                             (key, section_string))
            else:
                raise TelesphorusConfigError("Section '%s' is missing" %
                                             section_string)
    self.DEGREE_IN = config["main"]["degree_in"]
    self.FULL_PATH = config["main"]["full_path"]
    self.LIB_CALL = config["main"]["lib_call"]
    self.POSIX_CALL = config["main"]["posix_call"]
    self.TIMING_POSIX = config["main"]["timing_posix"]
    self.STACK_DEPTH = config["main"]["stack_depth"]
    self.LIB = config["main"]["lib"]
    self.HIDDEN_ABSTR = config["main"]["hidden_abstr"]
    self.OBSOLETE_ABSTR = config["main"]["obsolete_abstr"]
    self.MISSING_ABSTR = config["main"]["missing_abstr"]
    self.CIRCULAR_ABSTR = config["main"]["circular_abstr"]
    self.OBSCURITY_RATIO = config["main"]["obscurity_ratio"]
    self.FULL_PATH_THREASHOLD = config["main"]["full_path_threashold"]
    self.LIB_THREASHOLD = config["main"]["lib_threashold"]
    self.LIB_CALL_THREASHOLD = config["main"]["lib_call_threashold"]
    self.POSIX_CALL_THREASHOLD = config["main"]["posix_call_threashold"]
    self.HIDDEN_ABSTR_THREASHOLD = config["main"]["hidden_abstr_threashold"]
    self.OBSOLETE_ABSTR_THREASHOLD = config["main"]["obsolete_abstr_threashold"]
    self.MISSING_ABSTR_SAMPLES = config["main"]["missing_abstr_samples"]
    self.MISSING_ABSTR_MAX_WINDOW_SIZE = config["main"]["missing_abstr_max_window_size"]
    self.CIRCULAR_ABSTR_SAMPLES = config["main"]["circular_abstr_samples"]
    self.OBSCURITY_SAMPLES = config["main"]["obscurity_samples"]
IOError
dataset/ETHPy150Open columbia/libtrack/libtrack/parser/src/telesphorus/settings.py/Settings.parse_config
5,132
def main():
    "Main function. Handles delegation to other functions."

    # Parse the arguments that
    parser = argparse.ArgumentParser(
        description="Run tests on a web app.")
    parser.add_argument("package",
                        help="The path of the package you're testing")
    parser.add_argument("-o", "--output",
                        default="text",
                        choices=("text", "json"),
                        help="The output format that you expect",
                        required=False)
    parser.add_argument("-v", "--verbose",
                        action="store_const",
                        const=True,
                        help="""If the output format supports it, makes the
                        analysis summary include extra info.""")
    parser.add_argument("--boring",
                        action="store_const",
                        const=True,
                        help="""Activating this flag will remove color support
                        from the terminal.""")
    parser.add_argument("--unlisted",
                        action="store_const",
                        const=True,
                        help="Indicates that the app will not be listed on "
                             "the Firefox Marketplace.")
    parser.add_argument("--timeout",
                        help="The amount of time before validation is "
                             "terminated with a timeout exception.",
                        default="60")
    parser.add_argument("--acorn",
                        action="store_const",
                        const=True,
                        help="Uses Acorn instead of Spidermonkey for JS "
                             "parsing. Requirees Node and Acorn.")

    args = parser.parse_args()

    try:
        timeout = int(args.timeout)
    except __HOLE__:
        print "Invalid timeout. Integer expected."
        sys.exit(1)

    if "://" in args.package:
        error_bundle = validate_app(
            requests.get(args.package).content, listed=not args.unlisted,
            format=None, url=args.package, acorn=args.acorn)
    elif args.package.endswith(".webapp"):
        with open(args.package) as f:
            error_bundle = validate_app(
                f.read(), listed=not args.unlisted, format=None,
                acorn=args.acorn)
    else:
        error_bundle = validate_packaged_app(
            args.package, listed=not args.unlisted, format=None,
            timeout=timeout, acorn=args.acorn)

    # Print the output of the tests based on the requested format.
    if args.output == "text":
        print error_bundle.print_summary(
            verbose=args.verbose, no_color=args.boring).encode("utf-8")
    elif args.output == "json":
        sys.stdout.write(error_bundle.render_json())

    if error_bundle.failed():
        sys.exit(1)
    else:
        sys.exit(0)

# Start up the testing and return the output.
ValueError
dataset/ETHPy150Open mozilla/app-validator/appvalidator/main.py/main
5,133
def _parse_metrics(self, message):
    """
    Given a raw message of metrics split by newline characters, this will
    parse the metrics and return an array of metric objects.

    This will raise a :exc:`ValueError` if any metrics are invalid, unless
    ``ignore_errors`` is set to True.
    """
    results = []
    for line in message.split("\n"):
        # If the line is blank, we ignore it
        if len(line) == 0:
            continue

        # Parse the line, and skip it if its invalid
        try:
            (key, value, metric_type, flag) = parser.parse_line(line)
        except __HOLE__:
            self.logger.error("Invalid line syntax: %s" % line)
            continue

        # Create the metric and store it in our results
        if metric_type in metrics.METRIC_TYPES:
            # Create and store the metric object
            metric = metrics.METRIC_TYPES[metric_type](key, value, flag)
            results.append(metric)
        else:
            # Ignore the bad invalid metric, but log it
            self.logger.error("Invalid metric '%s' in line: %s" %
                              (metric_type, line))
    return results
ValueError
dataset/ETHPy150Open kiip/statsite/statsite/collector.py/Collector._parse_metrics
5,134
def stop(self, callback=None, **kwargs):
    """Stops this mode.

    Args:
        **kwargs: Catch-all since this mode might start from events with
            who-knows-what keyword arguments.

    Warning: You can safely call this method, but do not override it in your
    mode code. If you want to write your own mode code by subclassing Mode,
    put whatever code you want to run when this mode stops in the
    mode_stop method which will be called automatically.
    """
    self.log.debug('Mode Stop.')

    self.priority = 0
    self.active = False

    for item in self.stop_methods:
        try:
            item[0](item[1])
        except __HOLE__:
            pass

    self.stop_methods = list()

    self.delete_slides_from_mode()
TypeError
dataset/ETHPy150Open missionpinball/mpf/mpf/media_controller/core/mode.py/Mode.stop
5,135
@retry(9, TypeError, 0.01, 'pypet.retry')
def release_lock(self):
    if self.is_locked and not self.is_open:
        try:
            self.lock.release()
        except (__HOLE__, ThreadError):
            self._logger.exception('Could not release lock, '
                                   'probably has been released already!')
        self.is_locked = False
ValueError
dataset/ETHPy150Open SmokinCaterpillar/pypet/pypet/utils/mpwrappers.py/LockAcquisition.release_lock
5,136
def store(self, *args, **kwargs):
    """Acquires a lock before storage and releases it afterwards."""
    try:
        self.acquire_lock()
        return self._storage_service.store(*args, **kwargs)
    finally:
        if self.lock is not None:
            try:
                self.release_lock()
            except __HOLE__:
                self._logger.error('Could not release lock `%s`!' %
                                   str(self.lock))
RuntimeError
dataset/ETHPy150Open SmokinCaterpillar/pypet/pypet/utils/mpwrappers.py/LockWrapper.store
5,137
def load(self, *args, **kwargs):
    """Acquires a lock before loading and releases it afterwards."""
    try:
        self.acquire_lock()
        return self._storage_service.load(*args, **kwargs)
    finally:
        if self.lock is not None:
            try:
                self.release_lock()
            except __HOLE__:
                self._logger.error('Could not release lock `%s`!' %
                                   str(self.lock))
RuntimeError
dataset/ETHPy150Open SmokinCaterpillar/pypet/pypet/utils/mpwrappers.py/LockWrapper.load
5,138
def rmgeneric(path, __func__):
    try:
        __func__(path)
    except __HOLE__, (errno, strerror):
        pass
OSError
dataset/ETHPy150Open moxie0/sslstrip/setup.py/rmgeneric
5,139
def list_commands():
    """
    List the commands available.
    """
    commands = []
    for f in listdir(_commands_dir):
        if isdir(join(_commands_dir, f)):
            continue
        if not f.endswith('.py'):
            continue
        try:
            commands.append(get_command(f.split('.')[0]))
        except (__HOLE__, AttributeError):
            continue
    return commands
ImportError
dataset/ETHPy150Open robmadole/jig/src/jig/commands/base.py/list_commands
5,140
def add_plugin(pm, plugin, gitdir):
    """
    Adds a plugin by filename or URL.

    Where ``pm`` is an instance of :py:class:`PluginManager` and ``plugin``
    is either the URL to a Git Jig plugin repository or the file name of a
    Jig plugin. The ``gitdir`` is the path to the Git repository which will
    be used to find the :file:`.jig/plugins` directory.
    """
    # If this looks like a URL we will clone it first
    url = urlparse(plugin)

    if url.scheme:
        # This is a URL, let's clone it first into .jig/plugins
        # directory.
        plugin_parts = plugin.rsplit('@', 1)

        branch = None
        try:
            branch = plugin_parts[1]
        except __HOLE__:
            pass

        to_dir = join(gitdir, JIG_DIR_NAME, JIG_PLUGIN_DIR, uuid().hex)
        clone(plugin_parts[0], to_dir, branch)

        plugin = to_dir

    try:
        return pm.add(plugin)
    except PluginError:
        # Clean-up the cloned directory becuase this wasn't installed correctly
        if url.scheme:
            rmtree(plugin)
        raise
IndexError
dataset/ETHPy150Open robmadole/jig/src/jig/commands/base.py/add_plugin
5,141
def __init__(self, argv):
    """
    Parse the command line arguments and call process with the results.

    Where argv is a split string. See :py:module:`shlex`.
    """
    args = self.parser.parse_args(argv)

    # Setup something our command can use to send output
    self.view = create_view()
    # A shorter alias to the view's out decorator
    self.out = self.view.out

    # Finally, process the arguments
    try:
        self.process(args)
    except (__HOLE__, SystemExit, ForcedExit):
        raise
    except Exception as e:
        # Uncaught exception, usually means there is a bug in Jig
        self.crash_report(e, args)
        sys.exit(2)
NotImplementedError
dataset/ETHPy150Open robmadole/jig/src/jig/commands/base.py/BaseCommand.__init__
5,142
def read_from_which_host(
    client,
    pref,
    tag_sets=None,
):
    """Read from a client with the given Read Preference.

    Return the 'host:port' which was read from.

    :Parameters:
      - `client`: A MongoClient
      - `mode`: A ReadPreference
      - `tag_sets`: List of dicts of tags for data-center-aware reads
    """
    db = client.pymongo_test
    if isinstance(tag_sets, dict):
        tag_sets = [tag_sets]
    if tag_sets:
        tags = tag_sets or pref.tag_sets
        pref = pref.__class__(tags)

    db.read_preference = pref

    cursor = db.test.find()
    try:
        try:
            next(cursor)
        except __HOLE__:
            # No documents in collection, that's fine
            pass

        return cursor.address
    except AutoReconnect:
        return None
StopIteration
dataset/ETHPy150Open mongodb/mongo-python-driver/test/utils.py/read_from_which_host
5,143
def perform_destroy(self, instance):
    user = self.request.user
    comment = self.get_comment()
    try:
        comment.retract_report(user, save=True)
    except __HOLE__ as error:
        raise ValidationError(error.message)
ValueError
dataset/ETHPy150Open CenterForOpenScience/osf.io/api/comments/views.py/CommentReportDetail.perform_destroy
5,144
def getAvailablePluginModules(pluginPath = None):
    """
    Determine the available plugin modules on the system.

    @returns a list of found plugins
    """
    if pluginPath is None:
        pluginPath = Resource.getPath("plugins", required = True)

    if pluginPath is None:
        return []

    if not pluginPath in sys.path:
        sys.path.append(pluginPath)

    plugins = []
    for name in os.listdir(pluginPath):
        try:
            if os.path.isfile(os.path.join(pluginPath, name, "__init__.py")):
                plugins.append(__import__(name))
        except __HOLE__, e:
            Log.error("Unable to load plugin %s: %s" % (name, e))
    return plugins
ImportError
dataset/ETHPy150Open skyostil/tracy/src/analyzer/Plugin.py/getAvailablePluginModules
5,145
def _loadPlugins(analyzer, plugin, pluginClass):
    assert type(plugin) == type(sys)

    # Check that we fullfil the plugin's project requirements
    try:
        if not analyzer.project.config.name.lower().startswith(plugin.requiredLibrary):
            return []
    except AttributeError:
        pass

    try:
        plugins = plugin.plugins
    except __HOLE__:
        return []

    return [cls(analyzer) for cls in plugins if issubclass(cls, pluginClass)]
AttributeError
dataset/ETHPy150Open skyostil/tracy/src/analyzer/Plugin.py/_loadPlugins
5,146
def check_hexstring(opt):
    """
    Return the calculated CRC sum of a hex string.
    """
    if opt.undefined_crc_parameters:
        sys.stderr.write("{0:s}: error: undefined parameters\n".format(sys.argv[0]))
        sys.exit(1)
    if len(opt.check_string) % 2 != 0:
        opt.check_string = "0" + opt.check_string
    if sys.version_info >= (3, 0):
        opt.check_string = bytes(opt.check_string, 'utf_8')
    try:
        check_str = bytearray(binascii.unhexlify(opt.check_string))
    except __HOLE__:
        sys.stderr.write(
            "{0:s}: error: invalid hex string {1:s}\n".format(sys.argv[0], opt.check_string))
        sys.exit(1)

    opt.check_string = check_str
    return check_string(opt)

# function crc_file_update
###############################################################################
TypeError
dataset/ETHPy150Open tpircher/pycrc/pycrc.py/check_hexstring
5,147
def check_file(opt):
    """
    Calculate the CRC of a file.
    This algorithm uses the table_driven CRC algorithm.
    """
    if opt.undefined_crc_parameters:
        sys.stderr.write("{0:s}: error: undefined parameters\n".format(sys.argv[0]))
        sys.exit(1)
    alg = Crc(
        width=opt.width, poly=opt.poly,
        reflect_in=opt.reflect_in, xor_in=opt.xor_in,
        reflect_out=opt.reflect_out, xor_out=opt.xor_out,
        table_idx_width=opt.tbl_idx_width)

    if not opt.reflect_in:
        register = opt.xor_in
    else:
        register = alg.reflect(opt.xor_in, opt.width)

    try:
        with open(opt.check_file, 'rb') as f:
            check_bytes = bytearray(f.read(1024))
            while check_bytes != b"":
                register = crc_file_update(alg, register, check_bytes)
                check_bytes = bytearray(f.read(1024))
    except __HOLE__:
        sys.stderr.write(
            "{0:s}: error: can't open file {1:s}\n".format(sys.argv[0], opt.check_file))
        sys.exit(1)

    if opt.reflect_out:
        register = alg.reflect(register, opt.width)
    register = register ^ opt.xor_out
    return register

# function write_file
###############################################################################
IOError
dataset/ETHPy150Open tpircher/pycrc/pycrc.py/check_file
5,148
def write_file(filename, out_str):
    """
    Write the content of out_str to filename.
    """
    try:
        out_file = open(filename, "w")
        out_file.write(out_str)
        out_file.close()
    except __HOLE__:
        sys.stderr.write("{0:s}: error: cannot write to file {1:s}\n".format(sys.argv[0], filename))
        sys.exit(1)

# main function
###############################################################################
IOError
dataset/ETHPy150Open tpircher/pycrc/pycrc.py/write_file
5,149
@patch
def add_braces_to_openid_regex():
    try:
        import openid.urinorm as urinorm
    except __HOLE__:
        return

    if hasattr(urinorm, 'uri_illegal_char_re'):
        if urinorm.uri_illegal_char_re.search("{"):
            # Invalid regexp for RedIRIS. Try to avoid it.
            urinorm.uri_illegal_char_re = re.compile(
                urinorm.uri_illegal_char_re.pattern.replace('A-Z', 'A-Z{}'))
ImportError
dataset/ETHPy150Open weblabdeusto/weblabdeusto/server/src/voodoo/patcher.py/add_braces_to_openid_regex
5,150
def main():
    '''
    Store configuration and interfaces into the datastore
    How to run this from command line:
        bin/store_interfaces -s system name [ -of filename | -sf filename | -fc true|false]
        -of   Load object definition file
        -sf   Load service definition file
        -fc   Force clean the database

    Example:
        Load all object and service definitions
        bin/python bin/store_interfaces -s mysysname

        Load all object and service definitions with force clean the database
        bin/python bin/store_interfaces -s mysysname -fc

        Load object definition from a file
        bin/python bin/store_interfaces -s mysysname -of obj/data/coi/org.yml

        Load service definition from a file
        bin/python bin/store_interfaces -s mysysname -sf obj/services/coi/datastore_service.yml
    '''
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', type=str,
                        help='Additional config files to load or dict config content.',
                        default=[])
    parser.add_argument('-fc', '--force_clean', action='store_true',
                        help='Force clean.')
    parser.add_argument("-of", "--object", dest="fobject",
                        help="Load object definition from a file")
    parser.add_argument("-s", "--sysname", dest="sysname", help="System name")
    parser.add_argument("-sf", "--service", dest="fservice",
                        help="Load service definition from a file")

    options, extra = parser.parse_known_args()
    args, command_line_config = parse_args(extra)

    print "store_interfaces: Storing ION config and interfaces in datastore, with options:", str(options)

    # -------------------------------------------------------------------------
    # Store config and interfaces

    # Set global testing flag to False. We are running as standalone script. This is NO TEST.
    bootstrap.testing = False

    # Set sysname if provided in startup argument
    if options.sysname:
        bootstrap.set_sys_name(options.sysname)

    # Load config override if provided. Supports variants literal and list of paths
    config_override = None
    if options.config:
        if '{' in options.config:
            # Variant 1: Dict of config values
            try:
                eval_value = ast.literal_eval(options.config)
                config_override = eval_value
            except __HOLE__:
                raise Exception("Value error in config arg '%s'" % options.config)
        else:
            # Variant 2: List of paths
            from pyon.util.config import Config
            config_override = Config([options.config]).data

    # bootstrap_config - Used for running this store_interfaces script
    bootstrap_config = config.read_local_configuration(['res/config/pyon_min_boot.yml'])
    config.apply_local_configuration(bootstrap_config, pyon.DEFAULT_LOCAL_CONFIG_PATHS)
    if config_override:
        config.apply_configuration(bootstrap_config, config_override)
    config.apply_configuration(bootstrap_config, command_line_config)

    # Override sysname from config file or command line
    if not options.sysname and bootstrap_config.get_safe("system.name", None):
        new_sysname = bootstrap_config.get_safe("system.name")
        bootstrap.set_sys_name(new_sysname)

    # Delete sysname datastores if option "force_clean" is set
    if options.force_clean:
        from pyon.datastore import clear_couch_util
        from pyon.util.file_sys import FileSystem
        print "store_interfaces: force_clean=True. DROP DATASTORES for sysname=%s" % bootstrap.get_sys_name()
        pyon_config = config.read_standard_configuration()  # Initial pyon.yml + pyon.local.yml
        clear_couch_util.clear_couch(bootstrap_config, prefix=bootstrap.get_sys_name(),
                                     sysname=bootstrap.get_sys_name())
        FileSystem._clean(pyon_config)

    # ion_config - Holds the new CFG object for the system (independent of this tool's config)
    ion_config = config.read_standard_configuration()
    if config_override:
        config.apply_configuration(ion_config, config_override)
    config.apply_configuration(ion_config, command_line_config)

    # -------------------------------------------------------------------------
    # Store config and interfaces

    iadm = InterfaceAdmin(bootstrap.get_sys_name(), config=bootstrap_config)

    # Make sure core datastores exist
    iadm.create_core_datastores()

    # Store system CFG properties
    iadm.store_config(ion_config)

    # Store system interfaces
    iadm.store_interfaces(options.fobject, options.fservice)

    iadm.close()
ValueError
dataset/ETHPy150Open ooici/pyon/scripts/store_interfaces.py/main
5,151
def _weight_by_vector(trajectories, w_vector):
    r"""weights the values of `trajectories` given a weighting vector
    `w_vector`.

    Each value in `trajectories` will be weighted by the 'rate of change'
    to 'optimal rate of change' ratio. The 'rate of change' of a vector
    measures how each point in the vector changes with respect to its
    predecessor point. The 'optimal rate of change' is the rate of change
    in which each point in the vector performs the same change than its
    predecessor, meaning that when calling this function over evenly
    spaced `w_vector` values, no change will be reflected on the output.

    Parameters
    ----------
    trajectories: pandas.DataFrame
        Values to weight
    w_vector: pandas.Series
        Values used to weight `trajectories`

    Returns
    -------
    pandas.DataFrame
        A weighted version of `trajectories`.

    Raises
    ------
    ValueError
        If `trajectories` and `w_vector` don't have equal lengths
        If `w_vector` is not a gradient
    TypeError
        If `trajectories` and `w_vector` are not iterables
    """
    try:
        if len(trajectories) != len(w_vector):
            raise ValueError("trajectories (%d) & w_vector (%d) must be equal "
                             "lengths" % (len(trajectories), len(w_vector)))
    except __HOLE__:
        raise TypeError("trajectories and w_vector must be iterables")

    # check no repeated values are passed in the weighting vector
    if len(set(w_vector)) != len(w_vector):
        raise ValueError("The weighting vector must be a gradient")

    # no need to weight in case of a one element vector
    if len(w_vector) == 1:
        return trajectories

    # Cast to float so divisions have a floating point resolution
    total_length = float(max(w_vector) - min(w_vector))

    # Reflects the expected gradient between subsequent values in w_vector
    # the first value isn't weighted so subtract one from the number of
    # elements
    optimal_gradient = total_length/(len(w_vector)-1)

    # for all elements apply the weighting function
    for i, idx in enumerate(trajectories.index):
        # Skipping the first element is it doesn't need to be weighted
        if i != 0:
            trajectories.ix[idx] = (trajectories.ix[idx] * optimal_gradient /
                                    (np.abs((w_vector[i] - w_vector[i-1]))))
    return trajectories
TypeError
dataset/ETHPy150Open biocore/scikit-bio/skbio/stats/gradient.py/_weight_by_vector
5,152
@experimental(as_of="0.4.0")
def __init__(self, coords, prop_expl, metadata_map,
             trajectory_categories=None, sort_category=None, axes=3,
             weighted=False):
    if not trajectory_categories:
        # If trajectory_categories is not provided, use all the categories
        # present in the metadata map
        trajectory_categories = metadata_map.keys()
    else:
        # Check that trajectory_categories are in metadata_map
        for category in trajectory_categories:
            if category not in metadata_map:
                raise ValueError("Category %s not present in metadata."
                                 % category)

    # Check that sort_categories is in metadata_map
    if sort_category and sort_category not in metadata_map:
        raise ValueError("Sort category %s not present in metadata."
                         % sort_category)

    if axes == 0:
        # If axes == 0, we should compute the trajectories for all axes
        axes = len(prop_expl)
    elif axes > len(prop_expl) or axes < 0:
        # Axes should be 0 <= axes <= len(prop_expl)
        raise ValueError("axes should be between 0 and the max number of "
                         "axes available (%d), found: %d "
                         % (len(prop_expl), axes))

    # Restrict coordinates to those axes that we actually need to compute
    self._coords = coords.ix[:, :axes-1]
    self._prop_expl = prop_expl[:axes]
    self._metadata_map = metadata_map
    self._weighted = weighted

    # Remove any samples from coords not present in mapping file
    # and remove any samples from metadata_map not present in coords
    self._normalize_samples()

    # Create groups
    self._make_groups(trajectory_categories, sort_category)

    # Compute the weighting_vector
    self._weighting_vector = None
    if weighted:
        if not sort_category:
            raise ValueError("You should provide a sort category if you "
                             "want to weight the trajectories")
        try:
            self._weighting_vector = \
                self._metadata_map[sort_category].astype(np.float64)
        except __HOLE__:
            raise ValueError("The sorting category must be numeric")

    # Initialize the message buffer
    self._message_buffer = []
ValueError
dataset/ETHPy150Open biocore/scikit-bio/skbio/stats/gradient.py/GradientANOVA.__init__
5,153
def _get_group_trajectories(self, group_name, sids):
    r"""Compute the trajectory results for `group_name` containing the
    samples `sids`.

    Weights the data if `self._weighted` is True and ``len(sids) > 1``

    Parameters
    ----------
    group_name : str
        The name of the group
    sids : list of str
        The sample ids in the group

    Returns
    -------
    GroupResults
        The trajectory results for the given group

    Raises
    ------
    RuntimeError
        If sids is an empty list
    """
    # We multiply the coord values with the prop_expl
    trajectories = self._coords.ix[sids] * self._prop_expl

    if trajectories.empty:
        # Raising a RuntimeError since in a usual execution this should
        # never happen. The only way this can happen is if the user
        # directly calls this method, which shouldn't be done
        # (that's why the method is private)
        raise RuntimeError("No samples to process, an empty list cannot "
                           "be processed")

    # The weighting can only be done over trajectories with a length
    # greater than 1
    if self._weighted and len(sids) > 1:
        trajectories_copy = deepcopy(trajectories)
        try:
            trajectories = _weight_by_vector(trajectories_copy,
                                             self._weighting_vector[sids])
        except (FloatingPointError, __HOLE__):
            self._message_buffer.append("Could not weight group, no "
                                        "gradient in the the "
                                        "weighting vector.\n")
            trajectories = trajectories_copy

    return self._compute_trajectories_results(group_name,
                                              trajectories.ix[sids])
ValueError
dataset/ETHPy150Open biocore/scikit-bio/skbio/stats/gradient.py/GradientANOVA._get_group_trajectories
5,154
def _fileobj_to_fd(fileobj):
    """Return a file descriptor from a file object.

    Parameters:
    fileobj -- file object or file descriptor

    Returns:
    corresponding file descriptor

    Raises:
    ValueError if the object is invalid
    """
    if isinstance(fileobj, six.integer_types):
        fd = fileobj
    else:
        try:
            fd = int(fileobj.fileno())
        except (AttributeError, __HOLE__, ValueError):
            raise ValueError("Invalid file object: "
                             "{0!r}".format(fileobj))
    if fd < 0:
        raise ValueError("Invalid file descriptor: {0}".format(fd))
    return fd
TypeError
dataset/ETHPy150Open dpkp/kafka-python/kafka/selectors34.py/_fileobj_to_fd
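Assuming the _fileobj_to_fd helper above is importable (it needs the six dependency the module uses), its contract is easy to demonstrate: integers pass through, file-like objects are asked for fileno(), and anything else becomes a ValueError. A quick sketch, run in a normal terminal session so sys.stdin has a real descriptor:

import sys

assert _fileobj_to_fd(0) == 0                            # raw fd, unchanged
assert _fileobj_to_fd(sys.stdin) == sys.stdin.fileno()   # file-like object
try:
    _fileobj_to_fd(object())                             # no fileno() at all
except ValueError as exc:
    print(exc)                                           # "Invalid file object: ..."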
5,155
def __getitem__(self, fileobj): try: fd = self._selector._fileobj_lookup(fileobj) return self._selector._fd_to_key[fd] except __HOLE__: raise KeyError("{0!r} is not registered".format(fileobj))
KeyError
dataset/ETHPy150Open dpkp/kafka-python/kafka/selectors34.py/_SelectorMapping.__getitem__
5,156
def get_key(self, fileobj): """Return the key associated to a registered file object. Returns: SelectorKey for this file object """ mapping = self.get_map() if mapping is None: raise RuntimeError('Selector is closed') try: return mapping[fileobj] except __HOLE__: raise KeyError("{0!r} is not registered".format(fileobj))
KeyError
dataset/ETHPy150Open dpkp/kafka-python/kafka/selectors34.py/BaseSelector.get_key
5,157
def _fileobj_lookup(self, fileobj): """Return a file descriptor from a file object. This wraps _fileobj_to_fd() to do an exhaustive search in case the object is invalid but we still have it in our map. This is used by unregister() so we can unregister an object that was previously registered even if it is closed. It is also used by _SelectorMapping. """ try: return _fileobj_to_fd(fileobj) except __HOLE__: # Do an exhaustive search. for key in self._fd_to_key.values(): if key.fileobj is fileobj: return key.fd # Raise ValueError after all. raise
ValueError
dataset/ETHPy150Open dpkp/kafka-python/kafka/selectors34.py/_BaseSelectorImpl._fileobj_lookup
5,158
def unregister(self, fileobj): try: key = self._fd_to_key.pop(self._fileobj_lookup(fileobj)) except __HOLE__: raise KeyError("{0!r} is not registered".format(fileobj)) return key
KeyError
dataset/ETHPy150Open dpkp/kafka-python/kafka/selectors34.py/_BaseSelectorImpl.unregister
5,159
def modify(self, fileobj, events, data=None): # TODO: Subclasses can probably optimize this even further. try: key = self._fd_to_key[self._fileobj_lookup(fileobj)] except __HOLE__: raise KeyError("{0!r} is not registered".format(fileobj)) if events != key.events: self.unregister(fileobj) key = self.register(fileobj, events, data) elif data != key.data: # Use a shortcut to update the data. key = key._replace(data=data) self._fd_to_key[key.fd] = key return key
KeyError
dataset/ETHPy150Open dpkp/kafka-python/kafka/selectors34.py/_BaseSelectorImpl.modify
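selectors34 backports Python 3.4's stdlib selectors API, so modify()'s two paths, a cheap key._replace on a data-only change versus a full unregister/register when the event mask changes, can be observed against the stdlib module itself. A short sketch for platforms with socket.socketpair:

import selectors
import socket

sel = selectors.DefaultSelector()
a, b = socket.socketpair()
sel.register(a, selectors.EVENT_READ, data='old')

# Data-only change: the key is updated in place via _replace().
key = sel.modify(a, selectors.EVENT_READ, data='new')
# Events change: modify() unregisters and re-registers under the hood.
key = sel.modify(a, selectors.EVENT_READ | selectors.EVENT_WRITE, data='new')
print(key.events, key.data)

sel.close()
a.close()
b.close()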
5,160
def _key_from_fd(self, fd): """Return the key associated to a given file descriptor. Parameters: fd -- file descriptor Returns: corresponding key, or None if not found """ try: return self._fd_to_key[fd] except __HOLE__: return None
KeyError
dataset/ETHPy150Open dpkp/kafka-python/kafka/selectors34.py/_BaseSelectorImpl._key_from_fd
5,161
def unregister(self, fileobj): key = super(EpollSelector, self).unregister(fileobj) try: self._epoll.unregister(key.fd) except __HOLE__: # This can happen if the FD was closed since it # was registered. pass return key
IOError
dataset/ETHPy150Open dpkp/kafka-python/kafka/selectors34.py/EpollSelector.unregister
5,162
def select(self, timeout=None): if timeout is None: timeout = -1 elif timeout <= 0: timeout = 0 else: # epoll_wait() has a resolution of 1 millisecond, round away # from zero to wait *at least* timeout seconds. timeout = math.ceil(timeout * 1e3) * 1e-3 # epoll_wait() expects `maxevents` to be greater than zero; # we want to make sure that `select()` can be called when no # FD is registered. max_ev = max(len(self._fd_to_key), 1) ready = [] try: fd_event_list = self._epoll.poll(timeout, max_ev) except __HOLE__ as exc: if exc.errno == EINTR: return ready else: raise for fd, event in fd_event_list: events = 0 if event & ~select.EPOLLIN: events |= EVENT_WRITE if event & ~select.EPOLLOUT: events |= EVENT_READ key = self._key_from_fd(fd) if key: ready.append((key, events & key.events)) return ready
IOError
dataset/ETHPy150Open dpkp/kafka-python/kafka/selectors34.py/EpollSelector.select
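The timeout handling above rounds away from zero because epoll_wait() only has millisecond resolution; truncating a small positive timeout to zero would turn a blocking wait into a busy poll. A worked example of the rounding:

import math

timeout = 0.0004                       # 0.4 ms requested
ms = math.ceil(timeout * 1e3) * 1e-3   # rounds up, never down
print(ms)                              # 0.001 -- waits at least the request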
5,163
def select(self, timeout=None): if timeout is None: timeout = None elif timeout <= 0: timeout = 0 else: # devpoll() has a resolution of 1 millisecond, round away from # zero to wait *at least* timeout seconds. timeout = math.ceil(timeout * 1e3) ready = [] try: fd_event_list = self._devpoll.poll(timeout) except __HOLE__ as exc: if exc.errno == EINTR: return ready else: raise for fd, event in fd_event_list: events = 0 if event & ~select.POLLIN: events |= EVENT_WRITE if event & ~select.POLLOUT: events |= EVENT_READ key = self._key_from_fd(fd) if key: ready.append((key, events & key.events)) return ready
OSError
dataset/ETHPy150Open dpkp/kafka-python/kafka/selectors34.py/DevpollSelector.select
5,164
def unregister(self, fileobj): key = super(KqueueSelector, self).unregister(fileobj) if key.events & EVENT_READ: kev = select.kevent(key.fd, select.KQ_FILTER_READ, select.KQ_EV_DELETE) try: self._kqueue.control([kev], 0, 0) except OSError: # This can happen if the FD was closed since it # was registered. pass if key.events & EVENT_WRITE: kev = select.kevent(key.fd, select.KQ_FILTER_WRITE, select.KQ_EV_DELETE) try: self._kqueue.control([kev], 0, 0) except __HOLE__: # See comment above. pass return key
OSError
dataset/ETHPy150Open dpkp/kafka-python/kafka/selectors34.py/KqueueSelector.unregister
5,165
def select(self, timeout=None): timeout = None if timeout is None else max(timeout, 0) max_ev = len(self._fd_to_key) ready = [] try: kev_list = self._kqueue.control(None, max_ev, timeout) except __HOLE__ as exc: if exc.errno == EINTR: return ready else: raise for kev in kev_list: fd = kev.ident flag = kev.filter events = 0 if flag == select.KQ_FILTER_READ: events |= EVENT_READ if flag == select.KQ_FILTER_WRITE: events |= EVENT_WRITE key = self._key_from_fd(fd) if key: ready.append((key, events & key.events)) return ready
OSError
dataset/ETHPy150Open dpkp/kafka-python/kafka/selectors34.py/KqueueSelector.select
5,166
def __getattr__(self, name): try: return tuple([(self.x, self.y)['xy'.index(c)] \ for c in name]) except __HOLE__: raise AttributeError, name
ValueError
dataset/ETHPy150Open ardekantur/pyglet/contrib/toys/euclid.py/Vector2.__getattr__
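This __getattr__ gives euclid vectors GLSL-style swizzling: any attribute spelled from 'x' and 'y' yields a tuple of components, and an unknown letter falls through to AttributeError. The same trick in a self-contained, Python-3-compatible sketch (Vec2 is illustrative, not euclid's class):

class Vec2(object):
    def __init__(self, x, y):
        self.x, self.y = x, y

    def __getattr__(self, name):
        # Only called for attributes not found normally, i.e. swizzles.
        try:
            return tuple((self.x, self.y)['xy'.index(c)] for c in name)
        except ValueError:           # a letter other than x/y
            raise AttributeError(name)

v = Vec2(1, 2)
print(v.yx)    # (2, 1)
print(v.xxy)   # (1, 1, 2)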
5,167
def __setattr__(self, name, value): if len(name) == 1: object.__setattr__(self, name, value) else: try: l = [self.x, self.y] for c, v in map(None, name, value): l['xy'.index(c)] = v self.x, self.y = l except __HOLE__: raise AttributeError, name
ValueError
dataset/ETHPy150Open ardekantur/pyglet/contrib/toys/euclid.py/Vector2.__setattr__
5,168
def __getattr__(self, name): try: return tuple([(self.x, self.y, self.z)['xyz'.index(c)] \ for c in name]) except __HOLE__: raise AttributeError, name
ValueError
dataset/ETHPy150Open ardekantur/pyglet/contrib/toys/euclid.py/Vector3.__getattr__
5,169
def __setattr__(self, name, value): if len(name) == 1: object.__setattr__(self, name, value) else: try: l = [self.x, self.y, self.z] for c, v in map(None, name, value): l['xyz'.index(c)] = v self.x, self.y, self.z = l except __HOLE__: raise AttributeError, name
ValueError
dataset/ETHPy150Open ardekantur/pyglet/contrib/toys/euclid.py/Vector3.__setattr__
5,170
def force_bytes(s): try: return s.encode('utf-8') except (AttributeError, __HOLE__): return s
UnicodeDecodeError
dataset/ETHPy150Open gavinwahl/sdb/sdb/util.py/force_bytes
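The two-exception net above makes force_bytes a no-op for values that are already bytes: on Python 3, bytes objects have no .encode at all (AttributeError), while on Python 2 a non-ASCII str trips the implicit ASCII decode inside .encode (UnicodeDecodeError). Assuming the function above is in scope:

print(force_bytes(u'h\xe9llo'))   # text -> UTF-8 bytes
print(force_bytes(b'\xff'))       # already bytes -> returned unchanged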
5,171
def _obtain_lock_or_raise(self):
        """Create a lock file as flag for other instances, mark our instance as lock-holder

        :raise IOError: if a lock was already present or a lock file could not be written"""
        if self._has_lock():
            return
        lock_file = self._lock_file_path()
        if os.path.isfile(lock_file):
            raise IOError("Lock for file %r already exists, delete %r in case the lock is illegal" % (self._file_path, lock_file))

        try:
            fd = os.open(lock_file, os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0)
            os.close(fd)
        except __HOLE__,e:
            raise IOError(str(e))

        self._owns_lock = True
OSError
dataset/ETHPy150Open codeinn/vcs/vcs/utils/lockfiles.py/LockFile._obtain_lock_or_raise
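The os.O_CREAT | os.O_EXCL combination is what makes this lock atomic: the kernel guarantees the open fails if the path already exists, so two processes racing to create it cannot both win. A standalone sketch (the demo path is illustrative):

import os
import tempfile

path = os.path.join(tempfile.gettempdir(), 'demo.lock')
fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_EXCL)
os.close(fd)
try:
    os.open(path, os.O_WRONLY | os.O_CREAT | os.O_EXCL)
except OSError:
    print('lock already held')   # the second creator always loses
os.remove(path)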
5,172
def _release_lock(self):
        """Release our lock if we have one"""
        if not self._has_lock():
            return

        # if someone removed our file beforehand, let's just flag this issue
        # instead of failing, to make it more usable.
        lfp = self._lock_file_path()
        try:
            # on bloody windows, the file needs write permissions to be removable.
            # Why ...
            if os.name == 'nt':
                os.chmod(lfp, 0777)
            # END handle win32
            os.remove(lfp)
        except __HOLE__:
            pass

        self._owns_lock = False
OSError
dataset/ETHPy150Open codeinn/vcs/vcs/utils/lockfiles.py/LockFile._release_lock
5,173
def _supportsSymlinks(self):
        """
        Check for symlink support usable for Twisted's purposes.

        @return: C{True} if symlinks are supported on the current platform,
                 otherwise C{False}.
        @rtype: L{bool}
        """
        if self.isWindows():
            # We do the isWindows() check as newer Pythons support the symlink
            # support in Vista+, but only if you have some obscure permission
            # (SeCreateSymbolicLinkPrivilege), which can only be given on
            # platforms with msc.exe (so, Business/Enterprise editions).
            # This uncommon requirement makes the Twisted test suite fail
            # in 99.99% of cases as general users don't have permission to do
            # it, even if there is "symlink support".
            return False
        else:
            # If we're not on Windows, check for existence of os.symlink.
            try:
                os.symlink
            except __HOLE__:
                return False
            else:
                return True
AttributeError
dataset/ETHPy150Open twisted/twisted/twisted/python/runtime.py/Platform._supportsSymlinks
5,174
def supportsThreads(self): """ Can threads be created? @return: C{True} if the threads are supported on the current platform. @rtype: C{bool} """ try: return imp.find_module(_threadModule)[0] is None except __HOLE__: return False
ImportError
dataset/ETHPy150Open twisted/twisted/twisted/python/runtime.py/Platform.supportsThreads
5,175
def supportsINotify(self): """ Return C{True} if we can use the inotify API on this platform. @since: 10.1 """ try: from twisted.python._inotify import INotifyError, init except __HOLE__: return False if self.isDocker(): return False try: os.close(init()) except INotifyError: return False return True
ImportError
dataset/ETHPy150Open twisted/twisted/twisted/python/runtime.py/Platform.supportsINotify
5,176
def onVerify(self, verify): try: receivingServer = jid.JID(verify['from']).host originatingServer = jid.JID(verify['to']).host except (__HOLE__, jid.InvalidFormat): raise error.StreamError('improper-addressing') if originatingServer not in self.service.domains: raise error.StreamError('host-unknown') if (self.xmlstream.otherEntity and receivingServer != self.xmlstream.otherEntity.host): raise error.StreamError('invalid-from') streamID = verify.getAttribute('id', '') key = unicode(verify) calculatedKey = generateKey(self.service.secret, receivingServer, originatingServer, streamID) validity = (key == calculatedKey) and 'valid' or 'invalid' reply = domish.Element((NS_DIALBACK, 'verify')) reply['from'] = originatingServer reply['to'] = receivingServer reply['id'] = streamID reply['type'] = validity self.xmlstream.send(reply)
KeyError
dataset/ETHPy150Open ralphm/wokkel/wokkel/server.py/XMPPServerListenAuthenticator.onVerify
5,177
@jsexpose(body_cls=TriggerTypeAPI, status_code=http_client.CREATED) def post(self, triggertype): """ Create a new triggertype. Handles requests: POST /triggertypes/ """ try: triggertype_db = TriggerTypeAPI.to_model(triggertype) triggertype_db = TriggerType.add_or_update(triggertype_db) except (__HOLE__, ValueError) as e: LOG.exception('Validation failed for triggertype data=%s.', triggertype) abort(http_client.BAD_REQUEST, str(e)) return else: extra = {'triggertype_db': triggertype_db} LOG.audit('TriggerType created. TriggerType.id=%s' % (triggertype_db.id), extra=extra) if not triggertype_db.parameters_schema: TriggerTypeController._create_shadow_trigger(triggertype_db) triggertype_api = TriggerTypeAPI.from_model(triggertype_db) return triggertype_api
ValidationError
dataset/ETHPy150Open StackStorm/st2/st2api/st2api/controllers/v1/triggers.py/TriggerTypeController.post
5,178
@jsexpose(arg_types=[str], body_cls=TriggerTypeAPI) def put(self, triggertype_ref_or_id, triggertype): triggertype_db = self._get_by_ref_or_id(ref_or_id=triggertype_ref_or_id) triggertype_id = triggertype_db.id try: validate_not_part_of_system_pack(triggertype_db) except ValueValidationException as e: abort(http_client.BAD_REQUEST, str(e)) try: triggertype_db = TriggerTypeAPI.to_model(triggertype) if triggertype.id is not None and len(triggertype.id) > 0 and \ triggertype.id != triggertype_id: LOG.warning('Discarding mismatched id=%s found in payload and using uri_id=%s.', triggertype.id, triggertype_id) triggertype_db.id = triggertype_id old_triggertype_db = triggertype_db triggertype_db = TriggerType.add_or_update(triggertype_db) except (ValidationError, __HOLE__) as e: LOG.exception('Validation failed for triggertype data=%s', triggertype) abort(http_client.BAD_REQUEST, str(e)) return extra = {'old_triggertype_db': old_triggertype_db, 'new_triggertype_db': triggertype_db} LOG.audit('TriggerType updated. TriggerType.id=%s' % (triggertype_db.id), extra=extra) triggertype_api = TriggerTypeAPI.from_model(triggertype_db) return triggertype_api
ValueError
dataset/ETHPy150Open StackStorm/st2/st2api/st2api/controllers/v1/triggers.py/TriggerTypeController.put
5,179
@staticmethod def _create_shadow_trigger(triggertype_db): try: trigger_type_ref = triggertype_db.get_reference().ref trigger = {'name': triggertype_db.name, 'pack': triggertype_db.pack, 'type': trigger_type_ref, 'parameters': {}} trigger_db = TriggerService.create_or_update_trigger_db(trigger) extra = {'trigger_db': trigger_db} LOG.audit('Trigger created for parameter-less TriggerType. Trigger.id=%s' % (trigger_db.id), extra=extra) except (ValidationError, __HOLE__) as e: LOG.exception('Validation failed for trigger data=%s.', trigger) # Not aborting as this is convenience. return except StackStormDBObjectConflictError as e: LOG.warn('Trigger creation of "%s" failed with uniqueness conflict. Exception: %s', trigger, str(e)) # Not aborting as this is convenience. return
ValueError
dataset/ETHPy150Open StackStorm/st2/st2api/st2api/controllers/v1/triggers.py/TriggerTypeController._create_shadow_trigger
5,180
@jsexpose(body_cls=TriggerAPI, status_code=http_client.CREATED) def post(self, trigger): """ Create a new trigger. Handles requests: POST /triggers/ """ try: trigger_db = TriggerService.create_trigger_db(trigger) except (__HOLE__, ValueError) as e: LOG.exception('Validation failed for trigger data=%s.', trigger) abort(http_client.BAD_REQUEST, str(e)) return extra = {'trigger': trigger_db} LOG.audit('Trigger created. Trigger.id=%s' % (trigger_db.id), extra=extra) trigger_api = TriggerAPI.from_model(trigger_db) return trigger_api
ValidationError
dataset/ETHPy150Open StackStorm/st2/st2api/st2api/controllers/v1/triggers.py/TriggerController.post
5,181
@jsexpose(arg_types=[str], body_cls=TriggerAPI) def put(self, trigger_id, trigger): trigger_db = TriggerController.__get_by_id(trigger_id) try: if trigger.id is not None and trigger.id is not '' and trigger.id != trigger_id: LOG.warning('Discarding mismatched id=%s found in payload and using uri_id=%s.', trigger.id, trigger_id) trigger_db = TriggerAPI.to_model(trigger) trigger_db.id = trigger_id trigger_db = Trigger.add_or_update(trigger_db) except (__HOLE__, ValueError) as e: LOG.exception('Validation failed for trigger data=%s', trigger) abort(http_client.BAD_REQUEST, str(e)) return extra = {'old_trigger_db': trigger, 'new_trigger_db': trigger_db} LOG.audit('Trigger updated. Trigger.id=%s' % (trigger.id), extra=extra) trigger_api = TriggerAPI.from_model(trigger_db) return trigger_api
ValidationError
dataset/ETHPy150Open StackStorm/st2/st2api/st2api/controllers/v1/triggers.py/TriggerController.put
5,182
@staticmethod def __get_by_id(trigger_id): try: return Trigger.get_by_id(trigger_id) except (__HOLE__, ValidationError): LOG.exception('Database lookup for id="%s" resulted in exception.', trigger_id) abort(http_client.NOT_FOUND)
ValueError
dataset/ETHPy150Open StackStorm/st2/st2api/st2api/controllers/v1/triggers.py/TriggerController.__get_by_id
5,183
@staticmethod def __get_by_name(trigger_name): try: return [Trigger.get_by_name(trigger_name)] except __HOLE__ as e: LOG.debug('Database lookup for name="%s" resulted in exception : %s.', trigger_name, e) return []
ValueError
dataset/ETHPy150Open StackStorm/st2/st2api/st2api/controllers/v1/triggers.py/TriggerController.__get_by_name
5,184
def savitzky_golay(y, window_size, order=2, deriv=0, rate=1): if window_size % 2 != 1: window_size += 1 try: window_size = np.abs(np.int(window_size)) order = np.abs(np.int(order)) except __HOLE__, msg: raise ValueError("window_size and order have to be of type int") if window_size % 2 != 1 or window_size < 1: raise TypeError("window_size size must be a positive odd number") if window_size < order + 2: raise TypeError("window_size is too small for the polynomials order") order_range = range(order+1) half_window = (window_size -1) // 2 # precompute coefficients b = np.mat([[k**i for i in order_range] for k in range(-half_window, half_window+1)]) m = np.linalg.pinv(b).A[deriv] * rate**deriv * factorial(deriv) # pad the signal at the extremes with # values taken from the signal itself firstvals = y[0] - np.abs( y[1:half_window+1][::-1] - y[0] ) lastvals = y[-1] + np.abs(y[-half_window-1:-1][::-1] - y[-1]) y = np.concatenate((firstvals, y, lastvals)) return np.convolve( m[::-1], y, mode='valid')
ValueError
dataset/ETHPy150Open taoliu/MACS/test/test_callsummits.py/savitzky_golay
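A quick smoke test for the filter above, assuming numpy and the factorial import the function relies on are in scope (note the snippet itself uses Python 2 except syntax, so it needs a Python 2 interpreter as written). The mirrored padding plus the 'valid' convolution keep the output the same length as the input:

import numpy as np

t = np.linspace(0, 2 * np.pi, 100)
noisy = np.sin(t) + np.random.normal(0, 0.1, t.size)
smooth = savitzky_golay(noisy, window_size=11, order=3)
assert smooth.shape == noisy.shape   # length preserved by the padding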
5,185
def setUp(self): """ Patch the L{ls} module's time function so the results of L{lsLine} are deterministic. """ self.now = 123456789 def fakeTime(): return self.now self.patch(ls, 'time', fakeTime) # Make sure that the timezone ends up the same after these tests as # it was before. if 'TZ' in os.environ: self.addCleanup(operator.setitem, os.environ, 'TZ', os.environ['TZ']) self.addCleanup(time.tzset) else: def cleanup(): # os.environ.pop is broken! Don't use it! Ever! Or die! try: del os.environ['TZ'] except __HOLE__: pass time.tzset() self.addCleanup(cleanup)
KeyError
dataset/ETHPy150Open kuri65536/python-for-android/python-modules/twisted/twisted/conch/test/test_cftp.py/ListingTests.setUp
5,186
def send(self, template_name, from_email, recipient_list, context, cc=None, bcc=None, fail_silently=False, headers=None, template_prefix=None, template_suffix=None, template_dir=None, file_extension=None, auth_user=None, auth_password=None, connection=None, **kwargs): connection = connection or get_connection(username=auth_user, password=auth_password, fail_silently=fail_silently) e = self.get_email_message(template_name, context, from_email=from_email, to=recipient_list, cc=cc, bcc=bcc, headers=headers, template_prefix=template_prefix, template_suffix=template_suffix, template_dir=template_dir, file_extension=file_extension) e.connection = connection try: e.send(fail_silently) except __HOLE__: raise EmailRenderException("Couldn't render plain or html parts") return e.extra_headers.get('Message-Id', None)
NameError
dataset/ETHPy150Open BradWhittington/django-templated-email/templated_email/backends/vanilla_django.py/TemplateBackend.send
5,187
def recv_message(**kwargs): connection = BrokerConnection('amqp://%(mq_user)s:%(mq_password)s@' '%(mq_host)s:%(mq_port)s//' % kwargs['mq_args']) with connection as conn: try: SomeConsumer(conn, **kwargs).run() except __HOLE__: LOG.warning('Quitting %s' % __name__)
KeyboardInterrupt
dataset/ETHPy150Open openstack/entropy/entropy/examples/repair/vmbooter.py/recv_message
5,188
@classmethod def get_date(cls, request, date_type): date_str = cls.get_date_str(request, date_type) if date_str is not None: try: return datetime.datetime.combine( iso_string_to_date(date_str), datetime.time()) except __HOLE__: if date_type == cls.START_DATE: return datetime.datetime.today() - datetime.timedelta(days=cls.default_days) elif date_type == cls.END_DATE: return datetime.datetime.today() else: return None else: return None
ValueError
dataset/ETHPy150Open dimagi/commcare-hq/corehq/apps/accounting/filters.py/DateRangeFilter.get_date
5,189
@property def method(self): ''' Returns the zmq method constant. >>> ZeroSetup('pull', 8000).method == zmq.PULL True ''' try: return getattr(zmq, self._method.upper()) except __HOLE__: raise UnsupportedZmqMethod('Unsupported ZMQ method', self._method, {})
AttributeError
dataset/ETHPy150Open philipbergen/zero/py/zero/__init__.py/ZeroSetup.method
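The property above is a plain string-to-constant lookup via getattr, with the AttributeError re-raised as a friendlier error. The same pattern sketched without pyzmq installed (MockZmq stands in for the real zmq module):

class MockZmq(object):
    PULL, PUSH, PUB, SUB = range(4)

def method_constant(name):
    # Map 'pull' -> MockZmq.PULL; unknown names become a clear error.
    try:
        return getattr(MockZmq, name.upper())
    except AttributeError:
        raise ValueError('Unsupported ZMQ method: %r' % name)

print(method_constant('pull'))   # 0
# method_constant('bogus') -> ValueError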
5,190
@property def point(self): ''' Returns the ZMQ socket string. >>> ZeroSetup('pull', 'tcp://other.host.net:9000') ZeroSetup('pull', 'tcp://other.host.net:9000').binding(True) ''' if str(self._point)[:1] == ':': self._point = self._point[1:] try: int(self._point) if self.bind: return 'tcp://*:%s' % self._point return 'tcp://localhost:%s' % self._point except __HOLE__: return self._point
ValueError
dataset/ETHPy150Open philipbergen/zero/py/zero/__init__.py/ZeroSetup.point
5,191
def zauto(zero, loops, wait=False): 'Keep listening and sending until the loop ends. All received objects are yielded.' try: if zero.setup.replies: for rep, msg in izip(loops, zero): yield msg zero(rep) elif zero.setup.transmits: for msg in loops: res = zero(msg) if zero.setup.yields: yield res else: for _, msg in izip(loops, zero): yield msg except __HOLE__: zero.setup.debug('Quit by user') except StopIteration: zero.setup.debug('Loop ended') finally: if wait: raw_input('Press enter when done.') zero.setup.debug('Closing: %r', zero) zero.close()
KeyboardInterrupt
dataset/ETHPy150Open philipbergen/zero/py/zero/__init__.py/zauto
5,192
def handleRequest(self, req):
        """handles a request by calling the appropriate method the service exposes"""
        name = req["method"]
        params = req["params"]
        id=req["id"]
        obj=None
        try: #to get a callable obj
            obj = getMethodByName(self.service, name)
        except MethodNameNotAllowed,e:
            self.sendResponse(id, None, e)
        except:
            self.sendResponse(id, None, MethodNotFound())
        if obj:
            try: #to call the object with parameters
                rslt = obj(*params)
                self.sendResponse(id, rslt, None)
            except __HOLE__: # wrong arguments
                #todo what if the TypeError was not thrown directly by the callable obj
                s=getTracebackStr()
                self.sendResponse(id, None, InvalidMethodParameters())
            except: #error inside the callable object
                s=getTracebackStr()
                self.sendResponse(id, None, s)
TypeError
dataset/ETHPy150Open anandology/pyjamas/examples/jsonrpc/public/services/jsonrpc/__init__.py/SimpleServiceHandler.handleRequest
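The handler above leans on Python itself to validate arity: calling with the wrong number of positional arguments raises TypeError, which is mapped to a JSON-RPC invalid-params error. The todo comment's caveat still applies (a TypeError raised inside the method body is indistinguishable). A minimal sketch of just the arity mapping:

def dispatch(func, params):
    # Map Python's arity TypeError onto a JSON-RPC style error payload.
    try:
        return {'result': func(*params), 'error': None}
    except TypeError:
        return {'result': None, 'error': 'InvalidMethodParameters'}

print(dispatch(lambda a, b: a + b, [1, 2]))   # {'result': 3, 'error': None}
print(dispatch(lambda a, b: a + b, [1]))      # invalid params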
5,193
def decode(self, stream, encoding, fallback=None): if not hasattr(stream, 'decode'): return stream try: return stream.decode(encoding) except UnicodeDecodeError: if fallback: for enc in fallback: try: return stream.decode(enc) except __HOLE__: pass raise # sync commands
UnicodeDecodeError
dataset/ETHPy150Open SublimeGit/SublimeGit/sgit/cmd.py/Cmd.decode
5,194
def cmd(self, cmd, stdin=None, cwd=None, ignore_errors=False, encoding=None, fallback=None):
        command = self.build_command(cmd)
        environment = self.env()
        encoding = encoding or get_setting('encoding', 'utf-8')
        fallback = fallback or get_setting('fallback_encodings', [])
        try:
            logger.debug("cmd: %s", command)
            if stdin and hasattr(stdin, 'encode'):
                stdin = stdin.encode(encoding)
            if cwd:
                os.chdir(cwd)
            proc = subprocess.Popen(command,
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    startupinfo=self.startupinfo(),
                                    env=environment)
            stdout, stderr = proc.communicate(stdin)
            logger.debug("out: (%s) %s", proc.returncode, [stdout[:100]])
            return (proc.returncode, self.decode(stdout, encoding, fallback), self.decode(stderr, encoding, fallback))
        except OSError as e:
            if ignore_errors:
                # keep the (returncode, stdout, stderr) shape callers unpack
                return (0, '', '')
            sublime.error_message(self.get_executable_error())
            raise SublimeGitException("Could not execute command: %s" % e)
        except __HOLE__ as e:
            if ignore_errors:
                return (0, '', '')
            sublime.error_message(self.get_decoding_error(encoding, fallback))
            raise SublimeGitException("Could not execute command: %s" % command)

    # async commands
UnicodeDecodeError
dataset/ETHPy150Open SublimeGit/SublimeGit/sgit/cmd.py/Cmd.cmd
5,195
def cmd_async(self, cmd, cwd=None, **callbacks): command = self.build_command(cmd) environment = self.env() encoding = get_setting('encoding', 'utf-8') fallback = get_setting('fallback_encodings', []) def async_inner(cmd, cwd, encoding, on_data=None, on_complete=None, on_error=None, on_exception=None): try: logger.debug('async-cmd: %s', cmd) if cwd: os.chdir(cwd) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, startupinfo=self.startupinfo(), env=environment) for line in iter(proc.stdout.readline, b''): logger.debug('async-out: %s', line.strip()) line = self.decode(line, encoding, fallback) if callable(on_data): sublime.set_timeout(partial(on_data, line), 0) proc.wait() logger.debug('async-exit: %s', proc.returncode) if proc.returncode == 0: if callable(on_complete): sublime.set_timeout(partial(on_complete, proc.returncode), 0) else: if callable(on_error): sublime.set_timeout(partial(on_error, proc.returncode), 0) except (OSError, __HOLE__) as e: logger.debug('async-exception: %s' % e) if callable(on_exception): sublime.set_timeout(partial(on_exception, e), 0) thread = threading.Thread(target=partial(async_inner, command, cwd, encoding, **callbacks)) return thread # messages
UnicodeDecodeError
dataset/ETHPy150Open SublimeGit/SublimeGit/sgit/cmd.py/Cmd.cmd_async
5,196
def get_id(self): """ Return the primary key for the model instance. If the model is unsaved, then this value will be ``None``. """ try: return getattr(self, self._primary_key) except __HOLE__: return None
KeyError
dataset/ETHPy150Open coleifer/walrus/walrus/models.py/Model.get_id
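getattr() would normally surface AttributeError, so the KeyError caught above implies walrus fields are descriptors that read from a backing dict. A hypothetical sketch of that shape (Field and Record here are illustrative, not walrus's real classes):

class Field(object):
    def __init__(self, name):
        self.name = name
    def __get__(self, obj, objtype=None):
        return obj._data[self.name]      # KeyError when the value is unset

class Record(object):
    id = Field('id')
    _primary_key = 'id'
    def __init__(self, **data):
        self._data = data
    def get_id(self):
        try:
            return getattr(self, self._primary_key)
        except KeyError:
            return None

print(Record(id=42).get_id())   # 42
print(Record().get_id())        # None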
5,197
def delete(self, for_update=False): """ Delete the given model instance. """ hash_key = self.get_hash_id() try: original_instance = self.load(hash_key, convert_key=False) except __HOLE__: return # Remove from the `all` index. all_index = self._query.all_index() all_index.remove(hash_key) # Remove from the secondary indexes. for field in self._indexes: for index in field.get_indexes(): index.remove(original_instance) if not for_update: for field in self._fields.values(): if isinstance(field, _ContainerField): field._delete(self) # Remove the object itself. self.database.delete(hash_key)
KeyError
dataset/ETHPy150Open coleifer/walrus/walrus/models.py/Model.delete
5,198
def contains(self, task_id, node_id=None, result=None): try: if result is None and node_id is None: return task_id in self._d elif result is None: return node_id in self._d[task_id] else: return result in self._d[task_id][node_id] except __HOLE__: return False
KeyError
dataset/ETHPy150Open BasicWolf/kaylee/kaylee/contrib/storages.py/MemoryTemporalStorage.contains
5,199
def add(self, task_id, result): try: self._d[task_id].append(result) except __HOLE__: self._d[task_id] = [result, ] self._total_count += 1
KeyError
dataset/ETHPy150Open BasicWolf/kaylee/kaylee/contrib/storages.py/MemoryPermanentStorage.add
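The try/append/except-create idiom in add() is the classic hand-rolled alternative to collections.defaultdict; unlike defaultdict, it never materializes an empty list for keys that are only ever read. For comparison, the defaultdict spelling:

from collections import defaultdict

results = defaultdict(list)
results['task-1'].append('result-a')   # first append creates the list
results['task-1'].append('result-b')
print(dict(results))                   # {'task-1': ['result-a', 'result-b']}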