Dataset schema (one record per row):

    column       type     values
    Unnamed: 0   int64    0 .. 10k
    function     string   lengths 79 .. 138k
    label        string   20 classes
    info         string   lengths 42 .. 261
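Each record below is one row of this table: the row index, the function text (Python source in which one exception name has been replaced by the mask token __HOLE__), the label naming the masked exception class, and the info path locating the file in the ETH Py150 corpus. A minimal sketch of loading such a dump and restoring a record follows; the file name exceptions.csv and the pandas-based loader are assumptions for illustration, not part of this listing:

    import pandas as pd

    # Load the dump; columns follow the schema above (assumed CSV export).
    df = pd.read_csv("exceptions.csv")

    # Restore one record by splicing the label back into the masked slot.
    row = df.iloc[0]
    restored = row["function"].replace("__HOLE__", row["label"])
    print(row["info"])   # source file the snippet was extracted from
    print(restored)      # the function with its original except clause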
5,500
def _parse_vtinfo(self, info, use_filename=True):
    name = None
    version = None
    if use_filename and os.path.isfile(info):
        name = info
    else:
        data = info.split(":")
        if len(data) >= 2:
            if use_filename:
                if os.path.isfile(str(data[0])):
                    name = str(data[0])
                else:
                    # maybe we are running on Windows and a full path
                    # was passed as the filename so it has a : separating
                    # the driver letter
                    if system.systemType in ["Windows", "Microsoft"]:
                        if os.path.isfile(":".join(data[:2])):
                            name = ":".join(data[:2])
                            data.pop(0)
                            data[0] = name
            elif not use_filename:
                name = str(data[0])
            # will try to convert version to int
            # if it fails, it's a tag name
            # maybe a tag name contains ':' in its name
            # so we need to bring it back together
            rest = ":".join(data[1:])
            try:
                version = int(rest)
            except __HOLE__:
                version = rest
        elif len(data) == 1:
            if use_filename and os.path.isfile(str(data[0])):
                name = str(data[0])
            elif not use_filename:
                name = str(data[0])
    return (name, version)
ValueError
dataset/ETHPy150Open VisTrails/VisTrails/vistrails/core/application.py/VistrailsApplicationInterface._parse_vtinfo
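(Reading a record: the label line holds the class that fills the mask, so except __HOLE__: in _parse_vtinfo above was except ValueError: in the original VisTrails source named by the info path.)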
5,501
def safe_int(val, allow_zero=True):
    """
    This function converts the six.moves.input values to integers. It handles
    invalid entries, and optionally forbids values of zero.
    """
    try:
        ret = int(val)
    except __HOLE__:
        print("Sorry, '%s' is not a valid integer." % val)
        return False
    if not allow_zero and ret == 0:
        print("Please enter a non-zero integer.")
        return False
    return ret

# Get the current scaling groups
ValueError
dataset/ETHPy150Open rackspace/pyrax/samples/autoscale/add_webhook.py/safe_int
5,502
def userinfo_endpoint(self, request, **kwargs):
    access_token = self._parse_access_token(request)
    shr = SignedHttpRequest(self._get_client_public_key(access_token))
    http_signature = self._parse_signature(request)
    try:
        shr.verify(http_signature, method=request["method"],
                   host=request["host"], path=request["path"],
                   query_params=request["query"],
                   headers=request["headers"], body=request["body"],
                   strict_query_param_verification=True,
                   strict_headers_verification=False)
    except __HOLE__:
        return self._error_response("access_denied",
                                    descr="Could not verify proof of "
                                          "possession")

    return self._do_user_info(self.access_tokens[access_token], **kwargs)
ValidationError
dataset/ETHPy150Open rohe/pyoidc/src/oic/extension/proof_of_possesion.py/PoPProvider.userinfo_endpoint
5,503
def _get_client_public_key(self, access_token):
    _jws = jws.factory(access_token)
    if _jws:
        data = _jws.verify_compact(access_token,
                                   self.keyjar.get_verify_key(owner=""))
        try:
            return keyrep(data["cnf"]["jwk"])
        except __HOLE__:
            raise NonPoPTokenError(
                "Could not extract public key as JWK from access token")

    raise NonPoPTokenError("Unsigned access token, maybe not PoP?")
KeyError
dataset/ETHPy150Open rohe/pyoidc/src/oic/extension/proof_of_possesion.py/PoPProvider._get_client_public_key
5,504
@task
@transaction.commit_manually
def verify_count(upload_id, localstore, language):
    """
    Initialize the verification process by counting the number of geounits
    in the uploaded file. After this step completes, the verify_preload
    method is called.

    Parameters:
        upload_id - The id of the SubjectUpload record.
        localstore - a new subject file that remains when the task is complete
        language - Optional. If provided, translate the status messages
            into the specified language (if message files are complete).
    """
    reader = csv.DictReader(open(localstore, 'r'))
    if len(reader.fieldnames) < 2:
        msg = _('There are missing columns in the uploaded Subject file')
        return {'task_id': None, 'success': False, 'messages': [msg]}

    upload = SubjectUpload.objects.get(id=upload_id)
    upload.subject_name = reader.fieldnames[1][0:50]
    upload.save()
    transaction.commit()

    logger.debug('Created new SubjectUpload transaction record for "%s".',
                 upload.subject_name)

    # do this in bulk!
    # insert upload_id, portable_id, number
    sql = 'INSERT INTO "%s" ("%s","%s","%s") VALUES (%%(upload_id)s, %%(geoid)s, %%(number)s)' % (
        SubjectStage._meta.db_table,
        SubjectStage._meta.fields[1].attname,
        SubjectStage._meta.fields[2].attname,
        SubjectStage._meta.fields[3].attname)
    args = []
    try:
        for row in reader:
            args.append({'upload_id': upload.id,
                         'geoid': row[reader.fieldnames[0]].strip(),
                         'number': row[reader.fieldnames[1]].strip()})
            # django ORM takes about 320s for 280K geounits
            #SubjectStage(upload=upload, portable_id=row[reader.fieldnames[0]],number=row[reader.fieldnames[1]]).save()

        # direct access to db-api takes about 60s for 280K geounits
        cursor = connection.cursor()
        cursor.executemany(sql, tuple(args))

        logger.debug('Bulk loaded CSV records into the staging area.')
    except __HOLE__, aex:
        msg = _('There are an incorrect number of columns in the uploaded '
                'Subject file')
        transaction.rollback()
        return {'task_id': None, 'success': False, 'messages': [msg]}
    except Exception, ex:
        msg = _('Invalid data detected in the uploaded Subject file')
        transaction.rollback()
        return {'task_id': None, 'success': False, 'messages': [msg]}

    nlines = upload.subjectstage_set.all().count()
    geolevel, nunits = LegislativeLevel.get_basest_geolevel_and_count()

    prev_lang = None
    if not language is None:
        prev_lang = get_language()
        activate(language)

    # Validation #1: if the number of geounits in the uploaded file
    # don't match the geounits in the database, the content is not valid
    if nlines != nunits:
        # The number of geounits in the uploaded file do not match the
        # base geolevel geounits
        msg = _('There are an incorrect number of geounits in the uploaded Subject file. ')
        if nlines < nunits:
            missing = nunits - nlines
            msg += _n('There is %(count)d geounit missing.',
                      'There are %(count)d geounits missing.',
                      missing) % {'count': missing}
        else:
            extra = nlines - nunits
            msg += _n('There is %(count)d extra geounit.',
                      'There are %(count)d extra geounits.',
                      extra) % {'count': extra}

        # since the transaction was never committed after all the inserts,
        # this nullifies all the insert statements, so there should be no
        # quarantine to clean up
        transaction.rollback()

        logger.debug(msg)

        upload.status = 'ER'
        upload.save()

        status = {'task_id': None, 'success': False, 'messages': [msg]}
    else:
        # The next task will preload the units into the quarantine table
        task = verify_preload.delay(upload_id, language=language).task_id

        status = {'task_id': task, 'success': True,
                  'messages': [_('Verifying consistency of uploaded geounits ...')]}

    transaction.commit()

    # reset language to default
    if not prev_lang is None:
        activate(prev_lang)

    return status
AttributeError
dataset/ETHPy150Open PublicMapping/DistrictBuilder/django/publicmapping/redistricting/tasks.py/verify_count
5,505
def check_ftest_pvalues(results):
    res = results
    use_t = res.use_t
    k_vars = len(res.params)
    # check default use_t
    pvals = [res.wald_test(np.eye(k_vars)[k], use_f=use_t).pvalue
             for k in range(k_vars)]
    assert_allclose(pvals, res.pvalues, rtol=5e-10, atol=1e-25)
    # automatic use_f based on results class use_t
    pvals = [res.wald_test(np.eye(k_vars)[k]).pvalue
             for k in range(k_vars)]
    assert_allclose(pvals, res.pvalues, rtol=5e-10, atol=1e-25)

    # label for pvalues in summary
    string_use_t = 'P>|z|' if use_t is False else 'P>|t|'
    summ = str(res.summary())
    assert_(string_use_t in summ)

    # try except for models that don't have summary2
    try:
        summ2 = str(res.summary2())
    except __HOLE__:
        summ2 = None
    if summ2 is not None:
        assert_(string_use_t in summ2)

# TODO The following is not (yet) guaranteed across models
#@knownfailureif(True)
AttributeError
dataset/ETHPy150Open statsmodels/statsmodels/statsmodels/tools/_testing.py/check_ftest_pvalues
5,506
def parse_parametrs(p):
    """
    Parses the parameters given from POST or websocket reqs
    expecting the parameters as: "11|par1='asd'|6|par2=1"
    returns a dict like {par1:'asd',par2:1}
    """
    ret = {}
    while len(p) > 1 and p.count('|') > 0:
        s = p.split('|')
        l = int(s[0])  # length of param field
        if l > 0:
            p = p[len(s[0]) + 1:]
            field_name = p.split('|')[0].split('=')[0]
            field_value = p[len(field_name) + 1:l]
            p = p[l + 1:]
            if field_value.count("'") == 0 and field_value.count('"') == 0:
                try:
                    field_value = int(field_value)
                except ValueError:
                    try:
                        field_value = float(field_value)
                    except __HOLE__:
                        pass
            ret[field_name] = field_value
    return ret
ValueError
dataset/ETHPy150Open dddomodossola/remi/remi/server.py/parse_parametrs
5,507
def process_all(self, function):
    self.log.debug('get: %s' % function)
    static_file = re.match(r"^/*res\/(.*)$", function)
    attr_call = re.match(r"^\/*(\w+)\/(\w+)\?{0,1}(\w*\={1}\w+\${0,1})*$", function)
    if (function == '/') or (not function):
        # build the root page once if necessary
        should_call_main = not hasattr(self.client, 'root')
        if should_call_main:
            self.client.root = self.main(*self.server.userdata)

        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.end_headers()
        self.wfile.write(encode_text("<!DOCTYPE html>\n"))
        self.wfile.write(encode_text("<html>\n<head>\n"))
        self.wfile.write(encode_text(
            """<meta content='text/html;charset=utf-8' http-equiv='Content-Type'>
<meta content='utf-8' http-equiv='encoding'>
<meta name="viewport" content="width=device-width, initial-scale=1.0">"""))
        self.wfile.write(encode_text(self.client.css_header))
        self.wfile.write(encode_text(self.client.html_header))
        self.wfile.write(encode_text(self.client.script_header))
        self.wfile.write(encode_text("\n</head>\n<body>\n"))
        # render the HTML replacing any local absolute references to the correct IP of this instance
        html = self.client.root.repr(self.client)
        self.wfile.write(encode_text(html))
        self.wfile.write(encode_text(self.client.html_footer))
        self.wfile.write(encode_text("</body>\n</html>"))
    elif static_file:
        static_paths = [os.path.join(os.path.dirname(__file__), 'res')]
        static_paths.extend(self._app_args.get('static_paths', ()))

        filename = None
        found = False
        for s in reversed(static_paths):
            filename = os.path.join(s, static_file.groups()[0])
            if os.path.exists(filename):
                found = True
                break
        if not found:
            self.send_response(404)
            return
        mimetype, encoding = mimetypes.guess_type(filename)
        self.send_response(200)
        self.send_header('Content-type', mimetype if mimetype else 'application/octet-stream')
        if self.server.enable_file_cache:
            self.send_header('Cache-Control', 'public, max-age=86400')
        self.end_headers()
        with open(filename, 'rb') as f:
            content = f.read()
            self.wfile.write(content)
    elif attr_call:
        params = list()
        param_dict = parse_qs(urlparse(function).query)
        for k in param_dict:
            params.append(param_dict[k])

        widget, function = attr_call.group(1, 2)
        try:
            content, headers = get_method_by(get_method_by(self.client.root, widget), function)(*params)
            if content is None:
                self.send_response(503)
                return
            self.send_response(200)
        except __HOLE__:
            self.log.error('attr %s/%s call error' % (widget, function), exc_info=True)
            self.send_response(404)
            return
        except (TypeError, AttributeError):
            self.log.error('attr %s/%s not available' % (widget, function))
            self.send_response(503)
            return

        for k in headers.keys():
            self.send_header(k, headers[k])
        self.end_headers()
        self.wfile.write(content)
IOError
dataset/ETHPy150Open dddomodossola/remi/remi/server.py/App.process_all
5,508
def start(mainGuiClass, **kwargs):
    """This method starts the webserver with a specific App subclass."""
    try:
        debug = kwargs.pop('debug')
    except __HOLE__:
        debug = False
    logging.basicConfig(level=logging.DEBUG if debug else logging.INFO,
                        format='%(name)-16s %(levelname)-8s %(message)s')
    s = Server(mainGuiClass, start=True, **kwargs)
    s.serve_forever()
KeyError
dataset/ETHPy150Open dddomodossola/remi/remi/server.py/start
5,509
def testFrozenPhoneNumber(self):
    # Python version extra tests
    gb_mobile = PhoneNumber(country_code=44, national_number=7912345678)
    it_number = PhoneNumber(country_code=39, national_number=236618300,
                            italian_leading_zero=True)
    frozen_gb_mobile1 = FrozenPhoneNumber(country_code=44, national_number=7912345678)
    frozen_it_number1 = FrozenPhoneNumber(country_code=39, national_number=236618300,
                                          italian_leading_zero=True)
    frozen_gb_mobile2 = FrozenPhoneNumber(gb_mobile)
    frozen_it_number2 = FrozenPhoneNumber(it_number)
    self.assertEqual(frozen_gb_mobile1, gb_mobile)
    self.assertEqual(frozen_gb_mobile2, gb_mobile)
    self.assertEqual(frozen_gb_mobile1, frozen_gb_mobile2)
    self.assertEqual(frozen_it_number1, it_number)
    self.assertEqual(frozen_it_number2, it_number)
    self.assertEqual(frozen_it_number1, frozen_it_number2)
    self.assertEqual(hash(frozen_it_number1), hash(frozen_it_number2))
    self.assertNotEqual(hash(frozen_it_number1), hash(frozen_gb_mobile1))
    phonedict = {frozen_it_number1: 1, frozen_gb_mobile1: 2}
    self.assertEqual(phonedict[frozen_it_number1], 1)
    try:
        frozen_gb_mobile1.country_code = 12
        self.fail("Should not be able to modify FrozenPhoneNumber")
    except TypeError:
        pass
    try:
        frozen_gb_mobile2.raw_input = ""
        self.fail("Should not be able to modify FrozenPhoneNumber")
    except __HOLE__:
        pass
    try:
        frozen_gb_mobile1.clear()
        self.fail("Should not be able to modify FrozenPhoneNumber")
    except TypeError:
        pass
    try:
        frozen_gb_mobile1.merge_from(frozen_it_number1)
        self.fail("Should not be able to modify FrozenPhoneNumber")
    except TypeError:
        pass
    try:
        del frozen_gb_mobile1.country_code
        self.fail("Should not be able to modify FrozenPhoneNumber")
    except TypeError:
        pass
    # Coverage test
    frozen_gb_mobile1._mutable = True
    del frozen_gb_mobile1.country_code
TypeError
dataset/ETHPy150Open daviddrysdale/python-phonenumbers/python/tests/phonenumbertest.py/PhoneNumberTest.testFrozenPhoneNumber
5,510
@require_json
@require_POST
@require_api_key
def update_build_status(request, repository_name, build_number):
    """
    Update a build status. Useful when another
    separate micro-service runs the builds.
    """
    try:
        status = request.json['status']
    except (__HOLE__, KeyError):
        return JsonResponse({'error': 'Missing status field.'}, status=400)

    build = get_object_or_404(Build, repository__name=repository_name,
                              number=build_number)
    try:
        build.update_status(status)
    except InvalidStatus as error:
        return JsonResponse({'error': str(error)}, status=400)
    except MissingToken:
        return JsonResponse({'error': 'No token.'}, status=400)

    return JsonResponse({})
TypeError
dataset/ETHPy150Open m-vdb/github-buildservice-boilerplate/buildservice/views/api.py/update_build_status
5,511
def play(self, utterance, start=0, end=None):
    """
    Play the given audio sample.

    :param utterance: The utterance id of the sample to play
    """
    # Method 1: os audio dev.
    try:
        import ossaudiodev
        try:
            dsp = ossaudiodev.open('w')
            dsp.setfmt(ossaudiodev.AFMT_S16_LE)
            dsp.channels(1)
            dsp.speed(16000)
            dsp.write(self.audiodata(utterance, start, end))
            dsp.close()
        except __HOLE__ as e:
            print(("can't acquire the audio device; please "
                   "activate your audio device."), file=sys.stderr)
            print("system error message:", str(e), file=sys.stderr)
        return
    except ImportError:
        pass

    # Method 2: pygame
    try:
        # FIXME: this won't work under python 3
        import pygame.mixer, StringIO
        pygame.mixer.init(16000)
        f = StringIO.StringIO(self.wav(utterance, start, end))
        pygame.mixer.Sound(f).play()
        while pygame.mixer.get_busy():
            time.sleep(0.01)
        return
    except ImportError:
        pass

    # Method 3: complain. :)
    print(("you must install pygame or ossaudiodev "
           "for audio playback."), file=sys.stderr)
IOError
dataset/ETHPy150Open nltk/nltk/nltk/corpus/reader/timit.py/TimitCorpusReader.play
5,512
def _GetJdbcTypeForArg(self, arg):
    """Get the JDBC type which corresponds to the given Python object type."""
    arg_jdbc_type = _PYTHON_TYPE_TO_JDBC_TYPE.get(type(arg))
    if arg_jdbc_type:
        return arg_jdbc_type
    for python_t, jdbc_t in _PYTHON_TYPE_TO_JDBC_TYPE.items():
        if isinstance(arg, python_t):
            return jdbc_t
    try:
        return self._GetJdbcTypeForArg(arg[0])
    except __HOLE__:
        raise TypeError('unknown type')
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/storage/speckle/python/api/rdbms.py/Cursor._GetJdbcTypeForArg
5,513
def _AddBindVariablesToRequest(self, statement, args, bind_variable_factory,
                               direction=client_pb2.BindVariableProto.IN):
    """Add args to the request BindVariableProto list.

    Args:
      statement: The SQL statement.
      args: Sequence of arguments to turn into BindVariableProtos.
      bind_variable_factory: A callable which returns new BindVariableProtos.
      direction: The direction to set for all variables in the request.

    Raises:
      InterfaceError: Unknown type used as a bind variable.
    """
    if isinstance(args, dict):
        args = _ConvertArgsDictToList(statement, args)
    for i, arg in enumerate(args):
        bv = bind_variable_factory()
        bv.position = i + 1
        bv.direction = direction
        if arg is None:
            bv.type = jdbc_type.NULL
        else:
            try:
                bv.type, bv.value = self._EncodeVariable(arg)
            except __HOLE__:
                raise InterfaceError('unknown type %s for arg %d' %
                                     (type(arg), i))
TypeError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/storage/speckle/python/api/rdbms.py/Cursor._AddBindVariablesToRequest
5,514
def fetchone(self):
    """Fetches the next row of a query result set.

    Returns:
      A sequence, or None when no more data is available.

    Raises:
      InternalError: The cursor has been closed, or no statement has been
        executed yet.
    """
    self._CheckOpen()
    self._CheckExecuted('fetchone() called before execute')
    if not self._rows and self._more_rows:
        self._FetchMoreRows()
    try:
        return self._rows.popleft()
    except __HOLE__:
        return None
IndexError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/storage/speckle/python/api/rdbms.py/Cursor.fetchone
5,515
def destroy(self, obj, silent=True):
    was_dropped = False
    with self._lock:
        try:
            self._used_objs.remove(obj)
            was_dropped = True
        except __HOLE__:
            if not silent:
                raise
    if was_dropped and self._after_remove is not None:
        self._after_remove(obj)
ValueError
dataset/ETHPy150Open pinterest/pymemcache/pymemcache/pool.py/ObjectPool.destroy
5,516
def release(self, obj, silent=True):
    with self._lock:
        try:
            self._used_objs.remove(obj)
            self._free_objs.append(obj)
        except __HOLE__:
            if not silent:
                raise
ValueError
dataset/ETHPy150Open pinterest/pymemcache/pymemcache/pool.py/ObjectPool.release
5,517
def check_exists(fips_dir):
    """test if cmake-gui is in the path

    :returns: True if cmake-gui is in the path
    """
    try:
        out = subprocess.check_output(['cmake-gui', '--version'])
        return True
    except (__HOLE__, subprocess.CalledProcessError):
        return False

#-------------------------------------------------------------------------------
OSError
dataset/ETHPy150Open floooh/fips/mod/tools/cmake_gui.py/check_exists
5,518
def parseConfigFile(configFile=None):
    """Returns a configparser.SafeConfigParser instance with configs
    read from the config file. Default location of the config file is
    at ~/.wakatime.cfg.
    """
    if not configFile:
        configFile = os.path.join(os.path.expanduser('~'), '.wakatime.cfg')

    configs = configparser.SafeConfigParser()
    try:
        with open(configFile, 'r', encoding='utf-8') as fh:
            try:
                configs.readfp(fh)
            except configparser.Error:
                print(traceback.format_exc())
                return None
    except __HOLE__:
        print(u('Error: Could not read from config file {0}').format(u(configFile)))
    return configs
IOError
dataset/ETHPy150Open wakatime/sublime-wakatime/packages/wakatime/main.py/parseConfigFile
5,519
def parseArguments():
    """Parse command line arguments and configs from ~/.wakatime.cfg.
    Command line arguments take precedence over config file settings.
    Returns instances of ArgumentParser and SafeConfigParser.
    """

    # define supported command line arguments
    parser = argparse.ArgumentParser(
        description='Common interface for the WakaTime api.')
    parser.add_argument('--entity', dest='entity', metavar='FILE',
                        action=FileAction,
                        help='absolute path to file for the heartbeat; can also be a ' +
                             'url, domain, or app when --entity-type is not file')
    parser.add_argument('--file', dest='file', action=FileAction,
                        help=argparse.SUPPRESS)
    parser.add_argument('--key', dest='key',
                        help='your wakatime api key; uses api_key from ' +
                             '~/.wakatime.cfg by default')
    parser.add_argument('--write', dest='is_write', action='store_true',
                        help='when set, tells api this heartbeat was triggered from ' +
                             'writing to a file')
    parser.add_argument('--plugin', dest='plugin',
                        help='optional text editor plugin name and version ' +
                             'for User-Agent header')
    parser.add_argument('--time', dest='timestamp', metavar='time', type=float,
                        help='optional floating-point unix epoch timestamp; ' +
                             'uses current time by default')
    parser.add_argument('--lineno', dest='lineno',
                        help='optional line number; current line being edited')
    parser.add_argument('--cursorpos', dest='cursorpos',
                        help='optional cursor position in the current file')
    parser.add_argument('--entity-type', dest='entity_type',
                        help='entity type for this heartbeat. can be one of "file", ' +
                             '"domain", or "app"; defaults to file.')
    parser.add_argument('--proxy', dest='proxy',
                        help='optional https proxy url; for example: ' +
                             'https://user:pass@localhost:8080')
    parser.add_argument('--project', dest='project',
                        help='optional project name')
    parser.add_argument('--alternate-project', dest='alternate_project',
                        help='optional alternate project name; auto-discovered project ' +
                             'takes priority')
    parser.add_argument('--alternate-language', dest='alternate_language',
                        help='optional alternate language name; auto-detected language ' +
                             'takes priority')
    parser.add_argument('--hostname', dest='hostname',
                        help='hostname of current machine.')
    parser.add_argument('--disableoffline', dest='offline', action='store_false',
                        help='disables offline time logging instead of queuing logged time')
    parser.add_argument('--hidefilenames', dest='hidefilenames', action='store_true',
                        help='obfuscate file names; will not send file names to api')
    parser.add_argument('--exclude', dest='exclude', action='append',
                        help='filename patterns to exclude from logging; POSIX regex ' +
                             'syntax; can be used more than once')
    parser.add_argument('--include', dest='include', action='append',
                        help='filename patterns to log; when used in combination with ' +
                             '--exclude, files matching include will still be logged; ' +
                             'POSIX regex syntax; can be used more than once')
    parser.add_argument('--ignore', dest='ignore', action='append',
                        help=argparse.SUPPRESS)
    parser.add_argument('--extra-heartbeats', dest='extra_heartbeats',
                        action='store_true',
                        help='reads extra heartbeats from STDIN as a JSON array until EOF')
    parser.add_argument('--logfile', dest='logfile',
                        help='defaults to ~/.wakatime.log')
    parser.add_argument('--apiurl', dest='api_url',
                        help='heartbeats api url; for debugging with a local server')
    parser.add_argument('--timeout', dest='timeout', type=int,
                        help='number of seconds to wait when sending heartbeats to api; ' +
                             'defaults to 60 seconds')
    parser.add_argument('--config', dest='config',
                        help='defaults to ~/.wakatime.cfg')
    parser.add_argument('--verbose', dest='verbose', action='store_true',
                        help='turns on debug messages in log file')
    parser.add_argument('--version', action='version', version=__version__)

    # parse command line arguments
    args = parser.parse_args()

    # use current unix epoch timestamp by default
    if not args.timestamp:
        args.timestamp = time.time()

    # parse ~/.wakatime.cfg file
    configs = parseConfigFile(args.config)
    if configs is None:
        return args, configs

    # update args from configs
    if not args.key:
        default_key = None
        if configs.has_option('settings', 'api_key'):
            default_key = configs.get('settings', 'api_key')
        elif configs.has_option('settings', 'apikey'):
            default_key = configs.get('settings', 'apikey')
        if default_key:
            args.key = default_key
        else:
            parser.error('Missing api key')
    if not args.entity:
        if args.file:
            args.entity = args.file
        else:
            parser.error('argument --entity is required')
    if not args.exclude:
        args.exclude = []
    if configs.has_option('settings', 'ignore'):
        try:
            for pattern in configs.get('settings', 'ignore').split("\n"):
                if pattern.strip() != '':
                    args.exclude.append(pattern)
        except TypeError:  # pragma: nocover
            pass
    if configs.has_option('settings', 'exclude'):
        try:
            for pattern in configs.get('settings', 'exclude').split("\n"):
                if pattern.strip() != '':
                    args.exclude.append(pattern)
        except TypeError:  # pragma: nocover
            pass
    if not args.include:
        args.include = []
    if configs.has_option('settings', 'include'):
        try:
            for pattern in configs.get('settings', 'include').split("\n"):
                if pattern.strip() != '':
                    args.include.append(pattern)
        except __HOLE__:  # pragma: nocover
            pass
    if args.offline and configs.has_option('settings', 'offline'):
        args.offline = configs.getboolean('settings', 'offline')
    if not args.hidefilenames and configs.has_option('settings', 'hidefilenames'):
        args.hidefilenames = configs.getboolean('settings', 'hidefilenames')
    if not args.proxy and configs.has_option('settings', 'proxy'):
        args.proxy = configs.get('settings', 'proxy')
    if not args.verbose and configs.has_option('settings', 'verbose'):
        args.verbose = configs.getboolean('settings', 'verbose')
    if not args.verbose and configs.has_option('settings', 'debug'):
        args.verbose = configs.getboolean('settings', 'debug')
    if not args.logfile and configs.has_option('settings', 'logfile'):
        args.logfile = configs.get('settings', 'logfile')
    if not args.api_url and configs.has_option('settings', 'api_url'):
        args.api_url = configs.get('settings', 'api_url')
    if not args.timeout and configs.has_option('settings', 'timeout'):
        try:
            args.timeout = int(configs.get('settings', 'timeout'))
        except ValueError:
            print(traceback.format_exc())

    return args, configs
TypeError
dataset/ETHPy150Open wakatime/sublime-wakatime/packages/wakatime/main.py/parseArguments
5,520
def should_exclude(entity, include, exclude):
    if entity is not None and entity.strip() != '':
        try:
            for pattern in include:
                try:
                    compiled = re.compile(pattern, re.IGNORECASE)
                    if compiled.search(entity):
                        return False
                except re.error as ex:
                    log.warning(u('Regex error ({msg}) for include pattern: {pattern}').format(
                        msg=u(ex),
                        pattern=u(pattern),
                    ))
        except __HOLE__:  # pragma: nocover
            pass
        try:
            for pattern in exclude:
                try:
                    compiled = re.compile(pattern, re.IGNORECASE)
                    if compiled.search(entity):
                        return pattern
                except re.error as ex:
                    log.warning(u('Regex error ({msg}) for exclude pattern: {pattern}').format(
                        msg=u(ex),
                        pattern=u(pattern),
                    ))
        except TypeError:  # pragma: nocover
            pass
    return False
TypeError
dataset/ETHPy150Open wakatime/sublime-wakatime/packages/wakatime/main.py/should_exclude
5,521
def RenderAjax(self, request, response):
    """Return the count on unseen notifications."""
    response = super(NotificationCount, self).RenderAjax(request, response)
    number = 0

    try:
        user_fd = aff4.FACTORY.Open(aff4.ROOT_URN.Add("users").Add(
            request.user), token=request.token)
        notifications = user_fd.Get(user_fd.Schema.PENDING_NOTIFICATIONS)
        if notifications:
            number = len(notifications)
    except __HOLE__:
        pass

    return renderers.JsonResponse(dict(number=number))
IOError
dataset/ETHPy150Open google/grr/grr/gui/plugins/notifications.py/NotificationCount.RenderAjax
5,522
def GetUserSettings(self, request):
    try:
        user_record = aff4.FACTORY.Open(
            aff4.ROOT_URN.Add("users").Add(request.user), "GRRUser",
            token=request.token)

        return user_record.Get(user_record.Schema.GUI_SETTINGS)
    except __HOLE__:
        return aff4.GRRUser.SchemaCls.GUI_SETTINGS()
IOError
dataset/ETHPy150Open google/grr/grr/gui/plugins/notifications.py/UserSettingsDialog.GetUserSettings
5,523
def BuildTable(self, start_row, end_row, request):
    """Add all the notifications to this table."""
    row_index = 0
    search_term = request.REQ.get("sSearch")

    # We modify this object by changing the notification from pending to
    # shown.
    try:
        user_fd = aff4.FACTORY.Open(aff4.ROOT_URN.Add("users").Add(
            request.user), aff4_type="GRRUser", token=request.token)
    except __HOLE__:
        return

    # Hack for sorting. Requires retrieval of all notifications.
    notifications = list(user_fd.ShowNotifications(reset=False))
    for notification in sorted(notifications, key=lambda x: x.timestamp,
                               reverse=True):
        if row_index < start_row:
            continue
        if row_index > end_row:
            break

        if (search_term and
                search_term.lower() not in notification.message.lower()):
            continue

        row = {"Message": notification.message,
               "Target": self.FormatFromTemplate(
                   self.target_template,
                   hash=self.BuildHashFromNotification(notification),
                   notification_type=notification.type,
                   target=notification.subject),
               "Timestamp": rdfvalue.RDFDatetime(notification.timestamp)}
        self.AddRow(row, row_index)
        row_index += 1

    flow.GRRFlow.StartFlow(flow_name="ResetUserNotifications",
                           token=request.token)
IOError
dataset/ETHPy150Open google/grr/grr/gui/plugins/notifications.py/ViewNotifications.BuildTable
5,524
def handle(self, *args, **options):
    path = options.get('path')

    @transaction.atomic
    def do_backup(src_path, dest_path):
        # perform a simple file-copy backup of the database
        # first we need a shared lock on the database, issuing a select()
        # will do this for us
        cursor = connection.cursor()
        cursor.execute("SELECT count(*) from sqlite_master")
        # now copy the file
        try:
            shutil.copy(src_path, dest_path)
        except __HOLE__:
            # TODO: use the IOError message as message for the user
            raise CommandError("Database backup failed.")

    database_path = get_database_path_from_settings()
    if database_path:
        do_backup(database_path, path)
        self.stdout.write('Database %s successfully stored at %s.' % (database_path, path))
    else:
        raise CommandError(
            'Default database is not SQLite3. Only SQLite3 databases '
            'can currently be backuped.')
IOError
dataset/ETHPy150Open OpenSlides/OpenSlides/openslides/core/management/commands/backupdb.py/Command.handle
5,525
def enableDebug(globals_dict):
    templates = dict(globals_dict)

    class TemplateWrapper:
        """ Wrapper around templates.

            To better trace and control template usage.
        """
        def __init__(self, name, value):
            self.name = name
            self.value = value

        def __str__(self):
            return self.value

        def __mod__(self, other):
            assert type(other) is dict, self.name

            for key in other.keys():
                if "%%(%s)" % key not in self.value:
                    from logging import warning

                    warning(
                        "Extra value '%s' provided to template '%s'.",
                        key,
                        self.name
                    )

            try:
                return self.value % other
            except __HOLE__ as e:
                raise KeyError(self.name, *e.args)

        def split(self, sep):
            return self.value.split(sep)

    for template_name, template_value in iterItems(templates):
        # Ignore internal attribute like "__name__" that the module will also
        # have of course.
        if template_name.startswith('_'):
            continue

        if type(template_value) is str:
            globals_dict[template_name] = TemplateWrapper(
                template_name,
                template_value
            )
KeyError
dataset/ETHPy150Open kayhayen/Nuitka/nuitka/codegen/templates/TemplateDebugWrapper.py/enableDebug
5,526
def setUp(self):
    self.__stdoutSaved = sys.stdout
    try:
        from StringIO import StringIO
    except __HOLE__:
        from io import StringIO
    self.__out = StringIO()
    sys.stdout = self.__out
ImportError
dataset/ETHPy150Open Cimpress-MCP/JustReleaseNotes/tests/artifacters/GitHubReleases_Test.py/GitHubReleases_Test.setUp
5,527
def sites_google_site_proxyfree4u(self):
    proxies = []
    url = "https://sites.google.com/site/proxyfree4u/proxy-list?offset="
    # fetch the latest 10 pages
    for i in range(0, 100, 10):
        # print url + str(i)
        soup = BeautifulSoup(self.getpage(url + str(i)))
        http_client = HTTPClient()
        for link in soup.find_all('a'):
            fetch_url = link.get('href')
            # get the correct URL
            if fetch_url == None:
                continue
            if fetch_url.find("&single=true&gid=0&output=txt") != -1:
                request = HTTPRequest(
                    url=fetch_url,
                    connect_timeout=30,
                    request_timeout=30,
                    follow_redirects=False,
                    use_gzip=True,
                    user_agent=Proxy_Miner.User_agent)
                # sometime during tests the response was 599.
                # re-sending the packet 4 times
                for times in range(0, 4):
                    try:
                        response = http_client.fetch(request)
                    except HTTPError as e:
                        if e.code in [408, 599]:
                            continue
                        # getting the cookies. In order to get the proxy list
                        # 2 cookies are needed
                        first_redirect = e.response.headers['Location']
                        cookie = e.response.headers['Set-Cookie']
                        cookie_headers = HTTPHeaders()
                        cookie_headers.add("Cookie", cookie.split(";")[0])
                        req2 = HTTPRequest(
                            url=first_redirect,
                            connect_timeout=30.0,
                            request_timeout=30.0,
                            follow_redirects=False,
                            use_gzip=True,
                            headers=cookie_headers,
                            user_agent=Proxy_Miner.User_agent)
                        try:
                            http_client.fetch(req2)
                        except __HOLE__ as e2:
                            second_redirect = e2.response.headers['Location']
                            # get the second cookie
                            cookie2 = e2.response.headers['Set-Cookie']
                            cookie_headers.add("Cookie", cookie2.split(";")[0])
                            req3 = HTTPRequest(
                                url=second_redirect,
                                connect_timeout=30.0,
                                request_timeout=30.0,
                                follow_redirects=True,
                                use_gzip=True,
                                headers=cookie_headers,
                                user_agent=Proxy_Miner.User_agent)
                            resp3 = http_client.fetch(req3)
                            # print resp3.body
                            lines = resp3.body.split("\n")
                            counter = 0
                            for j in range(1, len(lines)):
                                proxy = lines[j].split(":")
                                if self.check_proxy(proxy):
                                    proxies.append(proxy)
                                # if the list contains non valid proxies
                                else:
                                    counter += 1
                                    if counter == 15:
                                        break
                    break
    return proxies

#---------------------------------------------------------------
HTTPError
dataset/ETHPy150Open owtf/owtf/framework/http/proxy/outbound_proxyminer.py/Proxy_Miner.sites_google_site_proxyfree4u
5,528
def clean(self, value):
    "Since the ProductSelectField does not specify choices by itself, accept any returned value"
    try:
        return int(value)
    except __HOLE__:
        pass
ValueError
dataset/ETHPy150Open awesto/django-shop/shop/cascade/plugin_base.py/ProductSelectField.clean
5,529
def set_initial_product(self, initial):
    try:
        # check if that product still exists, otherwise return nothing
        Model = apps.get_model(*initial['link']['model'].split('.'))
        initial['product'] = Model.objects.get(pk=initial['link']['pk']).pk
    except (KeyError, __HOLE__, Model.DoesNotExist):
        pass
ValueError
dataset/ETHPy150Open awesto/django-shop/shop/cascade/plugin_base.py/CatalogLinkForm.set_initial_product
5,530
def get_render_template(self, context, instance, placeholder):
    render_type = instance.glossary.get('render_type')
    if render_type not in ('form', 'summary',):
        render_type = 'form'
    try:
        template_names = [
            '{0}/checkout/{1}'.format(shop_settings.APP_LABEL,
                                      self.template_leaf_name).format(render_type),
            'shop/checkout/{}'.format(self.template_leaf_name).format(render_type),
        ]
        return select_template(template_names)
    except (__HOLE__, TemplateDoesNotExist):
        return self.render_template
AttributeError
dataset/ETHPy150Open awesto/django-shop/shop/cascade/plugin_base.py/DialogFormPluginBase.get_render_template
5,531
def create_schema(storage_index_url):
    # making three tries, in case of communication errors with elasticsearch
    for _ in xrange(3):
        try:
            # delete index if already exist
            response = requests.head(storage_index_url)
            if response.status_code == 200:
                response = requests.delete(storage_index_url)
                response.raise_for_status()

            # create index
            response = requests.post(storage_index_url,
                                     data=json.dumps(SETTINGS))
            response.raise_for_status()

            # set mappings
            response = requests.put("{0}/blueprint/_mapping".format(
                storage_index_url), json.dumps(BLUEPRINT_SCHEMA))
            response.raise_for_status()
            response = requests.put("{0}/deployment/_mapping".format(
                storage_index_url), json.dumps(DEPLOYMENT_SCHEMA))
            response.raise_for_status()
            response = requests.put("{0}/node/_mapping".format(
                storage_index_url), json.dumps(NODE_SCHEMA))
            response.raise_for_status()
            response = requests.put("{0}/node_instance/_mapping".format(
                storage_index_url), json.dumps(NODE_INSTANCE_SCHEMA))
            response.raise_for_status()

            print 'Done creating elasticsearch storage schema.'
            break
        except __HOLE__:
            pass
HTTPError
dataset/ETHPy150Open cloudify-cosmo/cloudify-manager/tests/testenv/es_schema_creator.py/create_schema
5,532
def getGameSpeed(self):
    try:
        return self.gameSpeed
    except __HOLE__:
        self.gameSpeed = 0
    return self.gameSpeed
AttributeError
dataset/ETHPy150Open karlgluck/heroes-of-the-storm-replay-parser/stormreplay/analyzer.py/StormReplayAnalyzer.getGameSpeed
5,533
def getTalentsReader(self):
    try:
        return self.talentsReader
    except __HOLE__:
        replayVersion = self.reader.getReplayProtocolVersion()
        try:
            self.talentsReader = __import__('stormreplay.talents%s' % replayVersion, fromlist=['talents'])
        except ImportError:
            raise Exception('Unsupported StormReplay build number for talents: %i' % replayVersion)
        return self.talentsReader
AttributeError
dataset/ETHPy150Open karlgluck/heroes-of-the-storm-replay-parser/stormreplay/analyzer.py/StormReplayAnalyzer.getTalentsReader
5,534
def getTalents(self):
    try:
        return self.talents
    except __HOLE__:
        self.talents = [[] for _ in xrange(10)]
        talentsReader = self.getTalentsReader()
        generator = talentsReader.decode_game_events_talent_choices(
            self.reader.getReplayGameEvents(),
            self.getPlayersHeroChoiceArray())
        for choice in generator:
            self.talents[choice['_userid']].append({
                'seconds': self.gameloopToSeconds(choice['_gameloop']),
                'level': choice['m_level'],
                'name': choice['m_talentName'],
                'description': choice['m_talentDescription'],
                'index': choice['m_talentIndex'],
            })
    return self.talents
AttributeError
dataset/ETHPy150Open karlgluck/heroes-of-the-storm-replay-parser/stormreplay/analyzer.py/StormReplayAnalyzer.getTalents
5,535
def getTeamTalentTierTimes(self):
    try:
        return self.teamTalentTierTimes
    except __HOLE__:
        teamTalentTierLevel = [[], []]
        teamTalentTiersFirstPick = [[], []]
        teamTalentTiersLastPick = [[], []]
        players = self.getPlayers()
        for playerIndex, playerTalentPicks in enumerate(self.getTalents()):
            player = players[playerIndex]
            for talentTierIndex, talentPick in enumerate(playerTalentPicks):
                talentPickTime = talentPick['seconds']
                teamIndex = player['m_teamId']

                tiersFirstPick = teamTalentTiersFirstPick[teamIndex]
                if (talentTierIndex >= len(tiersFirstPick)):
                    tiersFirstPick.append(talentPickTime)
                elif (talentPickTime < tiersFirstPick[talentTierIndex]):
                    tiersFirstPick[talentTierIndex] = talentPickTime

                tiersLastPick = teamTalentTiersLastPick[teamIndex]
                if (talentTierIndex >= len(tiersLastPick)):
                    tiersLastPick.append(talentPickTime)
                elif (talentPickTime > tiersLastPick[talentTierIndex]):
                    tiersLastPick[talentTierIndex] = talentPickTime

                if (talentTierIndex >= len(teamTalentTierLevel[teamIndex])):
                    teamTalentTierLevel[teamIndex].append(talentPick['level'])
                else:
                    teamTalentTierLevel[teamIndex][talentTierIndex] = talentPick['level']

        self.teamTalentTierTimes = [[], []]
        for teamIndex in xrange(2):
            for talentTierIndex, level in enumerate(teamTalentTierLevel[teamIndex]):
                self.teamTalentTierTimes[teamIndex].append({
                    'earliest': teamTalentTiersFirstPick[teamIndex][talentTierIndex],
                    'latest': teamTalentTiersLastPick[teamIndex][talentTierIndex],
                    'level': level,
                })
    return self.teamTalentTierTimes
AttributeError
dataset/ETHPy150Open karlgluck/heroes-of-the-storm-replay-parser/stormreplay/analyzer.py/StormReplayAnalyzer.getTeamTalentTierTimes
5,536
def getTeamLevels(self):
    try:
        return self.teamLevels
    except __HOLE__:
        teamTalentTierTimes = self.getTeamTalentTierTimes()
        self.teamLevels = [[], []]
        for teamIndex in xrange(2):
            talentTierTimes = teamTalentTierTimes[teamIndex]
            levelTimes = [0] * talentTierTimes[-1]['level']
            for firstTier, nextTier in zip(talentTierTimes, talentTierTimes[1:]):
                levelRange = nextTier['level'] - firstTier['level']
                for level in xrange(firstTier['level'], nextTier['level'] + 1):
                    levelIndex = level - 1
                    lerp = float(level - firstTier['level']) / levelRange
                    time = lerp * (nextTier['earliest'] - firstTier['earliest']) + firstTier['earliest']
                    levelTimes[levelIndex] = time
            levelToTalentTierInfo = {}
            for tierInfo in talentTierTimes:
                levelToTalentTierInfo[str(tierInfo['level'])] = tierInfo
            for levelIndex, time in enumerate(levelTimes):
                level = levelIndex + 1
                levelInfo = {
                    'level': levelIndex + 1,
                    'seconds': time,
                    'is_talent_tier': False,
                }
                if levelToTalentTierInfo.has_key(str(level)):
                    tierInfo = levelToTalentTierInfo[str(level)]
                    levelInfo['is_talent_tier'] = True
                    levelInfo['earliest_talent_picked_time'] = tierInfo['earliest']
                    levelInfo['latest_talent_picked_time'] = tierInfo['latest']
                self.teamLevels[teamIndex].append(levelInfo)
    return self.teamLevels
AttributeError
dataset/ETHPy150Open karlgluck/heroes-of-the-storm-replay-parser/stormreplay/analyzer.py/StormReplayAnalyzer.getTeamLevels
5,537
def getMapName(self):
    try:
        return self.mapName
    except __HOLE__:
        self.mapName = self.reader.getReplayDetails()['m_title']['utf8']
    return self.mapName
AttributeError
dataset/ETHPy150Open karlgluck/heroes-of-the-storm-replay-parser/stormreplay/analyzer.py/StormReplayAnalyzer.getMapName
5,538
def getPlayersHeroChoiceArray(self):
    try:
        return self.playersHeroArray
    except __HOLE__:
        self.playersHeroArray = [None] * 10
        for i, player in enumerate(self.getPlayerSpawnInfo()):
            self.playersHeroArray[i] = player['hero']
    return self.playersHeroArray

# returns array indexed by user ID
AttributeError
dataset/ETHPy150Open karlgluck/heroes-of-the-storm-replay-parser/stormreplay/analyzer.py/StormReplayAnalyzer.getPlayersHeroChoiceArray
5,539
def getPlayers(self):
    try:
        return self.players
    except __HOLE__:
        self.players = [None] * 10
        for i, player in enumerate(self.getReplayDetails()['m_playerList']):
            # TODO: confirm that m_workingSetSlotId == i always
            toon = player['m_toon']
            player['m_toonId'] = "%i-%s-%i-%i" % (toon['m_region'], toon['m_programId'], toon['m_realm'], toon['m_id'])
            player['m_name'] = player['m_name']['utf8']
            player['m_controlPlayerId'] = i + 1
            self.players[i] = player
    return self.players

# returns array indexed by user ID
AttributeError
dataset/ETHPy150Open karlgluck/heroes-of-the-storm-replay-parser/stormreplay/analyzer.py/StormReplayAnalyzer.getPlayers
5,540
def getPlayerSpawnInfo(self):
    try:
        return self.playerSpawnInfo
    except __HOLE__:
        self.playerSpawnInfo = [None] * 10
        playerIdToUserId = {}
        for event in self.getReplayTrackerEvents():
            if event['_event'] == 'NNet.Replay.Tracker.SPlayerSetupEvent':
                playerIdToUserId[event['m_playerId']] = event['m_userId']
            elif event['_event'] == 'NNet.Replay.Tracker.SUnitBornEvent' and (int(event['_gameloop']) > 0):
                playerId = event['m_controlPlayerId']
                if (playerIdToUserId.has_key(playerId)):
                    playerIndex = playerIdToUserId[playerId]  # always playerId-1 so far, but this is safer
                    self.playerSpawnInfo[playerIndex] = {
                        'hero': event['m_unitTypeName']['utf8'],
                        'unit_tag': event['m_unitTag']
                    }
                    del playerIdToUserId[playerId]
                    if len(playerIdToUserId) == 0:
                        break
    return self.playerSpawnInfo
AttributeError
dataset/ETHPy150Open karlgluck/heroes-of-the-storm-replay-parser/stormreplay/analyzer.py/StormReplayAnalyzer.getPlayerSpawnInfo
5,541
def getMatchUTCTimestamp(self):
    try:
        return self.utcTimestamp
    except __HOLE__:
        self.utcTimestamp = (self.getReplayDetails()['m_timeUTC'] / 10000000) - 11644473600
    return self.utcTimestamp
AttributeError
dataset/ETHPy150Open karlgluck/heroes-of-the-storm-replay-parser/stormreplay/analyzer.py/StormReplayAnalyzer.getMatchUTCTimestamp
5,542
def getChat(self):
    try:
        return self.chat
    except __HOLE__:
        self.chat = []
        for messageEvent in self.getReplayMessageEvents():
            if (messageEvent['_event'] != 'NNet.Game.SChatMessage'):
                continue
            userId = messageEvent['_userid']['m_userId']
            chatData = {
                't': self.gameloopToTimestamp(messageEvent['_gameloop']),
                'user': userId,
                'msg': messageEvent['m_string']['utf8'],
            }
            self.chat.append(chatData)
    return self.chat
AttributeError
dataset/ETHPy150Open karlgluck/heroes-of-the-storm-replay-parser/stormreplay/analyzer.py/StormReplayAnalyzer.getChat
5,543
@app.route('/metrics', methods=methods)
@app.route('/metrics/find', methods=methods)
def metrics_find():
    errors = {}
    from_time = None
    until_time = None
    wildcards = False

    try:
        wildcards = bool(int(RequestParams.get('wildcards', 0)))
    except ValueError:
        errors['wildcards'] = 'must be 0 or 1.'

    try:
        from_time = int(RequestParams.get('from', -1))
    except ValueError:
        errors['from'] = 'must be an epoch timestamp.'
    try:
        until_time = int(RequestParams.get('until', -1))
    except __HOLE__:
        errors['until'] = 'must be an epoch timestamp.'

    if from_time == -1:
        from_time = None
    if until_time == -1:
        until_time = None

    format = RequestParams.get('format', 'treejson')
    if format not in ['treejson', 'completer']:
        errors['format'] = 'unrecognized format: "{0}".'.format(format)

    if 'query' not in RequestParams:
        errors['query'] = 'this parameter is required.'

    if errors:
        return jsonify({'errors': errors}, status=400)

    query = RequestParams['query']
    matches = sorted(
        app.store.find(query, from_time, until_time),
        key=lambda node: node.name
    )

    base_path = query.rsplit('.', 1)[0] + '.' if '.' in query else ''

    if format == 'treejson':
        data = tree_json(matches, base_path, wildcards=wildcards)
        return (
            json.dumps(data),
            200,
            {'Content-Type': 'application/json'}
        )

    results = []
    for node in matches:
        node_info = {
            'path': node.path,
            'name': node.name,
            'is_leaf': int(node.is_leaf),  # XXX Y was this cast to str
        }
        if not node.is_leaf:
            node_info['path'] += '.'
        results.append(node_info)

    if len(results) > 1 and wildcards:
        results.append({'name': '*'})

    return jsonify({'metrics': results})
ValueError
dataset/ETHPy150Open brutasse/graphite-api/graphite_api/app.py/metrics_find
5,544
@app.route('/metrics/expand', methods=methods)
def metrics_expand():
    errors = {}
    try:
        group_by_expr = bool(int(RequestParams.get('groupByExpr', 0)))
    except ValueError:
        errors['groupByExpr'] = 'must be 0 or 1.'
    try:
        leaves_only = bool(int(RequestParams.get('leavesOnly', 0)))
    except __HOLE__:
        errors['leavesOnly'] = 'must be 0 or 1.'
    if 'query' not in RequestParams:
        errors['query'] = 'this parameter is required.'
    if errors:
        return jsonify({'errors': errors}, status=400)

    results = defaultdict(set)
    for query in RequestParams.getlist('query'):
        for node in app.store.find(query):
            if node.is_leaf or not leaves_only:
                results[query].add(node.path)

    if group_by_expr:
        for query, matches in results.items():
            results[query] = sorted(matches)
    else:
        new_results = set()
        for value in results.values():
            new_results = new_results.union(value)
        results = sorted(new_results)

    return jsonify({'results': results})
ValueError
dataset/ETHPy150Open brutasse/graphite-api/graphite_api/app.py/metrics_expand
5,545
@app.route('/render', methods=methods)
def render():
    errors = {}
    graph_options = {
        'width': 600,
        'height': 300,
    }
    request_options = {}
    graph_type = RequestParams.get('graphType', 'line')
    try:
        graph_class = GraphTypes[graph_type]
        request_options['graphType'] = graph_type
        request_options['graphClass'] = graph_class
    except KeyError:
        errors['graphType'] = (
            "Invalid graphType '{0}', must be one of '{1}'.".format(
                graph_type, "', '".join(sorted(GraphTypes.keys()))))
    request_options['pieMode'] = RequestParams.get('pieMode', 'average')
    targets = RequestParams.getlist('target')
    if not len(targets):
        errors['target'] = 'This parameter is required.'
    request_options['targets'] = targets

    if 'rawData' in RequestParams:
        request_options['format'] = 'raw'
    if 'format' in RequestParams:
        request_options['format'] = RequestParams['format']
        if 'jsonp' in RequestParams:
            request_options['jsonp'] = RequestParams['jsonp']
    if 'maxDataPoints' in RequestParams:
        try:
            request_options['maxDataPoints'] = int(
                float(RequestParams['maxDataPoints']))
        except ValueError:
            errors['maxDataPoints'] = 'Must be an integer.'

    if errors:
        return jsonify({'errors': errors}, status=400)

    for opt in graph_class.customizable:
        if opt in RequestParams:
            value = RequestParams[opt]
            try:
                value = int(value)
            except ValueError:
                try:
                    value = float(value)
                except __HOLE__:
                    if value.lower() in ('true', 'false'):
                        value = value.lower() == 'true'
                    elif value.lower() == 'default' or not value:
                        continue
            graph_options[opt] = value

    tzinfo = pytz.timezone(app.config['TIME_ZONE'])
    tz = RequestParams.get('tz')
    if tz:
        try:
            tzinfo = pytz.timezone(tz)
        except pytz.UnknownTimeZoneError:
            errors['tz'] = "Unknown timezone: '{0}'.".format(tz)
    request_options['tzinfo'] = tzinfo

    until_time = parseATTime(RequestParams.get('until', 'now'), tzinfo)
    from_time = parseATTime(RequestParams.get('from', '-1d'), tzinfo)

    start_time = min(from_time, until_time)
    end_time = max(from_time, until_time)
    if start_time == end_time:
        errors['from'] = errors['until'] = 'Invalid empty time range'

    request_options['startTime'] = start_time
    request_options['endTime'] = end_time

    use_cache = app.cache is not None and 'noCache' not in RequestParams
    cache_timeout = RequestParams.get('cacheTimeout')
    if cache_timeout is not None:
        cache_timeout = int(cache_timeout)

    if errors:
        return jsonify({'errors': errors}, status=400)

    # Done with options.

    if use_cache:
        request_key = hash_request()
        response = app.cache.get(request_key)
        if response is not None:
            return response

    headers = {
        'Last-Modified': http_date(time.time()),
        'Expires': http_date(time.time() + (cache_timeout or 60)),
        'Cache-Control': 'max-age={0}'.format(cache_timeout or 60)
    } if use_cache else {
        'Pragma': 'no-cache',
        'Cache-Control': 'no-cache',
    }

    context = {
        'startTime': request_options['startTime'],
        'endTime': request_options['endTime'],
        'tzinfo': request_options['tzinfo'],
        'data': [],
    }

    # Gather all data to take advantage of backends with fetch_multi
    paths = []
    for target in request_options['targets']:
        if request_options['graphType'] == 'pie':
            if ':' in target:
                continue
        if target.strip():
            paths += pathsFromTarget(target)
    data_store = fetchData(context, paths)

    if request_options['graphType'] == 'pie':
        for target in request_options['targets']:
            if ':' in target:
                name, value = target.split(':', 1)
                try:
                    value = float(value)
                except ValueError:
                    errors['target'] = "Invalid target: '{0}'.".format(target)
                context['data'].append((name, value))
            else:
                series_list = evaluateTarget(context, target, data_store)
                for series in series_list:
                    func = app.functions[request_options['pieMode']]
                    context['data'].append((series.name,
                                            func(context, series) or 0))

        if errors:
            return jsonify({'errors': errors}, status=400)

    else:  # graphType == 'line'
        for target in request_options['targets']:
            if not target.strip():
                continue
            series_list = evaluateTarget(context, target, data_store)
            context['data'].extend(series_list)

    request_options['format'] = request_options.get('format')

    if request_options['format'] == 'csv':
        response = BytesIO() if six.PY2 else StringIO()
        writer = csv.writer(response, dialect='excel')
        for series in context['data']:
            for index, value in enumerate(series):
                ts = datetime.fromtimestamp(
                    series.start + index * series.step,
                    request_options['tzinfo']
                )
                writer.writerow((series.name,
                                 ts.strftime("%Y-%m-%d %H:%M:%S"), value))
        response.seek(0)
        headers['Content-Type'] = 'text/csv'
        return response.read(), 200, headers

    if request_options['format'] == 'json':
        series_data = []
        if 'maxDataPoints' in request_options and any(context['data']):
            start_time = min([s.start for s in context['data']])
            end_time = max([s.end for s in context['data']])
            for series in context['data']:
                series_data.append(prune_datapoints(
                    series, request_options['maxDataPoints'],
                    start_time, end_time))
        else:
            for series in context['data']:
                timestamps = range(series.start, series.end + series.step,
                                   series.step)
                datapoints = zip(series, timestamps)
                series_data.append({'target': series.name,
                                    'datapoints': datapoints})

        return jsonify(series_data, headers=headers)

    if request_options['format'] == 'raw':
        response = StringIO()
        for series in context['data']:
            response.write(u"%s,%d,%d,%d|" % (
                series.name, series.start, series.end, series.step))
            response.write(u','.join(map(repr, series)))
            response.write(u'\n')
        response.seek(0)
        headers['Content-Type'] = 'text/plain'
        return response.read(), 200, headers

    if request_options['format'] == 'svg':
        graph_options['outputFormat'] = 'svg'

    graph_options['data'] = context['data']
    image = doImageRender(request_options['graphClass'], graph_options)

    use_svg = graph_options.get('outputFormat') == 'svg'
    if use_svg and 'jsonp' in request_options:
        headers['Content-Type'] = 'text/javascript'
        response = ('{0}({1})'.format(request_options['jsonp'],
                                      json.dumps(image.decode('utf-8'))),
                    200, headers)
    else:
        ctype = 'image/svg+xml' if use_svg else 'image/png'
        headers['Content-Type'] = ctype
        response = image, 200, headers

    if use_cache:
        app.cache.add(request_key, response, cache_timeout)

    return response
ValueError
dataset/ETHPy150Open brutasse/graphite-api/graphite_api/app.py/render
5,546
def test_pickling(self):
    try:
        self.stream = cPickle.loads(cPickle.dumps(self.stream))
        server_data = self.stream.get_epoch_iterator()
        expected_data = get_stream().get_epoch_iterator()
        for _, s, e in zip(range(3), server_data, expected_data):
            for data in zip(s, e):
                assert_allclose(*data, rtol=1e-3)
    except __HOLE__ as e:
        raise SkipTest("Skip test_that failed with: {}".format(e))
    assert_raises(StopIteration, next, server_data)
AssertionError
dataset/ETHPy150Open mila-udem/fuel/tests/test_server.py/TestServer.test_pickling
5,547
def is_wxpython_installed():
    """Returns True if wxpython is installed"""
    try:
        return __import__("wx")
    except __HOLE__:
        return False
ImportError
dataset/ETHPy150Open weecology/retriever/setup.py/is_wxpython_installed
5,548
def _addAuthLevelAlias(self, auth_level_uri, alias=None):
    """Add an auth level URI alias to this request.

    @param auth_level_uri: The auth level URI to send in the request.

    @param alias: The namespace alias to use for this auth level in this
        message. May be None if the alias is not important.
    """
    if alias is None:
        try:
            alias = self._getAlias(auth_level_uri)
        except __HOLE__:
            alias = self._generateAlias()
    else:
        existing_uri = self.auth_level_aliases.get(alias)
        if existing_uri is not None and existing_uri != auth_level_uri:
            raise KeyError('Attempting to redefine alias %r from %r to %r',
                           alias, existing_uri, auth_level_uri)

    self.auth_level_aliases[alias] = auth_level_uri
KeyError
dataset/ETHPy150Open necaris/python3-openid/openid/extensions/draft/pape5.py/PAPEExtension._addAuthLevelAlias
5,549
def parseExtensionArgs(self, args, is_openid1, strict=False):
    """Set the state of this request to be that expressed in these
    PAPE arguments

    @param args: The PAPE arguments without a namespace

    @param strict: Whether to raise an exception if the input is out of
        spec or otherwise malformed. If strict is false, malformed input
        will be ignored.

    @param is_openid1: Whether the input should be treated as part of an
        OpenID1 request

    @rtype: None

    @raises ValueError: When the max_auth_age is not parseable as an
        integer
    """
    # preferred_auth_policies is a space-separated list of policy URIs
    self.preferred_auth_policies = []

    policies_str = args.get('preferred_auth_policies')
    if policies_str:
        if isinstance(policies_str, bytes):
            policies_str = str(policies_str, encoding="utf-8")
        for uri in policies_str.split(' '):
            if uri not in self.preferred_auth_policies:
                self.preferred_auth_policies.append(uri)

    # max_auth_age is base-10 integer number of seconds
    max_auth_age_str = args.get('max_auth_age')
    self.max_auth_age = None

    if max_auth_age_str:
        try:
            self.max_auth_age = int(max_auth_age_str)
        except __HOLE__:
            if strict:
                raise

    # Parse auth level information
    preferred_auth_level_types = args.get('preferred_auth_level_types')
    if preferred_auth_level_types:
        aliases = preferred_auth_level_types.strip().split()

        for alias in aliases:
            key = 'auth_level.ns.%s' % (alias,)
            try:
                uri = args[key]
            except KeyError:
                if is_openid1:
                    uri = self._default_auth_level_aliases.get(alias)
                else:
                    uri = None

            if uri is None:
                if strict:
                    raise ValueError('preferred auth level %r is not '
                                     'defined in this message' % (alias,))
            else:
                self.addAuthLevel(uri, alias)
ValueError
dataset/ETHPy150Open necaris/python3-openid/openid/extensions/draft/pape5.py/Request.parseExtensionArgs
5,550
def _getNISTAuthLevel(self):
    try:
        return int(self.getAuthLevel(LEVELS_NIST))
    except __HOLE__:
        return None
KeyError
dataset/ETHPy150Open necaris/python3-openid/openid/extensions/draft/pape5.py/Response._getNISTAuthLevel
5,551
def parseExtensionArgs(self, args, is_openid1, strict=False):
    """Parse the provider authentication policy arguments into the
    internal state of this object

    @param args: unqualified provider authentication policy arguments

    @param strict: Whether to raise an exception when bad data is
        encountered

    @returns: None. The data is parsed into the internal fields of this
        object.
    """
    policies_str = args.get('auth_policies')
    if policies_str:
        auth_policies = policies_str.split(' ')
    elif strict:
        raise ValueError('Missing auth_policies')
    else:
        auth_policies = []

    if (len(auth_policies) > 1 and strict and AUTH_NONE in auth_policies):
        raise ValueError('Got some auth policies, as well as the special '
                         '"none" URI: %r' % (auth_policies,))

    if 'none' in auth_policies:
        msg = '"none" used as a policy URI (see PAPE draft < 5)'
        if strict:
            raise ValueError(msg)
        else:
            warnings.warn(msg, stacklevel=2)

    auth_policies = [u for u in auth_policies
                     if u not in ['none', AUTH_NONE]]

    self.auth_policies = auth_policies

    for (key, val) in args.items():
        if key.startswith('auth_level.'):
            alias = key[11:]

            # skip the already-processed namespace declarations
            if alias.startswith('ns.'):
                continue

            try:
                uri = args['auth_level.ns.%s' % (alias,)]
            except __HOLE__:
                if is_openid1:
                    uri = self._default_auth_level_aliases.get(alias)
                else:
                    uri = None

            if uri is None:
                if strict:
                    raise ValueError(
                        'Undefined auth level alias: %r' % (alias,))
            else:
                self.setAuthLevel(uri, val, alias)

    auth_time = args.get('auth_time')
    if auth_time:
        if TIME_VALIDATOR.match(auth_time):
            self.auth_time = auth_time
        elif strict:
            raise ValueError("auth_time must be in RFC3339 format")
KeyError
dataset/ETHPy150Open necaris/python3-openid/openid/extensions/draft/pape5.py/Response.parseExtensionArgs
5,552
def get_filename(metadata, directory=None, ext="txt"): """Construct data file name from a metadata dictionary. Returns the file name, as a string. """ metadata = metadata.copy() name = metadata.pop("name", "data") directory = metadata.pop("directory", directory) try: timestamp = metadata.pop("timestamp") except __HOLE__: tstring = "" else: tstring = timestamp.strftime("+%Y%m%d%H%M%S") mmd = [] for key in sorted(metadata.keys()): value = metadata[key] if type(value) is str: value = value.upper() mmd.append("%s%s" % (key.lower(), value)) fname = "%s%s+%s.%s" % ( name, tstring, "+".join(mmd), ext) if directory: return os.path.join(directory, fname) else: return fname
KeyError
dataset/ETHPy150Open kdart/pycopia/QA/pycopia/datafile.py/get_filename
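A small sketch of the pop-with-KeyError idiom in the record above, which makes the timestamp field optional; build_tstring is a hypothetical name, not from the corpus.

import datetime

def build_tstring(metadata):
    # KeyError from dict.pop marks the timestamp as optional
    try:
        timestamp = metadata.pop("timestamp")
    except KeyError:
        return ""
    return timestamp.strftime("+%Y%m%d%H%M%S")

assert build_tstring({}) == ""
assert build_tstring({"timestamp": datetime.datetime(2020, 1, 2, 3, 4, 5)}) == "+20200102030405"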
5,553
def compare_metadata(self, other, namelist, missingok=True): for name in namelist: try: if other[name] != self[name]: return False except __HOLE__: if not missingok: return False return True
KeyError
dataset/ETHPy150Open kdart/pycopia/QA/pycopia/datafile.py/DataFileData.compare_metadata
5,554
def worker(inqueue, outqueue, initializer=None, initargs=(), maxtasks=None): assert maxtasks is None or (type(maxtasks) == int and maxtasks > 0) put = outqueue.put get = inqueue.get if hasattr(inqueue, '_writer'): inqueue._writer.close() outqueue._reader.close() if initializer is not None: initializer(*initargs) completed = 0 while maxtasks is None or (maxtasks and completed < maxtasks): try: task = get() except (EOFError, __HOLE__): debug('worker got EOFError or IOError -- exiting') break if task is None: debug('worker got sentinel -- exiting') break job, i, func, args, kwds = task try: result = (True, func(*args, **kwds)) except Exception as e: result = (False, e) try: put((job, i, result)) except Exception as e: wrapped = MaybeEncodingError(e, result[1]) debug("Possible encoding error while sending result: %s" % ( wrapped)) put((job, i, (False, wrapped))) completed += 1 debug('worker exiting after %d tasks' % completed) # # Class representing a process pool #
IOError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/multiprocessing/pool.py/worker
5,555
def __init__(self, processes=None, initializer=None, initargs=(), maxtasksperchild=None): self._setup_queues() self._taskqueue = queue.Queue() self._cache = {} self._state = RUN self._maxtasksperchild = maxtasksperchild self._initializer = initializer self._initargs = initargs if processes is None: try: processes = cpu_count() except __HOLE__: processes = 1 if processes < 1: raise ValueError("Number of processes must be at least 1") if initializer is not None and not callable(initializer): raise TypeError('initializer must be a callable') self._processes = processes self._pool = [] self._repopulate_pool() self._worker_handler = threading.Thread( target=Pool._handle_workers, args=(self, ) ) self._worker_handler.daemon = True self._worker_handler._state = RUN self._worker_handler.start() self._task_handler = threading.Thread( target=Pool._handle_tasks, args=(self._taskqueue, self._quick_put, self._outqueue, self._pool) ) self._task_handler.daemon = True self._task_handler._state = RUN self._task_handler.start() self._result_handler = threading.Thread( target=Pool._handle_results, args=(self._outqueue, self._quick_get, self._cache) ) self._result_handler.daemon = True self._result_handler._state = RUN self._result_handler.start() self._terminate = Finalize( self, self._terminate_pool, args=(self._taskqueue, self._inqueue, self._outqueue, self._pool, self._worker_handler, self._task_handler, self._result_handler, self._cache), exitpriority=15 )
NotImplementedError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/multiprocessing/pool.py/Pool.__init__
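A sketch of the cpu_count fallback masked above: on some older platforms multiprocessing.cpu_count() could raise NotImplementedError, so the pool size defaults to 1. The function name is illustrative.

import multiprocessing

def default_pool_size():
    try:
        return multiprocessing.cpu_count()
    except NotImplementedError:
        return 1  # conservative fallback when the core count is unknown

print(default_pool_size())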
5,556
@staticmethod def _handle_tasks(taskqueue, put, outqueue, pool): thread = threading.current_thread() for taskseq, set_length in iter(taskqueue.get, None): i = -1 for i, task in enumerate(taskseq): if thread._state: debug('task handler found thread._state != RUN') break try: put(task) except __HOLE__: debug('could not put task on queue') break else: if set_length: debug('doing set_length()') set_length(i+1) continue break else: debug('task handler got sentinel') try: # tell result handler to finish when cache is empty debug('task handler sending sentinel to result handler') outqueue.put(None) # tell workers there is no more work debug('task handler sending sentinel to workers') for p in pool: put(None) except IOError: debug('task handler got IOError when sending sentinels') debug('task handler exiting')
IOError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/multiprocessing/pool.py/Pool._handle_tasks
5,557
@staticmethod def _handle_results(outqueue, get, cache): thread = threading.current_thread() while 1: try: task = get() except (IOError, EOFError): debug('result handler got EOFError/IOError -- exiting') return if thread._state: assert thread._state == TERMINATE debug('result handler found thread._state=TERMINATE') break if task is None: debug('result handler got sentinel') break job, i, obj = task try: cache[job]._set(i, obj) except KeyError: pass while cache and thread._state != TERMINATE: try: task = get() except (IOError, EOFError): debug('result handler got EOFError/IOError -- exiting') return if task is None: debug('result handler ignoring extra sentinel') continue job, i, obj = task try: cache[job]._set(i, obj) except KeyError: pass if hasattr(outqueue, '_reader'): debug('ensuring that outqueue is not full') # If we don't make room available in outqueue then # attempts to add the sentinel (None) to outqueue may # block. There is guaranteed to be no more than 2 sentinels. try: for i in range(10): if not outqueue._reader.poll(): break get() except (__HOLE__, EOFError): pass debug('result handler exiting: len(cache)=%s, thread._state=%s', len(cache), thread._state)
IOError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/multiprocessing/pool.py/Pool._handle_results
5,558
def next(self, timeout=None): self._cond.acquire() try: try: item = self._items.popleft() except __HOLE__: if self._index == self._length: raise StopIteration self._cond.wait(timeout) try: item = self._items.popleft() except IndexError: if self._index == self._length: raise StopIteration raise TimeoutError finally: self._cond.release() success, value = item if success: return value raise value
IndexError
dataset/ETHPy150Open amrdraz/kodr/app/brython/www/src/Lib/multiprocessing/pool.py/IMapIterator.next
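A sketch of the popleft-or-wait pattern in IMapIterator.next above: popleft() on an empty deque raises IndexError, which the real code uses as the "no result buffered yet" signal before waiting on a condition variable.

from collections import deque

items = deque()
try:
    item = items.popleft()  # empty deque raises IndexError
except IndexError:
    item = None  # nothing buffered; the real code waits on self._cond here
print(item)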
5,559
def import_or_raise(pkg_or_module_string, ExceptionType, *args, **kwargs): try: return __import__(pkg_or_module_string) except __HOLE__: raise ExceptionType(*args, **kwargs)
ImportError
dataset/ETHPy150Open anzev/hedwig/build/pip/pip/utils/__init__.py/import_or_raise
5,560
def ensure_dir(path): """os.path.makedirs without EEXIST.""" try: os.makedirs(path) except __HOLE__ as e: if e.errno != errno.EEXIST: raise
OSError
dataset/ETHPy150Open anzev/hedwig/build/pip/pip/utils/__init__.py/ensure_dir
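A runnable sketch of the makedirs-without-EEXIST idiom above; on modern Python, os.makedirs(path, exist_ok=True) covers the same case. The target path here is illustrative.

import errno
import os
import tempfile

def ensure_dir(path):
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise  # re-raise anything other than "already exists"

target = os.path.join(tempfile.gettempdir(), "demo-dir")  # hypothetical path
ensure_dir(target)
ensure_dir(target)  # second call hits EEXIST and is silently ignored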
5,561
def get_prog(): try: if os.path.basename(sys.argv[0]) in ('__main__.py', '-c'): return "%s -m pip" % sys.executable except (AttributeError, __HOLE__, IndexError): pass return 'pip' # Retry every half second for up to 3 seconds
TypeError
dataset/ETHPy150Open anzev/hedwig/build/pip/pip/utils/__init__.py/get_prog
5,562
def renames(old, new): """Like os.renames(), but handles renaming across devices.""" # Implementation borrowed from os.renames(). head, tail = os.path.split(new) if head and tail and not os.path.exists(head): os.makedirs(head) shutil.move(old, new) head, tail = os.path.split(old) if head and tail: try: os.removedirs(head) except __HOLE__: pass
OSError
dataset/ETHPy150Open anzev/hedwig/build/pip/pip/utils/__init__.py/renames
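A sketch of the best-effort cleanup at the end of renames() above: OSError from os.removedirs (directory missing or not empty) is deliberately swallowed.

import os

try:
    os.removedirs("no/such/dir")  # hypothetical path
except OSError:
    pass  # missing or non-empty directory; nothing to clean up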
5,563
def untar_file(filename, location): """ Untar the file (with path `filename`) to the destination `location`. All files are written based on system defaults and umask (i.e. permissions are not preserved), except that regular file members with any execute permissions (user, group, or world) have "chmod +x" applied after being written. Note that for windows, any execute changes using os.chmod are no-ops per the python docs. """ ensure_dir(location) if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'): mode = 'r:gz' elif filename.lower().endswith(BZ2_EXTENSIONS): mode = 'r:bz2' elif filename.lower().endswith('.tar'): mode = 'r' else: logger.warning( 'Cannot determine compression type for file %s', filename, ) mode = 'r:*' tar = tarfile.open(filename, mode) try: # note: python<=2.5 doesn't seem to know about pax headers, filter them leading = has_leading_dir([ member.name for member in tar.getmembers() if member.name != 'pax_global_header' ]) for member in tar.getmembers(): fn = member.name if fn == 'pax_global_header': continue if leading: fn = split_leading_dir(fn)[1] path = os.path.join(location, fn) if member.isdir(): ensure_dir(path) elif member.issym(): try: tar._extract_member(member, path) except Exception as exc: # Some corrupt tar files seem to produce this # (specifically bad symlinks) logger.warning( 'In the tar file %s the member %s is invalid: %s', filename, member.name, exc, ) continue else: try: fp = tar.extractfile(member) except (__HOLE__, AttributeError) as exc: # Some corrupt tar files seem to produce this # (specifically bad symlinks) logger.warning( 'In the tar file %s the member %s is invalid: %s', filename, member.name, exc, ) continue ensure_dir(os.path.dirname(path)) destfp = open(path, 'wb') try: shutil.copyfileobj(fp, destfp) finally: destfp.close() fp.close() # member have any execute permissions for user/group/world? if member.mode & 0o111: # make dest file have execute for user/group/world # no-op on windows per python docs os.chmod(path, (0o777 - current_umask() | 0o111)) finally: tar.close()
KeyError
dataset/ETHPy150Open anzev/hedwig/build/pip/pip/utils/__init__.py/untar_file
5,564
def read_text_file(filename): """Return the contents of *filename*. Try to decode the file contents with utf-8, the preferred system encoding (e.g., cp1252 on some Windows machines), and latin1, in that order. Decoding a byte string with latin1 will never raise an error. In the worst case, the returned string will contain some garbage characters. """ with open(filename, 'rb') as fp: data = fp.read() encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1'] for enc in encodings: try: data = data.decode(enc) except __HOLE__: continue break assert type(data) != bytes # Latin1 should have worked. return data
UnicodeDecodeError
dataset/ETHPy150Open anzev/hedwig/build/pip/pip/utils/__init__.py/read_text_file
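A compact sketch of the multi-encoding decode loop in read_text_file above: try each codec in order and keep the first that does not raise UnicodeDecodeError. Since latin1 can decode any byte string, the fallback never fails; the helper name is illustrative.

def decode_first(data, encodings=("utf-8", "latin1")):
    for enc in encodings:
        try:
            return data.decode(enc)
        except UnicodeDecodeError:
            continue
    return data.decode("latin1", errors="replace")  # last resort; never raises

print(decode_first(b"caf\xc3\xa9"))  # valid UTF-8: 'café'
print(decode_first(b"caf\xe9"))      # invalid UTF-8, falls back to latin1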
5,565
def readline(self): try: try: return next(self._gen) except NameError: return self._gen.next() except __HOLE__: return ''
StopIteration
dataset/ETHPy150Open anzev/hedwig/build/pip/pip/utils/__init__.py/FakeFile.readline
5,566
def run_test(base_path, plots_path, test_name, params): def safe_add_key(args, key, name): if name in params: args.extend((key, str(params[name]))) def safe_add_path(args, folder, key, name): if name in params: args.extend((key, os.path.join(folder, params[name]))) logging.info('Testing "{0}"'.format(test_name)) folder = os.path.join(base_path, params['folder']) cmd = [] safe_add_path(cmd, folder, '--src', 'src') safe_add_path(cmd, folder, '--dst', 'dst') safe_add_path(cmd, folder, '--src-keyframes', 'src-keyframes') safe_add_path(cmd, folder, '--dst-keyframes', 'dst-keyframes') safe_add_path(cmd, folder, '--src-timecodes', 'src-timecodes') safe_add_path(cmd, folder, '--dst-timecodes', 'dst-timecodes') safe_add_path(cmd, folder, '--script', 'script') safe_add_path(cmd, folder, '--chapters', 'chapters') safe_add_path(cmd, folder, '--src-script', 'src-script') safe_add_path(cmd, folder, '--dst-script', 'dst-script') safe_add_key(cmd, '--max-kf-distance', 'max-kf-distance') safe_add_key(cmd, '--max-ts-distance', 'max-ts-distance') safe_add_key(cmd, '--max-ts-duration', 'max-ts-duration') output_path = os.path.join(folder, params['dst']) + '.sushi.test.ass' cmd.extend(('-o', output_path)) if plots_path: cmd.extend(('--test-shift-plot', os.path.join(plots_path, '{0}.png'.format(test_name)))) with set_file_logger(os.path.join(folder, 'sushi_test.log')): try: with remove_console_logger(): parse_args_and_run(cmd) except Exception as e: logging.critical('Sushi failed on test "{0}": {1}'.format(test_name, e.message)) return False ideal_path = os.path.join(folder, params['ideal']) try: timecodes = Timecodes.from_file(os.path.join(folder, params['dst-timecodes'])) except __HOLE__: timecodes = Timecodes.cfr(params['fps']) return compare_scripts(ideal_path, output_path, timecodes, test_name, params['expected_errors'])
KeyError
dataset/ETHPy150Open tp7/Sushi/regression-tests.py/run_test
5,567
def run(): root_logger.setLevel(logging.DEBUG) global console_handler console_handler = logging.StreamHandler() console_handler.setLevel(logging.INFO) console_handler.setFormatter(logging.Formatter('%(message)s')) root_logger.addHandler(console_handler) try: with open('tests.json') as file: json = load(file) except __HOLE__ as e: logging.critical(e) sys.exit(2) run_only = json.get('run-only') failed = ran = 0 for test_name, params in json.get('tests', {}).iteritems(): if run_only and test_name not in run_only: continue if not params.get('disabled', False): ran += 1 if not run_test(json['basepath'], json['plots'], test_name, params): failed += 1 logging.info('') else: logging.warn('Test "{0}" disabled'.format(test_name)) for test_name, params in json.get('wavs', {}).iteritems(): ran += 1 if not run_wav_test(test_name, os.path.join(json['basepath'], params['file']), params): failed += 1 logging.info('') logging.info('Ran {0} tests, {1} failed'.format(ran, failed))
IOError
dataset/ETHPy150Open tp7/Sushi/regression-tests.py/run
5,568
def process_token_request(r, decoder, *args): try: data = decoder(r.content) return tuple(data[key] for key in args) except __HOLE__ as e: # pragma: no cover bad_key = e.args[0] raise KeyError(PROCESS_TOKEN_ERROR.format(key=bad_key, raw=r.content))
KeyError
dataset/ETHPy150Open litl/rauth/rauth/service.py/process_token_request
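A sketch of the re-wrapped KeyError in process_token_request above: a missing key is re-raised with the raw payload attached for easier debugging. The helper name and message text are illustrative, not rauth's actual wording.

def extract(data, *keys):
    try:
        return tuple(data[k] for k in keys)
    except KeyError as e:
        # re-wrap with the raw payload for easier debugging
        raise KeyError("missing key %r in response %r" % (e.args[0], data))

print(extract({"access_token": "abc"}, "access_token"))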
5,569
def transkey(self, keyname): self.logger.debug("key name in tk '%s'" % (keyname)) try: return self._keytbl[keyname.lower()] except __HOLE__: return keyname
KeyError
dataset/ETHPy150Open ejeschke/ginga/ginga/tkw/ImageViewTk.py/ImageViewEvent.transkey
5,570
def _entity_criterion(self, entity): #if hasattr(entity, "property"): # entity = entity.property.mapper if hasattr(entity, 'parententity'): # XXX is this used? entity = entity.parententity try: cls = _class_to_mapper(entity).class_ except __HOLE__: # XXX For tables, table columns #pass raise # XXX temporal, to verify it's used else: alias = entity if isinstance(entity, AliasedClass) else cls prop = self.property_name if prop in dir(cls): crit = getattr(alias, prop) if crit is not None: if not isinstance(crit, ClauseElement): # This simplest safe way to make bare boolean column # accepted as expression. crit = cast(crit, Boolean) return crit return None
AttributeError
dataset/ETHPy150Open SmartTeleMax/iktomi/iktomi/unstable/db/sqla/public_query.py/PublicQuery._entity_criterion
5,571
def check(self, code, filename):
        """Run pep8 on code and return the output."""

        options = {
            'reporter': self.get_report()
        }

        type_map = {
            'select': [],
            'ignore': [],
            'max-line-length': 0,
            'max-complexity': 0
        }

        self.build_options(options, type_map, transform=lambda s: s.replace('-', '_'))

        final_options = options.copy()

        # Try to read options from pep8 default configuration files (.pep8, tox.ini).
        # If present, they will override the ones defined by Sublime Linter's config.
        try:
            # `onError` will be called by `process_options` when no pep8 configuration file is found.
            # Override needed to suppress OptionParser.error() output in the default parser.
            def onError(msg):
                pass

            from pep8 import process_options, get_parser
            parser = get_parser()
            parser.error = onError
            pep8_options, _ = process_options([os.curdir], True, True, parser=parser)

            # Merge options only if the pep8 config file actually exists;
            # pep8 always returns a config filename, even when it doesn't exist!
            if os.path.isfile(pep8_options.config):
                pep8_options = vars(pep8_options)
                pep8_options.pop('reporter', None)
                for opt_n, opt_v in pep8_options.items():
                    if isinstance(final_options.get(opt_n, None), list):
                        final_options[opt_n] += opt_v
                    else:
                        final_options[opt_n] = opt_v
        except __HOLE__:
            # Catch and ignore parser.error() when no config files are found.
            pass

        if persist.debug_mode():
            persist.printf('{} ST options: {}'.format(self.name, options))
            persist.printf('{} options: {}'.format(self.name, final_options))

        checker = self.module.StyleGuide(**final_options)

        return checker.input_file(
            filename=os.path.basename(filename),
            lines=code.splitlines(keepends=True)
        )
SystemExit
dataset/ETHPy150Open SublimeLinter/SublimeLinter-pep8/linter.py/PEP8.check
5,572
def __init__(self, headers, body=''): # first of all, lets make sure that if headers or body are # unicode strings, it must be converted into a utf-8 encoded # byte string self.raw_headers = utf8(headers.strip()) self.body = utf8(body) # Now let's concatenate the headers with the body, and create # `rfile` based on it self.rfile = StringIO(b'\r\n\r\n'.join([self.raw_headers, self.body])) # Creating `wfile` as an empty StringIO, just to avoid any # real I/O calls self.wfile = StringIO() # parsing the request line preemptively self.raw_requestline = self.rfile.readline() # initiating the error attributes with None self.error_code = None self.error_message = None # Parse the request based on the attributes above if not self.parse_request(): return # making the HTTP method string available as the command self.method = self.command # Now 2 convenient attributes for the HTTPretty API: # `querystring` holds a dictionary with the parsed query string try: self.path = self.path.encode('iso-8859-1') except __HOLE__: pass self.path = decode_utf8(self.path) qstring = self.path.split("?", 1)[-1] self.querystring = self.parse_querystring(qstring) # And the body will be attempted to be parsed as # `application/json` or `application/x-www-form-urlencoded` self.parsed_body = self.parse_request_body(self.body)
UnicodeDecodeError
dataset/ETHPy150Open gabrielfalcao/HTTPretty/httpretty/core.py/HTTPrettyRequest.__init__
5,573
def connect(self, address): self._closed = False try: self._address = (self._host, self._port) = address except __HOLE__: # We get here when the address is just a string pointing to a # unix socket path/file # # See issue #206 self.is_http = False else: ports_to_check = ( POTENTIAL_HTTP_PORTS.union(POTENTIAL_HTTPS_PORTS)) self.is_http = self._port in ports_to_check if not self.is_http: if self.truesock: self.truesock.connect(self._address) else: raise UnmockedError()
ValueError
dataset/ETHPy150Open gabrielfalcao/HTTPretty/httpretty/core.py/fakesock.socket.connect
5,574
def sendall(self, data, *args, **kw): self._sent_data.append(data) self.fd = FakeSockFile() self.fd.socket = self try: requestline, _ = data.split(b'\r\n', 1) method, path, version = parse_requestline( decode_utf8(requestline)) is_parsing_headers = True except __HOLE__: path = '' is_parsing_headers = False if self._entry is None: # If the previous request wasn't mocked, don't # mock the subsequent sending of data return self.real_sendall(data, *args, **kw) else: method = self._entry.method path = self._entry.info.path self.fd.seek(0) if not is_parsing_headers: if len(self._sent_data) > 1: headers = utf8(last_requestline(self._sent_data)) meta = self._entry.request.headers body = utf8(self._sent_data[-1]) if meta.get('transfer-encoding', '') == 'chunked': if ( not body.isdigit() and (body != b'\r\n') and (body != b'0\r\n\r\n') ): self._entry.request.body += body else: self._entry.request.body += body httpretty.historify_request(headers, body, False) return # path might come with s = urlsplit(path) POTENTIAL_HTTP_PORTS.add(int(s.port or 80)) parts = list(map(utf8, data.split(b'\r\n\r\n', 1))) if len(parts) == 2: headers, body = parts else: headers = '' body = data request = httpretty.historify_request(headers, body) info = URIInfo( hostname=self._host, port=self._port, path=s.path, query=s.query, last_request=request ) matcher, entries = httpretty.match_uriinfo(info) if not entries: self._entry = None self.real_sendall(data) return self._entry = matcher.get_next_entry(method, info, request)
ValueError
dataset/ETHPy150Open gabrielfalcao/HTTPretty/httpretty/core.py/fakesock.socket.sendall
5,575
def validate(self): content_length_keys = 'Content-Length', 'content-length' for key in content_length_keys: got = self.adding_headers.get( key, self.forcing_headers.get(key, None)) if got is None: continue try: igot = int(got) except (ValueError, __HOLE__): warnings.warn( 'HTTPretty got to register the Content-Length header ' 'with "%r" which is not a number' % got) return if igot > self.body_length: raise HTTPrettyError( 'HTTPretty got inconsistent parameters. The header ' 'Content-Length you registered expects size "%d" but ' 'the body you registered for that has actually length ' '"%d".' % ( igot, self.body_length, ) )
TypeError
dataset/ETHPy150Open gabrielfalcao/HTTPretty/httpretty/core.py/Entry.validate
5,576
@classmethod @contextlib.contextmanager def record(cls, filename, indentation=4, encoding='utf-8'): try: import urllib3 except __HOLE__: msg = ( 'HTTPretty requires urllib3 installed ' 'for recording actual requests.' ) raise RuntimeError(msg) http = urllib3.PoolManager() cls.enable() calls = [] def record_request(request, uri, headers): cls.disable() response = http.request(request.method, uri) calls.append({ 'request': { 'uri': uri, 'method': request.method, 'headers': dict(request.headers), 'body': decode_utf8(request.body), 'querystring': request.querystring }, 'response': { 'status': response.status, 'body': decode_utf8(response.data), 'headers': dict(response.headers) } }) cls.enable() return response.status, response.headers, response.data for method in cls.METHODS: cls.register_uri(method, MULTILINE_ANY_REGEX, body=record_request) yield cls.disable() with codecs.open(filename, 'w', encoding) as f: f.write(json.dumps(calls, indent=indentation))
ImportError
dataset/ETHPy150Open gabrielfalcao/HTTPretty/httpretty/core.py/httpretty.record
5,577
def get_app_commands(app): try: app_command_module = importlib.import_module(app + '.commands') except __HOLE__: return [] ret = {} for command in getattr(app_command_module, 'commands', []): ret[command.name] = command return ret
ImportError
dataset/ETHPy150Open frappe/frappe/frappe/utils/bench_helper.py/get_app_commands
5,578
def start_tracker(): """Start the Torrent Tracker. """ # parse commandline options parser = OptionParser() parser.add_option('-p', '--port', help='Tracker Port', default=0) parser.add_option('-b', '--background', action='store_true', default=False, help='Start in background') parser.add_option('-d', '--debug', action='store_true', default=False, help='Debug mode') (options, args) = parser.parse_args() # setup directories create_pytt_dirs() # setup logging setup_logging(options.debug) try: # start the torrent tracker run_app(int(options.port) or get_config().getint('tracker', 'port')) except __HOLE__: logging.info('Tracker Stopped.') close_db() sys.exit(0) except Exception as ex: logging.fatal('%s' % str(ex)) close_db() sys.exit(-1)
KeyboardInterrupt
dataset/ETHPy150Open semk/Pytt/pytt/tracker.py/start_tracker
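A sketch of the KeyboardInterrupt shutdown path in start_tracker above; run_app and close_db here are stand-ins for the tracker's real entry points, not Pytt's API.

import sys

def run_app():
    raise KeyboardInterrupt  # stand-in for Ctrl-C during the event loop

def close_db():
    print("db closed")

try:
    run_app()
except KeyboardInterrupt:
    close_db()   # release resources before exiting cleanly
    sys.exit(0)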
5,579
def xhtmlValidate(modelXbrl, elt): from lxml.etree import DTD, XMLSyntaxError from arelle import FunctionIxt ixNsStartTags = ["{" + ns + "}" for ns in XbrlConst.ixbrlAll] isEFM = modelXbrl.modelManager.disclosureSystem.validationType == "EFM" # find ix version for messages _ixNS = elt.modelDocument.ixNS _customTransforms = modelXbrl.modelManager.customTransforms or {} def checkAttribute(elt, isIxElt, attrTag, attrValue): ixEltAttrDefs = ixAttrDefined.get(elt.namespaceURI, EMPTYDICT).get(elt.localName, ()) if attrTag.startswith("{"): ns, sep, localName = attrTag[1:].partition("}") else: ns = None localName = attrTag if ns is not None and ns not in XbrlConst.ixbrlAll and attrTag not in ixEltAttrDefs: if isIxElt: allowedNs = allowedNonIxAttrNS.get(elt.localName, None) if allowedNs != "##other" and ns != allowedNs: modelXbrl.error(ixMsgCode("qualifiedAttributeNotExpected", elt), _("Inline XBRL element %(element)s has qualified attribute %(name)s"), modelObject=elt, element=str(elt.elementQname), name=attrTag) if ns == XbrlConst.xbrli and elt.localName in { "fraction", "nonFraction", "nonNumeric", "references", "relationship", "tuple"}: modelXbrl.error(ixMsgCode("qualifiedAttributeDisallowed", elt), _("Inline XBRL element %(element)s has disallowed attribute %(name)s"), modelObject=elt, element=str(elt.elementQname), name=attrTag) else: if ns in XbrlConst.ixbrlAll: modelXbrl.error(ixMsgCode("inlineAttributeMisplaced", elt, name="other"), _("Inline XBRL attributes are not allowed on html elements: ix:%(name)s"), modelObject=elt, name=localName) elif ns not in {XbrlConst.xml, XbrlConst.xsi, XbrlConst.xhtml}: modelXbrl.error(ixMsgCode("extensionAttributeMisplaced", ns=_ixNS), _("Extension attributes are not allowed on html elements: %(tag)s"), modelObject=elt, tag=attrTag) elif isIxElt: try: _xsdType = ixAttrType[elt.namespaceURI][localName] if isinstance(_xsdType, dict): baseXsdType = _xsdType["type"] facets = _xsdType else: baseXsdType = _xsdType facets = None XmlValidate.validateValue(modelXbrl, elt, attrTag, baseXsdType, attrValue, facets=facets) if not (attrTag in ixEltAttrDefs or (localName in ixEltAttrDefs and (not ns or ns in XbrlConst.ixbrlAll))): raise KeyError disallowedXbrliAttrs = ({"scheme", "periodType", "balance", "contextRef", "unitRef", "precision", "decimals"} - {"fraction": {"contextRef", "unitRef"}, "nonFraction": {"contextRef", "unitRef", "decimals", "precision"}, "nonNumeric": {"contextRef"}}.get(elt.localName, set())) disallowedAttrs = set(a for a in disallowedXbrliAttrs if elt.get(a) is not None) if disallowedAttrs: modelXbrl.error(ixMsgCode("inlineElementAttributes",elt), _("Inline XBRL element %(element)s has disallowed attributes %(attributes)s"), modelObject=elt, element=elt.elementQname, attributes=", ".join(disallowedAttrs)) except __HOLE__: modelXbrl.error(ixMsgCode("attributeNotExpected",elt), _("Attribute %(attribute)s is not expected on element ix:%(element)s"), modelObject=elt, attribute=attrTag, element=elt.localName) def checkHierarchyConstraints(elt): constraints = ixHierarchyConstraints.get(elt.localName) if constraints: for _rel, names in constraints: reqt = _rel[0] rel = _rel[1:] if reqt in ('&', '^'): nameFilter = ('*',) else: nameFilter = names if nameFilter == ('*',): namespaceFilter = namespacePrefix = '*' else: namespaceFilter = elt.namespaceURI namespacePrefix = elt.prefix relations = {"ancestor": XmlUtil.ancestor, "parent": XmlUtil.parent, "child-choice": XmlUtil.children, "child-sequence": XmlUtil.children, "child-or-text": XmlUtil.children, 
"descendant": XmlUtil.descendants}[rel]( elt, namespaceFilter, nameFilter) if rel in ("ancestor", "parent"): if relations is None: relations = [] else: relations = [relations] if rel == "child-or-text": relations += XmlUtil.innerTextNodes(elt, ixExclude=True, ixEscape=False, ixContinuation=False) issue = '' if reqt == '^': if not any(r.localName in names and r.namespaceURI == elt.namespaceURI for r in relations): issue = " and is missing one of " + ', '.join(names) if reqt in ('&', '^'): disallowed = [str(r.elementQname) for r in relations if not (r.tag in names or (r.localName in names and r.namespaceURI == elt.namespaceURI))] if disallowed: issue += " and may not have " + ", ".join(disallowed) elif rel == "child-sequence": sequencePosition = 0 for i, r in enumerate(relations): rPos = names.index(str(r.localName)) if rPos < sequencePosition: issue += " and is out of sequence: " + str(r.elementQname) else: sequencePosition = rPos if reqt == '?' and len(relations) > 1: issue = " may only have 0 or 1 but {0} present ".format(len(relations)) if reqt == '+' and len(relations) == 0: issue = " must have at least 1 but none present " if ((reqt == '+' and not relations) or (reqt == '-' and relations) or (issue)): code = "{}:{}".format(ixSect[elt.namespaceURI].get(elt.localName,"other")["constraint"], { 'ancestor': "ancestorNode", 'parent': "parentNode", 'child-choice': "childNodes", 'child-sequence': "childNodes", 'child-or-text': "childNodesOrText", 'descendant': "descendantNodes"}[rel] + { '+': "Required", '-': "Disallowed", '&': "Allowed", '^': "Specified"}.get(reqt, "Specified")) msg = _("Inline XBRL ix:{0} {1} {2} {3} {4} element").format( elt.localName, {'+': "must", '-': "may not", '&': "may only", '?': "may", '+': "must"}[reqt], {'ancestor': "be nested in", 'parent': "have parent", 'child-choice': "have child", 'child-sequence': "have child", 'child-or-text': "have child or text,", 'descendant': "have as descendant"}[rel], '' if rel == 'child-or-text' else ', '.join(str(r.elementQname) for r in relations) if names == ('*',) and relations else ", ".join("{}:{}".format(namespacePrefix, n) for n in names), issue) modelXbrl.error(code, msg, modelObject=[elt] + relations, requirement=reqt, messageCodes=("ix{ver.sect}:ancestorNode{Required|Disallowed}", "ix{ver.sect}:childNodesOrTextRequired", "ix{ver.sect}:childNodes{Required|Disallowed|Allowed}", "ix{ver.sect}:descendantNodesDisallowed", "ix{ver.sect}:parentNodeRequired")) # other static element checks (that don't require a complete object model, context, units, etc if elt.localName == "nonFraction": childElts = XmlUtil.children(elt, '*', '*') hasText = (elt.text or "").strip() or any((childElt.tail or "").strip() for childElt in childElts) if elt.isNil: ancestorNonFractions = XmlUtil.ancestors(elt, _ixNS, elt.localName) if ancestorNonFractions: modelXbrl.error(ixMsgCode("nonFractionAncestors", elt), _("Fact %(fact)s is a nil nonFraction and MUST not have an ancestor ix:nonFraction"), modelObject=[elt] + ancestorNonFractions, fact=elt.qname) if childElts or hasText: modelXbrl.error(ixMsgCode("nonFractionTextAndElementChildren", elt), _("Fact %(fact)s is a nil nonFraction and MUST not have an child elements or text"), modelObject=[elt] + childElts, fact=elt.qname) elt.setInvalid() # prevent further validation or cascading errors else: if ((childElts and (len(childElts) != 1 or childElts[0].namespaceURI != _ixNS or childElts[0].localName != "nonFraction")) or (childElts and hasText)): 
modelXbrl.error(ixMsgCode("nonFractionTextAndElementChildren", elt), _("Fact %(fact)s is a non-nil nonFraction and MUST have exactly one ix:nonFraction child element or text."), modelObject=[elt] + childElts, fact=elt.qname) elt.setInvalid() if elt.localName == "fraction": if elt.isNil: ancestorFractions = XmlUtil.ancestors(elt, _ixNS, elt.localName) if ancestorFractions: modelXbrl.error(ixMsgCode("fractionAncestors", elt), _("Fact %(fact)s is a nil fraction and MUST not have an ancestor ix:fraction"), modelObject=[elt] + ancestorFractions, fact=elt.qname) else: nonFrChildren = [e for e in XmlUtil.children(elt, _ixNS, '*') if e.localName not in ("fraction", "numerator", "denominator")] if nonFrChildren: modelXbrl.error(ixMsgCode("fractionElementChildren", elt), _("Fact %(fact)s is a non-nil fraction and not have any child elements except ix:fraction, ix:numerator and ix:denominator: %(children)s"), modelObject=[elt] + nonFrChildren, fact=elt.qname, children=", ".join(e.localName for e in nonFrChildren)) if elt.localName in ("nonFraction", "numerator", "denominator", "nonNumeric"): fmt = elt.format if fmt: if fmt in _customTransforms: pass elif fmt.namespaceURI not in FunctionIxt.ixtNamespaceFunctions: modelXbrl.error(ixMsgCode("invalidTransformation", elt, sect="validation"), _("Fact %(fact)s has unrecognized transformation namespace %(namespace)s"), modelObject=elt, fact=elt.qname, transform=fmt, namespace=fmt.namespaceURI) elt.setInvalid() elif fmt.localName not in FunctionIxt.ixtNamespaceFunctions[fmt.namespaceURI]: modelXbrl.error(ixMsgCode("invalidTransformation", elt, sect="validation"), _("Fact %(fact)s has unrecognized transformation name %(name)s"), modelObject=elt, fact=elt.qname, transform=fmt, name=fmt.localName) elt.setInvalid() def ixToXhtml(fromRoot): toRoot = etree.Element(fromRoot.localName) copyNonIxChildren(fromRoot, toRoot) for attrTag, attrValue in fromRoot.items(): checkAttribute(fromRoot, False, attrTag, attrValue) if attrTag not in ('version', # used in inline test cases but not valid xhtml '{http://www.w3.org/2001/XMLSchema-instance}schemaLocation'): toRoot.set(attrTag, attrValue) return toRoot def copyNonIxChildren(fromElt, toElt, excludeSubtree=False): for fromChild in fromElt.iterchildren(): if isinstance(fromChild, ModelObject): isIxNs = fromChild.namespaceURI in XbrlConst.ixbrlAll if isIxNs: if fromChild.localName not in ixElements[fromChild.namespaceURI]: modelXbrl.error(ixMsgCode("elementNameInvalid",ns=_ixNS), _("Inline XBRL element name %(element)s is not valid"), modelObject=fromChild, element=str(fromChild.elementQname)) else: checkHierarchyConstraints(fromChild) for attrTag, attrValue in fromChild.items(): checkAttribute(fromChild, True, attrTag, attrValue) for attrTag in ixAttrRequired[fromChild.namespaceURI].get(fromChild.localName,[]): if fromChild.get(attrTag) is None: modelXbrl.error(ixMsgCode("attributeRequired", fromChild), _("Attribute %(attribute)s required on element ix:%(element)s"), modelObject=fromChild, attribute=attrTag, element=fromChild.localName) if excludeSubtree or (fromChild.localName in {"references", "resources"} and isIxNs): copyNonIxChildren(fromChild, toElt, excludeSubtree=True) else: if fromChild.localName in {"footnote", "nonNumeric", "continuation"} and isIxNs: toChild = etree.Element("ixNestedContent") toElt.append(toChild) copyNonIxChildren(fromChild, toChild) if fromChild.text is not None: toChild.text = fromChild.text if fromChild.tail is not None: toChild.tail = fromChild.tail elif isIxNs: copyNonIxChildren(fromChild, 
toElt) else: toChild = etree.Element(fromChild.localName) toElt.append(toChild) copyNonIxChildren(fromChild, toChild) for attrTag, attrValue in fromChild.items(): checkAttribute(fromChild, False, attrTag, attrValue) toChild.set(attrTag, attrValue) if fromChild.text is not None: toChild.text = fromChild.text if fromChild.tail is not None: toChild.tail = fromChild.tail # copy xhtml elements to fresh tree with open(os.path.join(modelXbrl.modelManager.cntlr.configDir, "xhtml1-strict-ix.dtd")) as fh: dtd = DTD(fh) try: #with open("/users/hermf/temp/testDtd.htm", "w") as fh: # fh.write(etree.tostring(ixToXhtml(elt), encoding=_STR_UNICODE, pretty_print=True)) if not dtd.validate( ixToXhtml(elt) ): modelXbrl.error("html:syntaxError", _("%(element)s error %(error)s"), modelObject=elt, element=elt.localName.title(), error=', '.join(e.message for e in dtd.error_log.filter_from_errors())) if isEFM: ValidateFilingText.validateHtmlContent(modelXbrl, elt, elt, "InlineXBRL", "EFM.5.02.05.", isInline=True) except XMLSyntaxError as err: modelXbrl.error("html:syntaxError", _("%(element)s error %(error)s"), modelObject=elt, element=elt.localName.title(), error=dtd.error_log.filter_from_errors())
KeyError
dataset/ETHPy150Open Arelle/Arelle/arelle/XhtmlValidate.py/xhtmlValidate
5,580
def ignore_not_implemented(func): def _inner(*args, **kwargs): try: return func(*args, **kwargs) except __HOLE__: return None functional.update_wrapper(_inner, func) return _inner
NotImplementedError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/tests/regressiontests/introspection/tests.py/ignore_not_implemented
5,581
def daemonize(stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
    """
    Double fork-trick. For starting a posix daemon.

    This forks the current process into a daemon. The stdin, stdout, and
    stderr arguments are file names that will be opened and be used to replace
    the standard file descriptors in sys.stdin, sys.stdout, and sys.stderr.
    These arguments are optional and default to /dev/null. Note that stderr is
    opened unbuffered, so if it shares a file with stdout then interleaved
    output may not appear in the order that you expect.

    Thanks to:
    http://code.activestate.com/recipes/66012-fork-a-daemon-process-on-unix/
    """
    # Do first fork.
    try:
        pid = os.fork()
        if pid > 0:
            os.waitpid(pid, 0)
            return 0 # Return 0 from first parent.
    except __HOLE__ as e:
        sys.stderr.write("fork #1 failed: (%d) %s\n" % (e.errno, e.strerror))
        sys.exit(1)

    # Decouple from parent environment.
    os.chdir("/")
    os.umask(0)
    os.setsid()

    # Do second fork.
    try:
        pid = os.fork()
        if pid > 0:
            sys.exit(0) # Exit second parent.
    except OSError as e:
        sys.stderr.write("fork #2 failed: (%d) %s\n" % (e.errno, e.strerror))
        sys.exit(1)

    # Now I am a daemon!

    # Redirect standard file descriptors.

    # NOTE: For debugging, you might want to take these instead of /dev/null.
    # so = open('/tmp/log2', 'ab+')
    # se = open('/tmp/log2', 'ab+', 0)

    si = open(stdin, 'rb')
    so = open(stdout, 'ab+')
    se = open(stderr, 'ab+', 0)
    os.dup2(si.fileno(), sys.stdin.fileno())
    os.dup2(so.fileno(), sys.stdout.fileno())
    os.dup2(se.fileno(), sys.stderr.fileno())

    # Return 1 from daemon.
    return 1
OSError
dataset/ETHPy150Open jonathanslenders/pymux/pymux/utils.py/daemonize
5,582
def __cmp__(cls, other): # If the other object is not a Basic subclass, then we are not equal to # it. if not isinstance(other, BasicMeta): return -1 n1 = cls.__name__ n2 = other.__name__ if n1 == n2: return 0 UNKNOWN = len(ordering_of_classes) + 1 try: i1 = ordering_of_classes.index(n1) except ValueError: i1 = UNKNOWN try: i2 = ordering_of_classes.index(n2) except __HOLE__: i2 = UNKNOWN if i1 == UNKNOWN and i2 == UNKNOWN: return (n1 > n2) - (n1 < n2) return (i1 > i2) - (i1 < i2)
ValueError
dataset/ETHPy150Open sympy/sympy/sympy/core/core.py/BasicMeta.__cmp__
5,583
def get_cluster_or_404(id): try: name = id cluster = CLUSTERS.get()[name] except (__HOLE__, ValueError): raise Http404() cluster = { 'id': id, 'nice_name': id, 'host_ports': cluster.HOST_PORTS.get(), 'rest_url': cluster.REST_URL.get(), } return cluster
TypeError
dataset/ETHPy150Open cloudera/hue/apps/zookeeper/src/zookeeper/utils.py/get_cluster_or_404
5,584
def GetValue(self): "Get the value from the editor" s = wx.TextCtrl.GetValue(self).strip() try: return int(s) except __HOLE__: return None
ValueError
dataset/ETHPy150Open ODM2/ODMToolsPython/odmtools/lib/oldOlv/CellEditor.py/IntEditor.GetValue
5,585
def GetValue(self): "Get the value from the editor" s = wx.TextCtrl.GetValue(self).strip() try: return long(s) except __HOLE__: return None
ValueError
dataset/ETHPy150Open ODM2/ODMToolsPython/odmtools/lib/oldOlv/CellEditor.py/LongEditor.GetValue
5,586
def GetValue(self): "Get the value from the editor" s = wx.TextCtrl.GetValue(self).strip() try: return float(s) except __HOLE__: return None
ValueError
dataset/ETHPy150Open ODM2/ODMToolsPython/odmtools/lib/oldOlv/CellEditor.py/FloatEditor.GetValue
5,587
def _ParseDateTime(self, s): # Try the installed format string first try: return datetime.datetime.strptime(s, self.formatString) except __HOLE__: pass for x in self.STD_SEPARATORS: s = s.replace(x, " ") # Because of the logic of strptime, we have to check shorter patterns first. # For example: # "31 12" matches "%d %m %y" => datetime(2012, 1, 3, 0, 0) ?? # but we want: # "31 12" to match "%d %m" => datetime(1900, 12, 31, 0, 0) # JPP 4/4/2008 Python 2.5.1 for fmt in self.allDateTimeWithoutYearFormats: try: dt = datetime.datetime.strptime(s, fmt) return dt.replace(year=datetime.datetime.today().year) except ValueError: pass for fmt in self.allDateTimeFormats: try: return datetime.datetime.strptime(s, fmt) except ValueError: pass return None #----------------------------------------------------------------------------
ValueError
dataset/ETHPy150Open ODM2/ODMToolsPython/odmtools/lib/oldOlv/CellEditor.py/DateTimeEditor._ParseDateTime
5,588
def GetValue(self): "Get the value from the editor" s = wx.TextCtrl.GetValue(self).strip() fmts = self.STD_TIME_FORMATS[:] if self.formatString not in fmts: fmts.insert(0, self.formatString) for fmt in fmts: try: dt = datetime.datetime.strptime(s, fmt) return dt.time() except __HOLE__: pass return None #====================================================================== # Auto complete controls
ValueError
dataset/ETHPy150Open ODM2/ODMToolsPython/odmtools/lib/oldOlv/CellEditor.py/TimeEditor.GetValue
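A sketch of the try-each-format parse loop in TimeEditor.GetValue above: ValueError from strptime means "wrong format, try the next one". The format list here is illustrative, not the editor's configured formats.

import datetime

def parse_time(s, fmts=("%H:%M:%S", "%H:%M")):
    for fmt in fmts:
        try:
            return datetime.datetime.strptime(s, fmt).time()
        except ValueError:
            pass  # wrong format; try the next one
    return None

assert parse_time("13:45") == datetime.time(13, 45)
assert parse_time("nonsense") is None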
5,589
def test_adds_skip(self): result = self._make_one() try: result.addSkip(FakeTestCase(), 'a reason') self.assertEqual( len(result.tracker._test_cases['FakeTestCase']), 1) except __HOLE__: self.assertTrue(True, 'Python 2.6 does not support skip.')
AttributeError
dataset/ETHPy150Open mblayman/tappy/tap/tests/test_result.py/TestTAPTestResult.test_adds_skip
5,590
def test_adds_expected_failure(self): result = self._make_one() try: result.addExpectedFailure(FakeTestCase(), (None, None, None)) line = result.tracker._test_cases['FakeTestCase'][0] self.assertEqual(line.status, 'not ok') self.assertEqual(line.directive, '(expected failure)') except __HOLE__: self.assertTrue( True, 'Python 2.6 does not support expected failure.')
AttributeError
dataset/ETHPy150Open mblayman/tappy/tap/tests/test_result.py/TestTAPTestResult.test_adds_expected_failure
5,591
def test_adds_unexpected_success(self): result = self._make_one() try: result.addUnexpectedSuccess(FakeTestCase()) line = result.tracker._test_cases['FakeTestCase'][0] self.assertEqual(line.status, 'ok') self.assertEqual(line.directive, '(unexpected success)') except __HOLE__: self.assertTrue( True, 'Python 2.6 does not support unexpected success.')
AttributeError
dataset/ETHPy150Open mblayman/tappy/tap/tests/test_result.py/TestTAPTestResult.test_adds_unexpected_success
5,592
def __getattr__(self, name): """ Gets an attribute of this fake module from its attrs. @raise AttributeError: When the requested attribute is missing. """ try: return self._attrs[name] except __HOLE__: raise AttributeError()
KeyError
dataset/ETHPy150Open twisted/twisted/twisted/python/test/test_dist.py/FakeModule.__getattr__
5,593
def __init__(self): try: import _winreg except __HOLE__: # Python 3 import winreg as _winreg if self.info is not None: return info = [] try: #XXX: Bad style to use so long `try:...except:...`. Fix it! prgx = re.compile(r"family\s+(?P<FML>\d+)\s+model\s+(?P<MDL>\d+)" \ "\s+stepping\s+(?P<STP>\d+)", re.IGNORECASE) chnd = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, self.pkey) pnum = 0 while 1: try: proc = _winreg.EnumKey(chnd, pnum) except _winreg.error: break else: pnum += 1 info.append({"Processor": proc}) phnd = _winreg.OpenKey(chnd, proc) pidx = 0 while True: try: name, value, vtpe = _winreg.EnumValue(phnd, pidx) except _winreg.error: break else: pidx = pidx + 1 info[-1][name] = value if name == "Identifier": srch = prgx.search(value) if srch: info[-1]["Family"] = int(srch.group("FML")) info[-1]["Model"] = int(srch.group("MDL")) info[-1]["Stepping"] = int(srch.group("STP")) except: print(sys.exc_value, '(ignoring)') self.__class__.info = info
ImportError
dataset/ETHPy150Open pydata/numexpr/numexpr/cpuinfo.py/Win32CPUInfo.__init__
5,594
def deserialize(cassette_string, serializer): try: data = serializer.deserialize(cassette_string) # Old cassettes used to use yaml object thingy so I have to # check for some fairly stupid exceptions here except (__HOLE__, yaml.constructor.ConstructorError): _warn_about_old_cassette_format() if _looks_like_an_old_cassette(data): _warn_about_old_cassette_format() requests = [Request._from_dict(r['request']) for r in data['interactions']] responses = [ compat.convert_to_bytes(r['response']) for r in data['interactions'] ] return requests, responses
ImportError
dataset/ETHPy150Open kevin1024/vcrpy/vcr/serialize.py/deserialize
5,595
def validate(self, auto_migrate=False):
        if self.use_autorefs:
            if not auto_migrate:
                # don't make reference if auto_migrate is True because this
                # means validate was called from __init__ and no collection is
                # found when validating at __init__ with autorefs
                self._make_reference(self, self.structure)
        size = self.get_size()
        (size_limit, size_limit_str) = self._get_size_limit()
        if size > size_limit:
            raise MaxDocumentSizeError("The document size is too big, documents "
                                       "lower than %s is allowed (got %s bytes)" % (size_limit_str, size))
        if auto_migrate:
            error = None
            try:
                super(Document, self).validate()
            except StructureError, e:
                error = e
            except __HOLE__, e:
                error = e
            except SchemaTypeError, e:
                error = e
            if error:
                if not self.migration_handler:
                    raise StructureError(str(error))
                else:
                    # if we are here that's because super.validate failed
                    # but it has processed custom type to bson.
                    self._migrate(process_to_bson=False)
        else:
            super(Document, self).validate()
KeyError
dataset/ETHPy150Open namlook/mongokit/mongokit/document.py/Document.validate
5,596
def one(self, *args, **kwargs): """ `one()` act like `find()` but will raise a `mongokit.MultipleResultsFound` exception if there is more than one result. If no document is found, `one()` returns `None` """ bson_obj = self.find(*args, **kwargs) count = bson_obj.count() if count > 1: raise MultipleResultsFound("%s results found" % count) elif count == 1: try: doc = bson_obj.next() except __HOLE__: doc = None return doc
StopIteration
dataset/ETHPy150Open namlook/mongokit/mongokit/document.py/Document.one
5,597
def to_json(self): """ convert the document into a json string and return it """ def _convert_to_python(doc, struct): for key in struct: if isinstance(struct[key], dict): if doc: # we don't need to process an empty doc if key in doc: # we don't care about missing fields _convert_to_python(doc[key], struct[key]) elif type(struct[key]) is list: if struct[key]: if isinstance(struct[key][0], R): l_objs = [] for obj in doc[key]: obj['_collection'] = self.collection.name obj['_database'] = self.db.name l_objs.append(obj) doc[key] = l_objs elif isinstance(struct[key][0], dict): if doc[key]: for obj in doc[key]: _convert_to_python(obj, struct[key][0]) else: if isinstance(struct[key], R) and doc[key] is not None: doc[key]['_collection'] = self.collection.name doc[key]['_database'] = self.db.name try: from json import dumps except ImportError: from anyjson import serialize as dumps except __HOLE__: raise ImportError("can't import anyjson. Please install it before continuing.") obj = self.to_json_type() _convert_to_python(obj, self.structure) return unicode(dumps(obj))
ImportError
dataset/ETHPy150Open namlook/mongokit/mongokit/document.py/Document.to_json
5,598
def from_json(self, json): """ convert a json string and return a SchemaDocument """ def _convert_to_python(doc, struct, path="", root_path=""): for key in struct: if type(key) is type: new_key = '$%s' % key.__name__ else: new_key = key new_path = ".".join([path, new_key]).strip('.') if isinstance(struct[key], dict): if doc: # we don't need to process an empty doc if key in doc: # we don't care about missing fields _convert_to_python(doc[key], struct[key], new_path, root_path) elif type(struct[key]) is list: if struct[key]: if struct[key][0] is datetime.datetime: l_objs = [] for obj in doc[key]: obj = fromtimestamp(obj) l_objs.append(obj) doc[key] = l_objs elif isinstance(struct[key][0], R): l_objs = [] for obj in doc[key]: db = obj.get('_database') or obj.get('$db') col = obj.get('_collection') or obj.get('$ref') if '_id' in obj: id_ref = '_id' if '$oid' in obj['_id']: obj['_id'] = ObjectId(obj['_id']['$oid']) elif '$id' in obj: id_ref = '$id' obj_class = struct[key][0]._doc _id = obj[id_ref] obj = getattr(self.connection[db][col], obj_class.__name__).one({'_id': _id}) #obj = struct[key][0]._doc(obj, collection=self.connection[db][col]).get_dbref() l_objs.append(obj) doc[key] = l_objs elif isinstance(struct[key][0], dict): if doc[key]: for obj in doc[key]: _convert_to_python(obj, struct[key][0], new_path, root_path) elif struct[key] is datetime.datetime and doc[key] is not None: doc[key] = fromtimestamp(doc[key]) elif (isinstance(struct[key], R) or isinstance(struct[key], DocumentProperties)) and doc[key] is not None: db = doc[key].get('_database') or doc[key].get('$db') col = doc[key].get('_collection') or doc[key].get('$ref') if '_id' in doc[key]: id_ref = '_id' elif '$id' in doc[key]: id_ref = '$id' if '$oid' in doc[key][id_ref]: doc[key][id_ref] = ObjectId(doc[key][id_ref]['$oid']) if isinstance(struct[key], R): obj_class = struct[key]._doc else: obj_class = struct[key] #_id = obj_class(doc[key], collection=self.connection[db][col])[id_ref] _id = doc[key][id_ref] doc[key] = getattr(self.connection[db][col], obj_class.__name__).one({'_id': _id}) try: from json import loads except __HOLE__: from anyjson import deserialize as loads except ImportError: raise ImportError("can't import anyjson. Please install it before continuing.") obj = loads(json) _convert_to_python(obj, self.structure) if '_id' in obj: if '$oid' in obj['_id']: obj['_id'] = ObjectId(obj['_id']['$oid']) return self._obj_class(obj, collection=self.collection) # # End of public API #
ImportError
dataset/ETHPy150Open namlook/mongokit/mongokit/document.py/Document.from_json
5,599
@classmethod def parse(cls, image_name): result = cls() # registry.org/namespace/repo:tag s = image_name.split('/', 2) if len(s) == 2: if '.' in s[0] or ':' in s[0]: result.registry = s[0] else: result.namespace = s[0] elif len(s) == 3: result.registry = s[0] result.namespace = s[1] result.repo = s[-1] try: result.repo, result.tag = result.repo.rsplit(':', 1) except __HOLE__: pass return result
ValueError
dataset/ETHPy150Open projectatomic/atomic-reactor/atomic_reactor/util.py/ImageName.parse
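A final sketch of the rsplit-unpack idiom the masked handler above guards: when the repo name carries no ':', rsplit returns a one-element list, tuple unpacking raises ValueError, and the tag simply stays unset. The sample name is illustrative.

repo, tag = "busybox", None
try:
    repo, tag = repo.rsplit(":", 1)  # no ':' -> one-element list -> ValueError
except ValueError:
    pass  # no explicit tag in the image name
print(repo, tag)  # busybox None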