Columns:
Unnamed: 0: int64 (values 0 to 10k)
function: string (lengths 79 to 138k)
label: string (20 classes)
info: string (lengths 42 to 261)
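The schema above reads naturally as a pandas-style table (the "Unnamed: 0" column is the usual artifact of a CSV round-trip). A minimal sketch of loading and inspecting one record follows; the file name is hypothetical and the actual distribution format is not specified in this listing:

import pandas as pd

# Hypothetical file name, for illustration only.
df = pd.read_csv('ethpy150_holes.csv')

row = df.iloc[0]
print(row['function'])  # flattened source with the __HOLE__ mask
print(row['label'])     # the exception class that fills the hole, e.g. 'AttributeError'
print(row['info'])      # provenance path into the ETHPy150 corpus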
4,500
def import_book(stream):
    """Return dataset of given stream."""
    (format, stream) = detect(stream)
    try:
        databook = Databook()
        format.import_book(databook, stream)
        return databook
    except __HOLE__:
        return None
AttributeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/tablib-0.10.0/tablib/core.py/import_book
4,501
@osbsapi
def create_build(self, **kwargs):
    """
    take input args, create build request from provided build type and submit the build

    :param kwargs: keyword args for build
    :return: instance of BuildRequest
    """
    build_type = self.build_conf.get_build_type()
    if build_type in (PROD_BUILD_TYPE,
                      PROD_WITHOUT_KOJI_BUILD_TYPE,
                      PROD_WITH_SECRET_BUILD_TYPE):
        kwargs.setdefault('git_branch', None)
        kwargs.setdefault('target', None)
        return self.create_prod_build(**kwargs)
    elif build_type == SIMPLE_BUILD_TYPE:
        # Only Prod Build type cares about potential koji scratch builds
        try:
            kwargs.pop("scratch")
        except __HOLE__:
            pass
        return self.create_simple_build(**kwargs)
    elif build_type == PROD_WITH_SECRET_BUILD_TYPE:
        return self.create_prod_with_secret_build(**kwargs)
    else:
        raise OsbsException("Unknown build type: '%s'" % build_type)
KeyError
dataset/ETHPy150Open projectatomic/osbs-client/osbs/api.py/OSBS.create_build
4,502
def isclass(obj):
    try:
        issubclass(obj, object)
    except __HOLE__:
        return False
    else:
        return True
TypeError
dataset/ETHPy150Open nicolaiarocci/cerberus/cerberus/utils.py/isclass
4,503
def get_library_instance(self, libname):
    try:
        return self._testlibs[libname.replace(' ', '')].get_instance()
    except __HOLE__:
        raise DataError("No library with name '%s' found." % libname)
KeyError
dataset/ETHPy150Open shellderp/sublime-robot-plugin/lib/robot/running/namespace.py/Namespace.get_library_instance
4,504
def _is_old_x_times_syntax(self, name):
    if not name.lower().endswith('x'):
        return False
    times = name[:-1].strip()
    if is_scalar_var(times):
        return True
    try:
        int(times)
    except __HOLE__:
        return False
    else:
        return True
ValueError
dataset/ETHPy150Open shellderp/sublime-robot-plugin/lib/robot/running/namespace.py/Namespace._is_old_x_times_syntax
4,505
def _get_commands(self):
    try:
        variables = Utils.get_variables_from_file(self.abspath, self.script_encoding)
        SQL_UP = Migration.ensure_sql_unicode(variables['SQL_UP'], self.script_encoding)
        SQL_DOWN = Migration.ensure_sql_unicode(variables['SQL_DOWN'], self.script_encoding)
        (SQL_UP, SQL_DOWN)
    except __HOLE__:
        raise Exception("migration file is incorrect; it does not define 'SQL_UP' or 'SQL_DOWN' (%s)" % self.abspath)

    if SQL_UP is None or SQL_UP == "":
        raise Exception("migration command 'SQL_UP' is empty (%s)" % self.abspath)

    if SQL_DOWN is None or SQL_DOWN == "":
        raise Exception("migration command 'SQL_DOWN' is empty (%s)" % self.abspath)

    return SQL_UP, SQL_DOWN
KeyError
dataset/ETHPy150Open guilhermechapiewski/simple-db-migrate/simple_db_migrate/core/__init__.py/Migration._get_commands
4,506
@staticmethod
def create(migration_name, migration_dir='.', script_encoding='utf-8', utc_timestamp = False):
    timestamp = strftime("%Y%m%d%H%M%S", gmtime() if utc_timestamp else localtime())
    file_name = "%s_%s%s" % (timestamp, migration_name, Migration.MIGRATION_FILES_EXTENSION)

    if not Migration.is_file_name_valid(file_name):
        raise Exception("invalid migration name ('%s'); it should contain only letters, numbers and/or underscores" % migration_name)

    new_file_name = "%s/%s" % (migration_dir, file_name)

    try:
        f = codecs.open(new_file_name, "w", script_encoding)
        f.write(Migration.TEMPLATE % (script_encoding))
        f.close()
    except __HOLE__:
        raise Exception("could not create file ('%s')" % new_file_name)

    return new_file_name
IOError
dataset/ETHPy150Open guilhermechapiewski/simple-db-migrate/simple_db_migrate/core/__init__.py/Migration.create
4,507
def get_all_migrations(self):
    if self.all_migrations:
        return self.all_migrations

    migrations = []
    for _dir in self._migrations_dir:
        path = os.path.abspath(_dir)
        dir_list = None
        try:
            dir_list = os.listdir(path)
        except __HOLE__:
            raise Exception("directory not found ('%s')" % path)

        for dir_file in dir_list:
            if dir_file.endswith(Migration.MIGRATION_FILES_EXTENSION) and Migration.is_file_name_valid(dir_file):
                migration = Migration('%s/%s' % (path, dir_file), script_encoding=self._script_encoding)
                migrations.append(migration)

    if len(migrations) == 0:
        raise Exception("no migration files found")

    self.all_migrations = Migration.sort_migrations_list(migrations)
    return self.all_migrations
OSError
dataset/ETHPy150Open guilhermechapiewski/simple-db-migrate/simple_db_migrate/core/__init__.py/SimpleDBMigrate.get_all_migrations
4,508
def _makeTracePlayable(self, trace, traceFileName, checkCoherence = True):
    """Makes sure that the given trace is playable.

    @returns a tuple (trace_file_name, is_temporary), where the
             is_temporary flag indicates that the trace file is temporary
             and should be deleted after playing.
    """
    # Find a suitable exporter
    try:
        exporter = [e for e in self.analyzer.exportPlugins if e.formatName == "binary"][0]
    except __HOLE__:
        self.analyzer.fail("No binary format exporter found.")

    # Check that the trace file is playable -- if not, we need to resave it
    if traceFileName and checkCoherence:
        reader = BinaryCodec.Reader(trace, open(traceFileName, "rb"))
        if not reader.isTracePlayable():
            self.analyzer.reportWarning("The trace file will be normalized before playing. Save and reload the trace to avoid this step.")
            traceFileName = None

    # If there is no physical trace file, save the trace to a temporary file
    if not traceFileName:
        fd, traceFileName = tempfile.mkstemp(suffix = "_trace.bin")
        os.close(fd)
        traceFile = open(traceFileName, "wb")
        exporter.saveTrace(trace, traceFile)
        traceFile.close()
        removeTraceFile = True
    else:
        removeTraceFile = False

    return (traceFileName, removeTraceFile)
IndexError
dataset/ETHPy150Open skyostil/tracy/src/analyzer/plugins/player/Player.py/TracePlayer._makeTracePlayable
4,509
def testUnknownType(self):
    for value in [None, 19410, "xyz"]:
        try:
            outils.ContainerToDicts(value)
        except __HOLE__, err:
            self.assertTrue(str(err).startswith("Unknown container type"))
        else:
            self.fail("Exception was not raised")
TypeError
dataset/ETHPy150Open ganeti/ganeti/test/py/ganeti.outils_unittest.py/TestContainerToDicts.testUnknownType
4,510
def testUnknownType(self):
    for cls in [str, int, bool]:
        try:
            outils.ContainerFromDicts(None, cls, NotImplemented)
        except __HOLE__, err:
            self.assertTrue(str(err).startswith("Unknown container type"))
        else:
            self.fail("Exception was not raised")

        try:
            outils.ContainerFromDicts(None, cls(), NotImplemented)
        except TypeError, err:
            self.assertTrue(str(err).endswith("is not a type"))
        else:
            self.fail("Exception was not raised")
TypeError
dataset/ETHPy150Open ganeti/ganeti/test/py/ganeti.outils_unittest.py/TestContainerFromDicts.testUnknownType
4,511
def define_find_libary():
    import re
    import tempfile
    import errno

    def _findlib_gcc(name):
        expr = r'[^\(\)\s]*lib%s\.[^\(\)\s]*' % re.escape(name)
        fdout, ccout = tempfile.mkstemp()
        _os.close(fdout)
        cmd = 'if type gcc >/dev/null 2>&1; then CC=gcc; else CC=cc; fi;' \
              '$CC -Wl,-t -o ' + ccout + ' 2>&1 -l' + name
        trace = ''
        try:
            f = _os.popen(cmd)
            trace = f.read()
            f.close()
        finally:
            try:
                _os.unlink(ccout)
            except __HOLE__ as e:
                if e.errno != errno.ENOENT:
                    raise
        res = re.search(expr, trace)
        if not res:
            return None
        return res.group(0)

    def _findlib_ldconfig(name):
        # XXX assuming GLIBC's ldconfig (with option -p)
        expr = r'/[^\(\)\s]*lib%s\.[^\(\)\s]*' % re.escape(name)
        res = re.search(expr, _os.popen('/sbin/ldconfig -p 2>/dev/null').read())
        if not res:
            # Hm, this works only for libs needed by the python executable.
            cmd = 'ldd %s 2>/dev/null' % _sys.executable
            res = re.search(expr, _os.popen(cmd).read())
            if not res:
                return None
        return res.group(0)

    def _find_library(name):
        path = _findlib_ldconfig(name) or _findlib_gcc(name)
        if path:
            return _os.path.realpath(path)
        return path

    return _find_library
OSError
dataset/ETHPy150Open hgrecco/pyvisa/pyvisa/ctwrapper/cthelper.py/define_find_libary
4,512
def django_tests(verbosity, interactive, failfast, test_labels):
    from django.conf import settings
    state = setup(verbosity, test_labels)

    # Add tests for invalid models.
    extra_tests = []
    for model_dir, model_name in get_invalid_models():
        model_label = '.'.join([model_dir, model_name])
        if not test_labels or model_name in test_labels:
            extra_tests.append(InvalidModelTestCase(model_label))
            try:
                # Invalid models are not working apps, so we cannot pass them into
                # the test runner with the other test_labels
                test_labels.remove(model_name)
            except __HOLE__:
                pass

    # If GeoDjango is used, add it's tests that aren't a part of
    # an application (e.g., GEOS, GDAL, Distance objects).
    if geodjango(settings):
        from django.contrib.gis.tests import geodjango_suite
        extra_tests.append(geodjango_suite(apps=False))

    # Run the test suite, including the extra validation tests.
    from django.test.utils import get_runner
    if not hasattr(settings, 'TEST_RUNNER'):
        settings.TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
    TestRunner = get_runner(settings)

    if hasattr(TestRunner, 'func_name'):
        # Pre 1.2 test runners were just functions,
        # and did not support the 'failfast' option.
        import warnings
        warnings.warn(
            'Function-based test runners are deprecated. Test runners should be classes with a run_tests() method.',
            PendingDeprecationWarning
        )
        failures = TestRunner(test_labels, verbosity=verbosity, interactive=interactive,
                              extra_tests=extra_tests)
    else:
        test_runner = TestRunner(verbosity=verbosity, interactive=interactive, failfast=failfast)
        failures = test_runner.run_tests(test_labels, extra_tests=extra_tests)

    teardown(state)
    return failures
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/tests/runtests.py/django_tests
4,513
def bisect_tests(bisection_label, options, test_labels):
    state = setup(int(options.verbosity), test_labels)

    if not test_labels:
        # Get the full list of test labels to use for bisection
        from django.db.models.loading import get_apps
        test_labels = [app.__name__.split('.')[-2] for app in get_apps()]

    print '***** Bisecting test suite:',' '.join(test_labels)

    # Make sure the bisection point isn't in the test list
    # Also remove tests that need to be run in specific combinations
    for label in [bisection_label, 'model_inheritance_same_model_name']:
        try:
            test_labels.remove(label)
        except __HOLE__:
            pass

    subprocess_args = ['python','runtests.py', '--settings=%s' % options.settings]
    if options.failfast:
        subprocess_args.append('--failfast')
    if options.verbosity:
        subprocess_args.append('--verbosity=%s' % options.verbosity)
    if not options.interactive:
        subprocess_args.append('--noinput')

    iteration = 1
    while len(test_labels) > 1:
        midpoint = len(test_labels)/2
        test_labels_a = test_labels[:midpoint] + [bisection_label]
        test_labels_b = test_labels[midpoint:] + [bisection_label]
        print '***** Pass %da: Running the first half of the test suite' % iteration
        print '***** Test labels:',' '.join(test_labels_a)
        failures_a = subprocess.call(subprocess_args + test_labels_a)

        print '***** Pass %db: Running the second half of the test suite' % iteration
        print '***** Test labels:',' '.join(test_labels_b)
        print
        failures_b = subprocess.call(subprocess_args + test_labels_b)

        if failures_a and not failures_b:
            print "***** Problem found in first half. Bisecting again..."
            iteration = iteration + 1
            test_labels = test_labels_a[:-1]
        elif failures_b and not failures_a:
            print "***** Problem found in second half. Bisecting again..."
            iteration = iteration + 1
            test_labels = test_labels_b[:-1]
        elif failures_a and failures_b:
            print "***** Multiple sources of failure found"
            break
        else:
            print "***** No source of failure found... try pair execution (--pair)"
            break

    if len(test_labels) == 1:
        print "***** Source of error:",test_labels[0]

    teardown(state)
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/tests/runtests.py/bisect_tests
4,514
def paired_tests(paired_test, options, test_labels):
    state = setup(int(options.verbosity), test_labels)

    if not test_labels:
        print ""
        # Get the full list of test labels to use for bisection
        from django.db.models.loading import get_apps
        test_labels = [app.__name__.split('.')[-2] for app in get_apps()]

    print '***** Trying paired execution'

    # Make sure the bisection point isn't in the test list
    # Also remove tests that need to be run in specific combinations
    for label in [paired_test, 'model_inheritance_same_model_name']:
        try:
            test_labels.remove(label)
        except __HOLE__:
            pass

    subprocess_args = ['python','runtests.py', '--settings=%s' % options.settings]
    if options.failfast:
        subprocess_args.append('--failfast')
    if options.verbosity:
        subprocess_args.append('--verbosity=%s' % options.verbosity)
    if not options.interactive:
        subprocess_args.append('--noinput')

    for i, label in enumerate(test_labels):
        print '***** %d of %d: Check test pairing with %s' % (i+1, len(test_labels), label)
        failures = subprocess.call(subprocess_args + [label, paired_test])
        if failures:
            print '***** Found problem pair with',label
            return

    print '***** No problem pair found'
    teardown(state)
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.2/tests/runtests.py/paired_tests
4,515
def sort_by(iterable, sortstr=None, reverse=False, col_map=None,
            default_type=None, default_value=None):
    """sort an iterable, cast to ``col_map.get(colkey, default_type)``,
    and default to ``default_value``.

    Args:
        iterable (iterable): iterable of lines/rows

    Kwargs:
        sortstr (None, str): comma separated list of column index (``1,2,3``)
        reverse (bool): (True, Descending), (False, Ascending) default: False
        col_map (None, dict): dict mapping column n to a typefunc
        default_type (None, callable): type callable (default: None)
        default_value (\*): default N/A value for columns not specified
            in col_map (default: None)

    Returns:
        list: sorted list of lines/rows
    """
    # raise Exception()
    def keyfunc_iter(obj, sortstr=sortstr, col_map=col_map):
        """Parse and yield column values according to ``sortstr`` and ``col_map``

        Args:
            obj (object): obj to sort (as from ``sorted(keyfunc=thisfunc)``)
            sortstr (str): sort string of comma-separated columns
            col_map (None, dict): dict mapping column n to a typefunc (default: None)

        Yields:
            object: typecasted column value
        """
        if sortstr:
            column_sequence = get_list_from_str(sortstr, typefunc=int)
        else:
            column_sequence = xrange(len(obj.result))
        log.debug(('column_sequence', column_sequence))
        if col_map is None:
            col_map = {}
        for n in column_sequence:
            type_func = col_map.get(str(n), default_type)
            retval = default_value
            if n < len(obj.result):
                colvalue = obj.result[n]
                if type_func:
                    try:
                        retval = type_func(colvalue)
                    except __HOLE__ as e:
                        e.msg += "\n" + repr((type_func, colvalue, e,))
                        raise
                else:
                    retval = colvalue
            else:
                retval = default_value
            yield retval

    def keyfunc(obj, sortstr=sortstr, col_map=col_map):
        """key function (e.g. for ``sorted(key=keyfunc)``)

        Args:
            obj (PylineResult): ``obj.result = ['col1', 'col2', 'coln']``

        Returns:
            tuple: (col2, col0, col1)
        """
        keyvalue = tuple(keyfunc_iter(obj, sortstr, col_map))
        errdata = [
            (('keyvalue', keyvalue),
             ('sortstr', sortstr))]
        log.debug((errdata,))
        return keyvalue

    sorted_values = sorted(iterable, key=keyfunc, reverse=reverse)
    return sorted_values
ValueError
dataset/ETHPy150Open westurner/pyline/pyline.py/sort_by
4,516
def str2boolintorfloat(str_):
    """
    Try to cast a string as a ``bool``, ``float``, ``int``,
    or ``str_.__class__``.

    Args:
        str_ (basestring): string to try and cast

    Returns:
        object: casted ``{bool, float, int, or str_.__class__}``
    """
    match = re.match('([\d\.]+)', str_)
    type_ = None
    if not match:
        type_ = str_.__class__
        value = str_
        value_lower = value.strip().lower()
        if value_lower == 'true':
            type_ = bool
            value = True
        elif value_lower == 'false':
            type_ = bool
            value = False
        return value
    else:
        try:
            numstr = match.group(1)
            if '.' in numstr:
                type_ = float
                value = type_(numstr)
            else:
                type_ = int
                value = type_(numstr)
        except (__HOLE__, NameError, IndexError) as e:
            value = str_
            log.exception((e, (type_, value)))
    return value
ValueError
dataset/ETHPy150Open westurner/pyline/pyline.py/str2boolintorfloat
4,517
def get_request_url(self, request_id):
    """Returns the URL of the request, e.g. 'http://localhost:8080/foo?bar=baz'.

    Args:
      request_id: The string id of the request making the API call.

    Returns:
      The URL of the request as a string.
    """
    try:
        host = os.environ['HTTP_HOST']
    except __HOLE__:
        host = os.environ['SERVER_NAME']
        port = os.environ['SERVER_PORT']
        if port != '80':
            host += ':' + port
    url = 'http://' + host
    url += urllib.quote(os.environ.get('PATH_INFO', '/'))
    if os.environ.get('QUERY_STRING'):
        url += '?' + os.environ['QUERY_STRING']
    return url
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/google/appengine/api/request_info.py/_LocalRequestInfo.get_request_url
4,518
def main(argv=None):
    import lymph.monkey
    lymph.monkey.patch()

    import docopt
    import sys
    import logging

    from lymph import __version__ as VERSION
    from lymph.cli.help import HELP
    from lymph.cli.base import get_command_class
    from lymph.utils import logging as lymph_logging

    bootup_handler = logging.StreamHandler()
    logging.getLogger().addHandler(bootup_handler)

    args = docopt.docopt(HELP, argv, version=VERSION, options_first=True)
    name = args.pop('<command>')
    argv = args.pop('<args>')
    try:
        command_cls = get_command_class(name)
    except __HOLE__:
        print("'%s' is not a valid lymph command. See 'lymph list' or 'lymph help'." % name)
        return 1
    command_args = docopt.docopt(command_cls.get_help(), [name] + argv)
    args.update(command_args)

    config = setup_config(args) if command_cls.needs_config else None

    logging.getLogger().removeHandler(bootup_handler)
    if config:
        loglevel = args.get('--loglevel', 'ERROR')
        logfile = args.get('--logfile')
        lymph_logging.setup_logging(config, loglevel, logfile)
    else:
        logging.basicConfig()

    sys.excepthook = _excepthook

    terminal = setup_terminal(args, config)
    command = command_cls(args, config, terminal)
    return command.run()
KeyError
dataset/ETHPy150Open deliveryhero/lymph/lymph/cli/main.py/main
4,519
def main(directory):
    if not os.path.isdir(directory):
        sys.exit("ERROR: {} is not a directory".format(directory))

    # tag the metrics to group them together
    metrics.tag("worker_latency", "worker")
    metrics.tag("worker_throughput", "worker")

    # register a csv reporter that will dump the metrics with the tag "worker"
    # each 2 seconds to csv files in the given directory
    reporter.register(
        reporter.CSVReporter(directory),
        reporter.fixed_interval_scheduler(2),
        "worker")

    # emulate some work
    print("Hit CTRL-C to stop the process")
    while True:
        try:
            worker()
        except __HOLE__:
            break
KeyboardInterrupt
dataset/ETHPy150Open avalente/appmetrics/examples/csv_reporter.py/main
4,520
def get_remote(self, node, method):
    method = method or "ssh"
    key = (node.name, method)
    remote = self.remotes.get(key)
    if not remote:
        parts = method.split(":", 1)
        if len(parts) == 2:
            method, args = parts
            args = [args]
        else:
            args = []
        try:
            control_class = METHODS[method]
        except __HOLE__:
            raise errors.RemoteError(
                "unknown remote control method %r" % method)
        remote = control_class(node, *args)
        self.remotes[key] = remote
    return remote
KeyError
dataset/ETHPy150Open ohmu/poni/poni/rcontrol_all.py/RemoteManager.get_remote
4,521
def _read(fname):
    try:
        return open(op.join(op.dirname(__file__), fname)).read()
    except __HOLE__:
        return ''
IOError
dataset/ETHPy150Open klen/flask-pw/setup.py/_read
4,522
def run(self):
    try:
        while True:
            with self.abort_lock:
                if self.abort_flag:
                    return

            # Get the value from the generator.
            try:
                msg = self.coro.next()
            except __HOLE__:
                break

            # Send messages to the next stage.
            for msg in _allmsgs(msg):
                with self.abort_lock:
                    if self.abort_flag:
                        return
                self.out_queue.put(msg)
    except:
        self.abort_all(sys.exc_info())
        return

    # Generator finished; shut down the pipeline.
    self.out_queue.release()
StopIteration
dataset/ETHPy150Open beetbox/beets/beets/util/pipeline.py/FirstPipelineThread.run
4,523
def get_vid_for_direction(instance, direction):
    ''' get next video instance based on direction and current video instance'''
    category = instance.category
    video_qs = category.video_set.all()
    if direction == "next":
        new_qs = video_qs.filter(order__gt=instance.order)
    else:
        new_qs = video_qs.filter(order__lt=instance.order).reverse()
    next_vid = None
    if len(new_qs) >= 1:
        try:
            next_vid = new_qs[0]
        except __HOLE__:
            next_vid = None
    return next_vid
IndexError
dataset/ETHPy150Open codingforentrepreneurs/srvup-membership/src/videos/utils.py/get_vid_for_direction
4,524
def writeData(self,req,tblName):
    try:
        res = writeDataSrvResponse()
        db_username,db_password=self.getLogin()
        con = mdb.connect('localhost', db_username, db_password, 'RappStore');
        cur = con.cursor()
        returncols=self.constructCommaColumns(req.req_cols)
        if (len(returncols)>1):
            returncols="("+returncols+")"
        print returncols
        values=""
        for i in range(len(req.req_data)):
            if (i==0):
                values=values+"("+self.constructCommaColumns(req.req_data[i].s)+")"
            else:
                values=values+",("+self.constructCommaColumns(req.req_data[i].s)+")"
        query="Insert into "+tblName+" "+ returncols+" values "+values
        cur.execute("LOCK TABLES "+tblName+" WRITE")
        cur.execute(query)
        cur.execute("UNLOCK TABLES")
        res.success.data=True
        res.trace.append("Success")
    except mdb.Error, e:
        res.trace.append(("Database Error %d: %s" % (e.args[0],e.args[1])))
        res.success.data=False
        print "Error %d: %s" % (e.args[0],e.args[1])
    except IndexError:
        res.trace.append("Wrong Query Input Format, check for empty required columns list or wrong/incomplete Query data format")
        res.success.data=False
        print "Wrong Query Input Format, check for empty required columns list or wrong/incomplete Query data format"
    except __HOLE__:
        print "Error: can\'t find login file or read data"
        res.success.data=False
        res.trace.append("Error: can\'t find login file or read data")
    return res

## @brief Implements the general delete data from table function
# @param req [rapp_platform_ros_communications::deleteDataSrvRequest::Request&] The ROS service request
# @param tblName [string] the table name
# @param res [rapp_platform_ros_communications::deleteDataSrvResponse::Response&] The ROS service response
IOError
dataset/ETHPy150Open rapp-project/rapp-platform/rapp_mysql_wrapper/src/mysql_wrapper.py/MySQLdbWrapper.writeData
4,525
def deleteData(self,req,tblName):
    try:
        res = deleteDataSrvResponse()
        db_username,db_password=self.getLogin()
        con = mdb.connect('localhost', db_username, db_password, 'RappStore');
        cur = con.cursor()
        where=self.constructAndQuery(req.where_data)
        query="Delete from "+tblName+where
        cur.execute("LOCK TABLES "+tblName+" WRITE")
        cur.execute(query)
        cur.execute("UNLOCK TABLES")
        res.success.data=True
        res.trace.append("Success")
    except mdb.Error, e:
        res.trace.append(("Database Error %d: %s" % (e.args[0],e.args[1])))
        res.success.data=False
        print "Error %d: %s" % (e.args[0],e.args[1])
    except IndexError:
        res.trace.append("Wrong Query Input Format, check for empty required columns list or wrong/incomplete Query data format")
        res.success.data=False
        print "Wrong Query Input Format, check for empty required columns list or wrong/incomplete Query data format"
    except __HOLE__:
        print "Error: can\'t find login file or read data"
        res.success.data=False
        res.trace.append("Error: can\'t find login file or read data")
    return res

## @brief Implements the general update data from table function
# @param req [rapp_platform_ros_communications::updateDataSrvRequest::Request&] The ROS service request
# @param tblName [string] the table name
# @param res [rapp_platform_ros_communications::updateDataSrvResponse::Response&] The ROS service response
IOError
dataset/ETHPy150Open rapp-project/rapp-platform/rapp_mysql_wrapper/src/mysql_wrapper.py/MySQLdbWrapper.deleteData
4,526
def updateData(self,req,tblName):
    try:
        res = updateDataSrvResponse()
        db_username,db_password=self.getLogin()
        con = mdb.connect('localhost', db_username, db_password, 'RappStore');
        cur = con.cursor()
        returncols=self.constructCommaColumns(req.set_cols)
        where=self.constructAndQuery(req.where_data)
        query="Update "+tblName+" SET "+returncols+where
        print query
        cur.execute("LOCK TABLES "+tblName+" WRITE")
        cur.execute(query)
        cur.execute("UNLOCK TABLES")
        res.success.data=True
        res.trace.append("Success")
    except mdb.Error, e:
        res.trace.append(("Database Error %d: %s" % (e.args[0],e.args[1])))
        res.success.data=False
        print "Error %d: %s" % (e.args[0],e.args[1])
    except __HOLE__:
        res.trace.append("Wrong Query Input Format, check for empty required columns list or wrong/incomplete Query data format")
        res.success.data=False
        print "Wrong Query Input Format, check for empty required columns list or wrong/incomplete Query data format"
    except IOError:
        print "Error: can\'t find login file or read data"
        res.success.data=False
        res.trace.append("Error: can\'t find login file or read data")
    return res

## @brief Implements the general fetch data from table function
# @param req [rapp_platform_ros_communications::fetchDataSrvRequest::Request&] The ROS service request
# @param tblName [string] the table name
# @param res [rapp_platform_ros_communications::fetchDataSrvResponse::Response&] The ROS service response
IndexError
dataset/ETHPy150Open rapp-project/rapp-platform/rapp_mysql_wrapper/src/mysql_wrapper.py/MySQLdbWrapper.updateData
4,527
def fetchData(self,req,tblName):
    try:
        res = fetchDataSrvResponse()
        db_username,db_password=self.getLogin()
        con = mdb.connect('localhost', db_username, db_password, 'RappStore');
        cur = con.cursor()
        returncols=self.constructCommaColumns(req.req_cols)
        where=self.constructAndQuery(req.where_data)
        query="SELECT "+returncols+" FROM "+tblName+where
        cur.execute(query)
        result_set = cur.fetchall()
        for i in range(len(result_set)):
            line=StringArrayMsg()
            for j in range(len(result_set[i])):
                temp_s=String(result_set[i][j])
                line.s.append((str(result_set[i][j])))
            res.res_data.append(line)
        con.close()
        if (returncols=="*"):
            res.res_cols=self.getTableColumnNames(tblName)
        else:
            res.res_cols=req.req_cols
        res.success.data=True
        res.trace.append("Success")
    except mdb.Error, e:
        res.trace.append(("Database Error %d: %s" % (e.args[0],e.args[1])))
        res.success.data=False
        print "Error %d: %s" % (e.args[0],e.args[1])
    except __HOLE__:
        res.trace.append("Wrong Query Input Format, check for empty required columns list or wrong/incomplete Query data format")
        res.success.data=False
        print "Wrong Query Input Format, check for empty required columns list or wrong/incomplete Query data format"
    except IOError:
        print "Error: can\'t find login file or read data"
        res.success.data=False
        res.trace.append("Error: can\'t find login file or read data")
    return res

## @brief Implements the whatRappsCanRun service
# @param req [rapp_platform_ros_communications::whatRappsCanRunSrvResponse::Request&] The ROS service request
# @param tblName [string] the table name
# @param res [rapp_platform_ros_communications::whatRappsCanRunSrvResponse::Response&] The ROS service response
IndexError
dataset/ETHPy150Open rapp-project/rapp-platform/rapp_mysql_wrapper/src/mysql_wrapper.py/MySQLdbWrapper.fetchData
4,528
def whatRappsCanRun(self,req,tblName):
    try:
        res = whatRappsCanRunSrvResponse()
        db_username,db_password=self.getLogin()
        con = mdb.connect('localhost', db_username, db_password, 'RappStore');
        cur = con.cursor()
        query="SELECT rapp_id from tblRappsModelsVersion where model_id='"+req.model_id+"' and minimum_coreagent_version<='"+req.core_agent_version+"'";
        cur.execute(query)
        result_set = cur.fetchall()
        for i in range(len(result_set)):
            line=StringArrayMsg()
            for j in range(len(result_set[i])):
                temp_s=String(result_set[i][j])
                line.s.append((str(result_set[i][j])))
            res.res_data.append(line)
        con.close()
        res.success.data=True
        res.trace.append("Success")
    except mdb.Error, e:
        res.trace.append(("Database Error %d: %s" % (e.args[0],e.args[1])))
        res.success.data=False
        print "Error %d: %s" % (e.args[0],e.args[1])
    except IndexError:
        res.trace.append("Wrong Query Input Format, check for empty required columns list or wrong/incomplete Query data format")
        res.success.data=False
        print "Wrong Query Input Format, check for empty required columns list or wrong/incomplete Query data format"
    except __HOLE__:
        print "Error: can\'t find login file or read data"
        res.success.data=False
        res.trace.append("Error: can\'t find login file or read data")
    return res

## @brief Places commas between columns and constructs a string
# @param cols [list] the input columns
# @return returncols [string] the output string
IOError
dataset/ETHPy150Open rapp-project/rapp-platform/rapp_mysql_wrapper/src/mysql_wrapper.py/MySQLdbWrapper.whatRappsCanRun
4,529
def apply(self, op, im1, im2=None, mode=None):
    im1 = self.__fixup(im1)
    if im2 is None:
        # unary operation
        out = Image.new(mode or im1.mode, im1.size, None)
        im1.load()
        try:
            op = getattr(_imagingmath, op+"_"+im1.mode)
        except AttributeError:
            raise TypeError, "bad operand type for '%s'" % op
        _imagingmath.unop(op, out.im.id, im1.im.id)
    else:
        # binary operation
        im2 = self.__fixup(im2)
        if im1.mode != im2.mode:
            # convert both arguments to floating point
            if im1.mode != "F":
                im1 = im1.convert("F")
            if im2.mode != "F":
                im2 = im2.convert("F")
            if im1.mode != im2.mode:
                raise ValueError, "mode mismatch"
        if im1.size != im2.size:
            # crop both arguments to a common size
            size = (min(im1.size[0], im2.size[0]),
                    min(im1.size[1], im2.size[1]))
            if im1.size != size:
                im1 = im1.crop((0, 0) + size)
            if im2.size != size:
                im2 = im2.crop((0, 0) + size)
            out = Image.new(mode or im1.mode, size, None)
        else:
            out = Image.new(mode or im1.mode, im1.size, None)
        im1.load(); im2.load()
        try:
            op = getattr(_imagingmath, op+"_"+im1.mode)
        except __HOLE__:
            raise TypeError, "bad operand type for '%s'" % op
        _imagingmath.binop(op, out.im.id, im1.im.id, im2.im.id)
    return _Operand(out)

# unary operators
AttributeError
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/ImageMath.py/_Operand.apply
4,530
def eval(expression, _dict={}, **kw):
    # build execution namespace
    args = ops.copy()
    args.update(_dict)
    args.update(kw)
    for k, v in args.items():
        if hasattr(v, "im"):
            args[k] = _Operand(v)

    import __builtin__
    out = __builtin__.eval(expression, args)
    try:
        return out.im
    except __HOLE__:
        return out
AttributeError
dataset/ETHPy150Open kleientertainment/ds_mod_tools/pkg/win32/Python27/Lib/site-packages/PIL/ImageMath.py/eval
4,531
def get_mod_func(callback):
    # Converts 'django.views.news.stories.story_detail' to
    # ['django.views.news.stories', 'story_detail']
    try:
        dot = callback.rindex('.')
    except __HOLE__:
        return callback, ''
    return callback[:dot], callback[dot+1:]
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-0.96/django/core/urlresolvers.py/get_mod_func
4,532
def __call__(self, match_obj):
    # match_obj.group(1) is the contents of the parenthesis.
    # First we need to figure out whether it's a named or unnamed group.
    #
    grouped = match_obj.group(1)
    m = re.search(r'^\?P<(\w+)>(.*?)$', grouped)
    if m: # If this was a named group...
        # m.group(1) is the name of the group
        # m.group(2) is the regex.
        try:
            value = self.kwargs[m.group(1)]
        except KeyError:
            # It was a named group, but the arg was passed in as a
            # positional arg or not at all.
            try:
                value = self.args[self.current_arg]
                self.current_arg += 1
            except IndexError:
                # The arg wasn't passed in.
                raise NoReverseMatch('Not enough positional arguments passed in')
        test_regex = m.group(2)
    else: # Otherwise, this was a positional (unnamed) group.
        try:
            value = self.args[self.current_arg]
            self.current_arg += 1
        except __HOLE__:
            # The arg wasn't passed in.
            raise NoReverseMatch('Not enough positional arguments passed in')
        test_regex = grouped
    # Note we're using re.match here on purpose because the start of
    # to string needs to match.
    if not re.match(test_regex + '$', str(value)): # TODO: Unicode?
        raise NoReverseMatch("Value %r didn't match regular expression %r" % (value, test_regex))
    return str(value) # TODO: Unicode?
IndexError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-0.96/django/core/urlresolvers.py/MatchChecker.__call__
4,533
def _get_callback(self):
    if self._callback is not None:
        return self._callback
    mod_name, func_name = get_mod_func(self._callback_str)
    try:
        self._callback = getattr(__import__(mod_name, {}, {}, ['']), func_name)
    except __HOLE__, e:
        raise ViewDoesNotExist, "Could not import %s. Error was: %s" % (mod_name, str(e))
    except AttributeError, e:
        raise ViewDoesNotExist, "Tried %s in module %s. Error was: %s" % (func_name, mod_name, str(e))
    return self._callback
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-0.96/django/core/urlresolvers.py/RegexURLPattern._get_callback
4,534
def reverse(self, viewname, *args, **kwargs):
    mod_name, func_name = get_mod_func(viewname)
    try:
        lookup_view = getattr(__import__(mod_name, {}, {}, ['']), func_name)
    except (__HOLE__, AttributeError):
        raise NoReverseMatch
    if lookup_view != self.callback:
        raise NoReverseMatch
    return self.reverse_helper(*args, **kwargs)
ImportError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-0.96/django/core/urlresolvers.py/RegexURLPattern.reverse
4,535
def _get_urlconf_module(self):
    try:
        return self._urlconf_module
    except AttributeError:
        try:
            self._urlconf_module = __import__(self.urlconf_name, {}, {}, [''])
        except __HOLE__, e:
            # Invalid urlconf_name, such as "foo.bar." (note trailing period)
            raise ImproperlyConfigured, "Error while importing URLconf %r: %s" % (self.urlconf_name, e)
        return self._urlconf_module
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-0.96/django/core/urlresolvers.py/RegexURLResolver._get_urlconf_module
4,536
def _resolve_special(self, view_type):
    callback = getattr(self.urlconf_module, 'handler%s' % view_type)
    mod_name, func_name = get_mod_func(callback)
    try:
        return getattr(__import__(mod_name, {}, {}, ['']), func_name), {}
    except (ImportError, __HOLE__), e:
        raise ViewDoesNotExist, "Tried %s. Error was: %s" % (callback, str(e))
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-0.96/django/core/urlresolvers.py/RegexURLResolver._resolve_special
4,537
def reverse(self, lookup_view, *args, **kwargs):
    if not callable(lookup_view):
        mod_name, func_name = get_mod_func(lookup_view)
        try:
            lookup_view = getattr(__import__(mod_name, {}, {}, ['']), func_name)
        except (ImportError, __HOLE__):
            raise NoReverseMatch
    for pattern in self.urlconf_module.urlpatterns:
        if isinstance(pattern, RegexURLResolver):
            try:
                return pattern.reverse_helper(lookup_view, *args, **kwargs)
            except NoReverseMatch:
                continue
        elif pattern.callback == lookup_view:
            try:
                return pattern.reverse_helper(*args, **kwargs)
            except NoReverseMatch:
                continue
    raise NoReverseMatch
AttributeError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-0.96/django/core/urlresolvers.py/RegexURLResolver.reverse
4,538
def attach(self, timeout, wait=True):
    if self.skype is not None and windll.user32.IsWindow(self.skype):
        return
    self.acquire()
    self.skype = None
    try:
        if not self.isAlive():
            try:
                self.start()
            except __HOLE__:
                raise SkypeAPIError('Skype API closed')
            # wait till the thread initializes
            while not self.hwnd:
                time.sleep(0.01)
        self.logger.debug('broadcasting SkypeControlAPIDiscover')
        fhwnd = self.get_foreground_window()
        try:
            if fhwnd:
                windll.user32.SetForegroundWindow(self.hwnd)
            if not windll.user32.SendMessageTimeoutA(HWND_BROADCAST, self.SkypeControlAPIDiscover,
                                                     self.hwnd, None, 2, 5000, None):
                raise SkypeAPIError('Could not broadcast Skype discover message')
            # wait (with timeout) till the WindProc() attaches
            self.wait = True
            t = threading.Timer(timeout2float(timeout), lambda: setattr(self, 'wait', False))
            if wait:
                t.start()
            while self.wait and self.attachment_status not in (apiAttachSuccess, apiAttachRefused):
                if self.attachment_status == apiAttachPendingAuthorization:
                    # disable the timeout
                    t.cancel()
                elif self.attachment_status == apiAttachAvailable:
                    # rebroadcast
                    self.logger.debug('broadcasting SkypeControlAPIDiscover')
                    windll.user32.SetForegroundWindow(self.hwnd)
                    if not windll.user32.SendMessageTimeoutA(HWND_BROADCAST, self.SkypeControlAPIDiscover,
                                                             self.hwnd, None, 2, 5000, None):
                        raise SkypeAPIError('Could not broadcast Skype discover message')
                time.sleep(0.01)
            t.cancel()
        finally:
            if fhwnd:
                windll.user32.SetForegroundWindow(fhwnd)
    finally:
        self.release()
    # check if we got the Skype window's hwnd
    if self.skype is not None:
        command = Command('PROTOCOL %s' % self.protocol, Blocking=True)
        self.send_command(command)
        self.protocol = int(command.Reply.rsplit(None, 1)[-1])
    elif not self.wait:
        raise SkypeAPIError('Skype attach timeout')
AssertionError
dataset/ETHPy150Open Skype4Py/Skype4Py/Skype4Py/api/windows.py/SkypeAPI.attach
4,539
def _is_gzip(self, response):
    archive = StringIO(response.body)
    try:
        body = gzip.GzipFile(fileobj=archive).read()
    except __HOLE__:
        return

    respcls = responsetypes.from_args(body=body)
    return response.replace(body=body, cls=respcls)
IOError
dataset/ETHPy150Open wcong/ants/ants/contrib_exp/downloadermiddleware/decompression.py/DecompressionMiddleware._is_gzip
4,540
def _is_bzip2(self, response):
    try:
        body = bz2.decompress(response.body)
    except __HOLE__:
        return

    respcls = responsetypes.from_args(body=body)
    return response.replace(body=body, cls=respcls)
IOError
dataset/ETHPy150Open wcong/ants/ants/contrib_exp/downloadermiddleware/decompression.py/DecompressionMiddleware._is_bzip2
4,541
def __init__(self, locale):
    Console.info("Parsing CLDR files for %s..." % locale)
    Console.indent()

    splits = locale.split("_")

    # Store for internal usage
    self.__locale = locale
    self.__language = splits[0]
    self.__territory = splits[1] if len(splits) > 1 else None

    # This will hold all extracted data
    self.__data = {}

    # Add info section
    self.__data["info"] = {
        "LOCALE" : self.__locale,
        "LANGUAGE" : self.__language,
        "TERRITORY" : self.__territory
    }

    # Add keys (fallback to C-default locale)
    path = "%s.xml" % os.path.join(CLDR_DIR, "keys", self.__language)
    try:
        Console.info("Processing %s..." % os.path.relpath(path, CLDR_DIR))
        tree = xml.etree.ElementTree.parse(path)
    except __HOLE__:
        path = "%s.xml" % os.path.join(CLDR_DIR, "keys", "C")
        Console.info("Processing %s..." % os.path.relpath(path, CLDR_DIR))
        tree = xml.etree.ElementTree.parse(path)

    self.__data["key"] = {
        "Short" : { key.get("type"): key.text for key in tree.findall("./keys/short/key") },
        "Full" : { key.get("type"): key.text for key in tree.findall("./keys/full/key") }
    }

    # Add main CLDR data: Fallback chain for locales
    main = os.path.join(CLDR_DIR, "main")
    files = []
    while True:
        files.append("%s.xml" % os.path.join(main, locale))

        if "_" in locale:
            locale = locale[:locale.rindex("_")]
        else:
            break

    # Extend data with root data
    files.append(os.path.join(main, "root.xml"))

    # Finally import all these files in order
    for path in reversed(files):
        Console.info("Processing %s..." % os.path.relpath(path, CLDR_DIR))
        tree = xml.etree.ElementTree.parse(path)

        self.__addDisplayNames(tree)
        self.__addDelimiters(tree)
        self.__addCalendars(tree)
        self.__addNumbers(tree)

    # Add supplemental CLDR data
    self.__addSupplementals(self.__territory)

    Console.outdent()
IOError
dataset/ETHPy150Open zynga/jasy/jasy/core/Locale.py/LocaleParser.__init__
4,542
def close(self):
    for ct in self.containers:
        if ct:
            try:
                self.docker.remove_container(container=ct.get('Id'))
            except (errors.DockerException, errors.APIError) as e:
                LOG.warning('Failed to remove container %s, %s' %
                            (ct.get('Id'), str(e)))

    for img in self.images:
        try:
            self.docker.remove_image(image=img, force=True)
        except (errors.DockerException, errors.APIError) as e:
            LOG.warning('Failed to remove docker image %s, %s' %
                        (img, str(e)))

    if self.work_dir:
        self._remove_cloned_repo(self.work_dir)
        try:
            utils.rm_tree(self.work_dir)
        except __HOLE__ as e:
            if e.errno != errno.ENOENT:
                LOG.critical('critical: cannot remove dir %s,'
                             ' disk may be full.' % self.work_dir)

    if self.cloner_image:
        try:
            self.docker.remove_image(image=self.cloner_image, force=True)
        except (errors.DockerException, errors.APIError) as e:
            LOG.error('Error in removing docker image %s, %s' %
                      (self.cloner_image, str(e)))
OSError
dataset/ETHPy150Open openstack/solum/solum/worker/app_handlers/base.py/BaseHandler.close
4,543
def _gen_docker_ignore(self, path, prefix=None):
    # Exclude .git from the docker build context
    content = '{}/.git'.format(prefix) if prefix else '.git'
    try:
        with open('{}/.dockerignore'.format(path), 'w') as f:
            f.write(content)
    except __HOLE__:
        pass
OSError
dataset/ETHPy150Open openstack/solum/solum/worker/app_handlers/base.py/BaseHandler._gen_docker_ignore
4,544
def _docker_build(self, tag, logger, timeout, limits, path=None,
                  dockerfile=None, fileobj=None, forcerm=True, quiet=True,
                  nocache=False, pull=True):
    success = 1
    try:
        for l in self.docker.build(path=path, dockerfile=dockerfile,
                                   fileobj=fileobj, tag=tag, timeout=timeout,
                                   forcerm=forcerm, quiet=quiet,
                                   nocache=nocache, pull=pull,
                                   container_limits=limits):
            try:
                info = json.loads(l).get('stream', '')
                if info:
                    if 'successfully built' in info.lower():
                        success = 0
                else:
                    err = json.loads(l).get('errorDetail', '')
                    if err:
                        logger.log(logging.ERROR, err)
            except __HOLE__:
                pass
    except req_exp.ReadTimeoutError:
        logger.log(logging.ERROR, 'docker build timed out, max value: %s' % timeout)
    except (errors.DockerException, errors.APIError) as e:
        LOG.error('Error in building docker image %s, assembly: %s, %s' %
                  (tag, self.assembly.uuid, str(e)))

    return success
ValueError
dataset/ETHPy150Open openstack/solum/solum/worker/app_handlers/base.py/BaseHandler._docker_build
4,545
@utils.retry
def _docker_save(self, image, output):
    result = 1
    try:
        lp = self.docker.get_image(image)
        with open(output, 'w') as f:
            f.write(lp.data)
        result = 0
    except (__HOLE__, errors.DockerException, errors.APIError) as e:
        LOG.error('Error saving docker image, %s' % str(e))
    return result
OSError
dataset/ETHPy150Open openstack/solum/solum/worker/app_handlers/base.py/BaseHandler._docker_save
4,546
@utils.retry
def _docker_load(self, path):
    result = 1
    try:
        with open(path, 'rb') as f:
            self.docker.load_image(f)
        result = 0
    except (__HOLE__, errors.DockerException, errors.APIError) as e:
        LOG.error('Error in loading docker image, %s' % str(e))
    return result
OSError
dataset/ETHPy150Open openstack/solum/solum/worker/app_handlers/base.py/BaseHandler._docker_load
4,547
def run(self):
    self.prepare()
    self.logger.store_global('start_time', str(datetime.now()))
    try:
        while True:
            self._perform_one_run()
    except __HOLE__:
        pass
    self.logger.store_global('end_time', str(datetime.now()))
    try:
        self.logger.save()
    except self.logger.NoFileError:
        print('Not saving logs: no destination was provided.')
StopIteration
dataset/ETHPy150Open omangin/multimodal/multimodal/experiment.py/MultimodalExperiment.run
4,548
def _setup_macros_dict(parser):
    """ initiates the _macros attribute on the parser
    object, allowing for storage of the macros in the parser.
    """
    # Each macro is stored in a new attribute
    # of the 'parser' class. That way we can access it later
    # in the template when processing 'use_macro' tags.
    try:
        # don't overwrite the attribute if it already exists
        parser._macros
    except __HOLE__:
        parser._macros = {}
AttributeError
dataset/ETHPy150Open nalourie/django-macros/build/lib/macros/templatetags/macros.py/_setup_macros_dict
4,549
@register.tag(name="macro")
def do_macro(parser, token):
    """ the function taking the parsed tag and returning
    a DefineMacroNode object.
    """
    try:
        bits = token.split_contents()
        tag_name, macro_name, arguments = bits[0], bits[1], bits[2:]
    except __HOLE__:
        raise template.TemplateSyntaxError(
            "'{0}' tag requires at least one argument (macro name)".format(
                token.contents.split()[0]))
    # use regex's to parse the arguments into arg
    # and kwarg definitions

    # the regex for identifying python variable names is:
    #   r'^[A-Za-z_][\w_]*$'
    # args must be proper python variable names
    # we'll want to capture it from the regex also.
    arg_regex = r'^([A-Za-z_][\w_]*)$'
    # kwargs must be proper variable names with a
    # default value, name="value", or name=value if
    # value is a template variable (potentially with
    # filters).
    # we'll want to capture the name and value from
    # the regex as well.
    kwarg_regex = (
        r'^([A-Za-z_][\w_]*)=(".*"|{0}.*{0}|[A-Za-z_][\w_]*)$'.format("'"))
    # leave further validation to the template variable class
    args = []
    kwargs = {}
    for argument in arguments:
        arg_match = regex_match(
            arg_regex, argument)
        if arg_match:
            args.append(arg_match.groups()[0])
        else:
            kwarg_match = regex_match(
                kwarg_regex, argument)
            if kwarg_match:
                kwargs[kwarg_match.groups()[0]] = template.Variable(
                    # convert to a template variable here
                    kwarg_match.groups()[1])
            else:
                raise template.TemplateSyntaxError(
                    "Malformed arguments to the {0} tag.".format(
                        tag_name))
    # parse to the endmacro tag and get the contents
    nodelist = parser.parse(('endmacro',))
    parser.delete_first_token()
    # store macro in parser._macros, creating attribute
    # if necessary
    _setup_macros_dict(parser)
    parser._macros[macro_name] = DefineMacroNode(
        macro_name, nodelist, args, kwargs)
    return parser._macros[macro_name]
IndexError
dataset/ETHPy150Open nalourie/django-macros/build/lib/macros/templatetags/macros.py/do_macro
4,550
@register.tag(name="loadmacros")
def do_loadmacros(parser, token):
    """ The function taking a parsed tag and returning
    a LoadMacrosNode object, while also loading the macros
    into the page.
    """
    try:
        tag_name, filename = token.split_contents()
    except __HOLE__:
        raise template.TemplateSyntaxError(
            "'{0}' tag requires exactly one argument (filename)".format(
                token.contents.split()[0]))
    if filename[0] in ('"', "'") and filename[-1] == filename[0]:
        filename = filename[1:-1]
    else:
        raise template.TemplateSyntaxError(
            "Malformed argument to the {0} template tag."
            " Argument must be in quotes.".format(tag_name)
        )
    t = get_template(filename)
    try:
        # Works for Django 1.8
        nodelist = t.template.nodelist
    except AttributeError:
        # Works for Django < 1.8
        nodelist = t.nodelist
    macros = nodelist.get_nodes_by_type(DefineMacroNode)
    # make sure the _macros attribute dictionary is instantiated
    # on the parser, then add the macros to it.
    _setup_macros_dict(parser)
    for macro in macros:
        parser._macros[macro.name] = macro
    # pass macros to LoadMacrosNode so that it can
    # resolve the macros template variable kwargs on render
    return LoadMacrosNode(macros)
ValueError
dataset/ETHPy150Open nalourie/django-macros/build/lib/macros/templatetags/macros.py/do_loadmacros
4,551
def render(self, context):
    # add all of the use_macros args into context
    for i, arg in enumerate(self.macro.args):
        try:
            template_variable = self.args[i]
            context[arg] = template_variable.resolve(context)
        except __HOLE__:
            context[arg] = ""
    # add all of use_macros kwargs into context
    for name, default in self.macro.kwargs.items():
        if name in self.kwargs:
            context[name] = self.kwargs[name].resolve(context)
        else:
            if isinstance(default, template.Variable):
                # variables must be resolved explicitly,
                # because otherwise if macro's loaded from
                # a separate file things will break
                context[name] = default.resolve(context)
            else:
                context[name] = default
    # return the nodelist rendered in the adjusted context
    return self.macro.nodelist.render(context)
IndexError
dataset/ETHPy150Open nalourie/django-macros/build/lib/macros/templatetags/macros.py/UseMacroNode.render
4,552
def parse_macro_params(token):
    """
    Common parsing logic for both use_macro and macro_block
    """
    try:
        bits = token.split_contents()
        tag_name, macro_name, values = bits[0], bits[1], bits[2:]
    except __HOLE__:
        raise template.TemplateSyntaxError(
            "{0} tag requires at least one argument (macro name)".format(
                token.contents.split()[0]))
    args = []
    kwargs = {}
    # leaving most validation up to the template.Variable
    # class, but use regex here so that validation could
    # be added in future if necessary.
    kwarg_regex = (
        r'^([A-Za-z_][\w_]*)=(".*"|{0}.*{0}|[A-Za-z_][\w_]*)$'.format(
            "'"))
    arg_regex = r'^([A-Za-z_][\w_]*|".*"|{0}.*{0}|(\d+))$'.format(
        "'")
    for value in values:
        # must check against the kwarg regex first
        # because the arg regex matches everything!
        kwarg_match = regex_match(
            kwarg_regex, value)
        if kwarg_match:
            kwargs[kwarg_match.groups()[0]] = template.Variable(
                # convert to a template variable here
                kwarg_match.groups()[1])
        else:
            arg_match = regex_match(
                arg_regex, value)
            if arg_match:
                args.append(template.Variable(arg_match.groups()[0]))
            else:
                raise template.TemplateSyntaxError(
                    "Malformed arguments to the {0} tag.".format(
                        tag_name))
    return tag_name, macro_name, args, kwargs
IndexError
dataset/ETHPy150Open nalourie/django-macros/build/lib/macros/templatetags/macros.py/parse_macro_params
4,553
@register.tag(name="use_macro")
def do_usemacro(parser, token):
    """ The function taking a parsed template tag
    and returning a UseMacroNode.
    """
    tag_name, macro_name, args, kwargs = parse_macro_params(token)
    try:
        macro = parser._macros[macro_name]
    except (AttributeError, __HOLE__):
        raise template.TemplateSyntaxError(
            "Macro '{0}' is not defined previously to the {1} tag".format(
                macro_name, tag_name))
    macro.parser = parser
    return UseMacroNode(macro, args, kwargs)
KeyError
dataset/ETHPy150Open nalourie/django-macros/build/lib/macros/templatetags/macros.py/do_usemacro
4,554
@register.tag(name="macro_block")
def do_macro_block(parser, token):
    """ Function taking parsed template tag
    to a MacroBlockNode.
    """
    tag_name, macro_name, args, kwargs = parse_macro_params(token)
    # could add extra validation on the macro_name tag
    # here, but probably don't need to since we're checking
    # if there's a macro by that name anyway.
    try:
        # see if the macro is in the context.
        macro = parser._macros[macro_name]
    except (__HOLE__, KeyError):
        raise template.TemplateSyntaxError(
            "Macro '{0}' is not defined ".format(macro_name) +
            "previously to the {0} tag".format(tag_name))
    # get the arg and kwarg nodes from the nodelist
    nodelist = parser.parse(('endmacro_block',))
    parser.delete_first_token()
    # Loop through nodes, sorting into args/kwargs
    # (we could do this more semantically, but we loop
    # only once like this as an optimization).
    for node in nodelist:
        if isinstance(node, MacroArgNode) and not isinstance(node, MacroKwargNode):
            # note that MacroKwargNode is also a MacroArgNode (via inheritance),
            # so we must check against this.
            args.append(node)
        elif isinstance(node, MacroKwargNode):
            if node.keyword in macro.kwargs:
                # check that the keyword is defined as an argument for
                # the macro.
                if node.keyword not in kwargs:
                    # add the keyword argument to the dict
                    # if it's not in there
                    kwargs[node.keyword] = node
                else:
                    # raise a template syntax error if the
                    # keyword is already in the dict (thus a keyword
                    # argument was passed twice.
                    raise template.TemplateSyntaxError(
                        "{0} template tag was supplied "
                        "the same keyword argument multiple times.".format(
                            tag_name))
            else:
                raise template.TemplateSyntaxError(
                    "{0} template tag was supplied with a "
                    "keyword argument not defined by the {1} macro.".format(
                        tag_name, macro_name))
        # The following is a check that only whitespace is inside the macro_block tag,
        # but it's currently removed for reasons of backwards compatibility/potential
        # uses people might have to put extra stuff in te macro_block tag.
        # elif not isinstance(node, template.TextNode) or node.s.strip() != "":
        #     # whitespace is allowed, anything else is not
        #     raise template.TemplateSyntaxError(
        #         "{0} template tag received an argument that "
        #         "is neither a arg or a kwarg tag. Make sure there's "
        #         "text or template tags directly descending "
        #         "from the {0} tag.".format(tag_name))
    # check that there aren't more arg tags than args
    # in the macro.
    if len(args) > len(macro.args):
        raise template.TemplateSyntaxError(
            "{0} template tag was supplied too many arg block tags.".format(
                tag_name))
    macro.parser = parser
    return MacroBlockNode(macro, nodelist, args, kwargs)
AttributeError
dataset/ETHPy150Open nalourie/django-macros/build/lib/macros/templatetags/macros.py/do_macro_block
4,555
@register.tag(name="macro_kwarg")
def do_macro_kwarg(parser, token):
    """ Function taking a parsed template tag
    to a MacroKwargNode.
    """
    try:
        tag_name, keyword = token.split_contents()
    except __HOLE__:
        raise template.TemplateSyntaxError(
            "{0} tag requires exactly one argument, a keyword".format(
                token.contents.split()[0]))
    # add some validation of the keyword argument here.
    nodelist = parser.parse(('endmacro_kwarg',))
    parser.delete_first_token()
    return MacroKwargNode(keyword, nodelist)
ValueError
dataset/ETHPy150Open nalourie/django-macros/build/lib/macros/templatetags/macros.py/do_macro_kwarg
4,556
def Init(self, params):
    """Initializes Config. This is a separate method as it raises an exception
    if there is a parse error."""
    generator_flags = params.get('generator_flags', {})
    config_path = generator_flags.get('config_path', None)
    if not config_path:
        return
    try:
        f = open(config_path, 'r')
        config = json.load(f)
        f.close()
    except IOError:
        raise Exception('Unable to open file ' + config_path)
    except __HOLE__ as e:
        raise Exception('Unable to parse config file ' + config_path + str(e))
    if not isinstance(config, dict):
        raise Exception('config_path must be a JSON file containing a dictionary')
    self.files = config.get('files', [])
    self.targets = set(config.get('targets', []))
ValueError
dataset/ETHPy150Open pyokagan/gyp/pylib/gyp/generator/analyzer.py/Config.Init
4,557
def _WriteOutput(params, **values):
    """Writes the output, either to stdout or a file if specified."""
    if 'error' in values:
        print 'Error:', values['error']
    if 'status' in values:
        print values['status']
    if 'targets' in values:
        values['targets'].sort()
        print 'Supplied targets that depend on changed files:'
        for target in values['targets']:
            print '\t', target
    if 'invalid_targets' in values:
        values['invalid_targets'].sort()
        print 'The following targets were not found:'
        for target in values['invalid_targets']:
            print '\t', target
    if 'build_targets' in values:
        values['build_targets'].sort()
        print 'Targets that require a build:'
        for target in values['build_targets']:
            print '\t', target

    output_path = params.get('generator_flags', {}).get(
        'analyzer_output_path', None)
    if not output_path:
        print json.dumps(values)
        return
    try:
        f = open(output_path, 'w')
        f.write(json.dumps(values) + '\n')
        f.close()
    except __HOLE__ as e:
        print 'Error writing to output file', output_path, str(e)
IOError
dataset/ETHPy150Open pyokagan/gyp/pylib/gyp/generator/analyzer.py/_WriteOutput
4,558
def createDestination(self, dstFile=None, import_lines=None):
    if dstFile is None:
        dstFile = self.dstFile
    if import_lines is None:
        import_lines = self.import_lines
    try:
        oldData = open(dstFile, 'r').readlines()
    except __HOLE__, e:
        if e[0] == 2: # No such file or directory
            oldData = []
        else:
            raise
    newData = []
    if len(import_lines) > 2:
        for line in import_lines:
            newData.append("%s\n" % line)
    skip = False
    for line in oldData:
        sline = line.rstrip()
        if skip:
            if sline == ')':
                skip = False
            continue
        if sline == import_lines[0]:
            skip = True
            continue
        newData.append(line)
    newData = "".join(newData)
    open(dstFile, 'w').write(newData)
IOError
dataset/ETHPy150Open anandology/pyjamas/contrib/create_imports.py/CreateImports.createDestination
4,559
def test_open_nonexistent(self):
    """Test that trying to open a non-existent file results in an
    IOError (and not some other arbitrary exception).
    """
    try:
        fits.open(self.temp('foobar.fits'))
    except __HOLE__ as exc:
        assert 'No such file or directory' in str(exc)
    except:
        raise

    # But opening in ostream or append mode should be okay, since they
    # allow writing new files
    for mode in ('ostream', 'append'):
        with fits.open(self.temp('foobar.fits'), mode=mode) as h:
            pass

        assert os.path.exists(self.temp('foobar.fits'))
        os.remove(self.temp('foobar.fits'))
IOError
dataset/ETHPy150Open spacetelescope/PyFITS/pyfits/tests/test_core.py/TestFileFunctions.test_open_nonexistent
4,560
def _sort_commands(cmddict, order):
    def keyfn(key):
        try:
            return order.index(key[1])
        except __HOLE__:
            # unordered items should come last
            return 0xff

    return sorted(cmddict.items(), key=keyfn)
ValueError
dataset/ETHPy150Open anzev/hedwig/build/pip/pip/commands/__init__.py/_sort_commands
4,561
def post_to_server(post_data, config, auth=None):
    """Send the given post_data to the pypi server.

    Parameters
    ----------
    post_data: dict
        Usually the dict returned by build_post_data
    config: object
        A PyPIConfig instance
    auth: object or None
        HTTP authentication object.

    Returns
    -------
    code: int
        HTTP status code
    msg: str
        Message received back from the server
    """
    content_type, body = encode_multipart(post_data.items(), [])

    # build the Request
    headers = {
        'Content-type': content_type,
        'Content-length': str(len(body))
    }
    req = Request(config.repository, body, headers)

    # handle HTTP and include the Basic Auth handler
    opener = build_opener(HTTPBasicAuthHandler(password_mgr=auth))
    try:
        opener.open(req)
    except __HOLE__:
        e = extract_exception()
        code, msg = e.code, e.msg
    except URLError:
        e = extract_exception()
        code, msg = 500, str(e)
    else:
        code, msg = 200, 'OK'

    return code, msg
HTTPError
dataset/ETHPy150Open cournape/Bento/bento/pypi/register_utils.py/post_to_server
4,562
def __init__(self, file_id_mod, settings):
    """
    The file_id_mod should be 'A' for the first of the day, 'B'
    for the second and so on.
    """
    self.settings = settings
    try:
        self.header = Header(
            settings['immediate_dest'],
            settings['immediate_org'], file_id_mod,
            settings['immediate_dest_name'],
            settings['immediate_org_name']
        )
    except __HOLE__:
        raise Exception(
            'Settings require: "immediate_dest", "immediate_org", '
            '"immediate_dest_name", and "immediate_org_name"'
        )
    self.batches = list()
KeyError
dataset/ETHPy150Open travishathaway/python-ach/ach/builder.py/AchFile.__init__
4,563
@property def root_domain(self): try: return self.domain_set.get( ~Q(master_domain__soa=F('soa')), soa__isnull=False ) except __HOLE__: return None
ObjectDoesNotExist
dataset/ETHPy150Open mozilla/inventory/mozdns/soa/models.py/SOA.root_domain
4,564
def readheaders(self): """Read header lines. Read header lines up to the entirely blank line that terminates them. The (normally blank) line that ends the headers is skipped, but not included in the returned list. If a non-header line ends the headers, (which is an error), an attempt is made to backspace over it; it is never included in the returned list. The variable self.status is set to the empty string if all went well, otherwise it is an error message. The variable self.headers is a completely uninterpreted list of lines contained in the header (so printing them will reproduce the header exactly as it appears in the file). If multiple header fields with the same name occur, they are combined according to the rules in RFC 2616 sec 4.2: Appending each subsequent field-value to the first, each separated by a comma. The order in which header fields with the same field-name are received is significant to the interpretation of the combined field value. """ # XXX The implementation overrides the readheaders() method of # rfc822.Message. The base class design isn't amenable to # customized behavior here so the method here is a copy of the # base class code with a few small changes. self.dict = {} self.unixfrom = '' self.headers = hlist = [] self.status = '' headerseen = "" firstline = 1 startofline = unread = tell = None if hasattr(self.fp, 'unread'): unread = self.fp.unread elif self.seekable: tell = self.fp.tell while True: if tell: try: startofline = tell() except __HOLE__: startofline = tell = None self.seekable = 0 line = self.fp.readline(_MAXLINE + 1) if len(line) > _MAXLINE: raise LineTooLong("header line") if not line: self.status = 'EOF in headers' break # Skip unix From name time lines if firstline and line.startswith('From '): self.unixfrom = self.unixfrom + line continue firstline = 0 if headerseen and line[0] in ' \t': # XXX Not sure if continuation lines are handled properly # for http and/or for repeating headers # It's a continuation line. hlist.append(line) self.addcontinue(headerseen, line.strip()) continue elif self.iscomment(line): # It's a comment. Ignore it. continue elif self.islast(line): # Note! No pushback here! The delimiter line gets eaten. break headerseen = self.isheader(line) if headerseen: # It's a legal header line, save it. hlist.append(line) self.addheader(headerseen, line[len(headerseen)+1:].strip()) continue else: # It's not a header line; throw it back and stop here. if not self.dict: self.status = 'No headers' else: self.status = 'Non-header line where header expected' # Try to undo the read. if unread: unread(line) elif tell: self.fp.seek(startofline) else: self.status = self.status + '; bad seek' break
IOError
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/google/appengine/dist27/gae_override/httplib.py/HTTPMessage.readheaders
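The docstring above cites RFC 2616 sec 4.2; a toy illustration of that combining rule, independent of the class (combine_headers is a hypothetical helper):

def combine_headers(pairs):
    # Repeated field names are folded into one comma-separated value,
    # preserving the order in which the values were received.
    combined = {}
    for name, value in pairs:
        key = name.lower()
        if key in combined:
            combined[key] = combined[key] + ', ' + value
        else:
            combined[key] = value
    return combined

# {'accept': 'text/html, text/plain'}
print(combine_headers([('Accept', 'text/html'), ('accept', 'text/plain')]))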
4,565
def request(self, method, url, body=None, headers=None): """Send a complete request to the server.""" self._method = method self._url = url try: # 'body' can be a file. self._body = body.read() except __HOLE__: self._body = body if headers is None: headers = [] elif hasattr(headers, 'items'): headers = headers.items() self.headers = headers
AttributeError
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/google/appengine/dist27/gae_override/httplib.py/HTTPConnection.request
4,566
@staticmethod def _getargspec(callable_object): assert callable(callable_object) try: # Methods and lambdas. return inspect.getargspec(callable_object) except __HOLE__: # Class instances with __call__. return inspect.getargspec(callable_object.__call__)
TypeError
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/google/appengine/dist27/gae_override/httplib.py/HTTPConnection._getargspec
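A condensed sketch of the fallback above. Note that inspect.getargspec was removed in Python 3.11, so this assumes an older interpreter; argspec_of and Greeter are hypothetical names.

import inspect

def argspec_of(callable_object):
    # Functions and methods are introspectable directly; callable class
    # instances expose their signature on __call__ instead.
    try:
        return inspect.getargspec(callable_object)
    except TypeError:
        return inspect.getargspec(callable_object.__call__)

class Greeter(object):
    def __call__(self, name, greeting='hello'):
        return '%s, %s' % (greeting, name)

print(argspec_of(Greeter()))  # args include 'name' and 'greeting'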
4,567
def getresponse(self, buffering=False):
    """Get the response from the server.

    App Engine Note: buffering is ignored.
    """
    # net.proto.ProtocolBuffer relies on httplib so importing urlfetch at the
    # module level causes a failure on prod. That means the import needs to be
    # lazy.
    from google.appengine.api import urlfetch
    import socket  # Cannot be done at global scope due to circular import.
    if self.port and self.port != self.default_port:
        host = '%s:%s' % (self.host, self.port)
    else:
        host = self.host
    if not self._url.startswith(self._protocol):
        url = '%s://%s%s' % (self._protocol, host, self._url)
    else:
        url = self._url
    headers = dict(self.headers)
    if self.timeout in [_GLOBAL_DEFAULT_TIMEOUT,
                        socket._GLOBAL_DEFAULT_TIMEOUT]:
        deadline = socket.getdefaulttimeout()
    else:
        deadline = self.timeout
    try:
        method = self._method_map[self._method.upper()]
    except __HOLE__:
        raise ValueError('%r is an unrecognized HTTP method' % self._method)
    try:
        # The Python Standard Library doesn't validate certificates so don't
        # validate them here either. But some libraries (httplib2, possibly
        # others) use an alternate technique where the fetch function does not
        # have a validate_certificate argument so only provide it when supported.
        argspec = self._getargspec(self._fetch)
        extra_kwargs = (
            {'validate_certificate': False}
            if argspec.keywords or 'validate_certificate' in argspec.args
            else {})
        fetch_response = self._fetch(url,
                                     self._body,
                                     method, headers,
                                     self._allow_truncated,
                                     self._follow_redirects,
                                     deadline,
                                     **extra_kwargs)
    except urlfetch.InvalidURLError, e:
        raise InvalidURL(str(e))
    except (urlfetch.ResponseTooLargeError, urlfetch.DeadlineExceededError), e:
        raise HTTPException(str(e))
    except urlfetch.SSLCertificateError, e:
        # Should be ssl.SSLError but the ssl module isn't available.
        # Continue to support this exception for versions of _fetch that do not
        # support validate_certificates. Also, in production App Engine defers
        # specific semantics so leaving this in just in case.
        raise HTTPException(str(e))
    except urlfetch.DownloadError, e:
        # One of the following occurred: UNSPECIFIED_ERROR, FETCH_ERROR
        raise socket.error(
            'An error occurred while connecting to the server: %s' % e)
    response = self.response_class(fetch_response, method=method)
    response.begin()
    self.close()
    return response
KeyError
dataset/ETHPy150Open GoogleCloudPlatform/python-compat-runtime/appengine-compat/exported_appengine_sdk/google/appengine/dist27/gae_override/httplib.py/HTTPConnection.getresponse
4,568
def get_tokens_unprocessed(self, data): sql = PsqlRegexLexer(**self.options) lines = lookahead(line_re.findall(data)) # prompt-output cycle while 1: # consume the lines of the command: start with an optional prompt # and continue until the end of command is detected curcode = '' insertions = [] while 1: try: line = lines.next() except __HOLE__: # allow the emission of partially collected items # the repl loop will be broken below break # Identify a shell prompt in case of psql commandline example if line.startswith('$') and not curcode: lexer = get_lexer_by_name('console', **self.options) for x in lexer.get_tokens_unprocessed(line): yield x break # Identify a psql prompt mprompt = re_prompt.match(line) if mprompt is not None: insertions.append((len(curcode), [(0, Generic.Prompt, mprompt.group())])) curcode += line[len(mprompt.group()):] else: curcode += line # Check if this is the end of the command # TODO: better handle multiline comments at the end with # a lexer with an external state? if re_psql_command.match(curcode) \ or re_end_command.search(curcode): break # Emit the combined stream of command and prompt(s) for item in do_insertions(insertions, sql.get_tokens_unprocessed(curcode)): yield item # Emit the output lines out_token = Generic.Output while 1: line = lines.next() mprompt = re_prompt.match(line) if mprompt is not None: # push the line back to have it processed by the prompt lines.send(line) break mmsg = re_message.match(line) if mmsg is not None: if mmsg.group(1).startswith("ERROR") \ or mmsg.group(1).startswith("FATAL"): out_token = Generic.Error yield (mmsg.start(1), Generic.Strong, mmsg.group(1)) yield (mmsg.start(2), out_token, mmsg.group(2)) else: yield (0, out_token, line)
StopIteration
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/pygments/lexers/sql.py/PostgresConsoleLexer.get_tokens_unprocessed
4,569
@staticmethod def action_check_host(request): hostname = extract_url_hostname(request['host']) try: is_active = Host.get(Host.hostname == hostname).is_active except __HOLE__: is_active = False return { 'result': 'active' if is_active else 'inactive', 'host': request['host'] }
DoesNotExist
dataset/ETHPy150Open CacheBrowser/cachebrowser/cachebrowser/api.py/APIHandler.action_check_host
4,570
def token(self): try: tok = self.next_token() return tok except __HOLE__: pass
StopIteration
dataset/ETHPy150Open ContinuumIO/ashiba/enaml/enaml/core/lexer.py/EnamlLexer.token
4,571
def synthesize_indentation_tokens(self, token_stream): # A stack of indentation levels; will never pop item 0 levels = [0] depth = 0 prev_was_ws = False # In case the token stream is empty for a completely # empty file. token = None for token in token_stream: # WS only occurs at the start of the line # There may be WS followed by NEWLINE so # only track the depth here. Don't indent/dedent # until there's something real. if token.type == 'WS': assert depth == 0 depth = len(token.value) prev_was_ws = True # WS tokens are never passed to the parser continue if token.type == 'NEWLINE': depth = 0 if prev_was_ws or token.at_line_start: # ignore blank lines continue # pass the other cases on through yield token continue # then it must be a real token (not WS, not NEWLINE) # which can affect the indentation level prev_was_ws = False if token.must_indent: # The current depth must be larger than the previous level if not (depth > levels[-1]): indentation_error('expected an indented block', token) levels.append(depth) yield self.indent(token.lineno) elif token.at_line_start: # Must be on the same level or one of the previous levels if depth == levels[-1]: # At the same level pass elif depth > levels[-1]: # indentation increase but not in new block indentation_error('unexpected indent', token) else: # Back up; but only if it matches a previous level try: i = levels.index(depth) except __HOLE__: msg = ('unindent does not match any outer level ' 'of indentation.') indentation_error(msg, token) for _ in range(i + 1, len(levels)): yield self.dedent(token.lineno) levels.pop() yield token # If the current token is WS (which is only emitted at the start # of a line), then the token before that was a newline unless # we're on line number 1. If that's the case, then we don't # need another newline token. if token is None: yield self.newline(-1) elif token.type != 'NEWLINE': if token.type != 'WS' or token.lineno == 1: yield self.newline(-1) # Must dedent any remaining levels if len(levels) > 1: assert token is not None for _ in range(1, len(levels)): yield self.dedent(token.lineno)
ValueError
dataset/ETHPy150Open ContinuumIO/ashiba/enaml/enaml/core/lexer.py/EnamlLexer.synthesize_indentation_tokens
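A standalone sketch of the dedent bookkeeping above, assuming the same levels-stack invariant; dedent_count is a hypothetical helper, and it raises a plain IndentationError where the lexer calls its own indentation_error.

def dedent_count(levels, depth):
    # Dedenting must return to a previously seen indentation level;
    # anything else is the 'unindent does not match' error.
    try:
        i = levels.index(depth)
    except ValueError:
        raise IndentationError('unindent does not match any outer level '
                               'of indentation.')
    count = len(levels) - (i + 1)
    del levels[i + 1:]
    return count

print(dedent_count([0, 4, 8], 0))  # 2 DEDENT tokens, stack back to [0]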
4,572
def get_tests(app_module): try: app_path = app_module.__name__.split('.')[:-1] test_module = __import__('.'.join(app_path + [TEST_MODULE]), {}, {}, TEST_MODULE) except ImportError, e: # Couldn't import tests.py. Was it due to a missing file, or # due to an import error in a tests.py that actually exists? import os.path from imp import find_module try: mod = find_module(TEST_MODULE, [os.path.dirname(app_module.__file__)]) except __HOLE__: # 'tests' module doesn't exist. Move on. test_module = None else: # The module exists, so there must be an import error in the # test module itself. We don't need the module; so if the # module was a single file module (i.e., tests.py), close the file # handle returned by find_module. Otherwise, the test module # is a directory, and there is nothing to close. if mod[0]: mod[0].close() raise return test_module
ImportError
dataset/ETHPy150Open Almad/django-sane-testing/djangosanetesting/runnercompat.py/get_tests
4,573
def build_suite(app_module): "Create a complete Django test suite for the provided application module" from django.test import _doctest as doctest from django.test.testcases import OutputChecker, DocTestRunner doctestOutputChecker = OutputChecker() suite = unittest.TestSuite() # Load unit and doctests in the models.py module. If module has # a suite() method, use it. Otherwise build the test suite ourselves. if hasattr(app_module, 'suite'): suite.addTest(app_module.suite()) else: suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(app_module)) try: suite.addTest(doctest.DocTestSuite(app_module, checker=doctestOutputChecker, runner=DocTestRunner)) except __HOLE__: # No doc tests in models.py pass # Check to see if a separate 'tests' module exists parallel to the # models module test_module = get_tests(app_module) if test_module: # Load unit and doctests in the tests.py module. If module has # a suite() method, use it. Otherwise build the test suite ourselves. if hasattr(test_module, 'suite'): suite.addTest(test_module.suite()) else: suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(test_module)) try: suite.addTest(doctest.DocTestSuite(test_module, checker=doctestOutputChecker, runner=DocTestRunner)) except ValueError: # No doc tests in tests.py pass return suite
ValueError
dataset/ETHPy150Open Almad/django-sane-testing/djangosanetesting/runnercompat.py/build_suite
4,574
def build_test(label): """Construct a test case with the specified label. Label should be of the form model.TestClass or model.TestClass.test_method. Returns an instantiated test or test suite corresponding to the label provided. """ parts = label.split('.') if len(parts) < 2 or len(parts) > 3: raise ValueError("Test label '%s' should be of the form app.TestCase or app.TestCase.test_method" % label) # # First, look for TestCase instances with a name that matches # from django.db.models import get_app app_module = get_app(parts[0]) test_module = get_tests(app_module) TestClass = getattr(app_module, parts[1], None) # Couldn't find the test class in models.py; look in tests.py if TestClass is None: if test_module: TestClass = getattr(test_module, parts[1], None) try: if issubclass(TestClass, unittest.TestCase): if len(parts) == 2: # label is app.TestClass try: return unittest.TestLoader().loadTestsFromTestCase(TestClass) except TypeError: raise ValueError("Test label '%s' does not refer to a test class" % label) else: # label is app.TestClass.test_method return TestClass(parts[2]) except TypeError: # TestClass isn't a TestClass - it must be a method or normal class pass # # If there isn't a TestCase, look for a doctest that matches # from django.test import _doctest as doctest from django.test.testcases import OutputChecker, DocTestRunner doctestOutputChecker = OutputChecker() tests = [] for module in app_module, test_module: try: doctests = doctest.DocTestSuite(module, checker=doctestOutputChecker, runner=DocTestRunner) # Now iterate over the suite, looking for doctests whose name # matches the pattern that was given for test in doctests: if test._dt_test.name in ( '%s.%s' % (module.__name__, '.'.join(parts[1:])), '%s.__test__.%s' % (module.__name__, '.'.join(parts[1:]))): tests.append(test) except __HOLE__: # No doctests found. pass # If no tests were found, then we were given a bad test label. if not tests: raise ValueError("Test label '%s' does not refer to a test" % label) # Construct a suite out of the tests that matched. return unittest.TestSuite(tests)
ValueError
dataset/ETHPy150Open Almad/django-sane-testing/djangosanetesting/runnercompat.py/build_test
4,575
def _build_project(self, filename): try: with open(filename) as fp: script = fp.read() m = self.rate_re.search(script) if m: rate = float(m.group(1)) else: rate = 1 m = self.burst_re.search(script) if m: burst = float(m.group(1)) else: burst = 3 return { 'name': os.path.splitext(os.path.basename(filename))[0], 'group': None, 'status': 'RUNNING', 'script': script, 'comments': None, 'rate': rate, 'burst': burst, 'updatetime': os.path.getmtime(filename), } except __HOLE__ as e: logging.error('loading project script error: %s', e) return None
OSError
dataset/ETHPy150Open binux/pyspider/pyspider/database/local/projectdb.py/ProjectDB._build_project
4,576
def create_test_yaml(): file_loc = FILE_LOC config = \ """ queue: - name: default rate: 5/s - name: foo rate: 10/m """ try: os.mkdir("/var/apps/test_app") os.mkdir("/var/apps/test_app/app/") except __HOLE__: pass FILE = file_io.write(file_loc, config) # AppScale must already be running with RabbitMQ
OSError
dataset/ETHPy150Open AppScale/appscale/AppTaskQueue/test/functional/test_tq_startworker.py/create_test_yaml
4,577
def dictRemove(dct, value): try: del dct[value] except __HOLE__: pass
KeyError
dataset/ETHPy150Open twisted/twisted/twisted/internet/_threadedselect.py/dictRemove
4,578
def _workerInThread(self): try: while 1: fn, args = self.toThreadQueue.get() #print >>sys.stderr, "worker got", fn, args fn(*args) except __HOLE__: pass # exception indicates this thread should exit except: f = failure.Failure() self._sendToMain('Failure', f) #print >>sys.stderr, "worker finished"
SystemExit
dataset/ETHPy150Open twisted/twisted/twisted/internet/_threadedselect.py/ThreadedSelectReactor._workerInThread
4,579
def _doSelectInThread(self, timeout): """Run one iteration of the I/O monitor loop. This will run all selectables who had input or output readiness waiting for them. """ reads = self.reads writes = self.writes while 1: try: r, w, ignored = _select(reads.keys(), writes.keys(), [], timeout) break except ValueError: # Possibly a file descriptor has gone negative? log.err() self._preenDescriptorsInThread() except TypeError: # Something *totally* invalid (object w/o fileno, non-integral # result) was passed log.err() self._preenDescriptorsInThread() except (select.error, __HOLE__), se: # select(2) encountered an error if se.args[0] in (0, 2): # windows does this if it got an empty list if (not reads) and (not writes): return else: raise elif se.args[0] == EINTR: return elif se.args[0] == EBADF: self._preenDescriptorsInThread() else: # OK, I really don't know what's going on. Blow up. raise self._sendToMain('Notify', r, w)
IOError
dataset/ETHPy150Open twisted/twisted/twisted/internet/_threadedselect.py/ThreadedSelectReactor._doSelectInThread
4,580
def mainLoop(self): q = Queue() self.interleave(q.put) while self.running: try: q.get()() except __HOLE__: break
StopIteration
dataset/ETHPy150Open twisted/twisted/twisted/internet/_threadedselect.py/ThreadedSelectReactor.mainLoop
4,581
def __getitem__(self, key): if key in self._cache: return self._cache[key] try: instance = self._dict[key] except __HOLE__: ctrl = self.ctrl() candidates = [] for sectionname, section in ctrl.config.items(): if key in section: candidates.append(" %s:%s" % (sectionname, key)) if candidates: log.error("Instance '%s' not found. Did you forget to install a plugin? The following sections might match:\n%s" % ( key, "\n".join(candidates))) raise get_massagers = getattr(instance, 'get_massagers', lambda: []) for massager in get_massagers(): instance.config.add_massager(massager) for plugin in self.plugins.values(): if 'augment_instance' not in plugin: continue plugin['augment_instance'](instance) self._cache[key] = instance return instance
KeyError
dataset/ETHPy150Open ployground/ploy/ploy/__init__.py/LazyInstanceDict.__getitem__
4,582
@lazy def instances(self): result = LazyInstanceDict(self) try: config = self.config except __HOLE__: return result for instance_id in config.get('instance', {}): iconfig = config['instance'][instance_id] if 'master' not in iconfig: log.error("Instance 'instance:%s' has no master set." % instance_id) sys.exit(1) master = self.masters[iconfig['master']] if instance_id in master.instances: log.error("Instance 'instance:%s' conflicts with another instance with id '%s' in master '%s'." % (instance_id, instance_id, master.id)) sys.exit(1) instance_class = master.section_info.get(None) if instance_class is None: log.error("Master '%s' has no default instance class." % (master.id)) sys.exit(1) instance = instance_class(master, instance_id, iconfig) instance.sectiongroupname = 'instance' master.instances[instance_id] = instance shortname_map = {} for master in self.masters.values(): for instance_id in master.instances: instance = master.instances[instance_id] key = instance.uid result[key] = instance shortname_map.setdefault(instance_id, []).append(instance) for shortname, instances in shortname_map.items(): if len(instances) == 1: result[shortname] = instances[0] result.plugins = self.plugins return result
SystemExit
dataset/ETHPy150Open ployground/ploy/ploy/__init__.py/Controller.instances
4,583
def cmd_debug(self, argv, help): """Prints some debug info for this script""" parser = argparse.ArgumentParser( prog="%s debug" % self.progname, description=help, ) instances = self.instances parser.add_argument("instance", nargs=1, metavar="instance", help="Name of the instance from the config.", choices=sorted(instances)) parser.add_argument("-v", "--verbose", dest="verbose", action="store_true", help="Print more info and output the startup script") parser.add_argument("-c", "--console-output", dest="console_output", action="store_true", help="Prints the console output of the instance if available") parser.add_argument("-i", "--interactive", dest="interactive", action="store_true", help="Creates a connection and drops you into an interactive Python session") parser.add_argument("-r", "--raw", dest="raw", action="store_true", help="Outputs the raw possibly compressed startup script") parser.add_argument("-o", "--override", nargs="*", type=str, dest="overrides", metavar="OVERRIDE", help="Option to override instance config for startup script (name=value).") args = parser.parse_args(argv) overrides = self._parse_overrides(args) overrides['instances'] = self.instances instance = instances[args.instance[0]] if hasattr(instance, 'startup_script'): startup_script = instance.startup_script(overrides=overrides, debug=True) max_size = getattr(instance, 'max_startup_script_size', 16 * 1024) log.info("Length of startup script: %s/%s", len(startup_script['raw']), max_size) if args.verbose: if 'startup_script' in instance.config: if startup_script['original'] == startup_script['raw']: log.info("Startup script:") elif args.raw: log.info("Compressed startup script:") else: log.info("Uncompressed startup script:") else: log.info("No startup script specified") if args.raw: print(startup_script['raw'], end='') elif args.verbose: print(startup_script['original'], end='') if args.console_output: if hasattr(instance, 'get_console_output'): print(instance.get_console_output()) else: log.error("The instance doesn't support console output.") if args.interactive: # pragma: no cover import readline from pprint import pprint local = dict( ctrl=self, instances=self.instances, instance=instance, pprint=pprint) readline.parse_and_bind('tab: complete') try: import rlcompleter readline.set_completer(rlcompleter.Completer(local).complete) except __HOLE__: pass __import__("code").interact(local=local)
ImportError
dataset/ETHPy150Open ployground/ploy/ploy/__init__.py/Controller.cmd_debug
4,584
@classmethod
def active(cls, group=None, delta=datetime.timedelta(minutes=1)):
    """Get the currently active encoders.

    Args:
        group: Group to get active servers for. Defaults to all groups.
        delta: A time delta describing how long ago the encoder must have
            registered to be considered active. Defaults to 1 minute ago.

    Returns:
        A real list of Endpoint objects.
    """
    # Get all the active streaming servers for this channel
    lastseen_after = datetime.datetime.now() - delta
    q = cls.objects.filter(lastseen__gte=lastseen_after).order_by(
        'group', 'ip', '-lastseen'
    )
    if group:
        q = q.filter(group__exact=group)
    # distinct_support = True
    try:
        qd = q.distinct('group', 'ip')
        return list(qd)
    except __HOLE__:
        distinct_endpoints = {}
        for endpoint in q:
            key = (endpoint.group, endpoint.ip)
            if key not in distinct_endpoints:
                distinct_endpoints[key] = endpoint
        return list(distinct_endpoints.values())
NotImplementedError
dataset/ETHPy150Open timvideos/streaming-system/website/tracker/models.py/Endpoint.active
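The except branch above emulates QuerySet.distinct(*fields), which only some database backends support (PostgreSQL's DISTINCT ON); a generic first-wins sketch, with distinct_by as a hypothetical helper:

def distinct_by(rows, key):
    # Keep the first row seen for each key; later duplicates are dropped,
    # which matches the '-lastseen' ordering trick in the query above.
    seen = {}
    for row in rows:
        k = key(row)
        if k not in seen:
            seen[k] = row
    return list(seen.values())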
4,585
def _test_extracting_missing_attributes(self, include_locations): # Verify behavior from glance objects that are missing attributes # TODO(jaypipes): Find a better way of testing this crappy # glanceclient magic object stuff. class MyFakeGlanceImage(object): def __init__(self, metadata): IMAGE_ATTRIBUTES = ['size', 'owner', 'id', 'created_at', 'updated_at', 'status', 'min_disk', 'min_ram', 'is_public'] raw = dict.fromkeys(IMAGE_ATTRIBUTES) raw.update(metadata) self.__dict__['raw'] = raw def __getattr__(self, key): try: return self.__dict__['raw'][key] except __HOLE__: raise AttributeError(key) def __setattr__(self, key, value): try: self.__dict__['raw'][key] = value except KeyError: raise AttributeError(key) metadata = { 'id': 1, 'created_at': NOW_DATETIME, 'updated_at': NOW_DATETIME, } image = MyFakeGlanceImage(metadata) observed = glance._extract_attributes( image, include_locations=include_locations) expected = { 'id': 1, 'name': None, 'is_public': None, 'size': 0, 'min_disk': None, 'min_ram': None, 'disk_format': None, 'container_format': None, 'checksum': None, 'created_at': NOW_DATETIME, 'updated_at': NOW_DATETIME, 'deleted_at': None, 'deleted': None, 'status': None, 'properties': {}, 'owner': None } if include_locations: expected['locations'] = None expected['direct_url'] = None self.assertEqual(expected, observed)
KeyError
dataset/ETHPy150Open BU-NU-CLOUD-SP16/Trusted-Platform-Module-nova/nova/tests/unit/image/test_glance.py/TestConversions._test_extracting_missing_attributes
4,586
def autorun(self, args, loop=True):
    """
    The accessible method for dynamically running a screen.
    This method will basically parse the arguments, prepare them with
    the method `_parse_args` that is inherited in sub-classes, and with
    the property `cli_opts` that holds the formatting of the arguments.

    Once all is ready to go, this will call the `_run_cycle` method, which
    is filled in the sub-classes with the algorithms to display text on
    screen to behave as a screensaver.

    The arguments of this method are:

        * args: (MANDATORY) the arguments passed when termsaver is executed
          from command-line. See `termsaver` script for details.

        * loop: (OPTIONAL) defines if termsaver should run in an infinite
          loop (going on until the keyboard interrupt (Ctrl+C) is pressed),
          or not. This is up to the screen action (or end-user through a
          configurable setting) to decide.
    """
    # prepare values and validate
    if not args:
        args = ''
    if not self.cli_opts \
            or 'opts' not in self.cli_opts.keys() \
            or not self.cli_opts['opts']:
        self.cli_opts['opts'] = ''
    if not self.cli_opts['long_opts']:
        self.cli_opts['long_opts'] = []
    else:
        if not type(self.cli_opts['long_opts']) is list or \
                [type(i) == str for i in self.cli_opts['long_opts']] \
                != [True for __ in range(len(self.cli_opts['long_opts']))]:
            #
            # Don't worry too much about errors here. This is supposed to
            # help developers while programming screens for this app.
            #
            raise Exception("Value of 'long_opts' in cli_opts dict MUST "\
                            "be a list of strings.")
    try:
        self._parse_args(getopt.getopt(args, self.cli_opts['opts'],
                                       self.cli_opts['long_opts']))
    except getopt.GetoptError, e:
        raise exception.InvalidOptionException("", str(e))

    # execute the cycle
    self.clear_screen()

    while(loop):
        try:
            self._run_cycle()
        except __HOLE__, e:
            #
            # do some cleanup if applicable
            #
            self._on_keyboard_interrupt()
            raise e

        # Clear screen if appropriate
        if self.cleanup_per_cycle:
            self.clear_screen()
KeyboardInterrupt
dataset/ETHPy150Open brunobraga/termsaver/termsaverlib/screen/base/__init__.py/ScreenBase.autorun
4,587
def _element_to_regex(self, element, job_directory, join_on): tag = element.tag method_name = "_%s_to_regex" % tag try: method = getattr(self, method_name) except __HOLE__: raise NameError("Unknown XML validation tag [%s]" % tag) regex = method(element, job_directory) if self.__is_true(element.get("single_quote", "false")): regex = r"'%s'" % regex if self.__is_true(element.get("double_quote", "false")): regex = r'"%s"' % regex min_count = int(element.get("min", "1")) max_count = int(element.get("max", "1")) assert max_count > 0 if min_count != 1 or max_count != 1: single_regex = r"(?:%s)" % regex first = "%s%s" % (single_regex, "?" if min_count == 0 else "") rest = r"(?:%s%s){%d,%d}" % (join_on, regex, max(min_count - 1, 0), max_count - 1) regex = "%s%s" % (first, rest) return regex
NameError
dataset/ETHPy150Open galaxyproject/pulsar/pulsar/tools/validator.py/ExpressionValidator._element_to_regex
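A minimal sketch of the getattr-based dispatch above, with hypothetical names; unknown tags surface as NameError just as in the validator:

class TagDispatcher(object):
    def _input_to_regex(self, element):
        return r'\S+'

    def to_regex(self, tag, element):
        # Route <tag> to a _<tag>_to_regex handler, if one exists.
        try:
            method = getattr(self, '_%s_to_regex' % tag)
        except AttributeError:
            raise NameError('Unknown XML validation tag [%s]' % tag)
        return method(element)

d = TagDispatcher()
print(d.to_regex('input', None))   # '\\S+'
# d.to_regex('bogus', None) would raise NameError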
4,588
@staticmethod def load_feature(space, path, orig_path): if not os.path.exists(path): raise space.error(space.w_LoadError, orig_path) try: f = open_file_as_stream(path, buffering=0) try: contents = f.readall() finally: f.close() except __HOLE__ as e: raise error_for_oserror(space, e) space.execute(contents, filepath=path)
OSError
dataset/ETHPy150Open topazproject/topaz/topaz/modules/kernel.py/Kernel.load_feature
4,589
@moduledef.function("exec") def method_exec(self, space, args_w): if len(args_w) > 1 and space.respond_to(args_w[0], "to_hash"): raise space.error(space.w_NotImplementedError, "exec with environment") if len(args_w) > 1 and space.respond_to(args_w[-1], "to_hash"): raise space.error(space.w_NotImplementedError, "exec with options") if space.respond_to(args_w[0], "to_ary"): w_cmd = space.convert_type(args_w[0], space.w_array, "to_ary") cmd_w = space.listview(w_cmd) if len(cmd_w) != 2: raise space.error(space.w_ArgumentError, "wrong first argument") cmd, argv0 = [ space.str0_w(space.convert_type( w_e, space.w_string, "to_str" )) for w_e in cmd_w ] else: w_cmd = space.convert_type(args_w[0], space.w_string, "to_str") cmd = space.str0_w(w_cmd) argv0 = None if len(args_w) > 1 or argv0 is not None: if argv0 is None: sepidx = cmd.rfind(os.sep) + 1 if sepidx > 0: argv0 = cmd[sepidx:] else: argv0 = cmd args = [argv0] args += [ space.str0_w(space.convert_type( w_arg, space.w_string, "to_str" )) for w_arg in args_w[1:] ] try: os.execv(cmd, args) except OSError as e: raise error_for_oserror(space, e) else: if not cmd: raise error_for_errno(space, errno.ENOENT) shell = os.environ.get("RUBYSHELL") or os.environ.get("COMSPEC") or "/bin/sh" sepidx = shell.rfind(os.sep) + 1 if sepidx > 0: argv0 = shell[sepidx:] else: argv0 = shell try: os.execv(shell, [argv0, "-c", cmd]) except __HOLE__ as e: raise error_for_oserror(space, e)
OSError
dataset/ETHPy150Open topazproject/topaz/topaz/modules/kernel.py/Kernel.method_exec
4,590
@moduledef.function("Float") def method_Float(self, space, w_arg): if w_arg is space.w_nil: raise space.error(space.w_TypeError, "can't convert nil into Float") elif space.is_kind_of(w_arg, space.w_float): return space.newfloat(space.float_w(w_arg)) elif space.is_kind_of(w_arg, space.w_string): string = space.str_w(w_arg).strip(" ") try: return space.newfloat(float(string)) except __HOLE__: raise space.error(space.w_ArgumentError, "invalid value for Float(): %s" % string) else: return space.convert_type(w_arg, space.w_float, "to_f")
ValueError
dataset/ETHPy150Open topazproject/topaz/topaz/modules/kernel.py/Kernel.method_Float
4,591
def get_file_encoding(self, file_path, preferred_encoding=None): """ Gets an eventual cached encoding for file_path. Raises a KeyError if no encoding were cached for the specified file path. :param file_path: path of the file to look up :returns: The cached encoding. """ _logger().debug('getting encoding for %s', file_path) try: map = json.loads(self._settings.value('cachedFileEncodings')) except TypeError: map = {} try: return map[file_path] except KeyError: encodings = self.preferred_encodings if preferred_encoding: encodings.insert(0, preferred_encoding) for encoding in encodings: _logger().debug('trying encoding: %s', encoding) try: with open(file_path, encoding=encoding) as f: f.read() except (UnicodeDecodeError, IOError, __HOLE__): pass else: return encoding raise KeyError(file_path)
OSError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/pyqode/core/cache.py/Cache.get_file_encoding
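A reduced sketch of the trial-decode loop above, assuming Python 3's open(encoding=...); detect_encoding and the candidate list are hypothetical:

def detect_encoding(path, candidates=('utf-8', 'latin-1')):
    # Return the first candidate that decodes the whole file cleanly;
    # mirror the cache's behaviour of raising KeyError on failure.
    for encoding in candidates:
        try:
            with open(path, encoding=encoding) as f:
                f.read()
        except (UnicodeDecodeError, IOError, OSError):
            continue
        return encoding
    raise KeyError(path)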
4,592
def set_file_encoding(self, path, encoding): """ Cache encoding for the specified file path. :param path: path of the file to cache :param encoding: encoding to cache """ try: map = json.loads(self._settings.value('cachedFileEncodings')) except __HOLE__: map = {} map[path] = encoding self._settings.setValue('cachedFileEncodings', json.dumps(map))
TypeError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/pyqode/core/cache.py/Cache.set_file_encoding
4,593
def get_cursor_position(self, file_path): """ Gets the cached cursor position for file_path :param file_path: path of the file in the cache :return: Cached cursor position or (0, 0) """ try: map = json.loads(self._settings.value('cachedCursorPosition')) except TypeError: map = {} try: pos = map[file_path] except __HOLE__: pos = 0 if isinstance(pos, list): # changed in pyqode 2.6.3, now we store the cursor position # instead of the line and column (faster) pos = 0 return pos
KeyError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/pyqode/core/cache.py/Cache.get_cursor_position
4,594
def set_cursor_position(self, path, position): """ Cache encoding for the specified file path. :param path: path of the file to cache :param position: cursor position to cache """ try: map = json.loads(self._settings.value('cachedCursorPosition')) except __HOLE__: map = {} map[path] = position self._settings.setValue('cachedCursorPosition', json.dumps(map))
TypeError
dataset/ETHPy150Open OpenCobolIDE/OpenCobolIDE/open_cobol_ide/extlibs/pyqode/core/cache.py/Cache.set_cursor_position
4,595
def main(self): if six.PY2: stream = codecs.getwriter('utf-8')(self.output_file) else: stream = self.output_file json_kwargs = { 'ensure_ascii': False, 'indent': self.args.indent, } if six.PY2: json_kwargs['encoding'] = 'utf-8' def default(obj): if isinstance(obj, (datetime.date, datetime.datetime)): return obj.isoformat() raise TypeError('%s is not JSON serializable' % repr(obj)) def dump_json(data, newline=False): json.dump(data, stream, default=default, **json_kwargs) if newline: stream.write("\n") """ Convert CSV to JSON. """ if self.args.lat and not self.args.lon: self.argparser.error('--lon is required whenever --lat is specified.') if self.args.lon and not self.args.lat: self.argparser.error('--lat is required whenever --lon is specified.') if self.args.crs and not self.args.lat: self.argparser.error('--crs is only allowed when --lat and --lon are also specified.') if self.args.streamOutput and (self.args.lat or self.args.lon or self.args.key): self.argparser.error('--stream is only allowed if --lat, --lon and --key are not specified.') # GeoJSON if self.args.lat and self.args.lon: table = agate.Table.from_csv(self.input_file, sniff_limit=self.args.sniff_limit, column_types=self.get_column_types(), **self.reader_kwargs) features = [] min_lon = None min_lat = None max_lon = None max_lat = None lat_column = match_column_identifier(table.column_names, self.args.lat, self.args.zero_based) lon_column = match_column_identifier(table.column_names, self.args.lon, self.args.zero_based) if self.args.key: id_column = match_column_identifier(table.column_names, self.args.key, self.args.zero_based) else: id_column = None for row in table.rows: feature = OrderedDict() feature['type'] = 'Feature' properties = OrderedDict() geoid = None lat = None lon = None for i, c in enumerate(row): if i == lat_column: try: lat = float(c) except __HOLE__: lat = None if min_lat is None or lat < min_lat: min_lat = lat if max_lat is None or lat > max_lat: max_lat = lat elif i == lon_column: try: lon = float(c) except ValueError: lon = None if min_lon is None or lon < min_lon: min_lon = lon if max_lon is None or lon > max_lon: max_lon = lon elif i == id_column: geoid = c else: properties[table.column_names[i]] = c if id_column is not None: feature['id'] = geoid feature['geometry'] = OrderedDict([ ('type', 'Point'), ('coordinates', [lon, lat]) ]) feature['properties'] = properties features.append(feature) output = OrderedDict([ ('type', 'FeatureCollection'), ('bbox', [min_lon, min_lat, max_lon, max_lat]), ('features', features) ]) if self.args.crs: output['crs'] = OrderedDict([ ('type', 'name'), ('properties', { 'name': self.args.crs }) ]) dump_json(output) elif self.args.streamOutput and self.args.no_inference: rows = agate.csv.reader(self.input_file, **self.reader_kwargs) column_names = next(rows) for row in rows: data = OrderedDict() for i, column in enumerate(column_names): try: data[column] = row[i] except IndexError: data[column] = None dump_json(data, newline=True) else: table = agate.Table.from_csv(self.input_file, sniff_limit=self.args.sniff_limit, column_types=self.get_column_types(), **self.reader_kwargs) table.to_json(self.output_file, key=self.args.key, newline=self.args.streamOutput, **json_kwargs)
ValueError
dataset/ETHPy150Open wireservice/csvkit/csvkit/utilities/csvjson.py/CSVJSON.main
4,596
def inet_pton(af, addr):
    """Convert an IP address from text representation into binary form"""
    if af == socket.AF_INET:
        return inet_aton(addr)
    elif af == socket.AF_INET6:
        # IPv6: The use of "::" indicates one or more groups of 16 bits of zeros.
        # We deal with this form of wildcard using a special marker.
        JOKER = b"*"
        while b"::" in addr:
            addr = addr.replace(b"::", b":" + JOKER + b":")
        joker_pos = None
        # The last part of an IPv6 address can be an IPv4 address
        ipv4_addr = None
        if b"." in addr:
            ipv4_addr = addr.split(b":")[-1]
        result = b""
        parts = addr.split(b":")
        for part in parts:
            if part == JOKER:
                # Wildcard is only allowed once
                if joker_pos is None:
                    joker_pos = len(result)
                else:
                    raise Exception("Illegal syntax for IP address")
            elif part == ipv4_addr:
                # FIXME: Make sure IPv4 can only be last part
                # FIXME: inet_aton allows IPv4 addresses with less than 4 octets
                result += socket.inet_aton(ipv4_addr)
            else:
                # Each part must be 16bit. Add missing zeroes before decoding.
                try:
                    result += part.rjust(4, b"0").decode("hex")
                except __HOLE__:
                    raise Exception("Illegal syntax for IP address")
        # If there's a wildcard, fill up with zeros to reach 128bit (16 bytes)
        if JOKER in addr:
            result = (result[:joker_pos] + b"\x00" * (16 - len(result))
                      + result[joker_pos:])
        if len(result) != 16:
            raise Exception("Illegal syntax for IP address")
        return result
    else:
        raise Exception("Address family not supported")
TypeError
dataset/ETHPy150Open phaethon/scapy/scapy/pton_ntop.py/inet_pton
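Where the platform's C library provides it, the stdlib call does the same conversion as the pure-Python fallback above; a quick usage check:

import socket

packed = socket.inet_pton(socket.AF_INET6, '2001:db8::1')
print(len(packed))  # 16 bytes, same as the fallback's result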
4,597
def inet_ntop(af, addr): """Convert an IP address from binary form into text represenation""" if af == socket.AF_INET: return inet_ntoa(addr) elif af == socket.AF_INET6: # IPv6 addresses have 128bits (16 bytes) if len(addr) != 16: raise Exception("Illegal syntax for IP address") parts = [] for left in [0, 2, 4, 6, 8, 10, 12, 14]: try: value = struct.unpack("!H", addr[left:left+2])[0] hexstr = hex(value)[2:] except __HOLE__: raise Exception("Illegal syntax for IP address") parts.append(hexstr.lstrip("0").lower()) result = b":".join(parts) while b":::" in result: result = result.replace(b":::", b"::") # Leaving out leading and trailing zeros is only allowed with :: if result.endswith(b":") and not result.endswith(b"::"): result = result + b"0" if result.startswith(b":") and not result.startswith(b"::"): result = b"0" + result return result else: raise Exception("Address family not supported yet")
TypeError
dataset/ETHPy150Open phaethon/scapy/scapy/pton_ntop.py/inet_ntop
4,598
def main(): if len(sys.argv) > 1: stream_name = sys.argv[1] else: stream_name = "main" try: player = subprocess.Popen(["mplayer", streams[stream_name]], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) except __HOLE__: print("Usage: {} [station]".format(sys.argv[0])) print("Available stations: {}".format(", ".join(streams))) print("Default stream: main") sys.exit(1) except FileNotFoundError: print("You need to have mplayer to use radioreddit.") sys.exit(1) try: while not player.poll(): player_line = player.stdout.readline().decode("utf-8") if player_line.startswith("ICY Info: "): song_name = re.match("ICY Info: StreamTitle='(.*?)';StreamUrl='';", player_line).group(1) print("New song! {}".format(song_name)) except KeyboardInterrupt: player.kill() except Exception as e: print("Exception: {}".format(e))
KeyError
dataset/ETHPy150Open gkbrk/radioreddit-cli/radioreddit-cli.py/main
4,599
@property def next_sibling(self): """ The node immediately following the invocant in their parent's children list. If the invocant does not have a next sibling, it is None """ if self.parent is None: return None # Can't use index(); we need to test by identity for i, child in enumerate(self.parent.children): if child is self: try: return self.parent.children[i+1] except __HOLE__: return None
IndexError
dataset/ETHPy150Open ctxis/canape/CANAPE.Scripting/Lib/lib2to3/pytree.py/Base.next_sibling
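A small sketch of the identity-based lookup the comment above calls out (list.index() compares with ==, but two distinct nodes can compare equal); index_by_identity is a hypothetical helper:

def index_by_identity(seq, item):
    for i, child in enumerate(seq):
        if child is item:
            return i
    raise ValueError('item not found by identity')

a, b = [1], [1]          # equal but distinct objects
print([a, b].index(b))   # 0 -- equality finds the wrong one
print(index_by_identity([a, b], b))  # 1 -- identity finds the right one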