function: string (lengths 11 to 56k)
repo_name: string (lengths 5 to 60)
features: sequence
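Each record below follows the same three-field pattern: the flattened source of a single function, the repository it was extracted from, and a sequence of integers. As a minimal sketch only (assuming the dump is stored as Parquet and read with the Hugging Face datasets library; the file name data.parquet is a hypothetical placeholder), the columns could be iterated like this:

from datasets import load_dataset

# Hypothetical local path; the actual storage location is not given in this dump.
ds = load_dataset("parquet", data_files="data.parquet", split="train")

for row in ds.select(range(3)):
    print(row["repo_name"])      # e.g. "reviewboard/reviewboard"
    print(len(row["function"]))  # flattened function source, 11 to 56k characters
    print(row["features"])       # integer sequence, e.g. [1464, 419, 1464, 1, 1250977189]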
def test_binary_diff(self): """Testing SVN (<backend>) parsing SVN diff with binary file""" diff = ( b'Index: binfile\n' b'============================================================' b'=======\n' b'Cannot display: file marked as a binary type.\n' b'svn:mime-type = application/octet-stream\n' ) parsed_files = self.tool.get_parser(diff).parse() self.assertEqual(len(parsed_files), 1) self.assert_parsed_diff_file( parsed_files[0], orig_filename=b'binfile', orig_file_details=b'(unknown)', modified_filename=b'binfile', modified_file_details=b'(working copy)', index_header_value=b'binfile', binary=True, data=diff)
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def test_keyword_diff(self): """Testing SVN (<backend>) parsing diff with keywords""" # 'svn cat' will expand special variables in svn:keywords, # but 'svn diff' doesn't expand anything. This causes the # patch to fail if those variables appear in the patch context. diff = (b'Index: Makefile\n' b'===========================================================' b'========\n' b'--- Makefile (revision 4)\n' b'+++ Makefile (working copy)\n' b'@@ -1,6 +1,7 @@\n' b' # $Id$\n' b' # $Rev$\n' b' # $Revision:: $\n' b'+# foo\n' b' include ../tools/Makefile.base-vars\n' b' NAME = misc-docs\n' b' OUTNAME = svn-misc-docs\n') filename = 'trunk/doc/misc-docs/Makefile' rev = Revision('4') file = self.tool.get_file(filename, rev) patch(diff, file, filename)
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def test_svn16_property_diff(self): """Testing SVN (<backend>) parsing SVN 1.6 diff with property changes """ diff = ( b'Index:\n' b'======================================================' b'=============\n' b'--- (revision 123)\n' b'+++ (working copy)\n' b'Property changes on: .\n' b'______________________________________________________' b'_____________\n' b'Modified: reviewboard:url\n' b'## -1 +1 ##\n' b'-http://reviews.reviewboard.org\n' b'+http://reviews.reviewboard.org\n' b'Index: binfile\n' b'=======================================================' b'============\nCannot display: file marked as a ' b'binary type.\nsvn:mime-type = application/octet-stream\n' ) parsed_files = self.tool.get_parser(diff).parse() self.assertEqual(len(parsed_files), 1) self.assert_parsed_diff_file( parsed_files[0], orig_filename=b'binfile', orig_file_details=b'(unknown)', modified_filename=b'binfile', modified_file_details=b'(working copy)', index_header_value=b'binfile', binary=True, data=diff)
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def test_unicode_diff(self): """Testing SVN (<backend>) parsing diff with unicode characters""" diff = ( 'Index: Filé\n' '===========================================================' '========\n' '--- Filé (revision 4)\n' '+++ Filé (working copy)\n' '@@ -1,6 +1,7 @@\n' '+# foó\n' ' include ../tools/Makefile.base-vars\n' ' NAME = misc-docs\n' ' OUTNAME = svn-misc-docs\n' ).encode('utf-8') parsed_files = self.tool.get_parser(diff).parse() self.assertEqual(len(parsed_files), 1) self.assert_parsed_diff_file( parsed_files[0], orig_filename='Filé'.encode('utf-8'), orig_file_details=b'(revision 4)', modified_filename='Filé'.encode('utf-8'), modified_file_details=b'(working copy)', index_header_value='Filé'.encode('utf-8'), insert_count=1, data=diff)
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def test_diff_with_added_empty_file(self): """Testing parsing SVN diff with added empty file""" diff = ( b'Index: empty-file\t(added)\n' b'===========================================================' b'========\n' b'--- empty-file\t(revision 0)\n' b'+++ empty-file\t(revision 0)\n' ) parsed_files = self.tool.get_parser(diff).parse() self.assertEqual(len(parsed_files), 1) self.assert_parsed_diff_file( parsed_files[0], orig_filename=b'empty-file', orig_file_details=b'(revision 0)', modified_filename=b'empty-file', modified_file_details=b'(revision 0)', index_header_value=b'empty-file\t(added)', data=diff)
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def test_diff_with_nonexistent_revision_for_dest_file(self): """Testing parsing SVN diff with deleted file using "nonexistent" destination revision """ diff = ( b'Index: deleted-file\n' b'===========================================================' b'========\n' b'--- deleted-file\t(revision 4)\n' b'+++ deleted-file\t(nonexistent)\n' b'@@ -1,2 +0,0 @@\n' b'-line 1\n' b'-line 2\n' ) parsed_files = self.tool.get_parser(diff).parse() self.assertEqual(len(parsed_files), 1) self.assert_parsed_diff_file( parsed_files[0], orig_filename=b'deleted-file', orig_file_details=b'(revision 4)', modified_filename=b'deleted-file', modified_file_details=b'(nonexistent)', index_header_value=b'deleted-file', deleted=True, delete_count=2, data=diff)
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def test_get_branches(self): """Testing SVN (<backend>) get_branches""" branches = self.tool.get_branches() self.assertEqual(len(branches), 3) self.assertEqual(branches[0], Branch(id='trunk', name='trunk', commit='12', default=True)) self.assertEqual(branches[1], Branch(id='branches/branch1', name='branch1', commit='7', default=False)) self.assertEqual(branches[2], Branch(id='top-level-branch', name='top-level-branch', commit='10', default=False))
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def test_get_commits_with_branch(self): """Testing SVN (<backend>) get_commits with branch""" commits = self.tool.get_commits(branch='/branches/branch1', start='5') self.assertEqual(len(commits), 5) self.assertEqual( commits[0], Commit('chipx86', '5', '2010-05-21T09:33:40.893946', 'Add an unterminated keyword for testing bug #1523\n', '4')) commits = self.tool.get_commits(branch='/branches/branch1', start='7') self.assertEqual(len(commits), 6) self.assertEqual( commits[0], Commit('david', '7', '2013-06-13T07:43:27.259554', 'Add a branch', '5')) self.assertEqual( commits[1], Commit('chipx86', '5', '2010-05-21T09:33:40.893946', 'Add an unterminated keyword for testing bug #1523\n', '4'))
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def _get_log(*args, **kwargs): return [ { 'author': 'chipx86', 'revision': '5', 'message': 'Commit 1', }, ]
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def test_get_commits_with_exception(self): """Testing SVN (<backend>) get_commits with exception""" def _get_log(*args, **kwargs): raise Exception('Bad things happened') self.spy_on(self.tool.client.get_log, _get_log) with self.assertRaisesMessage(SCMError, 'Bad things happened'): self.tool.get_commits(start='5')
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def test_utf8_keywords(self): """Testing SVN (<backend>) with UTF-8 files with keywords""" self.repository.get_file('trunk/utf8-file.txt', '9')
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def test_normalize_patch_with_svn_and_no_expanded_keywords(self): """Testing SVN (<backend>) normalize_patch with no expanded keywords""" diff = ( b'Index: Makefile\n' b'===========================================================' b'========\n' b'--- Makefile (revision 4)\n' b'+++ Makefile (working copy)\n' b'@@ -1,6 +1,7 @@\n' b' # $Id$\n' b' # $Rev$\n' b' # $Revision:: $\n' b'+# foo\n' b' include ../tools/Makefile.base-vars\n' b' NAME = misc-docs\n' b' OUTNAME = svn-misc-docs\n' ) normalized = self.tool.normalize_patch( patch=diff, filename='trunk/doc/misc-docs/Makefile', revision='4') self.assertEqual( normalized, b'Index: Makefile\n' b'===========================================================' b'========\n' b'--- Makefile (revision 4)\n' b'+++ Makefile (working copy)\n' b'@@ -1,6 +1,7 @@\n' b' # $Id$\n' b' # $Rev$\n' b' # $Revision:: $\n' b'+# foo\n' b' include ../tools/Makefile.base-vars\n' b' NAME = misc-docs\n' b' OUTNAME = svn-misc-docs\n')
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def test_collapse_svn_keywords(self): """Testing collapse_svn_keywords""" keyword_test_data = [ (b'Id', b'/* $Id: test2.c 3 2014-08-04 22:55:09Z david $ */', b'/* $Id$ */'), (b'id', b'/* $Id: test2.c 3 2014-08-04 22:55:09Z david $ */', b'/* $Id$ */'), (b'id', b'/* $id: test2.c 3 2014-08-04 22:55:09Z david $ */', b'/* $id$ */'), (b'Id', b'/* $id: test2.c 3 2014-08-04 22:55:09Z david $ */', b'/* $id$ */') ] for keyword, data, result in keyword_test_data: self.assertEqual(collapse_svn_keywords(data, keyword), result)
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def test_fields(self): """Testing SVNTool authentication form fields""" form = SVNTool.create_auth_form() self.assertEqual(list(form.fields), ['username', 'password']) self.assertEqual(form['username'].help_text, '') self.assertEqual(form['username'].label, 'Username') self.assertEqual(form['password'].help_text, '') self.assertEqual(form['password'].label, 'Password')
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def test_load(self): """Tetting SVNTool authentication form load""" repository = self.create_repository( tool_name='Subversion', username='test-user', password='test-pass') form = SVNTool.create_auth_form(repository=repository) form.load() self.assertEqual(form['username'].value(), 'test-user') self.assertEqual(form['password'].value(), 'test-pass')
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def test_save(self): """Tetting SVNTool authentication form save""" repository = self.create_repository(tool_name='Subversion') form = SVNTool.create_auth_form( repository=repository, data={ 'username': 'test-user', 'password': 'test-pass', }) self.assertTrue(form.is_valid()) form.save() self.assertEqual(repository.username, 'test-user') self.assertEqual(repository.password, 'test-pass')
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def test_fields(self): """Testing SVNTool repository form fields""" form = SVNTool.create_repository_form() self.assertEqual(list(form.fields), ['path', 'mirror_path']) self.assertEqual(form['path'].help_text, 'The path to the repository. This will generally be ' 'the URL you would use to check out the repository.') self.assertEqual(form['path'].label, 'Path') self.assertEqual(form['mirror_path'].help_text, '') self.assertEqual(form['mirror_path'].label, 'Mirror Path')
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def test_load(self): """Tetting SVNTool repository form load""" repository = self.create_repository( tool_name='Subversion', path='https://svn.example.com/', mirror_path='https://svn.mirror.example.com') form = SVNTool.create_repository_form(repository=repository) form.load() self.assertEqual(form['path'].value(), 'https://svn.example.com/') self.assertEqual(form['mirror_path'].value(), 'https://svn.mirror.example.com')
reviewboard/reviewboard
[ 1464, 419, 1464, 1, 1250977189 ]
def _main(): parser = argparse.ArgumentParser() parser.add_argument('config', help='JSON configuration file') parser.add_argument('--only-download', action='store_true', help='Only download GTFS file') parser.add_argument('--use-no-q-dirs', action='store_true', help='Do not use Q dirs') args = parser.parse_args() _init_logging() start_time = time.time() logging.debug('started {}'.format(sys.argv)) config = _load_config(args.config) gtfs_name = config['name'] downloaded_gtfs_zip = _download_gtfs(config['url']) modify_date = _get_modify_date(downloaded_gtfs_zip) gtfs_dir = _get_q_dir(config['gtfs_dir'], modify_date, not args.use_no_q_dirs) gtfs_zip = _rename_gtfs_zip(gtfs_dir, downloaded_gtfs_zip, gtfs_name, modify_date) if gtfs_zip and (not args.only_download): log_dir = _get_q_dir(config['log_dir'], modify_date, not args.use_no_q_dirs) _generate_json(gtfs_name, modify_date, gtfs_zip, config['json_dir'], log_dir) logging.debug('took {} seconds, max mem: {} megabytes'.format( int(time.time() - start_time), resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024))
panur/kartalla
[ 2, 3, 2, 8, 1450813898 ]
def _progress(text): print(text) logging.debug(text)
panur/kartalla
[ 2, 3, 2, 8, 1450813898 ]
def _load_config(config_path): with open(config_path) as config_file: return json.load(config_file)
panur/kartalla
[ 2, 3, 2, 8, 1450813898 ]
def _execute_command(command): if os.system(command) != 0: raise SystemExit('failed to execute: {}'.format(command))
panur/kartalla
[ 2, 3, 2, 8, 1450813898 ]
def _get_modify_times(zip_filename): modify_times = set() with zipfile.ZipFile(zip_filename) as zip_file: for info in zip_file.infolist(): modify_times.add(datetime.datetime(*info.date_time).strftime('%Y%m%d')) return modify_times
panur/kartalla
[ 2, 3, 2, 8, 1450813898 ]
def _rename_gtfs_zip(gtfs_dir, old_filename, gtfs_name, modify_date): _create_dir(gtfs_dir) new_filename = os.path.join(gtfs_dir, '{}_{}.zip'.format(gtfs_name, modify_date)) if os.path.isfile(new_filename): if _compare_files(old_filename, new_filename): _progress('downloaded gtfs file is identical to: {}'.format(new_filename)) os.remove(old_filename) return None _rename_existing_file(new_filename) os.rename(old_filename, new_filename) _progress('renamed: {} -> {}'.format(old_filename, new_filename)) return new_filename
panur/kartalla
[ 2, 3, 2, 8, 1450813898 ]
def _compare_files(filename_a, filename_b): return _get_hash(filename_a) == _get_hash(filename_b)
panur/kartalla
[ 2, 3, 2, 8, 1450813898 ]
def _generate_json(gtfs_name, modify_date, gtfs_zip, json_dir, log_dir): _create_dir(json_dir) date_output_file = os.path.join(json_dir, '{}_{}.json'.format(gtfs_name, modify_date)) _rename_existing_file(date_output_file) _create_dir(log_dir) log_path = os.path.join(log_dir, 'gtfs2json_{}_{}_{}.log'.format(gtfs_name, modify_date, _get_now_timestamp())) _progress('generating json for {}'.format(gtfs_zip)) command = '{}/gtfs2json.py --log-file {} {} {}'.format(os.path.dirname(__file__), log_path, gtfs_zip, date_output_file) _execute_command(command) _create_base_output_file(date_output_file, os.path.join(json_dir, '{}.json'.format(gtfs_name)))
panur/kartalla
[ 2, 3, 2, 8, 1450813898 ]
def _rename_existing_file(filename): if os.path.isfile(filename): suffix = filename.split('.')[-1] new_filename = filename.replace('.{}'.format(suffix), '_{}.{}'.format(_get_now_timestamp(), suffix)) os.rename(filename, new_filename) _progress_warning('renamed existing {} file {} -> {}'.format(suffix, filename, new_filename))
panur/kartalla
[ 2, 3, 2, 8, 1450813898 ]
def __init__(self, input_size, splice, num_stack, parameter_init, name='cnn_student_xe_encoder'): assert input_size % 3 == 0 self.num_channels = (input_size // 3) // num_stack // splice self.splice = splice self.num_stack = num_stack self.parameter_init = parameter_init self.name = name
hirofumi0810/tensorflow_end2end_speech_recognition
[ 311, 126, 311, 11, 1495618521 ]
def is_number(string): return bool(float_match(string))
GunnerJnr/_CodeInstitute
[ 7, 6, 7, 104, 1501600555 ]
def __init__(self, database_name, username, password, host='localhost'): """ Here we'll try to connect to the database using the variables that we passed through and if the connection fails we'll print out the error """ try: self.db = _mysql.connect(db=database_name, host=host, user=username, passwd=password) self.database_name = database_name print "Connected to MySQL!" except _mysql.Error, e: print e
GunnerJnr/_CodeInstitute
[ 7, 6, 7, 104, 1501600555 ]
def __del__(self): """ Here we'll do a check to see if `self.db` is present. This will only be the case if the connection was successfully made in the initialiser. Inside that condition we'll close the connection """ if hasattr(self, 'db'): self.db.close() print "MySQL Connection Closed"
GunnerJnr/_CodeInstitute
[ 7, 6, 7, 104, 1501600555 ]
def convert_to_named_tuples(self, cursor): results = None names = " ".join(d[0] for d in cursor.description) klass = namedtuple('Results', names) try: results = map(klass._make, cursor.fetchall()) except _mysql.ProgrammingError, e: print e return results
GunnerJnr/_CodeInstitute
[ 7, 6, 7, 104, 1501600555 ]
def select(self, table, columns=None, named_tuples=False, **kwargs): """ We'll create our `select` method in order to make it simpler for extracting data from the database. select(table_name, [list_of_column_names]) """ sql_str = "SELECT " # add columns or just use the wildcard if not columns: sql_str += " * " else: for column in columns: sql_str += "%s, " % column
GunnerJnr/_CodeInstitute
[ 7, 6, 7, 104, 1501600555 ]
def delete(self, table, **wheres): """ This function will allow us to delete data from a given table based on whether or not a WHERE clause is present """ sql_str = "DELETE FROM `%s`.`%s`" % (self.database_name, table) if wheres is not None: first_where_clause = True for where, term in wheres.iteritems(): if first_where_clause: # This is the first WHERE clause sql_str += " WHERE `%s`.`%s` %s" % (table, where, term) first_where_clause = False else: # this is the second (additional) WHERE clause so we use AND sql_str += " AND `%s`.`%s` %s" % (table, where, term) sql_str += ";" cursor = self.db.cursor() cursor.execute(sql_str) self.db.commit() cursor.close()
GunnerJnr/_CodeInstitute
[ 7, 6, 7, 104, 1501600555 ]
def is_number(string): return bool(float_match(string))
GunnerJnr/_CodeInstitute
[ 7, 6, 7, 104, 1501600555 ]
def update(self, table, where=None, **column_values): sql_str = "UPDATE `%s`.`%s` SET " % (self.database_name, table) if column_values is not None: for column_name, value in column_values.iteritems(): sql_str += "`%s`=" % column_name # check how we should add this to the column string if is_number(value): # it's a number so we don't add '' sql_str += "%s, " % value else: # it's a date or string so add the '' sql_str += "'%s', " % value sql_str = sql_str[:-2] # strip off the last , and space character if where: sql_str += " WHERE %s" % where cursor = self.db.cursor() cursor.execute(sql_str) self.db.commit() cursor.close()
GunnerJnr/_CodeInstitute
[ 7, 6, 7, 104, 1501600555 ]
def set_host_url_arg(): parser.add_argument('--host', required=True, help='the url for the Materials Commons server')
materials-commons/materialscommons.org
[ 10, 1, 10, 110, 1365603676 ]
def set_apikey_arg(): parser.add_argument('--apikey', required=True, help='apikey for the user building the demo project')
materials-commons/materialscommons.org
[ 10, 1, 10, 110, 1365603676 ]
def user(): return User.objects.create_user( "fred", first_name="Fred", last_name="Flinstone", email="[email protected]" )
yunojuno/django-onfido
[ 3, 6, 3, 1, 1476047324 ]
def applicant(user): data = copy.deepcopy(TEST_APPLICANT) return Applicant.objects.create_applicant(user=user, raw=data)
yunojuno/django-onfido
[ 3, 6, 3, 1, 1476047324 ]
def check(applicant): data = copy.deepcopy(TEST_CHECK) return Check.objects.create_check(applicant, raw=data)
yunojuno/django-onfido
[ 3, 6, 3, 1, 1476047324 ]
def identity_report(check): data = copy.deepcopy(TEST_REPORT_IDENTITY_ENHANCED) return Report.objects.create_report(check, raw=data)
yunojuno/django-onfido
[ 3, 6, 3, 1, 1476047324 ]
def document_report(check): data = copy.deepcopy(TEST_REPORT_DOCUMENT) return Report.objects.create_report(check, raw=data)
yunojuno/django-onfido
[ 3, 6, 3, 1, 1476047324 ]
def report(identity_report): return identity_report
yunojuno/django-onfido
[ 3, 6, 3, 1, 1476047324 ]
def event(check): data = copy.deepcopy(TEST_EVENT) return Event().parse(data)
yunojuno/django-onfido
[ 3, 6, 3, 1, 1476047324 ]
def parse_times(setting): """ read out the date and times of a recording """ def timestr2timeobj(time_str): """ convert a time string with milliseconds to a datetime object """ time, milli = time_str.split('.') time = datetime.datetime.strptime(time, '%H:%M:%S') time += datetime.timedelta(seconds=int(milli)/1000) return time tstart, tstop = [timestr2timeobj(rec[1]) for rec in setting.start_rec, setting.stop_rec] if setting.folder is None: folder_date_obj = None else: date_str = date_pattern.match(setting.folder).groups()[0] folder_date_obj = datetime.datetime.strptime(date_str, r'%Y-%m-%d_%H-%M-%S') tstart = datetime.datetime.combine(folder_date_obj, tstart.time()) tstop = datetime.datetime.combine(folder_date_obj, tstop.time()) # by default assume that recording is stopped once every day if tstop < tstart: tstop += datetime.timedelta(days=1) return folder_date_obj, tstart, tstop
jniediek/combinato
[ 36, 12, 36, 23, 1441963704 ]
def __init__(self): self.num2name = None self.name2num = None self.lrefs = None self.grefs = None self.crefs = None self.start_rec = None self.stop_rec = None self.start_timestamp = None self.stop_timestamp = None self.folder = None
jniediek/combinato
[ 36, 12, 36, 23, 1441963704 ]
def board_num_to_chan(board, num): return (board - 1) * 16 + num
jniediek/combinato
[ 36, 12, 36, 23, 1441963704 ]
def parser(fname): """ transform logfile into header, log, and ignored lines """ with open(fname, 'r') as fid: lines = fid.readlines() fid.close() in_header = True is_notice = False ignored_lines = [] protocol = [] header = {} for line in lines: if line[:13] == '-* NOTICE *-': is_notice = True else: is_notice = False if in_header: # this means header is over if is_notice: in_header = False else: if len(line) > 3: key, value = line.split(':', 1) header[key] = value.strip() else: if is_notice: fields = line[15:].split(' - ', 4) time = fields[0] stamp = int(fields[1]) msg = fields[2].strip().replace('\r', '') if len(fields) == 4: msg2 = fields[3].strip().replace('\r', '') else: msg2 = '' protocol.append((time, stamp, msg, msg2)) elif line.startswith('Log file successfully moved to'): target = line.split()[-1] # this indicates a log file move # mov is our key protocol.append((0, 0, 'mov', target)) else: ignored_lines.append(line.strip()) try: bn = 'Cheetah ' + header['Cheetah Build Number'] except KeyError: bn = 'ATLAS ' + header['Cheetah ATLAS Build Number'] print(bn) return header, protocol, ignored_lines
jniediek/combinato
[ 36, 12, 36, 23, 1441963704 ]
def print_refs(lrefs, grefs): """ overview of local and global references """ sorted_keys = sorted(lrefs.keys()) for board, ref in sorted_keys: lref = lrefs[(board, ref)] if lref in grefs: gboard = grefs[lref] stri = 'global, board {}'.format(gboard) else: stri = 'local' print('board {} ref {} - {} ({})'. format(board, ref, lrefs[(board, ref)], stri))
jniediek/combinato
[ 36, 12, 36, 23, 1441963704 ]
def create_rep(num2name, name2num, crefs, lrefs, grefs): """ create a human readable representation of the referencing """ all_defined_check(num2name, crefs) if DEBUG: print_refs(lrefs, grefs) chnames = [] for num in sorted(num2name.keys()): chnames += num2name[num] out_str = [] for name in chnames: try: chan = name2num[name] except KeyError: print('Processing {}, but no channel number was ' 'assigned. Check results carefully!'.format(name)) continue ch_board, ch_board_num = chan_to_board_num(chan) local_ref_num = crefs[chan] # gives the local ref number # this is now a local number, so it's in 0..7 maybe_global = False if local_ref_num in grefs: ref_board = grefs[local_ref_num] if ref_board != ch_board: maybe_global = True # here, I have to check whether the # driving channel is the same number on my local board # i.e., if b1_15 is b1_ref_2 and b1_ref_2 is gd # and b3_7 has ref_2, then it's global only if b3_15 is b3_ref_2 else: ref_board = ch_board ref_num = lrefs[(ref_board, local_ref_num)] ref_num2 = lrefs[(ch_board, local_ref_num)] add_str = '' if maybe_global: # print('Special case, global ref {}, local ref {}' # .format(ref_num, lrefs[(ch_board, local_ref_num)])) if ref_num2 != 38: add_str = ' ?' if ref_num != ref_num2: # print(ref_num, lrefs[(ch_board, local_ref_num)]) ref_board = ch_board ref_num = ref_num2 else: add_str = ' ???' ref_board = ch_board ref_num = ref_num2 pass # print('Using channel 38') if ref_board == ch_board: board_str = 'local{}'.format(add_str) else: board_str = 'global{}'.format(add_str) if ref_num > 31: # these are the reference wires if ref_num == 38: ref_name = 'board {} Unknown Ground'.format(ref_board) elif ref_num == 36: ref_name = 'board {} Patient Ground'.format(ref_board) else: tnum = (ref_num - 32) * 8 refchan = board_num_to_chan(ref_board, tnum) if refchan in num2name: pref_name = num2name[refchan] idx = 0 if len(pref_name) == 2: if pref_name[0][0] == 'u': idx = 1 ref_name = pref_name[idx][:-1] + ' reference wire' else: ref_name = 'board {} head stage {} reference wire'.\ format(ref_board, ref_num - 32) else: global_num = board_num_to_chan(ref_board, ref_num) chlist = num2name[global_num] if len(chlist): ref_name = chlist[0] else: ref_name = 'UNDEF' if name == ref_name: board_str += ' ZERO' out_str.append(('{:03d}'.format(chan), name, ref_name, board_str)) return out_str
jniediek/combinato
[ 36, 12, 36, 23, 1441963704 ]
def set_test_params(self): self.num_nodes = 2 self.setup_clean_chain = True
Bitcoin-ABC/bitcoin-abc
[ 1114, 682, 1114, 71, 1493632695 ]
def setup_network(self, split=False): self.setup_nodes()
Bitcoin-ABC/bitcoin-abc
[ 1114, 682, 1114, 71, 1493632695 ]
def run_test(self): self.log.info("Mining blocks...") self.nodes[0].generate(1) self.nodes[1].generate(1) timestamp = self.nodes[1].getblock( self.nodes[1].getbestblockhash())['mediantime'] # Sync the timestamp to the wallet, so that importmulti works self.nodes[1].syncwithvalidationinterfacequeue() node0_address1 = self.nodes[0].getaddressinfo( self.nodes[0].getnewaddress()) # Check only one address assert_equal(node0_address1['ismine'], True) # Node 1 sync test assert_equal(self.nodes[1].getblockcount(), 1) # Address Test - before import address_info = self.nodes[1].getaddressinfo(node0_address1['address']) assert_equal(address_info['iswatchonly'], False) assert_equal(address_info['ismine'], False) # RPC importmulti ----------------------------------------------- # Bitcoin Address (implicit non-internal) self.log.info("Should import an address") key = get_key(self.nodes[0]) self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr}, "timestamp": "now"}, success=True) test_address(self.nodes[1], key.p2pkh_addr, iswatchonly=True, ismine=False, timestamp=timestamp, ischange=False) watchonly_address = key.p2pkh_addr watchonly_timestamp = timestamp self.log.info("Should not import an invalid address") self.test_importmulti({"scriptPubKey": {"address": "not valid address"}, "timestamp": "now"}, success=False, error_code=-5, error_message='Invalid address \"not valid address\"') # ScriptPubKey + internal self.log.info("Should import a scriptPubKey with internal flag") key = get_key(self.nodes[0]) self.test_importmulti({"scriptPubKey": key.p2pkh_script, "timestamp": "now", "internal": True}, success=True) test_address(self.nodes[1], key.p2pkh_addr, iswatchonly=True, ismine=False, timestamp=timestamp, ischange=True) # ScriptPubKey + internal + label self.log.info( "Should not allow a label to be specified when internal is true") key = get_key(self.nodes[0]) self.test_importmulti({"scriptPubKey": key.p2pkh_script, "timestamp": "now", "internal": True, "label": "Unsuccessful labelling for internal addresses"}, success=False, error_code=-8, error_message='Internal addresses should not have a label') # Nonstandard scriptPubKey + !internal self.log.info( "Should not import a nonstandard scriptPubKey without internal flag") nonstandardScriptPubKey = key.p2pkh_script + CScript([OP_NOP]).hex() key = get_key(self.nodes[0]) self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey, "timestamp": "now"}, success=False, error_code=-8, error_message='Internal must be set to true for nonstandard scriptPubKey imports.') test_address(self.nodes[1], key.p2pkh_addr, iswatchonly=False, ismine=False, timestamp=None) # Address + Public key + !Internal(explicit) self.log.info("Should import an address with public key") key = get_key(self.nodes[0]) self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr}, "timestamp": "now", "pubkeys": [key.pubkey], "internal": False}, success=True, warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."]) test_address(self.nodes[1], key.p2pkh_addr, iswatchonly=True, ismine=False, timestamp=timestamp) # ScriptPubKey + Public key + internal self.log.info( "Should import a scriptPubKey with internal and with public key") key = get_key(self.nodes[0]) self.test_importmulti({"scriptPubKey": key.p2pkh_script, "timestamp": "now", "pubkeys": [key.pubkey], "internal": True}, success=True, warnings=["Some private keys are missing, outputs will be considered watchonly. 
If this is intentional, specify the watchonly flag."]) test_address(self.nodes[1], key.p2pkh_addr, iswatchonly=True, ismine=False, timestamp=timestamp) # Nonstandard scriptPubKey + Public key + !internal self.log.info( "Should not import a nonstandard scriptPubKey without internal and with public key") key = get_key(self.nodes[0]) self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey, "timestamp": "now", "pubkeys": [key.pubkey]}, success=False, error_code=-8, error_message='Internal must be set to true for nonstandard scriptPubKey imports.') test_address(self.nodes[1], key.p2pkh_addr, iswatchonly=False, ismine=False, timestamp=None) # Address + Private key + !watchonly self.log.info("Should import an address with private key") key = get_key(self.nodes[0]) self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr}, "timestamp": "now", "keys": [key.privkey]}, success=True) test_address(self.nodes[1], key.p2pkh_addr, iswatchonly=False, ismine=True, timestamp=timestamp) self.log.info( "Should not import an address with private key if is already imported") self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr}, "timestamp": "now", "keys": [key.privkey]}, success=False, error_code=-4, error_message='The wallet already contains the private key for this address or script ("' + key.p2pkh_script + '")') # Address + Private key + watchonly self.log.info( "Should import an address with private key and with watchonly") key = get_key(self.nodes[0]) self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr}, "timestamp": "now", "keys": [key.privkey], "watchonly": True}, success=True, warnings=["All private keys are provided, outputs will be considered spendable. If this is intentional, do not specify the watchonly flag."]) test_address(self.nodes[1], key.p2pkh_addr, iswatchonly=False, ismine=True, timestamp=timestamp) # ScriptPubKey + Private key + internal self.log.info( "Should import a scriptPubKey with internal and with private key") key = get_key(self.nodes[0]) self.test_importmulti({"scriptPubKey": key.p2pkh_script, "timestamp": "now", "keys": [key.privkey], "internal": True}, success=True) test_address(self.nodes[1], key.p2pkh_addr, iswatchonly=False, ismine=True, timestamp=timestamp) # Nonstandard scriptPubKey + Private key + !internal self.log.info( "Should not import a nonstandard scriptPubKey without internal and with private key") key = get_key(self.nodes[0]) self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey, "timestamp": "now", "keys": [key.privkey]}, success=False, error_code=-8, error_message='Internal must be set to true for nonstandard scriptPubKey imports.') test_address(self.nodes[1], key.p2pkh_addr, iswatchonly=False, ismine=False, timestamp=None) # P2SH address multisig = get_multisig(self.nodes[0]) self.nodes[1].generate(100) self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00) self.nodes[1].generate(1) timestamp = self.nodes[1].getblock( self.nodes[1].getbestblockhash())['mediantime'] self.nodes[1].syncwithvalidationinterfacequeue() self.log.info("Should import a p2sh") self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr}, "timestamp": "now"}, success=True) test_address(self.nodes[1], multisig.p2sh_addr, isscript=True, iswatchonly=True, timestamp=timestamp) p2shunspent = self.nodes[1].listunspent( 0, 999999, [multisig.p2sh_addr])[0] assert_equal(p2shunspent['spendable'], False) assert_equal(p2shunspent['solvable'], False) # P2SH + Redeem script multisig = get_multisig(self.nodes[0]) self.nodes[1].generate(100) 
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00) self.nodes[1].generate(1) timestamp = self.nodes[1].getblock( self.nodes[1].getbestblockhash())['mediantime'] self.nodes[1].syncwithvalidationinterfacequeue() self.log.info("Should import a p2sh with respective redeem script") self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr}, "timestamp": "now", "redeemscript": multisig.redeem_script}, success=True, warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."]) test_address( self.nodes[1], multisig.p2sh_addr, timestamp=timestamp, iswatchonly=True, ismine=False, solvable=True) p2shunspent = self.nodes[1].listunspent( 0, 999999, [multisig.p2sh_addr])[0] assert_equal(p2shunspent['spendable'], False) assert_equal(p2shunspent['solvable'], True) # P2SH + Redeem script + Private Keys + !Watchonly multisig = get_multisig(self.nodes[0]) self.nodes[1].generate(100) self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00) self.nodes[1].generate(1) timestamp = self.nodes[1].getblock( self.nodes[1].getbestblockhash())['mediantime'] self.nodes[1].syncwithvalidationinterfacequeue() self.log.info( "Should import a p2sh with respective redeem script and private keys") self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr}, "timestamp": "now", "redeemscript": multisig.redeem_script, "keys": multisig.privkeys[0:2]}, success=True, warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."]) test_address(self.nodes[1], multisig.p2sh_addr, timestamp=timestamp, ismine=False, iswatchonly=True, solvable=True) p2shunspent = self.nodes[1].listunspent( 0, 999999, [multisig.p2sh_addr])[0] assert_equal(p2shunspent['spendable'], False) assert_equal(p2shunspent['solvable'], True) # P2SH + Redeem script + Private Keys + Watchonly multisig = get_multisig(self.nodes[0]) self.nodes[1].generate(100) self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00) self.nodes[1].generate(1) timestamp = self.nodes[1].getblock( self.nodes[1].getbestblockhash())['mediantime'] self.nodes[1].syncwithvalidationinterfacequeue() self.log.info( "Should import a p2sh with respective redeem script and private keys") self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr}, "timestamp": "now", "redeemscript": multisig.redeem_script, "keys": multisig.privkeys[0:2], "watchonly": True}, success=True) test_address(self.nodes[1], multisig.p2sh_addr, iswatchonly=True, ismine=False, solvable=True, timestamp=timestamp) # Address + Public key + !Internal + Wrong pubkey self.log.info( "Should not import an address with the wrong public key as non-solvable") key = get_key(self.nodes[0]) wrong_key = get_key(self.nodes[0]).pubkey self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr}, "timestamp": "now", "pubkeys": [wrong_key]}, success=True, warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. 
If this is intentional, specify the watchonly flag."]) test_address(self.nodes[1], key.p2pkh_addr, iswatchonly=True, ismine=False, solvable=False, timestamp=timestamp) # ScriptPubKey + Public key + internal + Wrong pubkey self.log.info( "Should import a scriptPubKey with internal and with a wrong public key as non-solvable") key = get_key(self.nodes[0]) wrong_key = get_key(self.nodes[0]).pubkey self.test_importmulti({"scriptPubKey": key.p2pkh_script, "timestamp": "now", "pubkeys": [wrong_key], "internal": True}, success=True, warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."]) test_address(self.nodes[1], key.p2pkh_addr, iswatchonly=True, ismine=False, solvable=False, timestamp=timestamp) # Address + Private key + !watchonly + Wrong private key self.log.info( "Should import an address with a wrong private key as non-solvable") key = get_key(self.nodes[0]) wrong_privkey = get_key(self.nodes[0]).privkey self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr}, "timestamp": "now", "keys": [wrong_privkey]}, success=True, warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."]) test_address(self.nodes[1], key.p2pkh_addr, iswatchonly=True, ismine=False, solvable=False, timestamp=timestamp) # ScriptPubKey + Private key + internal + Wrong private key self.log.info( "Should import a scriptPubKey with internal and with a wrong private key as non-solvable") key = get_key(self.nodes[0]) wrong_privkey = get_key(self.nodes[0]).privkey self.test_importmulti({"scriptPubKey": key.p2pkh_script, "timestamp": "now", "keys": [wrong_privkey], "internal": True}, success=True, warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."]) test_address(self.nodes[1], key.p2pkh_addr, iswatchonly=True, ismine=False, solvable=False, timestamp=timestamp) # Importing existing watch only address with new timestamp should # replace saved timestamp. assert_greater_than(timestamp, watchonly_timestamp) self.log.info("Should replace previously saved watch only timestamp.") self.test_importmulti({"scriptPubKey": {"address": watchonly_address}, "timestamp": "now"}, success=True) test_address(self.nodes[1], watchonly_address, iswatchonly=True, ismine=False, timestamp=timestamp) watchonly_timestamp = timestamp # restart nodes to check for proper serialization/deserialization of # watch only address self.stop_nodes() self.start_nodes() test_address(self.nodes[1], watchonly_address, iswatchonly=True, ismine=False, timestamp=watchonly_timestamp) # Bad or missing timestamps self.log.info("Should throw on invalid or missing timestamp values") assert_raises_rpc_error(-3, 'Missing required timestamp field for key', self.nodes[1].importmulti, [{"scriptPubKey": key.p2pkh_script}]) assert_raises_rpc_error(-3, 'Expected number or "now" timestamp value for key. 
got type string', self.nodes[1].importmulti, [{ "scriptPubKey": key.p2pkh_script, "timestamp": "" }]) # Test that importing of a P2PKH address via descriptor without # checksum fails key = get_key(self.nodes[0]) self.log.info( "Should fail to import a p2pkh address from descriptor with no checksum") self.test_importmulti({"desc": "pkh(" + key.pubkey + ")", "timestamp": "now", "label": "Descriptor import test"}, success=False, error_code=-5, error_message='Missing checksum') # Test ranged descriptor fails if range is not specified xpriv = "tprv8ZgxMBicQKsPeuVhWwi6wuMQGfPKi9Li5GtX35jVNknACgqe3CY4g5xgkfDDJcmtF7o1QnxWDRYw4H5P26PXq7sbcUkEqeR4fg3Kxp2tigg" # hdkeypath=m/0'/0'/0' and 1' addresses = [ "ecregtest:prvn9ycvgr5atuyh49sua3mapskh2mnnzg7t9yp6dt", "ecregtest:pp3n087yx0njv2e5wcvltahfxqst7l66rutz8ceeat"] # pkh subscripts corresponding to the above addresses addresses += [ "ecregtest:qqdkxd2xnzftq2p8wr3sqqyw8lntap7tncs546s6pr", "ecregtest:qpyryy83jfaec5u0gpzldk6teadsuq8zlyqh5l30uq", ] desc = "sh(pkh(" + xpriv + "/0'/0'/*'" + "))" self.log.info( "Ranged descriptor import should fail without a specified range") self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now"}, success=False, error_code=-8, error_message='Descriptor is ranged, please specify the range') # Test importing of a ranged descriptor with xpriv self.log.info( "Should import the ranged descriptor with specified range as solvable") self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": 1}, success=True) for address in addresses: test_address(self.nodes[1], address, solvable=True, ismine=True) self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": -1}, success=False, error_code=-8, error_message='End of range is too high') self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [-1, 10]}, success=False, error_code=-8, error_message='Range should be greater or equal than 0') self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]}, success=False, error_code=-8, error_message='End of range is too high') self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [2, 1]}, success=False, error_code=-8, error_message='Range specified as [begin,end] must not have begin after end') self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now", "range": [0, 1000001]}, success=False, error_code=-8, error_message='Range is too large') # Test importing a descriptor containing a WIF private key wif_priv = "cTe1f5rdT8A8DFgVWTjyPwACsDPJM9ff4QngFxUixCSvvbg1x6sh" # Note: in Core's test, this address refers to the sh(wpkh()) address. # For a sh(pkh()) this does not refer to a key, so we use the subscript # address instead, which returns the same privkey. 
address = "ecregtest:qzh6rch6st3wjvp0h2ud87gn7xnxvf6h8yrk8gcg8t" desc = "sh(pkh(" + wif_priv + "))" self.log.info( "Should import a descriptor with a WIF private key as spendable") self.test_importmulti({"desc": descsum_create(desc), "timestamp": "now"}, success=True) test_address(self.nodes[1], address, solvable=True, ismine=True) # dump the private key to ensure it matches what was imported privkey = self.nodes[1].dumpprivkey(address) assert_equal(privkey, wif_priv) # Test importing of a P2PKH address via descriptor key = get_key(self.nodes[0]) p2pkh_label = "P2PKH descriptor import" self.log.info("Should import a p2pkh address from descriptor") self.test_importmulti({"desc": descsum_create("pkh(" + key.pubkey + ")"), "timestamp": "now", "label": p2pkh_label}, success=True, warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."]) test_address(self.nodes[1], key.p2pkh_addr, solvable=True, ismine=False, labels=[p2pkh_label]) # Test import fails if both desc and scriptPubKey are provided key = get_key(self.nodes[0]) self.log.info( "Import should fail if both scriptPubKey and desc are provided") self.test_importmulti({"desc": descsum_create("pkh(" + key.pubkey + ")"), "scriptPubKey": {"address": key.p2pkh_addr}, "timestamp": "now"}, success=False, error_code=-8, error_message='Both a descriptor and a scriptPubKey should not be provided.') # Test import fails if neither desc nor scriptPubKey are present key = get_key(self.nodes[0]) self.log.info( "Import should fail if neither a descriptor nor a scriptPubKey are provided") self.test_importmulti({"timestamp": "now"}, success=False, error_code=-8, error_message='Either a descriptor or scriptPubKey must be provided.') # Test importing of a multisig via descriptor key1 = get_key(self.nodes[0]) key2 = get_key(self.nodes[0]) self.log.info("Should import a 1-of-2 bare multisig from descriptor") self.test_importmulti({"desc": descsum_create("multi(1," + key1.pubkey + "," + key2.pubkey + ")"), "timestamp": "now"}, success=True, warnings=["Some private keys are missing, outputs will be considered watchonly. 
If this is intentional, specify the watchonly flag."]) self.log.info( "Should not treat individual keys from the imported bare multisig as watchonly") test_address(self.nodes[1], key1.p2pkh_addr, ismine=False, iswatchonly=False) # Import pubkeys with key origin info self.log.info( "Addresses should have hd keypath and master key id after import with key origin") pub_addr = self.nodes[1].getnewaddress() pub_addr = self.nodes[1].getnewaddress() info = self.nodes[1].getaddressinfo(pub_addr) pub = info['pubkey'] pub_keypath = info['hdkeypath'] pub_fpr = info['hdmasterfingerprint'] result = self.nodes[0].importmulti( [{ 'desc': descsum_create("pkh([" + pub_fpr + pub_keypath[1:] + "]" + pub + ")"), "timestamp": "now", }] ) assert result[0]['success'] pub_import_info = self.nodes[0].getaddressinfo(pub_addr) assert_equal(pub_import_info['hdmasterfingerprint'], pub_fpr) assert_equal(pub_import_info['pubkey'], pub) assert_equal(pub_import_info['hdkeypath'], pub_keypath) # Import privkeys with key origin info priv_addr = self.nodes[1].getnewaddress() info = self.nodes[1].getaddressinfo(priv_addr) priv = self.nodes[1].dumpprivkey(priv_addr) priv_keypath = info['hdkeypath'] priv_fpr = info['hdmasterfingerprint'] result = self.nodes[0].importmulti( [{ 'desc': descsum_create("pkh([" + priv_fpr + priv_keypath[1:] + "]" + priv + ")"), "timestamp": "now", }] ) assert result[0]['success'] priv_import_info = self.nodes[0].getaddressinfo(priv_addr) assert_equal(priv_import_info['hdmasterfingerprint'], priv_fpr) assert_equal(priv_import_info['hdkeypath'], priv_keypath) # Make sure the key origin info are still there after a restart self.stop_nodes() self.start_nodes() import_info = self.nodes[0].getaddressinfo(pub_addr) assert_equal(import_info['hdmasterfingerprint'], pub_fpr) assert_equal(import_info['hdkeypath'], pub_keypath) import_info = self.nodes[0].getaddressinfo(priv_addr) assert_equal(import_info['hdmasterfingerprint'], priv_fpr) assert_equal(import_info['hdkeypath'], priv_keypath) # Check legacy import does not import key origin info self.log.info("Legacy imports don't have key origin info") pub_addr = self.nodes[1].getnewaddress() info = self.nodes[1].getaddressinfo(pub_addr) pub = info['pubkey'] result = self.nodes[0].importmulti( [{ 'scriptPubKey': {'address': pub_addr}, 'pubkeys': [pub], "timestamp": "now", }] ) assert result[0]['success'] pub_import_info = self.nodes[0].getaddressinfo(pub_addr) assert_equal(pub_import_info['pubkey'], pub) assert 'hdmasterfingerprint' not in pub_import_info assert 'hdkeypath' not in pub_import_info # Import some public keys to the keypool of a no privkey wallet self.log.info("Adding pubkey to keypool of disableprivkey wallet") self.nodes[1].createwallet( wallet_name="noprivkeys", disable_private_keys=True) wrpc = self.nodes[1].get_wallet_rpc("noprivkeys") addr1 = self.nodes[0].getnewaddress() addr2 = self.nodes[0].getnewaddress() pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey'] pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey'] result = wrpc.importmulti( [{ 'desc': descsum_create('pkh(' + pub1 + ')'), 'keypool': True, "timestamp": "now", }, { 'desc': descsum_create('pkh(' + pub2 + ')'), 'keypool': True, "timestamp": "now", }] ) assert result[0]['success'] assert result[1]['success'] assert_equal(wrpc.getwalletinfo()["keypoolsize"], 2) newaddr1 = wrpc.getnewaddress() assert_equal(addr1, newaddr1) newaddr2 = wrpc.getnewaddress() assert_equal(addr2, newaddr2) # Import some public keys to the internal keypool of a no privkey # wallet self.log.info( "Adding 
pubkey to internal keypool of disableprivkey wallet") addr1 = self.nodes[0].getnewaddress() addr2 = self.nodes[0].getnewaddress() pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey'] pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey'] result = wrpc.importmulti( [{ 'desc': descsum_create('pkh(' + pub1 + ')'), 'keypool': True, 'internal': True, "timestamp": "now", }, { 'desc': descsum_create('pkh(' + pub2 + ')'), 'keypool': True, 'internal': True, "timestamp": "now", }] ) assert result[0]['success'] assert result[1]['success'] assert_equal(wrpc.getwalletinfo()["keypoolsize_hd_internal"], 2) newaddr1 = wrpc.getrawchangeaddress() assert_equal(addr1, newaddr1) newaddr2 = wrpc.getrawchangeaddress() assert_equal(addr2, newaddr2) # Import a multisig and make sure the keys don't go into the keypool self.log.info( 'Imported scripts with pubkeys shoud not have their pubkeys go into the keypool') addr1 = self.nodes[0].getnewaddress() addr2 = self.nodes[0].getnewaddress() pub1 = self.nodes[0].getaddressinfo(addr1)['pubkey'] pub2 = self.nodes[0].getaddressinfo(addr2)['pubkey'] result = wrpc.importmulti( [{ 'desc': descsum_create('sh(multi(2,' + pub1 + ',' + pub2 + '))'), 'keypool': True, "timestamp": "now", }] ) assert result[0]['success'] assert_equal(wrpc.getwalletinfo()["keypoolsize"], 0) # Cannot import those pubkeys to keypool of wallet with privkeys self.log.info( "Pubkeys cannot be added to the keypool of a wallet with private keys") wrpc = self.nodes[1].get_wallet_rpc(self.default_wallet_name) assert wrpc.getwalletinfo()['private_keys_enabled'] result = wrpc.importmulti( [{ 'desc': descsum_create('pkh(' + pub1 + ')'), 'keypool': True, "timestamp": "now", }] ) assert_equal(result[0]['error']['code'], -8) assert_equal( result[0]['error']['message'], "Keys can only be imported to the keypool when private keys are disabled") # Make sure ranged imports import keys in order self.log.info('Key ranges should be imported in order') wrpc = self.nodes[1].get_wallet_rpc("noprivkeys") assert_equal(wrpc.getwalletinfo()["keypoolsize"], 0) assert_equal(wrpc.getwalletinfo()["private_keys_enabled"], False) xpub = "tpubDAXcJ7s7ZwicqjprRaEWdPoHKrCS215qxGYxpusRLLmJuT69ZSicuGdSfyvyKpvUNYBW1s2U3NSrT6vrCYB9e6nZUEvrqnwXPF8ArTCRXMY" addresses = [ 'ecregtest:qp0v86h53rc92hjrlpwzpjtdlgzsxu25svv6g40fpl', # m/0'/0'/0 'ecregtest:qqasy0zlkdleqt4pkn8fs4ehm5gnnz6qpgdcpt90fq', # m/0'/0'/1 'ecregtest:qp0sp4wlhctvprqvdt2dgvqcfdjssu04xgey0l3syw', # m/0'/0'/2 'ecregtest:qrhn24tegn04cptfv4ldhtkduxq55zcwrycjfdj9vr', # m/0'/0'/3 'ecregtest:qzpqhett2uwltq803vrxv7zkqhft5vsnmcjeh50v0p', # m/0'/0'/4 ] result = wrpc.importmulti( [{ 'desc': descsum_create('pkh([80002067/0h/0h]' + xpub + '/*)'), 'keypool': True, 'timestamp': 'now', 'range': [0, 4], }] ) self.log.info(result) for i in range(0, 5): addr = wrpc.getnewaddress('') assert_equal(addr, addresses[i])
Bitcoin-ABC/bitcoin-abc
[ 1114, 682, 1114, 71, 1493632695 ]
def make_data(cuda=False): train_x = torch.linspace(0, 1, 100) train_y = torch.sin(train_x * (2 * pi)) train_y.add_(torch.randn_like(train_y), alpha=1e-2) test_x = torch.rand(51) test_y = torch.sin(test_x * (2 * pi)) if cuda: train_x = train_x.cuda() train_y = train_y.cuda() test_x = test_x.cuda() test_y = test_y.cuda() return train_x, train_y, test_x, test_y
jrg365/gpytorch
[ 3035, 485, 3035, 323, 1497019700 ]
def __init__(self, train_x, train_y, likelihood): super(GPRegressionModel, self).__init__(train_x, train_y, likelihood) self.mean_module = ConstantMean(prior=SmoothedBoxPrior(-1e-5, 1e-5)) self.base_covar_module = ScaleKernel(RBFKernel(lengthscale_prior=SmoothedBoxPrior(exp(-5), exp(6), sigma=0.1))) self.covar_module = InducingPointKernel( self.base_covar_module, inducing_points=torch.linspace(0, 1, 32), likelihood=likelihood )
jrg365/gpytorch
[ 3035, 485, 3035, 323, 1497019700 ]
def setUp(self): if os.getenv("UNLOCK_SEED") is None or os.getenv("UNLOCK_SEED").lower() == "false": self.rng_state = torch.get_rng_state() torch.manual_seed(0) if torch.cuda.is_available(): torch.cuda.manual_seed_all(0) random.seed(0)
jrg365/gpytorch
[ 3035, 485, 3035, 323, 1497019700 ]
def test_sgpr_mean_abs_error(self): # Suppress numerical warnings warnings.simplefilter("ignore", NumericalWarning) train_x, train_y, test_x, test_y = make_data() likelihood = GaussianLikelihood() gp_model = GPRegressionModel(train_x, train_y, likelihood) mll = gpytorch.mlls.ExactMarginalLogLikelihood(likelihood, gp_model) # Optimize the model gp_model.train() likelihood.train() optimizer = optim.Adam(gp_model.parameters(), lr=0.1) for _ in range(30): optimizer.zero_grad() output = gp_model(train_x) loss = -mll(output, train_y) loss.backward() optimizer.step() for param in gp_model.parameters(): self.assertTrue(param.grad is not None) self.assertGreater(param.grad.norm().item(), 0) # Test the model gp_model.eval() likelihood.eval() test_preds = likelihood(gp_model(test_x)).mean mean_abs_error = torch.mean(torch.abs(test_y - test_preds)) self.assertLess(mean_abs_error.squeeze().item(), 0.05)
jrg365/gpytorch
[ 3035, 485, 3035, 323, 1497019700 ]
def test_sgpr_mean_abs_error_cuda(self): # Suppress numerical warnings warnings.simplefilter("ignore", NumericalWarning) if not torch.cuda.is_available(): return with least_used_cuda_device(): train_x, train_y, test_x, test_y = make_data(cuda=True) likelihood = GaussianLikelihood().cuda() gp_model = GPRegressionModel(train_x, train_y, likelihood).cuda() mll = gpytorch.mlls.ExactMarginalLogLikelihood(likelihood, gp_model) # Optimize the model gp_model.train() likelihood.train() optimizer = optim.Adam(gp_model.parameters(), lr=0.1) optimizer.n_iter = 0 for _ in range(25): optimizer.zero_grad() output = gp_model(train_x) loss = -mll(output, train_y) loss.backward() optimizer.n_iter += 1 optimizer.step() for param in gp_model.parameters(): self.assertTrue(param.grad is not None) self.assertGreater(param.grad.norm().item(), 0) # Test the model gp_model.eval() likelihood.eval() test_preds = likelihood(gp_model(test_x)).mean mean_abs_error = torch.mean(torch.abs(test_y - test_preds)) self.assertLess(mean_abs_error.squeeze().item(), 0.02)
jrg365/gpytorch
[ 3035, 485, 3035, 323, 1497019700 ]
def fill_initial_data(self, *args, **kwargs): # Pass initial data for start and stop to their SplitDateTimeField clones if 'start' in kwargs['initial']: kwargs['initial']['start_datetime'] = kwargs['initial']['start'] if 'stop' in kwargs['initial']: kwargs['initial']['stop_datetime'] = kwargs['initial']['stop']
sveetch/django-datebook
[ 2, 1, 2, 6, 1363741906 ]
def init_fields(self, *args, **kwargs): self.fields['start_datetime'] = forms.SplitDateTimeField(label=_('start'), **DATETIME_FORMATS) self.fields['stop_datetime'] = forms.SplitDateTimeField(label=_('stop'), **DATETIME_FORMATS)
sveetch/django-datebook
[ 2, 1, 2, 6, 1363741906 ]
def clean_content(self): """ Text content validation """ content = self.cleaned_data.get("content") validation_helper = safe_import_module(settings.DATEBOOK_TEXT_VALIDATOR_HELPER_PATH) if validation_helper is not None: return validation_helper(self, content) else: return content
sveetch/django-datebook
[ 2, 1, 2, 6, 1363741906 ]
def clean_start_datetime(self): start = self.cleaned_data['start_datetime'] # Day entry can't start before the targeted day date if start and start.date() < self.daydate: raise forms.ValidationError(_("You can't start a day before itself")) # Day entry can't start after the targeted day date if start and start.date() > self.daydate: raise forms.ValidationError(_("You can't start a day after itself"))
sveetch/django-datebook
[ 2, 1, 2, 6, 1363741906 ]
def clean_stop_datetime(self): start = self.cleaned_data.get('start_datetime') stop = self.cleaned_data['stop_datetime'] # Day entry can't stop before the start if start and stop and stop <= start: raise forms.ValidationError(_("Stop time can't be less or equal to start time")) # Day entry can't stop in more than one future day from the targeted day date if stop and stop.date() > Arrow.fromdate(self.daydate).replace(days=1).date(): raise forms.ValidationError(_("Stop time can't be more than the next day"))
sveetch/django-datebook
[ 2, 1, 2, 6, 1363741906 ]
def __init__(self, datebook, day, *args, **kwargs): self.datebook = datebook self.daydate = datebook.period.replace(day=day)
sveetch/django-datebook
[ 2, 1, 2, 6, 1363741906 ]
def clean(self): cleaned_data = super(DayBaseFormMixin, self).clean() content = cleaned_data.get("content") vacation = cleaned_data.get("vacation") # Content text is only required when vacation is not checked if not vacation and not content: raise forms.ValidationError(_("Worked days require a content text"))
sveetch/django-datebook
[ 2, 1, 2, 6, 1363741906 ]
def save(self, *args, **kwargs): instance = super(DayEntryForm, self).save(commit=False, *args, **kwargs) instance.start = self.cleaned_data['start_datetime'] instance.stop = self.cleaned_data['stop_datetime'] instance.datebook = self.datebook instance.activity_date = self.daydate instance.save()
sveetch/django-datebook
[ 2, 1, 2, 6, 1363741906 ]
def clean(self): cleaned_data = super(DayEntryCreateForm, self).clean()
sveetch/django-datebook
[ 2, 1, 2, 6, 1363741906 ]
def remote_convolution(image, kernel, host_id):

    """
    This function ...
    :param image:
    :param kernel:
    :param host_id:
    """

    # Check whether we are already connected to the specified remote host
    if host_id in connected_remotes and connected_remotes[host_id] is not None:
        remote = connected_remotes[host_id]
    else:

        # Debugging
        log.debug("Logging in to remote host ...")

        # Create a remote instance for the specified host ID
        remote = Remote()
        remote.setup(host_id)

    # Debugging
    log.debug("Creating temporary directory remotely ...")

    # Create a temporary directory to do the convolution
    remote_home_directory = remote.home_directory
    remote_temp_path = fs.join(remote_home_directory, time.unique_name("convolution"))
    remote.create_directory(remote_temp_path)

    # Debugging
    #log.debug("Uploading the kernel to the remote directory ...")

    # Upload the kernel FITS file to the remote directory
    #remote_kernel_path = fs.join(remote_temp_path, "kernel.fits")
    #remote.upload(kernel_path, remote_temp_path, new_name="kernel.fits", compress=True, show_output=True)

    # Debugging
    log.debug("Creating a local temporary directory ...")

    # Create a temporary directory locally to contain the frames
    local_temp_path = fs.join(fs.home(), time.unique_name("convolution"))
    fs.create_directory(local_temp_path)

    # Debugging
    log.debug("Saving the image frames to the temporary directory ...")

    # Save the frames
    local_frame_paths = []
    constant_frames = []
    for frame_name in image.frames:

        frame_path = fs.join(local_temp_path, frame_name + ".fits")

        # Only upload and convolve non-constant frames
        if not image.frames[frame_name].is_constant():

            image.frames[frame_name].save(frame_path)
            local_frame_paths.append(frame_path)

        else:

            log.debug("The " + frame_name + " frame is constant, so this won't be uploaded and convolved")
            constant_frames.append(frame_name)

    # Debugging
    log.debug("Saving the kernel to the temporary directory ...")

    local_kernel_path = fs.join(local_temp_path, "kernel.fits")
    kernel.save(local_kernel_path)

    # Debugging
    log.debug("Uploading the image frames to the remote directory ...")

    # Upload the frames
    remote_frame_paths = []
    for local_frame_path in local_frame_paths:

        # Determine the name of the local frame file
        frame_file_name = fs.name(local_frame_path)

        # Debugging
        log.debug("Uploading the " + fs.strip_extension(frame_file_name) + " frame ...")

        # Upload the frame file
        remote_frame_path = fs.join(remote_temp_path, frame_file_name)
        remote.upload(local_frame_path, remote_temp_path, new_name=frame_file_name, compress=True, show_output=True)
        remote_frame_paths.append(remote_frame_path)

    # Debugging
    log.debug("Uploading the kernel to the remote directory ...")

    # Upload the kernel
    remote_kernel_path = fs.join(remote_temp_path, "kernel.fits")
    remote.upload(local_kernel_path, remote_temp_path, new_name="kernel.fits", compress=True, show_output=True)

    # Debugging
    log.debug("Creating a python script to perform the convolution remotely ...")

    # Create a python script that does the convolution
    #script_file = tempfile.NamedTemporaryFile()
    #local_script_path = script_file.name
    local_script_path = fs.join(local_temp_path, "convolve.py")
    script_file = open(local_script_path, 'w')

    script_file.write("#!/usr/bin/env python\n")
    script_file.write("# -*- coding: utf8 -*-\n")
    script_file.write("\n")
    script_file.write("# Import astronomical modules\n")
    script_file.write("from astropy.units import Unit\n")
    script_file.write("\n")
    script_file.write("# Import the relevant PTS classes and modules\n")
    script_file.write("from pts.magic.core.frame import Frame\n")
    script_file.write("from pts.magic.core.image import Image\n")
    script_file.write("from pts.magic.core.kernel import ConvolutionKernel\n")
    script_file.write("from pts.core.tools.logging import log\n")
    script_file.write("\n")
    script_file.write("# Inform the user\n")
    script_file.write("log.info('Opening the kernel frame ...')\n")
    script_file.write("\n")
    script_file.write("# Open the kernel\n")
    script_file.write("kernel = ConvolutionKernel.from_file('" + remote_kernel_path + "')\n")
    script_file.write("\n")

    for remote_frame_path in remote_frame_paths:

        frame_name = fs.strip_extension(fs.name(remote_frame_path))

        script_file.write("# Inform the user\n")
        script_file.write("log.info('Opening the " + frame_name + " frame ...')\n")
        script_file.write("\n")
        script_file.write("# Open the frame\n")
        script_file.write("frame = Frame.from_file('" + remote_frame_path + "')\n")
        script_file.write("\n")
        script_file.write("# Inform the user\n")
        script_file.write("log.info('Convolving the " + frame_name + " frame ...')\n")
        script_file.write("\n")
        script_file.write("# Do the convolution and save the result\n")
        script_file.write("frame.convolve(kernel, allow_huge=True)\n")
        script_file.write("frame.save('" + remote_frame_path + "')\n") # overwrite the frame
        script_file.write("\n")

    #script_file.write("# Save the image\n")
    #script_file.write("image.save(" + remote_image_path + ")\n")

    # Write to disk
    #script_file.flush()
    script_file.close()

    # Debugging
    log.debug("Uploading the python script ...")

    # Upload the script file
    remote_script_path = fs.join(remote_temp_path, "convolve.py")
    remote.upload(local_script_path, remote_temp_path, new_name="convolve.py", show_output=True)

    # Close the local script (it is automatically removed)
    #script_file.close()

    # Debugging
    log.debug("Executing the script remotely ...")

    # Execute the script file remotely
    remote.execute("python " + remote_script_path, output=False, show_output=True)

    # Debugging
    log.debug("Downloading the results ...")

    # Download the resulting FITS file (the convolved image)
    #local_result_path = self.full_output_path("convolved.fits")
    #remote.download(remote_image_path, fs.directory_of(local_result_path), new_name="convolved.fits", compress=True)
    for remote_frame_path in remote_frame_paths:

        # Determine the name of the local frame file
        frame_file_name = fs.name(remote_frame_path)

        # Debugging
        log.debug("Downloading the " + fs.strip_extension(frame_file_name) + " frame ...")

        # Download
        remote.download(remote_frame_path, local_temp_path, new_name=frame_file_name, compress=True, show_output=True)

    # Remove the temporary directory on the remote's filesystem
    remote.remove_directory(remote_temp_path)

    # Load the result
    #self.image = Image.from_file(local_result_path)
    for frame_name in image.frames.keys():

        if frame_name in constant_frames: continue # Skip constant frames, these are not convolved

        local_frame_path = fs.join(local_temp_path, frame_name + ".fits")
        image.frames[frame_name] = Frame.from_file(local_frame_path)

    # Remove the local temporary directory
    fs.remove_directory(local_temp_path)
Stargrazer82301/CAAPR
[ 8, 2, 8, 1, 1453231962 ]
def remote_convolution_frame(frame, kernel_path, host_id):

    """
    This function ...
    :param frame:
    :param kernel_path:
    :param host_id:
    :return:
    """

    # Check whether the frame is constant. If it is, we don't have to convolve!
    if frame.is_constant(): return frame.copy()

    # Check whether we are already connected to the specified remote host
    if host_id in connected_remotes and connected_remotes[host_id] is not None:
        remote = connected_remotes[host_id]
    else:

        # Debugging
        log.debug("Logging in to remote host ...")

        # Create a remote instance for the specified host ID
        remote = Remote()
        remote.setup(host_id)

    # Debugging
    log.debug("Creating temporary directory remotely ...")

    # Create a temporary directory to do the convolution
    remote_home_directory = remote.home_directory
    remote_temp_path = fs.join(remote_home_directory, time.unique_name("convolution"))
    remote.create_directory(remote_temp_path)

    # Debugging
    log.debug("Creating local temporary directory ...")

    # Create a temporary directory locally to contain the frames
    local_temp_path = fs.join(fs.home(), time.unique_name("convolution"))
    fs.create_directory(local_temp_path)

    # Debugging
    log.debug("Writing the frame to the temporary directory ...")

    # Write the frame
    local_frame_path = fs.join(local_temp_path, frame.name + ".fits")
    frame.save(local_frame_path)

    # Debugging
    #log.debug("Writing the kernel to the temporary directory ...")

    # Write the kernel
    #local_kernel_path = fs.join(local_temp_path, "kernel.fits")
    #kernel.save(local_kernel_path)

    # Debugging
    log.debug("Uploading the frame to the remote directory ...")

    # Upload the frame file
    remote_frame_path = fs.join(remote_temp_path, frame.name)
    remote.upload(local_frame_path, remote_temp_path, new_name=frame.name, compress=True, show_output=True)

    # Debugging
    #log.debug("Uploading the kernel to the remote directory ...")

    # Upload the kernel FITS file to the remote directory
    #remote_kernel_path = fs.join(remote_temp_path, "kernel.fits")
    #remote.upload(local_kernel_path, remote_temp_path, new_name="kernel.fits", compress=True, show_output=True)

    # Debugging
    log.debug("Uploading the kernel to the remote directory ...")

    # Upload the kernel FITS file to the remote directory
    remote_kernel_path = fs.join(remote_temp_path, "kernel.fits")
    remote.upload(kernel_path, remote_temp_path, new_name="kernel.fits", compress=True, show_output=True)

    # Debugging
    log.debug("Creating a python script to perform the convolution remotely ...")

    # Create the script
    local_script_path = fs.join(local_temp_path, "convolve.py")
    script_file = open(local_script_path, 'w')

    script_file.write("#!/usr/bin/env python\n")
    script_file.write("# -*- coding: utf8 -*-\n")
    script_file.write("\n")
    script_file.write("# Import the relevant PTS classes and modules\n")
    script_file.write("from pts.magic.core.frame import Frame\n")
    script_file.write("from pts.core.tools.logging import log\n")
    script_file.write("\n")
    script_file.write("# Inform the user\n")
    script_file.write("log.info('Opening the kernel frame ...')\n")
    script_file.write("\n")
    script_file.write("# Open the kernel frame\n")
    script_file.write("kernel = Frame.from_file('" + remote_kernel_path + "')\n")
    script_file.write("\n")
    script_file.write("# Inform the user\n")
    script_file.write("log.info('Opening the frame ...')\n")
    script_file.write("\n")
    script_file.write("# Open the frame\n")
    script_file.write("frame = Frame.from_file('" + remote_frame_path + "')\n")
    script_file.write("\n")
    script_file.write("# Inform the user\n")
    script_file.write("log.info('Convolving the frame ...')\n")
    script_file.write("\n")
    script_file.write("# Do the convolution and save the result\n")
    script_file.write("convolved = frame.convolved(kernel, allow_huge=True)\n")
    script_file.write("convolved.save('" + remote_frame_path + "')\n") # overwrite the frame

    # Write to disk
    script_file.close()

    # Debugging
    log.debug("Uploading the python script ...")

    # Upload the script file
    remote_script_path = fs.join(remote_temp_path, "convolve.py")
    remote.upload(local_script_path, remote_temp_path, new_name="convolve.py", show_output=True)

    # Debugging
    log.debug("Executing the script remotely ...")

    # Execute the script file remotely
    remote.execute("python " + remote_script_path, output=False, show_output=True)

    # Debugging
    log.debug("Downloading the result ...")

    # Determine the name of the local frame file
    frame_file_name = fs.name(remote_frame_path)

    # Debugging
    log.debug("Downloading the " + fs.strip_extension(frame_file_name) + " frame ...")

    # Download
    remote.download(remote_frame_path, local_temp_path, new_name=frame_file_name, compress=True, show_output=True)

    # Remove the temporary directory on the remote's filesystem
    remote.remove_directory(remote_temp_path)

    # Load the convolved frame
    convolved = Frame.from_file(local_frame_path)

    # Remove the local temporary directory
    fs.remove_directory(local_temp_path)

    # Return the convolved frame
    return convolved
Stargrazer82301/CAAPR
[ 8, 2, 8, 1, 1453231962 ]
def remote_filter_convolution_no_pts(host_id, datacube_path, wavelengths, filters):

    """
    This function ...
    :param host_id:
    :param datacube_path:
    :param wavelengths:
    :param filters:
    :return:
    """

    # Check whether we are already connected to the specified remote host
    if host_id in connected_remotes and connected_remotes[host_id] is not None:
        remote = connected_remotes[host_id]
    else:

        # Debugging
        log.debug("Logging in to remote host ...")

        # Create a remote instance for the specified host ID
        remote = Remote()
        remote.setup(host_id)

    # Debugging
    log.debug("Creating temporary directory remotely ...")

    # Create a temporary directory to do the convolution
    remote_home_directory = remote.home_directory
    remote_temp_path = fs.join(remote_home_directory, time.unique_name("filter-convolution"))
    remote.create_directory(remote_temp_path)

    # Debugging
    log.debug("Creating local temporary directory ...")

    # Create a temporary directory locally to contain the frames
    local_temp_path = fs.join(fs.home(), time.unique_name("filter-convolution"))
    fs.create_directory(local_temp_path)

    integrated_transmissions = dict()

    # Loop over the filters
    for fltr in filters:

        # Get the transmission data
        fltr_wavelengths = fltr._Wavelengths
        fltr_transmission = fltr._Transmission
        fltr_integrated_transmission = fltr._IntegratedTransmission
        integrated_transmissions[fltr.name] = fltr_integrated_transmission

        # Save the transmission data
        path = fs.join(local_temp_path, "transmission__" + str(fltr) + ".dat")
        np.savetxt(path, (fltr_wavelengths, fltr_transmission))

    #print(integrated_transmissions)
    #print(local_temp_path)

    integrated_path = fs.join(local_temp_path, "integrated_transmissions.txt")
    with open(integrated_path, 'w') as integrated_trans_file:
        for fltr_name in integrated_transmissions:
            integrated_trans_file.write(fltr_name + ": " + str(integrated_transmissions[fltr_name]) + "\n")

    # NOT FINISHED ...
Stargrazer82301/CAAPR
[ 8, 2, 8, 1, 1453231962 ]
def remote_filter_convolution(host_id, datacube_path, wavelengths, filters, keep_output=False):

    """
    This function ...
    :param host_id:
    :param datacube_path:
    :param wavelengths:
    :param filters:
    :param keep_output:
    :return:
    """

    # Check whether we are already connected to the specified remote host
    if host_id in connected_remotes and connected_remotes[host_id] is not None:
        remote = connected_remotes[host_id]
    else:

        # Debugging
        log.debug("Logging in to remote host ...")

        # Create a remote instance for the specified host ID
        remote = Remote()
        remote.setup(host_id)

    # Debugging
    log.debug("Creating temporary directory remotely ...")

    # Create a temporary directory to do the convolution
    remote_home_directory = remote.home_directory
    remote_temp_path = fs.join(remote_home_directory, time.unique_name("filter-convolution"))
    remote.create_directory(remote_temp_path)

    # Debugging
    log.debug("Creating local temporary directory ...")

    # Create a temporary directory locally to contain the frames
    local_temp_path = fs.join(fs.home(), time.unique_name("filter-convolution"))
    fs.create_directory(local_temp_path)

    # Debugging
    log.debug("Uploading the datacube to the temporary remote directory ...")

    # Upload the frame file
    datacube_name = fs.name(datacube_path)
    remote_datacube_path = fs.join(remote_temp_path, datacube_name)
    remote.upload(datacube_path, remote_temp_path, compress=True, show_output=True)

    # Debugging
    log.debug("Writing the wavelengths to the temporary local directory ...")

    local_wavelengths_path = fs.join(local_temp_path, "wavelengths.txt")
    np.savetxt(local_wavelengths_path, wavelengths)

    # Debugging
    log.debug("Uploading the wavelengths file to the remote directory ...")

    # Upload the kernel FITS file to the remote directory
    remote_wavelengths_path = fs.join(remote_temp_path, "wavelengths.txt")
    remote.upload(local_wavelengths_path, remote_temp_path, compress=True, show_output=True)

    # Debugging
    log.debug("Creating a python script to perform the filter convolution remotely ...")

    # Create the script
    local_script_path = fs.join(local_temp_path, "make_images.py")
    script_file = open(local_script_path, 'w')

    script_file.write("#!/usr/bin/env python\n")
    script_file.write("# -*- coding: utf8 -*-\n")
    script_file.write("\n")
    script_file.write("# Import standard modules\n")
    script_file.write("import numpy as np\n")
    script_file.write("\n")
    script_file.write("# Import the relevant PTS classes and modules\n")
    script_file.write("from pts.magic.core.image import Image\n")
    script_file.write("from pts.magic.core.frame import Frame\n")
    script_file.write("from pts.core.basics.filter import Filter\n")
    script_file.write("from pts.core.tools.logging import log\n")
    script_file.write("from pts.core.tools import filesystem as fs\n")
    script_file.write("\n")
    script_file.write("# Inform the user\n")
    script_file.write("log.info('Loading the datacube ...')\n")
    script_file.write("\n")
    script_file.write("# Open the datacube as an Image\n")
    script_file.write("datacube = Image.from_file('" + remote_datacube_path + "', always_call_first_primary=False)\n")
    script_file.write("\n")
    script_file.write("# Inform the user\n")
    script_file.write("log.info('Loading the wavelengths ...')\n")
    script_file.write("\n")
    script_file.write("# Load the wavelengths from the text file\n")
    script_file.write("wavelengths = np.loadtxt('" + remote_wavelengths_path + "')\n")
    script_file.write("\n")
    script_file.write("# Convert the frames from neutral surface brightness to wavelength surface brightness\n")
    script_file.write("for l in range(len(wavelengths)):\n")
    script_file.write("\n")
    script_file.write("    # Get the wavelength\n")
    script_file.write("    wavelength = wavelengths[l]\n")
    script_file.write("\n")
    script_file.write("    # Determine the name of the frame in the datacube\n")
    script_file.write("    frame_name = 'frame' + str(l)\n")
    script_file.write("\n")
    script_file.write("    # Divide this frame by the wavelength in micron\n")
    script_file.write("    datacube.frames[frame_name] /= wavelength\n")
    script_file.write("\n")
    script_file.write("    # Set the new unit\n")
    script_file.write("    datacube.frames[frame_name].unit = 'W / (m2 * arcsec2 * micron)'\n")
    script_file.write("\n")
    script_file.write("# Convert the datacube to a numpy array where wavelength is the third dimension\n")
    script_file.write("fluxdensities = datacube.asarray()\n")
    script_file.write("\n")
    script_file.write("# Inform the user\n")
    script_file.write("log.info('Creating the filters ...')\n")
    script_file.write("\n")
    script_file.write("filters = dict()\n")
    script_file.write("\n")

    for filter_name in filters:

        fltr = filters[filter_name]

        script_file.write("# Inform the user\n")
        script_file.write("log.info('Creating the " + str(fltr) + " filter')\n")
        script_file.write("\n")
        script_file.write("fltr = Filter.from_string('" + str(fltr) + "')\n")
        script_file.write("filters['" + filter_name + "'] = fltr\n")
        script_file.write("\n")

    script_file.write("# Inform the user\n")
    script_file.write("log.info('Performing the filter convolutions ...')\n")
    script_file.write("\n")
    script_file.write("# Loop over the filters, perform the convolution\n")
    script_file.write("for filter_name in filters:\n")
    script_file.write("\n")
    script_file.write("    log.info('Making the observed image for the ' + str(fltr) + ' filter ...')\n")
    script_file.write("    fltr = filters[filter_name]\n")
    script_file.write("    data = fltr.convolve(wavelengths, fluxdensities)\n")
    script_file.write("    frame = Frame(data)\n")
    script_file.write("    frame.unit = 'W/(m2 * arcsec2 * micron)'\n")
    script_file.write("    path = fs.join('" + remote_temp_path + "', filter_name + '.fits')\n")
    script_file.write("    frame.save(path)\n")

    # Write to disk
    script_file.close()

    # Debugging
    log.debug("Uploading the python script ...")

    # Upload the script file
    remote_script_path = fs.join(remote_temp_path, "make_images.py")
    remote.upload(local_script_path, remote_temp_path, new_name="make_images.py", show_output=True)

    # Debugging
    log.debug("Executing the script remotely ...")

    # Execute the script file remotely
    remote.execute("python " + remote_script_path, output=False, show_output=True)

    # Remove the datacube in the remote directory
    remote.remove_file(remote_datacube_path)

    # Debugging
    log.debug("Downloading the convolved frames ...")

    # Download
    local_downloaded_temp_path = fs.join(fs.home(), fs.name(remote_temp_path))
    fs.create_directory(local_downloaded_temp_path)
    remote.download(remote_temp_path, local_downloaded_temp_path, compress=True, show_output=True)

    # Remove the temporary directory on the remote's filesystem
    remote.remove_directory(remote_temp_path)

    # Remove the local temporary directory
    fs.remove_directory(local_temp_path)

    # Create a dictionary to contain the frames
    frames = dict()

    # Loop over the filters, load the frame
    for filter_name in filters:

        # Determine the path to the resulting FITS file
        path = fs.join(local_downloaded_temp_path, filter_name + ".fits")

        # Check whether the frame exists
        if not fs.is_file(path): raise RuntimeError("The image for filter " + str(filters[filter_name]) + " is missing")

        # Load the FITS file
        frame = Frame.from_file(path)

        # Add the frame to the dictionary
        frames[filter_name] = frame

    # Remove the downloaded temporary directory
    if not keep_output: fs.remove_directory(local_downloaded_temp_path)

    # Return the dictionary of frames
    return frames
Stargrazer82301/CAAPR
[ 8, 2, 8, 1, 1453231962 ]
def __init__(self):
        if _debug: ReadWritePropertyServices._debug("__init__")
        Capability.__init__(self)
JoelBender/bacpypes
[ 243, 121, 243, 107, 1436992431 ]
def do_WritePropertyRequest(self, apdu):
        """Change the value of some property of one of our objects."""
        if _debug: ReadWritePropertyServices._debug("do_WritePropertyRequest %r", apdu)

        # get the object
        obj = self.get_object_id(apdu.objectIdentifier)
        if _debug: ReadWritePropertyServices._debug(" - object: %r", obj)

        if not obj:
            raise ExecutionError(errorClass='object', errorCode='unknownObject')

        try:
            # check if the property exists
            if obj.ReadProperty(apdu.propertyIdentifier, apdu.propertyArrayIndex) is None:
                raise PropertyError(apdu.propertyIdentifier)

            # get the datatype, special case for null
            if apdu.propertyValue.is_application_class_null():
                datatype = Null
            else:
                datatype = obj.get_datatype(apdu.propertyIdentifier)
            if _debug: ReadWritePropertyServices._debug(" - datatype: %r", datatype)

            # special case for array parts, others are managed by cast_out
            if issubclass(datatype, Array) and (apdu.propertyArrayIndex is not None):
                if apdu.propertyArrayIndex == 0:
                    value = apdu.propertyValue.cast_out(Unsigned)
                else:
                    value = apdu.propertyValue.cast_out(datatype.subtype)
            else:
                value = apdu.propertyValue.cast_out(datatype)
            if _debug: ReadWritePropertyServices._debug(" - value: %r", value)

            # change the value
            value = obj.WriteProperty(apdu.propertyIdentifier, value, apdu.propertyArrayIndex, apdu.priority)

            # success
            resp = SimpleAckPDU(context=apdu)
            if _debug: ReadWritePropertyServices._debug(" - resp: %r", resp)

        except PropertyError:
            raise ExecutionError(errorClass='property', errorCode='unknownProperty')

        # return the result
        self.response(resp)
JoelBender/bacpypes
[ 243, 121, 243, 107, 1436992431 ]
def read_property_to_any(obj, propertyIdentifier, propertyArrayIndex=None):
    """Read the specified property of the object, with the optional array index, and cast the result into an Any object."""
    if _debug: read_property_to_any._debug("read_property_to_any %s %r %r", obj, propertyIdentifier, propertyArrayIndex)

    # get the datatype
    datatype = obj.get_datatype(propertyIdentifier)
    if _debug: read_property_to_any._debug(" - datatype: %r", datatype)
    if datatype is None:
        raise ExecutionError(errorClass='property', errorCode='datatypeNotSupported')

    # get the value
    value = obj.ReadProperty(propertyIdentifier, propertyArrayIndex)
    if _debug: read_property_to_any._debug(" - value: %r", value)
    if value is None:
        raise ExecutionError(errorClass='property', errorCode='unknownProperty')

    # change atomic values into something encodeable
    if issubclass(datatype, Atomic) or (issubclass(datatype, (Array, List)) and isinstance(value, list)):
        value = datatype(value)
    elif issubclass(datatype, Array) and (propertyArrayIndex is not None):
        if propertyArrayIndex == 0:
            value = Unsigned(value)
        elif issubclass(datatype.subtype, Atomic):
            value = datatype.subtype(value)
        elif not isinstance(value, datatype.subtype):
            raise TypeError("invalid result datatype, expecting %s and got %s" \
                % (datatype.subtype.__name__, type(value).__name__))
    elif not isinstance(value, datatype):
        raise TypeError("invalid result datatype, expecting %s and got %s" \
            % (datatype.__name__, type(value).__name__))
    if _debug: read_property_to_any._debug(" - encodeable value: %r", value)

    # encode the value
    result = Any()
    result.cast_in(value)
    if _debug: read_property_to_any._debug(" - result: %r", result)

    # return the object
    return result
JoelBender/bacpypes
[ 243, 121, 243, 107, 1436992431 ]
def read_property_to_result_element(obj, propertyIdentifier, propertyArrayIndex=None):
    """Read the specified property of the object, with the optional array index, and cast the result into an Any object."""
    if _debug: read_property_to_result_element._debug("read_property_to_result_element %s %r %r", obj, propertyIdentifier, propertyArrayIndex)

    # save the result in the property value
    read_result = ReadAccessResultElementChoice()

    try:
        if not obj:
            raise ExecutionError(errorClass='object', errorCode='unknownObject')
        read_result.propertyValue = read_property_to_any(obj, propertyIdentifier, propertyArrayIndex)
        if _debug: read_property_to_result_element._debug(" - success")
    except PropertyError as error:
        if _debug: read_property_to_result_element._debug(" - error: %r", error)
        read_result.propertyAccessError = ErrorType(errorClass='property', errorCode='unknownProperty')
    except ExecutionError as error:
        if _debug: read_property_to_result_element._debug(" - error: %r", error)
        read_result.propertyAccessError = ErrorType(errorClass=error.errorClass, errorCode=error.errorCode)

    # make an element for this value
    read_access_result_element = ReadAccessResultElement(
        propertyIdentifier=propertyIdentifier,
        propertyArrayIndex=propertyArrayIndex,
        readResult=read_result,
    )
    if _debug: read_property_to_result_element._debug(" - read_access_result_element: %r", read_access_result_element)

    # fini
    return read_access_result_element
JoelBender/bacpypes
[ 243, 121, 243, 107, 1436992431 ]
def __init__(self):
        if _debug: ReadWritePropertyMultipleServices._debug("__init__")
        Capability.__init__(self)
JoelBender/bacpypes
[ 243, 121, 243, 107, 1436992431 ]
def get_html_tag(markup):
    """Return the HTML tag associated with the given wiki-markup."""
    return MARKUP_TO_HTML[markup]
earwig/mwparserfromhell
[ 593, 69, 593, 76, 1337539554 ]
def is_visible(tag):
    """Return whether or not the given *tag* contains visible text."""
    return tag.lower() not in INVISIBLE_TAGS
earwig/mwparserfromhell
[ 593, 69, 593, 76, 1337539554 ]
def is_single_only(tag):
    """Return whether or not the given *tag* must exist without a close tag."""
    return tag.lower() in SINGLE_ONLY
earwig/mwparserfromhell
[ 593, 69, 593, 76, 1337539554 ]
def matchtask(
        self, task: Dict[str, Any], file: 'Optional[Lintable]' = None
ansible/ansible-lint
[ 3021, 542, 3021, 56, 1376478480 ]
def __init__(self,
             stamp_size_arcsec = 20.0,
             mag_dict = {"lo":20.0, "hi":25.0 },
             hlr_dict = {"lo":0.35, "hi":0.75 },
             fbulge_dict = {"lo":0.5 , "hi":0.9 },
             q_dict = {"lo":0.4 , "hi":1.0 },
             pos_ang_dict = {"lo":0.0 , "hi":180.0},
             ngals_arcmin2 = 15.0,
             nsimimages = 50,
             ncpu = 2,
             ):
        """
        :param stamp_size_arcsec: The size of the stamp of each simulated source by **GalSim**. The stamp is with the size of ``stamp_size_arcsec`` x ``stamp_size_arcsec`` (``stamp_size_arcsec`` in arcsec) where the **GalSim** will simulate one single source on. By default, it is ``stamp_size_arcsec = 20.0``.

        :param mag_dict: The magnitude range which **GalSim** will simulate sources. It must be in the form of ``{"lo": _value_, "hi": _value_}``, where _value_ is expressed in magnitude. By default, it is ``mag_dict = {"lo":20.0, "hi":25.0 }``.

        :param hlr_dict: The half light radius configuration of the sources simulated by **GalSim**. It is in the unit of arcsec. It has to be in the form of ``{"lo": _value_, "hi": _value_}``. By default, it is ``hlr_dict = {"lo":0.35 , "hi":0.75 }``.

        :param fbulge_dict: The configuration of the fraction of the bulge component. It must be in the form of ``{"lo": _value_, "hi": _value_}``. Note that the _value_ has to be within [0,1] and 1 means the galaxy has zero fraction of light from the disk component. By default, it is ``fbulge_dict = {"lo":0.5 , "hi":0.9 }``.

        :param q_dict: The minor-to-major axis ratio configuration of the sources simulated by **GalSim**. It must be in the form of ``{"lo": _value_, "hi": _value_}``. Note that the _value_ has to be within [0,1] and ``q = 1`` means spherical. By default, it is ``q_dict = {"lo":0.4 , "hi":1.0 }``.

        :param pos_ang_dict: The position angle configuration of the sources simulated by **GalSim**. It is in the unit of degree. It must be in the form of ``{"lo": _value_, "hi": _value_}``. Note that the _value_ has to be within [0,180.0] and it is counter-clockwise with +x is 0 degree. By default, it is ``pos_ang_dict = {"lo":0.0 , "hi":180.0}``.

        :param ngals_arcmin2: The projected number of the sources simulated by **GalSim** per arcmin square. You don't want to set this number too high because it will cause the problem from blending in the source detection. However, you don't want to lose the statistic power if you set this number too low. By default, it is ``ngals_arcmin2 = 15.0``.

        :param nsimimages: The number of the images you want to simulate. It will be saved in the multi-extension file with the code name ``sims_nameroot``. By default, it is ``nsimimages = 50``.

        :param ncpu: The number of cpu for parallel running. By default, it is ``ncpu = 2``. Please do not set this number higher than the CPU cores you have.
        """
        self.stamp_size_arcsec = float(stamp_size_arcsec)
        self.mag_dict = mag_dict
        self.hlr_dict = hlr_dict
        self.fbulge_dict = fbulge_dict
        self.q_dict = q_dict
        self.pos_ang_dict = pos_ang_dict
        self.ngals_arcmin2 = float(ngals_arcmin2)
        self.nsimimages = int(nsimimages)
        self.ncpu = int(ncpu)

        return
inonchiu/ComEst
[ 5, 3, 5, 1, 1448042190 ]
def main():
    config = configparser.ConfigParser()
    config.optionxform = str
    config.read_file(open(os.path.join(os.path.dirname(__file__), "../config.ini"), encoding="utf8"))
    env_conf = dict(config.items('environment'))

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-v', '--verbose', action='store_true')
    args = parser.parse_args()
    verbose = args.verbose

    if verbose:
        level = logging.DEBUG
    else:
        level = logging.ERROR
    formatter = '%(asctime)s - %(levelname)s - %(message)s'
    # Add the format/level to the logger
    logging.basicConfig(format=formatter, level=level)

    bctester(os.path.join(env_conf["SRCDIR"], "test", "util", "data"), "syscoin-util-test.json", env_conf)
syscoin/syscoin2
[ 164, 70, 164, 5, 1456717652 ]
def bctest(testDir, testObj, buildenv):
    """Runs a single test, comparing output and RC to expected output and RC.

    Raises an error if input can't be read, executable fails, or output/RC
    are not as expected. Error is caught by bctester() and reported.
    """
    # Get the exec names and arguments
    execprog = os.path.join(buildenv["BUILDDIR"], "src", testObj["exec"] + buildenv["EXEEXT"])
    execargs = testObj['args']
    execrun = [execprog] + execargs

    # Read the input data (if there is any)
    stdinCfg = None
    inputData = None
    if "input" in testObj:
        filename = os.path.join(testDir, testObj["input"])
        inputData = open(filename, encoding="utf8").read()
        stdinCfg = subprocess.PIPE

    # Read the expected output data (if there is any)
    outputFn = None
    outputData = None
    outputType = None
    if "output_cmp" in testObj:
        outputFn = testObj['output_cmp']
        outputType = os.path.splitext(outputFn)[1][1:]  # output type from file extension (determines how to compare)
        try:
            outputData = open(os.path.join(testDir, outputFn), encoding="utf8").read()
        except:
            logging.error("Output file " + outputFn + " can not be opened")
            raise
        if not outputData:
            logging.error("Output data missing for " + outputFn)
            raise Exception
        if not outputType:
            logging.error("Output file %s does not have a file extension" % outputFn)
            raise Exception

    # Run the test
    proc = subprocess.Popen(execrun, stdin=stdinCfg, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
    try:
        outs = proc.communicate(input=inputData)
    except OSError:
        logging.error("OSError, Failed to execute " + execprog)
        raise

    if outputData:
        data_mismatch, formatting_mismatch = False, False
        # Parse command output and expected output
        try:
            a_parsed = parse_output(outs[0], outputType)
        except Exception as e:
            logging.error('Error parsing command output as %s: %s' % (outputType, e))
            raise
        try:
            b_parsed = parse_output(outputData, outputType)
        except Exception as e:
            logging.error('Error parsing expected output %s as %s: %s' % (outputFn, outputType, e))
            raise
        # Compare data
        if a_parsed != b_parsed:
            logging.error("Output data mismatch for " + outputFn + " (format " + outputType + ")")
            data_mismatch = True
        # Compare formatting
        if outs[0] != outputData:
            error_message = "Output formatting mismatch for " + outputFn + ":\n"
            error_message += "".join(difflib.context_diff(outputData.splitlines(True), outs[0].splitlines(True), fromfile=outputFn, tofile="returned"))
            logging.error(error_message)
            formatting_mismatch = True

        assert not data_mismatch and not formatting_mismatch

    # Compare the return code to the expected return code
    wantRC = 0
    if "return_code" in testObj:
        wantRC = testObj['return_code']
    if proc.returncode != wantRC:
        logging.error("Return code mismatch for " + outputFn)
        raise Exception

    if "error_txt" in testObj:
        want_error = testObj["error_txt"]
        # Compare error text
        # TODO: ideally, we'd compare the strings exactly and also assert
        # That stderr is empty if no errors are expected. However, syscoin-tx
        # emits DISPLAY errors when running as a windows application on
        # linux through wine. Just assert that the expected error text appears
        # somewhere in stderr.
        if want_error not in outs[1]:
            logging.error("Error mismatch:\n" + "Expected: " + want_error + "\nReceived: " + outs[1].rstrip())
            raise Exception
syscoin/syscoin2
[ 164, 70, 164, 5, 1456717652 ]
def clog(*args, condition=True, log_func=print, **kwargs):
    if condition:
        return log_func(*args, **kwargs)
thorwhalen/ut
[ 4, 3, 4, 24, 1426246351 ]
def __init__(
        self, plotly_name="arrowcolor", parent_name="layout.annotation", **kwargs
plotly/python-api
[ 13052, 2308, 13052, 1319, 1385013188 ]
def __init__(self, filenames_to_try=[]):
        # FUN FACT: In Python 3.2, they spontaneously changed the behaviour of
        # RawConfigParser so that it no longer considers ';' a comment delimiter
        # for inline comments.
        #
        # Compare:
        #  "Configuration files may include comments, prefixed by specific
        #  characters (# and ;). Comments may appear on their own in an otherwise
        #  empty line, or may be entered in lines holding values or section names.
        #  In the latter case, they need to be preceded by a whitespace character
        #  to be recognized as a comment. (For backwards compatibility, only ;
        #  starts an inline comment, while # does not.)"
        #  -- https://docs.python.org/2/library/configparser.html
        # vs:
        #  "Comment prefixes are strings that indicate the start of a valid comment
        #  within a config file. comment_prefixes are used only on otherwise empty
        #  lines (optionally indented) whereas inline_comment_prefixes can be used
        #  after every valid value (e.g. section names, options and empty lines as
        #  well). By default inline comments are disabled and '#' and ';' are used
        #  as prefixes for whole line comments.
        #  Changed in version 3.2: In previous versions of configparser behaviour
        #  matched comment_prefixes=('#',';') and inline_comment_prefixes=(';',)."
        #  -- https://docs.python.org/3/library/configparser.html#customizing-parser-behaviour
        #
        # Grrr...
        if sys.version_info.major >= 3:
            self._cp = RawConfigParser(dict_type=OrderedMultiDict, inline_comment_prefixes=(';',))
        else:
            self._cp = RawConfigParser(dict_type=OrderedMultiDict)

        if isinstance(filenames_to_try, str):
            filenames_to_try = [filenames_to_try]
        self._filenames_to_try = filenames_to_try[:]
jboy/nim-pymod
[ 213, 11, 213, 10, 1447319234 ]
def sections(self):
        return self._cp.sections()
jboy/nim-pymod
[ 213, 11, 213, 10, 1447319234 ]
def get(self, section_name, option_name, do_optionxform=True):
        if do_optionxform:
            # https://docs.python.org/2/library/configparser.html#ConfigParser.RawConfigParser.optionxform
            option_name = self._cp.optionxform(option_name)

        if section_name is None:
            return self._get_optval_in_sections(self.sections(), option_name)
        elif isinstance(section_name, str):
            return self._get_optval_in_sections([section_name], option_name)
        else:
            return self._get_optval_in_sections(section_name, option_name)
jboy/nim-pymod
[ 213, 11, 213, 10, 1447319234 ]
def getboolean(self, section_name, option_name, do_optionxform=True):
        # https://docs.python.org/2/library/configparser.html#ConfigParser.RawConfigParser.getboolean
        return [self._coerce_to_boolean(optval)
                for optval in self.get(section_name, option_name, do_optionxform)]
jboy/nim-pymod
[ 213, 11, 213, 10, 1447319234 ]
def setUp(self):
        # Making a HealthProfessional
        self.view = ListAllMedicines

        # Making medicine
        self.medicine = Medicine()
        self.medicine.name = "Medicamento Teste"
        self.medicine.active_ingredient = "Teste Lab"
        self.medicine.save()

        self.listing = Medicine.objects.all()
fga-gpp-mds/2017.2-Receituario-Medico
[ 17, 8, 17, 18, 1502385214 ]
def __init__(
        self, plotly_name="bgcolor", parent_name="sankey.node.hoverlabel", **kwargs
plotly/python-api
[ 13052, 2308, 13052, 1319, 1385013188 ]
def __init__(self, *args, **kwargs):
        self._yaqd_port = kwargs.pop("yaqd_port")
        super().__init__(*args, **kwargs)
        self.grating_index = pc.Combo(
            name="Grating",
            allowed_values=[1, 2],
            section=self.name,
            option="grating_index",
            display=True,
            set_method="set_turret",
        )
        self.exposed.append(self.grating_index)
wright-group/PyCMDS
[ 6, 3, 6, 13, 1428602124 ]
def initialize(self, *args, **kwargs):
        # open control
        self.ctrl = yaqc.Client(self._yaqd_port)
        # import some information from control
        id_dict = self.ctrl.id()
        self.serial_number = id_dict["serial"]
        self.position.write(self.ctrl.get_position())
        # recorded
        self.recorded[self.name] = [self.position, self.native_units, 1.0, "m", False]
        self.wait_until_still()
        # finish
        self.initialized.write(True)
        self.initialized_signal.emit()
wright-group/PyCMDS
[ 6, 3, 6, 13, 1428602124 ]
def set_position(self, destination):
        self.ctrl.set_position(float(destination))
        self.wait_until_still()
wright-group/PyCMDS
[ 6, 3, 6, 13, 1428602124 ]
def __init__(self, *args, **kwargs):
        self.kind = "spectrometer"
        hw.Hardware.__init__(self, *args, **kwargs)
wright-group/PyCMDS
[ 6, 3, 6, 13, 1428602124 ]
def gen_params(raw_params):
    u"""
    Generator that yields tuples of (name, default_value) for each parameter in the list.
    If no default is given, then default_value is None (not Leaf(token.NAME, 'None')).
    """
    assert raw_params[0].type == token.STAR and len(raw_params) > 2
    curr_idx = 2  # the first place a keyword-only parameter name can be is index 2
    max_idx = len(raw_params)
    while curr_idx < max_idx:
        curr_item = raw_params[curr_idx]
        prev_item = curr_item.prev_sibling
        if curr_item.type != token.NAME:
            curr_idx += 1
            continue
        if prev_item is not None and prev_item.type == token.DOUBLESTAR:
            break
        name = curr_item.value
        nxt = curr_item.next_sibling
        if nxt is not None and nxt.type == token.EQUAL:
            default_value = nxt.next_sibling
            curr_idx += 2
        else:
            default_value = None
        yield (name, default_value)
        curr_idx += 1
thonkify/thonkify
[ 17, 1, 17, 3, 1501859450 ]
def needs_fixing(raw_params, kwargs_default=_kwargs_default_name):
    u"""
    Returns a string with the name of the kwargs dict if the params after the first star need fixing.
    Otherwise returns an empty string.
    """
    found_kwargs = False
    needs_fix = False

    for t in raw_params[2:]:
        if t.type == token.COMMA:
            # Commas are irrelevant at this stage.
            continue
        elif t.type == token.NAME and not found_kwargs:
            # Keyword-only argument: definitely need to fix.
            needs_fix = True
        elif t.type == token.NAME and found_kwargs:
            # Return 'foobar' of **foobar, if needed.
            return t.value if needs_fix else u''
        elif t.type == token.DOUBLESTAR:
            # Found either '*' from **foobar.
            found_kwargs = True
    else:
        # Never found **foobar. Return a synthetic name, if needed.
        return kwargs_default if needs_fix else u''
thonkify/thonkify
[ 17, 1, 17, 3, 1501859450 ]
def check_if_device_is_connected(f):
    """
    Decorator. Checks if device is connected before invoking function.
    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        if args[0].device is not None:
            return f(*args, **kwargs)
        else:
            print '[!] Device disconnected.'
            print
    return wrapper
rsc-dev/loophole
[ 146, 16, 146, 4, 1453282427 ]
def __init__(self):
        """Constructor.
        """
        cmd.Cmd.__init__(self)
        self.prompt = LoopholeCli.__PROMPT.format('no device')
        self.device = None
rsc-dev/loophole
[ 146, 16, 146, 4, 1453282427 ]