function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
sequence
def __init__(self): super(Run, self).__init__() self._test = None
ChromiumWebApps/chromium
[ 216, 323, 216, 1, 1392992388 ]
def AddCommandLineOptions(self, parser): test.Test.AddCommandLineOptions(parser) # Allow tests to add their own command line options. matching_tests = {} for arg in sys.argv[1:]: matching_tests.update(_MatchTestName(arg)) for test_class in matching_tests.itervalues(): test_class.AddTestCommandLineOptions(parser)
ChromiumWebApps/chromium
[ 216, 323, 216, 1, 1392992388 ]
def Run(self, options, args): return min(255, self._test().Run(copy.copy(options)))
ChromiumWebApps/chromium
[ 216, 323, 216, 1, 1392992388 ]
def _GetScriptName(): return os.path.basename(sys.argv[0])
ChromiumWebApps/chromium
[ 216, 323, 216, 1, 1392992388 ]
def _MatchTestName(input_test_name): def _Matches(input_string, search_string): if search_string.startswith(input_string): return True for part in search_string.split('.'): if part.startswith(input_string): return True return False # Exact matching. if input_test_name in test_aliases: exact_match = test_aliases[input_test_name] else: exact_match = input_test_name if exact_match in _GetTests(): return {exact_match: _GetTests()[exact_match]} # Fuzzy matching. return dict((test_name, test_class) for test_name, test_class in _GetTests().iteritems() if _Matches(input_test_name, test_name))
ChromiumWebApps/chromium
[ 216, 323, 216, 1, 1392992388 ]
def cltv_modify_tx(tx, prepend_scriptsig, nsequence=None, nlocktime=None): assert_equal(len(tx.vin), 1) if nsequence is not None: tx.vin[0].nSequence = nsequence tx.nLockTime = nlocktime tx.vin[0].scriptSig = CScript(prepend_scriptsig + list(CScript(tx.vin[0].scriptSig))) tx.rehash()
bitcoinknots/bitcoin
[ 150, 55, 150, 9, 1456398219 ]
def cltv_validate(tx, height): # Modify the signature in vin 0 and nSequence/nLockTime of the tx to pass CLTV scheme = [[CScriptNum(height), OP_CHECKLOCKTIMEVERIFY, OP_DROP], 0, height] cltv_modify_tx(tx, prepend_scriptsig=scheme[0], nsequence=scheme[1], nlocktime=scheme[2])
bitcoinknots/bitcoin
[ 150, 55, 150, 9, 1456398219 ]
def set_test_params(self): self.num_nodes = 1 self.extra_args = [[ '[email protected]', '-par=1', # Use only one script thread to get the exact reject reason for testing '-acceptnonstdtxn=1', # cltv_invalidate is nonstandard ]] self.setup_clean_chain = True self.rpc_timeout = 480
bitcoinknots/bitcoin
[ 150, 55, 150, 9, 1456398219 ]
def run_test(self): peer = self.nodes[0].add_p2p_connection(P2PInterface()) wallet = MiniWallet(self.nodes[0], mode=MiniWalletMode.RAW_OP_TRUE) self.test_cltv_info(is_active=False) self.log.info("Mining %d blocks", CLTV_HEIGHT - 2) wallet.generate(10) self.nodes[0].generate(CLTV_HEIGHT - 2 - 10) self.log.info("Test that invalid-according-to-CLTV transactions can still appear in a block") # create one invalid tx per CLTV failure reason (5 in total) and collect them invalid_cltv_txs = [] for i in range(5): spendtx = wallet.create_self_transfer(from_node=self.nodes[0])['tx'] cltv_invalidate(spendtx, i) invalid_cltv_txs.append(spendtx) tip = self.nodes[0].getbestblockhash() block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1 block = create_block(int(tip, 16), create_coinbase(CLTV_HEIGHT - 1), block_time) block.nVersion = 3 block.vtx.extend(invalid_cltv_txs) block.hashMerkleRoot = block.calc_merkle_root() block.solve() self.test_cltv_info(is_active=False) # Not active as of current tip and next block does not need to obey rules peer.send_and_ping(msg_block(block)) self.test_cltv_info(is_active=True) # Not active as of current tip, but next block must obey rules assert_equal(self.nodes[0].getbestblockhash(), block.hash) self.log.info("Test that blocks must now be at least version 4") tip = block.sha256 block_time += 1 block = create_block(tip, create_coinbase(CLTV_HEIGHT), block_time) block.nVersion = 3 block.solve() with self.nodes[0].assert_debug_log(expected_msgs=['{}, bad-version(0x00000003)'.format(block.hash)]): peer.send_and_ping(msg_block(block)) assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip) peer.sync_with_ping() self.log.info("Test that invalid-according-to-CLTV transactions cannot appear in a block") block.nVersion = 4 block.vtx.append(CTransaction()) # dummy tx after coinbase that will be replaced later # create and test one invalid tx per CLTV failure reason (5 in total) for i in range(5): spendtx = 
wallet.create_self_transfer(from_node=self.nodes[0])['tx'] cltv_invalidate(spendtx, i) expected_cltv_reject_reason = [ "non-mandatory-script-verify-flag (Operation not valid with the current stack size)", "non-mandatory-script-verify-flag (Negative locktime)", "non-mandatory-script-verify-flag (Locktime requirement not satisfied)", "non-mandatory-script-verify-flag (Locktime requirement not satisfied)", "non-mandatory-script-verify-flag (Locktime requirement not satisfied)", ][i] # First we show that this tx is valid except for CLTV by getting it # rejected from the mempool for exactly that reason. assert_equal( [{ 'txid': spendtx.hash, 'wtxid': spendtx.getwtxid(), 'allowed': False, 'reject-reason': expected_cltv_reject_reason, }], self.nodes[0].testmempoolaccept(rawtxs=[spendtx.serialize().hex()], maxfeerate=0), ) # Now we verify that a block with this transaction is also invalid. block.vtx[1] = spendtx block.hashMerkleRoot = block.calc_merkle_root() block.solve() with self.nodes[0].assert_debug_log(expected_msgs=['CheckInputScripts on {} failed with {}'.format( block.vtx[-1].hash, expected_cltv_reject_reason)]): peer.send_and_ping(msg_block(block)) assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip) peer.sync_with_ping() self.log.info("Test that a version 4 block with a valid-according-to-CLTV transaction is accepted") cltv_validate(spendtx, CLTV_HEIGHT - 1) block.vtx.pop(1) block.vtx.append(spendtx) block.hashMerkleRoot = block.calc_merkle_root() block.solve() self.test_cltv_info(is_active=True) # Not active as of current tip, but next block must obey rules peer.send_and_ping(msg_block(block)) self.test_cltv_info(is_active=True) # Active as of current tip assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
bitcoinknots/bitcoin
[ 150, 55, 150, 9, 1456398219 ]
def entities(hass): """Initialize the test switch.""" platform = getattr(hass.components, "test.switch") platform.init() yield platform.ENTITIES
home-assistant/home-assistant
[ 58698, 22318, 58698, 2794, 1379402988 ]
def __init__(self, sleeptime = 0.01): super(Lagger, self).__init__() self.sleeptime = sleeptime
sparkslabs/kamaelia_
[ 13, 3, 13, 2, 1348148442 ]
def setUp(self): self.files_dir = os.path.join( os.path.dirname(os.path.abspath(__file__)), 'qucs_prj' ) self.dummy_media = DefinedGammaZ0( frequency = Frequency(1,100,21,'ghz'), gamma=1j, z0 = 50 , )
scikit-rf/scikit-rf
[ 498, 231, 498, 47, 1326983158 ]
def test_resistor(self): """ """ fname = os.path.join(self.files_dir,\ 'resistor,1ohm.s2p') qucs_ntwk = Network(fname) self.dummy_media.frequency = qucs_ntwk.frequency skrf_ntwk = self.dummy_media.resistor(1) self.assertEqual(qucs_ntwk, skrf_ntwk)
scikit-rf/scikit-rf
[ 498, 231, 498, 47, 1326983158 ]
def test_inductor(self): """ """ fname = os.path.join(self.files_dir,\ 'inductor,p1nH.s2p') qucs_ntwk = Network(fname) self.dummy_media.frequency = qucs_ntwk.frequency skrf_ntwk = self.dummy_media.inductor(.1e-9) self.assertEqual(qucs_ntwk, skrf_ntwk)
scikit-rf/scikit-rf
[ 498, 231, 498, 47, 1326983158 ]
def test_vector_gamma_z0_media(self): """ test ability to create a Media from vector quantities for gamma/z0 """ freq = Frequency(1,10,101) a = DefinedGammaZ0(freq, gamma = 1j*npy.ones(len(freq)) , z0 = 50*npy.ones(len(freq)), ) self.assertEqual(a.line(1),a.line(1)) with self.assertRaises(NotImplementedError): a.npoints=4
scikit-rf/scikit-rf
[ 498, 231, 498, 47, 1326983158 ]
def test_from_csv(self): fname = os.path.join(self.files_dir,\ 'out.csv') self.dummy_media.write_csv(fname) a_media = DefinedGammaZ0.from_csv(fname) self.assertEqual(a_media,self.dummy_media) os.remove(fname)
scikit-rf/scikit-rf
[ 498, 231, 498, 47, 1326983158 ]
def setUp(self): self.dummy_media = DefinedGammaZ0( frequency=Frequency(1, 100, 21, 'GHz'), gamma=1j, z0=50, )
scikit-rf/scikit-rf
[ 498, 231, 498, 47, 1326983158 ]
def test_s_shunt_element(self): """ Shunt elements of admittance Y: β—‹---------β—‹ | [Y] | β—‹---------β—‹ have S matrix of the form: [ -Y Z0 / (Y Z0 + 2) 2/(Y Z0 + 2) ] [ 2/(Y Z0 + 2) Z/Z0 / (Y Z0 + 2) ] """ R = 1.0 # Ohm ntw = self.dummy_media.shunt(self.dummy_media.resistor(R)**self.dummy_media.short()) Z0 = self.dummy_media.z0 S11 = -(1/R*Z0) / (1/R*Z0 + 2) S21 = 2 / (1/R*Z0 + 2) npy.testing.assert_array_almost_equal(ntw.s[:,0,0], S11) npy.testing.assert_array_almost_equal(ntw.s[:,0,1], S21) npy.testing.assert_array_almost_equal(ntw.s[:,1,0], S21) npy.testing.assert_array_almost_equal(ntw.s[:,1,1], S11)
scikit-rf/scikit-rf
[ 498, 231, 498, 47, 1326983158 ]
def test_s_lossy_line(self): """ Lossy transmission line of characteristic impedance Z0, length l and propagation constant gamma = alpha + j beta β—‹---------β—‹ β—‹---------β—‹ has ABCD matrix of the form: [ cosh(gamma l) Z0 sinh(gamma l) ] [ 1/Z0 sinh(gamma l) cosh(gamma l) ] """
scikit-rf/scikit-rf
[ 498, 231, 498, 47, 1326983158 ]
def setUp(self): self.dummy_media = DefinedGammaZ0( frequency=Frequency(1, 100, 21,'GHz'), gamma=1j, z0=50 , )
scikit-rf/scikit-rf
[ 498, 231, 498, 47, 1326983158 ]
def test_abcd_shunt_element(self): """ Shunt elements of admittance Y: β—‹---------β—‹ | [Y] | β—‹---------β—‹ have ABCD matrix of the form: [ 1 0 ] [ Y 1 ] """ R = 1.0 # Ohm ntw = self.dummy_media.shunt(self.dummy_media.resistor(R)**self.dummy_media.short()) npy.testing.assert_array_almost_equal(ntw.a[:,0,0], 1.0) npy.testing.assert_array_almost_equal(ntw.a[:,0,1], 0.0) npy.testing.assert_array_almost_equal(ntw.a[:,1,0], 1.0/R) npy.testing.assert_array_almost_equal(ntw.a[:,1,1], 1.0)
scikit-rf/scikit-rf
[ 498, 231, 498, 47, 1326983158 ]
def test_abcd_thru(self): """ Thru has ABCD matrix of the form: [ 1 0 ] [ 0 1 ] """ ntw = self.dummy_media.thru() npy.testing.assert_array_almost_equal(ntw.a[:,0,0], 1.0) npy.testing.assert_array_almost_equal(ntw.a[:,0,1], 0.0) npy.testing.assert_array_almost_equal(ntw.a[:,1,0], 0.0) npy.testing.assert_array_almost_equal(ntw.a[:,1,1], 1.0)
scikit-rf/scikit-rf
[ 498, 231, 498, 47, 1326983158 ]
def glob_slash(dirname): """Like regular glob but replaces \ with / in returned paths.""" return [s.replace('\\', '/') for s in glob.glob(dirname)]
endlessm/chromium-browser
[ 21, 16, 21, 3, 1435959644 ]
def exits(request, context): return ignore.get_exit(request, context)
rusenask/stubo-app
[ 1, 1, 1, 1, 1434357123 ]
def setUp(self): super().setUp() # create users self.bad_user = UserFactory.create( username='bad_user', ) self.good_user = UserFactory.create( username='good_user', ) self.non_staff = UserFactory.create( username='non_staff', ) self.admin = UserFactory.create( username='admin', is_staff=True, ) # create clients self.bad_user_client = Client() self.good_user_client = Client() self.non_staff_client = Client() self.admin_client = Client() for user, client in [ (self.bad_user, self.bad_user_client), (self.good_user, self.good_user_client), (self.non_staff, self.non_staff_client), (self.admin, self.admin_client), ]: client.login(username=user.username, password='test') UserStandingFactory.create( user=self.bad_user, account_status=UserStanding.ACCOUNT_DISABLED, changed_by=self.admin ) # set stock url to test disabled accounts' access to site self.some_url = '/' # since it's only possible to disable accounts from lms, we're going # to skip tests for cms
edx/edx-platform
[ 6290, 3437, 6290, 280, 1369945238 ]
def test_can_access_manage_account_page(self): response = self.admin_client.get(reverse('manage_user_standing'), { 'user': self.admin, }) assert response.status_code == 200
edx/edx-platform
[ 6290, 3437, 6290, 280, 1369945238 ]
def test_disable_account(self): assert UserStanding.objects.filter(user=self.good_user).count() == 0 response = self.admin_client.post(reverse('disable_account_ajax'), { # lint-amnesty, pylint: disable=unused-variable 'username': self.good_user.username, 'account_action': 'disable', }) assert UserStanding.objects.get(user=self.good_user).account_status == UserStanding.ACCOUNT_DISABLED
edx/edx-platform
[ 6290, 3437, 6290, 280, 1369945238 ]
def test_reenable_account(self): response = self.admin_client.post(reverse('disable_account_ajax'), { # lint-amnesty, pylint: disable=unused-variable 'username': self.bad_user.username, 'account_action': 'reenable' }) assert UserStanding.objects.get(user=self.bad_user).account_status == UserStanding.ACCOUNT_ENABLED
edx/edx-platform
[ 6290, 3437, 6290, 280, 1369945238 ]
def test_non_staff_cant_access_disable_view(self): response = self.non_staff_client.get(reverse('manage_user_standing'), { 'user': self.non_staff, }) assert response.status_code == 404
edx/edx-platform
[ 6290, 3437, 6290, 280, 1369945238 ]
def initiateOp(self, handle, seekpos, buffer): assert len(buffer) > 0 assert seekpos >= 0 df = Deferred() try: self.op(handle, seekpos, buffer, self.ovDone, (handle, buffer)) except: df.errback(Failure()) else: self.df = df return df
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def ovDone(self, ret, bytes, (handle, buffer)): df = self.df del self.df if ret or not bytes: try: raise ctypes.WinError() except: df.errback(Failure()) else: self.opComplete(df, bytes, buffer)
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def opComplete(self, df, bytes, buffer): df.callback(buffer[:bytes])
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def opComplete(self, df, bytes, buffer): df.callback(bytes)
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def __init__(self, handle): from twisted.internet import reactor self.reactor = reactor self.handle = handle self.osfhandle = win32file._get_osfhandle(self.handle.fileno()) self.mode = self.handle.mode # CloseHandle automatically calls CancelIo self.close = self.handle.close self.fileno = self.handle.fileno self.read_op = ReadFileOp() self.write_op = WriteFileOp() self.readbuf = self.reactor.AllocateReadBuffer(self.buffer_size)
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def write(self, data): return self.write_op.initiateOp(self.osfhandle, self.seekpos, data)
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def open_sparse_file(path, mode, length=0, overlapped=True): return IOCPFile(open_sparse_file_base(path, mode, length, overlapped))
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def __init__(self, doneflag, add_task, external_add_task, max_files_open, num_disk_threads): self.add_task = add_task self.file_to_torrent = {} self.waiting_ops = [] self.active_file_to_handles = DictWithSets() self.open_file_to_handles = DictWithLists() self.set_max_files_open(max_files_open)
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def _close_all(self, df): failures = {} while len(self.open_file_to_handles) > 0: filename, handle = self.open_file_to_handles.popitem() try: handle.close() except: failures[self.file_to_torrent[filename]] = Failure() for torrent, failure in failures.iteritems(): torrent.got_exception(failure) if self.get_open_file_count() > 0: # it would be nice to wait on the deferred for the outstanding ops self.add_task(0.5, self._close_all, df) else: df.callback(True)
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def _close_files(self, df, file_set): failure = None done = False filenames = self.open_file_to_handles.keys() for filename in filenames: if filename not in file_set: continue handles = self.open_file_to_handles.poprow(filename) for handle in handles: try: handle.close() except: failure = Failure() done = True for filename in file_set.iterkeys(): if filename in self.active_file_to_handles: done = False break if failure is not None: df.errback(failure) if not done: # it would be nice to wait on the deferred for the outstanding ops self.add_task(0.5, self._close_files, df, file_set) else: df.callback(True)
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def add_files(self, files, torrent): for filename in files: if filename in self.file_to_torrent: raise BTFailure(_("File %s belongs to another running torrent") % filename) for filename in files: self.file_to_torrent[filename] = torrent
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def _ensure_exists(self, filename, length=0): if not os.path.exists(filename): f = os.path.split(filename)[0] if f != '' and not os.path.exists(f): os.makedirs(f) f = file(filename, 'wb') make_file_sparse(filename, f, length) f.close()
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def free_handle_notify(self): if self.waiting_ops: args = self.waiting_ops.pop(0) self._produce_handle(*args)
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def _produce_handle(self, df, filename, for_write, length): if filename in self.open_file_to_handles: handle = self.open_file_to_handles.pop_from_row(filename) if for_write and not is_open_for_write(handle.mode): handle.close() handle = open_sparse_file(filename, 'rb+', length=length) #elif not for_write and is_open_for_write(handle.mode): # handle.close() # handle = file(filename, 'rb', 0) else: if self.get_open_file_count() == self.max_files_open: oldfname, oldhandle = self.open_file_to_handles.popitem() oldhandle.close() self._ensure_exists(filename, length) if for_write: handle = open_sparse_file(filename, 'rb+', length=length) else: handle = open_sparse_file(filename, 'rb', length=length) self.active_file_to_handles.push_to_row(filename, handle) df.callback(handle)
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def __init__(self, config, filepool, save_path, files, add_task, external_add_task, doneflag): self.filepool = filepool self.config = config self.doneflag = doneflag self.add_task = add_task self.external_add_task = external_add_task self.initialize(save_path, files)
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def _build_file_structs(self, filepool, files): total = 0 for filename, length in files: # we're shutting down, abort. if self.doneflag.isSet(): return False self.undownloaded[filename] = length if length > 0: self.ranges.append((total, total + length, filename)) self.range_by_name[filename] = (total, total + length) if os.path.exists(filename): if not os.path.isfile(filename): raise BTFailure(_("File %s already exists, but is not a " "regular file") % filename) l = os.path.getsize(filename) if l > length: # This is the truncation Bram was talking about that no one # else thinks is a good idea. #h = file(filename, 'rb+') #make_file_sparse(filename, h, length) #h.truncate(length) #h.close() l = length a = get_allocated_regions(filename, begin=0, length=l) if a is not None: a.offset(total) else: a = SparseSet() if l > 0: a.add(total, total + l) self.allocated_regions += a total += length self.total_length = total self.initialized = True return True
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def was_preallocated(self, pos, length): return self.allocated_regions.is_range_in(pos, pos+length)
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def _intervals(self, pos, amount): r = [] stop = pos + amount p = max(bisect_right(self.ranges, (pos, 2 ** 500)) - 1, 0) for begin, end, filename in self.ranges[p:]: if begin >= stop: break r.append((filename, max(pos, begin) - begin, min(end, stop) - begin)) return r
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def op(h): h.seek(pos) if write: odf = h.write(param) else: odf = h.read(param) def like_finally(r): self.filepool.release_handle(filename, h) return r odf.addBoth(like_finally) return odf
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def _batch_read(self, pos, amount): dfs = [] r = [] # queue all the reads for filename, pos, end in self._intervals(pos, amount): df = self._file_op(filename, pos, end - pos, write=False) dfs.append(df) # yield on all the reads in order - they complete in any order exc = None for df in dfs: yield df try: r.append(df.getResult()) except: exc = exc or sys.exc_info() if exc: raise exc[0], exc[1], exc[2] r = ''.join(r) if len(r) != amount: raise BTFailure(_("Short read (%d of %d) - " "something truncated files?") % (len(r), amount)) yield r
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def _batch_write(self, pos, s): dfs = [] total = 0 amount = len(s) # queue all the writes for filename, begin, end in self._intervals(pos, amount): length = end - begin assert length > 0, '%s %s' % (pos, amount) d = buffer(s, total, length) total += length df = self._file_op(filename, begin, d, write=True) dfs.append(df) assert total == amount, '%s and %s' % (total, amount) written = 0 # yield on all the writes - they complete in any order exc = None for df in dfs: yield df try: written += df.getResult() except: exc = exc or sys.exc_info() if exc: raise exc[0], exc[1], exc[2] assert total == written, '%s and %s' % (total, written)
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def write(self, pos, s): df = launch_coroutine(wrap_task(self.add_task), self._batch_write, pos, s) return df
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def post_init(r): return self.filepool.close_files(self.range_by_name)
epsylon3/torrentflux
[ 130, 67, 130, 40, 1274203656 ]
def setUp(self): super(TestViews, self).setUp() self.request_factory = RequestFactory() self.request = self.request_factory.get('') self.request.user = None self.simple_data = {'error': 'error'}
miptliot/edx-platform
[ 1, 7, 1, 5, 1382087527 ]
def test_all_problem_grade_distribution_has_access(self, has_access): """ Test returns proper value when have proper access """ has_access.return_value = True response = views.all_problem_grade_distribution(self.request, 'test/test/test') self.assertEqual(json.dumps(self.simple_data), response.content)
miptliot/edx-platform
[ 1, 7, 1, 5, 1382087527 ]
def test_all_problem_grade_distribution_no_access(self, has_access): """ Test for no access """ has_access.return_value = False response = views.all_problem_grade_distribution(self.request, 'test/test/test') self.assertEqual("{\"error\": \"Access Denied: User does not have access to this course\'s data\"}", response.content)
miptliot/edx-platform
[ 1, 7, 1, 5, 1382087527 ]
def test_all_sequential_open_distribution_has_access(self, has_access): """ Test returns proper value when have proper access """ has_access.return_value = True response = views.all_sequential_open_distrib(self.request, 'test/test/test') self.assertEqual(json.dumps(self.simple_data), response.content)
miptliot/edx-platform
[ 1, 7, 1, 5, 1382087527 ]
def test_all_sequential_open_distribution_no_access(self, has_access): """ Test for no access """ has_access.return_value = False response = views.all_sequential_open_distrib(self.request, 'test/test/test') self.assertEqual("{\"error\": \"Access Denied: User does not have access to this course\'s data\"}", response.content)
miptliot/edx-platform
[ 1, 7, 1, 5, 1382087527 ]
def test_section_problem_grade_distribution_has_access(self, has_access): """ Test returns proper value when have proper access """ has_access.return_value = True response = views.section_problem_grade_distrib(self.request, 'test/test/test', '1') self.assertEqual(json.dumps(self.simple_data), response.content)
miptliot/edx-platform
[ 1, 7, 1, 5, 1382087527 ]
def test_section_problem_grade_distribution_no_access(self, has_access): """ Test for no access """ has_access.return_value = False response = views.section_problem_grade_distrib(self.request, 'test/test/test', '1') self.assertEqual("{\"error\": \"Access Denied: User does not have access to this course\'s data\"}", response.content)
miptliot/edx-platform
[ 1, 7, 1, 5, 1382087527 ]
def try_decode(byte_string, codec): try: s = byte_string.decode(codec) return s except: return None
sorgerlab/indra
[ 136, 54, 136, 41, 1407779045 ]
def fix_character_encoding(input_file, output_file): with open(input_file, 'rb') as f_in: with open(output_file, 'wb') as f_out: for line in f_in: # Try to decode with both latin_1 and utf-8 decoded = [try_decode(line, c) for c in codec_options] decoded = [d for d in decoded if d is not None] if len(decoded) == 0: # Hopefully at least one codec worked logger.info('Could not decode: %s' % line) sys.exit(1) else: # If more than one, choose the codec that gives the best # length chosen_string = shortest_string(decoded) # Write result as ascii, with non-ascii characters escaped f_out.write(chosen_string.encode('utf-8'))
sorgerlab/indra
[ 136, 54, 136, 41, 1407779045 ]
def __str__(self): """Prints string with field name if present on exception.""" return Error.__str__(self)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __init__(cls, name, bases, dct): """Constructor.""" type.__init__(cls, name, bases, dct) # Base classes may never be initialized. if cls.__bases__ != (object,): cls.__initialized = True
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __setattr__(cls, name, value): """Overridden to avoid setting variables after init. Setting attributes on a class must work during the period of initialization to set the enumation value class variables and build the name/number maps. Once __init__ has set the __initialized flag to True prohibits setting any more values on the class. The class is in effect frozen. Args: name: Name of value to set. value: Value to set. """ if cls.__initialized and name not in _POST_INIT_ATTRIBUTE_NAMES: raise AttributeError('May not change values: %s' % name) else: type.__setattr__(cls, name, value)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def definition_name(cls): """Helper method for creating definition name. Names will be generated to include the classes package name, scope (if the class is nested in another definition) and class name. By default, the package name for a definition is derived from its module name. However, this value can be overriden by placing a 'package' attribute in the module that contains the definition class. For example: package = 'some.alternate.package' class MyMessage(Message): ... >>> MyMessage.definition_name() some.alternate.package.MyMessage Returns: Dot-separated fully qualified name of definition. """ outer_definition_name = cls.outer_definition_name() if outer_definition_name is None: return six.text_type(cls.__name__) return u'%s.%s' % (outer_definition_name, cls.__name__)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def definition_package(cls): """Helper method for creating creating the package of a definition. Returns: Name of package that definition belongs to. """ outer_definition = cls.message_definition() if not outer_definition: return util.get_package_for_module(cls.__module__) return outer_definition.definition_package()
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __init__(cls, name, bases, dct): # Can only define one level of sub-classes below Enum. if not (bases == (object,) or bases == (Enum,)): raise EnumDefinitionError( 'Enum type %s may only inherit from Enum' % name) cls.__by_number = {} cls.__by_name = {} # Enum base class does not need to be initialized or locked. if bases != (object,): # Replace integer with number. for attribute, value in dct.items(): # Module will be in every enum class. if attribute in _RESERVED_ATTRIBUTE_NAMES: continue # Reject anything that is not an int. if not isinstance(value, six.integer_types): raise EnumDefinitionError( 'May only use integers in Enum definitions. ' 'Found: %s = %s' % (attribute, value)) # Protocol buffer standard recommends non-negative values. # Reject negative values. if value < 0: raise EnumDefinitionError( 'Must use non-negative enum values. Found: %s = %d' % (attribute, value)) if value > MAX_ENUM_VALUE: raise EnumDefinitionError( 'Must use enum values less than or equal %d. ' 'Found: %s = %d' % (MAX_ENUM_VALUE, attribute, value)) if value in cls.__by_number: raise EnumDefinitionError( 'Value for %s = %d is already defined: %s' % (attribute, value, cls.__by_number[value].name)) # Create enum instance and list in new Enum type. instance = object.__new__(cls) # pylint:disable=non-parent-init-called cls.__init__(instance, attribute, value) cls.__by_name[instance.name] = instance cls.__by_number[instance.number] = instance setattr(cls, attribute, instance) _DefinitionClass.__init__(cls, name, bases, dct)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def names(cls): """Get all names for Enum. Returns: An iterator for names of the enumeration in arbitrary order. """ return cls.__by_name.keys()
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def lookup_by_name(cls, name): """Look up Enum by name. Args: name: Name of enum to find. Returns: Enum sub-class instance of that value. """ return cls.__by_name[name]
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __len__(cls): return len(cls.__by_name)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __new__(cls, index): """Acts as look-up routine after class is initialized. The purpose of overriding __new__ is to provide a way to treat Enum subclasses as casting types, similar to how the int type functions. A program can pass a string or an integer and this method with "convert" that value in to an appropriate Enum instance. Args: index: Name or number to look up. During initialization this is always the name of the new enum value. Raises: TypeError: When an inappropriate index value is passed provided. """ # If is enum type of this class, return it. if isinstance(index, cls): return index # If number, look up by number. if isinstance(index, six.integer_types): try: return cls.lookup_by_number(index) except KeyError: pass # If name, look up by name. if isinstance(index, six.string_types): try: return cls.lookup_by_name(index) except KeyError: pass raise TypeError('No such value for %s in Enum %s' % (index, cls.__name__))
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __setattr__(self, name, value): raise TypeError('May not change enum values')
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __int__(self): return self.number
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __reduce__(self): """Enable pickling. Returns: A 2-tuple containing the class and __new__ args to be used for restoring a pickled instance. """ return self.__class__, (self.number,)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __lt__(self, other): """Order is by number.""" if isinstance(other, type(self)): return self.number < other.number return NotImplemented
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __eq__(self, other): """Order is by number.""" if isinstance(other, type(self)): return self.number == other.number return NotImplemented
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __ge__(self, other): """Order is by number.""" if isinstance(other, type(self)): return self.number >= other.number return NotImplemented
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __hash__(self): """Hash by number.""" return hash(self.number)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def to_dict(cls): """Make dictionary version of enumerated class. Dictionary created this way can be used with def_num. Returns: A dict (name) -> number """ return dict((item.name, item.number) for item in iter(cls))
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def def_enum(dct, name): """Define enum class from dictionary. Args: dct: Dictionary of enumerated values for type. name: Name of enum. """ return type(name, (Enum,), dct)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __new__(cls, name, bases, dct):
    """Create new Message class instance.

    The __new__ method of the _MessageClass type is overridden so as to
    allow the translation of Field instances to slots: field definitions
    found in the class dict are indexed by name and by number, and nested
    Enum/Message types are recorded in __enums__/__messages__.
    """
    by_number = {}
    by_name = {}

    variant_map = {}  # pylint:disable=unused-variable

    # bases == (object,) only for the Message base class itself, which
    # defines no fields; real message types derive from Message.
    if bases != (object,):
        # Can only define one level of sub-classes below Message.
        if bases != (Message,):
            raise MessageDefinitionError(
                'Message types may only inherit from Message')

        enums = []
        messages = []
        # Must not use iteritems because this loop will change the state of
        # dct.
        for key, field in dct.items():

            if key in _RESERVED_ATTRIBUTE_NAMES:
                continue

            # Nested Enum definitions are recorded, not treated as fields.
            if isinstance(field, type) and issubclass(field, Enum):
                enums.append(key)
                continue

            # Nested Message definitions likewise.
            if (isinstance(field, type) and
                    issubclass(field, Message) and
                    field is not Message):
                messages.append(key)
                continue

            # Reject anything that is not a field.  Note this also rejects
            # a direct Field instance (only concrete subclasses allowed).
            # pylint:disable=unidiomatic-typecheck
            if type(field) is Field or not isinstance(field, Field):
                raise MessageDefinitionError(
                    'May only use fields in message definitions.  '
                    'Found: %s = %s' %
                    (key, field))

            if field.number in by_number:
                raise DuplicateNumberError(
                    'Field with number %d declared more than once in %s' %
                    (field.number, name))

            # Fields learn their attribute name here.
            field.name = key

            # Place in name and number maps.
            by_name[key] = field
            by_number[field.number] = field

        # Add enums if any exist.
        if enums:
            dct['__enums__'] = sorted(enums)

        # Add messages if any exist.
        if messages:
            dct['__messages__'] = sorted(messages)

    # Stored under the mangled Message-private names so Message methods
    # (all_fields, field_by_name, ...) can reach them.
    dct['_Message__by_number'] = by_number
    dct['_Message__by_name'] = by_name

    return _DefinitionClass.__new__(cls, name, bases, dct)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __init__(self, **kwargs):
    """Initialize message state from keyword arguments.

    Fields may be assigned through the constructor.  For example::

        class Date(Message):
            day = IntegerField(1)
            month = IntegerField(2)
            year = IntegerField(3)

        date = Date(day=6, month=6, year=1911)

    is equivalent to constructing an empty Date and assigning day,
    month and year afterwards.
    """
    # Tag storage is an essential implementation detail; keep private.
    self.__tags = {}
    self.__unrecognized_fields = {}

    assigned = set(kwargs)
    for name, value in kwargs.items():
        setattr(self, name, value)

    # Repeated fields not explicitly provided start as empty lists.
    for field in self.all_fields():
        if field.repeated and field.name not in assigned:
            setattr(self, field.name, [])
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def is_initialized(self):
    """Report whether the message currently passes validation.

    Returns:
        True if message is valid, else False.
    """
    try:
        self.check_initialized()
    except ValidationError:
        return False
    return True
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def all_fields(cls):
    """Get all field definition objects.

    Ordering is arbitrary.

    Returns:
        Iterable over all field definitions, in arbitrary order.
    """
    return cls.__by_name.values()
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def field_by_name(cls, name):
    """Get the field definition with the given name.

    Returns:
        Field object associated with name.

    Raises:
        KeyError if no field found by that name.
    """
    return cls.__by_name[name]
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def field_by_number(cls, number):
    """Get the field definition with the given number.

    Returns:
        Field object associated with number.

    Raises:
        KeyError if no field found by that number.
    """
    return cls.__by_number[number]
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def reset(self, name):
    """Reset assigned value for field.

    Resetting a field will return it to its default value or None.

    Args:
        name: Name of field to reset.

    Raises:
        AttributeError: If the message has no field with that name.
    """
    message_type = type(self)
    try:
        field = message_type.field_by_name(name)
    except KeyError:
        # NOTE(review): field_by_name reads the same __by_name mapping,
        # so this guard always fires after a KeyError; if it somehow
        # did not, 'field' below would be unbound — confirm intended.
        if name not in message_type.__by_name:
            raise AttributeError('Message %s has no field %s' % (
                message_type.__name__, name))
    if field.repeated:
        # Repeated fields reset to a fresh empty validated list.
        self.__tags[field.number] = FieldList(field, [])
    else:
        # Scalar fields simply drop any stored value.
        self.__tags.pop(field.number, None)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def get_unrecognized_field_info(self, key, value_default=None,
                                variant_default=None):
    """Get the value and variant of an unknown field in this message.

    Args:
        key: The name or number of the field to retrieve.
        value_default: Value to be returned if the key isn't found.
        variant_default: Value to be returned as variant if the key
            isn't found.

    Returns:
        (value, variant), where value and variant are whatever was
        passed to set_unrecognized_field.
    """
    fallback = (value_default, variant_default)
    value, variant = self.__unrecognized_fields.get(key, fallback)
    return value, variant
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __setattr__(self, name, value):
    """Restrict assignment to declared fields and private internals.

    Messages may only be assigned values that are fields; the field is
    not validated at assignment time.

    Args:
        name: Name of field to assign to.
        value: Value to assign to field.

    Raises:
        AttributeError when trying to assign a value that is not a
        field.
    """
    # Reject anything that is neither a declared field nor one of the
    # Message-private bookkeeping attributes.
    if name not in self.__by_name and not name.startswith('_Message__'):
        raise AttributeError("May not assign arbitrary value %s "
                             "to message %s" % (name, type(self).__name__))
    object.__setattr__(self, name, value)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __eq__(self, other):
    """Equality operator.

    Does field-by-field comparison with the other message.  Equality
    requires the same type and equal values for all set fields; fields
    left at an unset default are not considered equal to fields that
    were explicitly set to the same value.  Unknown values are not
    compared.

    Args:
        other: Other message to compare with.
    """
    # TODO(rafek): Implement "equivalent" which does comparisons
    # taking default values in to consideration.
    if self is other:
        return True

    if type(other) is not type(self):
        return False

    return self.__tags == other.__tags
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __init__(self, field_instance, sequence):
    """Constructor.

    Args:
        field_instance: Instance of field that validates the list.
        sequence: List or tuple to construct list from.

    Raises:
        FieldDefinitionError: If the field is not repeated.
    """
    # Only repeated fields may own a FieldList.
    if not field_instance.repeated:
        raise FieldDefinitionError(
            'FieldList may only accept repeated fields')
    self.__field = field_instance
    field_instance.validate(sequence)
    list.__init__(self, sequence)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __setstate__(self, state):
    """Enable unpickling.

    Args:
        state: A 3-tuple containing:
            - The field instance, or None if it belongs to a Message
              class.
            - The Message class that the field instance belongs to, or
              None.
            - The field instance number of the Message class it belongs
              to, or None.
    """
    field_instance, message_class, number = state
    if field_instance is not None:
        self.__field = field_instance
    else:
        # Re-resolve the field from its owning Message class.
        self.__field = message_class.field_by_number(number)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def field(self):
    """Field instance that validates this list's elements."""
    return self.__field
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __setitem__(self, index, value):
    """Validate item (or slice) assignment before delegating to list."""
    # Slice assignment takes a whole sequence; index assignment takes
    # a single element, so they use different validators.
    if isinstance(index, slice):
        check = self.__field.validate
    else:
        check = self.__field.validate_element
    check(value)
    list.__setitem__(self, index, value)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def extend(self, sequence):
    """Validate the whole sequence, then extend the underlying list."""
    self.__field.validate(sequence)
    return list.extend(self, sequence)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __init__(cls, name, bases, dct):
    """Register each declared VARIANT as mapping to this field class."""
    variant_map = getattr(cls, '_Field__variant_to_type')
    for variant in dct.get('VARIANTS', []):
        variant_map[variant] = cls
    type.__init__(cls, name, bases, dct)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __init__(self, number, required=False, repeated=False,
             variant=None, default=None):
    """Constructor.

    The required and repeated parameters are mutually exclusive.
    Setting both to True will raise a FieldDefinitionError.

    Sub-class Attributes:
        Each sub-class of Field must define the following:
            VARIANTS: Set of variant types accepted by that field.
            DEFAULT_VARIANT: Default variant type if not specified in
                constructor.

    Args:
        number: Number of field.  Must be unique per message class.
        required: Whether or not field is required.  Mutually exclusive
            with 'repeated'.
        repeated: Whether or not field is repeated.  Mutually exclusive
            with 'required'.
        variant: Wire-format variant hint.
        default: Default value for field if not found in stream.

    Raises:
        InvalidVariantError when invalid variant for field is provided.
        InvalidDefaultError when invalid default for field is provided.
        FieldDefinitionError when invalid number provided or mutually
            exclusive fields are used.
        InvalidNumberError when the field number is out of range or
            reserved.
    """
    # Field numbers must be positive and within the protocol limit.
    if not isinstance(number, int) or not 1 <= number <= MAX_FIELD_NUMBER:
        raise InvalidNumberError(
            'Invalid number for field: %s\n'
            'Number must be 1 or greater and %d or less' %
            (number, MAX_FIELD_NUMBER))

    # A contiguous range of tag numbers is reserved by the protocol.
    if FIRST_RESERVED_FIELD_NUMBER <= number <= LAST_RESERVED_FIELD_NUMBER:
        raise InvalidNumberError('Tag number %d is a reserved number.\n'
                                 'Numbers %d to %d are reserved' %
                                 (number, FIRST_RESERVED_FIELD_NUMBER,
                                  LAST_RESERVED_FIELD_NUMBER))

    if repeated and required:
        raise FieldDefinitionError('Cannot set both repeated and required')

    if variant is None:
        variant = self.DEFAULT_VARIANT

    if repeated and default is not None:
        raise FieldDefinitionError('Repeated fields may not have defaults')

    if variant not in self.VARIANTS:
        raise InvalidVariantError(
            'Invalid variant: %s\nValid variants for %s are %r' %
            (variant, type(self).__name__, sorted(self.VARIANTS)))

    self.number = number
    self.required = required
    self.repeated = repeated
    self.variant = variant

    if default is not None:
        try:
            self.validate_default(default)
        except ValidationError as err:
            try:
                name = self.name
            except AttributeError:
                # For when raising error before name initialization.
                raise InvalidDefaultError(
                    'Invalid default value for %s: %r: %s' %
                    (self.__class__.__name__, default, err))
            else:
                raise InvalidDefaultError(
                    'Invalid default value for field %s: '
                    '%r: %s' % (name, default, err))

    self.__default = default
    # NOTE(review): presumably a Field.__setattr__ elsewhere checks this
    # flag to freeze the instance after construction — confirm.
    self.__initialized = True
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def __set__(self, message_instance, value):
    """Assign (or clear) this field's value on a message instance.

    Args:
        message_instance: Message instance to set value on.
        value: Value to set on message; None clears a non-repeated
            field.

    Raises:
        ValidationError: When assigning None to a repeated field or
            when the value fails validation.
    """
    # Reaches in to message instance directly to assign to private tags.
    tags = message_instance._Message__tags
    if value is None:
        if self.repeated:
            raise ValidationError(
                'May not assign None to repeated field %s' % self.name)
        tags.pop(self.number, None)
        return

    if self.repeated:
        tags[self.number] = FieldList(self, value)
    else:
        tags[self.number] = self.validate(value)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def validate_element(self, value):
    """Validate a single element of this field.

    This differs from validate in that it is applied to individual
    values of repeated fields.

    Args:
        value: Value to validate.

    Returns:
        The value cast to the expected type.

    Raises:
        ValidationError if value is not of the expected type.
    """
    if isinstance(value, self.type):
        return value

    # Authorize int values as float.
    if isinstance(value, six.integer_types) and self.type == float:
        return float(value)

    if value is None:
        if self.required:
            raise ValidationError('Required field is missing')
        # Optional fields pass None through unchanged.
        return value

    try:
        name = self.name
    except AttributeError:
        # Name may not be assigned yet (e.g. when validating a default
        # before the field is bound to a message class).
        raise ValidationError('Expected type %s for %s, '
                              'found %s (type %s)' %
                              (self.type, self.__class__.__name__,
                               value, type(value)))
    raise ValidationError(
        'Expected type %s for field %s, found %s (type %s)' %
        (self.type, name, value, type(value)))
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def validate(self, value):
    """Validate a value assigned to this field.

    Args:
        value: Value to validate.

    Returns:
        The value cast to the correct type.

    Raises:
        ValidationError if value is not of the expected type.
    """
    return self.__validate(value, self.validate_element)
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]