function: stringlengths (11 to 56k)
repo_name: stringlengths (5 to 60)
features: sequence
def test_delete_root_node(self):
    # make sure we can't delete a root node
    self.assertRaises(errors.NoPermission, self.vgw.delete_node, self.root.id)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_restore_node(self):
    """Test restore_node."""
    self.vgw.make_subdirectory(self.root.id, 'the dir name')
    a_file = self.vgw.make_file(self.root.id, 'file.txt')
    self.vgw.delete_node(a_file.id)
    self.assertRaises(errors.DoesNotExist, self.vgw.get_node, a_file.id)
    self.vgw.restore_node(a_file.id)
    node = self.vgw.get_node(a_file.id)
    self.assertEqual(node.id, a_file.id)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_get_uploadjob(self): """Test get_uploadjob.""" a_file = self.vgw.make_file(self.root.id, 'the file name') new_hash = self.factory.get_fake_hash() crc = 12345 size = 100 # expected failures f = self.vgw.make_uploadjob self.assertRaises( errors.DoesNotExist, f, uuid.uuid4(), a_file.content_hash, new_hash, crc, 300) self.assertRaises( errors.QuotaExceeded, f, a_file.id, a_file.content_hash, new_hash, crc, self.user.free_bytes + 1) self.assertRaises( errors.HashMismatch, f, a_file.id, 'WRONG OLD HASH', new_hash, crc, 300) upload_job = f(a_file.id, a_file.content_hash, new_hash, crc, size) self.assertEqual(upload_job.storage_object_id, a_file.id) self.assertEqual(upload_job.hash_hint, new_hash) self.assertEqual(upload_job.crc32_hint, crc) self.assertEqual(upload_job.status, STATUS_LIVE) self.assertEqual(upload_job.content_exists, False) self.assertEqual(upload_job.file.id, a_file.id) upload = self.vgw.get_uploadjob(upload_job.id) self.assertEqual(upload.id, upload_job.id) # make sure delete works self.vgw.delete_uploadjob(upload_job.id) self.assertRaises(errors.DoesNotExist, self.vgw.get_uploadjob, upload_job.id)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_get_user_uploadjobs(self): """Test get_user_uploadjobs.""" jobs = list(self.vgw.get_user_uploadjobs()) self.assertEqual(jobs, []) file1 = self.vgw.make_file(self.root.id, 'the file1 name') file2 = self.vgw.make_file(self.root.id, 'the file2 name') file3 = self.vgw.make_file(self.root.id, 'the file3 name') new_hash = self.factory.get_fake_hash() crc = 12345 size = 100 self.vgw.make_uploadjob( file1.id, file1.content_hash, new_hash, crc, size) self.vgw.make_uploadjob( file1.id, file1.content_hash, new_hash, crc, size) self.vgw.make_uploadjob( file2.id, file2.content_hash, new_hash, crc, size) jobs = list(self.vgw.get_user_uploadjobs()) self.assertEqual(len(jobs), 3) jobs = list(self.vgw.get_user_uploadjobs(node_id=file1.id)) self.assertEqual(len(jobs), 2) jobs = list(self.vgw.get_user_uploadjobs(node_id=file3.id)) self.assertEqual(jobs, [])
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_add_uploadjob_part(self):
    """Test add_uploadjob_part."""
    file1 = self.vgw.make_file(self.root.id, 'the file1 name')
    new_hash = self.factory.get_fake_hash()
    crc = 12345
    size = 100
    job = self.vgw.make_uploadjob(
        file1.id, file1.content_hash, new_hash, crc, size,
        multipart_key=uuid.uuid4())
    job = self.vgw.add_uploadjob_part(job.id, 10)
    self.assertEqual(job.uploaded_bytes, 10)
    self.assertEqual(job.chunk_count, 1)
    job = self.vgw.add_uploadjob_part(job.id, 10)
    self.assertEqual(job.uploaded_bytes, 20)
    self.assertEqual(job.chunk_count, 2)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_make_content_simple_handle_errors(self): """Test make_content.""" file_node = self.vgw.make_file(self.root.id, 'the file name') old_hash = file_node.content_hash file_id = file_node.id new_hash = self.factory.get_fake_hash() new_storage_key = uuid.uuid4() crc = 12345 size = 100 def_size = 10000 # if we don't provide a storage_key, a content_blob is expected self.assertRaises(errors.ContentMissing, self.vgw.make_content, file_id, old_hash, new_hash, crc, size, def_size, None) # the content hash must match self.assertRaises(errors.HashMismatch, self.vgw.make_content, file_id, 'YYYY', new_hash, crc, size, def_size, None) # after the upload job was created/started, the users quota was reduced self.tweak_users_quota(self.owner, size - 1) self.assertRaises(errors.QuotaExceeded, self.vgw.make_content, file_id, old_hash, new_hash, crc, size, def_size, new_storage_key)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_make_content_updates_contentblob(self): """Contentblob is updated if needed when making content.""" filenode = self.vgw.make_file(self.root.id, 'the file name') new_hash = self.factory.get_fake_hash() new_storage_key = uuid.uuid4() crc = 12345 size = 100 def_size = 10000 magic_hash = b'magic_hash' # call it without the magic hash, as before n = self.vgw.make_content(filenode.id, filenode.content_hash, new_hash, crc, size, def_size, new_storage_key) assert n.content.magic_hash is None # call it with the magic hash n = self.vgw.make_content(filenode.id, new_hash, new_hash, crc, size, def_size, new_storage_key, magic_hash) assert n.content.magic_hash == magic_hash # reload the file and make sure it was stored ok filenode._load(with_content=True) self.assertEqual(filenode.content.magic_hash, magic_hash)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_make_get_content(self): """Test the make and get content.""" hash = self.factory.get_fake_hash() key = uuid.uuid4() crc = 12345 size = 100 def_size = 10000 magic_hash = b'magic_hash' content = self.vgw._make_content(hash, crc, size, def_size, key, magic_hash) content = self.vgw.get_content(content.hash) self.assertEqual(content.hash, hash) self.assertEqual(content.crc32, crc) self.assertEqual(content.size, size) self.assertEqual(content.deflated_size, def_size) self.assertEqual(content.status, STATUS_LIVE) self.assertEqual(content.storage_key, key) self.assertEqual(content.magic_hash, magic_hash) # make it Dead ContentBlob.objects.filter(hash=hash).update(status=STATUS_DEAD) # dead content throws exception self.assertRaises(errors.DoesNotExist, self.vgw.get_content, hash) # IRL we should resurrect dead content? No we throw an exception! # So we should NEVER set a content blob to Dead self.assertRaises( IntegrityError, self.vgw._make_content, hash, crc, size, def_size, key, magic_hash)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_make_file_with_content(self): """Make file with contentblob. This is similar to the way the updown server creates a file. But it's all handled in one function after the upload. """ name = 'filename' hash = self.factory.get_fake_hash() storage_key = uuid.uuid4() crc = 12345 size = 100 deflated_size = 1 magic_hash = b'magic_hash' node = self.vgw.make_file_with_content( self.root.id, name, hash, crc, size, deflated_size, storage_key, mimetype='image/tif', magic_hash=magic_hash) a_file = self.vgw.get_node(node.id, with_content=True) self.assertEqual(a_file.name, 'filename') self.assertEqual(a_file.mimetype, 'image/tif') self.assertEqual(a_file.status, STATUS_LIVE) self.assertEqual(a_file.content.hash, hash) self.assertEqual(a_file.content.crc32, crc) self.assertEqual(a_file.content.size, size) self.assertEqual(a_file.content.deflated_size, deflated_size) self.assertEqual(a_file.content.storage_key, storage_key) self.assertEqual(a_file.content.magic_hash, magic_hash)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_make_file_with_content_overwrite(self):
    """Make file with contentblob and overwrite its existing content."""
    name = 'filename.tif'
    hash = self.factory.get_fake_hash()
    storage_key = uuid.uuid4()
    crc = 12345
    size = 100
    deflated_size = 10000
    magic_hash = b'magic_hash'
    f = self.vgw.make_file_with_content
    node1 = f(self.root.id, name, hash, crc, size, deflated_size,
              storage_key, mimetype='image/tif')
    newhash = self.factory.get_fake_hash('ZZZYYY')
    newstorage_key = uuid.uuid4()
    self.assertNotEqual(newhash, hash)
    self.assertNotEqual(newstorage_key, storage_key)
    f(self.root.id, name, newhash, crc, size, deflated_size,
      newstorage_key, magic_hash=magic_hash)
    node2 = self.vgw.get_node(node1.id, with_content=True)
    self.assertEqual(node1.id, node2.id)
    self.assertEqual(node2.content_hash, newhash)
    self.assertEqual(node2.content.storage_key, newstorage_key)
    self.assertEqual(node2.content.magic_hash, magic_hash)
    # test hashcheck when trying to overwrite with a wrong hash
    self.assertRaises(
        errors.HashMismatch, f, self.root.id, name, newhash, crc, size,
        deflated_size, newstorage_key,
        previous_hash=self.factory.get_fake_hash('ABC'))
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_make_file_with_content_enforces_quota(self):
    """Make file with contentblob enforces quota check (or not)."""
    name = 'filename'
    hash = self.factory.get_fake_hash()
    storage_key = uuid.uuid4()
    crc = 12345
    size = deflated_size = 10000
    f = self.vgw.make_file_with_content
    self.assertRaises(errors.QuotaExceeded, f,
                      self.root.id, name, hash, crc, size, deflated_size,
                      storage_key, enforce_quota=True)
    node = f(self.root.id, name, hash, crc, size, deflated_size,
             storage_key, mimetype='image/tif', enforce_quota=False)
    self.assertTrue(node is not None)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_move_node(self): """Test move_node method.""" root_id = self.vgw.get_root().id dira1 = self.vgw.make_subdirectory(root_id, 'dira1') dira2 = self.vgw.make_subdirectory(dira1.id, 'dira2') dira3 = self.vgw.make_subdirectory(dira2.id, 'dira3') dira4 = self.vgw.make_subdirectory(dira3.id, 'dira4') dira5 = self.vgw.make_subdirectory(dira4.id, 'dira5') dira6 = self.vgw.make_subdirectory(dira5.id, 'dira6') dirb1 = self.vgw.make_subdirectory(root_id, 'dirb1') dirb2 = self.vgw.make_subdirectory(dirb1.id, 'dirb2') dirb3 = self.vgw.make_subdirectory(dirb2.id, 'dirb3') dirb4 = self.vgw.make_subdirectory(dirb3.id, 'dirb4') dirb5 = self.vgw.make_subdirectory(dirb4.id, 'dirb5') self.vgw.make_subdirectory(dirb5.id, 'dirb6') self.assertRaises(errors.DoesNotExist, self.vgw.move_node, uuid.uuid4(), dira2.id, 'name') self.assertRaises(errors.DoesNotExist, self.vgw.move_node, dira1.id, uuid.uuid4(), 'name') self.assertRaises(errors.NoPermission, self.vgw.move_node, dira1.id, dira2.id, 'name') dira1 = self.vgw.move_node(dira1.id, dirb1.id, 'newname') self.assertEqual(dira1.parent_id, dirb1.id) self.assertEqual(dira1.name, 'newname') dira6 = self.vgw.get_node(dira6.id) path = posixpath.join( self.vgw.get_root().full_path, 'dirb1/newname/dira2/dira3/dira4/dira5') self.assertEqual(dira6.path, path) dirb2 = self.vgw.get_node(dirb2.id) # make sure moving with the same name deletes old node and descendants dirb4 = self.vgw.move_node(dirb4.id, dirb2.id, 'dirb3') self.assertEqual(dirb4.parent_id, dirb2.id) self.assertRaises(errors.DoesNotExist, self.vgw.get_node, dirb3.id) self.assertRaises(errors.DoesNotExist, self.vgw.get_node, dirb4.id) self.assertRaises(errors.DoesNotExist, self.vgw.get_node, dirb5.id)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_get_node_by_path(self): """Test get_node_by_path.""" root_id = self.root.id dir1 = self.vgw.make_subdirectory(root_id, 'dir1') file1 = self.vgw.make_file(root_id, 'file1.txt') dir2 = self.vgw.make_subdirectory(dir1.id, 'dir2') file2 = self.vgw.make_file(dir1.id, 'file2.mp3') dir3 = self.vgw.make_subdirectory(dir2.id, 'dir3') file3 = self.vgw.make_file(dir2.id, 'file3.mp3') # make sure content and mimetype still work correctly hash = self.factory.get_fake_hash() key = uuid.uuid4() self.vgw.make_file_with_content( dir3.id, 'file4.tif', hash, 123, 100, 1000, key, mimetype='image/tif') # filename with a space in it file5 = self.vgw.make_file(root_id, 'space! ') # the basic use cases r = self.vgw.get_node_by_path('/') self.assertEqual(r.id, root_id) d1 = self.vgw.get_node_by_path('/dir1') self.assertEqual(d1.id, dir1.id) f1 = self.vgw.get_node_by_path('/file1.txt') self.assertEqual(f1.id, file1.id) d2 = self.vgw.get_node_by_path('/dir1/dir2') self.assertEqual(d2.id, dir2.id) f2 = self.vgw.get_node_by_path('/dir1/file2.mp3') self.assertEqual(f2.id, file2.id) d3 = self.vgw.get_node_by_path( '/dir1/dir2/dir3', kind=StorageObject.DIRECTORY) self.assertEqual(d3.id, dir3.id) f3 = self.vgw.get_node_by_path('/dir1/dir2/file3.mp3') self.assertEqual(f3.id, file3.id) f5 = self.vgw.get_node_by_path('/space! ') self.assertEqual(f5.id, file5.id) # object accessable without inital / path d1 = self.vgw.get_node_by_path('dir1') self.assertEqual(d1.id, dir1.id) f1 = self.vgw.get_node_by_path('file1.txt', kind=StorageObject.FILE) self.assertEqual(f1.id, file1.id) d2 = self.vgw.get_node_by_path('dir1/dir2') self.assertEqual(d2.id, dir2.id) d3 = self.vgw.get_node_by_path('dir1/dir2/dir3') self.assertEqual(d3.id, dir3.id) # the standard node finder options work as well f4 = self.vgw.get_node_by_path('/dir1/dir2/dir3/file4.tif', with_content=True) self.assertEqual(f4.content.storage_key, key) # invalid paths get StorageErrors self.assertRaises(errors.StorageError, self.vgw.get_node_by_path, '') self.assertRaises(errors.StorageError, self.vgw.get_node_by_path, ' ') # test DoesNotExistCondtions self.assertRaises( errors.DoesNotExist, self.vgw.get_node_by_path, '/X..Y/') self.assertRaises( errors.DoesNotExist, self.vgw.get_node_by_path, '/dir1', kind=StorageObject.FILE) self.assertRaises( errors.DoesNotExist, self.vgw.get_node_by_path, '/file1.txt', kind=StorageObject.DIRECTORY)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_get_generation_delta_limit(self): """Test get generation delta.""" root_id = self.root.id # get the generation before we add all the nodes last_gen = self.vgw._get_user_volume().generation dir1 = self.vgw.make_subdirectory(root_id, 'dir1') file1 = self.vgw.make_file(root_id, 'file1.txt') dir2 = self.vgw.make_subdirectory(dir1.id, 'dir2') file2 = self.vgw.make_file(dir1.id, 'file2.mp3') dir3 = self.vgw.make_subdirectory(dir2.id, 'dir3') file3 = self.vgw.make_file(dir2.id, 'file3.mp3') self.vgw.delete_node(dir3.id) self.vgw.delete_node(file3.id) # get the first set (just one) delta = list(self.vgw.get_generation_delta(last_gen, 1)) self.assertEqual(len(delta), 1) self.assertTrue(dir1 in delta) # get the next set, should (the next 3) last_gen = delta[-1].generation delta = list(self.vgw.get_generation_delta(last_gen, 3)) self.assertEqual(len(delta), 3) self.assertTrue(file1 in delta) self.assertTrue(dir2 in delta) self.assertTrue(file2 in delta) last_gen = delta[-1].generation delta = list(self.vgw.get_generation_delta(last_gen, 100)) self.assertEqual(len(delta), 2) self.assertTrue(dir3 in delta) self.assertTrue(file3 in delta)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def make_file(i, n):
    return self.vgw.make_file_with_content(i, n, hash, 123, 1, 1, key)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def setup_volume(self):
    """Setup the volume used for this test case."""
    self.gw = SystemGateway()
    user = self.factory.make_user(max_storage_bytes=2000)
    self.user = self.gw.get_user(user.id, session_id='QWERTY')
    self.owner = self.user
    # make a test file in the database
    udf = self.factory.make_user_volume(
        owner=user, path='~/thepath/thename')
    udf_dao = DAOUserVolume(udf, self.user)
    self.vgw = ReadWriteVolumeGateway(self.user, udf=udf_dao)
    self.root = StorageObject.objects.get(id=self.vgw.get_root().id)
    self.file = self.root.make_file(
        'TheName', content_blob=self.factory.make_content_blob(),
        mimetype='fakemime')
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def setup_volume(self): """Setup the volume used for this test case.""" # create another user and share a folder with self.user sharer = make_storage_user( username='sharer', max_storage_bytes=2000) self.owner = sharer root = StorageObject.objects.get_root(sharer._user) rw_node = root.make_subdirectory('WriteMe') # share a node with the user with modify access rw_share = self.factory.make_share( subtree=rw_node, shared_to=self.user._user, name='WriteShare', access=Share.MODIFY) rw_share.accept() share_dao = SharedDirectory(rw_share, by_user=sharer) self.vgw = ReadWriteVolumeGateway(self.user, share=share_dao) self.root = StorageObject.objects.get(id=self.vgw.get_root().id) self.file = self.root.make_file( 'TheName', content_blob=self.factory.make_content_blob(), mimetype='fakemime')
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def setUp(self):
    super(RootReadWriteVolumeGatewayTestCase, self).setUp()
    self.gw = SystemGateway()
    user = self.factory.make_user()
    self.user = self.gw.get_user(user.id, session_id='QWERTY')
    # make a test file
    vgw = self.user._gateway.get_root_gateway()
    root = StorageObject.objects.get(id=vgw.get_root().id)
    self.file = root.make_file(
        'TheName', content_blob=self.factory.make_content_blob(),
        mimetype='fakemime')
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_make_share(self): """Test make_share.""" sharer = self.factory.make_user(username='sharer') vgw = ReadWriteVolumeGateway(self.user) a_dir = vgw.make_subdirectory(vgw.get_root().id, 'the dir') a_file = vgw.make_file(vgw.get_root().id, 'the file') # test some obvious error conditions # can't share files self.assertRaises( errors.NotADirectory, vgw.make_share, a_file.id, 'hi', user_id=sharer.id) # user doesn't exist self.assertRaises( errors.DoesNotExist, vgw.make_share, a_dir.id, 'hi', user_id=sharer.id + 1) share = vgw.make_share(a_dir.id, 'hi', user_id=sharer.id) self.assertEqual(share.root_id, a_dir.id) self.assertEqual(share.accepted, False) self.assertEqual(share.shared_to_id, sharer.id) self.assertNotEqual(share.when_shared, None) self.assertNotEqual(share.when_last_changed, None)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_undelete_volume(self):
    """Test Undelete Volume."""
    vgw = ReadWriteVolumeGateway(self.user)
    d2 = vgw.undelete_volume('recovered')
    d = vgw.make_subdirectory(vgw.get_root().id, 'the dir')
    vgw.delete_node(d.id)
    d2 = vgw.undelete_volume('recovered')
    vgw.delete_node(d2.id, cascade=True)
    vgw.undelete_volume('recovered')
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def setUp(self):
    super(UDFReadWriteVolumeGatewayTestCase, self).setUp()
    self.gw = SystemGateway()
    db_user = self.factory.make_user()
    self.user = self.gw.get_user(db_user.id, session_id='QWERTY')
    # make a test file in the database
    self.udf = self.factory.make_user_volume(
        owner=db_user, path='~/thepath/thename')
    udf_dao = DAOUserVolume(self.udf, self.user)
    self.vgw = ReadWriteVolumeGateway(self.user, udf=udf_dao)
    self.root = StorageObject.objects.get(id=self.vgw.get_root().id)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_make_share(self):
    """Test make_share."""
    user = self.factory.make_user(username='sharer')
    a_dir = self.vgw.make_subdirectory(self.root.id, 'the dir')
    share = self.vgw.make_share(a_dir.id, 'hi', user_id=user.id)
    self.assertEqual(share.root_id, a_dir.id)
    self.assertEqual(share.accepted, False)
    self.assertEqual(share.shared_to_id, user.id)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_make_share_offer(self):
    """Test make_share."""
    a_dir = self.vgw.make_subdirectory(self.root.id, 'the dir')
    share = self.vgw.make_share(a_dir.id, 'hi', user_id=None,
                                email='[email protected]')
    self.assertEqual(share.root_id, a_dir.id)
    self.assertEqual(share.accepted, False)
    self.assertEqual(share.shared_to_id, None)
    self.assertEqual(share.offered_to_email, '[email protected]')
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_undelete_volume(self):
    """Test Undelete Volume from a UDF."""
    udf = self.udf
    udf_dao = DAOUserVolume(udf, self.user)
    vgw = ReadWriteVolumeGateway(self.user, udf=udf_dao)
    d = vgw.make_file(udf.root_node.id, 'thefile.txt')
    vgw.delete_node(d.id)
    vgw.undelete_volume('recovered')
    rgw = ReadWriteVolumeGateway(self.user)
    node = rgw.get_node(d.id)
    self.assertEqual(d.volume_id, udf.id)
    self.assertEqual(node.volume_id, udf.id)
    self.assertEqual(node.full_path, '/recovered/thefile.txt')
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def setUp(self): super(ShareGatewayTestCase, self).setUp() self.gw = SystemGateway() user = self.factory.make_user() self.user = self.gw.get_user(user.id, session_id='QWERTY') self.sharer = make_storage_user(username='sharer') self.othersharee = self.factory.make_user(username='sharee') root = StorageObject.objects.get_root(self.sharer._user) self.r_node = root.make_subdirectory('NoWrite') self.file = self.r_node.make_file('A File for uploads') self.rw_node = root.make_subdirectory('WriteMe') self.r_share = self.factory.make_share( subtree=self.r_node, shared_to=user, name='NoWriteShare', access=Share.VIEW) self.rw_share = self.factory.make_share( subtree=self.rw_node, shared_to=user, name='WriteShare', access=Share.MODIFY) self.r_share.accept() self.rw_share.accept()
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_user_gateway_get_shared_to(self):
    """Test UserGateway get_shared_to methods."""
    shares = self.user._gateway.get_shared_to()
    shares = list(shares)
    self.assertEqual(len(shares), 2)
    # this user only has shares shared to him
    self.assertEqual(shares[0].shared_by.id, self.sharer.id)
    self.assertEqual(shares[1].shared_by.id, self.sharer.id)
    shares = self.sharer._gateway.get_shared_to()
    shares = list(shares)
    self.assertEqual(shares, [])
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_user_gateway_get_share(self): """Test UserGateway get_share methods.""" wrong_share = self.factory.make_share( subtree=self.rw_node, shared_to=self.othersharee, name='WriteShare', access=Share.VIEW) # test with a share offer shareoffer = self.factory.make_share( subtree=self.r_node, name='offer', email='[email protected]') so = self.sharer._gateway.get_share(shareoffer.id, accepted_only=False) self.assertEqual(so.id, shareoffer.id) self.assertEqual(so.shared_to, None) self.assertEqual(so.shared_by.id, self.sharer.id) self.assertEqual(so.offered_to_email, shareoffer.email) rw_share = self.user._gateway.get_share(self.rw_share.id) self.assertEqual(rw_share.id, self.rw_share.id) self.assertEqual(rw_share.shared_to.id, self.user.id) self.assertEqual(rw_share.shared_by.id, self.sharer.id) r_share = self.user._gateway.get_share(self.r_share.id) self.assertEqual(r_share.id, self.r_share.id) self.assertEqual(r_share.shared_to.id, self.user.id) self.assertEqual(r_share.shared_by.id, self.sharer.id) self.assertRaises(errors.DoesNotExist, self.user._gateway.get_share, wrong_share.id)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_get_share_gateway(self):
    """Test the get_share_gateway method of a StorageUserGateway."""
    vgw = self.user._gateway.get_share_gateway(self.r_share.id)
    self.assertEqual(vgw.user, self.user)
    self.assertEqual(vgw.owner.id, self.sharer.id)
    self.assertEqual(vgw.root_id, self.r_share.subtree.id)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_share_not_accepted(self):
    """Shares that exist but are not accepted don't work."""
    self.r_share.accepted = False
    share_dao = SharedDirectory(self.r_share, by_user=self.sharer)
    self.assertRaises(errors.NoPermission,
                      ReadWriteVolumeGateway, self.user, share=share_dao)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_readonly_share_fail(self):
    """Test make sure updates can't happen on a read only share"""
    share_dao = SharedDirectory(self.r_share, by_user=self.sharer)
    vgw = ReadWriteVolumeGateway(self.user, share=share_dao)
    root_id = vgw.get_root().id
    self.assertRaises(errors.NoPermission, vgw.make_file, root_id, 'name')
    self.assertRaises(errors.NoPermission,
                      vgw.make_subdirectory, root_id, 'name')
    a_file = self.r_node.make_file('filename')
    dira = self.r_node.make_subdirectory('dirname')
    self.assertRaises(errors.NoPermission, vgw.delete_node, dira.id)
    self.assertRaises(errors.NoPermission, vgw.restore_node, dira.id)
    self.assertRaises(errors.NoPermission,
                      vgw.move_node, a_file.id, root_id, 'new name')
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_node_not_on_volume(self): """Test to make sure nodes can't be retreived if they are not on the volume.""" share_dao = SharedDirectory(self.rw_share, by_user=self.sharer) sgw = ReadWriteVolumeGateway(self.user, share=share_dao) self.assertRaises(errors.DoesNotExist, sgw.make_subdirectory, uuid.uuid4(), 'name') self.assertRaises(errors.DoesNotExist, sgw.make_subdirectory, uuid.uuid4(), 'name') # now create some files from the sharer but are not in the shared # directory vgw = self.user._gateway.get_root_gateway() a_dir = vgw.make_subdirectory(vgw.get_root().id, 'DirName') a_file = vgw.make_file(a_dir.id, 'TheName') # we shouldn't be able to use these nodes on the share gateway self.assertRaises(errors.DoesNotExist, sgw.get_node, a_dir.id) self.assertRaises(errors.DoesNotExist, sgw.get_node, a_file.id) self.assertRaises(errors.DoesNotExist, sgw.make_subdirectory, a_dir.id, 'name') self.assertRaises(errors.DoesNotExist, sgw.make_subdirectory, a_file.id, 'name') self.assertRaises(errors.DoesNotExist, sgw.make_file, a_dir.id, 'name') self.assertRaises(errors.DoesNotExist, sgw.make_file, a_file.id, 'name')
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_public_from_share_error(self):
    """Users cant make public files shared."""
    share_dao = SharedDirectory(self.rw_share, by_user=self.sharer)
    sgw = ReadWriteVolumeGateway(self.user, share=share_dao)
    a_file = sgw.make_file(sgw.get_root().id, 'hi')
    self.assertRaises(errors.NoPermission,
                      sgw.change_public_access, a_file.id, True)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_get_all_nodes_with_max_generation(self): """Test get_all_nodes with max_generation.""" share_dao = SharedDirectory(self.rw_share, by_user=self.sharer) sgw = ReadWriteVolumeGateway(self.user, share=share_dao) root = sgw.get_root() root_id = root.id # make dirs and put files in them dirs = [] files = [] for i in range(10): d = sgw.make_subdirectory(root_id, '%sd' % i) f = sgw.make_file(d.id, '%sfile.txt' % i) dirs.append(d) files.append(f) nodes = dirs + files nodes.append(root) # sort them in the right order nodes.sort(key=attrgetter('path', 'name')) # with a max_generation in the middle nodes_gen_10 = sgw.get_all_nodes(max_generation=10) self.assertEqual(nodes_gen_10, [n for n in nodes if n.generation <= 10]) # with the last generation nodes_gen_20 = sgw.get_all_nodes( max_generation=nodes[-1].generation) self.assertEqual(nodes_gen_20, nodes)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_get_all_nodes_with_max_generation_and_limit(self): """Test get_all_nodes with max_generation and limit.""" share_dao = SharedDirectory(self.rw_share, by_user=self.sharer) sgw = ReadWriteVolumeGateway(self.user, share=share_dao) root = sgw.get_root() root_id = root.id # make dirs and put files in them dirs = [] files = [] for i in range(10): d = sgw.make_subdirectory(root_id, '%sd' % i) f = sgw.make_file(d.id, '%sfile.txt' % i) dirs.append(d) files.append(f) all_nodes = dirs + files all_nodes.append(root) # sort them in the right order all_nodes.sort(key=attrgetter('path', 'name')) # with max_generation and limit # first get all the nodes at gen_10 nodes_gen_10 = sgw.get_all_nodes(max_generation=10) # now get them in chunks nodes_limit_5 = sgw.get_all_nodes(max_generation=10, limit=5) self.assertEqual(nodes_limit_5, nodes_gen_10[:5]) last_node = nodes_limit_5[-1] nodes_limit_5 += sgw.get_all_nodes( start_from_path=(last_node.path, last_node.name), max_generation=10, limit=5) self.assertEqual(nodes_limit_5, nodes_gen_10[:10]) last_node = nodes_limit_5[-1] nodes_limit_5 += sgw.get_all_nodes( start_from_path=(last_node.path, last_node.name), max_generation=10, limit=5) self.assertEqual(nodes_limit_5, nodes_gen_10) # same but with the last generation nodes_20 = [n for n in all_nodes if n.generation <= 20] nodes_limit_10 = sgw.get_all_nodes(max_generation=20, limit=10) self.assertEqual(nodes_limit_10, nodes_20[:10]) last_node = nodes_limit_10[-1] nodes_limit_10 += sgw.get_all_nodes( start_from_path=(last_node.path, last_node.name), max_generation=20, limit=10) self.assertEqual(nodes_limit_10, nodes_20[:20]) last_node = nodes_limit_10[-1] nodes_limit_10 += sgw.get_all_nodes( start_from_path=(last_node.path, last_node.name), max_generation=20, limit=10) self.assertEqual(nodes_limit_10, nodes_20)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_get_all_nodes_chunked_with_changes(self): """Test chunked get_all_nodes with changes in the middle.""" share_dao = SharedDirectory(self.rw_share, by_user=self.sharer) sgw = ReadWriteVolumeGateway(self.user, share=share_dao) root = sgw.get_root() root_id = root.id # make dirs and put files in them dirs = [] files = [] for i in range(10): d = sgw.make_subdirectory(root_id, '%sd' % i) f = sgw.make_file(d.id, '%sfile.txt' % i) dirs.append(d) files.append(f) all_nodes = dirs + files all_nodes.append(root) # sort them in the right order all_nodes.sort(key=attrgetter('path', 'name')) # filter nodes until gen 19 nodes = [n for n in all_nodes if n.generation <= 19] nodes_gen_19 = sgw.get_all_nodes(max_generation=19) self.assertEqual(nodes_gen_19, nodes) # same but with the last generation nodes_limit_5 = sgw.get_all_nodes(max_generation=19, limit=5) self.assertEqual(nodes_limit_5, nodes[:5]) # now make some changes, # create a new dir. new_dir = sgw.make_subdirectory(root_id, '01d') last_node = nodes_limit_5[-1] nodes_limit_5 += sgw.get_all_nodes( start_from_path=(last_node.path, last_node.name), max_generation=19, limit=5) self.assertEqual(nodes_limit_5, nodes[:10]) # create a new file inside a existing dir. sgw.make_file(nodes[5].id, '0_new_file.txt') last_node = nodes_limit_5[-1] nodes_limit_5 += sgw.get_all_nodes( start_from_path=(last_node.path, last_node.name), max_generation=19, limit=5) self.assertEqual(nodes_limit_5, nodes[:15]) # and also make a move to_move = nodes[10:15][2] sgw.move_node(to_move.id, new_dir.id, to_move.name) last_node = nodes_limit_5[-1] # get the rest of the result. nodes_limit_5 += sgw.get_all_nodes( start_from_path=(last_node.path, last_node.name), max_generation=19) self.assertEqual(nodes_limit_5, nodes_gen_19) # now move a node that should have been in the result, # but before getting to it. to_move = nodes_gen_19[-1] sgw.move_node(to_move.id, nodes_gen_19[0].id, to_move.name) last_node = nodes_gen_19[-5] result = sgw.get_all_nodes( start_from_path=(last_node.path, last_node.name), max_generation=19) # check that it wasn't included in the result. self.assertEqual(result, nodes_gen_19[-4:-1])
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_get_all_nodes_chunked_only_root(self):
    """Test chunked get_all_nodes with only one node, the root."""
    share_dao = SharedDirectory(self.rw_share, by_user=self.sharer)
    sgw = ReadWriteVolumeGateway(self.user, share=share_dao)
    # sort them in the right order
    nodes = sgw.get_all_nodes()
    self.assertEqual(len(nodes), 1)
    # same but with the last generation
    nodes_limit_5 = sgw.get_all_nodes(
        start_from_path=(nodes[-1].path, nodes[-1].name),
        max_generation=10, limit=5)
    self.assertEqual(nodes_limit_5, [])
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def setUp(self):
    super(GenerationsTestCase, self).setUp()
    self.user = make_storage_user(username='testuser')
    # make a test file
    self.user_gw = StorageUserGateway(self.user)
    self.vgw = self.user_gw.get_root_gateway()
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def volume(self):
    return self.user._user.root_node.volume
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_make_subdirectory(self):
    """Test make_subdirectory increments generation."""
    self.assertEqual(self.volume.generation, 0)
    node = self.vgw.make_subdirectory(self.vgw.get_root().id, 'The Dir')
    self.assertEqual(node.generation, 1)
    self.assertEqual(self.volume.generation, 1)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_restore_file(self):
    """Test delete increments generation."""
    self.assertEqual(self.volume.generation, 0)
    node = self.vgw.make_file(self.vgw.get_root().id, 'The FIle')
    node = self.vgw.delete_node(node.id)
    node = self.vgw.restore_node(node.id)
    self.assertEqual(node.generation, 3)
    self.assertEqual(node.generation_created, 1)
    self.assertEqual(self.volume.generation, 3)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_make_file_with_content(self):
    """Test make_file_with_content increments generation."""
    name = 'filename'
    a_hash = self.factory.get_fake_hash()
    storage_key = uuid.uuid4()
    crc = 12345
    size = 100
    deflated_size = 10000
    a_file = self.vgw.make_file_with_content(
        self.vgw.get_root().id, name, a_hash, crc, size, deflated_size,
        storage_key)
    self.assertEqual(self.volume.generation, 2)
    self.assertEqual(a_file.generation, 2)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_change_public_access(self):
    """Test change public access."""
    a_file = self.vgw.make_file(self.vgw.get_root().id, 'the file name')
    a_file = self.vgw.change_public_access(a_file.id, True)
    f1 = StorageObject.objects.get(id=a_file.id)
    self.assertEqual(f1.generation, 2)
    self.assertEqual(f1.generation_created, 1)
    self.assertIsNotNone(f1.public_uuid)
    # make sure the uuid is still saved despite other changes
    a_file = self.vgw.change_public_access(a_file.id, False)
    f1 = StorageObject.objects.get(id=a_file.id)
    self.assertIsNone(f1.public_uuid)
    a_file = self.vgw.change_public_access(a_file.id, True)
    f1 = StorageObject.objects.get(id=a_file.id)
    self.assertIsNotNone(f1.public_uuid)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_deltas_for_shares_with_moves(self): """Test deltas for shares where nodes have been moved out of them.""" # root nodes root = self.vgw.get_root() r_dir = self.vgw.make_subdirectory(root.id, 'dir') r_subdir = self.vgw.make_subdirectory(r_dir.id, 'subdir') r_file1 = self.vgw.make_file(r_dir.id, 'file.txt') # user user2 = make_storage_user(username='shareuser') share = self.vgw.make_share(r_dir.id, 'Shared', user2.id) user2_gw = StorageUserGateway(user2) share = user2_gw.accept_share(share.id) user_share_gw = user2_gw.get_volume_gateway(share=share) sh_delta = list(user_share_gw.get_generation_delta(0)) sh_subdir = user_share_gw.get_node(r_subdir.id) sh_file1 = user_share_gw.get_node(r_file1.id) # just the file is in there now self.assertEqual(len(sh_delta), 2) self.assertTrue(sh_subdir in sh_delta) self.assertTrue(sh_file1 in sh_delta) # after a move out of the share, the file will still be in the delta # but it will be Dead self.vgw.move_node(r_file1.id, root.id, r_file1.name) sh_delta = list(user_share_gw.get_generation_delta(0)) self.assertEqual(len(sh_delta), 2) self.assertTrue(sh_subdir in sh_delta) self.assertTrue(sh_file1 in sh_delta) self.assertTrue(sh_delta[1].id, sh_file1.id) self.assertTrue(sh_delta[1].status, STATUS_DEAD) # moving it back into the share in a different folder. # the file will be in the delta Live r_subdir2 = self.vgw.make_subdirectory(r_dir.id, 'subdir2') sh_subdir2 = user_share_gw.get_node(r_subdir2.id) self.vgw.move_node(r_file1.id, r_subdir2.id, r_file1.name) sh_delta = list(user_share_gw.get_generation_delta(0)) self.assertEqual(len(sh_delta), 3) self.assertTrue(sh_subdir in sh_delta) self.assertTrue(sh_subdir2 in sh_delta) self.assertTrue(sh_file1 in sh_delta) self.assertTrue(sh_delta[2].id, sh_file1.id) self.assertTrue(sh_delta[2].status, STATUS_LIVE) # moving it within the share to the root. # the file will be in the delta Live self.vgw.move_node(r_file1.id, r_subdir.id, r_file1.name) sh_delta = list(user_share_gw.get_generation_delta(0)) self.assertEqual(len(sh_delta), 3) self.assertTrue(sh_subdir in sh_delta) self.assertTrue(sh_subdir2 in sh_delta) self.assertTrue(sh_file1 in sh_delta) self.assertTrue(sh_delta[2].id, sh_file1.id) self.assertTrue(sh_delta[2].status, STATUS_LIVE)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def setUp(self):
    """Set up."""
    super(MetricsTestCase, self).setUp()
    self.user = make_storage_user(username='user1')
    self.gw = ReadWriteVolumeGateway(self.user)
    # put a recorder in the middle to see what was informed
    self.informed = []
    timing_metric.reporter.timing = lambda *a: self.informed.append(a)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def _check(func):
    """Check func for decorator."""
    closures = func.__closure__
    # if it doesn't have any closures, it's not decorated at all
    if closures is None:
        return False
    for closure in closures:
        # if the cell content is the searched decorator, we're done
        if closure.cell_contents is decorator:
            return True
        # if the cell content is other function, need to go deeper
        if isinstance(closure.cell_contents, types.FunctionType):
            if _check(closure.cell_contents):
                # found the searched decorator in a deeper layer!
                return True
    else:
        return False
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_supervised_methods_volume_gateways(self):
    """Assure all these methods are supervised by timing decorator."""
    superv = [
        'get_root', 'get_user_volume', 'get_generation_delta', 'get_node',
        'get_node_by_path', 'get_all_nodes', 'get_deleted_files',
        'get_children', 'get_child_by_name', 'get_content',
        'get_uploadjob', 'get_user_uploadjobs',
        'get_user_multipart_uploadjob', 'get_directories_with_mimetypes',
        'check_has_children', 'make_file', 'make_subdirectory',
        'make_tree', 'make_share', 'delete_node', 'restore_node',
        'move_node', 'make_uploadjob', 'make_file_with_content',
        'make_content', 'delete_uploadjob', 'add_uploadjob_part',
        'set_uploadjob_when_last_active', 'change_public_access',
        'get_node_parent_ids', 'undelete_volume',
    ]
    for methname in superv:
        meth = getattr(self.gw, methname)
        self.assertTrue(self._is_decorated(meth, timing_metric),
                        '%r is not decorated' % (methname,))
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_call_simple_ok(self):
    """Supervise a successful operation and see all is reported ok."""
    self.gw.get_root()
    informed = self.informed[0]
    self.assertEqual(informed[0], 'get_root')
    self.assertTrue(isinstance(informed[1], float))
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_call_complex(self):
    """Supervise an op with args and see all is reported ok."""
    root_id = self.gw.get_root().id
    self.gw.make_file(root_id, 'filename')
    self.gw.get_child_by_name(root_id, 'filename', with_content=False)
    for informed in self.informed:
        if informed[0] == 'get_child_by_name':
            self.assertTrue(isinstance(informed[1], float))
            break
    else:
        self.fail('Timing was not informed for this method.')
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_call_on_shared(self): """Supervise an operation that is being made on a share.""" # create other user other_user = make_storage_user(username='user2') # make a share from default user to other user and accept it root_id = self.gw.get_root().id share = self.gw.make_share( root_id, 'hi', user_id=other_user.id, readonly=False) share = StorageUserGateway(other_user).accept_share(share.id) # do the operation in the gw from the share shared_gw = ReadWriteVolumeGateway(other_user, share=share) shared_gw.make_file(root_id, 'filename') # check that the metric informed is storage, from default user, # as she is the owner of the volume where the node was created for informed in self.informed: if informed[0] == 'make_file': self.assertIsInstance(informed[1], float) break else: self.fail('Timing was not informed for this method.')
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def test_fix_udfs_with_gen_out_of_sync(self):
    obj = self.factory.make_file()
    obj.generation = obj.volume.generation + 1
    obj.save()
    user2 = self.factory.make_user()
    obj2 = self.factory.make_file(owner=user2)
    obj3 = self.factory.make_file(owner=user2)
    obj2.generation = obj2.volume.generation + 2
    obj2.save()
    obj3.generation = obj3.volume.generation + 1
    user_ids = [obj.volume.owner.id, user2.id]
    fix_udfs_with_generation_out_of_sync(user_ids, logging)
    obj.volume.refresh_from_db()
    self.assertEqual(obj.generation, obj.volume.generation)
    obj2.volume.refresh_from_db()
    self.assertEqual(obj2.generation, obj2.volume.generation)
magicicada-bot/magicicada-server
[ 6, 1, 6, 4, 1441315733 ]
def __init__(self, question, **options):
    super().__init__(question, **options)
    self.pos = options.get("pos")
    self.rotate = options.get("rotate")
    self.radius = options.get("radius")
    self.color = options.get("color")
    self.explode = options.get("explode")
    self.sum = options.get("sum")
    self.after_number = options.get("after_number")
    self.before_number = options.get("before_number")
    self.scale_font = options.get("scale_font")
    self.text = options.get("text")
    self.style = options.get("style")
    self.type = options.get("type")
    # This permits labelling multiple charts correctly, so we do not
    # end up with the same label for each chart
    self.latex_label = options.get("latex_label", 1)
Pierre-Sassoulas/django-survey
[ 191, 133, 191, 35, 1489572975 ]
def get_results(self):
    """Return a formatted string for a tikz pgf-pie chart."""
    pie = ""
    for answer, cardinality in list(self.cardinality.items()):
        if not answer:
            ans = _("Left blank")
        else:
            ans = Question2Tex.get_clean_answer(answer)
        pie += f"{cardinality}/{ans},"
    if not pie:
        return ""
    final_answers = []
    for answer in pie.split(","):
        if answer:
            final_answers.append(answer)
    return " {}".format(",\n ".join(final_answers))
Pierre-Sassoulas/django-survey
[ 191, 133, 191, 35, 1489572975 ]
def get_caption_specifics(self):
    return "{} '{}' ".format(
        _("for the question"), Question2Tex.html2latex(self.question.text))
Pierre-Sassoulas/django-survey
[ 191, 133, 191, 35, 1489572975 ]
def setUp(self):
    super(UserAPITestCase, self).setUp()
    self.anonymous_client = APIClient()
    self.different_user = UserFactory.create(password=TEST_PASSWORD)
    self.different_client = APIClient()
    self.staff_user = UserFactory(is_staff=True, password=TEST_PASSWORD)
    self.staff_client = APIClient()
    self.user = UserFactory.create(password=TEST_PASSWORD)  # will be assigned to self.client by default
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def send_patch(self, client, json_data, content_type="application/merge-patch+json", expected_status=200):
    """
    Helper method for sending a patch to the server, defaulting to application/merge-patch+json content_type.
    Verifies the expected status and returns the response.
    """
    # pylint: disable=no-member
    response = client.patch(self.url, data=json.dumps(json_data), content_type=content_type)
    self.assertEqual(expected_status, response.status_code)
    return response
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def send_put(self, client, json_data, content_type="application/json", expected_status=204):
    """
    Helper method for sending a PUT to the server. Verifies the expected status and returns the response.
    """
    response = client.put(self.url, data=json.dumps(json_data), content_type=content_type)
    self.assertEqual(expected_status, response.status_code)
    return response
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def send_delete(self, client, expected_status=204):
    """
    Helper method for sending a DELETE to the server. Verifies the expected status and returns the response.
    """
    response = client.delete(self.url)
    self.assertEqual(expected_status, response.status_code)
    return response
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def _verify_profile_image_data(self, data, has_profile_image): """ Verify the profile image data in a GET response for self.user corresponds to whether the user has or hasn't set a profile image. """ template = '{root}/{filename}_{{size}}.{extension}' if has_profile_image: url_root = 'http://example-storage.com/profile-images' filename = hashlib.md5(('secret' + self.user.username).encode('utf-8')).hexdigest() file_extension = 'jpg' template += '?v={}'.format(TEST_PROFILE_IMAGE_UPLOADED_AT.strftime("%s")) else: url_root = 'http://testserver/static' filename = 'default' file_extension = 'png' template = template.format(root=url_root, filename=filename, extension=file_extension) self.assertEqual( data['profile_image'], { 'has_image': has_profile_image, 'image_url_full': template.format(size=50), 'image_url_small': template.format(size=10), } )
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def setUp(self):
    super(TestOwnUsernameAPI, self).setUp()
    self.url = reverse("own_username_api")
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_get_username(self):
    """
    Test that a client (logged in) can get her own username.
    """
    self.client.login(username=self.user.username, password=TEST_PASSWORD)
    self._verify_get_own_username(16)
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_get_username_not_logged_in(self):
    """
    Test that a client (not logged in) gets a 401 when trying to retrieve their username.
    """
    # verify that the endpoint is inaccessible when not logged in
    self._verify_get_own_username(13, expected_status=401)
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def setUp(self):
    super(TestAccountsAPI, self).setUp()
    self.url = reverse("accounts_api", kwargs={'username': self.user.username})
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def _verify_full_shareable_account_response(self, response, account_privacy=None, badges_enabled=False): """ Verify that the shareable fields from the account are returned """ data = response.data self.assertEqual(12, len(data)) # public fields (3) self.assertEqual(account_privacy, data["account_privacy"]) self._verify_profile_image_data(data, True) self.assertEqual(self.user.username, data["username"]) # additional shareable fields (8) self.assertEqual(TEST_BIO_VALUE, data["bio"]) self.assertEqual("US", data["country"]) self.assertIsNotNone(data["date_joined"]) self.assertEqual([{"code": TEST_LANGUAGE_PROFICIENCY_CODE}], data["language_proficiencies"]) self.assertEqual("m", data["level_of_education"]) self.assertIsNotNone(data["social_links"]) self.assertIsNone(data["time_zone"]) self.assertEqual(badges_enabled, data['accomplishments_shared'])
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def _verify_full_account_response(self, response, requires_parental_consent=False, year_of_birth=2000): """ Verify that all account fields are returned (even those that are not shareable). """ data = response.data self.assertEqual(25, len(data)) # public fields (3) expected_account_privacy = ( PRIVATE_VISIBILITY if requires_parental_consent else UserPreference.get_value(self.user, 'account_privacy') ) self.assertEqual(expected_account_privacy, data["account_privacy"]) self._verify_profile_image_data(data, not requires_parental_consent) self.assertEqual(self.user.username, data["username"]) # additional shareable fields (8) self.assertEqual(TEST_BIO_VALUE, data["bio"]) self.assertEqual("US", data["country"]) self.assertIsNotNone(data["date_joined"]) self.assertEqual([{"code": TEST_LANGUAGE_PROFICIENCY_CODE}], data["language_proficiencies"]) self.assertEqual("m", data["level_of_education"]) self.assertIsNotNone(data["social_links"]) self.assertEqual(UserPreference.get_value(self.user, 'time_zone'), data["time_zone"]) self.assertIsNotNone(data["accomplishments_shared"]) self.assertEqual(self.user.first_name + " " + self.user.last_name, data["name"]) # additional admin fields (10) self.assertEqual(self.user.email, data["email"]) self.assertIsNotNone(data["extended_profile"]) self.assertEqual("MA", data["state"]) self.assertEqual("f", data["gender"]) self.assertEqual("world peace", data["goals"]) self.assertTrue(data["is_active"]) self.assertEqual("Park Ave", data['mailing_address']) self.assertEqual(requires_parental_consent, data["requires_parental_consent"]) self.assertIsNone(data["secondary_email"]) self.assertIsNone(data["secondary_email_enabled"]) self.assertEqual(year_of_birth, data["year_of_birth"])
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_unsupported_methods(self):
    """
    Test that DELETE, POST, and PUT are not supported.
    """
    self.client.login(username=self.user.username, password=TEST_PASSWORD)
    self.assertEqual(405, self.client.put(self.url).status_code)
    self.assertEqual(405, self.client.post(self.url).status_code)
    self.assertEqual(405, self.client.delete(self.url).status_code)
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_get_account_unknown_user(self, api_client, user):
    """
    Test that requesting a user who does not exist returns a 404.
    """
    client = self.login_client(api_client, user)
    response = client.get(reverse("accounts_api", kwargs={'username': "does_not_exist"}))
    self.assertEqual(404, response.status_code)
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_get_account_by_email(self, api_client, user):
    """
    Test that requesting a user email search works.
    """
    client = self.login_client(api_client, user)
    self.create_mock_profile(self.user)
    set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, PRIVATE_VISIBILITY)
    response = self.send_get(client, query_parameters='email={}'.format(self.user.email))
    self._verify_full_account_response(response)
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_get_account_different_user_visible(self):
    """
    Test that a client (logged in) can only get the shareable fields for a different user.
    This is the case when default_visibility is set to "all_users".
    """
    self.different_client.login(username=self.different_user.username, password=TEST_PASSWORD)
    self.create_mock_profile(self.user)
    with self.assertNumQueries(23):
        response = self.send_get(self.different_client)
    self._verify_full_shareable_account_response(response, account_privacy=ALL_USERS_VISIBILITY)
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_get_account_different_user_private(self):
    """
    Test that a client (logged in) can only get the shareable fields for a different user.
    This is the case when default_visibility is set to "private".
    """
    self.different_client.login(username=self.different_user.username, password=TEST_PASSWORD)
    self.create_mock_profile(self.user)
    with self.assertNumQueries(23):
        response = self.send_get(self.different_client)
    self._verify_private_account_response(response)
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_get_account_private_visibility(self, api_client, requesting_username, preference_visibility): """ Test the return from GET based on user visibility setting. """ def verify_fields_visible_to_all_users(response): """ Confirms that private fields are private, and public/shareable fields are public/shareable """ if preference_visibility == PRIVATE_VISIBILITY: self._verify_private_account_response(response) else: self._verify_full_shareable_account_response(response, ALL_USERS_VISIBILITY, badges_enabled=True) client = self.login_client(api_client, requesting_username) # Update user account visibility setting. set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, preference_visibility) self.create_mock_profile(self.user) response = self.send_get(client) if requesting_username == "different_user": verify_fields_visible_to_all_users(response) else: self._verify_full_account_response(response) # Verify how the view parameter changes the fields that are returned. response = self.send_get(client, query_parameters='view=shared') verify_fields_visible_to_all_users(response) response = self.send_get(client, query_parameters='view=shared&email={}'.format(self.user.email)) verify_fields_visible_to_all_users(response)
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_custom_visibility_over_age(self, api_client, requesting_username): self.create_mock_profile(self.user) # set user's custom visibility preferences set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, CUSTOM_VISIBILITY) shared_fields = ("bio", "language_proficiencies", "name") for field_name in shared_fields: set_user_preference(self.user, "visibility.{}".format(field_name), ALL_USERS_VISIBILITY) # make API request client = self.login_client(api_client, requesting_username) response = self.send_get(client) # verify response if requesting_username == "different_user": data = response.data self.assertEqual(6, len(data)) # public fields self.assertEqual(self.user.username, data["username"]) self.assertEqual(UserPreference.get_value(self.user, 'account_privacy'), data["account_privacy"]) self._verify_profile_image_data(data, has_profile_image=True) # custom shared fields self.assertEqual(TEST_BIO_VALUE, data["bio"]) self.assertEqual([{"code": TEST_LANGUAGE_PROFICIENCY_CODE}], data["language_proficiencies"]) self.assertEqual(self.user.first_name + " " + self.user.last_name, data["name"]) else: self._verify_full_account_response(response)
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_custom_visibility_under_age(self, api_client, requesting_username): self.create_mock_profile(self.user) year_of_birth = self._set_user_age_to_10_years(self.user) # set user's custom visibility preferences set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, CUSTOM_VISIBILITY) shared_fields = ("bio", "language_proficiencies") for field_name in shared_fields: set_user_preference(self.user, "visibility.{}".format(field_name), ALL_USERS_VISIBILITY) # make API request client = self.login_client(api_client, requesting_username) response = self.send_get(client) # verify response if requesting_username == "different_user": self._verify_private_account_response(response, requires_parental_consent=True) else: self._verify_full_account_response( response, requires_parental_consent=True, year_of_birth=year_of_birth, )
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_get_account_default(self): """ Test that a client (logged in) can get her own account information (using default legacy profile information, as created by the test UserFactory). """ def verify_get_own_information(queries): """ Internal helper to perform the actual assertions """ with self.assertNumQueries(queries): response = self.send_get(self.client) data = response.data self.assertEqual(25, len(data)) self.assertEqual(self.user.username, data["username"]) self.assertEqual(self.user.first_name + " " + self.user.last_name, data["name"]) for empty_field in ("year_of_birth", "level_of_education", "mailing_address", "bio"): self.assertIsNone(data[empty_field]) self.assertIsNone(data["country"]) self.assertIsNone(data["state"]) self.assertEqual("m", data["gender"]) self.assertEqual("Learn a lot", data["goals"]) self.assertEqual(self.user.email, data["email"]) self.assertIsNotNone(data["date_joined"]) self.assertEqual(self.user.is_active, data["is_active"]) self._verify_profile_image_data(data, False) self.assertTrue(data["requires_parental_consent"]) self.assertEqual([], data["language_proficiencies"]) self.assertEqual(PRIVATE_VISIBILITY, data["account_privacy"]) self.assertIsNone(data["time_zone"]) # Badges aren't on by default, so should not be present. self.assertEqual(False, data["accomplishments_shared"]) self.client.login(username=self.user.username, password=TEST_PASSWORD) verify_get_own_information(21) # Now make sure that the user can get the same information, even if not active self.user.is_active = False self.user.save() verify_get_own_information(13)
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_get_account_empty_string(self): """ Test the conversion of empty strings to None for certain fields. """ legacy_profile = UserProfile.objects.get(id=self.user.id) legacy_profile.country = "" legacy_profile.state = "" legacy_profile.level_of_education = "" legacy_profile.gender = "" legacy_profile.bio = "" legacy_profile.save() self.client.login(username=self.user.username, password=TEST_PASSWORD) with self.assertNumQueries(21): response = self.send_get(self.client) for empty_field in ("level_of_education", "gender", "country", "state", "bio",): self.assertIsNone(response.data[empty_field])
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_patch_account_disallowed_user(self, api_client, user):
    """
    Test that a client cannot call PATCH on a different client's user account (even with is_staff access).
    """
    client = self.login_client(api_client, user)
    self.send_patch(client, {}, expected_status=403)
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_patch_account_unknown_user(self, api_client, user):
    """
    Test that trying to update a user who does not exist returns a 403.
    """
    client = self.login_client(api_client, user)
    response = client.patch(
        reverse("accounts_api", kwargs={'username': "does_not_exist"}),
        data=json.dumps({}),
        content_type="application/merge-patch+json"
    )
    self.assertEqual(403, response.status_code)
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_patch_account(self, field, value, fails_validation_value=None, developer_validation_message=None): """ Test the behavior of patch, when using the correct content_type. """ client = self.login_client("client", "user") if field == 'account_privacy': # Ensure the user has birth year set, and is over 13, so # account_privacy behaves normally legacy_profile = UserProfile.objects.get(id=self.user.id) legacy_profile.year_of_birth = 2000 legacy_profile.save() response = self.send_patch(client, {field: value}) self.assertEqual(value, response.data[field]) if fails_validation_value: error_response = self.send_patch(client, {field: fails_validation_value}, expected_status=400) self.assertEqual( u'This value is invalid.', error_response.data["field_errors"][field]["user_message"] ) self.assertEqual( u"Value '{value}' is not valid for field '{field}': {messages}".format( value=fails_validation_value, field=field, messages=[developer_validation_message] ), error_response.data["field_errors"][field]["developer_message"] ) elif field != "account_privacy": # If there are no values that would fail validation, then empty string should be supported; # except for account_privacy, which cannot be an empty string. response = self.send_patch(client, {field: ""}) self.assertEqual("", response.data[field])
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
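The send_patch helper used throughout these tests is likewise not part of this excerpt. Based on the explicit client.patch(...) call in test_patch_account_unknown_user above, it plausibly looks like the sketch below; treat the signature as an assumption.

# Assumed shape of the send_patch helper, mirroring the raw JSON merge-patch
# request issued in test_patch_account_unknown_user; not the suite's actual code.
import json

from django.urls import reverse  # django.core.urlresolvers on older Django


def send_patch(test_case, client, username, data, expected_status=200):
    """PATCH the accounts API with a JSON merge-patch body and check the status."""
    response = client.patch(
        reverse("accounts_api", kwargs={"username": username}),
        data=json.dumps(data),
        content_type="application/merge-patch+json",
    )
    test_case.assertEqual(expected_status, response.status_code)
    return response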
def test_patch_account_noneditable(self): """ Tests the behavior of patch when a read-only field is attempted to be edited. """ client = self.login_client("client", "user") def verify_error_response(field_name, data): """ Internal helper to check the error messages returned """ self.assertEqual( "This field is not editable via this API", data["field_errors"][field_name]["developer_message"] ) self.assertEqual( u"The '{0}' field cannot be edited.".format(field_name), data["field_errors"][field_name]["user_message"] ) for field_name in ["username", "date_joined", "is_active", "profile_image", "requires_parental_consent"]: response = self.send_patch(client, {field_name: "will_error", "gender": "o"}, expected_status=400) verify_error_response(field_name, response.data) # Make sure that gender did not change. response = self.send_get(client) self.assertEqual("m", response.data["gender"]) # Test error message with multiple read-only items response = self.send_patch(client, {"username": "will_error", "date_joined": "xx"}, expected_status=400) self.assertEqual(2, len(response.data["field_errors"])) verify_error_response("username", response.data) verify_error_response("date_joined", response.data)
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_patch_account_empty_string(self):
        """
        Tests the behavior of patch when attempting to set fields with a select list of options to
        the empty string. Also verifies the behavior when setting to None.
        """
        self.client.login(username=self.user.username, password=TEST_PASSWORD)
        for field_name in ["gender", "level_of_education", "country", "state"]:
            response = self.send_patch(self.client, {field_name: ""})
            # Although throwing a 400 might be reasonable, the default DRF behavior with ModelSerializer
            # is to convert to None, which also seems acceptable (and is difficult to override).
            self.assertIsNone(response.data[field_name])

            # Verify that the behavior is the same for sending None.
            response = self.send_patch(self.client, {field_name: None})
            self.assertIsNone(response.data[field_name])
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def get_name_change_info(expected_entries): """ Internal method to encapsulate the retrieval of old names used """ legacy_profile = UserProfile.objects.get(id=self.user.id) name_change_info = legacy_profile.get_meta()["old_names"] self.assertEqual(expected_entries, len(name_change_info)) return name_change_info
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_patch_email(self):
        """
        Test that the user can request an email change through the accounts API.
        Full testing of the helper method used (do_email_change_request) exists in the package with the code.
        Here just do minimal smoke testing.
        """
        client = self.login_client("client", "user")
        old_email = self.user.email
        new_email = "[email protected]"
        response = self.send_patch(client, {"email": new_email, "goals": "change my email"})

        # Since the request is multi-step, the email won't change on GET immediately (though goals will update).
        self.assertEqual(old_email, response.data["email"])
        self.assertEqual("change my email", response.data["goals"])

        # Now call the method that will be invoked when the user clicks the activation key in the received email.
        # First we must get the activation key that was sent.
        pending_change = PendingEmailChange.objects.filter(user=self.user)
        self.assertEqual(1, len(pending_change))
        activation_key = pending_change[0].activation_key
        confirm_change_url = reverse(
            "confirm_email_change", kwargs={'key': activation_key}
        )
        response = self.client.post(confirm_change_url)
        self.assertEqual(200, response.status_code)
        get_response = self.send_get(client)
        self.assertEqual(new_email, get_response.data["email"])
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_patch_invalid_email(self, bad_email): """ Test a few error cases for email validation (full test coverage lives with do_email_change_request). """ client = self.login_client("client", "user") # Try changing to an invalid email to make sure error messages are appropriately returned. error_response = self.send_patch(client, {"email": bad_email}, expected_status=400) field_errors = error_response.data["field_errors"] self.assertEqual( "Error thrown from validate_new_email: 'Valid e-mail address required.'", field_errors["email"]["developer_message"] ) self.assertEqual("Valid e-mail address required.", field_errors["email"]["user_message"])
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_patch_duplicate_email(self, do_email_change_request): """ Test that same success response will be sent to user even if the given email already used. """ existing_email = "[email protected]" UserFactory.create(email=existing_email) client = self.login_client("client", "user") # Try changing to an existing email to make sure no error messages returned. response = self.send_patch(client, {"email": existing_email}) self.assertEqual(200, response.status_code) # Verify that no actual request made for email change self.assertFalse(do_email_change_request.called)
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_patch_invalid_language_proficiencies(self, patch_value, expected_error_message): """ Verify we handle error cases when patching the language_proficiencies field. """ if six.PY3: expected_error_message = six.text_type(expected_error_message).replace('unicode', 'str') client = self.login_client("client", "user") response = self.send_patch(client, {"language_proficiencies": patch_value}, expected_status=400) self.assertEqual( response.data["field_errors"]["language_proficiencies"]["developer_message"], u"Value '{patch_value}' is not valid for field 'language_proficiencies': {error_message}".format( patch_value=patch_value, error_message=expected_error_message ) )
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
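For contrast with the invalid payloads exercised above, a happy-path sketch is shown below. The [{"code": ...}] item shape is inferred rather than taken from this excerpt, so treat it as an assumption about the serializer's format.

def test_patch_valid_language_proficiencies_sketch(self):
        """Hypothetical happy-path counterpart to the error cases above."""
        client = self.login_client("client", "user")
        # The [{"code": "en"}] shape is an assumed representation of a proficiency.
        response = self.send_patch(client, {"language_proficiencies": [{"code": "en"}]})
        self.assertEqual([{"code": "en"}], response.data["language_proficiencies"])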
def test_patch_serializer_save_fails(self, serializer_save): """ Test that AccountUpdateErrors are passed through to the response. """ serializer_save.side_effect = [Exception("bummer"), None] self.client.login(username=self.user.username, password=TEST_PASSWORD) error_response = self.send_patch(self.client, {"goals": "save an account field"}, expected_status=400) self.assertEqual( "Error thrown when saving account updates: 'bummer'", error_response.data["developer_message"] ) self.assertIsNone(error_response.data["user_message"])
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_convert_relative_profile_url(self): """ Test that when TEST_PROFILE_IMAGE_BACKEND['base_url'] begins with a '/', the API generates the full URL to profile images based on the URL of the request. """ self.client.login(username=self.user.username, password=TEST_PASSWORD) response = self.send_get(self.client) self.assertEqual( response.data["profile_image"], { "has_image": False, "image_url_full": "http://testserver/static/default_50.png", "image_url_small": "http://testserver/static/default_10.png" } )
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_parental_consent(self, api_client, requesting_username, has_full_access): """ Verifies that under thirteens never return a public profile. """ client = self.login_client(api_client, requesting_username) year_of_birth = self._set_user_age_to_10_years(self.user) set_user_preference(self.user, ACCOUNT_VISIBILITY_PREF_KEY, ALL_USERS_VISIBILITY) # Verify that the default view is still private (except for clients with full access) response = self.send_get(client) if has_full_access: data = response.data self.assertEqual(25, len(data)) self.assertEqual(self.user.username, data["username"]) self.assertEqual(self.user.first_name + " " + self.user.last_name, data["name"]) self.assertEqual(self.user.email, data["email"]) self.assertEqual(year_of_birth, data["year_of_birth"]) for empty_field in ("country", "level_of_education", "mailing_address", "bio", "state",): self.assertIsNone(data[empty_field]) self.assertEqual("m", data["gender"]) self.assertEqual("Learn a lot", data["goals"]) self.assertTrue(data["is_active"]) self.assertIsNotNone(data["date_joined"]) self._verify_profile_image_data(data, False) self.assertTrue(data["requires_parental_consent"]) self.assertEqual(PRIVATE_VISIBILITY, data["account_privacy"]) else: self._verify_private_account_response(response, requires_parental_consent=True) # Verify that the shared view is still private response = self.send_get(client, query_parameters='view=shared') self._verify_private_account_response(response, requires_parental_consent=True)
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def setUp(self): super(TestAccountAPITransactions, self).setUp() self.client = APIClient() self.user = UserFactory.create(password=TEST_PASSWORD) self.url = reverse("accounts_api", kwargs={'username': self.user.username})
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_update_account_settings_rollback(self, mock_email_change): """ Verify that updating account settings is transactional when a failure happens. """ # Send a PATCH request with updates to both profile information and email. # Throw an error from the method that is used to process the email change request # (this is the last thing done in the api method). Verify that the profile did not change. mock_email_change.side_effect = [ValueError, "mock value error thrown"] self.client.login(username=self.user.username, password=TEST_PASSWORD) old_email = self.user.email json_data = {"email": "[email protected]", "gender": "o"} response = self.client.patch(self.url, data=json.dumps(json_data), content_type="application/merge-patch+json") self.assertEqual(400, response.status_code) # Verify that GET returns the original preferences response = self.client.get(self.url) data = response.data self.assertEqual(old_email, data["email"]) self.assertEqual(u"m", data["gender"])
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def setUp(self): super(UsernameReplacementViewTests, self).setUp() self.service_user = UserFactory(username=self.SERVICE_USERNAME) self.url = reverse("username_replacement")
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def call_api(self, user, data): """ Helper function to call API with data """ data = json.dumps(data) headers = self.build_jwt_headers(user) return self.client.post(self.url, data, content_type='application/json', **headers)
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
def test_bad_schema(self, mapping_data): """ Verify the endpoint rejects bad data schema """ data = { "username_mappings": mapping_data } response = self.call_api(self.service_user, data) self.assertEqual(response.status_code, 400)
appsembler/edx-platform
[ 12, 17, 12, 30, 1412683746 ]
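A well-formed request to the same endpoint might look like the sketch below; the single-key {old: new} mapping items are an assumption shaped after the "username_mappings" key exercised in test_bad_schema, not a documented contract.

def test_valid_mapping_sketch(self):
        """Hypothetical happy-path call to the username replacement endpoint."""
        data = {
            "username_mappings": [
                {"old_username": "new_username"},  # assumed item shape
            ]
        }
        response = self.call_api(self.service_user, data)
        self.assertEqual(response.status_code, 200)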
def _select(self):
        """Build the SELECT clause of the report's SQL view."""
        select_str = """
        SELECT row_number() OVER() as id,
            a.type_id AS type_id,
            a.vehicle_id AS vehicle_id,
            a.driver_id AS driver_id,
            a.co_driver_id AS co_driver_id,
            a.date_start AS date_start,
            a.date_end AS date_end,
            a.real_date_depart AS real_date_depart,
            a.real_date_arrive AS real_date_arrive,
            a.start_location_id AS start_location_id,
            a.end_location_id AS end_location_id,
            a.state AS state,
            SUM(a.end_odometer - a.start_odometer) AS odometer,
            SUM(a.distance) AS distance
        """
        return select_str
open-synergy/opensynid-fleet
[ 2, 12, 2, 4, 1460253955 ]
def _where(self):
        """Build the WHERE clause of the report's SQL view (currently no filter)."""
        where_str = """
        """
        return where_str
open-synergy/opensynid-fleet
[ 2, 12, 2, 4, 1460253955 ]
def _group_by(self):
        """Build the GROUP BY clause of the report's SQL view."""
        group_str = """
        GROUP BY a.type_id,
            a.vehicle_id,
            a.driver_id,
            a.co_driver_id,
            a.date_start,
            a.date_end,
            a.real_date_depart,
            a.real_date_arrive,
            a.start_location_id,
            a.end_location_id,
            a.state
        """
        return group_str
open-synergy/opensynid-fleet
[ 2, 12, 2, 4, 1460253955 ]
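The three fragment builders above are the kind of pieces an Odoo report model stitches into a PostgreSQL view. Neither the _from() part nor the model's init() appears in this excerpt, so the sketch below is an assumed assembly: the model name, source table and joins are placeholders.

# Assumed assembly of the SQL fragments into a database view (Odoo 8-style
# old API). The class would also contain the _select(), _where() and
# _group_by() methods shown above; _from() and all names are placeholders.
from openerp import models, tools


class FleetWorkOrderReport(models.Model):
    _name = "fleet.work.order.report"  # placeholder name
    _auto = False

    def _from(self):
        # Placeholder source table; the real module may join more tables.
        return """
        FROM fleet_work_order AS a
        """

    def init(self, cr):
        tools.drop_view_if_exists(cr, self._table)
        cr.execute(
            "CREATE OR REPLACE VIEW %s AS (%s %s %s %s)" % (
                self._table,
                self._select(),
                self._from(),
                self._where(),
                self._group_by(),
            )
        )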
def remove_permissions(apps, _unused):
    """Delete the auth permissions for the follower and scenario models."""
    Permission = apps.get_model("auth.Permission")
    Permission.objects.filter(
        codename__in=(
            "add_follower",
            "change_follower",
            "view_follower",
            "delete_follower",
            "add_scenario",
            "change_scenario",
            "delete_scenario",
            "view_scenario",
        )
    ).delete()
frePPLe/frePPLe
[ 394, 214, 394, 42, 1378966013 ]
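A helper like remove_permissions is normally executed from a data migration. The sketch below shows the usual RunPython wiring; the dependency entry is a placeholder rather than frePPLe's real migration graph.

# Typical wiring for the helper above inside a Django data migration; the
# dependency below is a placeholder, not the project's actual migration name.
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("common", "0001_initial"),  # placeholder app/migration
    ]

    operations = [
        # Forward: drop the permissions; backward: no-op.
        migrations.RunPython(remove_permissions, migrations.RunPython.noop),
    ]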
def setUp(self): if not os.path.exists(os.path.join(TESTDATA_PATH, "exports")): os.makedirs(os.path.join(TESTDATA_PATH, "exports")) self.movie = dict() self.movie["title"] = "Fight Club" self.movie["imdb"] = dict() self.movie["imdb"]["id"] = "tt0137523" self.movie["imdb"]["url"] = "https://www.imdb.com/title/tt0137523" self.movie["imdb"]["my_rating"] = 9
StegSchreck/RatS
[ 227, 30, 227, 42, 1486496246 ]
def test_init(self, browser_mock, base_init_mock): ICheckMoviesRatingsInserter(None) self.assertTrue(base_init_mock.called)
StegSchreck/RatS
[ 227, 30, 227, 42, 1486496246 ]
def test_insert( self, browser_mock, base_init_mock, site_mock, impex_mock, select_mock
StegSchreck/RatS
[ 227, 30, 227, 42, 1486496246 ]
def __init__(self, cr, uid, name, context=None): super(ReportAccountPayment, self).__init__(cr, uid, name, context=context) self.localcontext.update({ 'get_text_amount': self._get_text_amount, })
ClearCorp/odoo-clearcorp
[ 14, 44, 14, 5, 1400190481 ]
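In the legacy report_sxw framework, a parser like ReportAccountPayment is registered against a report name and an RML template. The sketch below shows that registration plus a placeholder _get_text_amount, since neither appears in this excerpt; the report name, model and template path are made up for illustration.

# Hedged sketch only: registration of the parser and a placeholder helper.
from openerp.report import report_sxw


class ReportAccountPayment(report_sxw.rml_parse):

    def __init__(self, cr, uid, name, context=None):
        super(ReportAccountPayment, self).__init__(
            cr, uid, name, context=context)
        # Expose the helper to the report template as 'get_text_amount'.
        self.localcontext.update({
            'get_text_amount': self._get_text_amount,
        })

    def _get_text_amount(self, amount, currency):
        # Placeholder: a real implementation would spell the amount out in
        # words (for example via openerp.tools.amount_to_text_en).
        return "%s %s" % (amount, currency and currency.name or "")


report_sxw.report_sxw(
    'report.account_payment_text_amount',    # placeholder report name
    'account.voucher',                        # placeholder model
    'account_payment/report/payment.rml',     # placeholder RML template
    parser=ReportAccountPayment,
)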