lines: sequencelengths 1, 383
raw_lines: sequencelengths 1, 383
label: sequencelengths 1, 383
type: sequencelengths 1, 383
[ "def FUNC_4(VAR_0):...\n", "VAR_0.DEBUG = True\n", "VAR_6 = Token(TokenType.TEXT,\n \"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view id='flob'\"\n )\n", "VAR_7 = unicorn(None, VAR_6)\n", "VAR_10 = CLASS_0(component_name='test', component_id='asdf')\n", "VAR_8 = {'view': VAR_10}\n", "VAR_7.render(VAR_8)\n", "assert VAR_7.component_id == 'asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:flob'\n" ]
[ "def test_unicorn_render_parent_with_id(settings):...\n", "settings.DEBUG = True\n", "token = Token(TokenType.TEXT,\n \"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view id='flob'\"\n )\n", "unicorn_node = unicorn(None, token)\n", "view = FakeComponentParent(component_name='test', component_id='asdf')\n", "context = {'view': view}\n", "unicorn_node.render(context)\n", "assert unicorn_node.component_id == 'asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:flob'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assert'" ]
[ "def FUNC_16(self, VAR_15, VAR_13, VAR_14):...\n", "if VAR_53.session['admin']:\n", "VAR_62 = self.useroptions.forUser(VAR_15)\n", "return 'error: not permitted. Only admins can change other users options'\n", "VAR_62.setOption(VAR_13, VAR_14)\n", "return 'success'\n" ]
[ "def api_setuseroptionfor(self, userid, optionkey, optionval):...\n", "if cherrypy.session['admin']:\n", "uo = self.useroptions.forUser(userid)\n", "return 'error: not permitted. Only admins can change other users options'\n", "uo.setOption(optionkey, optionval)\n", "return 'success'\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Return'", "Expr'", "Return'" ]
[ "@csrf.ensure_csrf_cookie...\n", "return render_template('index.html', VAR_0)\n" ]
[ "@csrf.ensure_csrf_cookie...\n", "return render_template('index.html', request)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_3():...\n", "return VAR_6\n" ]
[ "def getEngine():...\n", "return _engine\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@pytest.mark.parametrize('openapi_type,python_type', [('number',...\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_26 = VAR_1.MagicMock()\n", "VAR_23 = oai.Schema.construct(type=openapi_type)\n", "VAR_30 = VAR_1.patch(f'{VAR_0}.{VAR_3}')\n", "from openapi_python_client.parser.properties import property_from_data\n", "VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_30.assert_called_once_with(VAR_5=name, VAR_26=required, default=None)\n", "assert VAR_4 == VAR_30()\n", "VAR_30.reset_mock()\n", "VAR_23.default = VAR_1.MagicMock()\n", "property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_30.assert_called_once_with(VAR_5=name, VAR_26=required, default=data.\n default)\n" ]
[ "@pytest.mark.parametrize('openapi_type,python_type', [('number',...\n", "name = mocker.MagicMock()\n", "required = mocker.MagicMock()\n", "data = oai.Schema.construct(type=openapi_type)\n", "clazz = mocker.patch(f'{MODULE_NAME}.{python_type}')\n", "from openapi_python_client.parser.properties import property_from_data\n", "p = property_from_data(name=name, required=required, data=data)\n", "clazz.assert_called_once_with(name=name, required=required, default=None)\n", "assert p == clazz()\n", "clazz.reset_mock()\n", "data.default = mocker.MagicMock()\n", "property_from_data(name=name, required=required, data=data)\n", "clazz.assert_called_once_with(name=name, required=required, default=data.\n default)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Expr'", "Assert'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_14(self):...\n", "VAR_26 = np.array([[1], [2]])\n", "VAR_27 = np.array(range(6)).reshape(2, 3)\n", "VAR_28 = os.path.join(test.get_temp_dir(), 'input0.npy')\n", "VAR_29 = os.path.join(test.get_temp_dir(), 'input1.npy')\n", "np.save(VAR_28, VAR_26)\n", "np.save(VAR_29, VAR_27)\n", "VAR_20 = 'x0=' + VAR_28 + '[x0];x1=' + VAR_29\n", "VAR_30 = saved_model_cli.load_inputs_from_input_arg_string(VAR_20, '', '')\n", "self.assertTrue(np.all(VAR_30['x0'] == VAR_26))\n", "self.assertTrue(np.all(VAR_30['x1'] == VAR_27))\n" ]
[ "def testInputParserNPY(self):...\n", "x0 = np.array([[1], [2]])\n", "x1 = np.array(range(6)).reshape(2, 3)\n", "input0_path = os.path.join(test.get_temp_dir(), 'input0.npy')\n", "input1_path = os.path.join(test.get_temp_dir(), 'input1.npy')\n", "np.save(input0_path, x0)\n", "np.save(input1_path, x1)\n", "input_str = 'x0=' + input0_path + '[x0];x1=' + input1_path\n", "feed_dict = saved_model_cli.load_inputs_from_input_arg_string(input_str, '', ''\n )\n", "self.assertTrue(np.all(feed_dict['x0'] == x0))\n", "self.assertTrue(np.all(feed_dict['x1'] == x1))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_46():...\n", "\"\"\"docstring\"\"\"\n", "if len(request.args) != 2:\n", "session.flash = T('invalid ticket')\n", "VAR_3 = FUNC_5()\n", "redirect(URL('site'))\n", "VAR_27 = request.env.web2py_version\n", "VAR_113 = request.args[1]\n", "VAR_114 = RestrictedError()\n", "VAR_114.load(request, VAR_3, VAR_113)\n", "return dict(VAR_3=app, VAR_113=ticket, VAR_67=e.output, VAR_18=e.traceback and\n TRACEBACK(e.traceback), snapshot=e.snapshot, VAR_180=e.code, layer=e.\n layer, VAR_27=myversion)\n" ]
[ "def ticket():...\n", "\"\"\"docstring\"\"\"\n", "if len(request.args) != 2:\n", "session.flash = T('invalid ticket')\n", "app = get_app()\n", "redirect(URL('site'))\n", "myversion = request.env.web2py_version\n", "ticket = request.args[1]\n", "e = RestrictedError()\n", "e.load(request, app, ticket)\n", "return dict(app=app, ticket=ticket, output=e.output, traceback=e.traceback and\n TRACEBACK(e.traceback), snapshot=e.snapshot, code=e.code, layer=e.layer,\n myversion=myversion)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def __str__(self) ->str:...\n", "return '-'\n" ]
[ "def __str__(self) ->str:...\n", "return '-'\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@VAR_0.route('/')...\n", "\"\"\"docstring\"\"\"\n", "if flask_login.current_user.is_authenticated:\n", "VAR_1.error('User may not be logged in. Clearing cookie auth.')\n", "return clear_cookie_auth()\n", "if flask_login.current_user.landing_page == 'live':\n", "return redirect(url_for('routes_page.page_live'))\n", "if flask_login.current_user.landing_page == 'dashboard':\n", "return redirect(url_for('routes_dashboard.page_dashboard_default'))\n", "if flask_login.current_user.landing_page == 'info':\n", "return redirect(url_for('routes_page.page_info'))\n", "return redirect(url_for('routes_page.page_live'))\n" ]
[ "@blueprint.route('/')...\n", "\"\"\"docstring\"\"\"\n", "if flask_login.current_user.is_authenticated:\n", "logger.error('User may not be logged in. Clearing cookie auth.')\n", "return clear_cookie_auth()\n", "if flask_login.current_user.landing_page == 'live':\n", "return redirect(url_for('routes_page.page_live'))\n", "if flask_login.current_user.landing_page == 'dashboard':\n", "return redirect(url_for('routes_dashboard.page_dashboard_default'))\n", "if flask_login.current_user.landing_page == 'info':\n", "return redirect(url_for('routes_page.page_info'))\n", "return redirect(url_for('routes_page.page_live'))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Expr'", "Return'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_20(self):...\n", "\"\"\"docstring\"\"\"\n", "self._start_loop()\n" ]
[ "def resumeProducing(self):...\n", "\"\"\"docstring\"\"\"\n", "self._start_loop()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'" ]
[ "def FUNC_3(self, VAR_8):...\n", "return format_html('<p class=\"datetime\">{0} {1}<br />{2} {3}</p>', _(\n 'Date:'), VAR_8[0], _('Time:'), VAR_8[1])\n" ]
[ "def format_output(self, rendered_widgets):...\n", "return format_html('<p class=\"datetime\">{0} {1}<br />{2} {3}</p>', _(\n 'Date:'), rendered_widgets[0], _('Time:'), rendered_widgets[1])\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __init__(self, VAR_13, VAR_14, VAR_3=None, VAR_15=None):...\n", "self.rel = VAR_13\n", "self.admin_site = VAR_14\n", "self.db = VAR_15\n", "super(CLASS_7, self).__init__(VAR_3)\n" ]
[ "def __init__(self, rel, admin_site, attrs=None, using=None):...\n", "self.rel = rel\n", "self.admin_site = admin_site\n", "self.db = using\n", "super(ForeignKeyRawIdWidget, self).__init__(attrs)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "from mock import Mock\n", "from twisted.internet.defer import succeed\n", "from synapse.api.errors import FederationError\n", "from synapse.events import make_event_from_dict\n", "from synapse.logging.context import LoggingContext\n", "from synapse.types import UserID, create_requester\n", "from synapse.util import Clock\n", "from synapse.util.retryutils import NotRetryingDestination\n", "from tests import unittest\n", "from tests.server import ThreadedMemoryReactorClock, setup_test_homeserver\n", "from tests.test_utils import make_awaitable\n", "def FUNC_0(self):...\n", "self.http_client = Mock()\n", "self.reactor = ThreadedMemoryReactorClock()\n", "self.hs_clock = Clock(self.reactor)\n", "self.homeserver = setup_test_homeserver(self.addCleanup, http_client=self.\n http_client, clock=self.hs_clock, reactor=self.reactor)\n", "VAR_0 = UserID('us', 'test')\n", "VAR_1 = create_requester(VAR_0)\n", "VAR_2 = self.homeserver.get_room_creation_handler()\n", "self.room_id = self.get_success(VAR_2.create_room(VAR_1, VAR_2.\n _presets_dict['public_chat'], ratelimit=False))[0]['room_id']\n", "self.store = self.homeserver.get_datastore()\n", "VAR_3 = self.get_success(self.homeserver.get_datastore().\n get_latest_event_ids_in_room(self.room_id))[0]\n", "VAR_4 = make_event_from_dict({'room_id': self.room_id, 'sender':\n '@baduser:test.serv', 'state_key': '@baduser:test.serv', 'event_id':\n '$join:test.serv', 'depth': 1000, 'origin_server_ts': 1, 'type':\n 'm.room.member', 'origin': 'test.servx', 'content': {'membership':\n 'join'}, 'auth_events': [], 'prev_state': [(VAR_3, {})], 'prev_events':\n [(VAR_3, {})]})\n", "self.handler = self.homeserver.get_federation_handler()\n", "self.handler.do_auth = lambda origin, event, context, auth_events: succeed(\n context)\n", "self.client = self.homeserver.get_federation_client()\n", "self.client._check_sigs_and_hash_and_fetch = lambda dest, pdus, **k: succeed(\n pdus)\n", "self.assertEqual(self.get_success(self.handler.on_receive_pdu('test.serv',\n VAR_4, sent_to_us_directly=True)), None)\n", "self.assertEqual(self.get_success(self.store.get_latest_event_ids_in_room(\n self.room_id))[0], '$join:test.serv')\n", "def FUNC_1(self):...\n", "\"\"\"docstring\"\"\"\n", "async def FUNC_4(VAR_5, VAR_6, VAR_7, VAR_8=None, VAR_9=0):...\n", "if VAR_6.startswith('/_matrix/federation/v1/get_missing_events/'):\n", "return {'events': []}\n", "self.http_client.post_json = FUNC_4\n", "VAR_3 = self.get_success(self.store.get_latest_event_ids_in_room(self.room_id)\n )[0]\n", "VAR_10 = make_event_from_dict({'room_id': self.room_id, 'sender':\n '@baduser:test.serv', 'event_id': 'one:test.serv', 'depth': 1000,\n 'origin_server_ts': 1, 'type': 'm.room.message', 'origin': 'test.serv',\n 'content': {'body': 'hewwo?'}, 'auth_events': [], 'prev_events': [(\n 'two:test.serv', {}), (VAR_3, {})]})\n", "VAR_24 = self.get_failure(self.handler.on_receive_pdu('test.serv', VAR_10,\n sent_to_us_directly=True), FederationError)\n", "self.assertEqual(VAR_24.value.args[0],\n \"ERROR 403: Your server isn't divulging details about prev_events referenced in this event.\"\n )\n", "VAR_11 = self.get_success(self.store.get_latest_event_ids_in_room(self.room_id)\n )\n", "self.assertEqual(VAR_11[0], '$join:test.serv')\n", "def FUNC_2(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_12 = '@john:test_remote'\n", "VAR_13 = 'test_remote'\n", "self.resync_attempts = 0\n", "def FUNC_5(VAR_5, VAR_0):...\n", "if VAR_0 == VAR_12:\n", "self.resync_attempts += 1\n", "VAR_14 = self.homeserver.get_federation_client()\n", 
"VAR_14.query_user_devices = Mock(side_effect=query_user_devices)\n", "VAR_15 = self.homeserver.get_datastore()\n", "VAR_15.get_rooms_for_user = Mock(return_value=make_awaitable([\n '!someroom:test']))\n", "VAR_16 = self.homeserver.get_device_handler().device_list_updater\n", "self.get_success(VAR_16.incoming_device_list_update(origin=remote_origin,\n edu_content={'deleted': False, 'device_display_name': 'Mobile',\n 'device_id': 'QBUAZIFURK', 'prev_id': [5], 'stream_id': 6, 'user_id':\n remote_user_id}))\n", "self.assertEqual(self.resync_attempts, 1)\n", "VAR_17 = self.get_success(VAR_15.get_user_ids_requiring_device_list_resync())\n", "self.assertIn(VAR_12, VAR_17)\n", "self.reactor.advance(30)\n", "self.assertEqual(self.resync_attempts, 2)\n", "def FUNC_3(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_12 = '@john:test_remote'\n", "VAR_18 = '85T7JXPFBAySB/jwby4S3lBPTqY3+Zg53nYuGmu1ggY'\n", "VAR_19 = 'QeIiFEjluPBtI7WQdG365QKZcFs9kqmHir6RBD0//nQ'\n", "VAR_14 = self.homeserver.get_federation_client()\n", "VAR_14.query_user_devices = Mock(return_value=succeed({'user_id':\n remote_user_id, 'stream_id': 1, 'devices': [], 'master_key': {'user_id':\n remote_user_id, 'usage': ['master'], 'keys': {('ed25519:' +\n remote_master_key): remote_master_key}}, 'self_signing_key': {'user_id':\n remote_user_id, 'usage': ['self_signing'], 'keys': {('ed25519:' +\n remote_self_signing_key): remote_self_signing_key}}}))\n", "VAR_20 = self.homeserver.get_device_handler()\n", "self.get_success(VAR_20.device_list_updater.user_device_resync(VAR_12))\n", "VAR_21 = self.get_success(self.store.get_e2e_cross_signing_keys_bulk(\n user_ids=[remote_user_id]))\n", "self.assertTrue(VAR_12 in VAR_21)\n", "VAR_22 = VAR_21[VAR_12]['master']\n", "self.assertEqual(len(VAR_22['keys']), 1)\n", "self.assertTrue('ed25519:' + VAR_18 in VAR_22['keys'].keys())\n", "self.assertTrue(VAR_18 in VAR_22['keys'].values())\n", "VAR_23 = VAR_21[VAR_12]['self_signing']\n", "self.assertEqual(len(VAR_23['keys']), 1)\n", "self.assertTrue('ed25519:' + VAR_19 in VAR_23['keys'].keys())\n", "self.assertTrue(VAR_19 in VAR_23['keys'].values())\n" ]
[ "from mock import Mock\n", "from twisted.internet.defer import succeed\n", "from synapse.api.errors import FederationError\n", "from synapse.events import make_event_from_dict\n", "from synapse.logging.context import LoggingContext\n", "from synapse.types import UserID, create_requester\n", "from synapse.util import Clock\n", "from synapse.util.retryutils import NotRetryingDestination\n", "from tests import unittest\n", "from tests.server import ThreadedMemoryReactorClock, setup_test_homeserver\n", "from tests.test_utils import make_awaitable\n", "def setUp(self):...\n", "self.http_client = Mock()\n", "self.reactor = ThreadedMemoryReactorClock()\n", "self.hs_clock = Clock(self.reactor)\n", "self.homeserver = setup_test_homeserver(self.addCleanup, http_client=self.\n http_client, clock=self.hs_clock, reactor=self.reactor)\n", "user_id = UserID('us', 'test')\n", "our_user = create_requester(user_id)\n", "room_creator = self.homeserver.get_room_creation_handler()\n", "self.room_id = self.get_success(room_creator.create_room(our_user,\n room_creator._presets_dict['public_chat'], ratelimit=False))[0]['room_id']\n", "self.store = self.homeserver.get_datastore()\n", "most_recent = self.get_success(self.homeserver.get_datastore().\n get_latest_event_ids_in_room(self.room_id))[0]\n", "join_event = make_event_from_dict({'room_id': self.room_id, 'sender':\n '@baduser:test.serv', 'state_key': '@baduser:test.serv', 'event_id':\n '$join:test.serv', 'depth': 1000, 'origin_server_ts': 1, 'type':\n 'm.room.member', 'origin': 'test.servx', 'content': {'membership':\n 'join'}, 'auth_events': [], 'prev_state': [(most_recent, {})],\n 'prev_events': [(most_recent, {})]})\n", "self.handler = self.homeserver.get_federation_handler()\n", "self.handler.do_auth = lambda origin, event, context, auth_events: succeed(\n context)\n", "self.client = self.homeserver.get_federation_client()\n", "self.client._check_sigs_and_hash_and_fetch = lambda dest, pdus, **k: succeed(\n pdus)\n", "self.assertEqual(self.get_success(self.handler.on_receive_pdu('test.serv',\n join_event, sent_to_us_directly=True)), None)\n", "self.assertEqual(self.get_success(self.store.get_latest_event_ids_in_room(\n self.room_id))[0], '$join:test.serv')\n", "def test_cant_hide_direct_ancestors(self):...\n", "\"\"\"docstring\"\"\"\n", "async def post_json(destination, path, data, headers=None, timeout=0):...\n", "if path.startswith('/_matrix/federation/v1/get_missing_events/'):\n", "return {'events': []}\n", "self.http_client.post_json = post_json\n", "most_recent = self.get_success(self.store.get_latest_event_ids_in_room(self\n .room_id))[0]\n", "lying_event = make_event_from_dict({'room_id': self.room_id, 'sender':\n '@baduser:test.serv', 'event_id': 'one:test.serv', 'depth': 1000,\n 'origin_server_ts': 1, 'type': 'm.room.message', 'origin': 'test.serv',\n 'content': {'body': 'hewwo?'}, 'auth_events': [], 'prev_events': [(\n 'two:test.serv', {}), (most_recent, {})]})\n", "failure = self.get_failure(self.handler.on_receive_pdu('test.serv',\n lying_event, sent_to_us_directly=True), FederationError)\n", "self.assertEqual(failure.value.args[0],\n \"ERROR 403: Your server isn't divulging details about prev_events referenced in this event.\"\n )\n", "extrem = self.get_success(self.store.get_latest_event_ids_in_room(self.room_id)\n )\n", "self.assertEqual(extrem[0], '$join:test.serv')\n", "def test_retry_device_list_resync(self):...\n", "\"\"\"docstring\"\"\"\n", "remote_user_id = '@john:test_remote'\n", "remote_origin = 'test_remote'\n", 
"self.resync_attempts = 0\n", "def query_user_devices(destination, user_id):...\n", "if user_id == remote_user_id:\n", "self.resync_attempts += 1\n", "federation_client = self.homeserver.get_federation_client()\n", "federation_client.query_user_devices = Mock(side_effect=query_user_devices)\n", "store = self.homeserver.get_datastore()\n", "store.get_rooms_for_user = Mock(return_value=make_awaitable(['!someroom:test'])\n )\n", "device_list_updater = self.homeserver.get_device_handler().device_list_updater\n", "self.get_success(device_list_updater.incoming_device_list_update(origin=\n remote_origin, edu_content={'deleted': False, 'device_display_name':\n 'Mobile', 'device_id': 'QBUAZIFURK', 'prev_id': [5], 'stream_id': 6,\n 'user_id': remote_user_id}))\n", "self.assertEqual(self.resync_attempts, 1)\n", "need_resync = self.get_success(store.\n get_user_ids_requiring_device_list_resync())\n", "self.assertIn(remote_user_id, need_resync)\n", "self.reactor.advance(30)\n", "self.assertEqual(self.resync_attempts, 2)\n", "def test_cross_signing_keys_retry(self):...\n", "\"\"\"docstring\"\"\"\n", "remote_user_id = '@john:test_remote'\n", "remote_master_key = '85T7JXPFBAySB/jwby4S3lBPTqY3+Zg53nYuGmu1ggY'\n", "remote_self_signing_key = 'QeIiFEjluPBtI7WQdG365QKZcFs9kqmHir6RBD0//nQ'\n", "federation_client = self.homeserver.get_federation_client()\n", "federation_client.query_user_devices = Mock(return_value=succeed({'user_id':\n remote_user_id, 'stream_id': 1, 'devices': [], 'master_key': {'user_id':\n remote_user_id, 'usage': ['master'], 'keys': {('ed25519:' +\n remote_master_key): remote_master_key}}, 'self_signing_key': {'user_id':\n remote_user_id, 'usage': ['self_signing'], 'keys': {('ed25519:' +\n remote_self_signing_key): remote_self_signing_key}}}))\n", "device_handler = self.homeserver.get_device_handler()\n", "self.get_success(device_handler.device_list_updater.user_device_resync(\n remote_user_id))\n", "keys = self.get_success(self.store.get_e2e_cross_signing_keys_bulk(user_ids\n =[remote_user_id]))\n", "self.assertTrue(remote_user_id in keys)\n", "master_key = keys[remote_user_id]['master']\n", "self.assertEqual(len(master_key['keys']), 1)\n", "self.assertTrue('ed25519:' + remote_master_key in master_key['keys'].keys())\n", "self.assertTrue(remote_master_key in master_key['keys'].values())\n", "self_signing_key = keys[remote_user_id]['self_signing']\n", "self.assertEqual(len(self_signing_key['keys']), 1)\n", "self.assertTrue('ed25519:' + remote_self_signing_key in self_signing_key[\n 'keys'].keys())\n", "self.assertTrue(remote_self_signing_key in self_signing_key['keys'].values())\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Docstring", "AsyncFunctionDef'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Condition", "AugAssign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_2(self) ->CLASS_0:...\n", "return CLASS_0(sys.stdin.readlines(), None)\n" ]
[ "def read(self) ->InputContent:...\n", "return InputContent(sys.stdin.readlines(), None)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_22(VAR_14, VAR_16, VAR_28, VAR_19, VAR_29, VAR_30):...\n", "VAR_50 = False\n", "if VAR_19[VAR_30] == 'None':\n", "VAR_19[VAR_30] = None\n", "if VAR_28.datatype == 'bool':\n", "if VAR_19[VAR_30] != VAR_29:\n", "VAR_19[VAR_30] = 1 if VAR_19[VAR_30] == 'True' else 0\n", "if VAR_28.datatype == 'comments':\n", "if VAR_29 is not None:\n", "return VAR_50, VAR_19\n", "VAR_19[VAR_30] = Markup(VAR_19[VAR_30]).unescape()\n", "if VAR_28.datatype == 'datetime':\n", "if VAR_19[VAR_30] is not None:\n", "VAR_88 = db.cc_classes[VAR_28.id]\n", "if VAR_19[VAR_30]:\n", "VAR_19[VAR_30] = datetime.strptime(VAR_19[VAR_30], '%Y-%m-%d')\n", "VAR_19[VAR_30] = db.Books.DEFAULT_PUBDATE\n", "setattr(getattr(VAR_16, VAR_30)[0], 'value', VAR_19[VAR_30])\n", "VAR_104 = getattr(VAR_16, VAR_30)[0]\n", "VAR_89 = VAR_88(value=to_save[cc_string], VAR_16=book_id)\n", "VAR_19[VAR_30] = clean_html(VAR_19[VAR_30])\n", "VAR_50 = True\n", "getattr(VAR_16, VAR_30).remove(VAR_104)\n", "calibre_db.session.add(VAR_89)\n", "calibre_db.session.delete(VAR_104)\n", "VAR_50 = True\n", "VAR_50 = True\n" ]
[ "def edit_cc_data_value(book_id, book, c, to_save, cc_db_value, cc_string):...\n", "changed = False\n", "if to_save[cc_string] == 'None':\n", "to_save[cc_string] = None\n", "if c.datatype == 'bool':\n", "if to_save[cc_string] != cc_db_value:\n", "to_save[cc_string] = 1 if to_save[cc_string] == 'True' else 0\n", "if c.datatype == 'comments':\n", "if cc_db_value is not None:\n", "return changed, to_save\n", "to_save[cc_string] = Markup(to_save[cc_string]).unescape()\n", "if c.datatype == 'datetime':\n", "if to_save[cc_string] is not None:\n", "cc_class = db.cc_classes[c.id]\n", "if to_save[cc_string]:\n", "to_save[cc_string] = datetime.strptime(to_save[cc_string], '%Y-%m-%d')\n", "to_save[cc_string] = db.Books.DEFAULT_PUBDATE\n", "setattr(getattr(book, cc_string)[0], 'value', to_save[cc_string])\n", "del_cc = getattr(book, cc_string)[0]\n", "new_cc = cc_class(value=to_save[cc_string], book=book_id)\n", "to_save[cc_string] = clean_html(to_save[cc_string])\n", "changed = True\n", "getattr(book, cc_string).remove(del_cc)\n", "calibre_db.session.add(new_cc)\n", "calibre_db.session.delete(del_cc)\n", "changed = True\n", "changed = True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Condition", "Return'", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'" ]
[ "def FUNC_1(VAR_2, VAR_3):...\n", "VAR_21 = iter(VAR_2)\n", "yield next(VAR_21)\n", "for x in VAR_21:\n", "yield VAR_3\n", "yield x\n" ]
[ "def intersperse(iterable, delimiter):...\n", "it = iter(iterable)\n", "yield next(it)\n", "for x in it:\n", "yield delimiter\n", "yield x\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "For", "Expr'", "Expr'" ]
[ "def FUNC_64(self):...\n", "\"\"\"docstring\"\"\"\n", "return ''\n" ]
[ "def newline(self):...\n", "\"\"\"docstring\"\"\"\n", "return ''\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "@login_required()...\n", "return FUNC_10(VAR_2=request, VAR_10=menu, VAR_5=conn, VAR_7=url, **kwargs)\n" ]
[ "@login_required()...\n", "return _load_template(request=request, menu=menu, conn=conn, url=url, **kwargs)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "VAR_202 = VAR_5.getScriptService()\n", "VAR_203 = VAR_202.getScripts()\n", "VAR_204 = {}\n", "VAR_205 = VAR_2.session.get('server_settings', {}).get('scripts_to_ignore', ''\n ).split(',')\n", "for s in VAR_203:\n", "VAR_42 = s.id.val\n", "def FUNC_84(VAR_206):...\n", "VAR_314 = s.path.val\n", "VAR_317 = []\n", "VAR_3 = s.name.val\n", "for VAR_3, VAR_375 in VAR_206.items():\n", "VAR_315 = os.path.join(VAR_314, VAR_3)\n", "if isinstance(VAR_375, dict):\n", "VAR_317.sort(VAR_310=lambda x: x['name'].lower())\n", "if VAR_315 in VAR_205:\n", "VAR_317.append({'name': VAR_3, 'ul': FUNC_84(VAR_375)})\n", "VAR_317.append({'name': VAR_3, 'id': VAR_375})\n", "return VAR_317\n", "VAR_0.info('Ignoring script %r' % VAR_315)\n", "VAR_206 = VAR_204\n", "VAR_316 = VAR_315.split(os.path.sep)\n", "for li, VAR_362 in enumerate(VAR_316):\n", "if len(VAR_362) == 0:\n", "if VAR_362 not in VAR_206:\n", "if li + 1 == len(VAR_316):\n", "VAR_206 = VAR_206[VAR_362]\n", "VAR_206[VAR_362] = VAR_42\n", "VAR_206[VAR_362] = {}\n" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "scriptService = conn.getScriptService()\n", "scripts = scriptService.getScripts()\n", "scriptMenu = {}\n", "scripts_to_ignore = request.session.get('server_settings', {}).get(\n 'scripts_to_ignore', '').split(',')\n", "for s in scripts:\n", "scriptId = s.id.val\n", "def ul_to_list(ul):...\n", "path = s.path.val\n", "dir_list = []\n", "name = s.name.val\n", "for name, value in ul.items():\n", "fullpath = os.path.join(path, name)\n", "if isinstance(value, dict):\n", "dir_list.sort(key=lambda x: x['name'].lower())\n", "if fullpath in scripts_to_ignore:\n", "dir_list.append({'name': name, 'ul': ul_to_list(value)})\n", "dir_list.append({'name': name, 'id': value})\n", "return dir_list\n", "logger.info('Ignoring script %r' % fullpath)\n", "ul = scriptMenu\n", "dirs = fullpath.split(os.path.sep)\n", "for li, d in enumerate(dirs):\n", "if len(d) == 0:\n", "if d not in ul:\n", "if li + 1 == len(dirs):\n", "ul = ul[d]\n", "ul[d] = scriptId\n", "ul[d] = {}\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Return'", "Expr'", "Assign'", "Assign'", "For", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Assign'" ]
[ "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "if VAR_18._instance is None:\n", "if VAR_18._instance is None:\n", "return VAR_18._instance\n", "VAR_18._instance = CLASS_3()\n" ]
[ "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "if cls._instance is None:\n", "if cls._instance is None:\n", "return cls._instance\n", "cls._instance = ComponentRegistry()\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Condition", "Return'", "Assign'" ]
[ "from __future__ import division, print_function, unicode_literals\n", "import os\n", "import sys\n", "import datetime\n", "import itertools\n", "import uuid\n", "from flask import session as flask_session\n", "from binascii import hexlify\n", "from flask_login import AnonymousUserMixin, current_user\n", "from flask_login import user_logged_in\n", "from contextlib import contextmanager\n", "from flask_dance.consumer.backend.sqla import OAuthConsumerMixin\n", "from sqlalchemy import create_engine, exc, exists, event, text\n", "from flask_dance.consumer.storage.sqla import OAuthConsumerMixin\n", "VAR_16 = False\n", "VAR_16 = True\n", "from sqlalchemy import Column, ForeignKey\n", "VAR_16 = True\n", "from sqlalchemy import String, Integer, SmallInteger, Boolean, DateTime, Float, JSON\n", "from sqlalchemy.orm.attributes import flag_modified\n", "from sqlalchemy.sql.expression import func\n", "from sqlalchemy.orm import declarative_base\n", "from sqlalchemy.ext.declarative import declarative_base\n", "from sqlalchemy.orm import backref, relationship, sessionmaker, Session, scoped_session\n", "from werkzeug.security import generate_password_hash\n", "from . import constants, logger, cli\n", "VAR_0 = logger.create()\n", "VAR_1 = None\n", "VAR_2 = None\n", "VAR_3 = declarative_base()\n", "VAR_4 = {}\n", "def FUNC_0(VAR_5, VAR_6):...\n", "FUNC_1()\n", "def FUNC_1():...\n", "if VAR_92.get('_user_id', ''):\n", "VAR_0.error('No user id in session')\n", "if not FUNC_3(VAR_92.get('_user_id', ''), VAR_92.get('_id', '')):\n", "VAR_1.rollback()\n", "def FUNC_2(VAR_7, VAR_8):...\n", "VAR_94 = CLASS_4(VAR_92.get('_user_id', ''), VAR_92.get('_id', ''))\n", "VAR_0.info('Found stored session : ' + VAR_92.get('_id', ''))\n", "VAR_0.exception(e)\n", "VAR_0.info('Deleted session_key : ' + VAR_8)\n", "VAR_1.rollback()\n", "def FUNC_3(VAR_7, VAR_8):...\n", "VAR_1.add(VAR_94)\n", "VAR_1.query(CLASS_4).filter(CLASS_4.user_id == VAR_7, CLASS_4.session_key ==\n VAR_8).delete()\n", "VAR_0.exception(e)\n", "return bool(VAR_1.query(CLASS_4).filter(CLASS_4.user_id == VAR_7, CLASS_4.\n session_key == VAR_8).one_or_none())\n", "VAR_1.rollback()\n", "user_logged_in.connect(FUNC_0)\n", "VAR_1.commit()\n", "VAR_1.commit()\n", "VAR_0.exception(e)\n", "def FUNC_4(VAR_9):...\n", "VAR_0.info('Login and store session : ' + VAR_92.get('_id', ''))\n", "VAR_17 = list()\n", "for element in VAR_9:\n", "VAR_17.append(element.id)\n", "VAR_4[VAR_18.id] = VAR_17\n", "@property...\n", "return self.is_active\n" ]
[ "from __future__ import division, print_function, unicode_literals\n", "import os\n", "import sys\n", "import datetime\n", "import itertools\n", "import uuid\n", "from flask import session as flask_session\n", "from binascii import hexlify\n", "from flask_login import AnonymousUserMixin, current_user\n", "from flask_login import user_logged_in\n", "from contextlib import contextmanager\n", "from flask_dance.consumer.backend.sqla import OAuthConsumerMixin\n", "from sqlalchemy import create_engine, exc, exists, event, text\n", "from flask_dance.consumer.storage.sqla import OAuthConsumerMixin\n", "oauth_support = False\n", "oauth_support = True\n", "from sqlalchemy import Column, ForeignKey\n", "oauth_support = True\n", "from sqlalchemy import String, Integer, SmallInteger, Boolean, DateTime, Float, JSON\n", "from sqlalchemy.orm.attributes import flag_modified\n", "from sqlalchemy.sql.expression import func\n", "from sqlalchemy.orm import declarative_base\n", "from sqlalchemy.ext.declarative import declarative_base\n", "from sqlalchemy.orm import backref, relationship, sessionmaker, Session, scoped_session\n", "from werkzeug.security import generate_password_hash\n", "from . import constants, logger, cli\n", "log = logger.create()\n", "session = None\n", "app_DB_path = None\n", "Base = declarative_base()\n", "searched_ids = {}\n", "def signal_store_user_session(object, user):...\n", "store_user_session()\n", "def store_user_session():...\n", "if flask_session.get('_user_id', ''):\n", "log.error('No user id in session')\n", "if not check_user_session(flask_session.get('_user_id', ''), flask_session.\n", "session.rollback()\n", "def delete_user_session(user_id, session_key):...\n", "user_session = User_Sessions(flask_session.get('_user_id', ''),\n flask_session.get('_id', ''))\n", "log.info('Found stored session : ' + flask_session.get('_id', ''))\n", "log.exception(e)\n", "log.info('Deleted session_key : ' + session_key)\n", "session.rollback()\n", "def check_user_session(user_id, session_key):...\n", "session.add(user_session)\n", "session.query(User_Sessions).filter(User_Sessions.user_id == user_id, \n User_Sessions.session_key == session_key).delete()\n", "log.exception(e)\n", "return bool(session.query(User_Sessions).filter(User_Sessions.user_id ==\n user_id, User_Sessions.session_key == session_key).one_or_none())\n", "session.rollback()\n", "user_logged_in.connect(signal_store_user_session)\n", "session.commit()\n", "session.commit()\n", "log.exception(e)\n", "def store_ids(result):...\n", "log.info('Login and store session : ' + flask_session.get('_id', ''))\n", "ids = list()\n", "for element in result:\n", "ids.append(element.id)\n", "searched_ids[current_user.id] = ids\n", "@property...\n", "return self.is_active\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "FunctionDef'", "Condition", "Expr'", "Condition", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Expr'", "Expr'", "Expr'", "Return'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "For", "Expr'", "Assign'", "Condition", "Return'" ]
[ "def FUNC_58(self):...\n", "\"\"\"docstring\"\"\"\n", "from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked\n", "if not self.flags.ignore_links:\n", "check_if_doc_is_linked(self, VAR_25='Cancel')\n", "check_if_doc_is_dynamically_linked(self, VAR_25='Cancel')\n" ]
[ "def check_no_back_links_exist(self):...\n", "\"\"\"docstring\"\"\"\n", "from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked\n", "if not self.flags.ignore_links:\n", "check_if_doc_is_linked(self, method='Cancel')\n", "check_if_doc_is_dynamically_linked(self, method='Cancel')\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "ImportFrom'", "Condition", "Expr'", "Expr'" ]
[ "def __init__(self, VAR_21: IAgent, VAR_8: Optional[IPSet]=None, VAR_9:...\n", "\"\"\"docstring\"\"\"\n", "self._agent = VAR_21\n", "self._ip_whitelist = VAR_8\n", "self._ip_blacklist = VAR_9\n" ]
[ "def __init__(self, agent: IAgent, ip_whitelist: Optional[IPSet]=None,...\n", "\"\"\"docstring\"\"\"\n", "self._agent = agent\n", "self._ip_whitelist = ip_whitelist\n", "self._ip_blacklist = ip_blacklist\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_29(self):...\n", "" ]
[ "def about_us(self):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_13():...\n", "" ]
[ "def test_launching_with_python2():...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "@defer.inlineCallbacks...\n", "self.hs.config.enable_set_displayname = False\n", "yield defer.ensureDeferred(self.store.set_profile_displayname(self.frank.\n localpart, 'Frank'))\n", "self.assertEquals((yield defer.ensureDeferred(self.store.\n get_profile_displayname(self.frank.localpart))), 'Frank')\n", "VAR_4 = defer.ensureDeferred(self.handler.set_displayname(self.frank,\n synapse.types.create_requester(self.frank), 'Frank Jr.'))\n", "yield self.assertFailure(VAR_4, SynapseError)\n" ]
[ "@defer.inlineCallbacks...\n", "self.hs.config.enable_set_displayname = False\n", "yield defer.ensureDeferred(self.store.set_profile_displayname(self.frank.\n localpart, 'Frank'))\n", "self.assertEquals((yield defer.ensureDeferred(self.store.\n get_profile_displayname(self.frank.localpart))), 'Frank')\n", "d = defer.ensureDeferred(self.handler.set_displayname(self.frank, synapse.\n types.create_requester(self.frank), 'Frank Jr.'))\n", "yield self.assertFailure(d, SynapseError)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_14(self, VAR_36, *VAR_37):...\n", "" ]
[ "def log_message(self, format, *args):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_3(self):...\n", "\"\"\"docstring\"\"\"\n", "self.load_from_db()\n" ]
[ "def reload(self):...\n", "\"\"\"docstring\"\"\"\n", "self.load_from_db()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'" ]
[ "def FUNC_43(self):...\n", "self.helper.join(self.room_id, user=self.second_user_id, VAR_52=self.second_tok\n )\n", "VAR_12 = 'hello'\n", "VAR_22, VAR_23 = self.make_request('POST',\n '/_matrix/client/r0/rooms/{}/ban'.format(self.room_id), VAR_19={\n 'reason': reason, 'user_id': self.second_user_id}, VAR_16=self.creator_tok)\n", "self.assertEqual(VAR_23.code, 200, VAR_23.result)\n", "self._check_for_reason(VAR_12)\n" ]
[ "def test_ban_reason(self):...\n", "self.helper.join(self.room_id, user=self.second_user_id, tok=self.second_tok)\n", "reason = 'hello'\n", "request, channel = self.make_request('POST',\n '/_matrix/client/r0/rooms/{}/ban'.format(self.room_id), content={\n 'reason': reason, 'user_id': self.second_user_id}, access_token=self.\n creator_tok)\n", "self.assertEqual(channel.code, 200, channel.result)\n", "self._check_for_reason(reason)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "VAR_167 = FUNC_52(VAR_2, VAR_39, VAR_40, VAR_8, **kwargs)\n", "if 'error' in VAR_167:\n", "return VAR_167\n", "if len(VAR_167['data']) < 1:\n", "return dict(VAR_176='Could not retrieve bulk annotations table')\n", "VAR_168 = 0\n", "VAR_169 = None\n", "VAR_170 = sorted(VAR_167['data'], VAR_79=lambda x: x['file'], reverse=True)\n", "VAR_171 = None\n", "for VAR_246 in VAR_170:\n", "VAR_171 = FUNC_53(VAR_2, VAR_246['file'], VAR_8, **kwargs)\n", "if VAR_169 is None:\n", "if 'error' not in VAR_171:\n", "return dict(VAR_176=tableData.get('error',\n 'Could not retrieve matching bulk annotation table'))\n", "VAR_171['id'] = VAR_168\n", "VAR_169 = VAR_246\n", "VAR_171['annId'] = VAR_169['id']\n", "VAR_168 = VAR_246['file']\n", "VAR_171['owner'] = VAR_169['owner']\n", "VAR_171['addedBy'] = VAR_169['addedBy']\n", "VAR_171['parentType'] = VAR_169['parentType']\n", "VAR_171['parentId'] = VAR_169['parentId']\n", "VAR_171['addedOn'] = VAR_169['addedOn']\n", "return VAR_171\n" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "a = _bulk_file_annotations(request, objtype, objid, conn, **kwargs)\n", "if 'error' in a:\n", "return a\n", "if len(a['data']) < 1:\n", "return dict(error='Could not retrieve bulk annotations table')\n", "fileId = 0\n", "ann = None\n", "annList = sorted(a['data'], key=lambda x: x['file'], reverse=True)\n", "tableData = None\n", "for annotation in annList:\n", "tableData = _table_query(request, annotation['file'], conn, **kwargs)\n", "if ann is None:\n", "if 'error' not in tableData:\n", "return dict(error=tableData.get('error',\n 'Could not retrieve matching bulk annotation table'))\n", "tableData['id'] = fileId\n", "ann = annotation\n", "tableData['annId'] = ann['id']\n", "fileId = annotation['file']\n", "tableData['owner'] = ann['owner']\n", "tableData['addedBy'] = ann['addedBy']\n", "tableData['parentType'] = ann['parentType']\n", "tableData['parentId'] = ann['parentId']\n", "tableData['addedOn'] = ann['addedOn']\n", "return tableData\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Return'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Condition", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "@VAR_2.route('/admin/viewconfig', methods=['POST'])...\n", "VAR_16 = request.form.to_dict()\n", "FUNC_41(VAR_16, 'config_calibre_web_title')\n", "FUNC_41(VAR_16, 'config_columns_to_ignore')\n", "if FUNC_41(VAR_16, 'config_title_regex'):\n", "calibre_db.update_title_sort(config)\n", "if not FUNC_18(VAR_16.get('config_read_column', '0')):\n", "flash(_(u'Invalid Read Column'), category='error')\n", "FUNC_38(VAR_16, 'config_read_column')\n", "VAR_0.debug('Invalid Read column')\n", "if not FUNC_19(VAR_16.get('config_restricted_column', '0')):\n", "return FUNC_10()\n", "flash(_(u'Invalid Restricted Column'), category='error')\n", "FUNC_38(VAR_16, 'config_restricted_column')\n", "VAR_0.debug('Invalid Restricted Column')\n", "FUNC_38(VAR_16, 'config_theme')\n", "return FUNC_10()\n", "FUNC_38(VAR_16, 'config_random_books')\n", "FUNC_38(VAR_16, 'config_books_per_page')\n", "FUNC_38(VAR_16, 'config_authors_max')\n", "FUNC_41(VAR_16, 'config_default_language')\n", "FUNC_41(VAR_16, 'config_default_locale')\n", "config.config_default_role = constants.selected_roles(VAR_16)\n", "config.config_default_role &= ~constants.ROLE_ANONYMOUS\n", "config.config_default_show = sum(VAR_119(k[5:]) for k in VAR_16 if k.\n startswith('show_'))\n", "if 'Show_detail_random' in VAR_16:\n", "config.config_default_show |= constants.DETAIL_RANDOM\n", "config.save()\n", "flash(_(u'Calibre-Web configuration updated'), category='success')\n", "VAR_0.debug('Calibre-Web configuration updated')\n", "FUNC_1()\n", "return FUNC_10()\n" ]
[ "@admi.route('/admin/viewconfig', methods=['POST'])...\n", "to_save = request.form.to_dict()\n", "_config_string(to_save, 'config_calibre_web_title')\n", "_config_string(to_save, 'config_columns_to_ignore')\n", "if _config_string(to_save, 'config_title_regex'):\n", "calibre_db.update_title_sort(config)\n", "if not check_valid_read_column(to_save.get('config_read_column', '0')):\n", "flash(_(u'Invalid Read Column'), category='error')\n", "_config_int(to_save, 'config_read_column')\n", "log.debug('Invalid Read column')\n", "if not check_valid_restricted_column(to_save.get('config_restricted_column',\n", "return view_configuration()\n", "flash(_(u'Invalid Restricted Column'), category='error')\n", "_config_int(to_save, 'config_restricted_column')\n", "log.debug('Invalid Restricted Column')\n", "_config_int(to_save, 'config_theme')\n", "return view_configuration()\n", "_config_int(to_save, 'config_random_books')\n", "_config_int(to_save, 'config_books_per_page')\n", "_config_int(to_save, 'config_authors_max')\n", "_config_string(to_save, 'config_default_language')\n", "_config_string(to_save, 'config_default_locale')\n", "config.config_default_role = constants.selected_roles(to_save)\n", "config.config_default_role &= ~constants.ROLE_ANONYMOUS\n", "config.config_default_show = sum(int(k[5:]) for k in to_save if k.\n startswith('show_'))\n", "if 'Show_detail_random' in to_save:\n", "config.config_default_show |= constants.DETAIL_RANDOM\n", "config.save()\n", "flash(_(u'Calibre-Web configuration updated'), category='success')\n", "log.debug('Calibre-Web configuration updated')\n", "before_request()\n", "return view_configuration()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Expr'", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Expr'", "Condition", "Return'", "Expr'", "Expr'", "Expr'", "Expr'", "Return'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "AugAssign'", "Assign'", "Condition", "AugAssign'", "Expr'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_3(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_12 = '@john:test_remote'\n", "VAR_18 = '85T7JXPFBAySB/jwby4S3lBPTqY3+Zg53nYuGmu1ggY'\n", "VAR_19 = 'QeIiFEjluPBtI7WQdG365QKZcFs9kqmHir6RBD0//nQ'\n", "VAR_14 = self.homeserver.get_federation_client()\n", "VAR_14.query_user_devices = Mock(return_value=succeed({'user_id':\n remote_user_id, 'stream_id': 1, 'devices': [], 'master_key': {'user_id':\n remote_user_id, 'usage': ['master'], 'keys': {('ed25519:' +\n remote_master_key): remote_master_key}}, 'self_signing_key': {'user_id':\n remote_user_id, 'usage': ['self_signing'], 'keys': {('ed25519:' +\n remote_self_signing_key): remote_self_signing_key}}}))\n", "VAR_20 = self.homeserver.get_device_handler()\n", "self.get_success(VAR_20.device_list_updater.user_device_resync(VAR_12))\n", "VAR_21 = self.get_success(self.store.get_e2e_cross_signing_keys_bulk(\n user_ids=[remote_user_id]))\n", "self.assertTrue(VAR_12 in VAR_21)\n", "VAR_22 = VAR_21[VAR_12]['master']\n", "self.assertEqual(len(VAR_22['keys']), 1)\n", "self.assertTrue('ed25519:' + VAR_18 in VAR_22['keys'].keys())\n", "self.assertTrue(VAR_18 in VAR_22['keys'].values())\n", "VAR_23 = VAR_21[VAR_12]['self_signing']\n", "self.assertEqual(len(VAR_23['keys']), 1)\n", "self.assertTrue('ed25519:' + VAR_19 in VAR_23['keys'].keys())\n", "self.assertTrue(VAR_19 in VAR_23['keys'].values())\n" ]
[ "def test_cross_signing_keys_retry(self):...\n", "\"\"\"docstring\"\"\"\n", "remote_user_id = '@john:test_remote'\n", "remote_master_key = '85T7JXPFBAySB/jwby4S3lBPTqY3+Zg53nYuGmu1ggY'\n", "remote_self_signing_key = 'QeIiFEjluPBtI7WQdG365QKZcFs9kqmHir6RBD0//nQ'\n", "federation_client = self.homeserver.get_federation_client()\n", "federation_client.query_user_devices = Mock(return_value=succeed({'user_id':\n remote_user_id, 'stream_id': 1, 'devices': [], 'master_key': {'user_id':\n remote_user_id, 'usage': ['master'], 'keys': {('ed25519:' +\n remote_master_key): remote_master_key}}, 'self_signing_key': {'user_id':\n remote_user_id, 'usage': ['self_signing'], 'keys': {('ed25519:' +\n remote_self_signing_key): remote_self_signing_key}}}))\n", "device_handler = self.homeserver.get_device_handler()\n", "self.get_success(device_handler.device_list_updater.user_device_resync(\n remote_user_id))\n", "keys = self.get_success(self.store.get_e2e_cross_signing_keys_bulk(user_ids\n =[remote_user_id]))\n", "self.assertTrue(remote_user_id in keys)\n", "master_key = keys[remote_user_id]['master']\n", "self.assertEqual(len(master_key['keys']), 1)\n", "self.assertTrue('ed25519:' + remote_master_key in master_key['keys'].keys())\n", "self.assertTrue(remote_master_key in master_key['keys'].values())\n", "self_signing_key = keys[remote_user_id]['self_signing']\n", "self.assertEqual(len(self_signing_key['keys']), 1)\n", "self.assertTrue('ed25519:' + remote_self_signing_key in self_signing_key[\n 'keys'].keys())\n", "self.assertTrue(remote_self_signing_key in self_signing_key['keys'].values())\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_51(VAR_19=None, VAR_21=None):...\n", "VAR_87 = not is_gdrive_ready()\n", "VAR_88 = []\n", "if not VAR_21 and config.config_use_google_drive:\n", "VAR_21 = gdriveutils.get_error_text()\n", "if VAR_21 and gdrive_support:\n", "VAR_0.error(VAR_21)\n", "if not VAR_87 and gdrive_support:\n", "VAR_21 = _(VAR_21)\n", "VAR_88 = gdriveutils.listRootFolders()\n", "if VAR_19:\n", "flash(VAR_21, category='error')\n", "VAR_0.error(VAR_19)\n", "if request.method == 'POST' and not VAR_21:\n", "config.load()\n", "flash(_('Database Settings updated'), category='success')\n", "return render_title_template('config_db.html', config=config,\n show_authenticate_google_drive=gdrive_authenticate, gdriveError=\n gdrive_error, VAR_88=gdrivefolders, VAR_1=feature_support, title=_(\n u'Database Configuration'), page='dbconfig')\n", "flash(VAR_19, category='error')\n" ]
[ "def _db_configuration_result(error_flash=None, gdrive_error=None):...\n", "gdrive_authenticate = not is_gdrive_ready()\n", "gdrivefolders = []\n", "if not gdrive_error and config.config_use_google_drive:\n", "gdrive_error = gdriveutils.get_error_text()\n", "if gdrive_error and gdrive_support:\n", "log.error(gdrive_error)\n", "if not gdrive_authenticate and gdrive_support:\n", "gdrive_error = _(gdrive_error)\n", "gdrivefolders = gdriveutils.listRootFolders()\n", "if error_flash:\n", "flash(gdrive_error, category='error')\n", "log.error(error_flash)\n", "if request.method == 'POST' and not gdrive_error:\n", "config.load()\n", "flash(_('Database Settings updated'), category='success')\n", "return render_title_template('config_db.html', config=config,\n show_authenticate_google_drive=gdrive_authenticate, gdriveError=\n gdrive_error, gdrivefolders=gdrivefolders, feature_support=\n feature_support, title=_(u'Database Configuration'), page='dbconfig')\n", "flash(error_flash, category='error')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Expr'", "Condition", "Expr'", "Expr'", "Return'", "Expr'" ]
[ "def FUNC_86(VAR_71):...\n", "for VAR_433 in ['Dataset', 'Image', 'Plate']:\n", "VAR_187 = VAR_71.POST.get(VAR_433, None)\n", "if VAR_187 is not None:\n", "for o in list(VAR_5.getObjects(VAR_433, VAR_187.split(','))):\n", "return o.getDetails().owner.id.val\n" ]
[ "def getObjectOwnerId(r):...\n", "for t in ['Dataset', 'Image', 'Plate']:\n", "ids = r.POST.get(t, None)\n", "if ids is not None:\n", "for o in list(conn.getObjects(t, ids.split(','))):\n", "return o.getDetails().owner.id.val\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Assign'", "Condition", "For", "Return'" ]
[ "def FUNC_25(self):...\n", "\"\"\"docstring\"\"\"\n", "if frappe.flags.in_install == 'frappe':\n", "return\n", "VAR_51 = self.meta.get_workflow()\n", "if VAR_51:\n", "FUNC_25(self)\n", "if not self._action == 'save':\n", "set_workflow_state_on_action(self, VAR_51, self._action)\n" ]
[ "def validate_workflow(self):...\n", "\"\"\"docstring\"\"\"\n", "if frappe.flags.in_install == 'frappe':\n", "return\n", "workflow = self.meta.get_workflow()\n", "if workflow:\n", "validate_workflow(self)\n", "if not self._action == 'save':\n", "set_workflow_state_on_action(self, workflow, self._action)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Assign'", "Condition", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_9(self, VAR_0):...\n", "VAR_13 = VAR_0.patch('httpx.get')\n", "VAR_14 = VAR_0.patch('openapi_python_client.Path')\n", "VAR_15 = VAR_0.patch('yaml.safe_load')\n", "from openapi_python_client import _get_document\n", "VAR_11 = VAR_2(VAR_6=None, VAR_7=None)\n", "assert VAR_11 == GeneratorError(header='No URL or Path provided')\n", "VAR_13.assert_not_called()\n", "VAR_14.assert_not_called()\n", "VAR_15.assert_not_called()\n" ]
[ "def test__get_document_no_url_or_path(self, mocker):...\n", "get = mocker.patch('httpx.get')\n", "Path = mocker.patch('openapi_python_client.Path')\n", "loads = mocker.patch('yaml.safe_load')\n", "from openapi_python_client import _get_document\n", "result = _get_document(url=None, path=None)\n", "assert result == GeneratorError(header='No URL or Path provided')\n", "get.assert_not_called()\n", "Path.assert_not_called()\n", "loads.assert_not_called()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Assert'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_163(VAR_10):...\n", "self.soap_procedures[VAR_148 or VAR_10.__name__\n ] = VAR_10, VAR_188, VAR_11, VAR_189, VAR_190\n", "return VAR_10\n" ]
[ "def _soap(f):...\n", "self.soap_procedures[name or f.__name__\n ] = f, returns, args, doc, response_element_name\n", "return f\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_28(VAR_8, VAR_60, VAR_15=None, VAR_61=None, VAR_62=None, VAR_63=...\n", "if VAR_62 is None:\n", "VAR_62 = []\n", "def FUNC_36():...\n", "return FUNC_2(VAR_8, VAR_15=additional_key_data)\n" ]
[ "def get_cached_view(key, view, additional_key_data=None, additional_files=...\n", "if additional_etag is None:\n", "additional_etag = []\n", "def cache_key():...\n", "return _cache_key(key, additional_key_data=additional_key_data)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Assign'", "FunctionDef'", "Return'" ]
[ "def FUNC_38(self, VAR_2='password'):...\n", "VAR_26 = reverse('logout')\n", "for bad_url in ('http://example.com', 'https://example.com',\n", "VAR_30 = '%(url)s?%(next)s=%(bad_url)s' % {'url': VAR_26, 'next':\n REDIRECT_FIELD_NAME, 'bad_url': urlquote(bad_url)}\n", "for good_url in ('/view/?param=http://example.com',\n", "self.login()\n", "VAR_31 = '%(url)s?%(next)s=%(good_url)s' % {'url': VAR_26, 'next':\n REDIRECT_FIELD_NAME, 'good_url': urlquote(good_url)}\n", "VAR_3 = self.client.get(VAR_30)\n", "self.login()\n", "self.assertEqual(VAR_3.status_code, 302)\n", "VAR_3 = self.client.get(VAR_31)\n", "self.assertFalse(bad_url in VAR_3.url, '%s should be blocked' % bad_url)\n", "self.assertEqual(VAR_3.status_code, 302)\n", "self.confirm_logged_out()\n", "self.assertTrue(good_url in VAR_3.url, '%s should be allowed' % good_url)\n", "self.confirm_logged_out()\n" ]
[ "def test_security_check(self, password='password'):...\n", "logout_url = reverse('logout')\n", "for bad_url in ('http://example.com', 'https://example.com',\n", "nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {'url': logout_url, 'next':\n REDIRECT_FIELD_NAME, 'bad_url': urlquote(bad_url)}\n", "for good_url in ('/view/?param=http://example.com',\n", "self.login()\n", "safe_url = '%(url)s?%(next)s=%(good_url)s' % {'url': logout_url, 'next':\n REDIRECT_FIELD_NAME, 'good_url': urlquote(good_url)}\n", "response = self.client.get(nasty_url)\n", "self.login()\n", "self.assertEqual(response.status_code, 302)\n", "response = self.client.get(safe_url)\n", "self.assertFalse(bad_url in response.url, '%s should be blocked' % bad_url)\n", "self.assertEqual(response.status_code, 302)\n", "self.confirm_logged_out()\n", "self.assertTrue(good_url in response.url, '%s should be allowed' % good_url)\n", "self.confirm_logged_out()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Assign'", "For", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_1(VAR_5: int, VAR_6: str):...\n", "return time.strftime(VAR_6, time.localtime(VAR_5 / 1000))\n" ]
[ "def _format_ts_filter(value: int, format: str):...\n", "return time.strftime(format, time.localtime(value / 1000))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_13(self, VAR_14):...\n", "VAR_34 = VAR_14.patch(\n 'qutebrowser.browser.webkit.network.filescheme.os.listdir')\n", "VAR_34.side_effect = OSError('Error message')\n", "VAR_19 = filescheme.dirbrowser_html('').decode('utf-8')\n", "VAR_20 = bs4.BeautifulSoup(VAR_19, 'html.parser')\n", "print(VAR_20.prettify())\n", "VAR_35 = VAR_20('p', id='error-message-text')[0].string\n", "assert VAR_35 == 'Error message'\n" ]
[ "def test_oserror(self, mocker):...\n", "m = mocker.patch('qutebrowser.browser.webkit.network.filescheme.os.listdir')\n", "m.side_effect = OSError('Error message')\n", "html = filescheme.dirbrowser_html('').decode('utf-8')\n", "soup = bs4.BeautifulSoup(html, 'html.parser')\n", "print(soup.prettify())\n", "error_msg = soup('p', id='error-message-text')[0].string\n", "assert error_msg == 'Error message'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assert'" ]
[ "def FUNC_38():...\n", "\"\"\"docstring\"\"\"\n", "VAR_187 = FUNC_37()\n", "FUNC_10().set_value('metadata_version', VAR_187)\n", "return VAR_187\n" ]
[ "def reset_metadata_version():...\n", "\"\"\"docstring\"\"\"\n", "v = generate_hash()\n", "cache().set_value('metadata_version', v)\n", "return v\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Return'" ]
[ "def __next__(self):...\n", "if self._nextIndex > 0:\n", "self._last_item = self.item\n", "return super().__next__()\n" ]
[ "def __next__(self):...\n", "if self._nextIndex > 0:\n", "self._last_item = self.item\n", "return super().__next__()\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_63(VAR_79):...\n", "" ]
[ "def getConfigValue(key):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_85(VAR_215, VAR_187):...\n", "VAR_323 = [int(VAR_40) for VAR_40 in VAR_187.split(',')]\n", "VAR_324 = {}\n", "for VAR_38 in VAR_5.getObjects(VAR_215, VAR_323):\n", "VAR_324[VAR_38.id] = VAR_38\n", "VAR_325 = [VAR_41 for VAR_41 in VAR_323 if VAR_41 in VAR_324.keys()]\n", "if len(VAR_325) == 0:\n", "VAR_335 = list(VAR_324.values())[0].getDetails().group.id.val\n", "return VAR_325, VAR_324\n", "VAR_5.SERVICE_OPTS.setOmeroGroup(VAR_335)\n" ]
[ "def validateIds(dtype, ids):...\n", "ints = [int(oid) for oid in ids.split(',')]\n", "validObjs = {}\n", "for obj in conn.getObjects(dtype, ints):\n", "validObjs[obj.id] = obj\n", "filteredIds = [iid for iid in ints if iid in validObjs.keys()]\n", "if len(filteredIds) == 0:\n", "gid = list(validObjs.values())[0].getDetails().group.id.val\n", "return filteredIds, validObjs\n", "conn.SERVICE_OPTS.setOmeroGroup(gid)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Condition", "Assign'", "Return'", "Expr'" ]
[ "@VAR_2.route('/downloadlist')...\n", "if VAR_87.get_view_property('download', 'dir') == 'desc':\n", "VAR_10 = ub.User.name.desc()\n", "VAR_10 = ub.User.name.asc()\n", "VAR_109 = 0\n", "VAR_109 = 1\n", "if VAR_87.check_visibility(constants.SIDEBAR_DOWNLOAD) and VAR_87.role_admin():\n", "VAR_63 = ub.session.query(ub.User, func.count(ub.Downloads.book_id).label(\n 'count')).join(ub.Downloads).group_by(ub.Downloads.user_id).order_by(VAR_10\n ).all()\n", "abort(404)\n", "VAR_107 = ub.session.query(func.upper(func.substr(ub.User.name, 1, 1)).\n label('char')).filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) !=\n constants.ROLE_ANONYMOUS).group_by(func.upper(func.substr(ub.User.name,\n 1, 1))).all()\n", "return render_title_template('list.html', VAR_63=entries, folder=\n 'web.books_list', VAR_107=charlist, VAR_149=_(u'Downloads'), VAR_9=\n 'downloadlist', VAR_8='download', VAR_10=order_no)\n" ]
[ "@web.route('/downloadlist')...\n", "if current_user.get_view_property('download', 'dir') == 'desc':\n", "order = ub.User.name.desc()\n", "order = ub.User.name.asc()\n", "order_no = 0\n", "order_no = 1\n", "if current_user.check_visibility(constants.SIDEBAR_DOWNLOAD\n", "entries = ub.session.query(ub.User, func.count(ub.Downloads.book_id).label(\n 'count')).join(ub.Downloads).group_by(ub.Downloads.user_id).order_by(order\n ).all()\n", "abort(404)\n", "charlist = ub.session.query(func.upper(func.substr(ub.User.name, 1, 1)).\n label('char')).filter(ub.User.role.op('&')(constants.ROLE_ANONYMOUS) !=\n constants.ROLE_ANONYMOUS).group_by(func.upper(func.substr(ub.User.name,\n 1, 1))).all()\n", "return render_title_template('list.html', entries=entries, folder=\n 'web.books_list', charlist=charlist, title=_(u'Downloads'), page=\n 'downloadlist', data='download', order=order_no)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_8(self, VAR_12: DomainSpecificString) ->bool:...\n", "return VAR_12.domain == self.hostname\n" ]
[ "def is_mine(self, domain_specific_string: DomainSpecificString) ->bool:...\n", "return domain_specific_string.domain == self.hostname\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@require_POST...\n", "VAR_6 = get_object_or_404(Topic, VAR_2=topic_id)\n", "VAR_7 = CommentMoveForm(VAR_6=topic, data=request.POST)\n", "if VAR_7.is_valid():\n", "VAR_12 = VAR_7.save()\n", "messages.error(VAR_0, render_form_errors(VAR_7))\n", "for VAR_8 in VAR_12:\n", "return redirect(VAR_0.POST.get('next', VAR_6.get_absolute_url()))\n", "comment_posted(VAR_8=comment, mentions=None)\n", "VAR_6.decrease_comment_count()\n", "post_comment_move(VAR_8=comment, VAR_6=topic)\n" ]
[ "@require_POST...\n", "topic = get_object_or_404(Topic, pk=topic_id)\n", "form = CommentMoveForm(topic=topic, data=request.POST)\n", "if form.is_valid():\n", "comments = form.save()\n", "messages.error(request, render_form_errors(form))\n", "for comment in comments:\n", "return redirect(request.POST.get('next', topic.get_absolute_url()))\n", "comment_posted(comment=comment, mentions=None)\n", "topic.decrease_comment_count()\n", "post_comment_move(comment=comment, topic=topic)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "For", "Return'", "Expr'", "Expr'", "Expr'" ]
[ "@VAR_0.filter...\n", "return [('' if item is None else '%s' % item) for item in VAR_17]\n" ]
[ "@register.filter...\n", "return [('' if item is None else '%s' % item) for item in value]\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_0(self, VAR_0, VAR_1, VAR_2, VAR_3=True):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_3:\n", "for r in VAR_0.get_children():\n", "VAR_16 = VAR_0.name, VAR_1, VAR_2\n", "VAR_16 = r.name, VAR_1, VAR_2\n", "if VAR_16 not in self._allowed:\n", "if VAR_16 not in self._allowed:\n", "self._allowed.append(VAR_16)\n", "self._allowed.append(VAR_16)\n" ]
[ "def allow(self, role, method, resource, with_children=True):...\n", "\"\"\"docstring\"\"\"\n", "if with_children:\n", "for r in role.get_children():\n", "permission = role.name, method, resource\n", "permission = r.name, method, resource\n", "if permission not in self._allowed:\n", "if permission not in self._allowed:\n", "self._allowed.append(permission)\n", "self._allowed.append(permission)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "For", "Assign'", "Assign'", "Condition", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_30(VAR_27):...\n", "print(VAR_26 + VAR_27)\n" ]
[ "def in_print(s):...\n", "print(indent_str + s)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_11(self, VAR_0, VAR_13):...\n", "VAR_26 = VAR_0 / 'foo'\n", "VAR_32 = VAR_0 / 'bar'\n", "VAR_26.ensure()\n", "VAR_32.ensure(dir=True)\n", "VAR_25 = VAR_13(str(VAR_0))\n", "VAR_28 = self.Item(FUNC_1(VAR_26), VAR_26.relto(VAR_0))\n", "VAR_29 = self.Item(FUNC_1(VAR_32), VAR_32.relto(VAR_0))\n", "assert VAR_25.parent\n", "assert VAR_25.files == [VAR_28]\n", "assert VAR_25.folders == [VAR_29]\n" ]
[ "def test_mixed(self, tmpdir, parser):...\n", "foo_file = tmpdir / 'foo'\n", "bar_dir = tmpdir / 'bar'\n", "foo_file.ensure()\n", "bar_dir.ensure(dir=True)\n", "parsed = parser(str(tmpdir))\n", "foo_item = self.Item(_file_url(foo_file), foo_file.relto(tmpdir))\n", "bar_item = self.Item(_file_url(bar_dir), bar_dir.relto(tmpdir))\n", "assert parsed.parent\n", "assert parsed.files == [foo_item]\n", "assert parsed.folders == [bar_item]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assert'", "Assert'", "Assert'" ]
[ "def FUNC_3(self, VAR_13):...\n", "if VAR_13 == 'untranslated':\n", "return 'todo'\n", "return super().to_python(VAR_13)\n" ]
[ "def to_python(self, value):...\n", "if value == 'untranslated':\n", "return 'todo'\n", "return super().to_python(value)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_8(VAR_0, VAR_1):...\n", "return FUNC_0(VAR_0=request, VAR_1=pk, VAR_2='is_globally_pinned', VAR_3=\n False, VAR_4=Comment.UNPINNED, VAR_5=_(\n 'The topic has been globally unpinned'))\n" ]
[ "def global_unpin(request, pk):...\n", "return _moderate(request=request, pk=pk, field_name='is_globally_pinned',\n to_value=False, action=Comment.UNPINNED, message=_(\n 'The topic has been globally unpinned'))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "from __future__ import absolute_import, unicode_literals\n", "from django import forms\n", "from django.apps import apps\n", "from django.template.loader import render_to_string\n", "from django.utils.safestring import mark_safe\n", "from .permissions import permission_tag_view\n", "VAR_3 = 'tags/forms/widgets/tag_select_option.html'\n", "def __init__(self, *VAR_4, **VAR_5):...\n", "self.queryset = VAR_5.pop('queryset')\n", "return super(CLASS_0, self).__init__(*VAR_4, **kwargs)\n" ]
[ "from __future__ import absolute_import, unicode_literals\n", "from django import forms\n", "from django.apps import apps\n", "from django.template.loader import render_to_string\n", "from django.utils.safestring import mark_safe\n", "from .permissions import permission_tag_view\n", "option_template_name = 'tags/forms/widgets/tag_select_option.html'\n", "def __init__(self, *args, **kwargs):...\n", "self.queryset = kwargs.pop('queryset')\n", "return super(TagFormWidget, self).__init__(*args, **kwargs)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "FunctionDef'", "Assign'", "Return'" ]
[ "@log_function...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = FUNC_1('/groups/%s/settings/m.join_policy', VAR_30)\n", "return self.client.put_json(VAR_5=destination, VAR_2=path, VAR_3={\n 'requester_user_id': requester_user_id}, VAR_39=content, VAR_15=True)\n" ]
[ "@log_function...\n", "\"\"\"docstring\"\"\"\n", "path = _create_v1_path('/groups/%s/settings/m.join_policy', group_id)\n", "return self.client.put_json(destination=destination, path=path, args={\n 'requester_user_id': requester_user_id}, data=content, ignore_backoff=True)\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_15(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_13 = 1\n", "VAR_3 = '@test:server'\n", "self.get_success(self.presence_handler.update_external_syncs_row(VAR_13,\n VAR_3, True, self.clock.time_msec()))\n", "self.reactor.advance(EXTERNAL_PROCESS_EXPIRY / 2)\n", "VAR_10 = self.get_success(self.presence_handler.get_state(UserID.\n from_string(VAR_3)))\n", "self.assertEqual(VAR_10.state, PresenceState.ONLINE)\n", "self.reactor.advance(EXTERNAL_PROCESS_EXPIRY)\n", "VAR_10 = self.get_success(self.presence_handler.get_state(UserID.\n from_string(VAR_3)))\n", "self.assertEqual(VAR_10.state, PresenceState.OFFLINE)\n" ]
[ "def test_external_process_timeout(self):...\n", "\"\"\"docstring\"\"\"\n", "process_id = 1\n", "user_id = '@test:server'\n", "self.get_success(self.presence_handler.update_external_syncs_row(process_id,\n user_id, True, self.clock.time_msec()))\n", "self.reactor.advance(EXTERNAL_PROCESS_EXPIRY / 2)\n", "state = self.get_success(self.presence_handler.get_state(UserID.from_string\n (user_id)))\n", "self.assertEqual(state.state, PresenceState.ONLINE)\n", "self.reactor.advance(EXTERNAL_PROCESS_EXPIRY)\n", "state = self.get_success(self.presence_handler.get_state(UserID.from_string\n (user_id)))\n", "self.assertEqual(state.state, PresenceState.OFFLINE)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_23(self):...\n", "self._validate_mandatory()\n", "self._validate_data_fields()\n", "self._validate_selects()\n", "self._validate_non_negative()\n", "self._validate_length()\n", "self._extract_images_from_text_editor()\n", "self._sanitize_content()\n", "self._save_passwords()\n", "self.validate_workflow()\n", "VAR_50 = self.get_all_children()\n", "for VAR_21 in VAR_50:\n", "VAR_21._validate_data_fields()\n", "if self.is_new():\n", "VAR_21._validate_selects()\n", "for VAR_18 in optional_fields:\n", "self.validate_set_only_once()\n", "VAR_21._validate_non_negative()\n", "self.set(VAR_18, None)\n", "VAR_21._validate_length()\n", "VAR_21._extract_images_from_text_editor()\n", "VAR_21._sanitize_content()\n", "VAR_21._save_passwords()\n" ]
[ "def _validate(self):...\n", "self._validate_mandatory()\n", "self._validate_data_fields()\n", "self._validate_selects()\n", "self._validate_non_negative()\n", "self._validate_length()\n", "self._extract_images_from_text_editor()\n", "self._sanitize_content()\n", "self._save_passwords()\n", "self.validate_workflow()\n", "children = self.get_all_children()\n", "for d in children:\n", "d._validate_data_fields()\n", "if self.is_new():\n", "d._validate_selects()\n", "for fieldname in optional_fields:\n", "self.validate_set_only_once()\n", "d._validate_non_negative()\n", "self.set(fieldname, None)\n", "d._validate_length()\n", "d._extract_images_from_text_editor()\n", "d._sanitize_content()\n", "d._save_passwords()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "For", "Expr'", "Condition", "Expr'", "For", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_86(self, VAR_25, *VAR_0, **VAR_1):...\n", "FUNC_83(self, VAR_72(self, *VAR_0, **kwargs))\n", "for VAR_6 in VAR_73:\n", "FUNC_83(self, VAR_6(self, VAR_25, *VAR_0, **kwargs))\n", "return self._return_value\n" ]
[ "def runner(self, method, *args, **kwargs):...\n", "add_to_return_value(self, fn(self, *args, **kwargs))\n", "for f in hooks:\n", "add_to_return_value(self, f(self, method, *args, **kwargs))\n", "return self._return_value\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "For", "Expr'", "Return'" ]
[ "def FUNC_5(self):...\n", "return self.base_url_parameters()\n" ]
[ "def url_parameters(self):...\n", "return self.base_url_parameters()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __getattr__(self, VAR_12):...\n", "if VAR_12 == '__name__':\n", "return self.__class__.__name__\n", "return super(CLASS_3, self).getattr(VAR_12)\n" ]
[ "def __getattr__(self, name):...\n", "if name == '__name__':\n", "return self.__class__.__name__\n", "return super(render_response, self).getattr(name)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_70(self, VAR_143=VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "VAR_56 = VAR_263.request\n", "VAR_139 = VAR_263.session\n", "VAR_63 = VAR_139.auth\n", "VAR_254 = self.table_user()\n", "if not self.is_logged_in():\n", "VAR_280 = VAR_63.user.id\n", "VAR_281 = VAR_143\n", "VAR_141 = None\n", "if VAR_143 is VAR_3:\n", "VAR_143 = VAR_263.request.post_vars.user_id\n", "if VAR_143 and VAR_143 != self.user.id and VAR_143 != '0':\n", "if not self.has_permission('impersonate', self.table_user(), VAR_143):\n", "if VAR_143 in (0, '0'):\n", "VAR_141 = VAR_254(VAR_143)\n", "if self.is_impersonating():\n", "if VAR_281 is VAR_3 and not VAR_56.post_vars:\n", "if not VAR_141:\n", "VAR_139.clear()\n", "return None\n", "return SQLFORM.factory(VAR_1('user_id', 'integer'))\n", "if not VAR_141:\n", "VAR_63.impersonator = pickle.dumps(VAR_139, pickle.HIGHEST_PROTOCOL)\n", "VAR_139.update(pickle.loads(VAR_63.impersonator))\n", "return None\n", "return SQLFORM(VAR_254, VAR_141.id, readonly=True)\n", "VAR_63.user.update(VAR_254._filter_fields(VAR_141, True))\n", "self.user = VAR_139.auth.user\n", "self.user = VAR_63.user\n", "self.update_groups()\n", "self.update_groups()\n", "self.run_login_onaccept()\n", "VAR_136 = self.messages['impersonate_log']\n", "self.log_event(VAR_136, dict(VAR_215=current_id, other_id=auth.user.id))\n", "self.run_login_onaccept()\n" ]
[ "def impersonate(self, user_id=DEFAULT):...\n", "\"\"\"docstring\"\"\"\n", "request = current.request\n", "session = current.session\n", "auth = session.auth\n", "table_user = self.table_user()\n", "if not self.is_logged_in():\n", "current_id = auth.user.id\n", "requested_id = user_id\n", "user = None\n", "if user_id is DEFAULT:\n", "user_id = current.request.post_vars.user_id\n", "if user_id and user_id != self.user.id and user_id != '0':\n", "if not self.has_permission('impersonate', self.table_user(), user_id):\n", "if user_id in (0, '0'):\n", "user = table_user(user_id)\n", "if self.is_impersonating():\n", "if requested_id is DEFAULT and not request.post_vars:\n", "if not user:\n", "session.clear()\n", "return None\n", "return SQLFORM.factory(Field('user_id', 'integer'))\n", "if not user:\n", "auth.impersonator = pickle.dumps(session, pickle.HIGHEST_PROTOCOL)\n", "session.update(pickle.loads(auth.impersonator))\n", "return None\n", "return SQLFORM(table_user, user.id, readonly=True)\n", "auth.user.update(table_user._filter_fields(user, True))\n", "self.user = session.auth.user\n", "self.user = auth.user\n", "self.update_groups()\n", "self.update_groups()\n", "self.run_login_onaccept()\n", "log = self.messages['impersonate_log']\n", "self.log_event(log, dict(id=current_id, other_id=auth.user.id))\n", "self.run_login_onaccept()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Condition", "Condition", "Condition", "Expr'", "Return'", "Return'", "Condition", "Assign'", "Expr'", "Return'", "Return'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_92(self, VAR_177):...\n", "VAR_290 = VAR_177._db[VAR_177.type[10:]]\n", "VAR_291 = VAR_290.get('_format', None)\n", "if VAR_291 and isinstance(VAR_291, str):\n", "return VAR_291[2:-2]\n", "return VAR_177.name\n" ]
[ "def get_format(self, field):...\n", "rtable = field._db[field.type[10:]]\n", "format = rtable.get('_format', None)\n", "if format and isinstance(format, str):\n", "return format[2:-2]\n", "return field.name\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Return'", "Return'" ]
[ "async def FUNC_3(self, VAR_8: str, VAR_10: dict) ->bool:...\n", "\"\"\"docstring\"\"\"\n", "if VAR_10.get('id_server'):\n", "VAR_57 = [VAR_10['id_server']]\n", "VAR_57 = await self.store.get_id_servers_user_bound(user_id=mxid, VAR_19=\n threepid['medium'], VAR_20=threepid['address'])\n", "if not VAR_57:\n", "return False\n", "VAR_39 = True\n", "for VAR_4 in VAR_57:\n", "VAR_39 &= await self.try_unbind_threepid_with_id_server(VAR_8, VAR_10, VAR_4)\n", "return VAR_39\n" ]
[ "async def try_unbind_threepid(self, mxid: str, threepid: dict) ->bool:...\n", "\"\"\"docstring\"\"\"\n", "if threepid.get('id_server'):\n", "id_servers = [threepid['id_server']]\n", "id_servers = await self.store.get_id_servers_user_bound(user_id=mxid,\n medium=threepid['medium'], address=threepid['address'])\n", "if not id_servers:\n", "return False\n", "changed = True\n", "for id_server in id_servers:\n", "changed &= await self.try_unbind_threepid_with_id_server(mxid, threepid,\n id_server)\n", "return changed\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Docstring", "Condition", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "For", "AugAssign'", "Return'" ]
[ "def FUNC_43(VAR_2):...\n", "\"\"\"docstring\"\"\"\n", "VAR_71 = VAR_2.GET or VAR_2.POST\n", "VAR_114 = {'images': VAR_71.getlist('image'), 'datasets': VAR_71.getlist(\n 'dataset'), 'projects': VAR_71.getlist('project'), 'screens': VAR_71.\n getlist('screen'), 'plates': VAR_71.getlist('plate'), 'acquisitions':\n VAR_71.getlist('acquisition'), 'wells': VAR_71.getlist('well'),\n 'shares': VAR_71.getlist('share')}\n", "return VAR_114\n" ]
[ "def getIds(request):...\n", "\"\"\"docstring\"\"\"\n", "r = request.GET or request.POST\n", "selected = {'images': r.getlist('image'), 'datasets': r.getlist('dataset'),\n 'projects': r.getlist('project'), 'screens': r.getlist('screen'),\n 'plates': r.getlist('plate'), 'acquisitions': r.getlist('acquisition'),\n 'wells': r.getlist('well'), 'shares': r.getlist('share')}\n", "return selected\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Return'" ]
[ "@VAR_4.route('/generate_auth_token/<int:user_id>')...\n", "VAR_11 = request.host.rsplit(':')\n", "if len(VAR_11) == 1:\n", "VAR_13 = ':'.join(VAR_11)\n", "VAR_13 = ':'.join(VAR_11[0:-1])\n", "if VAR_13.startswith('127.') or VAR_13.lower(\n", "VAR_14 = _(\n 'PLease access calibre-web from non localhost to get valid api_endpoint for kobo device'\n )\n", "VAR_12 = ub.session.query(ub.RemoteAuthToken).filter(ub.RemoteAuthToken.\n user_id == VAR_5).filter(ub.RemoteAuthToken.token_type == 1).first()\n", "return render_title_template('generate_kobo_auth_url.html', title=_(\n u'Kobo Setup'), VAR_14=warning)\n", "if not VAR_12:\n", "VAR_12 = ub.RemoteAuthToken()\n", "VAR_15 = calibre_db.session.query(db.Books).join(db.Data).all()\n", "VAR_12.user_id = VAR_5\n", "for book in VAR_15:\n", "VAR_12.expiration = datetime.max\n", "VAR_17 = [data.format for data in book.data]\n", "return render_title_template('generate_kobo_auth_url.html', title=_(\n u'Kobo Setup'), kobo_auth_url=url_for('kobo.TopLevelEndpoint',\n auth_token=auth_token.auth_token, _external=True), VAR_14=False)\n", "VAR_12.auth_token = hexlify(urandom(16)).decode('utf-8')\n", "if not 'KEPUB' in VAR_17 and config.config_kepubifypath and 'EPUB' in VAR_17:\n", "VAR_12.token_type = 1\n", "helper.convert_book_format(book.id, config.config_calibre_dir, 'EPUB',\n 'KEPUB', current_user.name)\n", "ub.session.add(VAR_12)\n", "ub.session_commit()\n" ]
[ "@kobo_auth.route('/generate_auth_token/<int:user_id>')...\n", "host_list = request.host.rsplit(':')\n", "if len(host_list) == 1:\n", "host = ':'.join(host_list)\n", "host = ':'.join(host_list[0:-1])\n", "if host.startswith('127.') or host.lower() == 'localhost' or host.startswith(\n", "warning = _(\n 'PLease access calibre-web from non localhost to get valid api_endpoint for kobo device'\n )\n", "auth_token = ub.session.query(ub.RemoteAuthToken).filter(ub.RemoteAuthToken\n .user_id == user_id).filter(ub.RemoteAuthToken.token_type == 1).first()\n", "return render_title_template('generate_kobo_auth_url.html', title=_(\n u'Kobo Setup'), warning=warning)\n", "if not auth_token:\n", "auth_token = ub.RemoteAuthToken()\n", "books = calibre_db.session.query(db.Books).join(db.Data).all()\n", "auth_token.user_id = user_id\n", "for book in books:\n", "auth_token.expiration = datetime.max\n", "formats = [data.format for data in book.data]\n", "return render_title_template('generate_kobo_auth_url.html', title=_(\n u'Kobo Setup'), kobo_auth_url=url_for('kobo.TopLevelEndpoint',\n auth_token=auth_token.auth_token, _external=True), warning=False)\n", "auth_token.auth_token = hexlify(urandom(16)).decode('utf-8')\n", "if not 'KEPUB' in formats and config.config_kepubifypath and 'EPUB' in formats:\n", "auth_token.token_type = 1\n", "helper.convert_book_format(book.id, config.config_calibre_dir, 'EPUB',\n 'KEPUB', current_user.name)\n", "ub.session.add(auth_token)\n", "ub.session_commit()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Return'", "Condition", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Return'", "Assign'", "Condition", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_59(self, VAR_14):...\n", "\"\"\"docstring\"\"\"\n", "if self.options.get('parse_block_html'):\n", "return VAR_14\n", "return VAR_19(VAR_14)\n" ]
[ "def text(self, text):...\n", "\"\"\"docstring\"\"\"\n", "if self.options.get('parse_block_html'):\n", "return text\n", "return escape(text)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Return'" ]
[ "@FUNC_0...\n", "return UserDirectoryHandler(self)\n" ]
[ "@cache_in_self...\n", "return UserDirectoryHandler(self)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "@qutescheme.add_handler('pdfjs', backend=usertypes.Backend.QtWebKit)...\n", "\"\"\"docstring\"\"\"\n", "VAR_3 = pdfjs.get_pdfjs_res(VAR_1.path())\n", "log.misc.warning('pdfjs resource requested but not found: {}'.format(e.path))\n", "VAR_2, VAR_4 = mimetypes.guess_type(VAR_1.fileName())\n", "assert VAR_2 is not None, VAR_1\n", "return VAR_2, VAR_3\n" ]
[ "@qutescheme.add_handler('pdfjs', backend=usertypes.Backend.QtWebKit)...\n", "\"\"\"docstring\"\"\"\n", "data = pdfjs.get_pdfjs_res(url.path())\n", "log.misc.warning('pdfjs resource requested but not found: {}'.format(e.path))\n", "mimetype, _encoding = mimetypes.guess_type(url.fileName())\n", "assert mimetype is not None, url\n", "return mimetype, data\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Expr'", "Assign'", "Assert'", "Return'" ]
[ "@VAR_8.route('/api/interpret/', methods=['POST'])...\n", "FUNC_13('interpret')\n", "VAR_45 = request.json['data']\n", "VAR_65, VAR_66 = VAR_8.interface.interpret(VAR_45)\n", "return jsonify({'interpretation_scores': VAR_65, 'alternative_outputs': VAR_66}\n )\n" ]
[ "@app.route('/api/interpret/', methods=['POST'])...\n", "log_feature_analytics('interpret')\n", "raw_input = request.json['data']\n", "interpretation_scores, alternative_outputs = app.interface.interpret(raw_input)\n", "return jsonify({'interpretation_scores': interpretation_scores,\n 'alternative_outputs': alternative_outputs})\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Assign'", "Assign'", "Return'" ]
[ "@app.route('/tags/<tag_name>')...\n", "if not app.config['SEARCH_CONF']['enabled'] and not which('rg'):\n", "flash(\n 'Search (for example ripgrep) must be installed to view pages about embedded tags.'\n , 'error')\n", "VAR_11 = search(f'#{VAR_0}#', strict=True)\n", "return redirect('/')\n", "VAR_12 = set([item['id'] for item in VAR_11])\n", "for res in search_frontmatter_tags(VAR_0):\n", "if res['id'] not in VAR_12:\n", "return render_template('tags/show.html', title=f'Tags - {tag_name}', VAR_0=\n tag_name, search_result=results)\n", "VAR_11.append(res)\n" ]
[ "@app.route('/tags/<tag_name>')...\n", "if not app.config['SEARCH_CONF']['enabled'] and not which('rg'):\n", "flash(\n 'Search (for example ripgrep) must be installed to view pages about embedded tags.'\n , 'error')\n", "results = search(f'#{tag_name}#', strict=True)\n", "return redirect('/')\n", "res_ids = set([item['id'] for item in results])\n", "for res in search_frontmatter_tags(tag_name):\n", "if res['id'] not in res_ids:\n", "return render_template('tags/show.html', title=f'Tags - {tag_name}',\n tag_name=tag_name, search_result=results)\n", "results.append(res)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Expr'", "Assign'", "Return'", "Assign'", "For", "Condition", "Return'", "Expr'" ]
[ "async def FUNC_42(self, VAR_11: str, VAR_48: Sequence[Tuple[EventBase,...\n", "\"\"\"docstring\"\"\"\n", "VAR_151 = self.config.worker.events_shard_config.get_instance(VAR_11)\n", "if VAR_151 != self._instance_name:\n", "VAR_184 = await self._send_events(instance_name=instance, store=self.store,\n VAR_11=room_id, VAR_48=event_and_contexts, VAR_33=backfilled)\n", "assert self.storage.persistence\n", "return VAR_184['max_stream_id']\n", "VAR_20, VAR_49 = await self.storage.persistence.persist_events(VAR_48,\n VAR_33=backfilled)\n", "if self._ephemeral_messages_enabled:\n", "for VAR_1 in VAR_20:\n", "if not VAR_33:\n", "self._message_handler.maybe_schedule_expiry(VAR_1)\n", "for VAR_1 in VAR_20:\n", "return VAR_49.stream\n", "await self._notify_persisted_event(VAR_1, VAR_49)\n" ]
[ "async def persist_events_and_notify(self, room_id: str, event_and_contexts:...\n", "\"\"\"docstring\"\"\"\n", "instance = self.config.worker.events_shard_config.get_instance(room_id)\n", "if instance != self._instance_name:\n", "result = await self._send_events(instance_name=instance, store=self.store,\n room_id=room_id, event_and_contexts=event_and_contexts, backfilled=\n backfilled)\n", "assert self.storage.persistence\n", "return result['max_stream_id']\n", "events, max_stream_token = await self.storage.persistence.persist_events(\n event_and_contexts, backfilled=backfilled)\n", "if self._ephemeral_messages_enabled:\n", "for event in events:\n", "if not backfilled:\n", "self._message_handler.maybe_schedule_expiry(event)\n", "for event in events:\n", "return max_stream_token.stream\n", "await self._notify_persisted_event(event, max_stream_token)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Assign'", "Assert'", "Return'", "Assign'", "Condition", "For", "Condition", "Expr'", "For", "Return'", "Expr'" ]
[ "def FUNC_25(VAR_32, VAR_17, VAR_15):...\n", "if 'btn-upload-format' in VAR_32.files:\n", "VAR_40 = VAR_32.files['btn-upload-format']\n", "if VAR_40.filename != '':\n", "if not current_user.role_upload():\n", "abort(403)\n", "if '.' in VAR_40.filename:\n", "VAR_96 = VAR_40.filename.rsplit('.', 1)[-1].lower()\n", "flash(_('File to be uploaded must have an extension'), category='error')\n", "if VAR_96 not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:\n", "return redirect(url_for('web.show_book', VAR_15=book.id))\n", "flash(_(\n \"File extension '%(ext)s' is not allowed to be uploaded to this server\",\n ext=file_ext), category='error')\n", "VAR_106 = VAR_17.path.rsplit('/', 1)[-1]\n", "return redirect(url_for('web.show_book', VAR_15=book.id))\n", "VAR_107 = os.path.normpath(os.path.join(config.config_calibre_dir, VAR_17.path)\n )\n", "VAR_108 = os.path.join(VAR_107, VAR_106 + '.' + VAR_96)\n", "if not os.path.exists(VAR_107):\n", "os.makedirs(VAR_107)\n", "flash(_(u'Failed to create path %(path)s (Permission denied).', VAR_72=\n filepath), category='error')\n", "VAR_40.save(VAR_108)\n", "flash(_(u'Failed to store file %(file)s.', file=saved_filename), category=\n 'error')\n", "VAR_73 = os.path.getsize(VAR_108)\n", "return redirect(url_for('web.show_book', VAR_15=book.id))\n", "return redirect(url_for('web.show_book', VAR_15=book.id))\n", "VAR_109 = calibre_db.get_book_format(VAR_15, VAR_96.upper())\n", "if VAR_109:\n", "VAR_2.warning('Book format %s already existing', VAR_96.upper())\n", "VAR_119 = db.Data(VAR_15, VAR_96.upper(), VAR_73, VAR_106)\n", "calibre_db.session.rollback()\n", "VAR_110 = _(u'File format %(ext)s added to %(book)s', ext=file_ext.upper(),\n VAR_17=book.title)\n", "calibre_db.session.add(VAR_119)\n", "VAR_2.error('Database error: %s', e)\n", "WorkerThread.add(current_user.name, TaskUpload('<a href=\"' + url_for(\n 'web.show_book', VAR_15=book.id) + '\">' + VAR_110 + '</a>'))\n", "calibre_db.session.commit()\n", "flash(_(u'Database error: %(error)s.', VAR_49=e), category='error')\n", "return uploader.process(VAR_108, *os.path.splitext(VAR_40.filename),\n rarExecutable=config.config_rarfile_location)\n", "calibre_db.update_title_sort(config)\n", "return redirect(url_for('web.show_book', VAR_15=book.id))\n" ]
[ "def upload_single_file(request, book, book_id):...\n", "if 'btn-upload-format' in request.files:\n", "requested_file = request.files['btn-upload-format']\n", "if requested_file.filename != '':\n", "if not current_user.role_upload():\n", "abort(403)\n", "if '.' in requested_file.filename:\n", "file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()\n", "flash(_('File to be uploaded must have an extension'), category='error')\n", "if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:\n", "return redirect(url_for('web.show_book', book_id=book.id))\n", "flash(_(\n \"File extension '%(ext)s' is not allowed to be uploaded to this server\",\n ext=file_ext), category='error')\n", "file_name = book.path.rsplit('/', 1)[-1]\n", "return redirect(url_for('web.show_book', book_id=book.id))\n", "filepath = os.path.normpath(os.path.join(config.config_calibre_dir, book.path))\n", "saved_filename = os.path.join(filepath, file_name + '.' + file_ext)\n", "if not os.path.exists(filepath):\n", "os.makedirs(filepath)\n", "flash(_(u'Failed to create path %(path)s (Permission denied).', path=\n filepath), category='error')\n", "requested_file.save(saved_filename)\n", "flash(_(u'Failed to store file %(file)s.', file=saved_filename), category=\n 'error')\n", "file_size = os.path.getsize(saved_filename)\n", "return redirect(url_for('web.show_book', book_id=book.id))\n", "return redirect(url_for('web.show_book', book_id=book.id))\n", "is_format = calibre_db.get_book_format(book_id, file_ext.upper())\n", "if is_format:\n", "log.warning('Book format %s already existing', file_ext.upper())\n", "db_format = db.Data(book_id, file_ext.upper(), file_size, file_name)\n", "calibre_db.session.rollback()\n", "uploadText = _(u'File format %(ext)s added to %(book)s', ext=file_ext.upper\n (), book=book.title)\n", "calibre_db.session.add(db_format)\n", "log.error('Database error: %s', e)\n", "WorkerThread.add(current_user.name, TaskUpload('<a href=\"' + url_for(\n 'web.show_book', book_id=book.id) + '\">' + uploadText + '</a>'))\n", "calibre_db.session.commit()\n", "flash(_(u'Database error: %(error)s.', error=e), category='error')\n", "return uploader.process(saved_filename, *os.path.splitext(requested_file.\n filename), rarExecutable=config.config_rarfile_location)\n", "calibre_db.update_title_sort(config)\n", "return redirect(url_for('web.show_book', book_id=book.id))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Assign'", "Condition", "Condition", "Expr'", "Condition", "Assign'", "Expr'", "Condition", "Return'", "Expr'", "Assign'", "Return'", "Assign'", "Assign'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Return'", "Return'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Return'", "Expr'", "Return'" ]
[ "def FUNC_6(self):...\n", "return {}\n" ]
[ "def get_kwargs(self):...\n", "return {}\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@FUNC_0...\n", "return StatsHandler(self)\n" ]
[ "@cache_in_self...\n", "return StatsHandler(self)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_18(VAR_20):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_20.all:\n", "FUNC_8(VAR_20.dir)\n", "if VAR_20.tag_set is None:\n", "FUNC_0(VAR_20.dir)\n", "if VAR_20.signature_def is None:\n", "FUNC_1(VAR_20.dir, VAR_20.tag_set)\n", "FUNC_4(VAR_20.dir, VAR_20.tag_set, VAR_20.signature_def)\n" ]
[ "def show(args):...\n", "\"\"\"docstring\"\"\"\n", "if args.all:\n", "_show_all(args.dir)\n", "if args.tag_set is None:\n", "_show_tag_sets(args.dir)\n", "if args.signature_def is None:\n", "_show_signature_def_map_keys(args.dir, args.tag_set)\n", "_show_inputs_outputs(args.dir, args.tag_set, args.signature_def)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_94(VAR_153, VAR_46, VAR_154, VAR_155=False):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_153 not in VAR_1.cache:\n", "VAR_1.cache[VAR_153] = {}\n", "if VAR_46 not in VAR_1.cache[VAR_153]:\n", "VAR_1.cache[VAR_153][VAR_46] = VAR_154()\n", "if VAR_1.cache[VAR_153][VAR_46] == None and VAR_155:\n", "return VAR_1.cache[VAR_153][VAR_46]\n", "VAR_1.cache[VAR_153][VAR_46] = VAR_154()\n" ]
[ "def local_cache(namespace, key, generator, regenerate_if_none=False):...\n", "\"\"\"docstring\"\"\"\n", "if namespace not in local.cache:\n", "local.cache[namespace] = {}\n", "if key not in local.cache[namespace]:\n", "local.cache[namespace][key] = generator()\n", "if local.cache[namespace][key] == None and regenerate_if_none:\n", "return local.cache[namespace][key]\n", "local.cache[namespace][key] = generator()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Return'", "Assign'" ]
[ "def FUNC_115(VAR_129):...\n", "VAR_83.append(VAR_129)\n", "VAR_86[VAR_129] = VAR_89\n", "if VAR_87:\n", "VAR_84.append(VAR_129)\n", "return VAR_129\n", "if VAR_88:\n", "VAR_85.append(VAR_129)\n" ]
[ "def innerfn(fn):...\n", "whitelisted.append(fn)\n", "allowed_http_methods_for_whitelisted_func[fn] = methods\n", "if allow_guest:\n", "guest_methods.append(fn)\n", "return fn\n", "if xss_safe:\n", "xss_safe_methods.append(fn)\n" ]
[ 0, 2, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Condition", "Expr'", "Return'", "Condition", "Expr'" ]
[ "def FUNC_8(self) ->None:...\n", "self.package_dir.mkdir()\n", "VAR_7 = self.package_dir / '__init__.py'\n", "VAR_8 = self.env.get_template('package_init.pyi')\n", "VAR_7.write_text(VAR_8.render(description=self.package_description))\n", "VAR_9 = self.package_dir / 'py.typed'\n", "VAR_9.write_text('# Marker file for PEP 561')\n" ]
[ "def _create_package(self) ->None:...\n", "self.package_dir.mkdir()\n", "package_init = self.package_dir / '__init__.py'\n", "package_init_template = self.env.get_template('package_init.pyi')\n", "package_init.write_text(package_init_template.render(description=self.\n package_description))\n", "pytyped = self.package_dir / 'py.typed'\n", "pytyped.write_text('# Marker file for PEP 561')\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "@VAR_2.route('/ajax/fullsync')...\n", "VAR_55 = ub.session.query(ub.KoboSyncedBooks).filter(VAR_62.id == ub.\n KoboSyncedBooks.user_id).delete()\n", "VAR_71 = _('{} sync entries deleted').format(VAR_55)\n", "ub.session_commit(VAR_71)\n", "return Response(json.dumps([{'type': 'success', 'message': VAR_71}]),\n mimetype='application/json')\n" ]
[ "@admi.route('/ajax/fullsync')...\n", "count = ub.session.query(ub.KoboSyncedBooks).filter(current_user.id == ub.\n KoboSyncedBooks.user_id).delete()\n", "message = _('{} sync entries deleted').format(count)\n", "ub.session_commit(message)\n", "return Response(json.dumps([{'type': 'success', 'message': message}]),\n mimetype='application/json')\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "async def FUNC_9(self, VAR_24: str, VAR_19: str, VAR_25: Iterable[Dict[str,...\n", "VAR_50 = False\n", "for n in VAR_25:\n", "VAR_68 = VAR_26[n['event_id']]\n", "VAR_51 = await calculate_room_name(self.store, VAR_27, VAR_19)\n", "if VAR_68.type == EventTypes.Member and VAR_68.state_key == VAR_19:\n", "VAR_52 = {'title': VAR_51, 'hash': FUNC_3(VAR_24), 'notifs': [], 'invite':\n VAR_50, 'link': self.make_room_link(VAR_24)}\n", "if VAR_68.content.get('membership') == Membership.INVITE:\n", "if not VAR_50:\n", "VAR_50 = True\n", "for n in VAR_25:\n", "return VAR_52\n", "VAR_71 = await self.get_notif_vars(n, VAR_19, VAR_26[n['event_id']], VAR_27)\n", "VAR_72 = False\n", "if VAR_52['notifs'] and 'messages' in VAR_52['notifs'][-1]:\n", "VAR_77 = VAR_52['notifs'][-1]['messages']\n", "if not VAR_72:\n", "for message in VAR_71['messages']:\n", "VAR_52['notifs'].append(VAR_71)\n", "VAR_81 = list(filter(lambda VAR_81: VAR_81['id'] == message['id'], VAR_77))\n", "if VAR_81:\n", "if not message['is_historical']:\n", "if VAR_72:\n", "VAR_81[0]['is_historical'] = False\n", "VAR_72 = True\n", "VAR_77.append(message)\n" ]
[ "async def get_room_vars(self, room_id: str, user_id: str, notifs: Iterable[...\n", "is_invite = False\n", "for n in notifs:\n", "ev = notif_events[n['event_id']]\n", "room_name = await calculate_room_name(self.store, room_state_ids, user_id)\n", "if ev.type == EventTypes.Member and ev.state_key == user_id:\n", "room_vars = {'title': room_name, 'hash': string_ordinal_total(room_id),\n 'notifs': [], 'invite': is_invite, 'link': self.make_room_link(room_id)}\n", "if ev.content.get('membership') == Membership.INVITE:\n", "if not is_invite:\n", "is_invite = True\n", "for n in notifs:\n", "return room_vars\n", "notifvars = await self.get_notif_vars(n, user_id, notif_events[n['event_id'\n ]], room_state_ids)\n", "merge = False\n", "if room_vars['notifs'] and 'messages' in room_vars['notifs'][-1]:\n", "prev_messages = room_vars['notifs'][-1]['messages']\n", "if not merge:\n", "for message in notifvars['messages']:\n", "room_vars['notifs'].append(notifvars)\n", "pm = list(filter(lambda pm: pm['id'] == message['id'], prev_messages))\n", "if pm:\n", "if not message['is_historical']:\n", "if merge:\n", "pm[0]['is_historical'] = False\n", "merge = True\n", "prev_messages.append(message)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "For", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "For", "Return'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "For", "Expr'", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_0():...\n", "\"\"\"docstring\"\"\"\n", "return any(VAR_9 in os.environ for VAR_9 in VAR_2)\n" ]
[ "def looks_like_ci():...\n", "\"\"\"docstring\"\"\"\n", "return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_57(VAR_99):...\n", "\"\"\"docstring\"\"\"\n", "return VAR_99.replace('_', ' ').replace('-', ' ').title()\n" ]
[ "def unscrub(txt):...\n", "\"\"\"docstring\"\"\"\n", "return txt.replace('_', ' ').replace('-', ' ').title()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def __init__(self, VAR_58, *VAR_6, **VAR_7):...\n", "self.project = VAR_58\n", "VAR_7['initial'] = {'project': VAR_58}\n", "super().__init__(*VAR_6, **kwargs)\n" ]
[ "def __init__(self, project, *args, **kwargs):...\n", "self.project = project\n", "kwargs['initial'] = {'project': project}\n", "super().__init__(*args, **kwargs)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_40(VAR_62, VAR_104, VAR_97, VAR_105=None):...\n", "\"\"\"docstring\"\"\"\n", "import frappe.client\n", "return frappe.client.set_value(VAR_62, VAR_104, VAR_97, VAR_105)\n" ]
[ "def set_value(doctype, docname, fieldname, value=None):...\n", "\"\"\"docstring\"\"\"\n", "import frappe.client\n", "return frappe.client.set_value(doctype, docname, fieldname, value)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Import'", "Return'" ]
[ "def FUNC_46(self):...\n", "self.helper.invite(self.room_id, src=self.creator, targ=self.second_user_id,\n VAR_52=self.creator_tok)\n", "VAR_12 = 'hello'\n", "VAR_22, VAR_23 = self.make_request('POST',\n '/_matrix/client/r0/rooms/{}/leave'.format(self.room_id), VAR_19={\n 'reason': reason}, VAR_16=self.second_tok)\n", "self.assertEqual(VAR_23.code, 200, VAR_23.result)\n", "self._check_for_reason(VAR_12)\n" ]
[ "def test_reject_invite_reason(self):...\n", "self.helper.invite(self.room_id, src=self.creator, targ=self.second_user_id,\n tok=self.creator_tok)\n", "reason = 'hello'\n", "request, channel = self.make_request('POST',\n '/_matrix/client/r0/rooms/{}/leave'.format(self.room_id), content={\n 'reason': reason}, access_token=self.second_tok)\n", "self.assertEqual(channel.code, 200, channel.result)\n", "self._check_for_reason(reason)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_2(self):...\n", "self.write(\n \"Looks like you're trying to access rtxcomplete at the wrong host name.\")\n", "self.write(\n \"<br>Please make sure the address is correct: 'rtxcomplete.ixlab.org'\")\n" ]
[ "def get(self):...\n", "self.write(\n \"Looks like you're trying to access rtxcomplete at the wrong host name.\")\n", "self.write(\n \"<br>Please make sure the address is correct: 'rtxcomplete.ixlab.org'\")\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'" ]
[ "def FUNC_11(self, VAR_2):...\n", "VAR_37 = self.url_regex.match(VAR_2)\n", "if not VAR_37:\n", "VAR_42 = VAR_37.groups()[0]\n", "return VAR_42\n" ]
[ "def _parse_resource_id(self, spec):...\n", "match = self.url_regex.match(spec)\n", "if not match:\n", "resource_id = match.groups()[0]\n", "return resource_id\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "VAR_11 = FUNC_0(VAR_2, 'experimenter', None)\n", "return HttpResponseBadRequest('Invalid parameter value')\n", "if VAR_260 is not None or VAR_261 is not None:\n", "VAR_245 = FUNC_0(VAR_2, 'project', None)\n", "VAR_265 = paths_to_tag(VAR_5, VAR_11, VAR_261, VAR_260)\n", "VAR_265 = paths_to_object(VAR_5, VAR_11, VAR_245, VAR_246, VAR_257, VAR_251,\n VAR_252, VAR_258, VAR_259, VAR_20, VAR_264, VAR_262, VAR_263)\n", "VAR_246 = FUNC_0(VAR_2, 'dataset', None)\n", "return JsonResponse({'paths': VAR_265})\n", "VAR_257 = FUNC_0(VAR_2, 'image', None)\n", "VAR_251 = FUNC_0(VAR_2, 'screen', None)\n", "VAR_252 = FUNC_0(VAR_2, 'plate', None)\n", "VAR_258 = FUNC_0(VAR_2, 'run', None)\n", "VAR_258 = FUNC_0(VAR_2, 'acquisition', VAR_258)\n", "VAR_259 = VAR_2.GET.get('well', None)\n", "VAR_260 = FUNC_0(VAR_2, 'tag', None)\n", "VAR_261 = FUNC_0(VAR_2, 'tagset', None)\n", "VAR_262 = FUNC_0(VAR_2, 'roi', None)\n", "VAR_263 = FUNC_0(VAR_2, 'shape', None)\n", "VAR_20 = FUNC_0(VAR_2, 'group', None)\n", "VAR_264 = FUNC_0(VAR_2, 'page_size', settings.PAGE)\n" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "experimenter_id = get_long_or_default(request, 'experimenter', None)\n", "return HttpResponseBadRequest('Invalid parameter value')\n", "if tag_id is not None or tagset_id is not None:\n", "project_id = get_long_or_default(request, 'project', None)\n", "paths = paths_to_tag(conn, experimenter_id, tagset_id, tag_id)\n", "paths = paths_to_object(conn, experimenter_id, project_id, dataset_id,\n image_id, screen_id, plate_id, acquisition_id, well_id, group_id,\n page_size, roi_id, shape_id)\n", "dataset_id = get_long_or_default(request, 'dataset', None)\n", "return JsonResponse({'paths': paths})\n", "image_id = get_long_or_default(request, 'image', None)\n", "screen_id = get_long_or_default(request, 'screen', None)\n", "plate_id = get_long_or_default(request, 'plate', None)\n", "acquisition_id = get_long_or_default(request, 'run', None)\n", "acquisition_id = get_long_or_default(request, 'acquisition', acquisition_id)\n", "well_id = request.GET.get('well', None)\n", "tag_id = get_long_or_default(request, 'tag', None)\n", "tagset_id = get_long_or_default(request, 'tagset', None)\n", "roi_id = get_long_or_default(request, 'roi', None)\n", "shape_id = get_long_or_default(request, 'shape', None)\n", "group_id = get_long_or_default(request, 'group', None)\n", "page_size = get_long_or_default(request, 'page_size', settings.PAGE)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Return'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "@require_POST...\n", "\"\"\"docstring\"\"\"\n", "VAR_202 = VAR_5.getScriptService()\n", "VAR_49 = VAR_202.getScriptID('/omero/export_scripts/Batch_Image_Export.py')\n", "VAR_142 = VAR_5.getObject('Image', VAR_26)\n", "if VAR_142 is not None:\n", "VAR_335 = VAR_142.getDetails().group.id.val\n", "VAR_212 = [VAR_241(VAR_26)]\n", "VAR_5.SERVICE_OPTS.setOmeroGroup(VAR_335)\n", "VAR_50 = {'Data_Type': wrap('Image'), 'IDs': rlist([rlong(id) for id in\n VAR_212])}\n", "VAR_50['Format'] = wrap('OME-TIFF')\n", "VAR_174 = FUNC_78(VAR_2, VAR_5, VAR_49, VAR_50, VAR_43='Create OME-TIFF')\n", "return JsonResponse(VAR_174)\n" ]
[ "@require_POST...\n", "\"\"\"docstring\"\"\"\n", "scriptService = conn.getScriptService()\n", "sId = scriptService.getScriptID('/omero/export_scripts/Batch_Image_Export.py')\n", "image = conn.getObject('Image', imageId)\n", "if image is not None:\n", "gid = image.getDetails().group.id.val\n", "imageIds = [long(imageId)]\n", "conn.SERVICE_OPTS.setOmeroGroup(gid)\n", "inputMap = {'Data_Type': wrap('Image'), 'IDs': rlist([rlong(id) for id in\n imageIds])}\n", "inputMap['Format'] = wrap('OME-TIFF')\n", "rsp = run_script(request, conn, sId, inputMap, scriptName='Create OME-TIFF')\n", "return JsonResponse(rsp)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_48(self):...\n", "self.admin_login(username='super', password='secret', login_url='/')\n", "self.selenium.get('%s%s' % (self.live_server_url, '/admin_widgets/event/add/'))\n", "VAR_51 = self.selenium.current_window_handle\n", "self.assertEqual(self.selenium.find_element_by_id('id_main_band').\n get_attribute('value'), '')\n", "self.selenium.find_element_by_id('lookup_id_main_band').click()\n", "self.selenium.switch_to_window('id_main_band')\n", "self.wait_page_loaded()\n", "VAR_52 = self.selenium.find_element_by_link_text('Bogey Blues')\n", "self.assertTrue('/band/42/' in VAR_52.get_attribute('href'))\n", "VAR_52.click()\n", "self.selenium.switch_to_window(VAR_51)\n", "self.assertEqual(self.selenium.find_element_by_id('id_main_band').\n get_attribute('value'), '42')\n", "self.selenium.find_element_by_id('lookup_id_main_band').click()\n", "self.selenium.switch_to_window('id_main_band')\n", "self.wait_page_loaded()\n", "VAR_52 = self.selenium.find_element_by_link_text('Green Potatoes')\n", "self.assertTrue('/band/98/' in VAR_52.get_attribute('href'))\n", "VAR_52.click()\n", "self.selenium.switch_to_window(VAR_51)\n", "self.assertEqual(self.selenium.find_element_by_id('id_main_band').\n get_attribute('value'), '98')\n" ]
[ "def test_foreignkey(self):...\n", "self.admin_login(username='super', password='secret', login_url='/')\n", "self.selenium.get('%s%s' % (self.live_server_url, '/admin_widgets/event/add/'))\n", "main_window = self.selenium.current_window_handle\n", "self.assertEqual(self.selenium.find_element_by_id('id_main_band').\n get_attribute('value'), '')\n", "self.selenium.find_element_by_id('lookup_id_main_band').click()\n", "self.selenium.switch_to_window('id_main_band')\n", "self.wait_page_loaded()\n", "link = self.selenium.find_element_by_link_text('Bogey Blues')\n", "self.assertTrue('/band/42/' in link.get_attribute('href'))\n", "link.click()\n", "self.selenium.switch_to_window(main_window)\n", "self.assertEqual(self.selenium.find_element_by_id('id_main_band').\n get_attribute('value'), '42')\n", "self.selenium.find_element_by_id('lookup_id_main_band').click()\n", "self.selenium.switch_to_window('id_main_band')\n", "self.wait_page_loaded()\n", "link = self.selenium.find_element_by_link_text('Green Potatoes')\n", "self.assertTrue('/band/98/' in link.get_attribute('href'))\n", "link.click()\n", "self.selenium.switch_to_window(main_window)\n", "self.assertEqual(self.selenium.find_element_by_id('id_main_band').\n get_attribute('value'), '98')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_0(self, VAR_2='password'):...\n", "VAR_3 = self.client.post('/login/', {'username': 'testclient', 'password':\n VAR_2})\n", "self.assertTrue(SESSION_KEY in self.client.session)\n", "return VAR_3\n" ]
[ "def login(self, password='password'):...\n", "response = self.client.post('/login/', {'username': 'testclient',\n 'password': password})\n", "self.assertTrue(SESSION_KEY in self.client.session)\n", "return response\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_86(self, VAR_25, *VAR_0, **VAR_1):...\n", "FUNC_83(self, VAR_72(self, *VAR_0, **kwargs))\n", "for VAR_6 in VAR_73:\n", "FUNC_83(self, VAR_6(self, VAR_25, *VAR_0, **kwargs))\n", "return self._return_value\n" ]
[ "def runner(self, method, *args, **kwargs):...\n", "add_to_return_value(self, fn(self, *args, **kwargs))\n", "for f in hooks:\n", "add_to_return_value(self, f(self, method, *args, **kwargs))\n", "return self._return_value\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "For", "Expr'", "Return'" ]
[ "def FUNC_99(self) ->bool:...\n", "\"\"\"docstring\"\"\"\n", "return self.config.send_federation and (not self.config.worker_app or self.\n config.worker_app == 'synapse.app.federation_sender')\n" ]
[ "def should_send_federation(self) ->bool:...\n", "\"\"\"docstring\"\"\"\n", "return self.config.send_federation and (not self.config.worker_app or self.\n config.worker_app == 'synapse.app.federation_sender')\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_99(self, VAR_10):...\n", "\"\"\"docstring\"\"\"\n", "self.jsonrpc_procedures[VAR_10.__name__] = VAR_10\n", "return VAR_10\n" ]
[ "def jsonrpc(self, f):...\n", "\"\"\"docstring\"\"\"\n", "self.jsonrpc_procedures[f.__name__] = f\n", "return f\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "@VAR_7.route('/obj/<path:obj_path>')...\n", "VAR_36 = FUNC_10(VAR_20)\n", "VAR_29 = Headers()\n", "VAR_39 = send_file(VAR_36, cache_timeout=datetime.timedelta(days=365).\n total_seconds(), add_etags=True, conditional=True)\n", "VAR_39.headers.extend(VAR_29)\n", "return VAR_39\n" ]
[ "@scope_blueprint.route('/obj/<path:obj_path>')...\n", "path = _get_obj_absolute_path(obj_path)\n", "headers = Headers()\n", "response = send_file(path, cache_timeout=datetime.timedelta(days=365).\n total_seconds(), add_etags=True, conditional=True)\n", "response.headers.extend(headers)\n", "return response\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_18(self):...\n", "self.assert_expected(self.folder.t, 'CheckPathAlt.html')\n" ]
[ "def testPathAlt(self):...\n", "self.assert_expected(self.folder.t, 'CheckPathAlt.html')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "async def FUNC_4(self, VAR_13: IProtocolFactory) ->None:...\n", "VAR_17 = None\n", "VAR_18 = await self._resolve_server()\n", "for server in VAR_18:\n", "VAR_1 = server.host\n", "if VAR_17:\n", "VAR_19 = server.port\n", "VAR_0.debug('Connecting to %s:%i', VAR_1.decode('ascii'), VAR_19)\n", "VAR_0.info('Failed to connect to %s:%i: %s', VAR_1.decode('ascii'), VAR_19, e)\n", "VAR_21 = HostnameEndpoint(self._reactor, VAR_1, VAR_19)\n", "if not VAR_17:\n", "if self._tls_options:\n", "VAR_17 = e\n", "VAR_21 = wrapClientTLS(self._tls_options, VAR_21)\n", "VAR_22 = await make_deferred_yieldable(VAR_21.connect(VAR_13))\n", "return VAR_22\n" ]
[ "async def _do_connect(self, protocol_factory: IProtocolFactory) ->None:...\n", "first_exception = None\n", "server_list = await self._resolve_server()\n", "for server in server_list:\n", "host = server.host\n", "if first_exception:\n", "port = server.port\n", "logger.debug('Connecting to %s:%i', host.decode('ascii'), port)\n", "logger.info('Failed to connect to %s:%i: %s', host.decode('ascii'), port, e)\n", "endpoint = HostnameEndpoint(self._reactor, host, port)\n", "if not first_exception:\n", "if self._tls_options:\n", "first_exception = e\n", "endpoint = wrapClientTLS(self._tls_options, endpoint)\n", "result = await make_deferred_yieldable(endpoint.connect(protocol_factory))\n", "return result\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Assign'", "Assign'", "For", "Assign'", "Condition", "Assign'", "Expr'", "Expr'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_41(VAR_16, VAR_17):...\n", "return config.set_from_dictionary(VAR_16, VAR_17, lambda y: y.strip() if y else\n y)\n" ]
[ "def _config_string(to_save, x):...\n", "return config.set_from_dictionary(to_save, x, lambda y: y.strip() if y else y)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_38(self):...\n", "return self.renderer.block_code(self.token['text'], self.token['lang'])\n" ]
[ "def output_code(self):...\n", "return self.renderer.block_code(self.token['text'], self.token['lang'])\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]