lines (sequencelengths 1–383) | raw_lines (sequencelengths 1–383) | label (sequencelengths 1–383) | type (sequencelengths 1–383) |
---|---|---|---|
[
"def FUNC_20(VAR_56, VAR_46):...\n",
"return os.path.exists(VAR_56) and os.access(VAR_56, VAR_46\n ) and not os.path.isdir(VAR_56)\n"
] | [
"def _access_check(fn, mode):...\n",
"return os.path.exists(fn) and os.access(fn, mode) and not os.path.isdir(fn)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_35():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3 = request.args(0)\n",
"VAR_96 = request.args(1)\n",
"VAR_97 = 'plugin_' + VAR_96\n",
"VAR_46 = FORM.confirm(T('Delete'), {T('Cancel'): URL('design', VAR_98=app)})\n",
"if VAR_46.accepted:\n",
"return dict(VAR_46=dialog, VAR_96=plugin)\n",
"for VAR_168 in ['models', 'views', 'controllers', 'static', 'modules',\n",
"session.flash = T('unable to delete file plugin \"%(plugin)s\"', dict(VAR_96=\n plugin))\n",
"redirect(URL('design', VAR_98=request.args(0), VAR_157=request.vars.id2))\n",
"VAR_15 = os.path.join(apath(VAR_3, VAR_122=request), VAR_168)\n",
"session.flash = T('plugin \"%(plugin)s\" deleted', dict(VAR_96=plugin))\n",
"for VAR_70 in os.listdir(VAR_15):\n",
"if VAR_70.rsplit('.', 1)[0] == VAR_97:\n",
"VAR_5 = os.path.join(VAR_15, VAR_70)\n",
"if os.path.isdir(VAR_5):\n",
"shutil.rmtree(VAR_5)\n",
"os.unlink(VAR_5)\n"
] | [
"def delete_plugin():...\n",
"\"\"\"docstring\"\"\"\n",
"app = request.args(0)\n",
"plugin = request.args(1)\n",
"plugin_name = 'plugin_' + plugin\n",
"dialog = FORM.confirm(T('Delete'), {T('Cancel'): URL('design', args=app)})\n",
"if dialog.accepted:\n",
"return dict(dialog=dialog, plugin=plugin)\n",
"for folder in ['models', 'views', 'controllers', 'static', 'modules', 'private'\n",
"session.flash = T('unable to delete file plugin \"%(plugin)s\"', dict(plugin=\n plugin))\n",
"redirect(URL('design', args=request.args(0), anchor=request.vars.id2))\n",
"path = os.path.join(apath(app, r=request), folder)\n",
"session.flash = T('plugin \"%(plugin)s\" deleted', dict(plugin=plugin))\n",
"for item in os.listdir(path):\n",
"if item.rsplit('.', 1)[0] == plugin_name:\n",
"filename = os.path.join(path, item)\n",
"if os.path.isdir(filename):\n",
"shutil.rmtree(filename)\n",
"os.unlink(filename)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"For",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_61(VAR_70):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_137 = {' ': 'normal', '+': 'plus', '-': 'minus'}\n",
"return VAR_137[VAR_70[0]]\n"
] | [
"def getclass(item):...\n",
"\"\"\"docstring\"\"\"\n",
"operators = {' ': 'normal', '+': 'plus', '-': 'minus'}\n",
"return operators[item[0]]\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def __init__(self, VAR_31=None, VAR_32=None, VAR_33=None, VAR_34=True):...\n",
"VAR_222 = self.settings = Settings()\n",
"VAR_222.server = VAR_31\n",
"VAR_222.sender = VAR_32\n",
"VAR_222.login = VAR_33\n",
"VAR_222.tls = VAR_34\n",
"VAR_222.timeout = 5\n",
"VAR_222.hostname = None\n",
"VAR_222.ssl = False\n",
"VAR_222.cipher_type = None\n",
"VAR_222.gpg_home = None\n",
"VAR_222.sign = True\n",
"VAR_222.sign_passphrase = None\n",
"VAR_222.encrypt = True\n",
"VAR_222.x509_sign_keyfile = None\n",
"VAR_222.x509_sign_certfile = None\n",
"VAR_222.x509_sign_chainfile = None\n",
"VAR_222.x509_nocerts = False\n",
"VAR_222.x509_crypt_certfiles = None\n",
"VAR_222.debug = False\n",
"VAR_222.lock_keys = True\n",
"self.result = {}\n",
"self.error = None\n"
] | [
"def __init__(self, server=None, sender=None, login=None, tls=True):...\n",
"settings = self.settings = Settings()\n",
"settings.server = server\n",
"settings.sender = sender\n",
"settings.login = login\n",
"settings.tls = tls\n",
"settings.timeout = 5\n",
"settings.hostname = None\n",
"settings.ssl = False\n",
"settings.cipher_type = None\n",
"settings.gpg_home = None\n",
"settings.sign = True\n",
"settings.sign_passphrase = None\n",
"settings.encrypt = True\n",
"settings.x509_sign_keyfile = None\n",
"settings.x509_sign_certfile = None\n",
"settings.x509_sign_chainfile = None\n",
"settings.x509_nocerts = False\n",
"settings.x509_crypt_certfiles = None\n",
"settings.debug = False\n",
"settings.lock_keys = True\n",
"self.result = {}\n",
"self.error = None\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"@defer.inlineCallbacks...\n",
"yield defer.ensureDeferred(self.handler.set_displayname(self.frank, synapse\n .types.create_requester(self.frank), 'Frank Jr.'))\n",
"self.assertEquals((yield defer.ensureDeferred(self.store.\n get_profile_displayname(self.frank.localpart))), 'Frank Jr.')\n",
"yield defer.ensureDeferred(self.handler.set_displayname(self.frank, synapse\n .types.create_requester(self.frank), 'Frank'))\n",
"self.assertEquals((yield defer.ensureDeferred(self.store.\n get_profile_displayname(self.frank.localpart))), 'Frank')\n"
] | [
"@defer.inlineCallbacks...\n",
"yield defer.ensureDeferred(self.handler.set_displayname(self.frank, synapse\n .types.create_requester(self.frank), 'Frank Jr.'))\n",
"self.assertEquals((yield defer.ensureDeferred(self.store.\n get_profile_displayname(self.frank.localpart))), 'Frank Jr.')\n",
"yield defer.ensureDeferred(self.handler.set_displayname(self.frank, synapse\n .types.create_requester(self.frank), 'Frank'))\n",
"self.assertEquals((yield defer.ensureDeferred(self.store.\n get_profile_displayname(self.frank.localpart))), 'Frank')\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@app.route('/')...\n",
"from octoprint.server import connectivityChecker, printer\n",
"VAR_46 = settings().getBoolean(['devel', 'cache', 'preemptive'])\n",
"VAR_14 = g.locale.language if g.locale else 'en'\n",
"def FUNC_25(VAR_18):...\n",
"return VAR_18 is not None and bool(VAR_18['wizard']['order'])\n"
] | [
"@app.route('/')...\n",
"from octoprint.server import connectivityChecker, printer\n",
"preemptive_cache_enabled = settings().getBoolean(['devel', 'cache',\n 'preemptive'])\n",
"locale = g.locale.language if g.locale else 'en'\n",
"def wizard_active(templates):...\n",
"return templates is not None and bool(templates['wizard']['order'])\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"ImportFrom'",
"Assign'",
"Assign'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_84(VAR_72, *VAR_73):...\n",
"def FUNC_86(self, VAR_25, *VAR_0, **VAR_1):...\n",
"FUNC_83(self, VAR_72(self, *VAR_0, **kwargs))\n",
"for VAR_6 in VAR_73:\n",
"FUNC_83(self, VAR_6(self, VAR_25, *VAR_0, **kwargs))\n",
"return self._return_value\n"
] | [
"def compose(fn, *hooks):...\n",
"def runner(self, method, *args, **kwargs):...\n",
"add_to_return_value(self, fn(self, *args, **kwargs))\n",
"for f in hooks:\n",
"add_to_return_value(self, f(self, method, *args, **kwargs))\n",
"return self._return_value\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Expr'",
"For",
"Expr'",
"Return'"
] |
[
"def FUNC_28(self):...\n",
"VAR_25 = widgets.url_params_from_lookup_dict({'color__in': ('red', 'blue')})\n",
"VAR_26 = widgets.url_params_from_lookup_dict({'color__in': ['red', 'blue']})\n",
"self.assertEqual(VAR_25, {'color__in': 'red,blue'})\n",
"self.assertEqual(VAR_25, VAR_26)\n"
] | [
"def test_url_params_from_lookup_dict_any_iterable(self):...\n",
"lookup1 = widgets.url_params_from_lookup_dict({'color__in': ('red', 'blue')})\n",
"lookup2 = widgets.url_params_from_lookup_dict({'color__in': ['red', 'blue']})\n",
"self.assertEqual(lookup1, {'color__in': 'red,blue'})\n",
"self.assertEqual(lookup1, lookup2)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def __init__(self, VAR_10: str, VAR_11: Optional[Iterable[str]]=None):...\n",
"self.msg = VAR_10\n",
"self.path = VAR_11\n"
] | [
"def __init__(self, msg: str, path: Optional[Iterable[str]]=None):...\n",
"self.msg = msg\n",
"self.path = path\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"def FUNC_7(VAR_0, VAR_1):...\n",
"return FUNC_0(VAR_0=request, VAR_1=pk, VAR_2='is_globally_pinned', VAR_3=\n True, VAR_4=Comment.PINNED, VAR_5=_('The topic has been globally pinned'))\n"
] | [
"def global_pin(request, pk):...\n",
"return _moderate(request=request, pk=pk, field_name='is_globally_pinned',\n to_value=True, action=Comment.PINNED, message=_(\n 'The topic has been globally pinned'))\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_22(VAR_20):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_46 = VAR_20.checkpoint_path or os.path.join(VAR_20.dir,\n 'variables/variables')\n",
"if not VAR_20.variables_to_feed:\n",
"VAR_75 = []\n",
"if VAR_20.variables_to_feed.lower() == 'all':\n",
"saved_model_aot_compile.aot_compile_cpu_meta_graph_def(VAR_46=\n checkpoint_path, VAR_4=saved_model_utils.get_meta_graph_def(args.dir,\n args.tag_set), VAR_5=args.signature_def_key, VAR_75=variables_to_feed,\n output_prefix=args.output_prefix, target_triple=args.target_triple,\n target_cpu=args.target_cpu, cpp_class=args.cpp_class, multithreading=\n args.multithreading.lower() not in ('f', 'false', '0'))\n",
"VAR_75 = None\n",
"VAR_75 = VAR_20.variables_to_feed.split(',')\n"
] | [
"def aot_compile_cpu(args):...\n",
"\"\"\"docstring\"\"\"\n",
"checkpoint_path = args.checkpoint_path or os.path.join(args.dir,\n 'variables/variables')\n",
"if not args.variables_to_feed:\n",
"variables_to_feed = []\n",
"if args.variables_to_feed.lower() == 'all':\n",
"saved_model_aot_compile.aot_compile_cpu_meta_graph_def(checkpoint_path=\n checkpoint_path, meta_graph_def=saved_model_utils.get_meta_graph_def(\n args.dir, args.tag_set), signature_def_key=args.signature_def_key,\n variables_to_feed=variables_to_feed, output_prefix=args.output_prefix,\n target_triple=args.target_triple, target_cpu=args.target_cpu, cpp_class\n =args.cpp_class, multithreading=args.multithreading.lower() not in ('f',\n 'false', '0'))\n",
"variables_to_feed = None\n",
"variables_to_feed = args.variables_to_feed.split(',')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'"
] |
[
"def FUNC_9(self, VAR_3, VAR_4, VAR_5):...\n",
"self.owner = self.register_user('owner', 'pass')\n",
"self.owner_tok = self.login('owner', 'pass')\n",
"self.profile_url = '/profile/%s' % self.owner\n",
"self.requester = self.register_user('requester', 'pass')\n",
"self.requester_tok = self.login('requester', 'pass')\n",
"self.room_id = self.helper.create_room_as(self.owner, tok=self.owner_tok)\n"
] | [
"def prepare(self, reactor, clock, hs):...\n",
"self.owner = self.register_user('owner', 'pass')\n",
"self.owner_tok = self.login('owner', 'pass')\n",
"self.profile_url = '/profile/%s' % self.owner\n",
"self.requester = self.register_user('requester', 'pass')\n",
"self.requester_tok = self.login('requester', 'pass')\n",
"self.room_id = self.helper.create_room_as(self.owner, tok=self.owner_tok)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_24(self):...\n",
"self.parser = saved_model_cli.create_parser()\n",
"VAR_9 = test.test_src_dir_path(VAR_0)\n",
"VAR_44 = np.array([[1], [2]])\n",
"VAR_45 = np.zeros((6, 3))\n",
"VAR_31 = os.path.join(test.get_temp_dir(), 'testRunCommandNewOutdir_inputs.npz'\n )\n",
"VAR_41 = os.path.join(test.get_temp_dir(), 'new_dir')\n",
"if os.path.isdir(VAR_41):\n",
"shutil.rmtree(VAR_41)\n",
"np.savez(VAR_31, VAR_26=x, VAR_27=x_notused)\n",
"VAR_10 = self.parser.parse_args(['run', '--dir', VAR_9, '--tag_set',\n 'serve', '--signature_def', 'serving_default', '--inputs', 'x=' +\n VAR_31 + '[x0]', '--outdir', VAR_41])\n",
"saved_model_cli.run(VAR_10)\n",
"VAR_42 = np.load(os.path.join(VAR_41, 'y.npy'))\n",
"VAR_43 = np.array([[2.5], [3.0]])\n",
"self.assertAllClose(VAR_43, VAR_42)\n"
] | [
"def testRunCommandNewOutdir(self):...\n",
"self.parser = saved_model_cli.create_parser()\n",
"base_path = test.test_src_dir_path(SAVED_MODEL_PATH)\n",
"x = np.array([[1], [2]])\n",
"x_notused = np.zeros((6, 3))\n",
"input_path = os.path.join(test.get_temp_dir(),\n 'testRunCommandNewOutdir_inputs.npz')\n",
"output_dir = os.path.join(test.get_temp_dir(), 'new_dir')\n",
"if os.path.isdir(output_dir):\n",
"shutil.rmtree(output_dir)\n",
"np.savez(input_path, x0=x, x1=x_notused)\n",
"args = self.parser.parse_args(['run', '--dir', base_path, '--tag_set',\n 'serve', '--signature_def', 'serving_default', '--inputs', 'x=' +\n input_path + '[x0]', '--outdir', output_dir])\n",
"saved_model_cli.run(args)\n",
"y_actual = np.load(os.path.join(output_dir, 'y.npy'))\n",
"y_expected = np.array([[2.5], [3.0]])\n",
"self.assertAllClose(y_expected, y_actual)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_1(self):...\n",
"if 'onelogin' not in current_app.config.get('ACTIVE_PROVIDERS'):\n",
"return 'Onelogin is not enabled in the config. See the ACTIVE_PROVIDERS section.', 404\n",
"VAR_5 = OneLogin_Saml2_Auth(self.req, current_app.config.get(\n 'ONELOGIN_SETTINGS'))\n",
"self.reqparse.add_argument('return_to', required=False, default=current_app\n .config.get('WEB_PATH'))\n",
"self.reqparse.add_argument('acs', required=False)\n",
"self.reqparse.add_argument('sls', required=False)\n",
"VAR_7 = self.reqparse.parse_args()\n",
"VAR_10 = VAR_7['return_to']\n",
"if VAR_7['acs'] != None:\n",
"if self._consumer(VAR_5):\n",
"if VAR_7['sls'] != None:\n",
"VAR_36 = VAR_5.get_attribute(current_app.config.get('ONELOGIN_EMAIL_FIELD'))[0]\n",
"return dict(message='OneLogin authentication failed.'), 403\n",
"return dict(message='OneLogin SLS not implemented yet.'), 405\n",
"return redirect(VAR_5.login(VAR_10=return_to))\n",
"VAR_23 = User.query.filter(User.email == VAR_36).first()\n",
"if not VAR_23:\n",
"VAR_23 = User(VAR_36=email, active=True, role=current_app.config.get(\n 'ONELOGIN_DEFAULT_ROLE'))\n",
"identity_changed.send(current_app._get_current_object(), identity=Identity(\n user.id))\n",
"db.session.add(VAR_23)\n",
"login_user(VAR_23)\n",
"db.session.commit()\n",
"VAR_37 = OneLogin_Saml2_Utils.get_self_url(self.req)\n",
"db.session.refresh(VAR_23)\n",
"if 'RelayState' in request.form and VAR_37 != request.form['RelayState']:\n",
"return redirect(VAR_5.redirect_to(request.form['RelayState']), code=302)\n",
"return redirect(current_app.config.get('BASE_URL'), code=302)\n"
] | [
"def post(self):...\n",
"if 'onelogin' not in current_app.config.get('ACTIVE_PROVIDERS'):\n",
"return 'Onelogin is not enabled in the config. See the ACTIVE_PROVIDERS section.', 404\n",
"auth = OneLogin_Saml2_Auth(self.req, current_app.config.get(\n 'ONELOGIN_SETTINGS'))\n",
"self.reqparse.add_argument('return_to', required=False, default=current_app\n .config.get('WEB_PATH'))\n",
"self.reqparse.add_argument('acs', required=False)\n",
"self.reqparse.add_argument('sls', required=False)\n",
"args = self.reqparse.parse_args()\n",
"return_to = args['return_to']\n",
"if args['acs'] != None:\n",
"if self._consumer(auth):\n",
"if args['sls'] != None:\n",
"email = auth.get_attribute(current_app.config.get('ONELOGIN_EMAIL_FIELD'))[0]\n",
"return dict(message='OneLogin authentication failed.'), 403\n",
"return dict(message='OneLogin SLS not implemented yet.'), 405\n",
"return redirect(auth.login(return_to=return_to))\n",
"user = User.query.filter(User.email == email).first()\n",
"if not user:\n",
"user = User(email=email, active=True, role=current_app.config.get(\n 'ONELOGIN_DEFAULT_ROLE'))\n",
"identity_changed.send(current_app._get_current_object(), identity=Identity(\n user.id))\n",
"db.session.add(user)\n",
"login_user(user)\n",
"db.session.commit()\n",
"self_url = OneLogin_Saml2_Utils.get_self_url(self.req)\n",
"db.session.refresh(user)\n",
"if 'RelayState' in request.form and self_url != request.form['RelayState']:\n",
"return redirect(auth.redirect_to(request.form['RelayState']), code=302)\n",
"return redirect(current_app.config.get('BASE_URL'), code=302)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Return'",
"Return'",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"For",
"Return'",
"Return'"
] |
[
"@VAR_0.user_loader...\n",
"VAR_12 = FUNC_7(VAR_8)\n",
"if VAR_12:\n",
"return VAR_12\n",
"VAR_13 = current_org._get_current_object()\n",
"VAR_20, VAR_26 = VAR_7.split('-')\n",
"return None\n",
"VAR_12 = models.User.get_by_id_and_org(VAR_20, VAR_13)\n",
"if VAR_12.is_disabled or VAR_12.get_id() != VAR_7:\n",
"return None\n",
"return VAR_12\n"
] | [
"@login_manager.user_loader...\n",
"user = api_key_load_user_from_request(request)\n",
"if user:\n",
"return user\n",
"org = current_org._get_current_object()\n",
"user_id, _ = user_id_with_identity.split('-')\n",
"return None\n",
"user = models.User.get_by_id_and_org(user_id, org)\n",
"if user.is_disabled or user.get_id() != user_id_with_identity:\n",
"return None\n",
"return user\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"async def FUNC_4(VAR_6, VAR_7):...\n",
"if 'json' not in VAR_7.headers['Content-Type']:\n",
"if VAR_6.path.endswith('/'):\n",
"return VAR_7\n",
"return web.HTTPFound(VAR_6.path.rstrip('/'))\n",
"return web.json_response({'status': 404, 'message': \"Page '{}' not found\".\n format(VAR_6.path)}, status=404)\n"
] | [
"async def handle_404(request, response):...\n",
"if 'json' not in response.headers['Content-Type']:\n",
"if request.path.endswith('/'):\n",
"return response\n",
"return web.HTTPFound(request.path.rstrip('/'))\n",
"return web.json_response({'status': 404, 'message': \"Page '{}' not found\".\n format(request.path)}, status=404)\n"
] | [
0,
0,
0,
0,
4,
0
] | [
"AsyncFunctionDef'",
"Condition",
"Condition",
"Return'",
"Return'",
"Return'"
] |
[
"def FUNC_4(self, VAR_0, VAR_1, VAR_3):...\n",
"self.store = VAR_3.get_datastore()\n",
"self.handler = VAR_3.get_directory_handler()\n",
"self.state_handler = VAR_3.get_state_handler()\n",
"self.admin_user = self.register_user('admin', 'pass', admin=True)\n",
"self.admin_user_tok = self.login('admin', 'pass')\n",
"self.room_id = self.helper.create_room_as(self.admin_user, tok=self.\n admin_user_tok)\n",
"self.test_alias = '#test:test'\n",
"self.room_alias = self._add_alias(self.test_alias)\n"
] | [
"def prepare(self, reactor, clock, hs):...\n",
"self.store = hs.get_datastore()\n",
"self.handler = hs.get_directory_handler()\n",
"self.state_handler = hs.get_state_handler()\n",
"self.admin_user = self.register_user('admin', 'pass', admin=True)\n",
"self.admin_user_tok = self.login('admin', 'pass')\n",
"self.room_id = self.helper.create_room_as(self.admin_user, tok=self.\n admin_user_tok)\n",
"self.test_alias = '#test:test'\n",
"self.room_alias = self._add_alias(self.test_alias)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_23(self, VAR_19, VAR_36, VAR_37):...\n",
"if VAR_19 is None:\n",
"return VAR_36 == VAR_37\n",
"VAR_44 = VAR_3()\n",
"return getattr(VAR_36, VAR_19, VAR_44) == getattr(VAR_37, VAR_19, VAR_44\n ) is not VAR_44\n"
] | [
"def same_part(self, name, ob1, ob2):...\n",
"if name is None:\n",
"return ob1 == ob2\n",
"no = object()\n",
"return getattr(ob1, name, no) == getattr(ob2, name, no) is not no\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Return'"
] |
[
"def FUNC_20(VAR_24, VAR_16, VAR_25=False, VAR_26=None):...\n",
"VAR_60 = VAR_24.split(',')\n",
"VAR_61 = []\n",
"if not VAR_25:\n",
"VAR_62 = isoLanguages.get_language_codes(get_locale(), VAR_60, VAR_61)\n",
"VAR_62 = isoLanguages.get_valid_language_codes(get_locale(), VAR_60, VAR_61)\n",
"for l in VAR_61:\n",
"VAR_1.error(\"'%s' is not a valid language\", l)\n",
"if VAR_25 and len(VAR_62) == 1:\n",
"if isinstance(VAR_26, list):\n",
"if VAR_62[0] != current_user.filter_language(\n",
"VAR_62 = helper.uniq(VAR_62)\n",
"VAR_26.append(l)\n",
"VAR_62[0] = calibre_db.session.query(db.Languages).filter(db.Languages.\n lang_code == current_user.filter_language()).first().lang_code\n",
"return FUNC_7(VAR_62, VAR_16.languages, db.Languages, calibre_db.session,\n 'languages')\n"
] | [
"def edit_book_languages(languages, book, upload=False, invalid=None):...\n",
"input_languages = languages.split(',')\n",
"unknown_languages = []\n",
"if not upload:\n",
"input_l = isoLanguages.get_language_codes(get_locale(), input_languages,\n unknown_languages)\n",
"input_l = isoLanguages.get_valid_language_codes(get_locale(),\n input_languages, unknown_languages)\n",
"for l in unknown_languages:\n",
"log.error(\"'%s' is not a valid language\", l)\n",
"if upload and len(input_l) == 1:\n",
"if isinstance(invalid, list):\n",
"if input_l[0] != current_user.filter_language(\n",
"input_l = helper.uniq(input_l)\n",
"invalid.append(l)\n",
"input_l[0] = calibre_db.session.query(db.Languages).filter(db.Languages.\n lang_code == current_user.filter_language()).first().lang_code\n",
"return modify_database_object(input_l, book.languages, db.Languages,\n calibre_db.session, 'languages')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"For",
"Expr'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_11(self, VAR_18: List[str], VAR_19: Optional[str]=None, VAR_20:...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_57 = []\n",
"VAR_58 = [self.default_template_dir]\n",
"if VAR_19:\n",
"if not self.path_exists(VAR_19):\n",
"VAR_59 = jinja2.FileSystemLoader(VAR_58)\n",
"VAR_58.insert(0, VAR_19)\n",
"VAR_60 = jinja2.Environment(VAR_59=loader, VAR_20=autoescape)\n",
"VAR_60.filters.update({'format_ts': FUNC_1, 'mxc_to_http': FUNC_2(self.\n public_baseurl)})\n",
"for filename in VAR_18:\n",
"VAR_73 = VAR_60.get_template(filename)\n",
"return VAR_57\n",
"VAR_57.append(VAR_73)\n"
] | [
"def read_templates(self, filenames: List[str], custom_template_directory:...\n",
"\"\"\"docstring\"\"\"\n",
"templates = []\n",
"search_directories = [self.default_template_dir]\n",
"if custom_template_directory:\n",
"if not self.path_exists(custom_template_directory):\n",
"loader = jinja2.FileSystemLoader(search_directories)\n",
"search_directories.insert(0, custom_template_directory)\n",
"env = jinja2.Environment(loader=loader, autoescape=autoescape)\n",
"env.filters.update({'format_ts': _format_ts_filter, 'mxc_to_http':\n _create_mxc_to_http_filter(self.public_baseurl)})\n",
"for filename in filenames:\n",
"template = env.get_template(filename)\n",
"return templates\n",
"templates.append(template)\n"
] | [
0,
0,
2,
0,
0,
0,
0,
0,
2,
0,
2,
2,
2,
2
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"For",
"Assign'",
"Return'",
"Expr'"
] |
[
"def FUNC_14(self):...\n",
"VAR_62 = self.useroptions.forUser(self.getUserId())\n",
"VAR_62.setOption('last_time_online', int(time.time()))\n"
] | [
"def api_heartbeat(self):...\n",
"uo = self.useroptions.forUser(self.getUserId())\n",
"uo.setOption('last_time_online', int(time.time()))\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_27(VAR_23):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_58 = 'string'\n",
"VAR_59 = VAR_23.add_parser('convert', description=convert_msg,\n formatter_class=argparse.RawTextHelpFormatter)\n",
"VAR_59.add_argument('--dir', type=str, required=True, help=\n 'directory containing the SavedModel to convert')\n",
"VAR_59.add_argument('--output_dir', type=str, required=True, help=\n 'output directory for the converted SavedModel')\n",
"VAR_59.add_argument('--tag_set', type=str, required=True, help=\n \"tag-set of graph in SavedModel to convert, separated by ','\")\n",
"VAR_60 = VAR_59.add_subparsers(title='conversion methods', description=\n 'valid conversion methods', help=\n 'the conversion to run with the SavedModel')\n",
"VAR_61 = VAR_60.add_parser('tensorrt', description=\n 'Convert the SavedModel with Tensorflow-TensorRT integration',\n formatter_class=argparse.RawTextHelpFormatter)\n",
"VAR_61.add_argument('--max_workspace_size_bytes', type=int, default=2 << 20,\n help=\n 'the maximum GPU temporary memory which the TRT engine can use at execution time'\n )\n",
"VAR_61.add_argument('--precision_mode', type=str, default='FP32', help=\n 'one of FP32, FP16 and INT8')\n",
"VAR_61.add_argument('--minimum_segment_size', type=int, default=3, help=\n 'the minimum number of nodes required for a subgraph to be replacedin a TensorRT node'\n )\n",
"VAR_61.add_argument('--convert_tf1_model', type=bool, default=False, help=\n 'support TRT conversion for TF1 models')\n",
"VAR_61.set_defaults(func=convert_with_tensorrt)\n"
] | [
"def add_convert_subparser(subparsers):...\n",
"\"\"\"docstring\"\"\"\n",
"convert_msg = \"\"\"Usage example:\nTo convert the SavedModel to one that have TensorRT ops:\n$saved_model_cli convert \\\\\n --dir /tmp/saved_model \\\\\n --tag_set serve \\\\\n --output_dir /tmp/saved_model_trt \\\\\n tensorrt \n\"\"\"\n",
"parser_convert = subparsers.add_parser('convert', description=convert_msg,\n formatter_class=argparse.RawTextHelpFormatter)\n",
"parser_convert.add_argument('--dir', type=str, required=True, help=\n 'directory containing the SavedModel to convert')\n",
"parser_convert.add_argument('--output_dir', type=str, required=True, help=\n 'output directory for the converted SavedModel')\n",
"parser_convert.add_argument('--tag_set', type=str, required=True, help=\n \"tag-set of graph in SavedModel to convert, separated by ','\")\n",
"convert_subparsers = parser_convert.add_subparsers(title=\n 'conversion methods', description='valid conversion methods', help=\n 'the conversion to run with the SavedModel')\n",
"parser_convert_with_tensorrt = convert_subparsers.add_parser('tensorrt',\n description=\n 'Convert the SavedModel with Tensorflow-TensorRT integration',\n formatter_class=argparse.RawTextHelpFormatter)\n",
"parser_convert_with_tensorrt.add_argument('--max_workspace_size_bytes',\n type=int, default=2 << 20, help=\n 'the maximum GPU temporary memory which the TRT engine can use at execution time'\n )\n",
"parser_convert_with_tensorrt.add_argument('--precision_mode', type=str,\n default='FP32', help='one of FP32, FP16 and INT8')\n",
"parser_convert_with_tensorrt.add_argument('--minimum_segment_size', type=\n int, default=3, help=\n 'the minimum number of nodes required for a subgraph to be replacedin a TensorRT node'\n )\n",
"parser_convert_with_tensorrt.add_argument('--convert_tf1_model', type=bool,\n default=False, help='support TRT conversion for TF1 models')\n",
"parser_convert_with_tensorrt.set_defaults(func=convert_with_tensorrt)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_4(self):...\n",
"VAR_6 = Mock()\n",
"VAR_3 = '@foo:bar'\n",
"VAR_7 = 5000000\n",
"VAR_8 = UserPresenceState.default(VAR_3)\n",
"VAR_8 = VAR_8.copy_and_replace(VAR_10=PresenceState.ONLINE, last_active_ts=now)\n",
"VAR_9 = VAR_8.copy_and_replace(VAR_10=PresenceState.ONLINE)\n",
"VAR_10, VAR_11, VAR_12 = handle_update(VAR_8, VAR_9, is_mine=False, VAR_6=\n wheel_timer, VAR_7=now)\n",
"self.assertFalse(VAR_11)\n",
"self.assertFalse(VAR_12)\n",
"self.assertFalse(VAR_10.currently_active)\n",
"self.assertEquals(VAR_9.state, VAR_10.state)\n",
"self.assertEquals(VAR_9.status_msg, VAR_10.status_msg)\n",
"self.assertEquals(VAR_6.insert.call_count, 1)\n",
"VAR_6.insert.assert_has_calls([call(VAR_7=now, obj=user_id, then=new_state.\n last_federation_update_ts + FEDERATION_TIMEOUT)], any_order=True)\n"
] | [
"def test_remote_ping_timer(self):...\n",
"wheel_timer = Mock()\n",
"user_id = '@foo:bar'\n",
"now = 5000000\n",
"prev_state = UserPresenceState.default(user_id)\n",
"prev_state = prev_state.copy_and_replace(state=PresenceState.ONLINE,\n last_active_ts=now)\n",
"new_state = prev_state.copy_and_replace(state=PresenceState.ONLINE)\n",
"state, persist_and_notify, federation_ping = handle_update(prev_state,\n new_state, is_mine=False, wheel_timer=wheel_timer, now=now)\n",
"self.assertFalse(persist_and_notify)\n",
"self.assertFalse(federation_ping)\n",
"self.assertFalse(state.currently_active)\n",
"self.assertEquals(new_state.state, state.state)\n",
"self.assertEquals(new_state.status_msg, state.status_msg)\n",
"self.assertEquals(wheel_timer.insert.call_count, 1)\n",
"wheel_timer.insert.assert_has_calls([call(now=now, obj=user_id, then=\n new_state.last_federation_update_ts + FEDERATION_TIMEOUT)], any_order=True)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_56(self, VAR_18, VAR_26=None, VAR_27=True, VAR_28=False, VAR_29=False...\n",
"\"\"\"docstring\"\"\"\n",
"if isinstance(VAR_18, dict):\n",
"self.update(VAR_18)\n",
"self.set(VAR_18, VAR_26)\n",
"if VAR_27 and (self.doctype, self.name) not in frappe.flags.currently_saving:\n",
"self.set('modified', now())\n",
"self.load_doc_before_save()\n",
"self.set('modified_by', frappe.session.user)\n",
"self.run_method('before_change')\n",
"frappe.db.set_value(self.doctype, self.name, VAR_18, VAR_26, self.modified,\n self.modified_by, VAR_27=update_modified)\n",
"self.run_method('on_change')\n",
"if VAR_28:\n",
"self.notify_update()\n",
"self.clear_cache()\n",
"if VAR_29:\n",
"frappe.db.commit()\n"
] | [
"def db_set(self, fieldname, value=None, update_modified=True, notify=False,...\n",
"\"\"\"docstring\"\"\"\n",
"if isinstance(fieldname, dict):\n",
"self.update(fieldname)\n",
"self.set(fieldname, value)\n",
"if update_modified and (self.doctype, self.name\n",
"self.set('modified', now())\n",
"self.load_doc_before_save()\n",
"self.set('modified_by', frappe.session.user)\n",
"self.run_method('before_change')\n",
"frappe.db.set_value(self.doctype, self.name, fieldname, value, self.\n modified, self.modified_by, update_modified=update_modified)\n",
"self.run_method('on_change')\n",
"if notify:\n",
"self.notify_update()\n",
"self.clear_cache()\n",
"if commit:\n",
"frappe.db.commit()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_5(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3 = self.client.get('/password_reset/')\n",
"self.assertEqual(VAR_3.status_code, 200)\n",
"VAR_3 = self.client.post('/password_reset/', {'email':\n '[email protected]'})\n",
"self.assertEqual(VAR_3.status_code, 302)\n",
"self.assertEqual(len(mail.outbox), 0)\n"
] | [
"def test_email_not_found(self):...\n",
"\"\"\"docstring\"\"\"\n",
"response = self.client.get('/password_reset/')\n",
"self.assertEqual(response.status_code, 200)\n",
"response = self.client.post('/password_reset/', {'email':\n '[email protected]'})\n",
"self.assertEqual(response.status_code, 302)\n",
"self.assertEqual(len(mail.outbox), 0)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_15(self, **VAR_4):...\n",
"self.view_config.revert()\n",
"return HttpResponse(\n '<html><script>parent.location.reload()</script>%s.</html>' % _('Reverted')\n )\n"
] | [
"def dispatch_revert(self, **kwargs):...\n",
"self.view_config.revert()\n",
"return HttpResponse(\n '<html><script>parent.location.reload()</script>%s.</html>' % _('Reverted')\n )\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Return'"
] |
[
"def FUNC_3(VAR_19: Text, VAR_20: Optional[Union[Path, Text]]=None...\n",
"\"\"\"docstring\"\"\"\n",
"import tarfile\n",
"if VAR_20 is None:\n",
"VAR_20 = tempfile.mkdtemp()\n",
"tar.extractall(VAR_20)\n",
"VAR_0.error(f'Failed to extract model at {VAR_19}. Error: {e}')\n",
"return TempDirectoryPath(VAR_20)\n",
"VAR_0.debug(f\"Extracted model to '{VAR_20}'.\")\n"
] | [
"def unpack_model(model_file: Text, working_directory: Optional[Union[Path,...\n",
"\"\"\"docstring\"\"\"\n",
"import tarfile\n",
"if working_directory is None:\n",
"working_directory = tempfile.mkdtemp()\n",
"tar.extractall(working_directory)\n",
"logger.error(f'Failed to extract model at {model_file}. Error: {e}')\n",
"return TempDirectoryPath(working_directory)\n",
"logger.debug(f\"Extracted model to '{working_directory}'.\")\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Import'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Return'",
"Expr'"
] |
[
"def FUNC_0(self):...\n",
"VAR_11 = JsonResource(self.hs)\n",
"VersionServlet(self.hs).register(VAR_11)\n",
"return VAR_11\n"
] | [
"def create_test_resource(self):...\n",
"resource = JsonResource(self.hs)\n",
"VersionServlet(self.hs).register(resource)\n",
"return resource\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Return'"
] |
[
"@CLASS_4('javascript')...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_15 = VAR_3.path()\n",
"if VAR_15:\n",
"VAR_15 = 'javascript' + os.sep.join(VAR_15.split('/'))\n",
"return 'text/html', utils.read_file(VAR_15, binary=False)\n"
] | [
"@add_handler('javascript')...\n",
"\"\"\"docstring\"\"\"\n",
"path = url.path()\n",
"if path:\n",
"path = 'javascript' + os.sep.join(path.split('/'))\n",
"return 'text/html', utils.read_file(path, binary=False)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_76(self, VAR_146=None, VAR_147=None, VAR_90=None):...\n",
"\"\"\"docstring\"\"\"\n",
"def FUNC_160(self=self, VAR_147=VAR_147, VAR_146=VAR_146):...\n",
"return self.has_membership(VAR_147=group_id, VAR_146=role)\n"
] | [
"def requires_membership(self, role=None, group_id=None, otherwise=None):...\n",
"\"\"\"docstring\"\"\"\n",
"def has_membership(self=self, group_id=group_id, role=role):...\n",
"return self.has_membership(group_id=group_id, role=role)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_4(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_14 = signedjson.key.generate_signing_key('abc')\n",
"self.expect_outgoing_key_request('targetserver', VAR_14)\n",
"VAR_21 = 'ed25519:%s' % (VAR_14.version,)\n",
"VAR_22 = PerspectivesKeyFetcher(self.hs2)\n",
"VAR_23 = VAR_22.get_keys({'targetserver': {VAR_21: 1000}})\n",
"VAR_24 = self.get_success(VAR_23)\n",
"self.assertIn('targetserver', VAR_24)\n",
"VAR_25 = VAR_24['targetserver'][VAR_21]\n",
"assert isinstance(VAR_25, FetchKeyResult)\n",
"self.assertEqual(signedjson.key.encode_verify_key_base64(VAR_25.verify_key),\n signedjson.key.encode_verify_key_base64(VAR_14.verify_key))\n"
] | [
"def test_get_key(self):...\n",
"\"\"\"docstring\"\"\"\n",
"testkey = signedjson.key.generate_signing_key('abc')\n",
"self.expect_outgoing_key_request('targetserver', testkey)\n",
"keyid = 'ed25519:%s' % (testkey.version,)\n",
"fetcher = PerspectivesKeyFetcher(self.hs2)\n",
"d = fetcher.get_keys({'targetserver': {keyid: 1000}})\n",
"res = self.get_success(d)\n",
"self.assertIn('targetserver', res)\n",
"keyres = res['targetserver'][keyid]\n",
"assert isinstance(keyres, FetchKeyResult)\n",
"self.assertEqual(signedjson.key.encode_verify_key_base64(keyres.verify_key),\n signedjson.key.encode_verify_key_base64(testkey.verify_key))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assert'",
"Expr'"
] |
[
"def FUNC_15(VAR_20, VAR_17):...\n",
"VAR_48 = False\n",
"if VAR_20['rating'].strip():\n",
"VAR_84 = False\n",
"if len(VAR_17.ratings) > 0:\n",
"if len(VAR_17.ratings) > 0:\n",
"VAR_17.ratings.remove(VAR_17.ratings[0])\n",
"return VAR_48\n",
"VAR_84 = VAR_17.ratings[0].rating\n",
"VAR_85 = int(float(VAR_20['rating']) * 2)\n",
"VAR_48 = True\n",
"if VAR_85 != VAR_84:\n",
"VAR_48 = True\n",
"VAR_104 = calibre_db.session.query(db.Ratings).filter(db.Ratings.rating ==\n VAR_85).first()\n",
"if VAR_104:\n",
"VAR_17.ratings.append(VAR_104)\n",
"VAR_117 = db.Ratings(rating=ratingx2)\n",
"if VAR_84:\n",
"VAR_17.ratings.append(VAR_117)\n",
"VAR_17.ratings.remove(VAR_17.ratings[0])\n"
] | [
"def edit_book_ratings(to_save, book):...\n",
"changed = False\n",
"if to_save['rating'].strip():\n",
"old_rating = False\n",
"if len(book.ratings) > 0:\n",
"if len(book.ratings) > 0:\n",
"book.ratings.remove(book.ratings[0])\n",
"return changed\n",
"old_rating = book.ratings[0].rating\n",
"ratingx2 = int(float(to_save['rating']) * 2)\n",
"changed = True\n",
"if ratingx2 != old_rating:\n",
"changed = True\n",
"is_rating = calibre_db.session.query(db.Ratings).filter(db.Ratings.rating ==\n ratingx2).first()\n",
"if is_rating:\n",
"book.ratings.append(is_rating)\n",
"new_rating = db.Ratings(rating=ratingx2)\n",
"if old_rating:\n",
"book.ratings.append(new_rating)\n",
"book.ratings.remove(book.ratings[0])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_121(*VAR_79, **VAR_42):...\n",
"if VAR_13.read_from_replica:\n",
"FUNC_6()\n",
"VAR_225 = VAR_129(*VAR_79, **get_newargs(fn, kwargs))\n",
"if VAR_1 and hasattr(VAR_1, 'primary_db'):\n",
"return VAR_225\n",
"VAR_1.db.close()\n",
"VAR_1.db = VAR_1.primary_db\n"
] | [
"def wrapper_fn(*args, **kwargs):...\n",
"if conf.read_from_replica:\n",
"connect_replica()\n",
"retval = fn(*args, **get_newargs(fn, kwargs))\n",
"if local and hasattr(local, 'primary_db'):\n",
"return retval\n",
"local.db.close()\n",
"local.db = local.primary_db\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Expr'",
"Assign'"
] |
[
"def FUNC_33(VAR_62=None, VAR_93='read', VAR_94=None, VAR_10=None, VAR_95=...\n",
"\"\"\"docstring\"\"\"\n",
"if not VAR_62 and VAR_94:\n",
"VAR_62 = VAR_94.doctype\n",
"import frappe.permissions\n",
"VAR_179 = frappe.permissions.has_permission(VAR_62, VAR_93, VAR_94=doc,\n VAR_95=verbose, VAR_10=user, VAR_32=throw)\n",
"if VAR_96 and not VAR_179:\n",
"if VAR_94:\n",
"return VAR_179\n",
"frappe.throw(FUNC_0('No permission for {0}').format(VAR_94.doctype + ' ' +\n VAR_94.name))\n",
"frappe.throw(FUNC_0('No permission for {0}').format(VAR_62))\n"
] | [
"def has_permission(doctype=None, ptype='read', doc=None, user=None, verbose...\n",
"\"\"\"docstring\"\"\"\n",
"if not doctype and doc:\n",
"doctype = doc.doctype\n",
"import frappe.permissions\n",
"out = frappe.permissions.has_permission(doctype, ptype, doc=doc, verbose=\n verbose, user=user, raise_exception=throw)\n",
"if throw and not out:\n",
"if doc:\n",
"return out\n",
"frappe.throw(_('No permission for {0}').format(doc.doctype + ' ' + doc.name))\n",
"frappe.throw(_('No permission for {0}').format(doctype))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Condition",
"Assign'",
"Import'",
"Assign'",
"Condition",
"Condition",
"Return'",
"Expr'",
"Expr'"
] |
[
"@wraps(VAR_2)...\n",
"if current_user.role_edit() or current_user.role_admin():\n",
"return VAR_2(*VAR_48, **kwargs)\n",
"abort(403)\n"
] | [
"@wraps(f)...\n",
"if current_user.role_edit() or current_user.role_admin():\n",
"return f(*args, **kwargs)\n",
"abort(403)\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Return'",
"Expr'"
] |
[
"def FUNC_11(self, VAR_0):...\n",
"VAR_13 = VAR_0.patch('httpx.get', side_effect=httpcore.NetworkError)\n",
"VAR_14 = VAR_0.patch('openapi_python_client.Path')\n",
"VAR_15 = VAR_0.patch('yaml.safe_load')\n",
"from openapi_python_client import _get_document\n",
"VAR_6 = VAR_0.MagicMock()\n",
"VAR_11 = VAR_2(VAR_6=url, VAR_7=None)\n",
"assert VAR_11 == GeneratorError(header=\n 'Could not get OpenAPI document from provided URL')\n",
"VAR_13.assert_called_once_with(VAR_6)\n",
"VAR_14.assert_not_called()\n",
"VAR_15.assert_not_called()\n"
] | [
"def test__get_document_bad_url(self, mocker):...\n",
"get = mocker.patch('httpx.get', side_effect=httpcore.NetworkError)\n",
"Path = mocker.patch('openapi_python_client.Path')\n",
"loads = mocker.patch('yaml.safe_load')\n",
"from openapi_python_client import _get_document\n",
"url = mocker.MagicMock()\n",
"result = _get_document(url=url, path=None)\n",
"assert result == GeneratorError(header=\n 'Could not get OpenAPI document from provided URL')\n",
"get.assert_called_once_with(url)\n",
"Path.assert_not_called()\n",
"loads.assert_not_called()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assert'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"async def FUNC_15(VAR_41):...\n",
"VAR_2, VAR_6 = VAR_41\n",
"VAR_31 = await self.get_server_verify_key_v2_direct(VAR_2, VAR_6)\n",
"VAR_0.warning('Error looking up keys %s from %s: %s', VAR_6, VAR_2, e)\n",
"VAR_26[VAR_2] = VAR_31\n",
"VAR_0.exception('Error getting keys %s from %s', VAR_6, VAR_2)\n"
] | [
"async def get_key(key_to_fetch_item):...\n",
"server_name, key_ids = key_to_fetch_item\n",
"keys = await self.get_server_verify_key_v2_direct(server_name, key_ids)\n",
"logger.warning('Error looking up keys %s from %s: %s', key_ids, server_name, e)\n",
"results[server_name] = keys\n",
"logger.exception('Error getting keys %s from %s', key_ids, server_name)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_2(self):...\n",
"from Products.PageTemplates.Expressions import createZopeEngine\n",
"return createZopeEngine()\n"
] | [
"def _makeEngine(self):...\n",
"from Products.PageTemplates.Expressions import createZopeEngine\n",
"return createZopeEngine()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"ImportFrom'",
"Return'"
] |
[
"def FUNC_30(VAR_2, VAR_5=None, **VAR_6):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_267 = FUNC_2(VAR_2, 'id')\n",
"return HttpResponseBadRequest('Invalid parameter value')\n",
"VAR_83 = list()\n",
"VAR_84 = None\n",
"for VAR_260 in VAR_267:\n",
"return HttpResponseBadRequest(e.message)\n",
"return JsonResponse('')\n",
"VAR_83.append(omero.cmd.Delete('/Annotation', VAR_260))\n",
"VAR_268 = omero.cmd.DoAll()\n",
"VAR_268.requests = VAR_83\n",
"VAR_84 = VAR_5.c.sf.submit(VAR_268, VAR_5.SERVICE_OPTS)\n",
"VAR_5._waitOnCmd(VAR_84)\n",
"VAR_84.close()\n"
] | [
"def api_tags_and_tagged_list_DELETE(request, conn=None, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"tag_ids = get_longs(request, 'id')\n",
"return HttpResponseBadRequest('Invalid parameter value')\n",
"dcs = list()\n",
"handle = None\n",
"for tag_id in tag_ids:\n",
"return HttpResponseBadRequest(e.message)\n",
"return JsonResponse('')\n",
"dcs.append(omero.cmd.Delete('/Annotation', tag_id))\n",
"doall = omero.cmd.DoAll()\n",
"doall.requests = dcs\n",
"handle = conn.c.sf.submit(doall, conn.SERVICE_OPTS)\n",
"conn._waitOnCmd(handle)\n",
"handle.close()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"For",
"Return'",
"Return'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_26(VAR_23):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_56 = 'string'\n",
"VAR_57 = VAR_23.add_parser('scan', description=scan_msg, formatter_class=\n argparse.RawTextHelpFormatter)\n",
"VAR_57.add_argument('--dir', type=str, required=True, help=\n 'directory containing the SavedModel to execute')\n",
"VAR_57.add_argument('--tag_set', type=str, help=\n \"tag-set of graph in SavedModel to scan, separated by ','\")\n",
"VAR_57.set_defaults(func=scan)\n"
] | [
"def add_scan_subparser(subparsers):...\n",
"\"\"\"docstring\"\"\"\n",
"scan_msg = \"\"\"Usage example:\nTo scan for denylisted ops in SavedModel:\n$saved_model_cli scan --dir /tmp/saved_model\nTo scan a specific MetaGraph, pass in --tag_set\n\"\"\"\n",
"parser_scan = subparsers.add_parser('scan', description=scan_msg,\n formatter_class=argparse.RawTextHelpFormatter)\n",
"parser_scan.add_argument('--dir', type=str, required=True, help=\n 'directory containing the SavedModel to execute')\n",
"parser_scan.add_argument('--tag_set', type=str, help=\n \"tag-set of graph in SavedModel to scan, separated by ','\")\n",
"parser_scan.set_defaults(func=scan)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_64(self, VAR_16: str, VAR_17: int=200) ->JsonDict:...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_22, VAR_23 = self.make_request('GET', \n '/_matrix/client/unstable/org.matrix.msc2432/rooms/%s/aliases' % (self.\n room_id,), VAR_16=access_token)\n",
"self.assertEqual(VAR_23.code, VAR_17, VAR_23.result)\n",
"VAR_61 = VAR_23.json_body\n",
"self.assertIsInstance(VAR_61, dict)\n",
"if VAR_17 == 200:\n",
"self.assertIsInstance(VAR_61['aliases'], list)\n",
"return VAR_61\n"
] | [
"def _get_aliases(self, access_token: str, expected_code: int=200) ->JsonDict:...\n",
"\"\"\"docstring\"\"\"\n",
"request, channel = self.make_request('GET', \n '/_matrix/client/unstable/org.matrix.msc2432/rooms/%s/aliases' % (self.\n room_id,), access_token=access_token)\n",
"self.assertEqual(channel.code, expected_code, channel.result)\n",
"res = channel.json_body\n",
"self.assertIsInstance(res, dict)\n",
"if expected_code == 200:\n",
"self.assertIsInstance(res['aliases'], list)\n",
"return res\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Return'"
] |
[
"def FUNC_86(self, VAR_25, *VAR_0, **VAR_1):...\n",
"FUNC_83(self, VAR_72(self, *VAR_0, **kwargs))\n",
"for VAR_6 in VAR_73:\n",
"FUNC_83(self, VAR_6(self, VAR_25, *VAR_0, **kwargs))\n",
"return self._return_value\n"
] | [
"def runner(self, method, *args, **kwargs):...\n",
"add_to_return_value(self, fn(self, *args, **kwargs))\n",
"for f in hooks:\n",
"add_to_return_value(self, f(self, method, *args, **kwargs))\n",
"return self._return_value\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"For",
"Expr'",
"Return'"
] |
[
"def __init__(self):...\n",
"self.called = False\n"
] | [
"def __init__(self):...\n",
"self.called = False\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"async def FUNC_8(self, VAR_17: str, VAR_16: str, VAR_37: Optional[JsonDict]...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_10 = CLASS_0(VAR_15='POST', VAR_17=destination, VAR_16=path, VAR_20=\n args, VAR_18=data)\n",
"VAR_12 = self.clock.time_msec()\n",
"VAR_11 = await self._send_request(VAR_10, VAR_30=long_retries, VAR_29=\n timeout, VAR_31=ignore_backoff)\n",
"if VAR_29:\n",
"VAR_61 = VAR_29 / 1000\n",
"VAR_61 = self.default_timeout\n",
"VAR_47 = await FUNC_0(self.reactor, VAR_61, VAR_10, VAR_11, VAR_12)\n",
"return VAR_47\n"
] | [
"async def post_json(self, destination: str, path: str, data: Optional[...\n",
"\"\"\"docstring\"\"\"\n",
"request = MatrixFederationRequest(method='POST', destination=destination,\n path=path, query=args, json=data)\n",
"start_ms = self.clock.time_msec()\n",
"response = await self._send_request(request, long_retries=long_retries,\n timeout=timeout, ignore_backoff=ignore_backoff)\n",
"if timeout:\n",
"_sec_timeout = timeout / 1000\n",
"_sec_timeout = self.default_timeout\n",
"body = await _handle_json_response(self.reactor, _sec_timeout, request,\n response, start_ms)\n",
"return body\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@login_required...\n",
"VAR_3 = get_object_or_404(Comment.objects.exclude(user=request.user), VAR_2\n =comment_id)\n",
"VAR_4 = LikeForm(user=request.user, VAR_3=comment, data=post_data(request))\n",
"if is_post(VAR_0) and VAR_4.is_valid():\n",
"VAR_5 = VAR_4.save()\n",
"return render(VAR_0=request, template_name=\n 'spirit/comment/like/create.html', context={'form': form, 'comment':\n comment})\n",
"VAR_5.comment.increase_likes_count()\n",
"if is_ajax(VAR_0):\n",
"return json_response({'url_delete': VAR_5.get_delete_url()})\n",
"return redirect(VAR_0.POST.get('next', VAR_3.get_absolute_url()))\n"
] | [
"@login_required...\n",
"comment = get_object_or_404(Comment.objects.exclude(user=request.user), pk=\n comment_id)\n",
"form = LikeForm(user=request.user, comment=comment, data=post_data(request))\n",
"if is_post(request) and form.is_valid():\n",
"like = form.save()\n",
"return render(request=request, template_name=\n 'spirit/comment/like/create.html', context={'form': form, 'comment':\n comment})\n",
"like.comment.increase_likes_count()\n",
"if is_ajax(request):\n",
"return json_response({'url_delete': like.get_delete_url()})\n",
"return redirect(request.POST.get('next', comment.get_absolute_url()))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
4
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Expr'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_17(self):...\n",
"VAR_5 = self.get_counts('json', period='this-year')\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertJSONEqual(VAR_5.content.decode(), VAR_0)\n"
] | [
"def test_counts_view_this_year(self):...\n",
"response = self.get_counts('json', period='this-year')\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertJSONEqual(response.content.decode(), COUNTS_DATA)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_41(self):...\n",
"VAR_85 = self.allowed_column_value or ''\n",
"return [t.strip() for t in VAR_85.split(',')]\n"
] | [
"def list_allowed_column_values(self):...\n",
"mct = self.allowed_column_value or ''\n",
"return [t.strip() for t in mct.split(',')]\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_8(self):...\n",
"VAR_5 = self._makeContext()\n",
"self.assertEqual(VAR_5.evaluate('dummy'), 'dummy')\n"
] | [
"def test_evaluate_with_render_simple_callable(self):...\n",
"ec = self._makeContext()\n",
"self.assertEqual(ec.evaluate('dummy'), 'dummy')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_15(VAR_19, VAR_16):...\n",
"VAR_50 = False\n",
"if VAR_19['rating'].strip():\n",
"VAR_85 = False\n",
"if len(VAR_16.ratings) > 0:\n",
"if len(VAR_16.ratings) > 0:\n",
"VAR_16.ratings.remove(VAR_16.ratings[0])\n",
"return VAR_50\n",
"VAR_85 = VAR_16.ratings[0].rating\n",
"VAR_86 = int(float(VAR_19['rating']) * 2)\n",
"VAR_50 = True\n",
"if VAR_86 != VAR_85:\n",
"VAR_50 = True\n",
"VAR_103 = calibre_db.session.query(db.Ratings).filter(db.Ratings.rating ==\n VAR_86).first()\n",
"if VAR_103:\n",
"VAR_16.ratings.append(VAR_103)\n",
"VAR_117 = db.Ratings(rating=ratingx2)\n",
"if VAR_85:\n",
"VAR_16.ratings.append(VAR_117)\n",
"VAR_16.ratings.remove(VAR_16.ratings[0])\n"
] | [
"def edit_book_ratings(to_save, book):...\n",
"changed = False\n",
"if to_save['rating'].strip():\n",
"old_rating = False\n",
"if len(book.ratings) > 0:\n",
"if len(book.ratings) > 0:\n",
"book.ratings.remove(book.ratings[0])\n",
"return changed\n",
"old_rating = book.ratings[0].rating\n",
"ratingx2 = int(float(to_save['rating']) * 2)\n",
"changed = True\n",
"if ratingx2 != old_rating:\n",
"changed = True\n",
"is_rating = calibre_db.session.query(db.Ratings).filter(db.Ratings.rating ==\n ratingx2).first()\n",
"if is_rating:\n",
"book.ratings.append(is_rating)\n",
"new_rating = db.Ratings(rating=ratingx2)\n",
"if old_rating:\n",
"book.ratings.append(new_rating)\n",
"book.ratings.remove(book.ratings[0])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_11(self):...\n",
"VAR_5 = {'not_types': ['m.*', 'org.*'], 'types': ['m.room.message',\n 'm.room.topic']}\n",
"VAR_6 = FUNC_0(sender='@foo:bar', type='m.room.topic', room_id='!foo:bar')\n",
"self.assertFalse(Filter(VAR_5).check(VAR_6))\n"
] | [
"def test_definition_not_types_takes_priority_over_types(self):...\n",
"definition = {'not_types': ['m.*', 'org.*'], 'types': ['m.room.message',\n 'm.room.topic']}\n",
"event = MockEvent(sender='@foo:bar', type='m.room.topic', room_id='!foo:bar')\n",
"self.assertFalse(Filter(definition).check(event))\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_2.method not in ['POST', 'DELETE']:\n",
"return JsonResponse({'Error':\n 'Need to POST or DELETE JSON data to update links'}, VAR_313=405)\n",
"VAR_17 = json.loads(VAR_2.body)\n",
"VAR_17 = json.loads(bytes_to_native_str(VAR_2.body))\n",
"if VAR_2.method == 'POST':\n",
"return FUNC_24(VAR_5, VAR_17)\n",
"if VAR_2.method == 'DELETE':\n",
"return FUNC_25(VAR_5, VAR_17)\n"
] | [
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"if request.method not in ['POST', 'DELETE']:\n",
"return JsonResponse({'Error':\n 'Need to POST or DELETE JSON data to update links'}, status=405)\n",
"json_data = json.loads(request.body)\n",
"json_data = json.loads(bytes_to_native_str(request.body))\n",
"if request.method == 'POST':\n",
"return _api_links_POST(conn, json_data)\n",
"if request.method == 'DELETE':\n",
"return _api_links_DELETE(conn, json_data)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'"
] |
[
"@app.route('/dataobj/move/<int:dataobj_id>', methods=['POST'])...\n",
"VAR_9 = forms.MoveItemForm()\n",
"VAR_22 = VAR_9.path.data if VAR_9.path.data != '' else 'root directory'\n",
"if VAR_9.path.data == None:\n",
"flash('No path specified.')\n",
"if data.move_item(VAR_1, VAR_9.path.data):\n",
"flash('Data not found.', 'error')\n",
"return redirect(f'/dataobj/{VAR_1}')\n",
"flash(f'Data successfully moved to {VAR_22}.', 'success')\n",
"flash(f'Data could not be moved to {VAR_22}.', 'error')\n",
"return redirect('/')\n",
"return redirect(f'/dataobj/{VAR_1}')\n",
"return redirect(f'/dataobj/{VAR_1}')\n"
] | [
"@app.route('/dataobj/move/<int:dataobj_id>', methods=['POST'])...\n",
"form = forms.MoveItemForm()\n",
"out_dir = form.path.data if form.path.data != '' else 'root directory'\n",
"if form.path.data == None:\n",
"flash('No path specified.')\n",
"if data.move_item(dataobj_id, form.path.data):\n",
"flash('Data not found.', 'error')\n",
"return redirect(f'/dataobj/{dataobj_id}')\n",
"flash(f'Data successfully moved to {out_dir}.', 'success')\n",
"flash(f'Data could not be moved to {out_dir}.', 'error')\n",
"return redirect('/')\n",
"return redirect(f'/dataobj/{dataobj_id}')\n",
"return redirect(f'/dataobj/{dataobj_id}')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Return'",
"Expr'",
"Expr'",
"Return'",
"Return'",
"Return'"
] |
[
"async def FUNC_1(self, VAR_5, VAR_6, VAR_8, VAR_9):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = VAR_6.room_id\n",
"VAR_12 = VAR_6.event_id\n",
"VAR_54 = await self.store.have_events_in_timeline(VAR_8)\n",
"if not VAR_8 - VAR_54:\n",
"return\n",
"VAR_55 = await self.store.get_latest_event_ids_in_room(VAR_11)\n",
"VAR_56 = set(VAR_55)\n",
"VAR_56 |= VAR_54\n",
"VAR_0.info('[%s %s]: Requesting missing events between %s and %s', VAR_11,\n VAR_12, shortstr(VAR_56), VAR_12)\n",
"VAR_65 = await self.federation_client.get_missing_events(VAR_5, VAR_11,\n earliest_events_ids=list(latest), VAR_39=[pdu], VAR_17=10, VAR_9=\n min_depth, timeout=60000)\n",
"VAR_0.warning('[%s %s]: Failed to get prev_events: %s', VAR_11, VAR_12, VAR_170\n )\n",
"VAR_0.info('[%s %s]: Got %d prev_events: %s', VAR_11, VAR_12, len(VAR_65),\n shortstr(VAR_65))\n",
"return\n",
"VAR_65.sort(VAR_144=lambda x: x.depth)\n",
"for VAR_21 in VAR_65:\n",
"VAR_0.info('[%s %s] Handling received prev_event %s', VAR_11, VAR_12,\n VAR_21.event_id)\n",
"await self.on_receive_pdu(VAR_5, VAR_21, VAR_7=False)\n",
"if VAR_170.code == 403:\n",
"VAR_0.warning('[%s %s] Received prev_event %s failed history check.',\n VAR_11, VAR_12, VAR_21.event_id)\n"
] | [
"async def _get_missing_events_for_pdu(self, origin, pdu, prevs, min_depth):...\n",
"\"\"\"docstring\"\"\"\n",
"room_id = pdu.room_id\n",
"event_id = pdu.event_id\n",
"seen = await self.store.have_events_in_timeline(prevs)\n",
"if not prevs - seen:\n",
"return\n",
"latest_list = await self.store.get_latest_event_ids_in_room(room_id)\n",
"latest = set(latest_list)\n",
"latest |= seen\n",
"logger.info('[%s %s]: Requesting missing events between %s and %s', room_id,\n event_id, shortstr(latest), event_id)\n",
"missing_events = await self.federation_client.get_missing_events(origin,\n room_id, earliest_events_ids=list(latest), latest_events=[pdu], limit=\n 10, min_depth=min_depth, timeout=60000)\n",
"logger.warning('[%s %s]: Failed to get prev_events: %s', room_id, event_id, e)\n",
"logger.info('[%s %s]: Got %d prev_events: %s', room_id, event_id, len(\n missing_events), shortstr(missing_events))\n",
"return\n",
"missing_events.sort(key=lambda x: x.depth)\n",
"for ev in missing_events:\n",
"logger.info('[%s %s] Handling received prev_event %s', room_id, event_id,\n ev.event_id)\n",
"await self.on_receive_pdu(origin, ev, sent_to_us_directly=False)\n",
"if e.code == 403:\n",
"logger.warning('[%s %s] Received prev_event %s failed history check.',\n room_id, event_id, ev.event_id)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"AugAssign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Return'",
"Expr'",
"For",
"Expr'",
"Expr'",
"Condition",
"Expr'"
] |
[
"@VAR_0.route('/fork/<int:id>')...\n",
"if get_user() is None:\n",
"return redirect('/login?next=fork/{id}'.format(VAR_3=id))\n",
"VAR_14 = Query()\n",
"VAR_14.user = get_user()\n",
"VAR_15 = g.conn.session.query(Query).filter(Query.id == VAR_3).one()\n",
"VAR_14.title = VAR_15.title\n",
"VAR_14.parent_id = VAR_15.id\n",
"VAR_14.description = VAR_15.description\n",
"g.conn.session.add(VAR_14)\n",
"g.conn.session.commit()\n",
"VAR_16 = QueryRevision(VAR_4=query.id, VAR_21=parent_query.latest_rev.text)\n",
"VAR_14.latest_rev = VAR_16\n",
"g.conn.session.add(VAR_14)\n",
"g.conn.session.add(VAR_16)\n",
"g.conn.session.commit()\n",
"return redirect(url_for('query_show', VAR_4=query.id))\n"
] | [
"@app.route('/fork/<int:id>')...\n",
"if get_user() is None:\n",
"return redirect('/login?next=fork/{id}'.format(id=id))\n",
"query = Query()\n",
"query.user = get_user()\n",
"parent_query = g.conn.session.query(Query).filter(Query.id == id).one()\n",
"query.title = parent_query.title\n",
"query.parent_id = parent_query.id\n",
"query.description = parent_query.description\n",
"g.conn.session.add(query)\n",
"g.conn.session.commit()\n",
"query_rev = QueryRevision(query_id=query.id, text=parent_query.latest_rev.text)\n",
"query.latest_rev = query_rev\n",
"g.conn.session.add(query)\n",
"g.conn.session.add(query_rev)\n",
"g.conn.session.commit()\n",
"return redirect(url_for('query_show', query_id=query.id))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"For",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_101(self, VAR_10):...\n",
"\"\"\"docstring\"\"\"\n",
"self.xmlrpc_procedures[VAR_10.__name__] = VAR_10\n",
"return VAR_10\n"
] | [
"def xmlrpc(self, f):...\n",
"\"\"\"docstring\"\"\"\n",
"self.xmlrpc_procedures[f.__name__] = f\n",
"return f\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_1(VAR_1, VAR_2):...\n",
"return frappe.local.module_app[scrub(VAR_1)] + '.' + scrub(VAR_1\n ) + '.report.' + scrub(VAR_2) + '.' + scrub(VAR_2)\n"
] | [
"def get_report_module_dotted_path(module, report_name):...\n",
"return frappe.local.module_app[scrub(module)] + '.' + scrub(module\n ) + '.report.' + scrub(report_name) + '.' + scrub(report_name)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@FUNC_0...\n",
"return AccountValidityHandler(self)\n"
] | [
"@cache_in_self...\n",
"return AccountValidityHandler(self)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_26(self):...\n",
"VAR_13 = self.cleaned_data['agreement']\n",
"self.spam_check(VAR_13)\n",
"return VAR_13\n"
] | [
"def clean_agreement(self):...\n",
"value = self.cleaned_data['agreement']\n",
"self.spam_check(value)\n",
"return value\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_24(self):...\n",
"assert join('/', trailing_slash=True) == '/'\n",
"assert join('/foo', 'baz', None, trailing_slash=True) == '/foo/baz/'\n",
"assert join('/foo', 'baz/', trailing_slash=True) == '/foo/baz/'\n"
] | [
"def test_trailing_slash(self):...\n",
"assert join('/', trailing_slash=True) == '/'\n",
"assert join('/foo', 'baz', None, trailing_slash=True) == '/foo/baz/'\n",
"assert join('/foo', 'baz/', trailing_slash=True) == '/foo/baz/'\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assert'",
"Assert'",
"Assert'"
] |
[
"def FUNC_5(VAR_4, VAR_9, VAR_7, VAR_5, VAR_10):...\n",
"VAR_48 = False\n",
"if VAR_5 == 'languages':\n",
"VAR_83 = VAR_9.lang_code\n",
"if VAR_5 == 'custom':\n",
"for VAR_12 in VAR_10:\n",
"VAR_83 = VAR_9.value\n",
"VAR_83 = VAR_9.name\n",
"VAR_11 = VAR_7.query(VAR_9).filter(VAR_83 == VAR_12).first()\n",
"return VAR_48\n",
"if VAR_5 == 'author':\n",
"VAR_102 = VAR_9(VAR_12, helper.get_sorted_author(VAR_12.replace('|', ',')), '')\n",
"if VAR_5 == 'series':\n",
"if VAR_11 is None:\n",
"VAR_102 = VAR_9(VAR_12, VAR_12)\n",
"if VAR_5 == 'custom':\n",
"VAR_48 = True\n",
"VAR_11 = FUNC_6(VAR_11, VAR_12, VAR_5)\n",
"VAR_102 = VAR_9(value=add_element)\n",
"if VAR_5 == 'publisher':\n",
"VAR_7.add(VAR_102)\n",
"VAR_48 = True\n",
"VAR_102 = VAR_9(VAR_12, None)\n",
"VAR_102 = VAR_9(VAR_12)\n",
"VAR_4.append(VAR_102)\n",
"VAR_48 = True\n",
"VAR_4.append(VAR_11)\n"
] | [
"def add_objects(db_book_object, db_object, db_session, db_type, add_elements):...\n",
"changed = False\n",
"if db_type == 'languages':\n",
"db_filter = db_object.lang_code\n",
"if db_type == 'custom':\n",
"for add_element in add_elements:\n",
"db_filter = db_object.value\n",
"db_filter = db_object.name\n",
"db_element = db_session.query(db_object).filter(db_filter == add_element\n ).first()\n",
"return changed\n",
"if db_type == 'author':\n",
"new_element = db_object(add_element, helper.get_sorted_author(add_element.\n replace('|', ',')), '')\n",
"if db_type == 'series':\n",
"if db_element is None:\n",
"new_element = db_object(add_element, add_element)\n",
"if db_type == 'custom':\n",
"changed = True\n",
"db_element = create_objects_for_addition(db_element, add_element, db_type)\n",
"new_element = db_object(value=add_element)\n",
"if db_type == 'publisher':\n",
"db_session.add(new_element)\n",
"changed = True\n",
"new_element = db_object(add_element, None)\n",
"new_element = db_object(add_element)\n",
"db_book_object.append(new_element)\n",
"changed = True\n",
"db_book_object.append(db_element)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"For",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"from __future__ import division, print_function, unicode_literals\n",
"import os\n",
"from datetime import datetime\n",
"import json\n",
"from shutil import copyfile\n",
"from uuid import uuid4\n",
"VAR_0 = True\n",
"from scholarly import scholarly\n",
"VAR_0 = False\n",
"from babel import Locale as LC\n",
"from babel.core import UnknownLocaleError\n",
"from flask import Blueprint, request, flash, redirect, url_for, abort, Markup, Response\n",
"from flask_babel import gettext as _\n",
"from flask_login import current_user, login_required\n",
"from sqlalchemy.exc import OperationalError, IntegrityError\n",
"from sqlite3 import OperationalError as sqliteOperationalError\n",
"from . import constants, logger, isoLanguages, gdriveutils, uploader, helper\n",
"from . import config, get_locale, ub, db\n",
"from . import calibre_db\n",
"from .services.worker import WorkerThread\n",
"from .tasks.upload import TaskUpload\n",
"from .render_template import render_title_template\n",
"from .usermanagement import login_required_if_no_ano\n",
"from functools import wraps\n",
"VAR_1 = Blueprint('editbook', __name__)\n",
"VAR_2 = logger.create()\n",
"def FUNC_0(VAR_3):...\n",
"@wraps(VAR_3)...\n",
"if current_user.role_upload() or current_user.role_admin():\n",
"return VAR_3(*VAR_46, **kwargs)\n",
"abort(403)\n",
"return FUNC_44\n"
] | [
"from __future__ import division, print_function, unicode_literals\n",
"import os\n",
"from datetime import datetime\n",
"import json\n",
"from shutil import copyfile\n",
"from uuid import uuid4\n",
"have_scholar = True\n",
"from scholarly import scholarly\n",
"have_scholar = False\n",
"from babel import Locale as LC\n",
"from babel.core import UnknownLocaleError\n",
"from flask import Blueprint, request, flash, redirect, url_for, abort, Markup, Response\n",
"from flask_babel import gettext as _\n",
"from flask_login import current_user, login_required\n",
"from sqlalchemy.exc import OperationalError, IntegrityError\n",
"from sqlite3 import OperationalError as sqliteOperationalError\n",
"from . import constants, logger, isoLanguages, gdriveutils, uploader, helper\n",
"from . import config, get_locale, ub, db\n",
"from . import calibre_db\n",
"from .services.worker import WorkerThread\n",
"from .tasks.upload import TaskUpload\n",
"from .render_template import render_title_template\n",
"from .usermanagement import login_required_if_no_ano\n",
"from functools import wraps\n",
"editbook = Blueprint('editbook', __name__)\n",
"log = logger.create()\n",
"def upload_required(f):...\n",
"@wraps(f)...\n",
"if current_user.role_upload() or current_user.role_admin():\n",
"return f(*args, **kwargs)\n",
"abort(403)\n",
"return inner\n"
] | [
0,
0,
0,
0,
0,
0,
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"ImportFrom'",
"Assign'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"FunctionDef'",
"Condition",
"Condition",
"Return'",
"Expr'",
"Return'"
] |
[
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_114 = VAR_2.GET.get('format', 'png')\n",
"if VAR_114 not in ('jpeg', 'png', 'tif'):\n",
"VAR_114 = 'png'\n",
"VAR_150 = []\n",
"VAR_151 = []\n",
"if VAR_6 is None:\n",
"VAR_150 = VAR_2.GET.getlist('image')\n",
"VAR_150 = [VAR_6]\n",
"if len(VAR_150) == 0:\n",
"VAR_152 = []\n",
"VAR_151 = VAR_2.GET.getlist('well')\n",
"if VAR_150:\n",
"if len(VAR_151) == 0:\n",
"VAR_152 = list(VAR_8.getObjects('Image', VAR_150))\n",
"if VAR_151:\n",
"return HttpResponseServerError(\n 'No images or wells specified in request. Use ?image=123 or ?well=123')\n",
"if len(VAR_152) == 0:\n",
"VAR_319 = int(VAR_2.GET.get('index', 0))\n",
"VAR_319 = 0\n",
"for VAR_10 in VAR_8.getObjects('Well', VAR_151):\n",
"VAR_231 = 'Cannot download as %s. Images (ids: %s) not found.' % (VAR_114,\n VAR_150)\n",
"if len(VAR_152) == 1:\n",
"VAR_152.append(VAR_10.getWellSample(VAR_319).image())\n",
"VAR_1.debug(VAR_231)\n",
"VAR_60 = VAR_152[0].renderJpeg()\n",
"VAR_232 = tempfile.NamedTemporaryFile(suffix='.download_as')\n",
"return HttpResponseServerError(VAR_231)\n",
"if VAR_60 is None:\n",
"def FUNC_77(VAR_233, VAR_234, VAR_235):...\n",
"VAR_61 = HttpResponse(VAR_60, VAR_279='image/jpeg')\n",
"VAR_17 = os.path.basename(VAR_233)\n",
"VAR_61['Content-Length'] = len(VAR_60)\n",
"VAR_293 = '%s.%s' % (VAR_17, VAR_234)\n",
"VAR_61['Content-Disposition'] = 'attachment; filename=%s.jpg' % VAR_152[0\n ].getName().replace(' ', '_')\n",
"VAR_293 = os.path.join(VAR_235, VAR_293)\n",
"VAR_61['Content-Type'] = 'application/force-download'\n",
"VAR_212 = 1\n",
"return VAR_61\n",
"VAR_17 = VAR_293[:-(len(VAR_234) + 1)]\n",
"while os.path.exists(VAR_293):\n",
"VAR_293 = '%s_(%d).%s' % (VAR_17, VAR_212, VAR_234)\n",
"return VAR_293\n",
"VAR_212 += 1\n"
] | [
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"format = request.GET.get('format', 'png')\n",
"if format not in ('jpeg', 'png', 'tif'):\n",
"format = 'png'\n",
"imgIds = []\n",
"wellIds = []\n",
"if iid is None:\n",
"imgIds = request.GET.getlist('image')\n",
"imgIds = [iid]\n",
"if len(imgIds) == 0:\n",
"images = []\n",
"wellIds = request.GET.getlist('well')\n",
"if imgIds:\n",
"if len(wellIds) == 0:\n",
"images = list(conn.getObjects('Image', imgIds))\n",
"if wellIds:\n",
"return HttpResponseServerError(\n 'No images or wells specified in request. Use ?image=123 or ?well=123')\n",
"if len(images) == 0:\n",
"index = int(request.GET.get('index', 0))\n",
"index = 0\n",
"for w in conn.getObjects('Well', wellIds):\n",
"msg = 'Cannot download as %s. Images (ids: %s) not found.' % (format, imgIds)\n",
"if len(images) == 1:\n",
"images.append(w.getWellSample(index).image())\n",
"logger.debug(msg)\n",
"jpeg_data = images[0].renderJpeg()\n",
"temp = tempfile.NamedTemporaryFile(suffix='.download_as')\n",
"return HttpResponseServerError(msg)\n",
"if jpeg_data is None:\n",
"def makeImageName(originalName, extension, folder_name):...\n",
"rsp = HttpResponse(jpeg_data, mimetype='image/jpeg')\n",
"name = os.path.basename(originalName)\n",
"rsp['Content-Length'] = len(jpeg_data)\n",
"imgName = '%s.%s' % (name, extension)\n",
"rsp['Content-Disposition'] = 'attachment; filename=%s.jpg' % images[0].getName(\n ).replace(' ', '_')\n",
"imgName = os.path.join(folder_name, imgName)\n",
"rsp['Content-Type'] = 'application/force-download'\n",
"i = 1\n",
"return rsp\n",
"name = imgName[:-(len(extension) + 1)]\n",
"while os.path.exists(imgName):\n",
"imgName = '%s_(%d).%s' % (name, i, extension)\n",
"return imgName\n",
"i += 1\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Assign'",
"For",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Return'",
"Condition",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"AugAssign'"
] |
[
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_101 = FUNC_0(VAR_2, 'group', -1)\n",
"VAR_5.SERVICE_OPTS.setOmeroGroup(VAR_345(VAR_101))\n",
"VAR_177 = int(VAR_2.GET.get('offset'))\n",
"VAR_177 = VAR_88 = None\n",
"VAR_160 = VAR_2.GET.get('jsonmode')\n",
"VAR_88 = int(VAR_2.GET.get('limit', 1000))\n",
"if VAR_160 == 'tagcount':\n",
"VAR_288 = VAR_5.getTagCount()\n",
"VAR_104 = BaseContainer(VAR_5)\n",
"return dict(VAR_288=tag_count)\n",
"VAR_104.loadTagsRecursive(eid=-1, VAR_177=offset, VAR_88=limit)\n",
"VAR_161 = VAR_104.tags_recursive\n",
"VAR_162 = VAR_104.tags_recursive_owners\n",
"if VAR_160 == 'tags':\n",
"VAR_71 = list((VAR_318, VAR_433, o, s) for VAR_318, VAR_362, VAR_433, o, s in\n VAR_161)\n",
"if VAR_160 == 'desc':\n",
"return VAR_71\n",
"return dict((VAR_318, VAR_362) for VAR_318, VAR_362, VAR_433, o, s in VAR_161)\n",
"if VAR_160 == 'owners':\n",
"return VAR_162\n",
"return HttpResponse()\n"
] | [
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"group = get_long_or_default(request, 'group', -1)\n",
"conn.SERVICE_OPTS.setOmeroGroup(str(group))\n",
"offset = int(request.GET.get('offset'))\n",
"offset = limit = None\n",
"jsonmode = request.GET.get('jsonmode')\n",
"limit = int(request.GET.get('limit', 1000))\n",
"if jsonmode == 'tagcount':\n",
"tag_count = conn.getTagCount()\n",
"manager = BaseContainer(conn)\n",
"return dict(tag_count=tag_count)\n",
"manager.loadTagsRecursive(eid=-1, offset=offset, limit=limit)\n",
"all_tags = manager.tags_recursive\n",
"all_tags_owners = manager.tags_recursive_owners\n",
"if jsonmode == 'tags':\n",
"r = list((i, t, o, s) for i, d, t, o, s in all_tags)\n",
"if jsonmode == 'desc':\n",
"return r\n",
"return dict((i, d) for i, d, t, o, s in all_tags)\n",
"if jsonmode == 'owners':\n",
"return all_tags_owners\n",
"return HttpResponse()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Return'",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"@app.route('/config', methods=['GET', 'POST'])...\n",
"\"\"\"docstring\"\"\"\n",
"def FUNC_20(VAR_22, VAR_23, VAR_24):...\n",
"if VAR_22 != 'SECRET_KEY':\n",
"if type(VAR_23) is dict:\n",
"VAR_9 = forms.config_form(app.config)\n",
"for k, v in VAR_23.items():\n",
"VAR_24[VAR_22] = VAR_23\n",
"VAR_25 = vars(Config())\n",
"FUNC_20(k, v, VAR_24[VAR_22])\n",
"if VAR_9.validate_on_submit():\n",
"VAR_35 = Config()\n",
"if request.method == 'POST':\n",
"VAR_35.override(VAR_9.data)\n",
"flash('Could not update config.', 'error')\n",
"return render_template('config.html', conf=form, VAR_25=default, title=\n 'Edit Config')\n",
"for k, v in vars(VAR_35).items():\n",
"FUNC_20(k, v, app.config)\n",
"write_config(vars(VAR_35))\n",
"flash('Config successfully updated.', 'success')\n"
] | [
"@app.route('/config', methods=['GET', 'POST'])...\n",
"\"\"\"docstring\"\"\"\n",
"def update_config_value(key, val, dictionary):...\n",
"if key != 'SECRET_KEY':\n",
"if type(val) is dict:\n",
"form = forms.config_form(app.config)\n",
"for k, v in val.items():\n",
"dictionary[key] = val\n",
"default = vars(Config())\n",
"update_config_value(k, v, dictionary[key])\n",
"if form.validate_on_submit():\n",
"changed_config = Config()\n",
"if request.method == 'POST':\n",
"changed_config.override(form.data)\n",
"flash('Could not update config.', 'error')\n",
"return render_template('config.html', conf=form, default=default, title=\n 'Edit Config')\n",
"for k, v in vars(changed_config).items():\n",
"update_config_value(k, v, app.config)\n",
"write_config(vars(changed_config))\n",
"flash('Config successfully updated.', 'success')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"FunctionDef'",
"Condition",
"Condition",
"Assign'",
"For",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Return'",
"For",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_12(self):...\n",
""
] | [
"def finalize_options(self):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_23(self, VAR_19, VAR_36, VAR_37):...\n",
"if VAR_19 is None:\n",
"return VAR_36 == VAR_37\n",
"if isinstance(VAR_19, str):\n",
"VAR_19 = VAR_19.split('/')\n",
"if isinstance(VAR_19, bytes):\n",
"VAR_36 = FUNC_0(VAR_36, VAR_19, None)\n",
"return False\n",
"return VAR_36 == VAR_37\n",
"VAR_19 = VAR_19.split(b'/')\n",
"VAR_37 = FUNC_0(VAR_37, VAR_19, None)\n"
] | [
"def same_part(self, name, ob1, ob2):...\n",
"if name is None:\n",
"return ob1 == ob2\n",
"if isinstance(name, str):\n",
"name = name.split('/')\n",
"if isinstance(name, bytes):\n",
"ob1 = boboAwareZopeTraverse(ob1, name, None)\n",
"return False\n",
"return ob1 == ob2\n",
"name = name.split(b'/')\n",
"ob2 = boboAwareZopeTraverse(ob2, name, None)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Return'",
"Assign'",
"Assign'"
] |
[
"def FUNC_52(VAR_62, VAR_109=False, VAR_115=False):...\n",
"\"\"\"docstring\"\"\"\n",
"FUNC_53(FUNC_56(VAR_12.get_value('DocType', VAR_62, 'module')), 'doctype',\n FUNC_56(VAR_62), VAR_109=force, VAR_115=reset_permissions)\n"
] | [
"def reload_doctype(doctype, force=False, reset_permissions=False):...\n",
"\"\"\"docstring\"\"\"\n",
"reload_doc(scrub(db.get_value('DocType', doctype, 'module')), 'doctype',\n scrub(doctype), force=force, reset_permissions=reset_permissions)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"def FUNC_53(VAR_21, VAR_29, VAR_30):...\n",
"for tag in VAR_29:\n",
"VAR_21 = VAR_21.filter(db.Books.tags.any(db.Tags.id == tag))\n",
"for tag in VAR_30:\n",
"VAR_21 = VAR_21.filter(not_(db.Books.tags.any(db.Tags.id == tag)))\n",
"return VAR_21\n"
] | [
"def adv_search_tag(q, include_tag_inputs, exclude_tag_inputs):...\n",
"for tag in include_tag_inputs:\n",
"q = q.filter(db.Books.tags.any(db.Tags.id == tag))\n",
"for tag in exclude_tag_inputs:\n",
"q = q.filter(not_(db.Books.tags.any(db.Tags.id == tag)))\n",
"return q\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Assign'",
"For",
"Assign'",
"Return'"
] |
[
"def FUNC_43(VAR_110, VAR_111):...\n",
"if 'firstrunstart' in VAR_111:\n",
"VAR_111.remove('firstrunstart')\n",
"if 'firstrunend' in VAR_111:\n",
"VAR_111.remove('firstrunend')\n",
"return ['firstrunstart'] + VAR_110 + VAR_111 + ['firstrunend']\n"
] | [
"def custom_insert_order(existing, missing):...\n",
"if 'firstrunstart' in missing:\n",
"missing.remove('firstrunstart')\n",
"if 'firstrunend' in missing:\n",
"missing.remove('firstrunend')\n",
"return ['firstrunstart'] + existing + missing + ['firstrunend']\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Return'"
] |
[
"def FUNC_1(VAR_2, VAR_3):...\n",
"VAR_51 = VAR_2.GET.getlist(VAR_3)\n",
"return [VAR_318 for VAR_318 in VAR_51 if VAR_318 != '']\n"
] | [
"def get_list(request, name):...\n",
"val = request.GET.getlist(name)\n",
"return [i for i in val if i != '']\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"async def FUNC_26(VAR_23):...\n",
"self.assertEqual(VAR_23, {'server1': {FUNC_0(VAR_20): 1500}})\n",
"return {'server1': {FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20), \n 1200)}}\n"
] | [
"async def get_keys2(keys_to_fetch):...\n",
"self.assertEqual(keys_to_fetch, {'server1': {get_key_id(key1): 1500}})\n",
"return {'server1': {get_key_id(key1): FetchKeyResult(get_verify_key(key1), \n 1200)}}\n"
] | [
0,
0,
0
] | [
"AsyncFunctionDef'",
"Expr'",
"Return'"
] |
[
"def __init__(self, VAR_2: IReactorCore, VAR_3: Optional[...\n",
"self._reactor = VAR_2\n",
"self._clock = Clock(VAR_2)\n",
"self._pool = HTTPConnectionPool(VAR_2)\n",
"self._pool.retryAutomatically = False\n",
"self._pool.maxPersistentPerHost = 5\n",
"self._pool.cachedConnectionTimeout = 2 * 60\n",
"self._agent = Agent.usingEndpointFactory(self._reactor, CLASS_1(VAR_2,\n VAR_3, VAR_5), pool=self._pool)\n",
"self.user_agent = VAR_4\n",
"if VAR_6 is None:\n",
"VAR_6 = WellKnownResolver(self._reactor, agent=Agent(self._reactor, pool=\n self._pool, contextFactory=tls_client_options_factory), VAR_4=self.\n user_agent)\n",
"self._well_known_resolver = VAR_6\n"
] | [
"def __init__(self, reactor: IReactorCore, tls_client_options_factory:...\n",
"self._reactor = reactor\n",
"self._clock = Clock(reactor)\n",
"self._pool = HTTPConnectionPool(reactor)\n",
"self._pool.retryAutomatically = False\n",
"self._pool.maxPersistentPerHost = 5\n",
"self._pool.cachedConnectionTimeout = 2 * 60\n",
"self._agent = Agent.usingEndpointFactory(self._reactor,\n MatrixHostnameEndpointFactory(reactor, tls_client_options_factory,\n _srv_resolver), pool=self._pool)\n",
"self.user_agent = user_agent\n",
"if _well_known_resolver is None:\n",
"_well_known_resolver = WellKnownResolver(self._reactor, agent=Agent(self.\n _reactor, pool=self._pool, contextFactory=tls_client_options_factory),\n user_agent=self.user_agent)\n",
"self._well_known_resolver = _well_known_resolver\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_26(self):...\n",
"VAR_5, VAR_15 = self._test_confirm_start()\n",
"VAR_15 = VAR_15.replace('/reset/', '/reset/custom/named/')\n",
"VAR_3 = self.client.post(VAR_15, {'new_password1': 'anewpassword',\n 'new_password2': 'anewpassword'})\n",
"self.assertEqual(VAR_3.status_code, 302)\n",
"self.assertURLEqual(VAR_3.url, '/password_reset/')\n"
] | [
"def test_confirm_redirect_custom_named(self):...\n",
"url, path = self._test_confirm_start()\n",
"path = path.replace('/reset/', '/reset/custom/named/')\n",
"response = self.client.post(path, {'new_password1': 'anewpassword',\n 'new_password2': 'anewpassword'})\n",
"self.assertEqual(response.status_code, 302)\n",
"self.assertURLEqual(response.url, '/password_reset/')\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_56(VAR_35, VAR_36, VAR_37, VAR_38, VAR_39, VAR_40, VAR_41, VAR_24,...\n",
"VAR_35.extend((VAR_36.replace('|', ','), VAR_37, VAR_38))\n",
"if VAR_39:\n",
"if VAR_40:\n",
"VAR_35.extend([_(u'Published after ') + format_date(datetime.strptime(\n VAR_39, '%Y-%m-%d'), format='medium', locale=get_locale())])\n",
"VAR_39 = u''\n",
"VAR_92 = {'tag': db.Tags, 'serie': db.Series, 'shelf': ub.Shelf}\n",
"VAR_35.extend([_(u'Published before ') + format_date(datetime.strptime(\n VAR_40, '%Y-%m-%d'), format='medium', locale=get_locale())])\n",
"VAR_40 = u''\n",
"for key, db_element in VAR_92.items():\n",
"VAR_111 = calibre_db.session.query(db_element).filter(db_element.id.in_(\n VAR_41['include_' + key])).all()\n",
"VAR_57 = calibre_db.session.query(db.Languages).filter(db.Languages.id.in_(\n VAR_41['include_language'])).all()\n",
"VAR_35.extend(tag.name for tag in VAR_111)\n",
"if VAR_57:\n",
"VAR_111 = calibre_db.session.query(db_element).filter(db_element.id.in_(\n VAR_41['exclude_' + key])).all()\n",
"VAR_57 = calibre_db.speaking_language(VAR_57)\n",
"VAR_35.extend(language.name for language in VAR_57)\n",
"VAR_35.extend(tag.name for tag in VAR_111)\n",
"VAR_57 = calibre_db.session.query(db.Languages).filter(db.Languages.id.in_(\n VAR_41['exclude_language'])).all()\n",
"if VAR_57:\n",
"VAR_57 = calibre_db.speaking_language(VAR_57)\n",
"VAR_35.extend(language.name for language in VAR_57)\n",
"if VAR_24:\n",
"VAR_35.extend([_(u'Rating <= %(rating)s', rating=rating_high)])\n",
"if VAR_25:\n",
"VAR_35.extend([_(u'Rating >= %(rating)s', rating=rating_low)])\n",
"if VAR_26:\n",
"VAR_35.extend([_(u'Read Status = %(status)s', status=read_status)])\n",
"VAR_35.extend(ext for ext in VAR_41['include_extension'])\n",
"VAR_35.extend(ext for ext in VAR_41['exclude_extension'])\n",
"VAR_35 = ' + '.join(filter(None, VAR_35))\n",
"return VAR_35, VAR_39, VAR_40\n"
] | [
"def extend_search_term(searchterm, author_name, book_title, publisher,...\n",
"searchterm.extend((author_name.replace('|', ','), book_title, publisher))\n",
"if pub_start:\n",
"if pub_end:\n",
"searchterm.extend([_(u'Published after ') + format_date(datetime.strptime(\n pub_start, '%Y-%m-%d'), format='medium', locale=get_locale())])\n",
"pub_start = u''\n",
"elements = {'tag': db.Tags, 'serie': db.Series, 'shelf': ub.Shelf}\n",
"searchterm.extend([_(u'Published before ') + format_date(datetime.strptime(\n pub_end, '%Y-%m-%d'), format='medium', locale=get_locale())])\n",
"pub_end = u''\n",
"for key, db_element in elements.items():\n",
"tag_names = calibre_db.session.query(db_element).filter(db_element.id.in_(\n tags['include_' + key])).all()\n",
"language_names = calibre_db.session.query(db.Languages).filter(db.Languages\n .id.in_(tags['include_language'])).all()\n",
"searchterm.extend(tag.name for tag in tag_names)\n",
"if language_names:\n",
"tag_names = calibre_db.session.query(db_element).filter(db_element.id.in_(\n tags['exclude_' + key])).all()\n",
"language_names = calibre_db.speaking_language(language_names)\n",
"searchterm.extend(language.name for language in language_names)\n",
"searchterm.extend(tag.name for tag in tag_names)\n",
"language_names = calibre_db.session.query(db.Languages).filter(db.Languages\n .id.in_(tags['exclude_language'])).all()\n",
"if language_names:\n",
"language_names = calibre_db.speaking_language(language_names)\n",
"searchterm.extend(language.name for language in language_names)\n",
"if rating_high:\n",
"searchterm.extend([_(u'Rating <= %(rating)s', rating=rating_high)])\n",
"if rating_low:\n",
"searchterm.extend([_(u'Rating >= %(rating)s', rating=rating_low)])\n",
"if read_status:\n",
"searchterm.extend([_(u'Read Status = %(status)s', status=read_status)])\n",
"searchterm.extend(ext for ext in tags['include_extension'])\n",
"searchterm.extend(ext for ext in tags['exclude_extension'])\n",
"searchterm = ' + '.join(filter(None, searchterm))\n",
"return searchterm, pub_start, pub_end\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Return'"
] |
[
"@VAR_2.route('/get_authors_json', methods=['GET'])...\n",
"return calibre_db.get_typeahead(db.Authors, request.args.get('q'), ('|', ','))\n"
] | [
"@web.route('/get_authors_json', methods=['GET'])...\n",
"return calibre_db.get_typeahead(db.Authors, request.args.get('q'), ('|', ','))\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"@def_function.function...\n",
"return VAR_50 + 2 * VAR_56\n"
] | [
"@def_function.function...\n",
"return y + 2 * c\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_19(self, VAR_17):...\n",
"FUNC_0()\n",
"VAR_68 = '../res/img/folder.png'\n",
"log.i('Fetching album art for: %s' % VAR_17)\n",
"VAR_69 = os.path.join(cherry.config['media.basedir'], VAR_17)\n",
"if os.path.isfile(VAR_69):\n",
"VAR_99 = TinyTag.get(VAR_69, image=True)\n",
"VAR_66 = albumArtFilePath(VAR_17)\n",
"VAR_100 = VAR_99.get_image()\n",
"VAR_70 = self.albumartcache_load(VAR_66)\n",
"if VAR_100:\n",
"if VAR_70:\n",
"log.d('Image found in tag.')\n",
"VAR_17 = os.path.dirname(VAR_17)\n",
"VAR_53.response.headers['Content-Length'] = len(VAR_70)\n",
"VAR_64 = albumartfetcher.AlbumArtFetcher()\n",
"VAR_67 = {'Content-Type': 'image/jpg', 'Content-Length': len(VAR_100)}\n",
"return VAR_70\n",
"VAR_71 = os.path.join(cherry.config['media.basedir'], VAR_17)\n",
"VAR_53.response.headers.update(VAR_67)\n",
"VAR_67, VAR_20, VAR_72 = VAR_64.fetchLocal(VAR_71)\n",
"return VAR_100\n",
"if VAR_67:\n",
"if VAR_72:\n",
"if cherry.config['media.fetch_album_art']:\n",
"self.albumartcache_save(VAR_66, VAR_20)\n",
"VAR_53.response.headers.update(VAR_67)\n",
"VAR_123 = os.path.basename(VAR_17)\n",
"return VAR_20\n",
"VAR_124 = VAR_123\n",
"log.i(_('Fetching album art for keywords {keywords!r}').format(VAR_124=\n keywords))\n",
"VAR_67, VAR_20 = VAR_64.fetch(VAR_124)\n",
"if VAR_67:\n",
"VAR_53.response.headers.update(VAR_67)\n",
"self.albumartcache_save(VAR_66, VAR_20)\n",
"return VAR_20\n"
] | [
"def api_fetchalbumart(self, directory):...\n",
"_save_and_release_session()\n",
"default_folder_image = '../res/img/folder.png'\n",
"log.i('Fetching album art for: %s' % directory)\n",
"filepath = os.path.join(cherry.config['media.basedir'], directory)\n",
"if os.path.isfile(filepath):\n",
"tag = TinyTag.get(filepath, image=True)\n",
"b64imgpath = albumArtFilePath(directory)\n",
"image_data = tag.get_image()\n",
"img_data = self.albumartcache_load(b64imgpath)\n",
"if image_data:\n",
"if img_data:\n",
"log.d('Image found in tag.')\n",
"directory = os.path.dirname(directory)\n",
"cherrypy.response.headers['Content-Length'] = len(img_data)\n",
"fetcher = albumartfetcher.AlbumArtFetcher()\n",
"header = {'Content-Type': 'image/jpg', 'Content-Length': len(image_data)}\n",
"return img_data\n",
"localpath = os.path.join(cherry.config['media.basedir'], directory)\n",
"cherrypy.response.headers.update(header)\n",
"header, data, resized = fetcher.fetchLocal(localpath)\n",
"return image_data\n",
"if header:\n",
"if resized:\n",
"if cherry.config['media.fetch_album_art']:\n",
"self.albumartcache_save(b64imgpath, data)\n",
"cherrypy.response.headers.update(header)\n",
"foldername = os.path.basename(directory)\n",
"return data\n",
"keywords = foldername\n",
"log.i(_('Fetching album art for keywords {keywords!r}').format(keywords=\n keywords))\n",
"header, data = fetcher.fetch(keywords)\n",
"if header:\n",
"cherrypy.response.headers.update(header)\n",
"self.albumartcache_save(b64imgpath, data)\n",
"return data\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Expr'",
"Assign'",
"Return'",
"Condition",
"Condition",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Return'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Return'"
] |
[
"def __eq__(self, VAR_2):...\n",
"return str(self) == str(VAR_2)\n"
] | [
"def __eq__(self, other):...\n",
"return str(self) == str(other)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_72(self, VAR_212=None):...\n",
""
] | [
"def not_authorized(self, page=None):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_1(VAR_0):...\n",
"from MoinMoin import macro\n",
"VAR_3 = macro.getNames(VAR_0.cfg)\n",
"VAR_3.sort()\n",
"return VAR_3\n"
] | [
"def macro_list(request):...\n",
"from MoinMoin import macro\n",
"macros = macro.getNames(request.cfg)\n",
"macros.sort()\n",
"return macros\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"ImportFrom'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def __init__(self):...\n",
"self._subscribers = set()\n"
] | [
"def __init__(self):...\n",
"self._subscribers = set()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_8(self):...\n",
"VAR_10 = test.test_src_dir_path(VAR_0)\n",
"self.parser = saved_model_cli.create_parser()\n",
"VAR_11 = self.parser.parse_args(['show', '--dir', VAR_10, '--tag_set',\n 'serve', '--signature_def', 'serving_default'])\n",
"saved_model_cli.show(VAR_11)\n",
"VAR_12 = out.getvalue().strip()\n",
"VAR_19 = 'string'\n",
"self.assertEqual(VAR_12, VAR_19)\n",
"self.assertEqual(err.getvalue().strip(), '')\n"
] | [
"def testShowCommandInputsOutputs(self):...\n",
"base_path = test.test_src_dir_path(SAVED_MODEL_PATH)\n",
"self.parser = saved_model_cli.create_parser()\n",
"args = self.parser.parse_args(['show', '--dir', base_path, '--tag_set',\n 'serve', '--signature_def', 'serving_default'])\n",
"saved_model_cli.show(args)\n",
"output = out.getvalue().strip()\n",
"expected_output = \"\"\"The given SavedModel SignatureDef contains the following input(s):\n inputs['x'] tensor_info:\n dtype: DT_FLOAT\n shape: (-1, 1)\n name: x:0\nThe given SavedModel SignatureDef contains the following output(s):\n outputs['y'] tensor_info:\n dtype: DT_FLOAT\n shape: (-1, 1)\n name: y:0\nMethod name is: tensorflow/serving/predict\"\"\"\n",
"self.assertEqual(output, expected_output)\n",
"self.assertEqual(err.getvalue().strip(), '')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"@FUNC_2.__func__...\n",
"\"\"\"docstring\"\"\"\n",
"self._submit()\n"
] | [
"@whitelist.__func__...\n",
"\"\"\"docstring\"\"\"\n",
"self._submit()\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'"
] |
[
"def FUNC_14(self):...\n",
"VAR_62 = self.useroptions.forUser(self.getUserId())\n",
"VAR_62.setOption('last_time_online', int(time.time()))\n"
] | [
"def api_heartbeat(self):...\n",
"uo = self.useroptions.forUser(self.getUserId())\n",
"uo.setOption('last_time_online', int(time.time()))\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"@VAR_0.route('/api/jobs', methods=['GET'])...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_73 = {'ids': request.args.get('ids'), 'status': request.args.get(\n 'status'), 'application': request.args.get('application'), 'backend':\n request.args.get('backend'), 'recent': request.args.get('recent'),\n 'length': request.args.get('length'), 'offset': request.args.get(\n 'offset'), 'auto-validate-ids': request.args.get('auto-validate-ids')}\n",
"VAR_93 = FUNC_58(f'/internal/jobs', 'get', VAR_73=params)\n",
"return jsonify({'success': False, 'message': str(err)}), 400\n",
"return jsonify(VAR_93)\n"
] | [
"@gui.route('/api/jobs', methods=['GET'])...\n",
"\"\"\"docstring\"\"\"\n",
"params = {'ids': request.args.get('ids'), 'status': request.args.get(\n 'status'), 'application': request.args.get('application'), 'backend':\n request.args.get('backend'), 'recent': request.args.get('recent'),\n 'length': request.args.get('length'), 'offset': request.args.get(\n 'offset'), 'auto-validate-ids': request.args.get('auto-validate-ids')}\n",
"jobs_info = query_internal_api(f'/internal/jobs', 'get', params=params)\n",
"return jsonify({'success': False, 'message': str(err)}), 400\n",
"return jsonify(jobs_info)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Return'",
"Return'"
] |
[
"\"\"\"string\"\"\"\n",
"import ast\n",
"import sys\n",
"from datetime import datetime\n",
"from bson import ObjectId\n",
"def FUNC_0(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1 = CLASS_1()\n",
"VAR_1.visit(ast.parse(VAR_0))\n",
"VAR_8 = CLASS_0(VAR_8)\n",
"return VAR_1.mongo_query\n",
"VAR_8.__traceback__ = sys.exc_info()[2]\n"
] | [
"\"\"\"\n eve.io.mongo.parser\n ~~~~~~~~~~~~~~~~~~~\n\n This module implements a Python-to-Mongo syntax parser. Allows the MongoDB\n data-layer to seamlessly respond to a Python-like query.\n\n :copyright: (c) 2017 by Nicola Iarocci.\n :license: BSD, see LICENSE for more details.\n\"\"\"\n",
"import ast\n",
"import sys\n",
"from datetime import datetime\n",
"from bson import ObjectId\n",
"def parse(expression):...\n",
"\"\"\"docstring\"\"\"\n",
"v = MongoVisitor()\n",
"v.visit(ast.parse(expression))\n",
"e = ParseError(e)\n",
"return v.mongo_query\n",
"e.__traceback__ = sys.exc_info()[2]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Assign'",
"Return'",
"Assign'"
] |
[
"def FUNC_2(self, VAR_2: str, VAR_3: SigningKey) ->None:...\n",
"\"\"\"docstring\"\"\"\n",
"async def FUNC_10(VAR_6, VAR_7, VAR_8=False, **VAR_9):...\n",
"self.assertTrue(VAR_8)\n",
"self.assertEqual(VAR_6, VAR_2)\n",
"VAR_4 = '%s:%s' % (VAR_3.alg, VAR_3.version)\n",
"self.assertEqual(VAR_7, '/_matrix/key/v2/server/%s' % (urllib.parse.quote(\n VAR_4),))\n",
"VAR_26 = {'server_name': VAR_2, 'old_verify_keys': {}, 'valid_until_ts': \n 200 * 1000, 'verify_keys': {VAR_4: {'key': signedjson.key.\n encode_verify_key_base64(VAR_3.verify_key)}}}\n",
"sign_json(VAR_26, VAR_2, VAR_3)\n",
"return VAR_26\n"
] | [
"def expect_outgoing_key_request(self, server_name: str, signing_key: SigningKey...\n",
"\"\"\"docstring\"\"\"\n",
"async def get_json(destination, path, ignore_backoff=False, **kwargs):...\n",
"self.assertTrue(ignore_backoff)\n",
"self.assertEqual(destination, server_name)\n",
"key_id = '%s:%s' % (signing_key.alg, signing_key.version)\n",
"self.assertEqual(path, '/_matrix/key/v2/server/%s' % (urllib.parse.quote(\n key_id),))\n",
"response = {'server_name': server_name, 'old_verify_keys': {},\n 'valid_until_ts': 200 * 1000, 'verify_keys': {key_id: {'key':\n signedjson.key.encode_verify_key_base64(signing_key.verify_key)}}}\n",
"sign_json(response, server_name, signing_key)\n",
"return response\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"AsyncFunctionDef'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_4(self, VAR_3, VAR_4):...\n",
"return django.forms.EmailField(**options)\n"
] | [
"def create_email_field(self, field, options):...\n",
"return django.forms.EmailField(**options)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_19(VAR_43):...\n",
"VAR_54 = json.loads(VAR_43)\n",
"VAR_55 = next(item for item in VAR_54['dates'] if item['type'] == 'modified')[\n 'start_date']\n",
"VAR_55 = VAR_55.split('.')[0]\n",
"VAR_56 = datetime.strptime(VAR_55, '%Y-%m-%dT%H:%M:%S')\n",
"VAR_57 = VAR_56.replace(tzinfo=timezone(timedelta(0))).timestamp()\n",
"return str(int(VAR_57))\n"
] | [
"def parse_date(json_body):...\n",
"json_response = json.loads(json_body)\n",
"date = next(item for item in json_response['dates'] if item['type'] ==\n 'modified')['start_date']\n",
"date = date.split('.')[0]\n",
"parsed_date = datetime.strptime(date, '%Y-%m-%dT%H:%M:%S')\n",
"epoch = parsed_date.replace(tzinfo=timezone(timedelta(0))).timestamp()\n",
"return str(int(epoch))\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_27(self):...\n",
"VAR_29 = coreapi.Document(VAR_5='', title='Example API', content={'animals':\n {'dog': {'vet': {'list': coreapi.Link(url='/animals/dog/{id}/vet',\n action='get', fields=[coreapi.Field('id', required=True, location=\n 'path', schema=coreschema.String())])}, 'read': coreapi.Link(url=\n '/animals/dog/{id}', action='get', fields=[coreapi.Field('id', required\n =True, location='path', schema=coreschema.String())])}, 'cat': {'list':\n coreapi.Link(url='/animals/cat/', action='get', fields=[coreapi.Field(\n 'id', required=True, location='path', schema=coreschema.String())]),\n 'create': coreapi.Link(url='/aniamls/cat', action='post', fields=[])}},\n 'farmers': {'silo': {'soy': {'list': coreapi.Link(url=\n '/farmers/silo/{id}/soy', action='get', fields=[coreapi.Field('id',\n required=True, location='path', schema=coreschema.String())])}, 'list':\n coreapi.Link(url='/farmers/silo', action='get', fields=[coreapi.Field(\n 'id', required=True, location='path', schema=coreschema.String())])}}})\n",
"VAR_30 = VAR_29['animals']\n",
"VAR_31 = schema_links(VAR_30)\n",
"assert len(VAR_31) is 4\n",
"assert 'cat > create' in VAR_31\n",
"assert 'cat > list' in VAR_31\n",
"assert 'dog > read' in VAR_31\n",
"assert 'dog > vet > list' in VAR_31\n",
"VAR_30 = VAR_29['farmers']\n",
"VAR_31 = schema_links(VAR_30)\n",
"assert len(VAR_31) is 2\n",
"assert 'silo > list' in VAR_31\n",
"assert 'silo > soy > list' in VAR_31\n"
] | [
"def test_multiple_resources_with_multiple_nested_routes(self):...\n",
"schema = coreapi.Document(url='', title='Example API', content={'animals':\n {'dog': {'vet': {'list': coreapi.Link(url='/animals/dog/{id}/vet',\n action='get', fields=[coreapi.Field('id', required=True, location=\n 'path', schema=coreschema.String())])}, 'read': coreapi.Link(url=\n '/animals/dog/{id}', action='get', fields=[coreapi.Field('id', required\n =True, location='path', schema=coreschema.String())])}, 'cat': {'list':\n coreapi.Link(url='/animals/cat/', action='get', fields=[coreapi.Field(\n 'id', required=True, location='path', schema=coreschema.String())]),\n 'create': coreapi.Link(url='/aniamls/cat', action='post', fields=[])}},\n 'farmers': {'silo': {'soy': {'list': coreapi.Link(url=\n '/farmers/silo/{id}/soy', action='get', fields=[coreapi.Field('id',\n required=True, location='path', schema=coreschema.String())])}, 'list':\n coreapi.Link(url='/farmers/silo', action='get', fields=[coreapi.Field(\n 'id', required=True, location='path', schema=coreschema.String())])}}})\n",
"section = schema['animals']\n",
"flat_links = schema_links(section)\n",
"assert len(flat_links) is 4\n",
"assert 'cat > create' in flat_links\n",
"assert 'cat > list' in flat_links\n",
"assert 'dog > read' in flat_links\n",
"assert 'dog > vet > list' in flat_links\n",
"section = schema['farmers']\n",
"flat_links = schema_links(section)\n",
"assert len(flat_links) is 2\n",
"assert 'silo > list' in flat_links\n",
"assert 'silo > soy > list' in flat_links\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assert'",
"Assert'",
"Assert'",
"Assert'",
"Assert'",
"Assign'",
"Assign'",
"Assert'",
"Assert'",
"Assert'"
] |
[
"def FUNC_35(self):...\n",
"VAR_128 = Flags()\n",
"VAR_128.merge(self.get_glossary_flags())\n",
"VAR_86 = self.cleaned_data.get('variant')\n",
"if VAR_86:\n",
"VAR_128.set_value('variant', VAR_86.source)\n",
"return {'context': self.cleaned_data.get('context', ''), 'source': self.\n cleaned_data['source'], 'target': self.cleaned_data.get('target'),\n 'extra_flags': VAR_128.format(), 'explanation': self.cleaned_data.get(\n 'explanation', ''), 'auto_context': self.cleaned_data.get(\n 'auto_context', False)}\n"
] | [
"def as_kwargs(self):...\n",
"flags = Flags()\n",
"flags.merge(self.get_glossary_flags())\n",
"variant = self.cleaned_data.get('variant')\n",
"if variant:\n",
"flags.set_value('variant', variant.source)\n",
"return {'context': self.cleaned_data.get('context', ''), 'source': self.\n cleaned_data['source'], 'target': self.cleaned_data.get('target'),\n 'extra_flags': flags.format(), 'explanation': self.cleaned_data.get(\n 'explanation', ''), 'auto_context': self.cleaned_data.get(\n 'auto_context', False)}\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Return'"
] |
[
"@VAR_0.teardown_request...\n",
"g.conn.close_all()\n"
] | [
"@app.teardown_request...\n",
"g.conn.close_all()\n"
] | [
0,
0
] | [
"Condition",
"Expr'"
] |
[
"@CLASS_4('back')...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_20 = jinja.render('back.html', title='Suspended: ' + urllib.parse.\n unquote(url.fragment()))\n",
"return 'text/html', VAR_20\n"
] | [
"@add_handler('back')...\n",
"\"\"\"docstring\"\"\"\n",
"src = jinja.render('back.html', title='Suspended: ' + urllib.parse.unquote(\n url.fragment()))\n",
"return 'text/html', src\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_22():...\n",
"if not URL.verify(request, hmac_key=session.hmac_key):\n",
"VAR_3 = FUNC_5()\n",
"VAR_5 = os.path.join(apath(VAR_3, VAR_122=request), 'DISABLED')\n",
"if is_gae:\n",
"return SPAN(T('Not supported'), _style='color:yellow')\n",
"if os.path.exists(VAR_5):\n",
"os.unlink(VAR_5)\n",
"if PY2:\n",
"return SPAN(T('Disable'), _style='color:green')\n",
"FUNC_2(VAR_5, 'wb').write(\"\"\"disabled: True\ntime-disabled: %s\"\"\" % request.now)\n",
"VAR_200 = \"\"\"disabled: True\ntime-disabled: %s\"\"\" % request.now\n",
"return SPAN(T('Enable'), _style='color:red')\n",
"FUNC_2(VAR_5, 'wb').write(VAR_200.encode('utf-8'))\n"
] | [
"def enable():...\n",
"if not URL.verify(request, hmac_key=session.hmac_key):\n",
"app = get_app()\n",
"filename = os.path.join(apath(app, r=request), 'DISABLED')\n",
"if is_gae:\n",
"return SPAN(T('Not supported'), _style='color:yellow')\n",
"if os.path.exists(filename):\n",
"os.unlink(filename)\n",
"if PY2:\n",
"return SPAN(T('Disable'), _style='color:green')\n",
"safe_open(filename, 'wb').write(\"\"\"disabled: True\ntime-disabled: %s\"\"\" %\n request.now)\n",
"str_ = \"\"\"disabled: True\ntime-disabled: %s\"\"\" % request.now\n",
"return SPAN(T('Enable'), _style='color:red')\n",
"safe_open(filename, 'wb').write(str_.encode('utf-8'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Expr'",
"Condition",
"Return'",
"Expr'",
"Assign'",
"Return'",
"Expr'"
] |
[
"@FUNC_0...\n",
"return AcmeHandler(self)\n"
] | [
"@cache_in_self...\n",
"return AcmeHandler(self)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_77(VAR_94, VAR_132=True):...\n",
"\"\"\"docstring\"\"\"\n",
"import copy\n",
"def FUNC_119(VAR_174):...\n",
"for df in VAR_174.meta.get('fields', {'no_copy': 1}):\n",
"if hasattr(VAR_174, df.fieldname):\n",
"VAR_195 = ['name', 'owner', 'creation', 'modified', 'modified_by']\n",
"VAR_174.set(df.fieldname, None)\n",
"if not VAR_1.flags.in_test:\n",
"VAR_195.append('docstatus')\n",
"if not isinstance(VAR_94, dict):\n",
"VAR_174 = VAR_94.as_dict()\n",
"VAR_174 = VAR_94\n",
"VAR_196 = FUNC_45(FUNC_113.deepcopy(VAR_174))\n",
"VAR_196.set('__islocal', 1)\n",
"for VAR_97 in (VAR_195 + ['amended_from', 'amendment_date']):\n",
"VAR_196.set(VAR_97, None)\n",
"if not VAR_132:\n",
"FUNC_119(VAR_196)\n",
"for i, VAR_174 in enumerate(VAR_196.get_all_children()):\n",
"VAR_174.set('__islocal', 1)\n",
"return VAR_196\n",
"for VAR_97 in VAR_195:\n",
"VAR_174.set(VAR_97, None)\n",
"if not VAR_132:\n",
"FUNC_119(VAR_174)\n"
] | [
"def copy_doc(doc, ignore_no_copy=True):...\n",
"\"\"\"docstring\"\"\"\n",
"import copy\n",
"def remove_no_copy_fields(d):...\n",
"for df in d.meta.get('fields', {'no_copy': 1}):\n",
"if hasattr(d, df.fieldname):\n",
"fields_to_clear = ['name', 'owner', 'creation', 'modified', 'modified_by']\n",
"d.set(df.fieldname, None)\n",
"if not local.flags.in_test:\n",
"fields_to_clear.append('docstatus')\n",
"if not isinstance(doc, dict):\n",
"d = doc.as_dict()\n",
"d = doc\n",
"newdoc = get_doc(copy.deepcopy(d))\n",
"newdoc.set('__islocal', 1)\n",
"for fieldname in (fields_to_clear + ['amended_from', 'amendment_date']):\n",
"newdoc.set(fieldname, None)\n",
"if not ignore_no_copy:\n",
"remove_no_copy_fields(newdoc)\n",
"for i, d in enumerate(newdoc.get_all_children()):\n",
"d.set('__islocal', 1)\n",
"return newdoc\n",
"for fieldname in fields_to_clear:\n",
"d.set(fieldname, None)\n",
"if not ignore_no_copy:\n",
"remove_no_copy_fields(d)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Import'",
"FunctionDef'",
"For",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"For",
"Expr'",
"Condition",
"Expr'",
"For",
"Expr'",
"Return'",
"For",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_8(VAR_5):...\n",
"import os\n",
"VAR_28 = os.path.dirname(__file__)\n",
"VAR_29 = VAR_5\n",
"VAR_30 = os.path.join(VAR_28, VAR_29)\n",
"return VAR_30\n"
] | [
"def get_absolute_path(path):...\n",
"import os\n",
"script_dir = os.path.dirname(__file__)\n",
"rel_path = path\n",
"abs_file_path = os.path.join(script_dir, rel_path)\n",
"return abs_file_path\n"
] | [
0,
0,
0,
0,
1,
0
] | [
"FunctionDef'",
"Import'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_30(self):...\n",
"return self._has_role(constants.ROLE_DELETE_BOOKS)\n"
] | [
"def role_delete_books(self):...\n",
"return self._has_role(constants.ROLE_DELETE_BOOKS)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"async def FUNC_7(self, VAR_17: str, VAR_16: str, VAR_36: Optional[QueryArgs...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_10 = CLASS_0(VAR_15='PUT', VAR_17=destination, VAR_16=path, VAR_20=args,\n VAR_19=json_data_callback, VAR_18=data)\n",
"VAR_12 = self.clock.time_msec()\n",
"VAR_11 = await self._send_request_with_optional_trailing_slash(VAR_10,\n VAR_26, VAR_32=backoff_on_404, VAR_31=ignore_backoff, VAR_30=\n long_retries, VAR_29=timeout)\n",
"if VAR_29 is not None:\n",
"VAR_61 = VAR_29 / 1000\n",
"VAR_61 = self.default_timeout\n",
"VAR_47 = await FUNC_0(self.reactor, VAR_61, VAR_10, VAR_11, VAR_12)\n",
"return VAR_47\n"
] | [
"async def put_json(self, destination: str, path: str, args: Optional[...\n",
"\"\"\"docstring\"\"\"\n",
"request = MatrixFederationRequest(method='PUT', destination=destination,\n path=path, query=args, json_callback=json_data_callback, json=data)\n",
"start_ms = self.clock.time_msec()\n",
"response = await self._send_request_with_optional_trailing_slash(request,\n try_trailing_slash_on_400, backoff_on_404=backoff_on_404,\n ignore_backoff=ignore_backoff, long_retries=long_retries, timeout=timeout)\n",
"if timeout is not None:\n",
"_sec_timeout = timeout / 1000\n",
"_sec_timeout = self.default_timeout\n",
"body = await _handle_json_response(self.reactor, _sec_timeout, request,\n response, start_ms)\n",
"return body\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_21(self):...\n",
"self.assert_expected_unicode(self.folder.t, 'CheckUnicodeInserts.html')\n"
] | [
"def testUnicodeInserts(self):...\n",
"self.assert_expected_unicode(self.folder.t, 'CheckUnicodeInserts.html')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |