lines (sequence, lengths 1-383) | raw_lines (sequence, lengths 1-383) | label (sequence, lengths 1-383) | type (sequence, lengths 1-383) |
---|---|---|---|
[
"def FUNC_15(VAR_3, **VAR_4):...\n",
"\"\"\"docstring\"\"\"\n",
"self.fail(\"shouldn't ever get here\")\n"
] | [
"def _callback(request, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"self.fail(\"shouldn't ever get here\")\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"@VAR_0.route('/ajax/getcustomenum/<int:c_id>')...\n",
"VAR_80 = list()\n",
"VAR_32 = calibre_db.session.query(db.Custom_Columns).filter(db.\n Custom_Columns.id == VAR_43).filter(db.Custom_Columns.datatype.notin_(\n db.cc_exceptions)).one_or_none()\n",
"VAR_80.append({'value': '', 'text': ''})\n",
"for idx, en in enumerate(VAR_32.get_display_dict()['enum_values']):\n",
"VAR_80.append({'value': en, 'text': en})\n",
"return json.dumps(VAR_80)\n"
] | [
"@editbook.route('/ajax/getcustomenum/<int:c_id>')...\n",
"ret = list()\n",
"cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.\n id == c_id).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)\n ).one_or_none()\n",
"ret.append({'value': '', 'text': ''})\n",
"for idx, en in enumerate(cc.get_display_dict()['enum_values']):\n",
"ret.append({'value': en, 'text': en})\n",
"return json.dumps(ret)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Expr'",
"For",
"Expr'",
"Return'"
] |
[
"@login_required...\n",
"VAR_15 = Topic.objects.filter(VAR_7=request.user, category_id=settings.\n ST_TOPIC_PRIVATE_CATEGORY_PK).exclude(topics_private__user=request.user)\n",
"VAR_15 = yt_paginate(VAR_15, per_page=config.topics_per_page, page_number=\n request.GET.get('page', 1))\n",
"return render(VAR_1=request, template_name=\n 'spirit/topic/private/index_author.html', context={'topics': topics})\n"
] | [
"@login_required...\n",
"topics = Topic.objects.filter(user=request.user, category_id=settings.\n ST_TOPIC_PRIVATE_CATEGORY_PK).exclude(topics_private__user=request.user)\n",
"topics = yt_paginate(topics, per_page=config.topics_per_page, page_number=\n request.GET.get('page', 1))\n",
"return render(request=request, template_name=\n 'spirit/topic/private/index_author.html', context={'topics': topics})\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_6(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3 = self.client.post('/password_reset/', {'email':\n '[email protected]'})\n",
"self.assertEqual(VAR_3.status_code, 302)\n",
"self.assertEqual(len(mail.outbox), 1)\n",
"self.assertTrue('http://' in mail.outbox[0].body)\n",
"self.assertEqual(VAR_23.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)\n",
"self.assertFalse(mail.outbox[0].message().is_multipart())\n"
] | [
"def test_email_found(self):...\n",
"\"\"\"docstring\"\"\"\n",
"response = self.client.post('/password_reset/', {'email':\n '[email protected]'})\n",
"self.assertEqual(response.status_code, 302)\n",
"self.assertEqual(len(mail.outbox), 1)\n",
"self.assertTrue('http://' in mail.outbox[0].body)\n",
"self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)\n",
"self.assertFalse(mail.outbox[0].message().is_multipart())\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_29(self):...\n",
""
] | [
"def about_us(self):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_64(VAR_122=False, VAR_123=False):...\n",
"\"\"\"docstring\"\"\"\n",
"if getattr(VAR_19, 'in_install_db', True):\n",
"return []\n",
"if not VAR_12:\n",
"FUNC_5()\n",
"if not VAR_1.all_apps:\n",
"VAR_1.all_apps = FUNC_10().get_value('all_apps', FUNC_63)\n",
"VAR_190 = json.loads(VAR_12.get_global('installed_apps') or '[]')\n",
"if VAR_122:\n",
"VAR_190 = [VAR_219 for VAR_219 in VAR_1.all_apps if VAR_219 in VAR_190]\n",
"if VAR_123:\n",
"if 'frappe' in VAR_190:\n",
"return VAR_190\n",
"VAR_190.remove('frappe')\n",
"VAR_190.append('frappe')\n"
] | [
"def get_installed_apps(sort=False, frappe_last=False):...\n",
"\"\"\"docstring\"\"\"\n",
"if getattr(flags, 'in_install_db', True):\n",
"return []\n",
"if not db:\n",
"connect()\n",
"if not local.all_apps:\n",
"local.all_apps = cache().get_value('all_apps', get_all_apps)\n",
"installed = json.loads(db.get_global('installed_apps') or '[]')\n",
"if sort:\n",
"installed = [app for app in local.all_apps if app in installed]\n",
"if frappe_last:\n",
"if 'frappe' in installed:\n",
"return installed\n",
"installed.remove('frappe')\n",
"installed.append('frappe')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Return'",
"Expr'",
"Expr'"
] |
[
"@parameterized.named_parameters(('non_tfrt', False))...\n",
"self.parser = saved_model_cli.create_parser()\n",
"VAR_10 = test.test_src_dir_path(VAR_0)\n",
"VAR_42 = os.path.join(test.get_temp_dir(), 'new_dir')\n",
"VAR_11 = self.parser.parse_args(['run', '--dir', VAR_10, '--tag_set',\n 'serve', '--signature_def', 'regress_x_to_y', '--input_examples',\n 'inputs={\"x\":8.0,\"x2\":5.0}', '--outdir', VAR_42] + (['--use_tfrt'] if\n VAR_5 else []))\n",
"saved_model_cli.run(VAR_11)\n"
] | [
"@parameterized.named_parameters(('non_tfrt', False))...\n",
"self.parser = saved_model_cli.create_parser()\n",
"base_path = test.test_src_dir_path(SAVED_MODEL_PATH)\n",
"output_dir = os.path.join(test.get_temp_dir(), 'new_dir')\n",
"args = self.parser.parse_args(['run', '--dir', base_path, '--tag_set',\n 'serve', '--signature_def', 'regress_x_to_y', '--input_examples',\n 'inputs={\"x\":8.0,\"x2\":5.0}', '--outdir', output_dir] + (['--use_tfrt'] if\n use_tfrt else []))\n",
"saved_model_cli.run(args)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"@def_function.function...\n",
"if VAR_55:\n",
"return VAR_53 + VAR_54\n",
"return VAR_53 * VAR_54\n"
] | [
"@def_function.function...\n",
"if c:\n",
"return a + b\n",
"return a * b\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Return'",
"Return'"
] |
[
"@VAR_1.route('/admin/book/convert/<int:book_id>', methods=['POST'])...\n",
"VAR_76 = VAR_32.form.get('book_format_from', None)\n",
"VAR_77 = VAR_32.form.get('book_format_to', None)\n",
"if VAR_76 is None or VAR_77 is None:\n",
"flash(_(u'Source or destination format for conversion missing'), category=\n 'error')\n",
"VAR_2.info('converting: book id: %s from: %s to: %s', VAR_15, VAR_76, VAR_77)\n",
"return redirect(url_for('editbook.edit_book', VAR_15=book_id))\n",
"VAR_78 = helper.convert_book_format(VAR_15, config.config_calibre_dir,\n VAR_76.upper(), VAR_77.upper(), current_user.name)\n",
"if VAR_78 is None:\n",
"flash(_(u'Book successfully queued for converting to %(book_format)s',\n VAR_16=book_format_to), category='success')\n",
"flash(_(u'There was an error converting this book: %(res)s', res=rtn),\n category='error')\n",
"return redirect(url_for('editbook.edit_book', VAR_15=book_id))\n"
] | [
"@editbook.route('/admin/book/convert/<int:book_id>', methods=['POST'])...\n",
"book_format_from = request.form.get('book_format_from', None)\n",
"book_format_to = request.form.get('book_format_to', None)\n",
"if book_format_from is None or book_format_to is None:\n",
"flash(_(u'Source or destination format for conversion missing'), category=\n 'error')\n",
"log.info('converting: book id: %s from: %s to: %s', book_id,\n book_format_from, book_format_to)\n",
"return redirect(url_for('editbook.edit_book', book_id=book_id))\n",
"rtn = helper.convert_book_format(book_id, config.config_calibre_dir,\n book_format_from.upper(), book_format_to.upper(), current_user.name)\n",
"if rtn is None:\n",
"flash(_(u'Book successfully queued for converting to %(book_format)s',\n book_format=book_format_to), category='success')\n",
"flash(_(u'There was an error converting this book: %(res)s', res=rtn),\n category='error')\n",
"return redirect(url_for('editbook.edit_book', book_id=book_id))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Return'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_5(self):...\n",
"self.set_doctype_roles()\n"
] | [
"def before_insert(self):...\n",
"self.set_doctype_roles()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_10(self, VAR_16, VAR_58):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.whitelist_tags is not None and VAR_16.tag not in self.whitelist_tags:\n",
"return False\n",
"VAR_77, VAR_78, VAR_79, VAR_80, VAR_81 = urlsplit(VAR_58)\n",
"VAR_78 = VAR_78.lower().split(':', 1)[0]\n",
"if VAR_77 not in ('http', 'https'):\n",
"return False\n",
"if VAR_78 in self.host_whitelist:\n",
"return True\n",
"return False\n"
] | [
"def allow_embedded_url(self, el, url):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.whitelist_tags is not None and el.tag not in self.whitelist_tags:\n",
"return False\n",
"scheme, netloc, path, query, fragment = urlsplit(url)\n",
"netloc = netloc.lower().split(':', 1)[0]\n",
"if scheme not in ('http', 'https'):\n",
"return False\n",
"if netloc in self.host_whitelist:\n",
"return True\n",
"return False\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_0(self, VAR_2, VAR_3, VAR_4):...\n",
"self.other_user_id = self.register_user('otheruser', 'pass')\n",
"self.other_access_token = self.login('otheruser', 'pass')\n"
] | [
"def prepare(self, reactor, clock, hs):...\n",
"self.other_user_id = self.register_user('otheruser', 'pass')\n",
"self.other_access_token = self.login('otheruser', 'pass')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"def FUNC_34(VAR_34):...\n",
"VAR_66 = \"'\" * isinstance(VAR_34, str)\n",
"return VAR_66 + str(VAR_34) + VAR_66\n"
] | [
"def _maybe_add_quotes(value):...\n",
"is_quotes = \"'\" * isinstance(value, str)\n",
"return is_quotes + str(value) + is_quotes\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_38(VAR_47):...\n",
"return VAR_47\n"
] | [
"def fake_wrapper_session(sess):...\n",
"return sess\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_93(self, VAR_177, VAR_178, VAR_179, VAR_180=False):...\n",
""
] | [
"def get_query(self, field, op, value, refsearch=False):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_33(self, VAR_39, VAR_3, VAR_36, VAR_37):...\n",
"VAR_39.cert_reqs = 'CERT_NONE'\n",
"VAR_39.ca_certs = None\n"
] | [
"def cert_verify(self, conn, url, verify, cert):...\n",
"conn.cert_reqs = 'CERT_NONE'\n",
"conn.ca_certs = None\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"def __init__(self, VAR_19, VAR_20, VAR_21):...\n",
"if not VAR_20.strip():\n",
"VAR_20 = 'nothing'\n",
"super().__init__(VAR_19, VAR_20, VAR_21, self._TRAVERSER)\n"
] | [
"def __init__(self, name, expr, engine):...\n",
"if not expr.strip():\n",
"expr = 'nothing'\n",
"super().__init__(name, expr, engine, self._TRAVERSER)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Expr'"
] |
[
"@VAR_2.route('/logout')...\n",
"if VAR_87 is not None and VAR_87.is_authenticated:\n",
"ub.delete_user_session(VAR_87.id, VAR_91.get('_id', ''))\n",
"VAR_3.debug(u'User logged out')\n",
"logout_user()\n",
"return redirect(url_for('web.login'))\n",
"if VAR_0['oauth'] and (config.config_login_type == 2 or config.\n",
"logout_oauth_user()\n"
] | [
"@web.route('/logout')...\n",
"if current_user is not None and current_user.is_authenticated:\n",
"ub.delete_user_session(current_user.id, flask_session.get('_id', ''))\n",
"log.debug(u'User logged out')\n",
"logout_user()\n",
"return redirect(url_for('web.login'))\n",
"if feature_support['oauth'] and (config.config_login_type == 2 or config.\n",
"logout_oauth_user()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Return'",
"Condition",
"Expr'"
] |
[
"def FUNC_0(VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_14 = VAR_3.adjusted(QUrl.NormalizePathSegments | QUrl.StripTrailingSlash)\n",
"if VAR_14 != VAR_3:\n",
"VAR_15 = VAR_3.path()\n",
"VAR_16 = VAR_3.host()\n",
"VAR_17 = urlutils.query_string(VAR_3)\n",
"log.misc.debug('url: {}, path: {}, host {}'.format(VAR_3.toDisplayString(),\n VAR_15, VAR_16))\n",
"if not VAR_15 or not VAR_16:\n",
"VAR_29 = QUrl()\n",
"VAR_30 = VAR_2[VAR_16]\n",
"VAR_31, VAR_32 = VAR_30(VAR_3)\n",
"assert VAR_31 is not None, VAR_3\n",
"VAR_29.setScheme('qute')\n",
"if VAR_31 == 'text/html' and isinstance(VAR_32, str):\n",
"if VAR_16:\n",
"VAR_32 = VAR_32.encode('utf-8', errors='xmlcharrefreplace')\n",
"return VAR_31, VAR_32\n",
"VAR_29.setHost(VAR_16)\n",
"VAR_29.setHost(VAR_15)\n",
"VAR_29.setPath('/')\n",
"if VAR_17:\n",
"VAR_29.setQuery(VAR_17)\n",
"if VAR_29.host():\n"
] | [
"def data_for_url(url):...\n",
"\"\"\"docstring\"\"\"\n",
"norm_url = url.adjusted(QUrl.NormalizePathSegments | QUrl.StripTrailingSlash)\n",
"if norm_url != url:\n",
"path = url.path()\n",
"host = url.host()\n",
"query = urlutils.query_string(url)\n",
"log.misc.debug('url: {}, path: {}, host {}'.format(url.toDisplayString(),\n path, host))\n",
"if not path or not host:\n",
"new_url = QUrl()\n",
"handler = _HANDLERS[host]\n",
"mimetype, data = handler(url)\n",
"assert mimetype is not None, url\n",
"new_url.setScheme('qute')\n",
"if mimetype == 'text/html' and isinstance(data, str):\n",
"if host:\n",
"data = data.encode('utf-8', errors='xmlcharrefreplace')\n",
"return mimetype, data\n",
"new_url.setHost(host)\n",
"new_url.setHost(path)\n",
"new_url.setPath('/')\n",
"if query:\n",
"new_url.setQuery(query)\n",
"if new_url.host():\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assert'",
"Expr'",
"Condition",
"Condition",
"Assign'",
"Return'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Condition"
] |
[
"def FUNC_13(VAR_16):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_39 = {}\n",
"VAR_40 = VAR_16.split(';')\n",
"for input_raw in filter(bool, VAR_40):\n",
"VAR_67 = re.match('([^=]+)=([^\\\\[\\\\]]+)\\\\[([^\\\\[\\\\]]+)\\\\]$', input_raw)\n",
"return VAR_39\n",
"if VAR_67:\n",
"VAR_39[VAR_67.group(1)] = VAR_67.group(2), VAR_67.group(3)\n",
"VAR_67 = re.match('([^=]+)=([^\\\\[\\\\]]+)$', input_raw)\n",
"if VAR_67:\n",
"VAR_39[VAR_67.group(1)] = VAR_67.group(2), None\n"
] | [
"def preprocess_inputs_arg_string(inputs_str):...\n",
"\"\"\"docstring\"\"\"\n",
"input_dict = {}\n",
"inputs_raw = inputs_str.split(';')\n",
"for input_raw in filter(bool, inputs_raw):\n",
"match = re.match('([^=]+)=([^\\\\[\\\\]]+)\\\\[([^\\\\[\\\\]]+)\\\\]$', input_raw)\n",
"return input_dict\n",
"if match:\n",
"input_dict[match.group(1)] = match.group(2), match.group(3)\n",
"match = re.match('([^=]+)=([^\\\\[\\\\]]+)$', input_raw)\n",
"if match:\n",
"input_dict[match.group(1)] = match.group(2), None\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"For",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'"
] |
[
"@FUNC_0...\n",
"return DeviceMessageHandler(self)\n"
] | [
"@cache_in_self...\n",
"return DeviceMessageHandler(self)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_25(VAR_14, VAR_16, VAR_19):...\n",
"VAR_32 = calibre_db.session.query(db.Custom_Columns).filter(db.\n Custom_Columns.datatype.notin_(db.cc_exceptions)).all()\n",
"return FUNC_26(VAR_14, VAR_16, VAR_19, VAR_32)\n"
] | [
"def edit_all_cc_data(book_id, book, to_save):...\n",
"cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.\n datatype.notin_(db.cc_exceptions)).all()\n",
"return edit_cc_data(book_id, book, to_save, cc)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"@VAR_8.route('/enable_sharing/<path:path>', methods=['GET'])...\n",
"if VAR_17 == 'None':\n",
"VAR_17 = None\n",
"VAR_8.interface.config['share_url'] = VAR_17\n",
"return jsonify(success=True)\n"
] | [
"@app.route('/enable_sharing/<path:path>', methods=['GET'])...\n",
"if path == 'None':\n",
"path = None\n",
"app.interface.config['share_url'] = path\n",
"return jsonify(success=True)\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_28(self, VAR_2='password'):...\n",
"VAR_3 = self.client.post('/login/', {'username': 'testclient', 'password':\n VAR_2})\n",
"self.assertFormError(VAR_3, AuthenticationForm.error_messages[\n 'invalid_login'] % {'username': User._meta.get_field('username').\n verbose_name})\n"
] | [
"def fail_login(self, password='password'):...\n",
"response = self.client.post('/login/', {'username': 'testclient',\n 'password': password})\n",
"self.assertFormError(response, AuthenticationForm.error_messages[\n 'invalid_login'] % {'username': User._meta.get_field('username').\n verbose_name})\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_1(VAR_3):...\n",
"@wraps(VAR_3)...\n",
"if current_user.role_edit() or current_user.role_admin():\n",
"return VAR_3(*VAR_46, **kwargs)\n",
"abort(403)\n",
"return FUNC_44\n"
] | [
"def edit_required(f):...\n",
"@wraps(f)...\n",
"if current_user.role_edit() or current_user.role_admin():\n",
"return f(*args, **kwargs)\n",
"abort(403)\n",
"return inner\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Condition",
"Return'",
"Expr'",
"Return'"
] |
[
"def FUNC_1(VAR_4):...\n",
"@wraps(VAR_4)...\n",
"if VAR_87.role_download():\n",
"return VAR_4(*VAR_49, **kwargs)\n",
"abort(403)\n",
"return FUNC_71\n"
] | [
"def download_required(f):...\n",
"@wraps(f)...\n",
"if current_user.role_download():\n",
"return f(*args, **kwargs)\n",
"abort(403)\n",
"return inner\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Condition",
"Return'",
"Expr'",
"Return'"
] |
[
"@expose('/oauth-authorized/<provider>')...\n",
"VAR_0.debug('Authorized init')\n",
"VAR_45 = self.appbuilder.sm.oauth_remotes[VAR_41].authorize_access_token()\n",
"if VAR_45 is None:\n",
"flash(u'You denied the request to sign in.', 'warning')\n",
"VAR_0.debug('OAUTH Authorized resp: {0}'.format(VAR_45))\n",
"return redirect(self.appbuilder.get_url_for_login)\n",
"self.appbuilder.sm.set_oauth_session(VAR_41, VAR_45)\n",
"VAR_0.error('Error returning OAuth user info: {0}'.format(e))\n",
"VAR_0.debug('User info retrieved from {0}: {1}'.format(VAR_41, VAR_52))\n",
"VAR_52 = self.appbuilder.sm.oauth_user_info(VAR_41, VAR_45)\n",
"VAR_50 = None\n",
"if VAR_41 in self.appbuilder.sm.oauth_whitelists:\n",
"if VAR_50 is None:\n",
"VAR_54 = self.appbuilder.sm.oauth_whitelists[VAR_41]\n",
"VAR_0.debug('No whitelist for OAuth provider')\n",
"flash(as_unicode(self.invalid_login_message), 'warning')\n",
"login_user(VAR_50)\n",
"VAR_55 = False\n",
"VAR_50 = self.appbuilder.sm.auth_user_oauth(VAR_52)\n",
"return redirect(self.appbuilder.get_url_for_login)\n",
"VAR_46 = jwt.decode(request.args['state'], self.appbuilder.app.config[\n 'SECRET_KEY'], algorithms=['HS256'])\n",
"VAR_56 = VAR_46['next'][0] or self.appbuilder.get_url_for_index\n",
"VAR_56 = self.appbuilder.get_url_for_index\n",
"return redirect(VAR_56)\n",
"for e in VAR_54:\n",
"if re.search(e, VAR_52['email']):\n",
"if not VAR_55:\n",
"VAR_55 = True\n",
"flash(u'You are not authorized.', 'warning')\n",
"return redirect(self.appbuilder.get_url_for_login)\n"
] | [
"@expose('/oauth-authorized/<provider>')...\n",
"log.debug('Authorized init')\n",
"resp = self.appbuilder.sm.oauth_remotes[provider].authorize_access_token()\n",
"if resp is None:\n",
"flash(u'You denied the request to sign in.', 'warning')\n",
"log.debug('OAUTH Authorized resp: {0}'.format(resp))\n",
"return redirect(self.appbuilder.get_url_for_login)\n",
"self.appbuilder.sm.set_oauth_session(provider, resp)\n",
"log.error('Error returning OAuth user info: {0}'.format(e))\n",
"log.debug('User info retrieved from {0}: {1}'.format(provider, userinfo))\n",
"userinfo = self.appbuilder.sm.oauth_user_info(provider, resp)\n",
"user = None\n",
"if provider in self.appbuilder.sm.oauth_whitelists:\n",
"if user is None:\n",
"whitelist = self.appbuilder.sm.oauth_whitelists[provider]\n",
"log.debug('No whitelist for OAuth provider')\n",
"flash(as_unicode(self.invalid_login_message), 'warning')\n",
"login_user(user)\n",
"allow = False\n",
"user = self.appbuilder.sm.auth_user_oauth(userinfo)\n",
"return redirect(self.appbuilder.get_url_for_login)\n",
"state = jwt.decode(request.args['state'], self.appbuilder.app.config[\n 'SECRET_KEY'], algorithms=['HS256'])\n",
"next_url = state['next'][0] or self.appbuilder.get_url_for_index\n",
"next_url = self.appbuilder.get_url_for_index\n",
"return redirect(next_url)\n",
"for e in whitelist:\n",
"if re.search(e, userinfo['email']):\n",
"if not allow:\n",
"allow = True\n",
"flash(u'You are not authorized.', 'warning')\n",
"return redirect(self.appbuilder.get_url_for_login)\n"
] | [
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
4,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Return'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"For",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_19(self):...\n",
"VAR_5 = {'not_rooms': ['!anothersecretbase:unknown']}\n",
"VAR_6 = FUNC_0(sender='@foo:bar', type='m.room.message', room_id=\n '!anothersecretbase:unknown')\n",
"self.assertFalse(Filter(VAR_5).check(VAR_6))\n"
] | [
"def test_definition_not_rooms_works_with_literals(self):...\n",
"definition = {'not_rooms': ['!anothersecretbase:unknown']}\n",
"event = MockEvent(sender='@foo:bar', type='m.room.message', room_id=\n '!anothersecretbase:unknown')\n",
"self.assertFalse(Filter(definition).check(event))\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_52():...\n",
"self.http_server.stop()\n",
"self.io_loop.stop()\n"
] | [
"def _stop():...\n",
"self.http_server.stop()\n",
"self.io_loop.stop()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"def FUNC_0(VAR_0: str) ->str:...\n",
"return re.sub('[^\\\\w _-]+', '', VAR_0)\n"
] | [
"def _sanitize(value: str) ->str:...\n",
"return re.sub('[^\\\\w _-]+', '', value)\n"
] | [
0,
5
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_74(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._set_canonical_alias({'alt_aliases': '@bad:test'}, VAR_17=400)\n",
"self._set_canonical_alias({'alt_aliases': None}, VAR_17=400)\n",
"self._set_canonical_alias({'alt_aliases': 0}, VAR_17=400)\n",
"self._set_canonical_alias({'alt_aliases': 1}, VAR_17=400)\n",
"self._set_canonical_alias({'alt_aliases': False}, VAR_17=400)\n",
"self._set_canonical_alias({'alt_aliases': True}, VAR_17=400)\n",
"self._set_canonical_alias({'alt_aliases': {}}, VAR_17=400)\n"
] | [
"def test_bad_data(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._set_canonical_alias({'alt_aliases': '@bad:test'}, expected_code=400)\n",
"self._set_canonical_alias({'alt_aliases': None}, expected_code=400)\n",
"self._set_canonical_alias({'alt_aliases': 0}, expected_code=400)\n",
"self._set_canonical_alias({'alt_aliases': 1}, expected_code=400)\n",
"self._set_canonical_alias({'alt_aliases': False}, expected_code=400)\n",
"self._set_canonical_alias({'alt_aliases': True}, expected_code=400)\n",
"self._set_canonical_alias({'alt_aliases': {}}, expected_code=400)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_14(VAR_4, VAR_6):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = FUNC_0(VAR_4.config) + ['--temp-basedir', '-s',\n 'content.private_browsing', 'true']\n",
"VAR_6.start(VAR_11)\n",
"VAR_6.compare_session('string')\n",
"VAR_6.send_cmd(':quit')\n",
"VAR_6.wait_for_quit()\n"
] | [
"def test_initial_private_browsing(request, quteproc_new):...\n",
"\"\"\"docstring\"\"\"\n",
"args = _base_args(request.config) + ['--temp-basedir', '-s',\n 'content.private_browsing', 'true']\n",
"quteproc_new.start(args)\n",
"quteproc_new.compare_session(\n \"\"\"\n windows:\n - private: True\n tabs:\n - history:\n - url: about:blank\n \"\"\"\n )\n",
"quteproc_new.send_cmd(':quit')\n",
"quteproc_new.wait_for_quit()\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"async def FUNC_14():...\n",
""
] | [
"async def do_iterations():...\n",
""
] | [
0,
0
] | [
"AsyncFunctionDef'",
"Condition"
] |
[
"@VAR_11.before_request...\n",
"session.permanent = True\n",
"VAR_11.permanent_session_lifetime = timedelta(seconds=settings.\n SESSION_EXPIRY_TIME)\n"
] | [
"@app.before_request...\n",
"session.permanent = True\n",
"app.permanent_session_lifetime = timedelta(seconds=settings.SESSION_EXPIRY_TIME\n )\n"
] | [
0,
0,
0
] | [
"For",
"Assign'",
"Assign'"
] |
[
"def FUNC_1(VAR_2: 'RequestToCommandArgs', VAR_3: List[str]):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_0.info('Executing archivy command')\n",
"if not VAR_22.environ.get('PYTHONIOENCODING'):\n",
"VAR_22.environ['PYTHONIOENCODING'] = 'UTF-8'\n",
"VAR_8 = subprocess.Popen(VAR_3, shell=False, stdout=subprocess.PIPE, stderr\n =subprocess.STDOUT)\n",
"VAR_0.info('script running Pid: %d', VAR_8.pid)\n",
"VAR_9 = sys.getdefaultencoding()\n",
"for line in iter(VAR_8.stdout.readline, b''):\n",
"yield line.decode(VAR_9)\n",
"VAR_8.wait()\n",
"VAR_0.info('script finished Pid: %d', VAR_8.pid)\n",
"for fi in VAR_2.field_infos:\n",
"fi.after_script_executed()\n"
] | [
"def _run_script_and_generate_stream(req_to_args: 'RequestToCommandArgs',...\n",
"\"\"\"docstring\"\"\"\n",
"logger.info('Executing archivy command')\n",
"if not os.environ.get('PYTHONIOENCODING'):\n",
"os.environ['PYTHONIOENCODING'] = 'UTF-8'\n",
"process = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr\n =subprocess.STDOUT)\n",
"logger.info('script running Pid: %d', process.pid)\n",
"encoding = sys.getdefaultencoding()\n",
"for line in iter(process.stdout.readline, b''):\n",
"yield line.decode(encoding)\n",
"process.wait()\n",
"logger.info('script finished Pid: %d', process.pid)\n",
"for fi in req_to_args.field_infos:\n",
"fi.after_script_executed()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Expr'",
"Expr'",
"Expr'",
"For",
"Expr'"
] |
[
"def __call__(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_56 = VAR_263.request\n",
"if len(VAR_56.args) < 1:\n",
"VAR_320 = VAR_56.args(0)\n",
"if VAR_320 == 'run':\n",
"return self.serve_run(VAR_56.args[1:])\n",
"if VAR_320 == 'rss':\n",
"return self.serve_rss(VAR_56.args[1:])\n",
"if VAR_320 == 'csv':\n",
"return self.serve_csv(VAR_56.args[1:])\n",
"if VAR_320 == 'xml':\n",
"return self.serve_xml(VAR_56.args[1:])\n",
"if VAR_320 == 'json':\n",
"return self.serve_json(VAR_56.args[1:])\n",
"if VAR_320 == 'jsonrpc':\n",
"return self.serve_jsonrpc()\n",
"if VAR_320 == 'jsonrpc2':\n",
"return self.serve_jsonrpc2()\n",
"if VAR_320 == 'xmlrpc':\n",
"return self.serve_xmlrpc()\n",
"if VAR_320 == 'amfrpc':\n",
"return self.serve_amfrpc()\n",
"if VAR_320 == 'amfrpc3':\n",
"return self.serve_amfrpc(3)\n",
"if VAR_320 == 'soap':\n",
"return self.serve_soap()\n",
"self.error()\n"
] | [
"def __call__(self):...\n",
"\"\"\"docstring\"\"\"\n",
"request = current.request\n",
"if len(request.args) < 1:\n",
"arg0 = request.args(0)\n",
"if arg0 == 'run':\n",
"return self.serve_run(request.args[1:])\n",
"if arg0 == 'rss':\n",
"return self.serve_rss(request.args[1:])\n",
"if arg0 == 'csv':\n",
"return self.serve_csv(request.args[1:])\n",
"if arg0 == 'xml':\n",
"return self.serve_xml(request.args[1:])\n",
"if arg0 == 'json':\n",
"return self.serve_json(request.args[1:])\n",
"if arg0 == 'jsonrpc':\n",
"return self.serve_jsonrpc()\n",
"if arg0 == 'jsonrpc2':\n",
"return self.serve_jsonrpc2()\n",
"if arg0 == 'xmlrpc':\n",
"return self.serve_xmlrpc()\n",
"if arg0 == 'amfrpc':\n",
"return self.serve_amfrpc()\n",
"if arg0 == 'amfrpc3':\n",
"return self.serve_amfrpc(3)\n",
"if arg0 == 'soap':\n",
"return self.serve_soap()\n",
"self.error()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Expr'"
] |
[
"def FUNC_13(VAR_22, VAR_23: Element) ->Union[Any, Type[NoValue]]:...\n",
"VAR_23.component_instance.component_name = self.name\n",
"VAR_23.component_instance.form_id = current_form_id(VAR_22)\n",
"if self.url is not None:\n",
"VAR_23.component_instance.url = self.url\n",
"def FUNC_14():...\n",
"VAR_23.component_instance.json_args = VAR_34\n",
"VAR_23.component_instance.special_args.extend(VAR_21)\n",
"if VAR_6 is None:\n",
"FUNC_14()\n",
"def FUNC_15(VAR_35, VAR_36=''):...\n",
"return VAR_35\n"
] | [
"def marshall_component(dg, element: Element) ->Union[Any, Type[NoValue]]:...\n",
"element.component_instance.component_name = self.name\n",
"element.component_instance.form_id = current_form_id(dg)\n",
"if self.url is not None:\n",
"element.component_instance.url = self.url\n",
"def marshall_element_args():...\n",
"element.component_instance.json_args = serialized_json_args\n",
"element.component_instance.special_args.extend(special_args)\n",
"if key is None:\n",
"marshall_element_args()\n",
"def deserialize_component(ui_value, widget_id=''):...\n",
"return ui_value\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"FunctionDef'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_1(self, VAR_3, VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"self.links.append({'url': VAR_3, 'title': VAR_2})\n",
"return self\n"
] | [
"def with_link(self, url, title):...\n",
"\"\"\"docstring\"\"\"\n",
"self.links.append({'url': url, 'title': title})\n",
"return self\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Return'"
] |
[
"def FUNC_15(self):...\n",
"if self._pull_to_push_producer:\n",
"self._pull_to_push_producer.stop()\n"
] | [
"def unregisterProducer(self):...\n",
"if self._pull_to_push_producer:\n",
"self._pull_to_push_producer.stop()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'"
] |
[
"def FUNC_16(self):...\n",
"self.call('testCall2', 'hello')\n"
] | [
"def mount(self):...\n",
"self.call('testCall2', 'hello')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_3(self, VAR_23):...\n",
"assert isinstance(VAR_23, text_type)\n",
"if self.encoding:\n",
"return VAR_23.encode(self.encoding, 'strict')\n",
"return VAR_23\n"
] | [
"def encodeStrict(self, string):...\n",
"assert isinstance(string, text_type)\n",
"if self.encoding:\n",
"return string.encode(self.encoding, 'strict')\n",
"return string\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assert'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_37():...\n",
"if callable(VAR_63):\n",
"VAR_33 = FUNC_21()\n",
"VAR_33 = VAR_63()\n",
"VAR_0.exception('Error while trying to retrieve tracked files for plugin {}'\n .format(VAR_8))\n",
"VAR_33 += FUNC_22()\n",
"if VAR_33:\n",
"VAR_33 += FUNC_23(g.locale.language if g.locale else 'en', 'messages')\n",
"return VAR_33\n",
"if callable(VAR_61):\n",
"return sorted(set(VAR_33))\n",
"VAR_142 = VAR_61()\n",
"VAR_0.exception(\n 'Error while trying to retrieve additional tracked files for plugin {}'\n .format(VAR_8))\n",
"if VAR_142:\n",
"VAR_33 += VAR_142\n"
] | [
"def collect_files():...\n",
"if callable(custom_files):\n",
"files = _get_all_templates()\n",
"files = custom_files()\n",
"_logger.exception('Error while trying to retrieve tracked files for plugin {}'\n .format(key))\n",
"files += _get_all_assets()\n",
"if files:\n",
"files += _get_all_translationfiles(g.locale.language if g.locale else 'en',\n 'messages')\n",
"return files\n",
"if callable(additional_files):\n",
"return sorted(set(files))\n",
"af = additional_files()\n",
"_logger.exception(\n 'Error while trying to retrieve additional tracked files for plugin {}'\n .format(key))\n",
"if af:\n",
"files += af\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"AugAssign'",
"Condition",
"AugAssign'",
"Return'",
"Condition",
"Return'",
"Assign'",
"Expr'",
"Condition",
"AugAssign'"
] |
[
"def FUNC_4(self):...\n",
"return FUNC_0(self.rel.limit_choices_to)\n"
] | [
"def base_url_parameters(self):...\n",
"return url_params_from_lookup_dict(self.rel.limit_choices_to)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_33(self):...\n",
"from Products.PageTemplates.interfaces import IUnicodeEncodingConflictResolver\n",
"from Products.PageTemplates.unicodeconflictresolver import IgnoringUnicodeEncodingConflictResolver\n",
"from zope.component import getUtility\n",
"from zope.component import provideUtility\n",
"provideUtility(IgnoringUnicodeEncodingConflictResolver,\n IUnicodeEncodingConflictResolver)\n",
"VAR_12 = getUtility(IUnicodeEncodingConflictResolver)\n",
"self.assertEqual(VAR_12.resolve(None, b'\\xe4\\xfc\\xf6', None), '')\n"
] | [
"def testIgnoringResolver(self):...\n",
"from Products.PageTemplates.interfaces import IUnicodeEncodingConflictResolver\n",
"from Products.PageTemplates.unicodeconflictresolver import IgnoringUnicodeEncodingConflictResolver\n",
"from zope.component import getUtility\n",
"from zope.component import provideUtility\n",
"provideUtility(IgnoringUnicodeEncodingConflictResolver,\n IUnicodeEncodingConflictResolver)\n",
"resolver = getUtility(IUnicodeEncodingConflictResolver)\n",
"self.assertEqual(resolver.resolve(None, b'\\xe4\\xfc\\xf6', None), '')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"from __future__ import unicode_literals\n",
"import frappe\n",
"import json, datetime\n",
"from frappe import _, scrub\n",
"import frappe.desk.query_report\n",
"from frappe.utils import cint, cstr\n",
"from frappe.model.document import Document\n",
"from frappe.modules.export_file import export_to_files\n",
"from frappe.modules import make_boilerplate\n",
"from frappe.core.doctype.page.page import delete_custom_role\n",
"from frappe.core.doctype.custom_role.custom_role import get_custom_allowed_roles\n",
"from frappe.desk.reportview import append_totals_row\n",
"from six import iteritems\n",
"from frappe.utils.safe_exec import safe_exec\n",
"def FUNC_4(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.module:\n",
"self.module = frappe.db.get_value('DocType', self.ref_doctype, 'module')\n",
"if not self.is_standard:\n",
"self.is_standard = 'No'\n",
"if self.is_standard == 'No':\n",
"if frappe.session.user == 'Administrator' and getattr(frappe.local.conf,\n",
"if self.report_type != 'Report Builder':\n",
"if self.is_standard == 'Yes' and frappe.session.user != 'Administrator':\n",
"self.is_standard = 'Yes'\n",
"frappe.only_for('Script Manager', True)\n",
"if frappe.db.get_value('Report', self.name, 'is_standard') == 'Yes':\n",
"frappe.throw(_(\n 'Only Administrator can save a standard report. Please rename and save.'))\n",
"if self.report_type == 'Report Builder':\n",
"frappe.throw(_(\n 'Cannot edit a standard report. Please duplicate and create a new report'))\n",
"self.update_report_json()\n",
"def FUNC_5(self):...\n",
"self.set_doctype_roles()\n",
"def FUNC_6(self):...\n",
"self.export_doc()\n",
"def FUNC_7(self):...\n",
"if self.is_standard == 'Yes' and not cint(getattr(frappe.local.conf,\n",
"frappe.throw(_('You are not allowed to delete Standard Report'))\n",
"delete_custom_role('report', self.name)\n",
"def FUNC_8(self):...\n",
"return [d.as_dict(no_default_fields=True) for d in self.columns]\n"
] | [
"from __future__ import unicode_literals\n",
"import frappe\n",
"import json, datetime\n",
"from frappe import _, scrub\n",
"import frappe.desk.query_report\n",
"from frappe.utils import cint, cstr\n",
"from frappe.model.document import Document\n",
"from frappe.modules.export_file import export_to_files\n",
"from frappe.modules import make_boilerplate\n",
"from frappe.core.doctype.page.page import delete_custom_role\n",
"from frappe.core.doctype.custom_role.custom_role import get_custom_allowed_roles\n",
"from frappe.desk.reportview import append_totals_row\n",
"from six import iteritems\n",
"from frappe.utils.safe_exec import safe_exec\n",
"def validate(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.module:\n",
"self.module = frappe.db.get_value('DocType', self.ref_doctype, 'module')\n",
"if not self.is_standard:\n",
"self.is_standard = 'No'\n",
"if self.is_standard == 'No':\n",
"if frappe.session.user == 'Administrator' and getattr(frappe.local.conf,\n",
"if self.report_type != 'Report Builder':\n",
"if self.is_standard == 'Yes' and frappe.session.user != 'Administrator':\n",
"self.is_standard = 'Yes'\n",
"frappe.only_for('Script Manager', True)\n",
"if frappe.db.get_value('Report', self.name, 'is_standard') == 'Yes':\n",
"frappe.throw(_(\n 'Only Administrator can save a standard report. Please rename and save.'))\n",
"if self.report_type == 'Report Builder':\n",
"frappe.throw(_(\n 'Cannot edit a standard report. Please duplicate and create a new report'))\n",
"self.update_report_json()\n",
"def before_insert(self):...\n",
"self.set_doctype_roles()\n",
"def on_update(self):...\n",
"self.export_doc()\n",
"def on_trash(self):...\n",
"if self.is_standard == 'Yes' and not cint(getattr(frappe.local.conf,\n",
"frappe.throw(_('You are not allowed to delete Standard Report'))\n",
"delete_custom_role('report', self.name)\n",
"def get_columns(self):...\n",
"return [d.as_dict(no_default_fields=True) for d in self.columns]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"Import'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Condition",
"Expr'",
"Expr'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_9(VAR_25):...\n",
"return VAR_25[:-1] if VAR_25.endswith('_') else VAR_25\n"
] | [
"def norm(k):...\n",
"return k[:-1] if k.endswith('_') else k\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"async def FUNC_3(VAR_6, VAR_7):...\n",
"return web.json_response({'status': VAR_7.status, 'message': VAR_7.reason},\n status=response.status)\n"
] | [
"async def handle_any(request, response):...\n",
"return web.json_response({'status': response.status, 'message': response.\n reason}, status=response.status)\n"
] | [
0,
0
] | [
"AsyncFunctionDef'",
"Return'"
] |
[
"def FUNC_86(self, VAR_25, *VAR_0, **VAR_1):...\n",
"FUNC_83(self, VAR_72(self, *VAR_0, **kwargs))\n",
"for VAR_6 in VAR_73:\n",
"FUNC_83(self, VAR_6(self, VAR_25, *VAR_0, **kwargs))\n",
"return self._return_value\n"
] | [
"def runner(self, method, *args, **kwargs):...\n",
"add_to_return_value(self, fn(self, *args, **kwargs))\n",
"for f in hooks:\n",
"add_to_return_value(self, f(self, method, *args, **kwargs))\n",
"return self._return_value\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"For",
"Expr'",
"Return'"
] |
[
"def FUNC_65(VAR_6):...\n",
"return reverse(VAR_120, VAR_116=(iid,))\n"
] | [
"def urlprefix(iid):...\n",
"return reverse(prefix, args=(iid,))\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def __init__(self, VAR_1: str):...\n",
"self.sourceName = VAR_1\n",
"self.type = 'file'\n",
"self.content = None\n"
] | [
"def __init__(self, sourceName: str):...\n",
"self.sourceName = sourceName\n",
"self.type = 'file'\n",
"self.content = None\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"async def FUNC_6(self):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
"async def get_resolved_spec(self):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
0,
0
] | [
"AsyncFunctionDef'",
"Docstring"
] |
[
"def FUNC_77(VAR_94, VAR_132=True):...\n",
"\"\"\"docstring\"\"\"\n",
"import copy\n",
"def FUNC_119(VAR_174):...\n",
"for df in VAR_174.meta.get('fields', {'no_copy': 1}):\n",
"if hasattr(VAR_174, df.fieldname):\n",
"VAR_195 = ['name', 'owner', 'creation', 'modified', 'modified_by']\n",
"VAR_174.set(df.fieldname, None)\n",
"if not VAR_1.flags.in_test:\n",
"VAR_195.append('docstatus')\n",
"if not isinstance(VAR_94, dict):\n",
"VAR_174 = VAR_94.as_dict()\n",
"VAR_174 = VAR_94\n",
"VAR_196 = FUNC_45(FUNC_113.deepcopy(VAR_174))\n",
"VAR_196.set('__islocal', 1)\n",
"for VAR_97 in (VAR_195 + ['amended_from', 'amendment_date']):\n",
"VAR_196.set(VAR_97, None)\n",
"if not VAR_132:\n",
"FUNC_119(VAR_196)\n",
"for i, VAR_174 in enumerate(VAR_196.get_all_children()):\n",
"VAR_174.set('__islocal', 1)\n",
"return VAR_196\n",
"for VAR_97 in VAR_195:\n",
"VAR_174.set(VAR_97, None)\n",
"if not VAR_132:\n",
"FUNC_119(VAR_174)\n"
] | [
"def copy_doc(doc, ignore_no_copy=True):...\n",
"\"\"\"docstring\"\"\"\n",
"import copy\n",
"def remove_no_copy_fields(d):...\n",
"for df in d.meta.get('fields', {'no_copy': 1}):\n",
"if hasattr(d, df.fieldname):\n",
"fields_to_clear = ['name', 'owner', 'creation', 'modified', 'modified_by']\n",
"d.set(df.fieldname, None)\n",
"if not local.flags.in_test:\n",
"fields_to_clear.append('docstatus')\n",
"if not isinstance(doc, dict):\n",
"d = doc.as_dict()\n",
"d = doc\n",
"newdoc = get_doc(copy.deepcopy(d))\n",
"newdoc.set('__islocal', 1)\n",
"for fieldname in (fields_to_clear + ['amended_from', 'amendment_date']):\n",
"newdoc.set(fieldname, None)\n",
"if not ignore_no_copy:\n",
"remove_no_copy_fields(newdoc)\n",
"for i, d in enumerate(newdoc.get_all_children()):\n",
"d.set('__islocal', 1)\n",
"return newdoc\n",
"for fieldname in fields_to_clear:\n",
"d.set(fieldname, None)\n",
"if not ignore_no_copy:\n",
"remove_no_copy_fields(d)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Import'",
"FunctionDef'",
"For",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"For",
"Expr'",
"Condition",
"Expr'",
"For",
"Expr'",
"Return'",
"For",
"Expr'",
"Condition",
"Expr'"
] |
[
"def __init__(self, VAR_309, VAR_310):...\n",
"VAR_393 = CLASS_5.jsonrpc_errors.get(VAR_309)\n",
"if VAR_393:\n",
"self.message, self.description = VAR_393\n",
"self.code, self.info = VAR_309, VAR_310\n"
] | [
"def __init__(self, code, info):...\n",
"jrpc_error = Service.jsonrpc_errors.get(code)\n",
"if jrpc_error:\n",
"self.message, self.description = jrpc_error\n",
"self.code, self.info = code, info\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_22(self, VAR_1):...\n",
"VAR_5 = VAR_1.MagicMock()\n",
"VAR_26 = VAR_1.MagicMock()\n",
"VAR_23 = oai.Schema.construct(type='string', schema_format='date')\n",
"VAR_37 = VAR_1.patch(f'{VAR_0}.DateProperty')\n",
"from openapi_python_client.parser.properties import _string_based_property\n",
"VAR_4 = VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)\n",
"VAR_37.assert_called_once_with(VAR_5=name, VAR_26=required, default=None)\n",
"assert VAR_4 == VAR_37.return_value\n",
"VAR_37.reset_mock()\n",
"VAR_23.default = VAR_1.MagicMock()\n",
"VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)\n",
"VAR_37.assert_called_once_with(VAR_5=name, VAR_26=required, default=data.\n default)\n"
] | [
"def test__string_based_property_date_format(self, mocker):...\n",
"name = mocker.MagicMock()\n",
"required = mocker.MagicMock()\n",
"data = oai.Schema.construct(type='string', schema_format='date')\n",
"DateProperty = mocker.patch(f'{MODULE_NAME}.DateProperty')\n",
"from openapi_python_client.parser.properties import _string_based_property\n",
"p = _string_based_property(name=name, required=required, data=data)\n",
"DateProperty.assert_called_once_with(name=name, required=required, default=None\n )\n",
"assert p == DateProperty.return_value\n",
"DateProperty.reset_mock()\n",
"data.default = mocker.MagicMock()\n",
"_string_based_property(name=name, required=required, data=data)\n",
"DateProperty.assert_called_once_with(name=name, required=required, default=\n data.default)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"ImportFrom'",
"Assign'",
"Expr'",
"Assert'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"async def FUNC_25(self, VAR_32):...\n",
"return await self.store.get_min_depth(VAR_32)\n"
] | [
"async def get_min_depth_for_context(self, context):...\n",
"return await self.store.get_min_depth(context)\n"
] | [
0,
0
] | [
"AsyncFunctionDef'",
"Return'"
] |
[
"def FUNC_67(VAR_44, VAR_45, VAR_46, VAR_47, VAR_48):...\n",
"VAR_55 = request.form.to_dict()\n",
"VAR_87.random_books = 0\n",
"if VAR_87.role_passwd() or VAR_87.role_admin():\n",
"if VAR_55.get('password'):\n",
"if VAR_55.get('kindle_mail', VAR_87.kindle_mail) != VAR_87.kindle_mail:\n",
"flash(str(ex), category='error')\n",
"VAR_98 = 0\n",
"VAR_87.password = generate_password_hash(VAR_55['password'])\n",
"VAR_87.kindle_mail = valid_email(VAR_55['kindle_mail'])\n",
"if VAR_55.get('email', VAR_87.email) != VAR_87.email:\n",
"return render_title_template('user_edit.html', VAR_115=current_user, VAR_47\n =translations, profile=1, VAR_48=languages, VAR_150=_(\n u\"%(name)s's profile\", name=current_user.name), VAR_9='me', VAR_44=\n kobo_support, registered_oauth=local_oauth_check, VAR_46=oauth_status)\n",
"for key, VAR_64 in VAR_55.items():\n",
"VAR_87.email = check_email(VAR_55['email'])\n",
"if VAR_87.role_admin():\n",
"if key.startswith('show'):\n",
"VAR_87.sidebar_view = VAR_98\n",
"if VAR_55.get('name', VAR_87.name) != VAR_87.name:\n",
"VAR_87.random_books = 1 if VAR_55.get('show_random') == 'on' else 0\n",
"VAR_98 += int(key[5:])\n",
"if VAR_55.get('Show_detail_random'):\n",
"VAR_87.name = check_username(VAR_55['name'])\n",
"if VAR_55.get('default_language'):\n",
"VAR_87.sidebar_view += constants.DETAIL_RANDOM\n",
"ub.session.commit()\n",
"ub.session.rollback()\n",
"VAR_87.default_language = VAR_55['default_language']\n",
"if VAR_55.get('locale'):\n",
"flash(_(u'Profile updated'), category='success')\n",
"flash(_(u'Found an existing account for this e-mail address'), category='error'\n )\n",
"VAR_87.locale = VAR_55['locale']\n",
"VAR_117 = VAR_87.kobo_only_shelves_sync\n",
"VAR_3.debug(u'Profile updated')\n",
"VAR_3.debug(u'Found an existing account for this e-mail address')\n",
"VAR_87.kobo_only_shelves_sync = int(VAR_55.get('kobo_only_shelves_sync') ==\n 'on') or 0\n",
"ub.session.rollback()\n",
"if VAR_117 == 0 and VAR_87.kobo_only_shelves_sync == 1:\n",
"VAR_3.error('Database error: %s', e)\n",
"kobo_sync_status.update_on_sync_shelfs(VAR_87.id)\n",
"flash(_(u'Database error: %(error)s.', VAR_140=e), category='error')\n"
] | [
"def change_profile(kobo_support, local_oauth_check, oauth_status,...\n",
"to_save = request.form.to_dict()\n",
"current_user.random_books = 0\n",
"if current_user.role_passwd() or current_user.role_admin():\n",
"if to_save.get('password'):\n",
"if to_save.get('kindle_mail', current_user.kindle_mail\n",
"flash(str(ex), category='error')\n",
"val = 0\n",
"current_user.password = generate_password_hash(to_save['password'])\n",
"current_user.kindle_mail = valid_email(to_save['kindle_mail'])\n",
"if to_save.get('email', current_user.email) != current_user.email:\n",
"return render_title_template('user_edit.html', content=current_user,\n translations=translations, profile=1, languages=languages, title=_(\n u\"%(name)s's profile\", name=current_user.name), page='me', kobo_support\n =kobo_support, registered_oauth=local_oauth_check, oauth_status=\n oauth_status)\n",
"for key, __ in to_save.items():\n",
"current_user.email = check_email(to_save['email'])\n",
"if current_user.role_admin():\n",
"if key.startswith('show'):\n",
"current_user.sidebar_view = val\n",
"if to_save.get('name', current_user.name) != current_user.name:\n",
"current_user.random_books = 1 if to_save.get('show_random') == 'on' else 0\n",
"val += int(key[5:])\n",
"if to_save.get('Show_detail_random'):\n",
"current_user.name = check_username(to_save['name'])\n",
"if to_save.get('default_language'):\n",
"current_user.sidebar_view += constants.DETAIL_RANDOM\n",
"ub.session.commit()\n",
"ub.session.rollback()\n",
"current_user.default_language = to_save['default_language']\n",
"if to_save.get('locale'):\n",
"flash(_(u'Profile updated'), category='success')\n",
"flash(_(u'Found an existing account for this e-mail address'), category='error'\n )\n",
"current_user.locale = to_save['locale']\n",
"old_state = current_user.kobo_only_shelves_sync\n",
"log.debug(u'Profile updated')\n",
"log.debug(u'Found an existing account for this e-mail address')\n",
"current_user.kobo_only_shelves_sync = int(to_save.get(\n 'kobo_only_shelves_sync') == 'on') or 0\n",
"ub.session.rollback()\n",
"if old_state == 0 and current_user.kobo_only_shelves_sync == 1:\n",
"log.error('Database error: %s', e)\n",
"kobo_sync_status.update_on_sync_shelfs(current_user.id)\n",
"flash(_(u'Database error: %(error)s.', error=e), category='error')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"For",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'",
"AugAssign'",
"Condition",
"Assign'",
"Condition",
"AugAssign'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_4(self):...\n",
""
] | [
"def clean(self):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_40(self, VAR_28, VAR_35):...\n",
"VAR_15 = self.getUserId()\n",
"VAR_80 = self.playlistdb.createPLS(VAR_28=plid, VAR_15=userid, addrstr=hostaddr\n )\n",
"VAR_81 = self.playlistdb.getName(VAR_28, VAR_15)\n",
"if VAR_80 and VAR_81:\n",
"return self.serve_string_as_file(VAR_80, VAR_81 + '.pls')\n"
] | [
"def api_downloadpls(self, plid, hostaddr):...\n",
"userid = self.getUserId()\n",
"pls = self.playlistdb.createPLS(plid=plid, userid=userid, addrstr=hostaddr)\n",
"name = self.playlistdb.getName(plid, userid)\n",
"if pls and name:\n",
"return self.serve_string_as_file(pls, name + '.pls')\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_1():...\n",
"if VAR_92.get('_user_id', ''):\n",
"VAR_0.error('No user id in session')\n",
"if not FUNC_3(VAR_92.get('_user_id', ''), VAR_92.get('_id', '')):\n",
"VAR_1.rollback()\n",
"VAR_94 = CLASS_4(VAR_92.get('_user_id', ''), VAR_92.get('_id', ''))\n",
"VAR_0.info('Found stored session : ' + VAR_92.get('_id', ''))\n",
"VAR_0.exception(e)\n",
"VAR_1.add(VAR_94)\n",
"VAR_1.commit()\n",
"VAR_0.info('Login and store session : ' + VAR_92.get('_id', ''))\n"
] | [
"def store_user_session():...\n",
"if flask_session.get('_user_id', ''):\n",
"log.error('No user id in session')\n",
"if not check_user_session(flask_session.get('_user_id', ''), flask_session.\n",
"session.rollback()\n",
"user_session = User_Sessions(flask_session.get('_user_id', ''),\n flask_session.get('_id', ''))\n",
"log.info('Found stored session : ' + flask_session.get('_id', ''))\n",
"log.exception(e)\n",
"session.add(user_session)\n",
"session.commit()\n",
"log.info('Login and store session : ' + flask_session.get('_id', ''))\n"
] | [
0,
0,
0,
0,
0,
0,
2,
0,
0,
0,
2
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_8(VAR_13, VAR_14, VAR_15):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_23 = ['ffmpeg', '-v', 'quiet', '-ss', str(VAR_14), '-t', str(VAR_15),\n '-i', str(VAR_13), '-movflags', 'frag_keyframe+empty_moov', '-c',\n 'copy', '-f', 'mp4', 'pipe:1']\n",
"VAR_17 = subprocess.Popen(VAR_23, stdout=subprocess.PIPE, bufsize=-1)\n",
"return VAR_17\n"
] | [
"def _create_ffmpeg_segment_proc(video_path, start_sec, duration_sec):...\n",
"\"\"\"docstring\"\"\"\n",
"cmd_l = ['ffmpeg', '-v', 'quiet', '-ss', str(start_sec), '-t', str(\n duration_sec), '-i', str(video_path), '-movflags',\n 'frag_keyframe+empty_moov', '-c', 'copy', '-f', 'mp4', 'pipe:1']\n",
"proc = subprocess.Popen(cmd_l, stdout=subprocess.PIPE, bufsize=-1)\n",
"return proc\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_4(self, VAR_0, VAR_2):...\n",
"VAR_8 = VAR_0.post('/', data={'file': 'does_not_exist.txt', 's3file': 'file'})\n",
"S3FileMiddleware(lambda x: None)(VAR_8)\n",
"assert not VAR_8.FILES.getlist('file')\n",
"assert 'File not found: does_not_exist.txt' in VAR_2.text\n"
] | [
"def test_process_request__no_file(self, rf, caplog):...\n",
"request = rf.post('/', data={'file': 'does_not_exist.txt', 's3file': 'file'})\n",
"S3FileMiddleware(lambda x: None)(request)\n",
"assert not request.FILES.getlist('file')\n",
"assert 'File not found: does_not_exist.txt' in caplog.text\n"
] | [
0,
1,
0,
0,
1
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assert'",
"Assert'"
] |
[
"def FUNC_6(VAR_8):...\n",
"VAR_9 = VAR_8.args.get('api_key', None)\n",
"if VAR_9 is not None:\n",
"return VAR_9\n",
"if VAR_8.headers.get('Authorization'):\n",
"VAR_27 = VAR_8.headers.get('Authorization')\n",
"if VAR_8.view_args is not None and VAR_8.view_args.get('token'):\n",
"VAR_9 = VAR_27.replace('Key ', '', 1)\n",
"VAR_9 = VAR_8.view_args['token']\n",
"return VAR_9\n"
] | [
"def get_api_key_from_request(request):...\n",
"api_key = request.args.get('api_key', None)\n",
"if api_key is not None:\n",
"return api_key\n",
"if request.headers.get('Authorization'):\n",
"auth_header = request.headers.get('Authorization')\n",
"if request.view_args is not None and request.view_args.get('token'):\n",
"api_key = auth_header.replace('Key ', '', 1)\n",
"api_key = request.view_args['token']\n",
"return api_key\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_16(self, VAR_18):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_49 = self.get_doc_before_save()\n",
"return VAR_49.get(VAR_18) != self.get(VAR_18) if VAR_49 else True\n"
] | [
"def has_value_changed(self, fieldname):...\n",
"\"\"\"docstring\"\"\"\n",
"previous = self.get_doc_before_save()\n",
"return previous.get(fieldname) != self.get(fieldname) if previous else True\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_17(self, VAR_20=False, VAR_15=None, VAR_16=True):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.flags.name_set and not VAR_20:\n",
"return\n",
"if self.get('__newname'):\n",
"self.name = self.get('__newname')\n",
"if VAR_15:\n",
"self.flags.name_set = True\n",
"self.name = VAR_15\n",
"FUNC_17(self)\n",
"return\n",
"if VAR_16:\n",
"for VAR_21 in self.get_all_children():\n",
"self.flags.name_set = True\n",
"FUNC_17(VAR_21)\n"
] | [
"def set_new_name(self, force=False, set_name=None, set_child_names=True):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.flags.name_set and not force:\n",
"return\n",
"if self.get('__newname'):\n",
"self.name = self.get('__newname')\n",
"if set_name:\n",
"self.flags.name_set = True\n",
"self.name = set_name\n",
"set_new_name(self)\n",
"return\n",
"if set_child_names:\n",
"for d in self.get_all_children():\n",
"self.flags.name_set = True\n",
"set_new_name(d)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Condition",
"For",
"Assign'",
"Expr'"
] |
[
"def FUNC_8(VAR_19):...\n",
"if VAR_2:\n",
"return 'file://' + FUNC_10(VAR_19)\n",
"return url_for('.get_object_src_http', VAR_20=object_path)\n"
] | [
"def _get_object_src_uri(object_path):...\n",
"if LOCAL_OBJ_URI:\n",
"return 'file://' + _get_obj_absolute_path(object_path)\n",
"return url_for('.get_object_src_http', obj_path=object_path)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_2(self):...\n",
"return CLASS_7()\n"
] | [
"def _makeEngine(self):...\n",
"return DummyEngine()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@VAR_0.route('/shelf/remove/<int:shelf_id>/<int:book_id>')...\n",
"VAR_12 = request.headers.get('X-Requested-With') == 'XMLHttpRequest'\n",
"VAR_0 = ub.session.query(ub.Shelf).filter(ub.Shelf.id == VAR_3).first()\n",
"if VAR_0 is None:\n",
"VAR_1.error('Invalid shelf specified: {}'.format(VAR_3))\n",
"if FUNC_0(VAR_0):\n",
"if not VAR_12:\n",
"VAR_19 = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == VAR_3,\n ub.BookShelf.book_id == VAR_4).first()\n",
"if not VAR_12:\n",
"return redirect(url_for('web.index'))\n",
"return 'Invalid shelf specified', 400\n",
"if VAR_19 is None:\n",
"VAR_1.warning('You are not allowed to remove a book from shelf: {}'.format(\n VAR_0.name))\n",
"return 'Sorry you are not allowed to remove a book from this shelf', 403\n",
"VAR_1.error('Book %s already removed from %s', VAR_4, VAR_0)\n",
"ub.session.delete(VAR_19)\n",
"ub.session.rollback()\n",
"if not VAR_12:\n",
"flash(_(u'Sorry you are not allowed to remove a book from this shelf'),\n category='error')\n",
"if not VAR_12:\n",
"VAR_0.last_modified = datetime.utcnow()\n",
"VAR_1.error('Settings DB is not Writeable')\n",
"flash(_(u'Book has been removed from shelf: %(sname)s', sname=shelf.name),\n category='success')\n",
"return '', 204\n",
"return redirect(url_for('web.index'))\n",
"return redirect(url_for('web.index'))\n",
"return 'Book already removed from shelf', 410\n",
"ub.session.commit()\n",
"flash(_('Settings DB is not Writeable'), category='error')\n",
"if 'HTTP_REFERER' in request.environ:\n",
"if 'HTTP_REFERER' in request.environ:\n",
"return redirect(request.environ['HTTP_REFERER'])\n",
"return redirect(url_for('web.index'))\n",
"return redirect(request.environ['HTTP_REFERER'])\n",
"return redirect(url_for('web.index'))\n"
] | [
"@shelf.route('/shelf/remove/<int:shelf_id>/<int:book_id>')...\n",
"xhr = request.headers.get('X-Requested-With') == 'XMLHttpRequest'\n",
"shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()\n",
"if shelf is None:\n",
"log.error('Invalid shelf specified: {}'.format(shelf_id))\n",
"if check_shelf_edit_permissions(shelf):\n",
"if not xhr:\n",
"book_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf ==\n shelf_id, ub.BookShelf.book_id == book_id).first()\n",
"if not xhr:\n",
"return redirect(url_for('web.index'))\n",
"return 'Invalid shelf specified', 400\n",
"if book_shelf is None:\n",
"log.warning('You are not allowed to remove a book from shelf: {}'.format(\n shelf.name))\n",
"return 'Sorry you are not allowed to remove a book from this shelf', 403\n",
"log.error('Book %s already removed from %s', book_id, shelf)\n",
"ub.session.delete(book_shelf)\n",
"ub.session.rollback()\n",
"if not xhr:\n",
"flash(_(u'Sorry you are not allowed to remove a book from this shelf'),\n category='error')\n",
"if not xhr:\n",
"shelf.last_modified = datetime.utcnow()\n",
"log.error('Settings DB is not Writeable')\n",
"flash(_(u'Book has been removed from shelf: %(sname)s', sname=shelf.name),\n category='success')\n",
"return '', 204\n",
"return redirect(url_for('web.index'))\n",
"return redirect(url_for('web.index'))\n",
"return 'Book already removed from shelf', 410\n",
"ub.session.commit()\n",
"flash(_('Settings DB is not Writeable'), category='error')\n",
"if 'HTTP_REFERER' in request.environ:\n",
"if 'HTTP_REFERER' in request.environ:\n",
"return redirect(request.environ['HTTP_REFERER'])\n",
"return redirect(url_for('web.index'))\n",
"return redirect(request.environ['HTTP_REFERER'])\n",
"return redirect(url_for('web.index'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Return'",
"Return'",
"Condition",
"Expr'",
"Return'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Return'",
"Return'",
"Return'",
"Return'",
"Expr'",
"Expr'",
"Condition",
"Condition",
"Return'",
"Return'",
"Return'",
"Return'"
] |
[
"@VAR_2.route('/send/<int:book_id>/<book_format>/<int:convert>')...\n",
"if not config.get_mail_server_configured():\n",
"flash(_(u'Please configure the SMTP mail settings first...'), category='error')\n",
"if VAR_87.kindle_mail:\n",
"if 'HTTP_REFERER' in request.environ:\n",
"VAR_135 = send_mail(VAR_5, VAR_6, VAR_43, VAR_87.kindle_mail, config.\n config_calibre_dir, VAR_87.name)\n",
"flash(_(\n u'Please update your profile with a valid Send to Kindle E-mail Address.'\n ), category='error')\n",
"return redirect(request.environ['HTTP_REFERER'])\n",
"return redirect(url_for('web.index'))\n",
"if VAR_135 is None:\n",
"flash(_(u'Book successfully queued for sending to %(kindlemail)s',\n kindlemail=current_user.kindle_mail), category='success')\n",
"flash(_(u'Oops! There was an error sending this book: %(res)s', res=result),\n category='error')\n",
"ub.update_download(VAR_5, int(VAR_87.id))\n"
] | [
"@web.route('/send/<int:book_id>/<book_format>/<int:convert>')...\n",
"if not config.get_mail_server_configured():\n",
"flash(_(u'Please configure the SMTP mail settings first...'), category='error')\n",
"if current_user.kindle_mail:\n",
"if 'HTTP_REFERER' in request.environ:\n",
"result = send_mail(book_id, book_format, convert, current_user.kindle_mail,\n config.config_calibre_dir, current_user.name)\n",
"flash(_(\n u'Please update your profile with a valid Send to Kindle E-mail Address.'\n ), category='error')\n",
"return redirect(request.environ['HTTP_REFERER'])\n",
"return redirect(url_for('web.index'))\n",
"if result is None:\n",
"flash(_(u'Book successfully queued for sending to %(kindlemail)s',\n kindlemail=current_user.kindle_mail), category='success')\n",
"flash(_(u'Oops! There was an error sending this book: %(res)s', res=result),\n category='error')\n",
"ub.update_download(book_id, int(current_user.id))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"For",
"Condition",
"Expr'",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Return'",
"Return'",
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_30(VAR_20, VAR_36):...\n",
"if VAR_20['author_name'] == _(u'Unknown'):\n",
"VAR_20['author_name'] = ''\n",
"if VAR_20['book_title'] == _(u'Unknown'):\n",
"VAR_20['book_title'] = ''\n",
"for VAR_93, m_field in [('tags', 'tags'), ('author_name', 'author'), (\n",
"VAR_20[VAR_93] = VAR_20[VAR_93] or getattr(VAR_36, m_field, '')\n",
"VAR_20['description'] = VAR_20['description'] or Markup(getattr(VAR_36,\n 'description', '')).unescape()\n"
] | [
"def merge_metadata(to_save, meta):...\n",
"if to_save['author_name'] == _(u'Unknown'):\n",
"to_save['author_name'] = ''\n",
"if to_save['book_title'] == _(u'Unknown'):\n",
"to_save['book_title'] = ''\n",
"for s_field, m_field in [('tags', 'tags'), ('author_name', 'author'), (\n",
"to_save[s_field] = to_save[s_field] or getattr(meta, m_field, '')\n",
"to_save['description'] = to_save['description'] or Markup(getattr(meta,\n 'description', '')).unescape()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"For",
"Assign'",
"Assign'"
] |
[
"def FUNC_0(VAR_0, VAR_1, VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = []\n",
"for filename in VAR_1:\n",
"VAR_11 = os.path.join(VAR_0, filename)\n",
"return sorted(VAR_6, key=lambda v: v['name'].lower())\n",
"if VAR_2(VAR_11):\n",
"VAR_6.append({'name': filename, 'absname': VAR_11})\n"
] | [
"def get_file_list(basedir, all_files, filterfunc):...\n",
"\"\"\"docstring\"\"\"\n",
"items = []\n",
"for filename in all_files:\n",
"absname = os.path.join(basedir, filename)\n",
"return sorted(items, key=lambda v: v['name'].lower())\n",
"if filterfunc(absname):\n",
"items.append({'name': filename, 'absname': absname})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Assign'",
"Return'",
"Condition",
"Expr'"
] |
[
"@staticmethod...\n",
"VAR_27, VAR_38 = utils.execute('openssl', 'rsautl', '-decrypt', '-inkey', \n '%s' % VAR_20, process_input=encrypted_key, check_exit_code=False)\n",
"if VAR_38:\n",
"VAR_39, VAR_38 = utils.execute('openssl', 'rsautl', '-decrypt', '-inkey', \n '%s' % VAR_20, process_input=encrypted_iv, check_exit_code=False)\n",
"if VAR_38:\n",
"VAR_40, VAR_38 = utils.execute('openssl', 'enc', '-d', '-aes-128-cbc',\n '-in', '%s' % (VAR_17,), '-K', '%s' % (VAR_27,), '-iv', '%s' % (VAR_39,\n ), '-out', '%s' % (VAR_21,), check_exit_code=False)\n",
"if VAR_38:\n"
] | [
"@staticmethod...\n",
"key, err = utils.execute('openssl', 'rsautl', '-decrypt', '-inkey', '%s' %\n cloud_private_key, process_input=encrypted_key, check_exit_code=False)\n",
"if err:\n",
"iv, err = utils.execute('openssl', 'rsautl', '-decrypt', '-inkey', '%s' %\n cloud_private_key, process_input=encrypted_iv, check_exit_code=False)\n",
"if err:\n",
"_out, err = utils.execute('openssl', 'enc', '-d', '-aes-128-cbc', '-in', \n '%s' % (encrypted_filename,), '-K', '%s' % (key,), '-iv', '%s' % (iv,),\n '-out', '%s' % (decrypted_filename,), check_exit_code=False)\n",
"if err:\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition"
] |
[
"@VAR_0.simple_tag...\n",
"VAR_12 = {'template_pack': VAR_10} if VAR_10 else {}\n",
"VAR_40 = HTMLFormRenderer()\n",
"return VAR_40.render(VAR_9.data, None, {'style': VAR_12})\n"
] | [
"@register.simple_tag...\n",
"style = {'template_pack': template_pack} if template_pack else {}\n",
"renderer = HTMLFormRenderer()\n",
"return renderer.render(serializer.data, None, {'style': style})\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_24(VAR_14, VAR_31):...\n",
"from babel.messages.pofile import read_po\n",
"from octoprint.util import dict_merge\n",
"VAR_83 = {}\n",
"VAR_84 = None\n",
"def FUNC_34(VAR_9, VAR_14, VAR_31):...\n",
"VAR_83 = {}\n",
"VAR_85 = read_po(FUNC_47, VAR_14=locale, VAR_31=domain)\n",
"for message in VAR_85:\n",
"VAR_141 = message.id\n",
"return VAR_83, VAR_85.plural_expr\n",
"if isinstance(VAR_141, (list, tuple)):\n",
"VAR_141 = VAR_141[0]\n",
"if message.string:\n",
"VAR_83[VAR_141] = message.string\n"
] | [
"def _get_translations(locale, domain):...\n",
"from babel.messages.pofile import read_po\n",
"from octoprint.util import dict_merge\n",
"messages = {}\n",
"plural_expr = None\n",
"def messages_from_po(path, locale, domain):...\n",
"messages = {}\n",
"catalog = read_po(f, locale=locale, domain=domain)\n",
"for message in catalog:\n",
"message_id = message.id\n",
"return messages, catalog.plural_expr\n",
"if isinstance(message_id, (list, tuple)):\n",
"message_id = message_id[0]\n",
"if message.string:\n",
"messages[message_id] = message.string\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Condition",
"Assign'"
] |
[
"def FUNC_4():...\n",
"VAR_39 = FUNC_3(CLASS_4, VAR_9=False)\n",
"VAR_39.types['python'] = PythonExpr\n",
"return VAR_39\n"
] | [
"def createTrustedZopeEngine():...\n",
"e = createZopeEngine(TrustedZopePathExpr, untrusted=False)\n",
"e.types['python'] = PythonExpr\n",
"return e\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_83(VAR_9, VAR_10):...\n",
""
] | [
"def wrapped(request, course_id):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"@login_required...\n",
"VAR_5 = get_object_or_404(CommentLike, VAR_2=pk, user=request.user)\n",
"if is_post(VAR_0):\n",
"VAR_5.delete()\n",
"return render(VAR_0=request, template_name=\n 'spirit/comment/like/delete.html', context={'like': like})\n",
"VAR_5.comment.decrease_likes_count()\n",
"if is_ajax(VAR_0):\n",
"VAR_6 = reverse('spirit:comment:like:create', kwargs={'comment_id': like.\n comment.pk})\n",
"return redirect(VAR_0.POST.get('next', VAR_5.comment.get_absolute_url()))\n",
"return json_response({'url_create': VAR_6})\n"
] | [
"@login_required...\n",
"like = get_object_or_404(CommentLike, pk=pk, user=request.user)\n",
"if is_post(request):\n",
"like.delete()\n",
"return render(request=request, template_name=\n 'spirit/comment/like/delete.html', context={'like': like})\n",
"like.comment.decrease_likes_count()\n",
"if is_ajax(request):\n",
"url = reverse('spirit:comment:like:create', kwargs={'comment_id': like.\n comment.pk})\n",
"return redirect(request.POST.get('next', like.comment.get_absolute_url()))\n",
"return json_response({'url_create': url})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
4,
0
] | [
"Condition",
"Assign'",
"Condition",
"Expr'",
"Return'",
"Expr'",
"Condition",
"Assign'",
"Return'",
"Return'"
] |
[
"def FUNC_7(self, VAR_22: bytes, VAR_23: bytes, VAR_24: Optional[Headers]=...\n",
"VAR_43 = urllib.parse.urlparse(VAR_23.decode('ascii'))\n",
"VAR_7 = IPAddress(VAR_43.hostname)\n",
"return self._agent.request(VAR_22, VAR_23, VAR_24=headers, VAR_25=bodyProducer)\n",
"if FUNC_0(VAR_7, self._ip_whitelist, self._ip_blacklist):\n",
"VAR_0.info('Blocking access to %s due to blacklist' % (VAR_7,))\n",
"VAR_58 = SynapseError(403, 'IP address blocked by IP blacklist entry')\n",
"return defer.fail(Failure(VAR_58))\n"
] | [
"def request(self, method: bytes, uri: bytes, headers: Optional[Headers]=...\n",
"h = urllib.parse.urlparse(uri.decode('ascii'))\n",
"ip_address = IPAddress(h.hostname)\n",
"return self._agent.request(method, uri, headers=headers, bodyProducer=\n bodyProducer)\n",
"if check_against_blacklist(ip_address, self._ip_whitelist, self._ip_blacklist):\n",
"logger.info('Blocking access to %s due to blacklist' % (ip_address,))\n",
"e = SynapseError(403, 'IP address blocked by IP blacklist entry')\n",
"return defer.fail(Failure(e))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Return'",
"Condition",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_8(VAR_8):...\n",
"VAR_13 = current_org._get_current_object()\n",
"VAR_21 = None\n",
"if org_settings['auth_jwt_auth_cookie_name']:\n",
"VAR_28 = VAR_8.cookies.get(org_settings['auth_jwt_auth_cookie_name'], None)\n",
"if org_settings['auth_jwt_auth_header_name']:\n",
"if VAR_28:\n",
"VAR_28 = VAR_8.headers.get(org_settings['auth_jwt_auth_header_name'], None)\n",
"return None\n",
"VAR_21, VAR_29 = jwt_auth.verify_jwt_token(VAR_28, expected_issuer=\n org_settings['auth_jwt_auth_issuer'], expected_audience=org_settings[\n 'auth_jwt_auth_audience'], algorithms=org_settings[\n 'auth_jwt_auth_algorithms'], public_certs_url=org_settings[\n 'auth_jwt_auth_public_certs_url'])\n",
"if not VAR_21:\n",
"if not VAR_29:\n",
"return\n",
"VAR_12 = models.User.get_by_email_and_org(VAR_21['email'], VAR_13)\n",
"VAR_12 = FUNC_13(current_org, VAR_21['email'], VAR_21['email'])\n",
"return VAR_12\n"
] | [
"def jwt_token_load_user_from_request(request):...\n",
"org = current_org._get_current_object()\n",
"payload = None\n",
"if org_settings['auth_jwt_auth_cookie_name']:\n",
"jwt_token = request.cookies.get(org_settings['auth_jwt_auth_cookie_name'], None\n )\n",
"if org_settings['auth_jwt_auth_header_name']:\n",
"if jwt_token:\n",
"jwt_token = request.headers.get(org_settings['auth_jwt_auth_header_name'], None\n )\n",
"return None\n",
"payload, token_is_valid = jwt_auth.verify_jwt_token(jwt_token,\n expected_issuer=org_settings['auth_jwt_auth_issuer'], expected_audience\n =org_settings['auth_jwt_auth_audience'], algorithms=org_settings[\n 'auth_jwt_auth_algorithms'], public_certs_url=org_settings[\n 'auth_jwt_auth_public_certs_url'])\n",
"if not payload:\n",
"if not token_is_valid:\n",
"return\n",
"user = models.User.get_by_email_and_org(payload['email'], org)\n",
"user = create_and_login_user(current_org, payload['email'], payload['email'])\n",
"return user\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Return'"
] |
[
"async def FUNC_4(VAR_5, VAR_6, VAR_7, VAR_8=None, VAR_9=0):...\n",
"if VAR_6.startswith('/_matrix/federation/v1/get_missing_events/'):\n",
"return {'events': []}\n"
] | [
"async def post_json(destination, path, data, headers=None, timeout=0):...\n",
"if path.startswith('/_matrix/federation/v1/get_missing_events/'):\n",
"return {'events': []}\n"
] | [
0,
0,
0
] | [
"AsyncFunctionDef'",
"Condition",
"Return'"
] |
[
"def FUNC_34(self):...\n",
"self.parser = saved_model_cli.create_parser()\n",
"VAR_9 = test.test_src_dir_path(VAR_0)\n",
"VAR_10 = self.parser.parse_args(['scan', '--dir', VAR_9])\n",
"saved_model_cli.scan(VAR_10)\n",
"VAR_11 = out.getvalue().strip()\n",
"self.assertTrue('does not contain denylisted ops' in VAR_11)\n"
] | [
"def testScanCommand(self):...\n",
"self.parser = saved_model_cli.create_parser()\n",
"base_path = test.test_src_dir_path(SAVED_MODEL_PATH)\n",
"args = self.parser.parse_args(['scan', '--dir', base_path])\n",
"saved_model_cli.scan(args)\n",
"output = out.getvalue().strip()\n",
"self.assertTrue('does not contain denylisted ops' in output)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_1(self):...\n",
"super(CLASS_0, self).setUp()\n",
"if platform.system() == 'Windows':\n",
"self.skipTest('Skipping failing tests on Windows.')\n"
] | [
"def setUp(self):...\n",
"super(SavedModelCLITestCase, self).setUp()\n",
"if platform.system() == 'Windows':\n",
"self.skipTest('Skipping failing tests on Windows.')\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"For",
"Expr'"
] |
[
"@app.route('/bookmarks/new', methods=['GET', 'POST'])...\n",
"VAR_8 = app.config.get('DEFAULT_BOOKMARKS_DIR', 'root directory')\n",
"VAR_9 = forms.NewBookmarkForm(VAR_7=default_dir)\n",
"VAR_9.path.choices = [('', 'root directory')] + [(pathname, pathname) for\n pathname in data.get_dirs()]\n",
"if VAR_9.validate_on_submit():\n",
"VAR_7 = VAR_9.path.data\n",
"VAR_9.url.data = request.args.get('url', '')\n",
"VAR_10 = VAR_9.tags.data.split(',') if VAR_9.tags.data != '' else []\n",
"VAR_7 = request.args.get('path', VAR_8).strip('/')\n",
"VAR_10 = [tag.strip() for tag in VAR_10]\n",
"VAR_9.path.data = VAR_7\n",
"VAR_27 = DataObj(url=form.url.data, VAR_10=tags, VAR_7=path, type='bookmark')\n",
"return render_template('dataobjs/new.html', title='New Bookmark', VAR_9=form)\n",
"VAR_27.process_bookmark_url()\n",
"VAR_28 = VAR_27.insert()\n",
"if VAR_28:\n",
"flash('Bookmark Saved!', 'success')\n",
"flash(VAR_27.error, 'error')\n",
"return redirect(f'/dataobj/{VAR_28}')\n",
"return redirect('/bookmarks/new')\n"
] | [
"@app.route('/bookmarks/new', methods=['GET', 'POST'])...\n",
"default_dir = app.config.get('DEFAULT_BOOKMARKS_DIR', 'root directory')\n",
"form = forms.NewBookmarkForm(path=default_dir)\n",
"form.path.choices = [('', 'root directory')] + [(pathname, pathname) for\n pathname in data.get_dirs()]\n",
"if form.validate_on_submit():\n",
"path = form.path.data\n",
"form.url.data = request.args.get('url', '')\n",
"tags = form.tags.data.split(',') if form.tags.data != '' else []\n",
"path = request.args.get('path', default_dir).strip('/')\n",
"tags = [tag.strip() for tag in tags]\n",
"form.path.data = path\n",
"bookmark = DataObj(url=form.url.data, tags=tags, path=path, type='bookmark')\n",
"return render_template('dataobjs/new.html', title='New Bookmark', form=form)\n",
"bookmark.process_bookmark_url()\n",
"bookmark_id = bookmark.insert()\n",
"if bookmark_id:\n",
"flash('Bookmark Saved!', 'success')\n",
"flash(bookmark.error, 'error')\n",
"return redirect(f'/dataobj/{bookmark_id}')\n",
"return redirect('/bookmarks/new')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Return'",
"Return'"
] |
[
"@login_required...\n",
"VAR_6 = TopicNotification.objects.for_access(VAR_0.user).filter(is_read=False\n ).with_related_data()\n",
"VAR_7 = paginate(VAR_0, query_set=notifications, lookup_field='date',\n page_var='p', per_page=settings.ST_NOTIFICATIONS_PER_PAGE)\n",
"return render(VAR_0=request, template_name=\n 'spirit/topic/notification/index_unread.html', context={'page': page,\n 'next_page': to_page_key(**page.next_page())})\n"
] | [
"@login_required...\n",
"notifications = TopicNotification.objects.for_access(request.user).filter(\n is_read=False).with_related_data()\n",
"page = paginate(request, query_set=notifications, lookup_field='date',\n page_var='p', per_page=settings.ST_NOTIFICATIONS_PER_PAGE)\n",
"return render(request=request, template_name=\n 'spirit/topic/notification/index_unread.html', context={'page': page,\n 'next_page': to_page_key(**page.next_page())})\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_52(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._send_labelled_messages_in_room()\n",
"VAR_39 = 's0_0_0_0_0_0_0_0_0'\n",
"VAR_22, VAR_23 = self.make_request('GET', \n '/rooms/%s/messages?access_token=%s&from=%s&filter=%s' % (self.room_id,\n self.tok, VAR_39, json.dumps(self.FILTER_NOT_LABELS)))\n",
"VAR_60 = VAR_23.json_body['chunk']\n",
"self.assertEqual(len(VAR_60), 4, [event['content'] for event in VAR_60])\n",
"self.assertEqual(VAR_60[0]['content']['body'], 'without label', VAR_60[0])\n",
"self.assertEqual(VAR_60[1]['content']['body'], 'without label', VAR_60[1])\n",
"self.assertEqual(VAR_60[2]['content']['body'], 'with wrong label', VAR_60[2])\n",
"self.assertEqual(VAR_60[3]['content']['body'], 'with two wrong labels',\n VAR_60[3])\n"
] | [
"def test_messages_filter_not_labels(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._send_labelled_messages_in_room()\n",
"token = 's0_0_0_0_0_0_0_0_0'\n",
"request, channel = self.make_request('GET', \n '/rooms/%s/messages?access_token=%s&from=%s&filter=%s' % (self.room_id,\n self.tok, token, json.dumps(self.FILTER_NOT_LABELS)))\n",
"events = channel.json_body['chunk']\n",
"self.assertEqual(len(events), 4, [event['content'] for event in events])\n",
"self.assertEqual(events[0]['content']['body'], 'without label', events[0])\n",
"self.assertEqual(events[1]['content']['body'], 'without label', events[1])\n",
"self.assertEqual(events[2]['content']['body'], 'with wrong label', events[2])\n",
"self.assertEqual(events[3]['content']['body'], 'with two wrong labels',\n events[3])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_9(self):...\n",
"self.folder.laf.write(util.read_input('TeeShopLAF.html'))\n",
"self.assert_expected(self.folder.t, 'TeeShop1.html', getProducts=self.\n getProducts)\n"
] | [
"def test_3(self):...\n",
"self.folder.laf.write(util.read_input('TeeShopLAF.html'))\n",
"self.assert_expected(self.folder.t, 'TeeShop1.html', getProducts=self.\n getProducts)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"def FUNC_27(self, VAR_90=None, VAR_91=True, VAR_66=True, VAR_88='_token'):...\n",
"\"\"\"docstring\"\"\"\n",
"def FUNC_153(VAR_114):...\n",
"def FUNC_118(*VAR_11, **VAR_351):...\n",
"VAR_85 = self.get_jwt_token_from_request(VAR_88=token_param)\n",
"if VAR_91:\n",
"if VAR_85 and len(VAR_85) < self.max_header_length:\n",
"VAR_85 = None\n",
"VAR_447 = self.verify_expiration\n",
"return VAR_114(*VAR_11, **kwargs)\n",
"self.verify_expiration = VAR_66\n",
"self.verify_expiration = VAR_447\n",
"self.inject_token(VAR_89)\n",
"VAR_89 = self.load_token(VAR_85)\n"
] | [
"def allows_jwt(self, otherwise=None, required=True, verify_expiration=True,...\n",
"\"\"\"docstring\"\"\"\n",
"def decorator(action):...\n",
"def f(*args, **kwargs):...\n",
"token = self.get_jwt_token_from_request(token_param=token_param)\n",
"if required:\n",
"if token and len(token) < self.max_header_length:\n",
"token = None\n",
"old_verify_expiration = self.verify_expiration\n",
"return action(*args, **kwargs)\n",
"self.verify_expiration = verify_expiration\n",
"self.verify_expiration = old_verify_expiration\n",
"self.inject_token(tokend)\n",
"tokend = self.load_token(token)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Assign'"
] |
[
"def FUNC_8(self, VAR_20, VAR_13, VAR_21=None, VAR_22=None, **VAR_7):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_17 = VAR_13\n",
"VAR_106 = VAR_17.get_target_plurals()\n",
"VAR_59 = VAR_17.translation.language\n",
"VAR_107 = VAR_17.translation.plural\n",
"VAR_108 = self.attrs['tabindex']\n",
"VAR_109 = [hl[2] for hl in highlight_string(VAR_17.source_string, VAR_17)]\n",
"VAR_21['class'] = 'translation-editor form-control highlight-editor'\n",
"VAR_21['tabindex'] = VAR_108\n",
"VAR_21['lang'] = VAR_59.code\n",
"VAR_21['dir'] = VAR_59.direction\n",
"VAR_21['rows'] = 3\n",
"VAR_21['data-max'] = VAR_17.get_max_length()\n",
"VAR_21['data-mode'] = VAR_17.edit_mode\n",
"VAR_21['data-placeables'] = '|'.join(re.escape(pl) for pl in VAR_109 if pl)\n",
"if VAR_17.readonly:\n",
"VAR_21['readonly'] = 1\n",
"VAR_110 = []\n",
"VAR_111 = VAR_17.get_source_plurals()\n",
"VAR_112 = f'id_{VAR_17.checksum}'\n",
"for VAR_18, val in enumerate(VAR_106):\n",
"VAR_15 = f'{VAR_20}_{VAR_18}'\n",
"if len(VAR_106) > 1:\n",
"VAR_132 = f'{VAR_112}_{VAR_18}'\n",
"VAR_110.append(render_to_string('snippets/plural-formula.html', {'plural':\n VAR_107, 'user': self.profile.user}))\n",
"return mark_safe(''.join(VAR_110))\n",
"VAR_21['id'] = VAR_132\n",
"VAR_21['tabindex'] = VAR_108 + VAR_18\n",
"if VAR_18 and len(VAR_111) > 1:\n",
"VAR_19 = VAR_111[1]\n",
"VAR_19 = VAR_111[0]\n",
"VAR_133 = super().render(VAR_15, val, VAR_21, VAR_22, **kwargs)\n",
"VAR_134 = str(VAR_17.translation.language)\n",
"if len(VAR_106) != 1:\n",
"VAR_134 = f'{VAR_134}, {VAR_107.get_plural_label(VAR_18)}'\n",
"VAR_110.append(render_to_string('snippets/editor.html', {'toolbar': self.\n get_toolbar(VAR_59, VAR_132, VAR_17, VAR_18, VAR_19), 'fieldid':\n VAR_132, 'label': mark_safe(VAR_134), 'textarea': VAR_133, 'max_length':\n VAR_21['data-max'], 'length': len(val), 'source_length': len(VAR_19),\n 'rtl_toggle': self.get_rtl_toggle(VAR_59, VAR_132)}))\n"
] | [
"def render(self, name, value, attrs=None, renderer=None, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"unit = value\n",
"values = unit.get_target_plurals()\n",
"lang = unit.translation.language\n",
"plural = unit.translation.plural\n",
"tabindex = self.attrs['tabindex']\n",
"placeables = [hl[2] for hl in highlight_string(unit.source_string, unit)]\n",
"attrs['class'] = 'translation-editor form-control highlight-editor'\n",
"attrs['tabindex'] = tabindex\n",
"attrs['lang'] = lang.code\n",
"attrs['dir'] = lang.direction\n",
"attrs['rows'] = 3\n",
"attrs['data-max'] = unit.get_max_length()\n",
"attrs['data-mode'] = unit.edit_mode\n",
"attrs['data-placeables'] = '|'.join(re.escape(pl) for pl in placeables if pl)\n",
"if unit.readonly:\n",
"attrs['readonly'] = 1\n",
"ret = []\n",
"plurals = unit.get_source_plurals()\n",
"base_id = f'id_{unit.checksum}'\n",
"for idx, val in enumerate(values):\n",
"fieldname = f'{name}_{idx}'\n",
"if len(values) > 1:\n",
"fieldid = f'{base_id}_{idx}'\n",
"ret.append(render_to_string('snippets/plural-formula.html', {'plural':\n plural, 'user': self.profile.user}))\n",
"return mark_safe(''.join(ret))\n",
"attrs['id'] = fieldid\n",
"attrs['tabindex'] = tabindex + idx\n",
"if idx and len(plurals) > 1:\n",
"source = plurals[1]\n",
"source = plurals[0]\n",
"textarea = super().render(fieldname, val, attrs, renderer, **kwargs)\n",
"label = str(unit.translation.language)\n",
"if len(values) != 1:\n",
"label = f'{label}, {plural.get_plural_label(idx)}'\n",
"ret.append(render_to_string('snippets/editor.html', {'toolbar': self.\n get_toolbar(lang, fieldid, unit, idx, source), 'fieldid': fieldid,\n 'label': mark_safe(label), 'textarea': textarea, 'max_length': attrs[\n 'data-max'], 'length': len(val), 'source_length': len(source),\n 'rtl_toggle': self.get_rtl_toggle(lang, fieldid)}))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'"
] |
[
"async def FUNC_5(self, VAR_14: str, VAR_15: str, VAR_16: str, VAR_17: str...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_36 = {'token': VAR_15, 'client_secret': VAR_16, 'sid': VAR_17}\n",
"VAR_37 = (self.hs.config.public_baseurl + \n '_matrix/client/unstable/registration/email/submit_token?%s' % urllib.\n parse.urlencode(VAR_36))\n",
"VAR_38 = {'link': VAR_37}\n",
"await self.send_email(VAR_14, self.email_subjects.email_validation % {\n 'server_name': self.hs.config.server_name}, VAR_38)\n"
] | [
"async def send_registration_mail(self, email_address: str, token: str,...\n",
"\"\"\"docstring\"\"\"\n",
"params = {'token': token, 'client_secret': client_secret, 'sid': sid}\n",
"link = (self.hs.config.public_baseurl + \n '_matrix/client/unstable/registration/email/submit_token?%s' % urllib.\n parse.urlencode(params))\n",
"template_vars = {'link': link}\n",
"await self.send_email(email_address, self.email_subjects.email_validation %\n {'server_name': self.hs.config.server_name}, template_vars)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_14(self):...\n",
"VAR_30 = '@some_other_guy:red'\n",
"VAR_29 = self.helper.create_room_as(VAR_30)\n",
"VAR_31 = '/rooms/%s/members' % VAR_29\n",
"self.helper.invite(VAR_7=room_id, src=room_creator, targ=self.user_id)\n",
"VAR_22, VAR_23 = self.make_request('GET', VAR_31)\n",
"self.assertEquals(403, VAR_23.code, msg=channel.result['body'])\n",
"self.helper.join(VAR_7=room_id, user=self.user_id)\n",
"VAR_22, VAR_23 = self.make_request('GET', VAR_31)\n",
"self.assertEquals(200, VAR_23.code, msg=channel.result['body'])\n",
"self.helper.leave(VAR_7=room_id, user=self.user_id)\n",
"VAR_22, VAR_23 = self.make_request('GET', VAR_31)\n",
"self.assertEquals(200, VAR_23.code, msg=channel.result['body'])\n"
] | [
"def test_get_member_list_mixed_memberships(self):...\n",
"room_creator = '@some_other_guy:red'\n",
"room_id = self.helper.create_room_as(room_creator)\n",
"room_path = '/rooms/%s/members' % room_id\n",
"self.helper.invite(room=room_id, src=room_creator, targ=self.user_id)\n",
"request, channel = self.make_request('GET', room_path)\n",
"self.assertEquals(403, channel.code, msg=channel.result['body'])\n",
"self.helper.join(room=room_id, user=self.user_id)\n",
"request, channel = self.make_request('GET', room_path)\n",
"self.assertEquals(200, channel.code, msg=channel.result['body'])\n",
"self.helper.leave(room=room_id, user=self.user_id)\n",
"request, channel = self.make_request('GET', room_path)\n",
"self.assertEquals(200, channel.code, msg=channel.result['body'])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_0(self, VAR_1):...\n",
"from openapi_python_client.parser.properties import RefProperty\n",
"VAR_25 = VAR_28(VAR_5='test', VAR_26=True, default=None, reference=mocker.\n MagicMock(class_name='MyRefClass'))\n",
"assert VAR_25.get_type_string() == 'MyRefClass'\n",
"VAR_25.required = False\n",
"assert VAR_25.get_type_string() == 'Optional[MyRefClass]'\n"
] | [
"def test_get_type_string(self, mocker):...\n",
"from openapi_python_client.parser.properties import RefProperty\n",
"ref_property = RefProperty(name='test', required=True, default=None,\n reference=mocker.MagicMock(class_name='MyRefClass'))\n",
"assert ref_property.get_type_string() == 'MyRefClass'\n",
"ref_property.required = False\n",
"assert ref_property.get_type_string() == 'Optional[MyRefClass]'\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"ImportFrom'",
"Assign'",
"Assert'",
"Assign'",
"Assert'"
] |
[
"def FUNC_2(VAR_2, VAR_3):...\n",
"if not VAR_2:\n",
"return None\n",
"VAR_6 = VAR_2.identifier, VAR_3\n",
"if VAR_6 not in VAR_0:\n",
"VAR_5 = FUNC_1(VAR_2, VAR_3)\n",
"VAR_5 = VAR_0[VAR_6]\n",
"VAR_0[VAR_6] = VAR_5\n",
"return VAR_5\n"
] | [
"def get_view_by_name(theme, view_name):...\n",
"if not theme:\n",
"return None\n",
"cache_key = theme.identifier, view_name\n",
"if cache_key not in _VIEW_CACHE:\n",
"view = _get_view_by_name(theme, view_name)\n",
"view = _VIEW_CACHE[cache_key]\n",
"_VIEW_CACHE[cache_key] = view\n",
"return view\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@log_function...\n",
"VAR_2 = FUNC_2('/send_join/%s/%s', VAR_6, VAR_7)\n",
"VAR_37 = await self.client.put_json(VAR_5=destination, VAR_2=path, VAR_39=\n content)\n",
"return VAR_37\n"
] | [
"@log_function...\n",
"path = _create_v2_path('/send_join/%s/%s', room_id, event_id)\n",
"response = await self.client.put_json(destination=destination, path=path,\n data=content)\n",
"return response\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"@VAR_1.route('/get_title', methods=['POST'])...\n",
"VAR_11 = urllib2.build_opener()\n",
"VAR_12 = VAR_11.open(VAR_0.url_to_clone).read()\n",
"VAR_12 = VAR_12[VAR_12.find('<title>') + 7:VAR_12.find('</title>')]\n",
"return json.dumps({'status': 'OK', 'title': VAR_12})\n"
] | [
"@app.route('/get_title', methods=['POST'])...\n",
"opener = urllib2.build_opener()\n",
"html = opener.open(trape.url_to_clone).read()\n",
"html = html[html.find('<title>') + 7:html.find('</title>')]\n",
"return json.dumps({'status': 'OK', 'title': html})\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_1(VAR_2, VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_26 = FUNC_10(VAR_2, VAR_3)\n",
"print(\n 'The given SavedModel MetaGraphDef contains SignatureDefs with the following keys:'\n )\n",
"for VAR_5 in sorted(VAR_26.keys()):\n",
"print('SignatureDef key: \"%s\"' % VAR_5)\n"
] | [
"def _show_signature_def_map_keys(saved_model_dir, tag_set):...\n",
"\"\"\"docstring\"\"\"\n",
"signature_def_map = get_signature_def_map(saved_model_dir, tag_set)\n",
"print(\n 'The given SavedModel MetaGraphDef contains SignatureDefs with the following keys:'\n )\n",
"for signature_def_key in sorted(signature_def_map.keys()):\n",
"print('SignatureDef key: \"%s\"' % signature_def_key)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"For",
"Expr'"
] |
[
"async def FUNC_11(self, VAR_23: str, VAR_32: Any, VAR_16: Optional[...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_16:\n",
"VAR_39 = urllib.parse.urlencode(VAR_16, True)\n",
"VAR_48 = encode_canonical_json(VAR_32)\n",
"VAR_23 = '%s?%s' % (VAR_23, VAR_39)\n",
"VAR_46 = {b'Content-Type': [b'application/json'], b'User-Agent': [self.\n user_agent], b'Accept': [b'application/json']}\n",
"if VAR_24:\n",
"VAR_46.update(VAR_24)\n",
"VAR_13 = await self.request('PUT', VAR_23, VAR_24=Headers(actual_headers),\n VAR_30=json_str)\n",
"VAR_47 = await make_deferred_yieldable(readBody(VAR_13))\n",
"if 200 <= VAR_13.code < 300:\n",
"return json_decoder.decode(VAR_47.decode('utf-8'))\n"
] | [
"async def put_json(self, uri: str, json_body: Any, args: Optional[...\n",
"\"\"\"docstring\"\"\"\n",
"if args:\n",
"query_str = urllib.parse.urlencode(args, True)\n",
"json_str = encode_canonical_json(json_body)\n",
"uri = '%s?%s' % (uri, query_str)\n",
"actual_headers = {b'Content-Type': [b'application/json'], b'User-Agent': [\n self.user_agent], b'Accept': [b'application/json']}\n",
"if headers:\n",
"actual_headers.update(headers)\n",
"response = await self.request('PUT', uri, headers=Headers(actual_headers),\n data=json_str)\n",
"body = await make_deferred_yieldable(readBody(response))\n",
"if 200 <= response.code < 300:\n",
"return json_decoder.decode(body.decode('utf-8'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_2(**VAR_5):...\n",
"VAR_27 = dict(python=expressions.PythonExpr, path=TrustedPathExpr)\n",
"VAR_27.update(VAR_5)\n",
"return FUNC_1(VAR_4=False, **ovr)\n"
] | [
"def createTrustedChameleonEngine(**overrides):...\n",
"ovr = dict(python=expressions.PythonExpr, path=TrustedPathExpr)\n",
"ovr.update(overrides)\n",
"return createChameleonEngine(untrusted=False, **ovr)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_14(self, VAR_35):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_35 = self._substitute_comments('', VAR_35)\n",
"VAR_35 = VAR_35.replace('\\\\', '')\n",
"VAR_35 = VAR_5('', VAR_35)\n",
"VAR_35 = VAR_35.lower()\n",
"if 'javascript:' in VAR_35:\n",
"return True\n",
"if 'expression(' in VAR_35:\n",
"return True\n",
"return False\n"
] | [
"def _has_sneaky_javascript(self, style):...\n",
"\"\"\"docstring\"\"\"\n",
"style = self._substitute_comments('', style)\n",
"style = style.replace('\\\\', '')\n",
"style = _substitute_whitespace('', style)\n",
"style = style.lower()\n",
"if 'javascript:' in style:\n",
"return True\n",
"if 'expression(' in style:\n",
"return True\n",
"return False\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"async def FUNC_8(self):...\n",
"VAR_36 = await self.get_resolved_spec()\n",
"return f'https://doi.org/{VAR_36}'\n"
] | [
"async def get_resolved_ref_url(self):...\n",
"resolved_spec = await self.get_resolved_spec()\n",
"return f'https://doi.org/{resolved_spec}'\n"
] | [
0,
0,
0
] | [
"AsyncFunctionDef'",
"Assign'",
"Return'"
] |
[
"async def FUNC_6(*VAR_9, **VAR_10):...\n",
"return None\n"
] | [
"async def _insert_client_ip(*args, **kwargs):...\n",
"return None\n"
] | [
0,
0
] | [
"AsyncFunctionDef'",
"Return'"
] |
[
"def FUNC_88(self):...\n",
"if not isinstance(VAR_263.session['settings_%s' % self.section], dict):\n",
"VAR_222 = dict(self.config.items(self.section))\n",
"VAR_222 = VAR_263.session['settings_%s' % self.section]\n",
"return VAR_222\n"
] | [
"def read(self):...\n",
"if not isinstance(current.session['settings_%s' % self.section], dict):\n",
"settings = dict(self.config.items(self.section))\n",
"settings = current.session['settings_%s' % self.section]\n",
"return settings\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |