lines (sequence lengths 1-383) | raw_lines (sequence lengths 1-383) | label (sequence lengths 1-383) | type (sequence lengths 1-383) |
---|---|---|---|
[
"def FUNC_3(VAR_0, VAR_1: FlaskClient):...\n",
"VAR_5 = VAR_1.post('/bookmarks/new', data={'submit': True})\n",
"assert VAR_5.status_code == 200\n",
"assert b'This field is required' in VAR_5.data\n"
] | [
"def test_post_new_bookmark_missing_fields(test_app, client: FlaskClient):...\n",
"response = client.post('/bookmarks/new', data={'submit': True})\n",
"assert response.status_code == 200\n",
"assert b'This field is required' in response.data\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assert'",
"Assert'"
] |
[
"def __getattr__(self, VAR_46):...\n",
"VAR_205 = self.get(VAR_46)\n",
"if not VAR_205 and VAR_46.startswith('__'):\n",
"return VAR_205\n"
] | [
"def __getattr__(self, key):...\n",
"ret = self.get(key)\n",
"if not ret and key.startswith('__'):\n",
"return ret\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Return'"
] |
[
"def __enter__(self):...\n",
"FUNC_4(self.site)\n",
"return VAR_1\n"
] | [
"def __enter__(self):...\n",
"init(self.site)\n",
"return local\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"VAR_3 = self.client.get('/admin/logout/')\n",
"self.assertEqual(VAR_3.status_code, 200)\n",
"self.assertTrue(SESSION_KEY not in self.client.session)\n"
] | [
"def logout(self):...\n",
"response = self.client.get('/admin/logout/')\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertTrue(SESSION_KEY not in self.client.session)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_20(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_27 = Template(\n '{% load rest_framework %}{% autoescape off %}{{ content|urlize_quoted_links }}{% endautoescape %}'\n )\n",
"VAR_28 = VAR_27.render(Context({'content': '\"http://example.com\"'}))\n",
"assert VAR_28 == '\"<a href=\"http://example.com\" rel=\"nofollow\">http://example.com</a>\"'\n"
] | [
"def test_template_render_with_noautoescape(self):...\n",
"\"\"\"docstring\"\"\"\n",
"template = Template(\n '{% load rest_framework %}{% autoescape off %}{{ content|urlize_quoted_links }}{% endautoescape %}'\n )\n",
"rendered = template.render(Context({'content': '\"http://example.com\"'}))\n",
"assert rendered == '\"<a href=\"http://example.com\" rel=\"nofollow\">http://example.com</a>\"'\n"
] | [
0,
0,
0,
2,
2
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assert'"
] |
[
"def FUNC_4(self):...\n",
"\"\"\"docstring\"\"\"\n",
"def FUNC_15(VAR_3, **VAR_4):...\n",
"VAR_5 = JsonResource(self.homeserver)\n",
"VAR_5.register_paths('GET', [re.compile('^/_matrix/foo$')], FUNC_15,\n 'test_servlet')\n",
"VAR_7, VAR_6 = make_request(self.reactor, FakeSite(VAR_5), b'GET',\n b'/_matrix/foo')\n",
"self.assertEqual(VAR_6.result['code'], b'403')\n",
"self.assertEqual(VAR_6.json_body['error'], 'Forbidden!!one!')\n",
"self.assertEqual(VAR_6.json_body['errcode'], 'M_FORBIDDEN')\n"
] | [
"def test_callback_synapseerror(self):...\n",
"\"\"\"docstring\"\"\"\n",
"def _callback(request, **kwargs):...\n",
"res = JsonResource(self.homeserver)\n",
"res.register_paths('GET', [re.compile('^/_matrix/foo$')], _callback,\n 'test_servlet')\n",
"_, channel = make_request(self.reactor, FakeSite(res), b'GET', b'/_matrix/foo')\n",
"self.assertEqual(channel.result['code'], b'403')\n",
"self.assertEqual(channel.json_body['error'], 'Forbidden!!one!')\n",
"self.assertEqual(channel.json_body['errcode'], 'M_FORBIDDEN')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_11(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_9, VAR_15 = self.make_request('PUT', '/profile/%s/displayname' % (self.\n owner,), content=json.dumps({'displayname': 'test' * 100}), VAR_7=self.\n owner_tok)\n",
"self.assertEqual(VAR_15.code, 400, VAR_15.result)\n",
"VAR_16 = self.get_displayname()\n",
"self.assertEqual(VAR_16, 'owner')\n"
] | [
"def test_set_displayname_too_long(self):...\n",
"\"\"\"docstring\"\"\"\n",
"request, channel = self.make_request('PUT', '/profile/%s/displayname' % (\n self.owner,), content=json.dumps({'displayname': 'test' * 100}),\n access_token=self.owner_tok)\n",
"self.assertEqual(channel.code, 400, channel.result)\n",
"res = self.get_displayname()\n",
"self.assertEqual(res, 'owner')\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_58(VAR_44):...\n",
"return [VAR_221 for VAR_221 in VAR_44 if not (VAR_221[:1] in '#' or VAR_221\n .endswith('~') or VAR_221.endswith('.bak'))]\n"
] | [
"def ignore(fs):...\n",
"return [f for f in fs if not (f[:1] in '#' or f.endswith('~') or f.endswith\n ('.bak'))]\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@VAR_2.route('/robots.txt')...\n",
"return send_from_directory(constants.STATIC_DIR, 'robots.txt')\n"
] | [
"@web.route('/robots.txt')...\n",
"return send_from_directory(constants.STATIC_DIR, 'robots.txt')\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_72(self, VAR_43=None):...\n",
"if not VAR_43:\n",
"return self.get('__onload', frappe._dict())\n",
"return self.get('__onload')[VAR_43]\n"
] | [
"def get_onload(self, key=None):...\n",
"if not key:\n",
"return self.get('__onload', frappe._dict())\n",
"return self.get('__onload')[key]\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_10(VAR_2, VAR_10, VAR_5=None, VAR_7=None, **VAR_6):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2.session.modified = True\n",
"VAR_54 = VAR_6.get('template', None)\n",
"if VAR_54 is None:\n",
"if VAR_10 == 'userdata':\n",
"VAR_59 = VAR_6.get('show', Show(VAR_5, VAR_2, VAR_10))\n",
"VAR_54 = 'webclient/data/containers.html'\n",
"if VAR_10 == 'usertags':\n",
"VAR_242 = VAR_59.first_selected\n",
"return HttpResponseRedirect(e.uri)\n",
"VAR_60 = VAR_59.initially_open_owner\n",
"VAR_54 = 'webclient/data/containers.html'\n",
"VAR_54 = 'webclient/%s/%s.html' % (VAR_10, VAR_10)\n",
"if VAR_2.GET.get('show', None) is not None and VAR_242 is None:\n",
"if settings.PUBLIC_ENABLED and settings.PUBLIC_USER == VAR_5.getUser(\n",
"if VAR_242 is not None:\n",
"return HttpResponseRedirect('%s?url=%s' % (VAR_350('weblogin'), VAR_7))\n",
"FUNC_7(VAR_2, VAR_242.details.group.id.val)\n",
"VAR_61 = {}\n",
"VAR_62 = GlobalSearchForm(VAR_158=request.GET.copy())\n",
"if VAR_10 == 'search':\n",
"if VAR_62.is_valid():\n",
"VAR_7 = VAR_6.get('load_template_url', None)\n",
"VAR_61['query'] = VAR_62.cleaned_data['search_query']\n",
"if VAR_7 is None:\n",
"VAR_7 = VAR_350(viewname='load_template', args=[menu])\n",
"VAR_8 = VAR_2.session.get('active_group') or VAR_5.getEventContext().groupId\n",
"VAR_63, VAR_64 = VAR_5.getObject('ExperimenterGroup', VAR_8).groupSummary()\n",
"VAR_65 = [u.id for u in VAR_63]\n",
"VAR_65.extend([u.id for u in VAR_64])\n",
"VAR_66 = VAR_2.GET.get('experimenter')\n",
"if VAR_60 is not None:\n",
"if VAR_2.session.get('user_id', None) != -1:\n",
"VAR_66 = VAR_241(VAR_66)\n",
"VAR_66 = None\n",
"if VAR_66 is not None:\n",
"VAR_66 = VAR_60\n",
"if VAR_66 not in set(map(lambda x: x.id, VAR_63)) | set(map(lambda x: x.id,\n",
"if VAR_66 is None:\n",
"VAR_66 = None\n",
"VAR_66 = VAR_2.session.get('user_id', None)\n",
"VAR_2.session['user_id'] = VAR_66\n",
"if VAR_66 is None or int(VAR_66) not in VAR_65:\n",
"VAR_67 = list(VAR_5.getGroupsMemberOf())\n",
"if VAR_66 != -1:\n",
"VAR_67.sort(VAR_310=lambda x: x.getName().lower())\n",
"VAR_66 = VAR_5.getEventContext().userId\n",
"VAR_68 = VAR_67\n",
"VAR_69 = ContainerForm()\n",
"VAR_70 = {}\n",
"if VAR_10 == 'search':\n",
"for g in VAR_68:\n",
"VAR_53 = {'menu': VAR_10, 'init': VAR_61, 'myGroups': VAR_67,\n 'new_container_form': VAR_69, 'global_search_form': VAR_62}\n",
"g.loadLeadersAndMembers()\n",
"VAR_70 = list(VAR_70.values())\n",
"VAR_53['groups'] = VAR_68\n",
"for VAR_383 in (g.leaders + g.colleagues):\n",
"VAR_70.sort(VAR_310=lambda x: x.getLastName().lower())\n",
"VAR_53['myColleagues'] = VAR_70\n",
"VAR_70[VAR_383.id] = VAR_383\n",
"VAR_53['active_group'] = VAR_5.getObject('ExperimenterGroup', VAR_241(VAR_8))\n",
"VAR_53['active_user'] = VAR_5.getObject('Experimenter', VAR_241(VAR_66))\n",
"VAR_53['initially_select'] = VAR_59.initially_select\n",
"VAR_53['initially_open'] = VAR_59.initially_open\n",
"VAR_53['isLeader'] = VAR_5.isLeader()\n",
"VAR_53['current_url'] = VAR_7\n",
"VAR_53['page_size'] = settings.PAGE\n",
"VAR_53['template'] = VAR_54\n",
"VAR_53['thumbnails_batch'] = settings.THUMBNAILS_BATCH\n",
"VAR_53['current_admin_privileges'] = VAR_5.getCurrentAdminPrivileges()\n",
"VAR_53['leader_of_groups'] = VAR_5.getEventContext().leaderOfGroups\n",
"return VAR_53\n"
] | [
"def _load_template(request, menu, conn=None, url=None, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"request.session.modified = True\n",
"template = kwargs.get('template', None)\n",
"if template is None:\n",
"if menu == 'userdata':\n",
"show = kwargs.get('show', Show(conn, request, menu))\n",
"template = 'webclient/data/containers.html'\n",
"if menu == 'usertags':\n",
"first_sel = show.first_selected\n",
"return HttpResponseRedirect(e.uri)\n",
"initially_open_owner = show.initially_open_owner\n",
"template = 'webclient/data/containers.html'\n",
"template = 'webclient/%s/%s.html' % (menu, menu)\n",
"if request.GET.get('show', None) is not None and first_sel is None:\n",
"if settings.PUBLIC_ENABLED and settings.PUBLIC_USER == conn.getUser(\n",
"if first_sel is not None:\n",
"return HttpResponseRedirect('%s?url=%s' % (reverse('weblogin'), url))\n",
"switch_active_group(request, first_sel.details.group.id.val)\n",
"init = {}\n",
"global_search_form = GlobalSearchForm(data=request.GET.copy())\n",
"if menu == 'search':\n",
"if global_search_form.is_valid():\n",
"url = kwargs.get('load_template_url', None)\n",
"init['query'] = global_search_form.cleaned_data['search_query']\n",
"if url is None:\n",
"url = reverse(viewname='load_template', args=[menu])\n",
"active_group = request.session.get('active_group') or conn.getEventContext(\n ).groupId\n",
"leaders, members = conn.getObject('ExperimenterGroup', active_group\n ).groupSummary()\n",
"userIds = [u.id for u in leaders]\n",
"userIds.extend([u.id for u in members])\n",
"user_id = request.GET.get('experimenter')\n",
"if initially_open_owner is not None:\n",
"if request.session.get('user_id', None) != -1:\n",
"user_id = long(user_id)\n",
"user_id = None\n",
"if user_id is not None:\n",
"user_id = initially_open_owner\n",
"if user_id not in set(map(lambda x: x.id, leaders)) | set(map(lambda x: x.\n",
"if user_id is None:\n",
"user_id = None\n",
"user_id = request.session.get('user_id', None)\n",
"request.session['user_id'] = user_id\n",
"if user_id is None or int(user_id) not in userIds:\n",
"myGroups = list(conn.getGroupsMemberOf())\n",
"if user_id != -1:\n",
"myGroups.sort(key=lambda x: x.getName().lower())\n",
"user_id = conn.getEventContext().userId\n",
"groups = myGroups\n",
"new_container_form = ContainerForm()\n",
"myColleagues = {}\n",
"if menu == 'search':\n",
"for g in groups:\n",
"context = {'menu': menu, 'init': init, 'myGroups': myGroups,\n 'new_container_form': new_container_form, 'global_search_form':\n global_search_form}\n",
"g.loadLeadersAndMembers()\n",
"myColleagues = list(myColleagues.values())\n",
"context['groups'] = groups\n",
"for c in (g.leaders + g.colleagues):\n",
"myColleagues.sort(key=lambda x: x.getLastName().lower())\n",
"context['myColleagues'] = myColleagues\n",
"myColleagues[c.id] = c\n",
"context['active_group'] = conn.getObject('ExperimenterGroup', long(\n active_group))\n",
"context['active_user'] = conn.getObject('Experimenter', long(user_id))\n",
"context['initially_select'] = show.initially_select\n",
"context['initially_open'] = show.initially_open\n",
"context['isLeader'] = conn.isLeader()\n",
"context['current_url'] = url\n",
"context['page_size'] = settings.PAGE\n",
"context['template'] = template\n",
"context['thumbnails_batch'] = settings.THUMBNAILS_BATCH\n",
"context['current_admin_privileges'] = conn.getCurrentAdminPrivileges()\n",
"context['leader_of_groups'] = conn.getEventContext().leaderOfGroups\n",
"return context\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Return'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"For",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_38(self):...\n",
"self.register_user('user', 'pass')\n",
"VAR_52 = self.login('user', 'pass')\n",
"VAR_22, VAR_23 = self.make_request('GET', self.url, VAR_16=tok)\n",
"self.assertEqual(VAR_23.code, 200, VAR_23.result)\n"
] | [
"def test_restricted_auth(self):...\n",
"self.register_user('user', 'pass')\n",
"tok = self.login('user', 'pass')\n",
"request, channel = self.make_request('GET', self.url, access_token=tok)\n",
"self.assertEqual(channel.code, 200, channel.result)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_2(self):...\n",
"self.mock_federation.make_query.return_value = make_awaitable({'room_id':\n '!8765qwer:test', 'servers': ['test', 'remote']})\n",
"VAR_10 = self.get_success(self.handler.get_association(self.remote_room))\n",
"self.assertEquals({'room_id': '!8765qwer:test', 'servers': ['test',\n 'remote']}, VAR_10)\n",
"self.mock_federation.make_query.assert_called_with(destination='remote',\n VAR_8='directory', args={'room_alias': '#another:remote'},\n retry_on_dns_fail=False, ignore_backoff=True)\n"
] | [
"def test_get_remote_association(self):...\n",
"self.mock_federation.make_query.return_value = make_awaitable({'room_id':\n '!8765qwer:test', 'servers': ['test', 'remote']})\n",
"result = self.get_success(self.handler.get_association(self.remote_room))\n",
"self.assertEquals({'room_id': '!8765qwer:test', 'servers': ['test',\n 'remote']}, result)\n",
"self.mock_federation.make_query.assert_called_with(destination='remote',\n query_type='directory', args={'room_alias': '#another:remote'},\n retry_on_dns_fail=False, ignore_backoff=True)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"@VAR_2.before_app_request...\n",
"if VAR_62.is_authenticated:\n",
"confirm_login()\n",
"if not ub.check_user_session(VAR_62.id, flask_session.get('_id')\n",
"logout_user()\n",
"g.constants = constants\n",
"g.user = VAR_62\n",
"g.allow_registration = config.config_public_reg\n",
"g.allow_anonymous = config.config_anonbrowse\n",
"g.allow_upload = config.config_uploading\n",
"g.current_theme = config.config_theme\n",
"g.config_authors_max = config.config_authors_max\n",
"g.shelves_access = ub.session.query(ub.Shelf).filter(or_(ub.Shelf.is_public ==\n 1, ub.Shelf.user_id == VAR_62.id)).order_by(ub.Shelf.name).all()\n",
"if '/static/' not in request.path and not config.db_configured and request.endpoint not in (\n",
"return redirect(url_for('admin.db_configuration'))\n"
] | [
"@admi.before_app_request...\n",
"if current_user.is_authenticated:\n",
"confirm_login()\n",
"if not ub.check_user_session(current_user.id, flask_session.get('_id')\n",
"logout_user()\n",
"g.constants = constants\n",
"g.user = current_user\n",
"g.allow_registration = config.config_public_reg\n",
"g.allow_anonymous = config.config_anonbrowse\n",
"g.allow_upload = config.config_uploading\n",
"g.current_theme = config.config_theme\n",
"g.config_authors_max = config.config_authors_max\n",
"g.shelves_access = ub.session.query(ub.Shelf).filter(or_(ub.Shelf.is_public ==\n 1, ub.Shelf.user_id == current_user.id)).order_by(ub.Shelf.name).all()\n",
"if '/static/' not in request.path and not config.db_configured and request.endpoint not in (\n",
"return redirect(url_for('admin.db_configuration'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"For",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'"
] |
[
"def __init__(self, *VAR_3, **VAR_4):...\n",
"super().__init__(**kwargs)\n",
"self.hello = VAR_4.get('test_kwarg')\n"
] | [
"def __init__(self, *args, **kwargs):...\n",
"super().__init__(**kwargs)\n",
"self.hello = kwargs.get('test_kwarg')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'"
] |
[
"@expose('/logout/')...\n",
"logout_user()\n",
"return redirect(self.appbuilder.get_url_for_index)\n"
] | [
"@expose('/logout/')...\n",
"logout_user()\n",
"return redirect(self.appbuilder.get_url_for_index)\n"
] | [
0,
0,
0
] | [
"Condition",
"Expr'",
"Return'"
] |
[
"def FUNC_14(VAR_25):...\n",
"\"\"\"docstring\"\"\"\n",
"return '\"%s\"' % VAR_25.replace('\\\\', '\\\\\\\\').replace('\"', '\\\\\"')\n"
] | [
"def quote_etag(etag):...\n",
"\"\"\"docstring\"\"\"\n",
"return '\"%s\"' % etag.replace('\\\\', '\\\\\\\\').replace('\"', '\\\\\"')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"@VAR_0.filter...\n",
"if isinstance(VAR_2, QuerySet):\n",
"VAR_7 = serialize('json', VAR_2, VAR_3=indent)\n",
"VAR_7 = json.dumps(VAR_2, VAR_3=indent, cls=DjbletsJSONEncoder)\n",
"return mark_safe(VAR_7)\n"
] | [
"@register.filter...\n",
"if isinstance(value, QuerySet):\n",
"result = serialize('json', value, indent=indent)\n",
"result = json.dumps(value, indent=indent, cls=DjbletsJSONEncoder)\n",
"return mark_safe(result)\n"
] | [
0,
0,
0,
0,
2
] | [
"Condition",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_9(*VAR_15):...\n",
"return defer.succeed(None)\n"
] | [
"def get_received_txn_response(*args):...\n",
"return defer.succeed(None)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@parameterized.named_parameters(('non_tfrt', False))...\n",
"self.parser = saved_model_cli.create_parser()\n",
"VAR_10 = test.test_src_dir_path(VAR_0)\n",
"VAR_11 = self.parser.parse_args(['run', '--dir', VAR_10, '--tag_set',\n 'serve', '--signature_def', 'regress_x2_to_y3', '--input_exprs',\n 'x2=np.ones((3,1))'] + (['--use_tfrt'] if VAR_5 else []))\n",
"saved_model_cli.run(VAR_11)\n"
] | [
"@parameterized.named_parameters(('non_tfrt', False))...\n",
"self.parser = saved_model_cli.create_parser()\n",
"base_path = test.test_src_dir_path(SAVED_MODEL_PATH)\n",
"args = self.parser.parse_args(['run', '--dir', base_path, '--tag_set',\n 'serve', '--signature_def', 'regress_x2_to_y3', '--input_exprs',\n 'x2=np.ones((3,1))'] + (['--use_tfrt'] if use_tfrt else []))\n",
"saved_model_cli.run(args)\n"
] | [
0,
0,
0,
5,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_0(self):...\n",
"VAR_6 = Mock()\n",
"VAR_3 = '@foo:bar'\n",
"VAR_7 = 5000000\n",
"VAR_8 = UserPresenceState.default(VAR_3)\n",
"VAR_9 = VAR_8.copy_and_replace(VAR_10=PresenceState.ONLINE, last_active_ts=now)\n",
"VAR_10, VAR_11, VAR_12 = handle_update(VAR_8, VAR_9, is_mine=True, VAR_6=\n wheel_timer, VAR_7=now)\n",
"self.assertTrue(VAR_11)\n",
"self.assertTrue(VAR_10.currently_active)\n",
"self.assertEquals(VAR_9.state, VAR_10.state)\n",
"self.assertEquals(VAR_9.status_msg, VAR_10.status_msg)\n",
"self.assertEquals(VAR_10.last_federation_update_ts, VAR_7)\n",
"self.assertEquals(VAR_6.insert.call_count, 3)\n",
"VAR_6.insert.assert_has_calls([call(VAR_7=now, obj=user_id, then=new_state.\n last_active_ts + IDLE_TIMER), call(VAR_7=now, obj=user_id, then=\n new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT), call(VAR_7=now, obj\n =user_id, then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY)],\n any_order=True)\n"
] | [
"def test_offline_to_online(self):...\n",
"wheel_timer = Mock()\n",
"user_id = '@foo:bar'\n",
"now = 5000000\n",
"prev_state = UserPresenceState.default(user_id)\n",
"new_state = prev_state.copy_and_replace(state=PresenceState.ONLINE,\n last_active_ts=now)\n",
"state, persist_and_notify, federation_ping = handle_update(prev_state,\n new_state, is_mine=True, wheel_timer=wheel_timer, now=now)\n",
"self.assertTrue(persist_and_notify)\n",
"self.assertTrue(state.currently_active)\n",
"self.assertEquals(new_state.state, state.state)\n",
"self.assertEquals(new_state.status_msg, state.status_msg)\n",
"self.assertEquals(state.last_federation_update_ts, now)\n",
"self.assertEquals(wheel_timer.insert.call_count, 3)\n",
"wheel_timer.insert.assert_has_calls([call(now=now, obj=user_id, then=\n new_state.last_active_ts + IDLE_TIMER), call(now=now, obj=user_id, then\n =new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT), call(now=now, obj=\n user_id, then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY)],\n any_order=True)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_40(self, VAR_128, VAR_129=['email'], VAR_33=True, VAR_130=True):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_254 = self.table_user()\n",
"VAR_255 = self.settings.cas_create_user\n",
"VAR_141 = None\n",
"VAR_256 = []\n",
"VAR_257 = ['registration_id', 'username', 'email']\n",
"if self.settings.login_userfield:\n",
"VAR_257.append(self.settings.login_userfield)\n",
"for VAR_385 in VAR_257:\n",
"if VAR_385 in VAR_254.fields() and VAR_128.get(VAR_385, None):\n",
"if not VAR_256:\n",
"VAR_256.append(VAR_385)\n",
"return None\n",
"if 'registration_id' not in VAR_128:\n",
"VAR_179 = VAR_128[VAR_385]\n",
"VAR_128['registration_id'] = VAR_128[VAR_256[0]]\n",
"if 'registration_id' in VAR_256 and VAR_141 and VAR_141.registration_id and (\n",
"VAR_141 = VAR_254(**{fieldname: value})\n",
"VAR_141 = None\n",
"if VAR_141:\n",
"if VAR_141:\n",
"if not VAR_130:\n",
"if VAR_256:\n",
"return None\n",
"VAR_362 = dict(registration_id=keys['registration_id'])\n",
"if VAR_255 is False:\n",
"return VAR_141\n",
"for VAR_199 in VAR_129:\n",
"self.logout(VAR_112=None, VAR_140=None, VAR_136=None)\n",
"if 'first_name' not in VAR_128 and 'first_name' in VAR_254.fields:\n",
"if VAR_199 in VAR_128:\n",
"VAR_141.update_record(**update_keys)\n",
"VAR_448 = VAR_128.get('email', 'anonymous').split('@')[0]\n",
"VAR_96 = VAR_254._filter_fields(VAR_128)\n",
"VAR_362[VAR_199] = VAR_128[VAR_199]\n",
"VAR_128['first_name'] = VAR_128.get('username', VAR_448)\n",
"VAR_143 = VAR_254.insert(**vars)\n",
"VAR_141 = VAR_254[VAR_143]\n",
"if self.settings.create_user_groups:\n",
"VAR_147 = self.add_group(self.settings.create_user_groups % VAR_141)\n",
"if self.settings.everybody_group_id:\n",
"self.add_membership(VAR_147, VAR_143)\n",
"self.add_membership(self.settings.everybody_group_id, VAR_143)\n",
"if VAR_33:\n",
"self.user = VAR_141\n",
"if self.settings.register_onaccept:\n",
"VAR_26(self.settings.register_onaccept, Storage(VAR_96=user))\n"
] | [
"def get_or_create_user(self, keys, update_fields=['email'], login=True, get...\n",
"\"\"\"docstring\"\"\"\n",
"table_user = self.table_user()\n",
"create_user = self.settings.cas_create_user\n",
"user = None\n",
"checks = []\n",
"guess_fields = ['registration_id', 'username', 'email']\n",
"if self.settings.login_userfield:\n",
"guess_fields.append(self.settings.login_userfield)\n",
"for fieldname in guess_fields:\n",
"if fieldname in table_user.fields() and keys.get(fieldname, None):\n",
"if not checks:\n",
"checks.append(fieldname)\n",
"return None\n",
"if 'registration_id' not in keys:\n",
"value = keys[fieldname]\n",
"keys['registration_id'] = keys[checks[0]]\n",
"if 'registration_id' in checks and user and user.registration_id and (\n",
"user = table_user(**{fieldname: value})\n",
"user = None\n",
"if user:\n",
"if user:\n",
"if not get:\n",
"if checks:\n",
"return None\n",
"update_keys = dict(registration_id=keys['registration_id'])\n",
"if create_user is False:\n",
"return user\n",
"for key in update_fields:\n",
"self.logout(next=None, onlogout=None, log=None)\n",
"if 'first_name' not in keys and 'first_name' in table_user.fields:\n",
"if key in keys:\n",
"user.update_record(**update_keys)\n",
"guess = keys.get('email', 'anonymous').split('@')[0]\n",
"vars = table_user._filter_fields(keys)\n",
"update_keys[key] = keys[key]\n",
"keys['first_name'] = keys.get('username', guess)\n",
"user_id = table_user.insert(**vars)\n",
"user = table_user[user_id]\n",
"if self.settings.create_user_groups:\n",
"group_id = self.add_group(self.settings.create_user_groups % user)\n",
"if self.settings.everybody_group_id:\n",
"self.add_membership(group_id, user_id)\n",
"self.add_membership(self.settings.everybody_group_id, user_id)\n",
"if login:\n",
"self.user = user\n",
"if self.settings.register_onaccept:\n",
"callback(self.settings.register_onaccept, Storage(vars=user))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"For",
"Condition",
"Condition",
"Expr'",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Condition",
"Return'",
"Assign'",
"Condition",
"Return'",
"For",
"Expr'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Expr'"
] |
[
"def FUNC_2(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_12 = '@john:test_remote'\n",
"VAR_13 = 'test_remote'\n",
"self.resync_attempts = 0\n",
"def FUNC_5(VAR_5, VAR_0):...\n",
"if VAR_0 == VAR_12:\n",
"self.resync_attempts += 1\n",
"VAR_14 = self.homeserver.get_federation_client()\n",
"VAR_14.query_user_devices = Mock(side_effect=query_user_devices)\n",
"VAR_15 = self.homeserver.get_datastore()\n",
"VAR_15.get_rooms_for_user = Mock(return_value=make_awaitable([\n '!someroom:test']))\n",
"VAR_16 = self.homeserver.get_device_handler().device_list_updater\n",
"self.get_success(VAR_16.incoming_device_list_update(origin=remote_origin,\n edu_content={'deleted': False, 'device_display_name': 'Mobile',\n 'device_id': 'QBUAZIFURK', 'prev_id': [5], 'stream_id': 6, 'user_id':\n remote_user_id}))\n",
"self.assertEqual(self.resync_attempts, 1)\n",
"VAR_17 = self.get_success(VAR_15.get_user_ids_requiring_device_list_resync())\n",
"self.assertIn(VAR_12, VAR_17)\n",
"self.reactor.advance(30)\n",
"self.assertEqual(self.resync_attempts, 2)\n"
] | [
"def test_retry_device_list_resync(self):...\n",
"\"\"\"docstring\"\"\"\n",
"remote_user_id = '@john:test_remote'\n",
"remote_origin = 'test_remote'\n",
"self.resync_attempts = 0\n",
"def query_user_devices(destination, user_id):...\n",
"if user_id == remote_user_id:\n",
"self.resync_attempts += 1\n",
"federation_client = self.homeserver.get_federation_client()\n",
"federation_client.query_user_devices = Mock(side_effect=query_user_devices)\n",
"store = self.homeserver.get_datastore()\n",
"store.get_rooms_for_user = Mock(return_value=make_awaitable(['!someroom:test'])\n )\n",
"device_list_updater = self.homeserver.get_device_handler().device_list_updater\n",
"self.get_success(device_list_updater.incoming_device_list_update(origin=\n remote_origin, edu_content={'deleted': False, 'device_display_name':\n 'Mobile', 'device_id': 'QBUAZIFURK', 'prev_id': [5], 'stream_id': 6,\n 'user_id': remote_user_id}))\n",
"self.assertEqual(self.resync_attempts, 1)\n",
"need_resync = self.get_success(store.\n get_user_ids_requiring_device_list_resync())\n",
"self.assertIn(remote_user_id, need_resync)\n",
"self.reactor.advance(30)\n",
"self.assertEqual(self.resync_attempts, 2)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Condition",
"AugAssign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_16(self):...\n",
"return VAR_5(str('WagtailForm'), (CLASS_0,), self.formfields)\n"
] | [
"def get_form_class(self):...\n",
"return type(str('WagtailForm'), (BaseForm,), self.formfields)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_4(VAR_2, VAR_3, VAR_5, VAR_6=0):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_4 = saved_model_utils.get_meta_graph_def(VAR_2, VAR_3)\n",
"VAR_27 = FUNC_2(VAR_4, VAR_5)\n",
"VAR_28 = FUNC_3(VAR_4, VAR_5)\n",
"VAR_29 = ' ' * VAR_6\n",
"def FUNC_33(VAR_30):...\n",
"print(VAR_29 + VAR_30)\n",
"FUNC_33('The given SavedModel SignatureDef contains the following input(s):')\n",
"for VAR_71, input_tensor in sorted(VAR_27.items()):\n",
"FUNC_33(\" inputs['%s'] tensor_info:\" % VAR_71)\n",
"FUNC_33('The given SavedModel SignatureDef contains the following output(s):')\n",
"FUNC_7(input_tensor, VAR_6 + 1)\n",
"for output_key, output_tensor in sorted(VAR_28.items()):\n",
"FUNC_33(\" outputs['%s'] tensor_info:\" % output_key)\n",
"FUNC_33('Method name is: %s' % VAR_4.signature_def[VAR_5].method_name)\n",
"FUNC_7(output_tensor, VAR_6 + 1)\n"
] | [
"def _show_inputs_outputs(saved_model_dir, tag_set, signature_def_key, indent=0...\n",
"\"\"\"docstring\"\"\"\n",
"meta_graph_def = saved_model_utils.get_meta_graph_def(saved_model_dir, tag_set)\n",
"inputs_tensor_info = _get_inputs_tensor_info_from_meta_graph_def(meta_graph_def\n , signature_def_key)\n",
"outputs_tensor_info = _get_outputs_tensor_info_from_meta_graph_def(\n meta_graph_def, signature_def_key)\n",
"indent_str = ' ' * indent\n",
"def in_print(s):...\n",
"print(indent_str + s)\n",
"in_print('The given SavedModel SignatureDef contains the following input(s):')\n",
"for input_key, input_tensor in sorted(inputs_tensor_info.items()):\n",
"in_print(\" inputs['%s'] tensor_info:\" % input_key)\n",
"in_print('The given SavedModel SignatureDef contains the following output(s):')\n",
"_print_tensor_info(input_tensor, indent + 1)\n",
"for output_key, output_tensor in sorted(outputs_tensor_info.items()):\n",
"in_print(\" outputs['%s'] tensor_info:\" % output_key)\n",
"in_print('Method name is: %s' % meta_graph_def.signature_def[\n signature_def_key].method_name)\n",
"_print_tensor_info(output_tensor, indent + 1)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Expr'",
"Expr'",
"For",
"Expr'",
"Expr'",
"Expr'",
"For",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_65():...\n",
"\"\"\"docstring\"\"\"\n",
"if not hasattr(VAR_1, 'doc_events_hooks'):\n",
"VAR_184 = FUNC_66('doc_events', {})\n",
"return VAR_1.doc_events_hooks\n",
"VAR_179 = {}\n",
"for VAR_46, VAR_105 in iteritems(VAR_184):\n",
"if isinstance(VAR_46, tuple):\n",
"VAR_1.doc_events_hooks = VAR_179\n",
"for VAR_62 in VAR_46:\n",
"FUNC_67(VAR_179, VAR_46, VAR_105)\n",
"FUNC_67(VAR_179, VAR_62, VAR_105)\n"
] | [
"def get_doc_hooks():...\n",
"\"\"\"docstring\"\"\"\n",
"if not hasattr(local, 'doc_events_hooks'):\n",
"hooks = get_hooks('doc_events', {})\n",
"return local.doc_events_hooks\n",
"out = {}\n",
"for key, value in iteritems(hooks):\n",
"if isinstance(key, tuple):\n",
"local.doc_events_hooks = out\n",
"for doctype in key:\n",
"append_hook(out, key, value)\n",
"append_hook(out, doctype, value)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Return'",
"Assign'",
"For",
"Condition",
"Assign'",
"For",
"Expr'",
"Expr'"
] |
[
"def FUNC_2(self):...\n",
"super().setUp()\n",
"useChameleonEngine()\n",
"zope.component.provideAdapter(DefaultTraversable, (None,))\n",
"provideUtility(DefaultUnicodeEncodingConflictResolver,\n IUnicodeEncodingConflictResolver)\n",
"self.folder = VAR_14 = CLASS_1()\n",
"VAR_14.laf = CLASS_0()\n",
"VAR_14.t = CLASS_0()\n",
"self.policy = CLASS_2()\n",
"self.oldPolicy = SecurityManager.setSecurityPolicy(self.policy)\n",
"noSecurityManager()\n"
] | [
"def setUp(self):...\n",
"super().setUp()\n",
"useChameleonEngine()\n",
"zope.component.provideAdapter(DefaultTraversable, (None,))\n",
"provideUtility(DefaultUnicodeEncodingConflictResolver,\n IUnicodeEncodingConflictResolver)\n",
"self.folder = f = Folder()\n",
"f.laf = AqPageTemplate()\n",
"f.t = AqPageTemplate()\n",
"self.policy = UnitTestSecurityPolicy()\n",
"self.oldPolicy = SecurityManager.setSecurityPolicy(self.policy)\n",
"noSecurityManager()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_6(self, VAR_6, VAR_7):...\n",
"VAR_51 = self.userdb.auth(VAR_6, VAR_7)\n",
"VAR_52 = cherry.config['server.permit_remote_admin_login']\n",
"VAR_50 = VAR_53.request.remote.ip in ('127.0.0.1', '::1')\n",
"if not VAR_50 and VAR_51.isadmin and not VAR_52:\n",
"log.i(_('Rejected remote admin login from user: {name}').format(VAR_81=user\n .name))\n",
"VAR_53.session['username'] = VAR_51.name\n",
"VAR_51 = userdb.User.nobody()\n",
"VAR_53.session['userid'] = VAR_51.uid\n",
"VAR_53.session['admin'] = VAR_51.isadmin\n"
] | [
"def session_auth(self, username, password):...\n",
"user = self.userdb.auth(username, password)\n",
"allow_remote = cherry.config['server.permit_remote_admin_login']\n",
"is_loopback = cherrypy.request.remote.ip in ('127.0.0.1', '::1')\n",
"if not is_loopback and user.isadmin and not allow_remote:\n",
"log.i(_('Rejected remote admin login from user: {name}').format(name=user.name)\n )\n",
"cherrypy.session['username'] = user.name\n",
"user = userdb.User.nobody()\n",
"cherrypy.session['userid'] = user.uid\n",
"cherrypy.session['admin'] = user.isadmin\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_35(self):...\n",
"self.parser = saved_model_cli.create_parser()\n",
"VAR_10 = test.test_src_dir_path(VAR_0)\n",
"VAR_11 = self.parser.parse_args(['scan', '--dir', VAR_10])\n",
"saved_model_cli.scan(VAR_11)\n",
"VAR_12 = out.getvalue().strip()\n",
"self.assertTrue('does not contain denylisted ops' in VAR_12)\n"
] | [
"def testScanCommand(self):...\n",
"self.parser = saved_model_cli.create_parser()\n",
"base_path = test.test_src_dir_path(SAVED_MODEL_PATH)\n",
"args = self.parser.parse_args(['scan', '--dir', base_path])\n",
"saved_model_cli.scan(args)\n",
"output = out.getvalue().strip()\n",
"self.assertTrue('does not contain denylisted ops' in output)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_8(VAR_13, VAR_14=2):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_13 in VAR_15:\n",
"return VAR_15[VAR_13]\n",
"VAR_27 = frozenset('abcdefghijklmnopqrstuvwxyz0123456789')\n",
"VAR_28 = ''\n",
"for char in VAR_13.lower():\n",
"if len(VAR_28) == VAR_14:\n",
"VAR_15[VAR_13] = VAR_28\n",
"if char in VAR_27:\n",
"return VAR_28\n",
"VAR_28 += char\n"
] | [
"def groupFromKey(key, length=2):...\n",
"\"\"\"docstring\"\"\"\n",
"if key in _groupFromKeyCache:\n",
"return _groupFromKeyCache[key]\n",
"safeChars = frozenset('abcdefghijklmnopqrstuvwxyz0123456789')\n",
"group = ''\n",
"for char in key.lower():\n",
"if len(group) == length:\n",
"_groupFromKeyCache[key] = group\n",
"if char in safeChars:\n",
"return group\n",
"group += char\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Condition",
"Return'",
"AugAssign'"
] |
[
"@property...\n",
"return super().start()\n"
] | [
"@property...\n",
"return super().start()\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_25(VAR_0, VAR_2, VAR_1):...\n",
"VAR_8 = VAR_1.post('/dataobj/move/1', follow_redirects=True)\n",
"assert b'No path specified.' in VAR_8.data\n",
"VAR_8 = VAR_1.post('/dataobj/move/2', data={'path': 'aaa', 'submit': 'true'\n }, follow_redirects=True)\n",
"assert b'Data not found' in VAR_8.data\n",
"VAR_8 = VAR_1.post('/dataobj/move/1', data={'path': '', 'submit': 'true'},\n follow_redirects=True)\n",
"assert b'Data already in target directory' in VAR_8.data\n",
"VAR_16 = ['../adarnad', '~/adasd', 'ssss']\n",
"for p in VAR_16:\n",
"VAR_8 = VAR_1.post('/dataobj/move/1', data={'path': p, 'submit': 'true'},\n follow_redirects=True)\n",
"assert b'Data could not be moved to ' + bytes(p, 'utf-8') in VAR_8.data\n"
] | [
"def test_invalid_inputs_fail_move_data(test_app, note_fixture, client):...\n",
"resp = client.post('/dataobj/move/1', follow_redirects=True)\n",
"assert b'No path specified.' in resp.data\n",
"resp = client.post('/dataobj/move/2', data={'path': 'aaa', 'submit': 'true'\n }, follow_redirects=True)\n",
"assert b'Data not found' in resp.data\n",
"resp = client.post('/dataobj/move/1', data={'path': '', 'submit': 'true'},\n follow_redirects=True)\n",
"assert b'Data already in target directory' in resp.data\n",
"faulty_paths = ['../adarnad', '~/adasd', 'ssss']\n",
"for p in faulty_paths:\n",
"resp = client.post('/dataobj/move/1', data={'path': p, 'submit': 'true'},\n follow_redirects=True)\n",
"assert b'Data could not be moved to ' + bytes(p, 'utf-8') in resp.data\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assert'",
"Assign'",
"Assert'",
"Assign'",
"Assert'",
"Assign'",
"For",
"Assign'",
"Assert'"
] |
[
"def FUNC_30(VAR_27, VAR_32='\\n'):...\n",
"print(VAR_26 + VAR_27, VAR_32=end)\n"
] | [
"def in_print(s, end='\\n'):...\n",
"print(indent_str + s, end=end)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_27(self):...\n",
"VAR_9 = self._makeEngine()\n",
"VAR_5 = self._makeContext()\n",
"VAR_11 = VAR_9.compile('string:$eightbit')\n",
"self.assertRaises(UnicodeDecodeError, VAR_5.evaluate, VAR_11)\n",
"from Products.PageTemplates.interfaces import IUnicodeEncodingConflictResolver\n",
"from Products.PageTemplates.unicodeconflictresolver import StrictUnicodeEncodingConflictResolver\n",
"from zope.component import provideUtility\n",
"provideUtility(StrictUnicodeEncodingConflictResolver,\n IUnicodeEncodingConflictResolver)\n",
"self.assertEqual(VAR_5.evaluate(VAR_11), 'äüö')\n"
] | [
"def test_mixed(self):...\n",
"eng = self._makeEngine()\n",
"ec = self._makeContext()\n",
"expr = eng.compile('string:$eightbit')\n",
"self.assertRaises(UnicodeDecodeError, ec.evaluate, expr)\n",
"from Products.PageTemplates.interfaces import IUnicodeEncodingConflictResolver\n",
"from Products.PageTemplates.unicodeconflictresolver import StrictUnicodeEncodingConflictResolver\n",
"from zope.component import provideUtility\n",
"provideUtility(StrictUnicodeEncodingConflictResolver,\n IUnicodeEncodingConflictResolver)\n",
"self.assertEqual(ec.evaluate(expr), 'äüö')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Expr'",
"Expr'"
] |
[
"def FUNC_30(self):...\n",
"VAR_22 = models.Band.objects.create(name='Linkin Park')\n",
"VAR_28 = VAR_22.album_set.create(name='Hybrid Theory', cover_art=\n 'albums\\\\hybrid_theory.jpg')\n",
"VAR_27 = widgets.AdminFileWidget()\n",
"self.assertHTMLEqual(conditional_escape(VAR_27.render('test', VAR_28.\n cover_art)), 'string' % {'STORAGE_URL': default_storage.url('')})\n",
"self.assertHTMLEqual(conditional_escape(VAR_27.render('test',\n SimpleUploadedFile('test', b'content'))),\n '<input type=\"file\" name=\"test\" />')\n"
] | [
"def test_render(self):...\n",
"band = models.Band.objects.create(name='Linkin Park')\n",
"album = band.album_set.create(name='Hybrid Theory', cover_art=\n 'albums\\\\hybrid_theory.jpg')\n",
"w = widgets.AdminFileWidget()\n",
"self.assertHTMLEqual(conditional_escape(w.render('test', album.cover_art)),\n \n '<p class=\"file-upload\">Currently: <a href=\"%(STORAGE_URL)salbums/hybrid_theory.jpg\">albums\\\\hybrid_theory.jpg</a> <span class=\"clearable-file-input\"><input type=\"checkbox\" name=\"test-clear\" id=\"test-clear_id\" /> <label for=\"test-clear_id\">Clear</label></span><br />Change: <input type=\"file\" name=\"test\" /></p>'\n % {'STORAGE_URL': default_storage.url('')})\n",
"self.assertHTMLEqual(conditional_escape(w.render('test', SimpleUploadedFile\n ('test', b'content'))), '<input type=\"file\" name=\"test\" />')\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_121(*VAR_79, **VAR_42):...\n",
"if VAR_13.read_from_replica:\n",
"FUNC_6()\n",
"VAR_225 = VAR_129(*VAR_79, **get_newargs(fn, kwargs))\n",
"if VAR_1 and hasattr(VAR_1, 'primary_db'):\n",
"return VAR_225\n",
"VAR_1.db.close()\n",
"VAR_1.db = VAR_1.primary_db\n"
] | [
"def wrapper_fn(*args, **kwargs):...\n",
"if conf.read_from_replica:\n",
"connect_replica()\n",
"retval = fn(*args, **get_newargs(fn, kwargs))\n",
"if local and hasattr(local, 'primary_db'):\n",
"return retval\n",
"local.db.close()\n",
"local.db = local.primary_db\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Expr'",
"Assign'"
] |
[
"def FUNC_14(self, VAR_15, VAR_16):...\n",
"if not VAR_16:\n",
"self._pull_to_push_producer = CLASS_5(self.reactor, VAR_15, self)\n",
"super().registerProducer(VAR_15, True)\n",
"VAR_15 = self._pull_to_push_producer\n"
] | [
"def registerProducer(self, producer, streaming):...\n",
"if not streaming:\n",
"self._pull_to_push_producer = _PullToPushProducer(self.reactor, producer, self)\n",
"super().registerProducer(producer, True)\n",
"producer = self._pull_to_push_producer\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Expr'",
"Assign'"
] |
[
"def __init__(self, VAR_8, *VAR_6, **VAR_7):...\n",
"if 'instance' in VAR_7:\n",
"VAR_7.pop('instance')\n",
"super().__init__(VAR_8, *VAR_6, **kwargs)\n",
"self.fields['source_language'].initial = Language.objects.default_language\n",
"self.request = VAR_8\n",
"self.helper = FormHelper()\n",
"self.helper.form_tag = False\n",
"self.instance = None\n"
] | [
"def __init__(self, request, *args, **kwargs):...\n",
"if 'instance' in kwargs:\n",
"kwargs.pop('instance')\n",
"super().__init__(request, *args, **kwargs)\n",
"self.fields['source_language'].initial = Language.objects.default_language\n",
"self.request = request\n",
"self.helper = FormHelper()\n",
"self.helper.form_tag = False\n",
"self.instance = None\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_17(VAR_22, VAR_17):...\n",
"VAR_58 = [VAR_22.strip()]\n",
"VAR_58 = [x for x in VAR_58 if x != '']\n",
"return FUNC_7(VAR_58, VAR_17.series, db.Series, calibre_db.session, 'series')\n"
] | [
"def edit_book_series(series, book):...\n",
"input_series = [series.strip()]\n",
"input_series = [x for x in input_series if x != '']\n",
"return modify_database_object(input_series, book.series, db.Series,\n calibre_db.session, 'series')\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_171(VAR_14):...\n",
"VAR_415 = 'padding:0 0.2em;line-height:%.2fem;font-size:%.2fem'\n",
"VAR_416 = 1.5 * (VAR_14 - VAR_13) / max(VAR_9 - VAR_13, 1) + 1.3\n",
"return VAR_415 % (1.3, VAR_416)\n"
] | [
"def style(c):...\n",
"STYLE = 'padding:0 0.2em;line-height:%.2fem;font-size:%.2fem'\n",
"size = 1.5 * (c - b) / max(a - b, 1) + 1.3\n",
"return STYLE % (1.3, size)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_20(self, VAR_21):...\n",
"\"\"\"docstring\"\"\"\n",
"frappe.db.sql('delete from `tabSingles` where doctype=%s', self.doctype)\n",
"for field, VAR_26 in iteritems(VAR_21):\n",
"if field != 'doctype':\n",
"if self.doctype in frappe.db.value_cache:\n",
"frappe.db.sql(\n \"\"\"insert into `tabSingles` (doctype, field, value)\n\t\t\t\t\tvalues (%s, %s, %s)\"\"\"\n , (self.doctype, field, VAR_26))\n"
] | [
"def update_single(self, d):...\n",
"\"\"\"docstring\"\"\"\n",
"frappe.db.sql('delete from `tabSingles` where doctype=%s', self.doctype)\n",
"for field, value in iteritems(d):\n",
"if field != 'doctype':\n",
"if self.doctype in frappe.db.value_cache:\n",
"frappe.db.sql(\n \"\"\"insert into `tabSingles` (doctype, field, value)\n\t\t\t\t\tvalues (%s, %s, %s)\"\"\"\n , (self.doctype, field, value))\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"For",
"Condition",
"Condition",
"Expr'"
] |
[
"def __init__(self, VAR_7: str, VAR_8: HomeServerConfig, VAR_9=None, VAR_10=...\n",
"\"\"\"docstring\"\"\"\n",
"if not VAR_9:\n",
"from twisted.internet import reactor as _reactor\n",
"self._reactor = VAR_9\n",
"VAR_9 = _reactor\n",
"self.hostname = VAR_7\n",
"self.signing_key = VAR_8.key.signing_key[0]\n",
"self.config = VAR_8\n",
"self._listening_services = []\n",
"self.start_time = None\n",
"self._instance_id = random_string(5)\n",
"self._instance_name = VAR_8.worker_name or 'master'\n",
"self.version_string = VAR_10\n",
"self.datastores = None\n"
] | [
"def __init__(self, hostname: str, config: HomeServerConfig, reactor=None,...\n",
"\"\"\"docstring\"\"\"\n",
"if not reactor:\n",
"from twisted.internet import reactor as _reactor\n",
"self._reactor = reactor\n",
"reactor = _reactor\n",
"self.hostname = hostname\n",
"self.signing_key = config.key.signing_key[0]\n",
"self.config = config\n",
"self._listening_services = []\n",
"self.start_time = None\n",
"self._instance_id = random_string(5)\n",
"self._instance_name = config.worker_name or 'master'\n",
"self.version_string = version_string\n",
"self.datastores = None\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Condition",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_2(VAR_4: Dict[str, Any]) ->Union[datetime, date]:...\n",
"VAR_5: Union[datetime, date]\n",
"VAR_5 = datetime.fromisoformat(VAR_0['aCamelDateTime'])\n",
"VAR_5 = date.fromisoformat(VAR_0['aCamelDateTime'])\n",
"return VAR_5\n",
"return VAR_5\n"
] | [
"def _parse_a_camel_date_time(data: Dict[str, Any]) ->Union[datetime, date]:...\n",
"a_camel_date_time: Union[datetime, date]\n",
"a_camel_date_time = datetime.fromisoformat(d['aCamelDateTime'])\n",
"a_camel_date_time = date.fromisoformat(d['aCamelDateTime'])\n",
"return a_camel_date_time\n",
"return a_camel_date_time\n"
] | [
0,
5,
5,
5,
0,
0
] | [
"FunctionDef'",
"AnnAssign'",
"Assign'",
"Assign'",
"Return'",
"Return'"
] |
[
"def FUNC_8(self, VAR_0, VAR_13):...\n",
"VAR_26 = VAR_0 / 'foo'\n",
"VAR_27 = VAR_0 / 'bar'\n",
"VAR_26.ensure()\n",
"VAR_27.ensure()\n",
"VAR_25 = VAR_13(str(VAR_0))\n",
"assert VAR_25.parent\n",
"assert not VAR_25.folders\n",
"VAR_28 = self.Item(FUNC_1(VAR_26), VAR_26.relto(VAR_0))\n",
"VAR_29 = self.Item(FUNC_1(VAR_27), VAR_27.relto(VAR_0))\n",
"assert VAR_25.files == [VAR_29, VAR_28]\n"
] | [
"def test_files(self, tmpdir, parser):...\n",
"foo_file = tmpdir / 'foo'\n",
"bar_file = tmpdir / 'bar'\n",
"foo_file.ensure()\n",
"bar_file.ensure()\n",
"parsed = parser(str(tmpdir))\n",
"assert parsed.parent\n",
"assert not parsed.folders\n",
"foo_item = self.Item(_file_url(foo_file), foo_file.relto(tmpdir))\n",
"bar_item = self.Item(_file_url(bar_file), bar_file.relto(tmpdir))\n",
"assert parsed.files == [bar_item, foo_item]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assert'",
"Assert'",
"Assign'",
"Assign'",
"Assert'"
] |
[
"def FUNC_21():...\n",
"from octoprint.util.jinja import get_all_template_paths\n",
"return get_all_template_paths(app.jinja_loader)\n"
] | [
"def _get_all_templates():...\n",
"from octoprint.util.jinja import get_all_template_paths\n",
"return get_all_template_paths(app.jinja_loader)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"ImportFrom'",
"Return'"
] |
[
"def __init__(self, VAR_100=None, VAR_186=False):...\n",
"self.check_args = VAR_186\n",
"self.run_procedures = {}\n",
"self.csv_procedures = {}\n",
"self.xml_procedures = {}\n",
"self.rss_procedures = {}\n",
"self.json_procedures = {}\n",
"self.jsonrpc_procedures = {}\n",
"self.jsonrpc2_procedures = {}\n",
"self.xmlrpc_procedures = {}\n",
"self.amfrpc_procedures = {}\n",
"self.amfrpc3_procedures = {}\n",
"self.soap_procedures = {}\n"
] | [
"def __init__(self, environment=None, check_args=False):...\n",
"self.check_args = check_args\n",
"self.run_procedures = {}\n",
"self.csv_procedures = {}\n",
"self.xml_procedures = {}\n",
"self.rss_procedures = {}\n",
"self.json_procedures = {}\n",
"self.jsonrpc_procedures = {}\n",
"self.jsonrpc2_procedures = {}\n",
"self.xmlrpc_procedures = {}\n",
"self.amfrpc_procedures = {}\n",
"self.amfrpc3_procedures = {}\n",
"self.soap_procedures = {}\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_105(VAR_62, VAR_9, VAR_166=None, VAR_167=False, VAR_168=True):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_202 = FUNC_48(VAR_62)\n",
"if VAR_202.track_changes:\n",
"VAR_216 = VAR_12.get_all('Version', VAR_106={'ref_doctype': doctype,\n 'docname': name, 'order_by': 'creation' if head else None, 'limit':\n limit}, VAR_34=1)\n",
"if VAR_168:\n",
"from frappe.chat.util import squashify, dictify, safe_json_loads\n",
"VAR_217 = []\n",
"for VAR_9 in VAR_216:\n",
"VAR_9 = squashify(VAR_9)\n",
"return VAR_217\n",
"VAR_94 = FUNC_45('Version', VAR_9)\n",
"VAR_224 = VAR_94.data\n",
"VAR_224 = safe_json_loads(VAR_224)\n",
"VAR_224 = dictify(dict(version=data, VAR_10=doc.owner, creation=doc.creation))\n",
"VAR_217.append(VAR_224)\n"
] | [
"def get_version(doctype, name, limit=None, head=False, raise_err=True):...\n",
"\"\"\"docstring\"\"\"\n",
"meta = get_meta(doctype)\n",
"if meta.track_changes:\n",
"names = db.get_all('Version', filters={'ref_doctype': doctype, 'docname':\n name, 'order_by': 'creation' if head else None, 'limit': limit}, as_list=1)\n",
"if raise_err:\n",
"from frappe.chat.util import squashify, dictify, safe_json_loads\n",
"versions = []\n",
"for name in names:\n",
"name = squashify(name)\n",
"return versions\n",
"doc = get_doc('Version', name)\n",
"data = doc.data\n",
"data = safe_json_loads(data)\n",
"data = dictify(dict(version=data, user=doc.owner, creation=doc.creation))\n",
"versions.append(data)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Condition",
"ImportFrom'",
"Assign'",
"For",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_12 = list(VAR_12)\n",
"VAR_12.reverse()\n",
"while VAR_12:\n",
"VAR_30 = VAR_12.pop()\n",
"return VAR_10\n",
"if VAR_30 == '_':\n",
"warnings.warn(\n 'Traversing to the name `_` is deprecated and will be removed in Zope 6.',\n DeprecationWarning)\n",
"if VAR_30.startswith('_'):\n",
"if ITraversable.providedBy(VAR_10):\n",
"VAR_10 = getattr(VAR_10, VAR_9.traverse_method)(VAR_30)\n",
"VAR_10 = traversePathElement(VAR_10, VAR_30, VAR_12, VAR_11=request)\n"
] | [
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"path_items = list(path_items)\n",
"path_items.reverse()\n",
"while path_items:\n",
"name = path_items.pop()\n",
"return base\n",
"if name == '_':\n",
"warnings.warn(\n 'Traversing to the name `_` is deprecated and will be removed in Zope 6.',\n DeprecationWarning)\n",
"if name.startswith('_'):\n",
"if ITraversable.providedBy(base):\n",
"base = getattr(base, cls.traverse_method)(name)\n",
"base = traversePathElement(base, name, path_items, request=request)\n"
] | [
0,
0,
0,
0,
1,
0,
0,
1,
1,
1,
0,
0,
1
] | [
"Condition",
"Docstring",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Return'",
"Condition",
"Expr'",
"Condition",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_8(self, VAR_5):...\n",
"for VAR_20 in self._subexprs[:-1]:\n",
"if self._name == 'nocall':\n",
"VAR_6 = VAR_20(VAR_5)\n",
"return VAR_6\n",
"return FUNC_2(VAR_6, VAR_5.vars)\n"
] | [
"def _eval(self, econtext):...\n",
"for expr in self._subexprs[:-1]:\n",
"if self._name == 'nocall':\n",
"ob = expr(econtext)\n",
"return ob\n",
"return render(ob, econtext.vars)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Condition",
"Assign'",
"Return'",
"Return'"
] |
[
"def FUNC_1():...\n",
"return tornado.web.Application([('/', CLASS_5)])\n"
] | [
"def make_redirect_app():...\n",
"return tornado.web.Application([('/', redirect_handler)])\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@login_required...\n",
"VAR_11 = get_object_or_404(TopicPrivate.objects.select_related('topic'),\n VAR_3=topic_id, VAR_7=request.user)\n",
"VAR_12 = VAR_11.topic\n",
"if VAR_12.slug != VAR_4:\n",
"return HttpResponsePermanentRedirect(VAR_12.get_absolute_url())\n",
"topic_viewed(VAR_1=request, VAR_12=topic)\n",
"VAR_13 = Comment.objects.for_topic(VAR_12=topic).with_likes(VAR_7=request.user\n ).with_polls(VAR_7=request.user).order_by('date')\n",
"VAR_13 = paginate(VAR_13, per_page=config.comments_per_page, page_number=\n request.GET.get('page', 1))\n",
"return render(VAR_1=request, template_name=\n 'spirit/topic/private/detail.html', context={'topic': topic,\n 'topic_private': topic_private, 'comments': comments})\n"
] | [
"@login_required...\n",
"topic_private = get_object_or_404(TopicPrivate.objects.select_related(\n 'topic'), topic_id=topic_id, user=request.user)\n",
"topic = topic_private.topic\n",
"if topic.slug != slug:\n",
"return HttpResponsePermanentRedirect(topic.get_absolute_url())\n",
"topic_viewed(request=request, topic=topic)\n",
"comments = Comment.objects.for_topic(topic=topic).with_likes(user=request.user\n ).with_polls(user=request.user).order_by('date')\n",
"comments = paginate(comments, per_page=config.comments_per_page,\n page_number=request.GET.get('page', 1))\n",
"return render(request=request, template_name=\n 'spirit/topic/private/detail.html', context={'topic': topic,\n 'topic_private': topic_private, 'comments': comments})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Expr'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_38(self):...\n",
"from zope.interface.verify import verifyObject\n",
"from zope.tal.interfaces import ITALExpressionEngine\n",
"verifyObject(ITALExpressionEngine, self._makeOne())\n"
] | [
"def test_instance_conforms_to_ITALExpressionEngine(self):...\n",
"from zope.interface.verify import verifyObject\n",
"from zope.tal.interfaces import ITALExpressionEngine\n",
"verifyObject(ITALExpressionEngine, self._makeOne())\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"ImportFrom'",
"ImportFrom'",
"Expr'"
] |
[
"def FUNC_4(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.module:\n",
"self.module = frappe.db.get_value('DocType', self.ref_doctype, 'module')\n",
"if not self.is_standard:\n",
"self.is_standard = 'No'\n",
"if self.is_standard == 'No':\n",
"if frappe.session.user == 'Administrator' and getattr(frappe.local.conf,\n",
"if self.report_type != 'Report Builder':\n",
"if self.is_standard == 'Yes' and frappe.session.user != 'Administrator':\n",
"self.is_standard = 'Yes'\n",
"frappe.only_for('Script Manager', True)\n",
"if frappe.db.get_value('Report', self.name, 'is_standard') == 'Yes':\n",
"frappe.throw(_(\n 'Only Administrator can save a standard report. Please rename and save.'))\n",
"if self.report_type == 'Report Builder':\n",
"frappe.throw(_(\n 'Cannot edit a standard report. Please duplicate and create a new report'))\n",
"self.update_report_json()\n"
] | [
"def validate(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.module:\n",
"self.module = frappe.db.get_value('DocType', self.ref_doctype, 'module')\n",
"if not self.is_standard:\n",
"self.is_standard = 'No'\n",
"if self.is_standard == 'No':\n",
"if frappe.session.user == 'Administrator' and getattr(frappe.local.conf,\n",
"if self.report_type != 'Report Builder':\n",
"if self.is_standard == 'Yes' and frappe.session.user != 'Administrator':\n",
"self.is_standard = 'Yes'\n",
"frappe.only_for('Script Manager', True)\n",
"if frappe.db.get_value('Report', self.name, 'is_standard') == 'Yes':\n",
"frappe.throw(_(\n 'Only Administrator can save a standard report. Please rename and save.'))\n",
"if self.report_type == 'Report Builder':\n",
"frappe.throw(_(\n 'Cannot edit a standard report. Please duplicate and create a new report'))\n",
"self.update_report_json()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Expr'"
] |
[
"async def FUNC_18(VAR_24):...\n",
"VAR_65 = await self.store.get_current_state_ids(VAR_24)\n",
"VAR_40[VAR_24] = VAR_65\n"
] | [
"async def _fetch_room_state(room_id):...\n",
"room_state = await self.store.get_current_state_ids(room_id)\n",
"state_by_room[room_id] = room_state\n"
] | [
0,
0,
0
] | [
"AsyncFunctionDef'",
"Assign'",
"Assign'"
] |
[
"def FUNC_56(self, VAR_18, VAR_26=None, VAR_27=True, VAR_28=False, VAR_29=False...\n",
"\"\"\"docstring\"\"\"\n",
"if isinstance(VAR_18, dict):\n",
"self.update(VAR_18)\n",
"self.set(VAR_18, VAR_26)\n",
"if VAR_27 and (self.doctype, self.name) not in frappe.flags.currently_saving:\n",
"self.set('modified', now())\n",
"self.load_doc_before_save()\n",
"self.set('modified_by', frappe.session.user)\n",
"self.run_method('before_change')\n",
"frappe.db.set_value(self.doctype, self.name, VAR_18, VAR_26, self.modified,\n self.modified_by, VAR_27=update_modified)\n",
"self.run_method('on_change')\n",
"if VAR_28:\n",
"self.notify_update()\n",
"self.clear_cache()\n",
"if VAR_29:\n",
"frappe.db.commit()\n"
] | [
"def db_set(self, fieldname, value=None, update_modified=True, notify=False,...\n",
"\"\"\"docstring\"\"\"\n",
"if isinstance(fieldname, dict):\n",
"self.update(fieldname)\n",
"self.set(fieldname, value)\n",
"if update_modified and (self.doctype, self.name\n",
"self.set('modified', now())\n",
"self.load_doc_before_save()\n",
"self.set('modified_by', frappe.session.user)\n",
"self.run_method('before_change')\n",
"frappe.db.set_value(self.doctype, self.name, fieldname, value, self.\n modified, self.modified_by, update_modified=update_modified)\n",
"self.run_method('on_change')\n",
"if notify:\n",
"self.notify_update()\n",
"self.clear_cache()\n",
"if commit:\n",
"frappe.db.commit()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'"
] |
[
"@pytest.fixture...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3.setattr('s3file.forms.S3FileInputMixin.upload_folder', os.path.join(\n storage.aws_location, 'tmp'))\n"
] | [
"@pytest.fixture...\n",
"\"\"\"docstring\"\"\"\n",
"monkeypatch.setattr('s3file.forms.S3FileInputMixin.upload_folder', os.path.\n join(storage.aws_location, 'tmp'))\n"
] | [
1,
0,
1
] | [
"Condition",
"Docstring",
"Expr'"
] |
[
"def FUNC_79(self):...\n",
"\"\"\"docstring\"\"\"\n",
"from frappe.desk.doctype.tag.tag import DocTags\n",
"return DocTags(self.doctype).get_tags(self.name).split(',')[1:]\n"
] | [
"def get_tags(self):...\n",
"\"\"\"docstring\"\"\"\n",
"from frappe.desk.doctype.tag.tag import DocTags\n",
"return DocTags(self.doctype).get_tags(self.name).split(',')[1:]\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"ImportFrom'",
"Return'"
] |
[
"def FUNC_26(VAR_48=[], VAR_49='', VAR_50='No Subject', VAR_51='No Message',...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_181 = None\n",
"if VAR_78:\n",
"VAR_51, VAR_181 = get_email_from_template(VAR_78, VAR_79)\n",
"VAR_51 = VAR_61 or VAR_51\n",
"if VAR_52:\n",
"from frappe.utils import md_to_html\n",
"if not VAR_53:\n",
"VAR_51 = md_to_html(VAR_51)\n",
"VAR_74 = True\n",
"from frappe.email import queue\n",
"queue.send(VAR_48=recipients, VAR_49=sender, VAR_50=subject, VAR_51=message,\n VAR_181=text_content, VAR_54=doctype or reference_doctype, VAR_55=name or\n reference_name, VAR_59=add_unsubscribe_link, VAR_56=unsubscribe_method,\n VAR_57=unsubscribe_params, VAR_58=unsubscribe_message, VAR_60=\n attachments, VAR_63=reply_to, VAR_65=cc, VAR_66=bcc, VAR_67=message_id,\n VAR_68=in_reply_to, VAR_69=send_after, VAR_70=expose_recipients, VAR_71\n =send_priority, VAR_64=queue_separately, VAR_72=communication, VAR_74=\n now, VAR_75=read_receipt, VAR_76=is_notification, VAR_77=inline_images,\n VAR_80=header, VAR_81=print_letterhead, VAR_82=with_container)\n"
] | [
"def sendmail(recipients=[], sender='', subject='No Subject', message=...\n",
"\"\"\"docstring\"\"\"\n",
"text_content = None\n",
"if template:\n",
"message, text_content = get_email_from_template(template, args)\n",
"message = content or message\n",
"if as_markdown:\n",
"from frappe.utils import md_to_html\n",
"if not delayed:\n",
"message = md_to_html(message)\n",
"now = True\n",
"from frappe.email import queue\n",
"queue.send(recipients=recipients, sender=sender, subject=subject, message=\n message, text_content=text_content, reference_doctype=doctype or\n reference_doctype, reference_name=name or reference_name,\n add_unsubscribe_link=add_unsubscribe_link, unsubscribe_method=\n unsubscribe_method, unsubscribe_params=unsubscribe_params,\n unsubscribe_message=unsubscribe_message, attachments=attachments,\n reply_to=reply_to, cc=cc, bcc=bcc, message_id=message_id, in_reply_to=\n in_reply_to, send_after=send_after, expose_recipients=expose_recipients,\n send_priority=send_priority, queue_separately=queue_separately,\n communication=communication, now=now, read_receipt=read_receipt,\n is_notification=is_notification, inline_images=inline_images, header=\n header, print_letterhead=print_letterhead, with_container=with_container)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"ImportFrom'",
"Condition",
"Assign'",
"Assign'",
"ImportFrom'",
"Expr'"
] |
[
"def FUNC_1(self):...\n",
"VAR_3 = super().default_config()\n",
"VAR_3['worker_app'] = 'synapse.app.frontend_proxy'\n",
"VAR_3['worker_listeners'] = [{'type': 'http', 'port': 8080,\n 'bind_addresses': ['0.0.0.0'], 'resources': [{'names': ['client']}]}]\n",
"return VAR_3\n"
] | [
"def default_config(self):...\n",
"c = super().default_config()\n",
"c['worker_app'] = 'synapse.app.frontend_proxy'\n",
"c['worker_listeners'] = [{'type': 'http', 'port': 8080, 'bind_addresses': [\n '0.0.0.0'], 'resources': [{'names': ['client']}]}]\n",
"return c\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_32(VAR_87):...\n",
"VAR_86.update(VAR_87.encode('utf-8'))\n"
] | [
"def hash_update(value):...\n",
"hash.update(value.encode('utf-8'))\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"async def FUNC_6(self):...\n",
"return self.get_repo_url()\n"
] | [
"async def get_resolved_spec(self):...\n",
"return self.get_repo_url()\n"
] | [
0,
0
] | [
"AsyncFunctionDef'",
"Return'"
] |
[
"def FUNC_12(self):...\n",
"VAR_11, VAR_12, VAR_13, VAR_14 = self._makeTree()\n",
"VAR_16 = CLASS_1()\n",
"VAR_20 = 'http://localhost/VirtualHostBase/http/test/VirtualHostRoot/xxx'\n",
"VAR_21 = 'http://test/xxx'\n",
"VAR_17 = FauxRequest(RESPONSE=response, URL=vhm, ACTUAL_URL=actualURL)\n",
"VAR_12.REQUEST = VAR_17\n",
"VAR_15 = self._makeOne().__of__(VAR_12)\n",
"VAR_15.challenge(VAR_17, VAR_16)\n",
"self.assertEqual(VAR_16.status, 302)\n",
"self.assertEqual(len(VAR_16.headers), 3)\n",
"VAR_22 = VAR_16.headers['Location']\n",
"self.assertTrue(VAR_22.endswith(quote(VAR_21)))\n",
"self.assertFalse(VAR_22.endswith(quote(VAR_20)))\n",
"self.assertEqual(VAR_16.headers['Cache-Control'], 'no-cache')\n",
"self.assertEqual(VAR_16.headers['Expires'], 'Sat, 01 Jan 2000 00:00:00 GMT')\n"
] | [
"def test_challenge_with_vhm(self):...\n",
"rc, root, folder, object = self._makeTree()\n",
"response = FauxCookieResponse()\n",
"vhm = 'http://localhost/VirtualHostBase/http/test/VirtualHostRoot/xxx'\n",
"actualURL = 'http://test/xxx'\n",
"request = FauxRequest(RESPONSE=response, URL=vhm, ACTUAL_URL=actualURL)\n",
"root.REQUEST = request\n",
"helper = self._makeOne().__of__(root)\n",
"helper.challenge(request, response)\n",
"self.assertEqual(response.status, 302)\n",
"self.assertEqual(len(response.headers), 3)\n",
"loc = response.headers['Location']\n",
"self.assertTrue(loc.endswith(quote(actualURL)))\n",
"self.assertFalse(loc.endswith(quote(vhm)))\n",
"self.assertEqual(response.headers['Cache-Control'], 'no-cache')\n",
"self.assertEqual(response.headers['Expires'], 'Sat, 01 Jan 2000 00:00:00 GMT')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_147(self, VAR_104='default', VAR_105='index'):...\n",
"VAR_101 = self.auth.db\n",
"VAR_56 = VAR_263.request\n",
"VAR_341 = VAR_101.wiki_page(VAR_156='wiki-menu')\n",
"VAR_342 = []\n",
"if VAR_341:\n",
"VAR_411 = {'': VAR_342}\n",
"if self.can_see_menu():\n",
"VAR_412 = re.compile(\n '[\\r\\n\\t]*(?P<base>(\\\\s*\\\\-\\\\s*)+)(?P<title>\\\\w.*?)\\\\s+\\\\>\\\\s+(?P<link>\\\\S+)'\n )\n",
"VAR_413 = []\n",
"return VAR_342\n",
"for match in VAR_412.finditer(self.fix_hostname(VAR_341.body)):\n",
"VAR_342.append((VAR_263.T('[Wiki]'), None, None, VAR_413))\n",
"VAR_200 = match.group('base').replace(' ', '')\n",
"if URL() == URL(VAR_104, VAR_105):\n",
"VAR_441 = match.group('title')\n",
"if not str(VAR_56.args(0)).startswith('_'):\n",
"VAR_413.append((VAR_263.T('Create New Page'), None, URL(VAR_104, VAR_105,\n VAR_11='_create')))\n",
"VAR_276 = match.group('link')\n",
"VAR_156 = VAR_56.args(0) or 'index'\n",
"if VAR_56.args(0) == '_edit':\n",
"if self.can_manage():\n",
"VAR_442 = None\n",
"VAR_118 = 1\n",
"VAR_156 = VAR_56.args(1) or 'index'\n",
"if VAR_56.args(0) == '_editmedia':\n",
"VAR_413.append((VAR_263.T('Manage Pages'), None, URL(VAR_104, VAR_105,\n VAR_11='_pages')))\n",
"VAR_413.append((VAR_263.T('Search Pages'), None, URL(VAR_104, VAR_105,\n VAR_11='_search')))\n",
"if VAR_276.startswith('@'):\n",
"if VAR_118 in (2, 3):\n",
"VAR_118 = 2\n",
"VAR_156 = VAR_56.args(1) or 'index'\n",
"VAR_118 = 0\n",
"VAR_413.append((VAR_263.T('Edit Menu'), None, URL(VAR_104, VAR_105, VAR_11=\n ('_edit', 'wiki-menu'))))\n",
"VAR_250 = VAR_276[2:].split('/')\n",
"VAR_443 = VAR_411.get(VAR_200[1:], VAR_411[''])\n",
"VAR_413.append((VAR_263.T('View Page'), None, URL(VAR_104, VAR_105, VAR_11=\n slug)))\n",
"if VAR_118 in (1, 3):\n",
"VAR_118 = 3\n",
"if len(VAR_250) > 3:\n",
"VAR_444 = []\n",
"VAR_413.append((VAR_263.T('Edit Page'), None, URL(VAR_104, VAR_105, VAR_11=\n ('_edit', slug))))\n",
"if VAR_118 in (1, 2):\n",
"VAR_442 = VAR_250[3]\n",
"VAR_411[VAR_200] = VAR_444\n",
"VAR_413.append((VAR_263.T('Edit Page Media'), None, URL(VAR_104, VAR_105,\n VAR_11=('_editmedia', slug))))\n",
"VAR_276 = URL(VAR_9=items[0] or None, VAR_14=items[1] or controller, VAR_10\n =items[2] or function, VAR_11=items[3:])\n",
"VAR_443.append((VAR_263.T(VAR_441), VAR_56.args(0) == VAR_442, VAR_276,\n VAR_444))\n"
] | [
"def menu(self, controller='default', function='index'):...\n",
"db = self.auth.db\n",
"request = current.request\n",
"menu_page = db.wiki_page(slug='wiki-menu')\n",
"menu = []\n",
"if menu_page:\n",
"tree = {'': menu}\n",
"if self.can_see_menu():\n",
"regex = re.compile(\n '[\\r\\n\\t]*(?P<base>(\\\\s*\\\\-\\\\s*)+)(?P<title>\\\\w.*?)\\\\s+\\\\>\\\\s+(?P<link>\\\\S+)'\n )\n",
"submenu = []\n",
"return menu\n",
"for match in regex.finditer(self.fix_hostname(menu_page.body)):\n",
"menu.append((current.T('[Wiki]'), None, None, submenu))\n",
"base = match.group('base').replace(' ', '')\n",
"if URL() == URL(controller, function):\n",
"title = match.group('title')\n",
"if not str(request.args(0)).startswith('_'):\n",
"submenu.append((current.T('Create New Page'), None, URL(controller,\n function, args='_create')))\n",
"link = match.group('link')\n",
"slug = request.args(0) or 'index'\n",
"if request.args(0) == '_edit':\n",
"if self.can_manage():\n",
"title_page = None\n",
"mode = 1\n",
"slug = request.args(1) or 'index'\n",
"if request.args(0) == '_editmedia':\n",
"submenu.append((current.T('Manage Pages'), None, URL(controller, function,\n args='_pages')))\n",
"submenu.append((current.T('Search Pages'), None, URL(controller, function,\n args='_search')))\n",
"if link.startswith('@'):\n",
"if mode in (2, 3):\n",
"mode = 2\n",
"slug = request.args(1) or 'index'\n",
"mode = 0\n",
"submenu.append((current.T('Edit Menu'), None, URL(controller, function,\n args=('_edit', 'wiki-menu'))))\n",
"items = link[2:].split('/')\n",
"parent = tree.get(base[1:], tree[''])\n",
"submenu.append((current.T('View Page'), None, URL(controller, function,\n args=slug)))\n",
"if mode in (1, 3):\n",
"mode = 3\n",
"if len(items) > 3:\n",
"subtree = []\n",
"submenu.append((current.T('Edit Page'), None, URL(controller, function,\n args=('_edit', slug))))\n",
"if mode in (1, 2):\n",
"title_page = items[3]\n",
"tree[base] = subtree\n",
"submenu.append((current.T('Edit Page Media'), None, URL(controller,\n function, args=('_editmedia', slug))))\n",
"link = URL(a=items[0] or None, c=items[1] or controller, f=items[2] or\n function, args=items[3:])\n",
"parent.append((current.T(title), request.args(0) == title_page, link, subtree))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"For",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_102(self, VAR_10):...\n",
"\"\"\"docstring\"\"\"\n",
"self.amfrpc_procedures[VAR_10.__name__] = VAR_10\n",
"return VAR_10\n"
] | [
"def amfrpc(self, f):...\n",
"\"\"\"docstring\"\"\"\n",
"self.amfrpc_procedures[f.__name__] = f\n",
"return f\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"import os\n",
"import shutil\n",
"import subprocess\n",
"import sys\n",
"import tempfile\n",
"import traceback\n",
"from pathlib import Path\n",
"from typing import List\n",
"from flask import Response, request\n",
"from werkzeug.utils import secure_filename\n",
"from archivy import click_web\n",
"from .input_fields import FieldId\n",
"VAR_0 = None\n",
"def FUNC_0(VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1 = 'cli/' + VAR_1\n",
"VAR_0 = click_web.logger\n",
"VAR_6 = ['shell', 'run', 'routes', 'create-admin']\n",
"VAR_7, *VAR_4 = VAR_1.split('/')\n",
"VAR_3 = ['archivy']\n",
"VAR_2 = CLASS_0()\n",
"VAR_3.extend(VAR_2.command_args(0))\n",
"for i, command in enumerate(VAR_4):\n",
"if command in VAR_6:\n",
"def FUNC_5():...\n",
"return Response(status=400)\n",
"VAR_3.append(command)\n",
"yield FUNC_2(VAR_4)\n",
"VAR_3.extend(VAR_2.command_args(i + 1))\n",
"yield from FUNC_1(VAR_2, VAR_3)\n",
"yield f\"\"\"\nERROR: Got exception when reading output from script: {type(e)}\n\"\"\"\n",
"return Response(FUNC_5(), mimetype='text/plain')\n",
"yield traceback.format_exc()\n"
] | [
"import os\n",
"import shutil\n",
"import subprocess\n",
"import sys\n",
"import tempfile\n",
"import traceback\n",
"from pathlib import Path\n",
"from typing import List\n",
"from flask import Response, request\n",
"from werkzeug.utils import secure_filename\n",
"from archivy import click_web\n",
"from .input_fields import FieldId\n",
"logger = None\n",
"def exec(command_path):...\n",
"\"\"\"docstring\"\"\"\n",
"command_path = 'cli/' + command_path\n",
"logger = click_web.logger\n",
"omitted = ['shell', 'run', 'routes', 'create-admin']\n",
"root_command, *commands = command_path.split('/')\n",
"cmd = ['archivy']\n",
"req_to_args = RequestToCommandArgs()\n",
"cmd.extend(req_to_args.command_args(0))\n",
"for i, command in enumerate(commands):\n",
"if command in omitted:\n",
"def _generate_output():...\n",
"return Response(status=400)\n",
"cmd.append(command)\n",
"yield _create_cmd_header(commands)\n",
"cmd.extend(req_to_args.command_args(i + 1))\n",
"yield from _run_script_and_generate_stream(req_to_args, cmd)\n",
"yield f\"\"\"\nERROR: Got exception when reading output from script: {type(e)}\n\"\"\"\n",
"return Response(_generate_output(), mimetype='text/plain')\n",
"yield traceback.format_exc()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"For",
"Condition",
"FunctionDef'",
"Return'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Return'",
"Expr'"
] |
[
"def FUNC_40(self):...\n",
"if self.flags.ignore_links or self._action == 'cancel':\n",
"return\n",
"VAR_63, VAR_64 = self.get_invalid_links()\n",
"for VAR_21 in self.get_all_children():\n",
"VAR_86 = VAR_21.get_invalid_links(is_submittable=self.meta.is_submittable)\n",
"if VAR_63:\n",
"VAR_63.extend(VAR_86[0])\n",
"VAR_87 = ', '.join(each[2] for each in VAR_63)\n",
"if VAR_64:\n",
"VAR_64.extend(VAR_86[1])\n",
"frappe.throw(_('Could not find {0}').format(VAR_87), frappe.LinkValidationError\n )\n",
"VAR_87 = ', '.join(each[2] for each in VAR_64)\n",
"frappe.throw(_('Cannot link cancelled document: {0}').format(VAR_87),\n frappe.CancelledLinkError)\n"
] | [
"def _validate_links(self):...\n",
"if self.flags.ignore_links or self._action == 'cancel':\n",
"return\n",
"invalid_links, cancelled_links = self.get_invalid_links()\n",
"for d in self.get_all_children():\n",
"result = d.get_invalid_links(is_submittable=self.meta.is_submittable)\n",
"if invalid_links:\n",
"invalid_links.extend(result[0])\n",
"msg = ', '.join(each[2] for each in invalid_links)\n",
"if cancelled_links:\n",
"cancelled_links.extend(result[1])\n",
"frappe.throw(_('Could not find {0}').format(msg), frappe.LinkValidationError)\n",
"msg = ', '.join(each[2] for each in cancelled_links)\n",
"frappe.throw(_('Cannot link cancelled document: {0}').format(msg), frappe.\n CancelledLinkError)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"For",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"return self.spec\n"
] | [
"def get_repo_url(self):...\n",
"return self.spec\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"async def FUNC_10(VAR_16, VAR_17):...\n",
"if VAR_17 not in [u.to_string() for u in self.room_members]:\n",
"return None\n"
] | [
"async def check_user_in_room(room_id, user_id):...\n",
"if user_id not in [u.to_string() for u in self.room_members]:\n",
"return None\n"
] | [
0,
0,
0
] | [
"AsyncFunctionDef'",
"For",
"Return'"
] |
[
"@FUNC_0...\n",
"return Clock(self._reactor)\n"
] | [
"@cache_in_self...\n",
"return Clock(self._reactor)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_30():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5 = '/'.join(request.args)\n",
"VAR_15 = apath(VAR_5, VAR_122=request)\n",
"VAR_7 = FUNC_3(VAR_15).split('\\n')\n",
"VAR_8 = FUNC_3(VAR_15 + '.1').split('\\n')\n",
"session.flash = 'Other file, no longer there'\n",
"VAR_68 = difflib.ndiff(VAR_7, VAR_8)\n",
"redirect(URL('edit', VAR_98=request.args))\n",
"def FUNC_60(VAR_69):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_136 = ''\n",
"for VAR_145, VAR_48 in enumerate(VAR_69):\n",
"if VAR_48 == ' ':\n",
"return XML(VAR_136)\n",
"VAR_136 += ' '\n",
"if VAR_48 == ' \\t':\n",
"VAR_136 += ' '\n",
"if VAR_145 == 0 and VAR_48 == '?':\n"
] | [
"def resolve():...\n",
"\"\"\"docstring\"\"\"\n",
"filename = '/'.join(request.args)\n",
"path = apath(filename, r=request)\n",
"a = safe_read(path).split('\\n')\n",
"b = safe_read(path + '.1').split('\\n')\n",
"session.flash = 'Other file, no longer there'\n",
"d = difflib.ndiff(a, b)\n",
"redirect(URL('edit', args=request.args))\n",
"def leading(line):...\n",
"\"\"\"docstring\"\"\"\n",
"z = ''\n",
"for k, c in enumerate(line):\n",
"if c == ' ':\n",
"return XML(z)\n",
"z += ' '\n",
"if c == ' \\t':\n",
"z += ' '\n",
"if k == 0 and c == '?':\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Condition",
"Return'",
"AugAssign'",
"Condition",
"AugAssign'",
"Condition"
] |
[
"def FUNC_18(self, VAR_17, VAR_18):...\n",
"if not VAR_53.session['admin']:\n",
"VAR_66 = albumArtFilePath(VAR_17)\n",
"VAR_64 = albumartfetcher.AlbumArtFetcher()\n",
"VAR_20, VAR_67 = VAR_64.retrieveData(VAR_18)\n",
"self.albumartcache_save(VAR_66, VAR_20)\n"
] | [
"def api_albumart_set(self, directory, imageurl):...\n",
"if not cherrypy.session['admin']:\n",
"b64imgpath = albumArtFilePath(directory)\n",
"fetcher = albumartfetcher.AlbumArtFetcher()\n",
"data, header = fetcher.retrieveData(imageurl)\n",
"self.albumartcache_save(b64imgpath, data)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_14(VAR_17):...\n",
"if not VAR_17:\n",
"return ''\n",
"VAR_24 = list(urlsplit(VAR_17))\n",
"VAR_24[0] = ''\n",
"VAR_24[1] = ''\n",
"VAR_25 = urlunsplit(VAR_24)\n",
"if not VAR_25:\n",
"VAR_25 = './'\n",
"return VAR_25\n"
] | [
"def get_next_path(unsafe_next_path):...\n",
"if not unsafe_next_path:\n",
"return ''\n",
"parts = list(urlsplit(unsafe_next_path))\n",
"parts[0] = ''\n",
"parts[1] = ''\n",
"safe_next_path = urlunsplit(parts)\n",
"if not safe_next_path:\n",
"safe_next_path = './'\n",
"return safe_next_path\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_118 = VAR_9['wid']\n",
"VAR_119 = VAR_8.getObject('Well', VAR_118)\n",
"if VAR_119 is None:\n",
"return HttpJavascriptResponseServerError('\"\"')\n",
"VAR_120 = VAR_9.get('thumbprefix', 'webgateway_render_thumbnail')\n",
"def FUNC_65(VAR_6):...\n",
"return reverse(VAR_120, VAR_116=(iid,))\n"
] | [
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"wid = kwargs['wid']\n",
"well = conn.getObject('Well', wid)\n",
"if well is None:\n",
"return HttpJavascriptResponseServerError('\"\"')\n",
"prefix = kwargs.get('thumbprefix', 'webgateway_render_thumbnail')\n",
"def urlprefix(iid):...\n",
"return reverse(prefix, args=(iid,))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_5(VAR_14, VAR_19=True, **VAR_20):...\n",
"\"\"\"docstring\"\"\"\n",
"return CLASS_5(VAR_19=escape, **kwargs)(VAR_14)\n"
] | [
"def markdown(text, escape=True, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"return Markdown(escape=escape, **kwargs)(text)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_31():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_63 = argparse.ArgumentParser(description=\n 'saved_model_cli: Command-line interface for SavedModel')\n",
"VAR_63.add_argument('-v', '--version', action='version', version='0.1.0')\n",
"VAR_23 = VAR_63.add_subparsers(title='commands', description=\n 'valid commands', help='additional help')\n",
"FUNC_24(VAR_23)\n",
"FUNC_25(VAR_23)\n",
"FUNC_26(VAR_23)\n",
"FUNC_27(VAR_23)\n",
"FUNC_30(VAR_23)\n",
"FUNC_29(VAR_23)\n",
"return VAR_63\n"
] | [
"def create_parser():...\n",
"\"\"\"docstring\"\"\"\n",
"parser = argparse.ArgumentParser(description=\n 'saved_model_cli: Command-line interface for SavedModel')\n",
"parser.add_argument('-v', '--version', action='version', version='0.1.0')\n",
"subparsers = parser.add_subparsers(title='commands', description=\n 'valid commands', help='additional help')\n",
"add_show_subparser(subparsers)\n",
"add_run_subparser(subparsers)\n",
"add_scan_subparser(subparsers)\n",
"add_convert_subparser(subparsers)\n",
"add_aot_compile_cpu_subparser(subparsers)\n",
"add_freeze_model_subparser(subparsers)\n",
"return parser\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_49(self, VAR_38, VAR_39):...\n",
"VAR_87 = 'attachment; filename=\"' + VAR_39 + '\"'\n",
"VAR_53.response.headers['Content-Type'] = 'application/x-download'\n",
"VAR_53.response.headers['Content-Disposition'] = VAR_87\n",
"return codecs.encode(VAR_38, 'UTF-8')\n"
] | [
"def serve_string_as_file(self, string, filename):...\n",
"content_disposition = 'attachment; filename=\"' + filename + '\"'\n",
"cherrypy.response.headers['Content-Type'] = 'application/x-download'\n",
"cherrypy.response.headers['Content-Disposition'] = content_disposition\n",
"return codecs.encode(string, 'UTF-8')\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@property...\n",
"return super().item()\n"
] | [
"@property...\n",
"return super().item()\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_3(self, VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"assert NotImplementedError(\n 'This method is to be implemented by Attachment classes')\n"
] | [
"def attachment_path(self, filename):...\n",
"\"\"\"docstring\"\"\"\n",
"assert NotImplementedError(\n 'This method is to be implemented by Attachment classes')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assert'"
] |
[
"def FUNC_51(VAR_2, VAR_39, VAR_40, VAR_8=None, **VAR_9):...\n",
"warnings.warn('Deprecated. Use _bulk_file_annotations()', DeprecationWarning)\n",
"return FUNC_52(VAR_2, VAR_39, VAR_40, VAR_8, **kwargs)\n"
] | [
"def _annotations(request, objtype, objid, conn=None, **kwargs):...\n",
"warnings.warn('Deprecated. Use _bulk_file_annotations()', DeprecationWarning)\n",
"return _bulk_file_annotations(request, objtype, objid, conn, **kwargs)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Return'"
] |
[
"def FUNC_40(self, VAR_44):...\n",
"VAR_14 = self.output(VAR_44.group(1))\n",
"return self.renderer.strikethrough(VAR_14)\n"
] | [
"def output_strikethrough(self, m):...\n",
"text = self.output(m.group(1))\n",
"return self.renderer.strikethrough(text)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_7(VAR_11):...\n",
"if VAR_11 == self.room_id:\n",
"return defer.succeed([self.user])\n",
"return defer.succeed([])\n"
] | [
"def get_room_members(room_id):...\n",
"if room_id == self.room_id:\n",
"return defer.succeed([self.user])\n",
"return defer.succeed([])\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_7(self, VAR_0, VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_9 = SynapseSite('test', 'site_tag', parse_listener_def({'type': 'http',\n 'port': 0}), self.resource, '1.0')\n",
"VAR_7, VAR_6 = make_request(self.reactor, VAR_9, VAR_0, VAR_1, shorthand=False)\n",
"return VAR_6\n"
] | [
"def _make_request(self, method, path):...\n",
"\"\"\"docstring\"\"\"\n",
"site = SynapseSite('test', 'site_tag', parse_listener_def({'type': 'http',\n 'port': 0}), self.resource, '1.0')\n",
"_, channel = make_request(self.reactor, site, method, path, shorthand=False)\n",
"return channel\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_15(self):...\n",
"VAR_5 = self._makeContext()\n",
"self.assertEqual(VAR_5.evaluate('x | python:int'), int)\n"
] | [
"def test_hybrid_with_python_expression_type_value_not_called(self):...\n",
"ec = self._makeContext()\n",
"self.assertEqual(ec.evaluate('x | python:int'), int)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def __setattr__(self, VAR_46, VAR_105):...\n",
"self[VAR_46] = VAR_105\n"
] | [
"def __setattr__(self, key, value):...\n",
"self[key] = value\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_83(self, VAR_71):...\n",
"if isinstance(VAR_71, dict):\n",
"if not self.get('_return_value'):\n",
"self._return_value = VAR_71 or self.get('_return_value')\n",
"self._return_value = {}\n",
"self._return_value.update(VAR_71)\n"
] | [
"def add_to_return_value(self, new_return_value):...\n",
"if isinstance(new_return_value, dict):\n",
"if not self.get('_return_value'):\n",
"self._return_value = new_return_value or self.get('_return_value')\n",
"self._return_value = {}\n",
"self._return_value.update(new_return_value)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_8, *VAR_6, **VAR_7):...\n",
"super().__init__(VAR_8, *VAR_6, **kwargs)\n",
"for VAR_39, VAR_13 in self.fields.items():\n",
"if VAR_39 == 'discovery':\n",
"self.fields['vcs'].choices = VCS_REGISTRY.get_choices()\n",
"VAR_13.widget = forms.HiddenInput()\n",
"self.discovered = self.perform_discovery(VAR_8, VAR_7)\n",
"for i, VAR_13 in enumerate(self.discovered):\n",
"self.fields['discovery'].choices.append((i, self.render_choice(VAR_13)))\n"
] | [
"def __init__(self, request, *args, **kwargs):...\n",
"super().__init__(request, *args, **kwargs)\n",
"for field, value in self.fields.items():\n",
"if field == 'discovery':\n",
"self.fields['vcs'].choices = VCS_REGISTRY.get_choices()\n",
"value.widget = forms.HiddenInput()\n",
"self.discovered = self.perform_discovery(request, kwargs)\n",
"for i, value in enumerate(self.discovered):\n",
"self.fields['discovery'].choices.append((i, self.render_choice(value)))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"For",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"For",
"Expr'"
] |
[
"def FUNC_11(self) ->None:...\n",
"VAR_24 = self.package_dir / 'client.py'\n",
"VAR_25 = self.env.get_template('client.pyi')\n",
"VAR_24.write_text(VAR_25.render())\n",
"VAR_26 = self.package_dir / 'api'\n",
"VAR_26.mkdir()\n",
"VAR_27 = VAR_26 / '__init__.py'\n",
"VAR_27.write_text('\"\"\" Contains synchronous methods for accessing the API \"\"\"')\n",
"VAR_28 = self.package_dir / 'async_api'\n",
"VAR_28.mkdir()\n",
"VAR_29 = VAR_28 / '__init__.py'\n",
"VAR_29.write_text('\"\"\" Contains async methods for accessing the API \"\"\"')\n",
"VAR_30 = self.package_dir / 'errors.py'\n",
"VAR_31 = self.env.get_template('errors.pyi')\n",
"VAR_30.write_text(VAR_31.render())\n",
"VAR_32 = self.env.get_template('endpoint_module.pyi')\n",
"VAR_33 = self.env.get_template('async_endpoint_module.pyi')\n",
"for VAR_37, collection in self.openapi.endpoint_collections_by_tag.items():\n",
"VAR_37 = utils.snake_case(VAR_37)\n",
"VAR_36 = VAR_26 / f'{VAR_37}.py'\n",
"VAR_36.write_text(VAR_32.render(collection=collection))\n",
"VAR_38 = VAR_28 / f'{VAR_37}.py'\n",
"VAR_38.write_text(VAR_33.render(collection=collection))\n"
] | [
"def _build_api(self) ->None:...\n",
"client_path = self.package_dir / 'client.py'\n",
"client_template = self.env.get_template('client.pyi')\n",
"client_path.write_text(client_template.render())\n",
"api_dir = self.package_dir / 'api'\n",
"api_dir.mkdir()\n",
"api_init = api_dir / '__init__.py'\n",
"api_init.write_text(\n '\"\"\" Contains synchronous methods for accessing the API \"\"\"')\n",
"async_api_dir = self.package_dir / 'async_api'\n",
"async_api_dir.mkdir()\n",
"async_api_init = async_api_dir / '__init__.py'\n",
"async_api_init.write_text(\n '\"\"\" Contains async methods for accessing the API \"\"\"')\n",
"api_errors = self.package_dir / 'errors.py'\n",
"errors_template = self.env.get_template('errors.pyi')\n",
"api_errors.write_text(errors_template.render())\n",
"endpoint_template = self.env.get_template('endpoint_module.pyi')\n",
"async_endpoint_template = self.env.get_template('async_endpoint_module.pyi')\n",
"for tag, collection in self.openapi.endpoint_collections_by_tag.items():\n",
"tag = utils.snake_case(tag)\n",
"module_path = api_dir / f'{tag}.py'\n",
"module_path.write_text(endpoint_template.render(collection=collection))\n",
"async_module_path = async_api_dir / f'{tag}.py'\n",
"async_module_path.write_text(async_endpoint_template.render(collection=\n collection))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"@log_function...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_34:\n",
"VAR_2 = FUNC_1('/groups/%s/summary/roles/%s/users/%s', VAR_30, VAR_34, VAR_16)\n",
"VAR_2 = FUNC_1('/groups/%s/summary/users/%s', VAR_30, VAR_16)\n",
"return self.client.post_json(VAR_5=destination, VAR_2=path, VAR_3={\n 'requester_user_id': requester_user_id}, VAR_39=content, VAR_15=True)\n"
] | [
"@log_function...\n",
"\"\"\"docstring\"\"\"\n",
"if role_id:\n",
"path = _create_v1_path('/groups/%s/summary/roles/%s/users/%s', group_id,\n role_id, user_id)\n",
"path = _create_v1_path('/groups/%s/summary/users/%s', group_id, user_id)\n",
"return self.client.post_json(destination=destination, path=path, args={\n 'requester_user_id': requester_user_id}, data=content, ignore_backoff=True)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_15(self, VAR_22):...\n",
"VAR_65 = type(VAR_22)\n",
"if isinstance(VAR_22, VAR_72):\n",
"VAR_55 = fromstring(VAR_22)\n",
"VAR_55 = copy.deepcopy(VAR_22)\n",
"self(VAR_55)\n",
"return _transform_result(VAR_65, VAR_55)\n"
] | [
"def clean_html(self, html):...\n",
"result_type = type(html)\n",
"if isinstance(html, basestring):\n",
"doc = fromstring(html)\n",
"doc = copy.deepcopy(html)\n",
"self(doc)\n",
"return _transform_result(result_type, doc)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_86():...\n",
"from frappe.utils import cint\n",
"return VAR_19.mute_emails or cint(VAR_13.get('mute_emails') or 0) or False\n"
] | [
"def are_emails_muted():...\n",
"from frappe.utils import cint\n",
"return flags.mute_emails or cint(conf.get('mute_emails') or 0) or False\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"ImportFrom'",
"Return'"
] |
[
"def FUNC_35(VAR_50):...\n",
""
] | [
"def resp_read(chunk_size):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"@VAR_0.filter()...\n",
"return [VAR_11 for VAR_11 in VAR_4 if VAR_11.location == VAR_5]\n"
] | [
"@register.filter()...\n",
"return [field for field in fields if field.location == location]\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_71(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._set_canonical_alias({'alias': self.alias, 'alt_aliases': [self.alias]})\n",
"VAR_61 = self._get_canonical_alias()\n",
"self.assertEqual(VAR_61, {'alias': self.alias, 'alt_aliases': [self.alias]})\n",
"self._set_canonical_alias({})\n",
"VAR_61 = self._get_canonical_alias()\n",
"self.assertEqual(VAR_61, {})\n"
] | [
"def test_alias_alt_aliases(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._set_canonical_alias({'alias': self.alias, 'alt_aliases': [self.alias]})\n",
"res = self._get_canonical_alias()\n",
"self.assertEqual(res, {'alias': self.alias, 'alt_aliases': [self.alias]})\n",
"self._set_canonical_alias({})\n",
"res = self._get_canonical_alias()\n",
"self.assertEqual(res, {})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"@parameterized.named_parameters(('non_tfrt', False))...\n",
"self.parser = saved_model_cli.create_parser()\n",
"VAR_10 = test.test_src_dir_path(VAR_0)\n",
"VAR_42 = os.path.join(test.get_temp_dir(), 'new_dir')\n",
"VAR_11 = self.parser.parse_args(['run', '--dir', VAR_10, '--tag_set',\n 'serve', '--signature_def', 'regress_x_to_y', '--input_examples',\n 'inputs=[{\"x\":8.0,\"x2\":5.0}]', '--outdir', VAR_42] + (['--use_tfrt'] if\n VAR_5 else []))\n",
"saved_model_cli.run(VAR_11)\n"
] | [
"@parameterized.named_parameters(('non_tfrt', False))...\n",
"self.parser = saved_model_cli.create_parser()\n",
"base_path = test.test_src_dir_path(SAVED_MODEL_PATH)\n",
"output_dir = os.path.join(test.get_temp_dir(), 'new_dir')\n",
"args = self.parser.parse_args(['run', '--dir', base_path, '--tag_set',\n 'serve', '--signature_def', 'regress_x_to_y', '--input_examples',\n 'inputs=[{\"x\":8.0,\"x2\":5.0}]', '--outdir', output_dir] + (['--use_tfrt'\n ] if use_tfrt else []))\n",
"saved_model_cli.run(args)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_2(VAR_4, VAR_5, VAR_6):...\n",
"VAR_8 = []\n",
"for c_elements in VAR_4:\n",
"VAR_82 = False\n",
"return VAR_8\n",
"if VAR_5 == 'languages':\n",
"VAR_101 = c_elements.lang_code\n",
"if VAR_5 == 'custom':\n",
"for inp_element in VAR_6:\n",
"VAR_101 = c_elements.value\n",
"VAR_101 = c_elements.name\n",
"if inp_element.lower() == VAR_101.lower():\n",
"if not VAR_82:\n",
"VAR_82 = True\n",
"VAR_8.append(c_elements)\n"
] | [
"def search_objects_remove(db_book_object, db_type, input_elements):...\n",
"del_elements = []\n",
"for c_elements in db_book_object:\n",
"found = False\n",
"return del_elements\n",
"if db_type == 'languages':\n",
"type_elements = c_elements.lang_code\n",
"if db_type == 'custom':\n",
"for inp_element in input_elements:\n",
"type_elements = c_elements.value\n",
"type_elements = c_elements.name\n",
"if inp_element.lower() == type_elements.lower():\n",
"if not found:\n",
"found = True\n",
"del_elements.append(c_elements)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Condition",
"For",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Expr'"
] |
[
"def FUNC_35(self, VAR_38):...\n",
"VAR_91 = 'https' if self.certfile else 'http'\n",
"return '%s://%s:%i%s' % (VAR_91, VAR_38, self.port, self.base_url)\n"
] | [
"def _url(self, ip):...\n",
"proto = 'https' if self.certfile else 'http'\n",
"return '%s://%s:%i%s' % (proto, ip, self.port, self.base_url)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"@app.route('/login', methods=['GET', 'POST'])...\n",
"VAR_9 = forms.UserForm()\n",
"if VAR_9.validate_on_submit():\n",
"VAR_31 = get_db()\n",
"return render_template('users/login.html', VAR_9=form, title='Login')\n",
"VAR_32 = VAR_31.search((Query().username == VAR_9.username.data) & (Query()\n .type == 'user'))\n",
"if VAR_32 and check_password_hash(VAR_32[0]['hashed_password'], VAR_9.\n",
"VAR_32 = User.from_db(VAR_32[0])\n",
"flash('Invalid credentials', 'error')\n",
"login_user(VAR_32, remember=True)\n",
"return redirect('/login')\n",
"flash('Login successful!', 'success')\n",
"VAR_37 = request.args.get('next')\n",
"return redirect(VAR_37 or '/')\n"
] | [
"@app.route('/login', methods=['GET', 'POST'])...\n",
"form = forms.UserForm()\n",
"if form.validate_on_submit():\n",
"db = get_db()\n",
"return render_template('users/login.html', form=form, title='Login')\n",
"user = db.search((Query().username == form.username.data) & (Query().type ==\n 'user'))\n",
"if user and check_password_hash(user[0]['hashed_password'], form.password.data\n",
"user = User.from_db(user[0])\n",
"flash('Invalid credentials', 'error')\n",
"login_user(user, remember=True)\n",
"return redirect('/login')\n",
"flash('Login successful!', 'success')\n",
"next_url = request.args.get('next')\n",
"return redirect(next_url or '/')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Return'",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_8():...\n",
"return FUNC_4('tickets_per_page')\n"
] | [
"def tickets_per_page_default():...\n",
"return get_default_setting('tickets_per_page')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@log_function...\n",
"VAR_2 = FUNC_1('/send_join/%s/%s', VAR_6, VAR_7)\n",
"VAR_37 = await self.client.put_json(VAR_5=destination, VAR_2=path, VAR_39=\n content)\n",
"return VAR_37\n"
] | [
"@log_function...\n",
"path = _create_v1_path('/send_join/%s/%s', room_id, event_id)\n",
"response = await self.client.put_json(destination=destination, path=path,\n data=content)\n",
"return response\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_11(self, VAR_11=None, VAR_17=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.flags.in_print:\n",
"return\n",
"self.flags.notifications_executed = []\n",
"if VAR_11 != None:\n",
"self.flags.ignore_permissions = VAR_11\n",
"self.flags.ignore_version = frappe.flags.in_test if VAR_17 is None else VAR_17\n",
"if self.get('__islocal') or not self.get('name'):\n",
"self.insert()\n",
"self.check_permission('write', 'save')\n",
"return\n",
"self.set_user_and_timestamp()\n",
"self.set_docstatus()\n",
"self.check_if_latest()\n",
"self.set_parent_in_children()\n",
"self.set_name_in_children()\n",
"self.validate_higher_perm_levels()\n",
"self._validate_links()\n",
"self.run_before_save_methods()\n",
"if self._action != 'cancel':\n",
"self._validate()\n",
"if self._action == 'update_after_submit':\n",
"self.validate_update_after_submit()\n",
"self.set_docstatus()\n",
"if self.meta.issingle:\n",
"self.update_single(self.get_valid_dict())\n",
"self.db_update()\n",
"self.update_children()\n",
"self.run_post_save_methods()\n",
"if hasattr(self, '__unsaved'):\n",
"delattr(self, '__unsaved')\n",
"return self\n"
] | [
"def _save(self, ignore_permissions=None, ignore_version=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.flags.in_print:\n",
"return\n",
"self.flags.notifications_executed = []\n",
"if ignore_permissions != None:\n",
"self.flags.ignore_permissions = ignore_permissions\n",
"self.flags.ignore_version = (frappe.flags.in_test if ignore_version is None\n else ignore_version)\n",
"if self.get('__islocal') or not self.get('name'):\n",
"self.insert()\n",
"self.check_permission('write', 'save')\n",
"return\n",
"self.set_user_and_timestamp()\n",
"self.set_docstatus()\n",
"self.check_if_latest()\n",
"self.set_parent_in_children()\n",
"self.set_name_in_children()\n",
"self.validate_higher_perm_levels()\n",
"self._validate_links()\n",
"self.run_before_save_methods()\n",
"if self._action != 'cancel':\n",
"self._validate()\n",
"if self._action == 'update_after_submit':\n",
"self.validate_update_after_submit()\n",
"self.set_docstatus()\n",
"if self.meta.issingle:\n",
"self.update_single(self.get_valid_dict())\n",
"self.db_update()\n",
"self.update_children()\n",
"self.run_post_save_methods()\n",
"if hasattr(self, '__unsaved'):\n",
"delattr(self, '__unsaved')\n",
"return self\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Return'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Return'"
] |