lines: sequencelengths [1, 383]
raw_lines: sequencelengths [1, 383]
label: sequencelengths [1, 383]
type: sequencelengths [1, 383]
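The four features above appear to be parallel per-record sequences: "lines" holds code with identifiers normalized to VAR_n/FUNC_n placeholders, "raw_lines" holds the same lines with their original identifiers, and "label" and "type" give a per-line integer tag and AST node kind. As a minimal sketch only (not part of the dataset tooling), one record could be iterated like this, assuming it has been loaded as a JSON object with these four equal-length lists; the file name "records.jsonl" is a hypothetical placeholder:

import json

def iter_record(record):
    # Yield aligned (normalized_line, raw_line, label, node_type) tuples
    # for one record; assumes the four lists are parallel and equal-length.
    for norm, raw, label, node_type in zip(
        record["lines"], record["raw_lines"], record["label"], record["type"]
    ):
        yield norm.rstrip("\n"), raw.rstrip("\n"), label, node_type

# Hypothetical usage: one JSON record per line in "records.jsonl".
with open("records.jsonl", encoding="utf-8") as fh:
    for line in fh:
        for norm, raw, label, node_type in iter_record(json.loads(line)):
            print(label, node_type, raw)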
[ "def FUNC_54(self):...\n", "\"\"\"docstring\"\"\"\n", "self.login()\n", "VAR_3 = self.client.get('/logout/')\n", "self.assertContains(VAR_3, 'Logged out')\n", "self.confirm_logged_out()\n" ]
[ "def test_logout_default(self):...\n", "\"\"\"docstring\"\"\"\n", "self.login()\n", "response = self.client.get('/logout/')\n", "self.assertContains(response, 'Logged out')\n", "self.confirm_logged_out()\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "@ensure_csrf_cookie...\n", "\"\"\"docstring\"\"\"\n", "VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)\n", "VAR_65 = get_course_by_id(VAR_10)\n", "VAR_40 = VAR_9.GET.get('unique_student_identifier', None)\n", "if VAR_40 is not None:\n", "VAR_40 = get_student_from_identifier(VAR_40)\n", "VAR_189 = VAR_10.make_usage_key_from_deprecated_string(VAR_65.entrance_exam_id)\n", "return HttpResponseBadRequest(_('Course has no valid entrance exam section.'))\n", "if VAR_40:\n", "VAR_132 = instructor_task.api.get_entrance_exam_instructor_task_history(VAR_10,\n VAR_189, VAR_40)\n", "VAR_132 = instructor_task.api.get_entrance_exam_instructor_task_history(VAR_10,\n VAR_189)\n", "VAR_63 = {'tasks': map(extract_task_features, VAR_132)}\n", "return JsonResponse(VAR_63)\n" ]
[ "@ensure_csrf_cookie...\n", "\"\"\"docstring\"\"\"\n", "course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)\n", "course = get_course_by_id(course_id)\n", "student = request.GET.get('unique_student_identifier', None)\n", "if student is not None:\n", "student = get_student_from_identifier(student)\n", "entrance_exam_key = course_id.make_usage_key_from_deprecated_string(course.\n entrance_exam_id)\n", "return HttpResponseBadRequest(_('Course has no valid entrance exam section.'))\n", "if student:\n", "tasks = instructor_task.api.get_entrance_exam_instructor_task_history(course_id\n , entrance_exam_key, student)\n", "tasks = instructor_task.api.get_entrance_exam_instructor_task_history(course_id\n , entrance_exam_key)\n", "response_payload = {'tasks': map(extract_task_features, tasks)}\n", "return JsonResponse(response_payload)\n" ]
[ 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Return'", "Condition", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_47(self, VAR_139):...\n", "\"\"\"docstring\"\"\"\n", "VAR_139.auth_two_factor_user = None\n", "VAR_139.auth_two_factor = None\n", "VAR_139.auth_two_factor_enabled = False\n", "VAR_139.auth_two_factor_tries_left = self.settings.auth_two_factor_tries_left\n" ]
[ "def _reset_two_factor_auth(self, session):...\n", "\"\"\"docstring\"\"\"\n", "session.auth_two_factor_user = None\n", "session.auth_two_factor = None\n", "session.auth_two_factor_enabled = False\n", "session.auth_two_factor_tries_left = self.settings.auth_two_factor_tries_left\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_42(self, VAR_21, VAR_22):...\n", "if not self.view_settings.get(VAR_21):\n", "return None\n", "return self.view_settings[VAR_21].get(VAR_22)\n" ]
[ "def get_view_property(self, page, prop):...\n", "if not self.view_settings.get(page):\n", "return None\n", "return self.view_settings[page].get(prop)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_83(*VAR_2, **VAR_3):...\n", "VAR_9 = VAR_2[0]\n", "VAR_193 = {'error': 'Missing required query parameter(s)', 'parameters': [],\n 'info': {}}\n", "for VAR_209, extra in VAR_47:\n", "VAR_199 = object()\n", "if len(VAR_193['parameters']) > 0:\n", "if VAR_9.POST.get(VAR_209, VAR_199) == VAR_199:\n", "return JsonResponse(VAR_193, status=400)\n", "return VAR_1(*VAR_2, **kwargs)\n", "VAR_193['parameters'].append(VAR_209)\n", "VAR_193['info'][VAR_209] = extra\n" ]
[ "def wrapped(*args, **kwargs):...\n", "request = args[0]\n", "error_response_data = {'error': 'Missing required query parameter(s)',\n 'parameters': [], 'info': {}}\n", "for param, extra in required_params:\n", "default = object()\n", "if len(error_response_data['parameters']) > 0:\n", "if request.POST.get(param, default) == default:\n", "return JsonResponse(error_response_data, status=400)\n", "return func(*args, **kwargs)\n", "error_response_data['parameters'].append(param)\n", "error_response_data['info'][param] = extra\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Assign'", "Condition", "Condition", "Return'", "Return'", "Expr'", "Assign'" ]
[ "def FUNC_16(self, VAR_14):...\n", "VAR_36 = QUrl('file:///tmp/foo')\n", "VAR_37 = QNetworkRequest(VAR_36)\n", "VAR_41 = UnicodeEncodeError('ascii', '', 0, 2, 'foo')\n", "VAR_14.patch('os.path.isdir', side_effect=err)\n", "VAR_38 = filescheme.handler(VAR_37)\n", "assert VAR_38 is None\n" ]
[ "def test_unicode_encode_error(self, mocker):...\n", "url = QUrl('file:///tmp/foo')\n", "req = QNetworkRequest(url)\n", "err = UnicodeEncodeError('ascii', '', 0, 2, 'foo')\n", "mocker.patch('os.path.isdir', side_effect=err)\n", "reply = filescheme.handler(req)\n", "assert reply is None\n" ]
[ 0, 0, 0, 0, 0, 3, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assert'" ]
[ "def __call__(self, VAR_14):...\n", "return self.parse(VAR_14)\n" ]
[ "def __call__(self, text):...\n", "return self.parse(text)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2(VAR_1: Type['pl.Trainer'], VAR_5: str=...\n", "\"\"\"docstring\"\"\"\n", "VAR_18 = FUNC_3(VAR_1)\n", "VAR_19 = {}\n", "for VAR_40, VAR_11, VAR_11 in VAR_18:\n", "VAR_29 = VAR_5 % {'cls_name': VAR_1.__name__.upper(), 'cls_argument':\n VAR_40.upper()}\n", "return Namespace(**env_args)\n", "VAR_30 = os.environ.get(VAR_29)\n", "if not (VAR_30 is None or VAR_30 == ''):\n", "VAR_30 = eval(VAR_30)\n", "VAR_19[VAR_40] = VAR_30\n" ]
[ "def parse_env_variables(cls: Type['pl.Trainer'], template: str=...\n", "\"\"\"docstring\"\"\"\n", "cls_arg_defaults = get_init_arguments_and_types(cls)\n", "env_args = {}\n", "for arg_name, _, _ in cls_arg_defaults:\n", "env = template % {'cls_name': cls.__name__.upper(), 'cls_argument':\n arg_name.upper()}\n", "return Namespace(**env_args)\n", "val = os.environ.get(env)\n", "if not (val is None or val == ''):\n", "val = eval(val)\n", "env_args[arg_name] = val\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "For", "Assign'", "Return'", "Assign'", "Condition", "Assign'", "Assign'" ]
[ "def FUNC_27(self):...\n", "VAR_5 = {'org.matrix.not_labels': ['#fun']}\n", "VAR_6 = FUNC_0(sender='@foo:bar', type='m.room.message', room_id=\n '!secretbase:unknown', content={EventContentFields.LABELS: ['#fun']})\n", "self.assertFalse(Filter(VAR_5).check(VAR_6))\n", "VAR_6 = FUNC_0(sender='@foo:bar', type='m.room.message', room_id=\n '!secretbase:unknown', content={EventContentFields.LABELS: ['#notfun']})\n", "self.assertTrue(Filter(VAR_5).check(VAR_6))\n" ]
[ "def test_filter_not_labels(self):...\n", "definition = {'org.matrix.not_labels': ['#fun']}\n", "event = MockEvent(sender='@foo:bar', type='m.room.message', room_id=\n '!secretbase:unknown', content={EventContentFields.LABELS: ['#fun']})\n", "self.assertFalse(Filter(definition).check(event))\n", "event = MockEvent(sender='@foo:bar', type='m.room.message', room_id=\n '!secretbase:unknown', content={EventContentFields.LABELS: ['#notfun']})\n", "self.assertTrue(Filter(definition).check(event))\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_6(VAR_0):...\n", "VAR_12 = VAR_0.cfg.url_prefix_static\n", "VAR_0.write('string' % locals())\n" ]
[ "def image_dialog(request):...\n", "url_prefix_static = request.cfg.url_prefix_static\n", "request.write(\n \"\"\"\n<!--\n * FCKeditor - The text editor for internet\n * Copyright (C) 2003-2004 Frederico Caldeira Knabben\n *\n * Licensed under the terms of the GNU Lesser General Public License:\n * http://www.opensource.org/licenses/lgpl-license.php\n *\n * For further information visit:\n * http://www.fckeditor.net/\n *\n * File Authors:\n * Frederico Caldeira Knabben ([email protected])\n * Florian Festi\n-->\n<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0 Transitional//EN\">\n<html>\n <head>\n <title>Link Properties</title>\n <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\" />\n <meta name=\"robots\" content=\"noindex,nofollow\" />\n <script src=\"%(url_prefix_static)s/applets/FCKeditor/editor/dialog/common/fck_dialog_common.js\" type=\"text/javascript\"></script>\n <script src=\"%(url_prefix_static)s/applets/moinFCKplugins/moinimage/fck_image.js\" type=\"text/javascript\"></script>\n <script src=\"%(url_prefix_static)s/applets/moinFCKplugins/moinurllib.js\" type=\"text/javascript\"></script>\n </head>\n <body scroll=\"no\" style=\"OVERFLOW: hidden\">\n <table cellspacing=\"0\" cellpadding=\"0\" width=\"100%%\" border=\"0\">\n <tr>\n <td nowrap=\"nowrap\">\n <span fckLang=\"DlgLnkProto\">Protocol</span><br />\n <select id=\"cmbLinkProtocol\" onchange=\"OnProtocolChange();\">\n <option value=\"attachment:\" selected=\"selected\">attachment:</option>\n <option value=\"http://\">http://</option>\n <option value=\"https://\">https://</option>\n <!-- crashes often: <option value=\"drawing:\">drawing:</option> -->\n <option value=\"\" fckLang=\"DlgLnkProtoOther\">&lt;other&gt;</option>\n </select>\n </td>\n <td nowrap=\"nowrap\">&nbsp;</td>\n <td nowrap=\"nowrap\" width=\"100%%\">\n <span fckLang=\"DlgLnkURL\">URL or File Name (attachment:)</span><br />\n <input id=\"txtUrl\" style=\"WIDTH: 100%%\" type=\"text\" onkeyup=\"OnUrlChange();\" onchange=\"OnUrlChange();\" />\n </td>\n </tr>\n <tr>\n <td colspan=2>\n <div id=\"divChkLink\">\n <input id=\"chkLink\" type=\"checkbox\"> Link to\n </div>\n </td>\n </table>\n </body>\n</html>\n\"\"\"\n % locals())\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "@app.route('/')...\n", "VAR_7 = request.args.get('path', '').lstrip('/')\n", "VAR_29 = data.get_items(VAR_7=path)\n", "flash('Directory does not exist.', 'error')\n", "return render_template('home.html', title=path or 'root', search_enabled=\n app.config['SEARCH_CONF']['enabled'], dir=files, current_path=path,\n new_folder_form=forms.NewFolderForm(), delete_form=forms.\n DeleteFolderForm(), rename_form=forms.RenameDirectoryForm(), view_only=\n 0, search_engine=app.config['SEARCH_CONF']['engine'])\n", "return redirect('/')\n" ]
[ "@app.route('/')...\n", "path = request.args.get('path', '').lstrip('/')\n", "files = data.get_items(path=path)\n", "flash('Directory does not exist.', 'error')\n", "return render_template('home.html', title=path or 'root', search_enabled=\n app.config['SEARCH_CONF']['enabled'], dir=files, current_path=path,\n new_folder_form=forms.NewFolderForm(), delete_form=forms.\n DeleteFolderForm(), rename_form=forms.RenameDirectoryForm(), view_only=\n 0, search_engine=app.config['SEARCH_CONF']['engine'])\n", "return redirect('/')\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Expr'", "Return'", "Return'" ]
[ "def FUNC_88(VAR_176, VAR_177):...\n", "\"\"\"docstring\"\"\"\n", "VAR_207 = unicodecsv.reader(UniversalNewlineIterator(f), encoding='utf-8')\n", "VAR_210 = next(VAR_207)\n", "VAR_210 = []\n", "VAR_180 = None\n", "if 'cohort' not in VAR_210:\n", "VAR_180 = _(\"The file must contain a 'cohort' column containing cohort names.\")\n", "if 'email' not in VAR_210 and 'username' not in VAR_210:\n", "if VAR_180:\n", "VAR_180 = _(\n \"The file must contain a 'username' column, an 'email' column, or both.\")\n" ]
[ "def validator(file_storage, file_to_validate):...\n", "\"\"\"docstring\"\"\"\n", "reader = unicodecsv.reader(UniversalNewlineIterator(f), encoding='utf-8')\n", "fieldnames = next(reader)\n", "fieldnames = []\n", "msg = None\n", "if 'cohort' not in fieldnames:\n", "msg = _(\"The file must contain a 'cohort' column containing cohort names.\")\n", "if 'email' not in fieldnames and 'username' not in fieldnames:\n", "if msg:\n", "msg = _(\n \"The file must contain a 'username' column, an 'email' column, or both.\")\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'" ]
[ "@log_function...\n", "VAR_2 = FUNC_1('/send_leave/%s/%s', VAR_6, VAR_7)\n", "VAR_37 = await self.client.put_json(VAR_5=destination, VAR_2=path, VAR_39=\n content, VAR_15=True)\n", "return VAR_37\n" ]
[ "@log_function...\n", "path = _create_v1_path('/send_leave/%s/%s', room_id, event_id)\n", "response = await self.client.put_json(destination=destination, path=path,\n data=content, ignore_backoff=True)\n", "return response\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Return'" ]
[ "@VAR_2.route('/advsearch', methods=['POST'])...\n", "VAR_89 = dict(request.form)\n", "VAR_90 = ['include_tag', 'exclude_tag', 'include_serie', 'exclude_serie',\n 'include_shelf', 'exclude_shelf', 'include_language',\n 'exclude_language', 'include_extension', 'exclude_extension']\n", "for VAR_111 in VAR_90:\n", "VAR_89[VAR_111] = list(request.form.getlist(VAR_111))\n", "VAR_91['query'] = json.dumps(VAR_89)\n", "return redirect(url_for('web.books_list', VAR_8='advsearch', VAR_20=\n 'stored', VAR_56=''))\n" ]
[ "@web.route('/advsearch', methods=['POST'])...\n", "values = dict(request.form)\n", "params = ['include_tag', 'exclude_tag', 'include_serie', 'exclude_serie',\n 'include_shelf', 'exclude_shelf', 'include_language',\n 'exclude_language', 'include_extension', 'exclude_extension']\n", "for param in params:\n", "values[param] = list(request.form.getlist(param))\n", "flask_session['query'] = json.dumps(values)\n", "return redirect(url_for('web.books_list', data='advsearch', sort_param=\n 'stored', query=''))\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_32(VAR_87):...\n", "VAR_86.update(VAR_87.encode('utf-8'))\n" ]
[ "def hash_update(value):...\n", "hash.update(value.encode('utf-8'))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_33(VAR_30):...\n", "print(VAR_29 + VAR_30)\n" ]
[ "def in_print(s):...\n", "print(indent_str + s)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "@ensure_csrf_cookie...\n", "\"\"\"docstring\"\"\"\n", "VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)\n", "VAR_134 = strip_if_string(VAR_9.GET.get('problem_location_str', False))\n", "VAR_40 = VAR_9.GET.get('unique_student_identifier', None)\n", "if VAR_40 is not None:\n", "VAR_40 = get_student_from_identifier(VAR_40)\n", "if VAR_40 and not VAR_134:\n", "return HttpResponseBadRequest(\n 'unique_student_identifier must accompany problem_location_str')\n", "if VAR_134:\n", "VAR_132 = instructor_task.api.get_running_instructor_tasks(VAR_10)\n", "VAR_188 = VAR_10.make_usage_key_from_deprecated_string(VAR_134)\n", "return HttpResponseBadRequest()\n", "if VAR_40:\n", "VAR_63 = {'tasks': map(extract_task_features, VAR_132)}\n", "VAR_132 = instructor_task.api.get_instructor_task_history(VAR_10, VAR_188,\n VAR_40)\n", "VAR_132 = instructor_task.api.get_instructor_task_history(VAR_10, VAR_188)\n", "return JsonResponse(VAR_63)\n" ]
[ "@ensure_csrf_cookie...\n", "\"\"\"docstring\"\"\"\n", "course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)\n", "problem_location_str = strip_if_string(request.GET.get(\n 'problem_location_str', False))\n", "student = request.GET.get('unique_student_identifier', None)\n", "if student is not None:\n", "student = get_student_from_identifier(student)\n", "if student and not problem_location_str:\n", "return HttpResponseBadRequest(\n 'unique_student_identifier must accompany problem_location_str')\n", "if problem_location_str:\n", "tasks = instructor_task.api.get_running_instructor_tasks(course_id)\n", "module_state_key = course_id.make_usage_key_from_deprecated_string(\n problem_location_str)\n", "return HttpResponseBadRequest()\n", "if student:\n", "response_payload = {'tasks': map(extract_task_features, tasks)}\n", "tasks = instructor_task.api.get_instructor_task_history(course_id,\n module_state_key, student)\n", "tasks = instructor_task.api.get_instructor_task_history(course_id,\n module_state_key)\n", "return JsonResponse(response_payload)\n" ]
[ 0, 0, 0, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Return'", "Condition", "Assign'", "Assign'", "Return'", "Condition", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_18(self):...\n", "VAR_19 = self.helper.create_room_as(self.user_id)\n", "VAR_20, VAR_21 = self.make_request('PUT', b'directory/room/%23test%3Atest',\n ('{\"room_id\":\"%s\"}' % (VAR_19,)).encode('ascii'))\n", "self.assertEquals(403, VAR_21.code, VAR_21.result)\n" ]
[ "def test_denied(self):...\n", "room_id = self.helper.create_room_as(self.user_id)\n", "request, channel = self.make_request('PUT',\n b'directory/room/%23test%3Atest', ('{\"room_id\":\"%s\"}' % (room_id,)).\n encode('ascii'))\n", "self.assertEquals(403, channel.code, channel.result)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'" ]
[ "@property...\n", "return self.role_anonymous()\n" ]
[ "@property...\n", "return self.role_anonymous()\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "from typing import Any, Callable, List, Optional\n", "from urllib.parse import urlparse\n", "from django.conf import settings\n", "from django.http import HttpResponse\n", "from django.urls import URLPattern, include, path, re_path\n", "from django.views.decorators import csrf\n", "from django.views.decorators.csrf import csrf_exempt\n", "from drf_spectacular.views import SpectacularAPIView, SpectacularRedocView, SpectacularSwaggerView\n", "from posthog.api import api_not_found, authentication, capture, dashboard, decide, organizations_router, project_dashboards_router, projects_router, router, signup, user\n", "from posthog.demo import demo\n", "from .utils import render_template\n", "from .views import health, login_required, preflight_check, robots_txt, security_txt, sso_login, stats\n", "ee_urlpatterns: List[Any] = []\n", "from ee.urls import extend_api_router\n", "extend_api_router(router, projects_router=projects_router,\n project_dashboards_router=project_dashboards_router)\n", "from multi_tenancy.router import extend_api_router as extend_api_router_cloud\n", "extend_api_router_cloud(router, organizations_router=organizations_router,\n projects_router=projects_router)\n", "@csrf.ensure_csrf_cookie...\n", "from ee.urls import urlpatterns as ee_urlpatterns\n", "return render_template('index.html', VAR_0)\n" ]
[ "from typing import Any, Callable, List, Optional\n", "from urllib.parse import urlparse\n", "from django.conf import settings\n", "from django.http import HttpResponse\n", "from django.urls import URLPattern, include, path, re_path\n", "from django.views.decorators import csrf\n", "from django.views.decorators.csrf import csrf_exempt\n", "from drf_spectacular.views import SpectacularAPIView, SpectacularRedocView, SpectacularSwaggerView\n", "from posthog.api import api_not_found, authentication, capture, dashboard, decide, organizations_router, project_dashboards_router, projects_router, router, signup, user\n", "from posthog.demo import demo\n", "from .utils import render_template\n", "from .views import health, login_required, preflight_check, robots_txt, security_txt, sso_login, stats\n", "ee_urlpatterns: List[Any] = []\n", "from ee.urls import extend_api_router\n", "extend_api_router(router, projects_router=projects_router,\n project_dashboards_router=project_dashboards_router)\n", "from multi_tenancy.router import extend_api_router as extend_api_router_cloud\n", "extend_api_router_cloud(router, organizations_router=organizations_router,\n projects_router=projects_router)\n", "@csrf.ensure_csrf_cookie...\n", "from ee.urls import urlpatterns as ee_urlpatterns\n", "return render_template('index.html', request)\n" ]
[ 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "AnnAssign'", "ImportFrom'", "Expr'", "ImportFrom'", "Expr'", "Condition", "ImportFrom'", "Return'" ]
[ "@override_config({'push': {'group_unread_count_by_room': False}})...\n", "\"\"\"docstring\"\"\"\n", "self._test_push_unread_count()\n", "self.assertEqual(self.push_attempts[5][2]['notification']['counts'][\n 'unread'], 4)\n" ]
[ "@override_config({'push': {'group_unread_count_by_room': False}})...\n", "\"\"\"docstring\"\"\"\n", "self._test_push_unread_count()\n", "self.assertEqual(self.push_attempts[5][2]['notification']['counts'][\n 'unread'], 4)\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Expr'", "Expr'" ]
[ "def FUNC_24(self, VAR_12):...\n", "VAR_14 = None\n", "if VAR_12.get('sort_by'):\n", "VAR_26 = CLASS_0._format(VAR_12.get('sort_by').split('.')) + ' ' + VAR_12.get(\n 'sort_order')\n", "if VAR_12.get('order_by'):\n", "if VAR_12.get('sort_by_next'):\n", "VAR_26 = VAR_12.get('order_by')\n", "VAR_26 = CLASS_0._format([self.ref_doctype, 'modified']) + ' desc'\n", "VAR_26 += ', ' + CLASS_0._format(VAR_12.get('sort_by_next').split('.')\n ) + ' ' + VAR_12.get('sort_order_next')\n", "VAR_27 = None\n", "if VAR_12.get('group_by'):\n", "VAR_14 = frappe._dict(VAR_12['group_by'])\n", "return VAR_26, VAR_27, VAR_14\n", "VAR_27 = VAR_14['group_by']\n", "VAR_26 = '_aggregate_column desc'\n" ]
[ "def get_standard_report_order_by(self, params):...\n", "group_by_args = None\n", "if params.get('sort_by'):\n", "order_by = Report._format(params.get('sort_by').split('.')) + ' ' + params.get(\n 'sort_order')\n", "if params.get('order_by'):\n", "if params.get('sort_by_next'):\n", "order_by = params.get('order_by')\n", "order_by = Report._format([self.ref_doctype, 'modified']) + ' desc'\n", "order_by += ', ' + Report._format(params.get('sort_by_next').split('.')\n ) + ' ' + params.get('sort_order_next')\n", "group_by = None\n", "if params.get('group_by'):\n", "group_by_args = frappe._dict(params['group_by'])\n", "return order_by, group_by, group_by_args\n", "group_by = group_by_args['group_by']\n", "order_by = '_aggregate_column desc'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "AugAssign'", "Assign'", "Condition", "Assign'", "Return'", "Assign'", "Assign'" ]
[ "def FUNC_86(self, VAR_25, *VAR_0, **VAR_1):...\n", "FUNC_83(self, VAR_72(self, *VAR_0, **kwargs))\n", "for VAR_6 in VAR_73:\n", "FUNC_83(self, VAR_6(self, VAR_25, *VAR_0, **kwargs))\n", "return self._return_value\n" ]
[ "def runner(self, method, *args, **kwargs):...\n", "add_to_return_value(self, fn(self, *args, **kwargs))\n", "for f in hooks:\n", "add_to_return_value(self, f(self, method, *args, **kwargs))\n", "return self._return_value\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "For", "Expr'", "Return'" ]
[ "def FUNC_58(VAR_91, *VAR_119):...\n", "\"\"\"docstring\"\"\"\n", "VAR_91 = FUNC_56(VAR_91)\n", "return FUNC_61(VAR_1.module_app[VAR_91] + '.' + VAR_91, *VAR_119)\n" ]
[ "def get_module_path(module, *joins):...\n", "\"\"\"docstring\"\"\"\n", "module = scrub(module)\n", "return get_pymodule_path(local.module_app[module] + '.' + module, *joins)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_2(self, VAR_5, VAR_6, VAR_3=None):...\n", "VAR_37 = super(CLASS_13, self).render(VAR_5, VAR_6, VAR_3)\n", "if VAR_6:\n", "VAR_6 = force_text(self._format_value(VAR_6))\n", "return VAR_37\n", "VAR_28 = {'href': mark_safe(smart_urlquote(VAR_6))}\n", "VAR_37 = format_html('<p class=\"url\">{0} <a {1}>{2}</a><br />{3} {4}</p>',\n _('Currently:'), flatatt(VAR_28), VAR_6, _('Change:'), VAR_37)\n" ]
[ "def render(self, name, value, attrs=None):...\n", "html = super(AdminURLFieldWidget, self).render(name, value, attrs)\n", "if value:\n", "value = force_text(self._format_value(value))\n", "return html\n", "final_attrs = {'href': mark_safe(smart_urlquote(value))}\n", "html = format_html('<p class=\"url\">{0} <a {1}>{2}</a><br />{3} {4}</p>', _(\n 'Currently:'), flatatt(final_attrs), value, _('Change:'), html)\n" ]
[ 0, 0, 0, 0, 0, 2, 2 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Return'", "Assign'", "Assign'" ]
[ "def FUNC_31(self):...\n", "if VAR_53.session['admin'] and cherry.config['general.update_notification']:\n", "FUNC_0()\n", "return {'type': 'wisdom', 'data': self.model.motd()}\n", "VAR_104 = self.model.check_for_updates()\n", "if VAR_104:\n", "VAR_115 = VAR_104[0]['version']\n", "VAR_116 = []\n", "VAR_117 = []\n", "for version in VAR_104:\n", "for update in version['features']:\n", "VAR_118 = {'type': 'update', 'data': {}}\n", "if update.startswith('FEATURE:'):\n", "VAR_118['data']['version'] = VAR_115\n", "VAR_116.append(update[len('FEATURE:'):])\n", "if update.startswith('FIX:'):\n", "VAR_118['data']['features'] = VAR_116\n", "VAR_117.append(update[len('FIX:'):])\n", "if update.startswith('FIXED:'):\n", "VAR_118['data']['fixes'] = VAR_117\n", "VAR_117.append(update[len('FIXED:'):])\n", "return VAR_118\n" ]
[ "def api_getmotd(self):...\n", "if cherrypy.session['admin'] and cherry.config['general.update_notification']:\n", "_save_and_release_session()\n", "return {'type': 'wisdom', 'data': self.model.motd()}\n", "new_versions = self.model.check_for_updates()\n", "if new_versions:\n", "newest_version = new_versions[0]['version']\n", "features = []\n", "fixes = []\n", "for version in new_versions:\n", "for update in version['features']:\n", "retdata = {'type': 'update', 'data': {}}\n", "if update.startswith('FEATURE:'):\n", "retdata['data']['version'] = newest_version\n", "features.append(update[len('FEATURE:'):])\n", "if update.startswith('FIX:'):\n", "retdata['data']['features'] = features\n", "fixes.append(update[len('FIX:'):])\n", "if update.startswith('FIXED:'):\n", "retdata['data']['fixes'] = fixes\n", "fixes.append(update[len('FIXED:'):])\n", "return retdata\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Return'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "For", "For", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Expr'", "Return'" ]
[ "\"\"\"string\"\"\"\n", "import json\n", "import os\n", "import time\n", "import textwrap\n", "import mimetypes\n", "import urllib\n", "import collections\n", "import pkg_resources\n", "import sip\n", "from PyQt5.QtCore import QUrlQuery, QUrl\n", "import qutebrowser\n", "from qutebrowser.config import config, configdata, configexc, configdiff\n", "from qutebrowser.utils import version, utils, jinja, log, message, docutils, objreg, urlutils\n", "from qutebrowser.misc import objects\n", "VAR_0 = ':pyeval was never called'\n", "VAR_1 = ':spawn was never called'\n", "VAR_2 = {}\n", "\"\"\"Raised when no handler was found for the given URL.\"\"\"\n", "\"\"\"Called when there was an OSError inside a handler.\"\"\"\n", "\"\"\"string\"\"\"\n", "def __init__(self, VAR_7, VAR_8):...\n", "self.errorstring = VAR_7\n", "self.error = VAR_8\n", "super().__init__(VAR_7)\n", "\"\"\"string\"\"\"\n", "def __init__(self, VAR_3):...\n", "super().__init__(VAR_3.toDisplayString())\n", "self.url = VAR_3\n", "\"\"\"string\"\"\"\n", "def __init__(self, VAR_9, VAR_10=None):...\n", "self._name = VAR_9\n", "self._backend = VAR_10\n", "self._function = None\n", "def __call__(self, VAR_11):...\n", "self._function = VAR_11\n", "VAR_2[self._name] = self.wrapper\n", "return VAR_11\n" ]
[ "\"\"\"Backend-independent qute://* code.\n\nModule attributes:\n pyeval_output: The output of the last :pyeval command.\n _HANDLERS: The handlers registered via decorators.\n\"\"\"\n", "import json\n", "import os\n", "import time\n", "import textwrap\n", "import mimetypes\n", "import urllib\n", "import collections\n", "import pkg_resources\n", "import sip\n", "from PyQt5.QtCore import QUrlQuery, QUrl\n", "import qutebrowser\n", "from qutebrowser.config import config, configdata, configexc, configdiff\n", "from qutebrowser.utils import version, utils, jinja, log, message, docutils, objreg, urlutils\n", "from qutebrowser.misc import objects\n", "pyeval_output = ':pyeval was never called'\n", "spawn_output = ':spawn was never called'\n", "_HANDLERS = {}\n", "\"\"\"Raised when no handler was found for the given URL.\"\"\"\n", "\"\"\"Called when there was an OSError inside a handler.\"\"\"\n", "\"\"\"Exception to signal that a handler should return an ErrorReply.\n\n Attributes correspond to the arguments in\n networkreply.ErrorNetworkReply.\n\n Attributes:\n errorstring: Error string to print.\n error: Numerical error value.\n \"\"\"\n", "def __init__(self, errorstring, error):...\n", "self.errorstring = errorstring\n", "self.error = error\n", "super().__init__(errorstring)\n", "\"\"\"Exception to signal a redirect should happen.\n\n Attributes:\n url: The URL to redirect to, as a QUrl.\n \"\"\"\n", "def __init__(self, url):...\n", "super().__init__(url.toDisplayString())\n", "self.url = url\n", "\"\"\"Decorator to register a qute://* URL handler.\n\n Attributes:\n _name: The 'foo' part of qute://foo\n backend: Limit which backends the handler can run with.\n \"\"\"\n", "def __init__(self, name, backend=None):...\n", "self._name = name\n", "self._backend = backend\n", "self._function = None\n", "def __call__(self, function):...\n", "self._function = function\n", "_HANDLERS[self._name] = self.wrapper\n", "return function\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "VAR_15 = VAR_6.content\n", "VAR_17 = VAR_15.get('application/json')\n", "if VAR_17 is not None and VAR_17.media_type_schema is not None:\n", "return property_from_data('json_body', VAR_26=True, VAR_5=json_body.\n media_type_schema)\n", "return None\n" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "body_content = body.content\n", "json_body = body_content.get('application/json')\n", "if json_body is not None and json_body.media_type_schema is not None:\n", "return property_from_data('json_body', required=True, data=json_body.\n media_type_schema)\n", "return None\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_32(self):...\n", "VAR_5 = {'rooms': ['!allowed:example.com', '!excluded:example.com'],\n 'not_rooms': ['!excluded:example.com']}\n", "VAR_12 = ['!allowed:example.com', '!excluded:example.com',\n '!not_included:example.com']\n", "VAR_13 = list(Filter(VAR_5).filter_rooms(VAR_12))\n", "self.assertEquals(VAR_13, ['!allowed:example.com'])\n" ]
[ "def test_filter_rooms(self):...\n", "definition = {'rooms': ['!allowed:example.com', '!excluded:example.com'],\n 'not_rooms': ['!excluded:example.com']}\n", "room_ids = ['!allowed:example.com', '!excluded:example.com',\n '!not_included:example.com']\n", "filtered_room_ids = list(Filter(definition).filter_rooms(room_ids))\n", "self.assertEquals(filtered_room_ids, ['!allowed:example.com'])\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_47():...\n", "\"\"\"docstring\"\"\"\n", "if len(request.args) != 2:\n", "session.flash = T('invalid ticket')\n", "VAR_3 = FUNC_5()\n", "redirect(URL('site'))\n", "VAR_27 = request.env.web2py_version\n", "VAR_113 = request.args[1]\n", "VAR_114 = RestrictedError()\n", "request.tickets_db = FUNC_43(VAR_3)[0]\n", "VAR_114.load(request, VAR_3, VAR_113)\n", "VAR_43.view = 'default/ticket.html'\n", "return dict(VAR_3=app, VAR_113=ticket, VAR_67=e.output, VAR_18=e.traceback and\n TRACEBACK(e.traceback), snapshot=e.snapshot, VAR_180=e.code, layer=e.\n layer, VAR_27=myversion)\n" ]
[ "def ticketdb():...\n", "\"\"\"docstring\"\"\"\n", "if len(request.args) != 2:\n", "session.flash = T('invalid ticket')\n", "app = get_app()\n", "redirect(URL('site'))\n", "myversion = request.env.web2py_version\n", "ticket = request.args[1]\n", "e = RestrictedError()\n", "request.tickets_db = get_ticket_storage(app)[0]\n", "e.load(request, app, ticket)\n", "response.view = 'default/ticket.html'\n", "return dict(app=app, ticket=ticket, output=e.output, traceback=e.traceback and\n TRACEBACK(e.traceback), snapshot=e.snapshot, code=e.code, layer=e.layer,\n myversion=myversion)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_5(self):...\n", "VAR_10 = test.test_src_dir_path(VAR_0)\n", "self.parser = saved_model_cli.create_parser()\n", "VAR_11 = self.parser.parse_args(['show', '--dir', VAR_10])\n", "saved_model_cli.show(VAR_11)\n", "VAR_12 = out.getvalue().strip()\n", "VAR_13 = \"\"\"The given SavedModel contains the following tag-sets:\n'serve'\"\"\"\n", "self.assertMultiLineEqual(VAR_12, VAR_13)\n", "self.assertEqual(err.getvalue().strip(), '')\n" ]
[ "def testShowCommandTags(self):...\n", "base_path = test.test_src_dir_path(SAVED_MODEL_PATH)\n", "self.parser = saved_model_cli.create_parser()\n", "args = self.parser.parse_args(['show', '--dir', base_path])\n", "saved_model_cli.show(args)\n", "output = out.getvalue().strip()\n", "exp_out = \"\"\"The given SavedModel contains the following tag-sets:\n'serve'\"\"\"\n", "self.assertMultiLineEqual(output, exp_out)\n", "self.assertEqual(err.getvalue().strip(), '')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_10(self, VAR_3, VAR_4, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "VAR_31 = []\n", "for arg in VAR_5:\n", "if len(VAR_31) == 1:\n", "VAR_13 = complex(arg)\n", "if arg in self._mathSafeEnv:\n", "VAR_3.reply(str(self._complexToString(complex(VAR_31[0]))))\n", "VAR_33 = ', '.join(map(self._complexToString, list(map(complex, VAR_31))))\n", "if VAR_13 == abs(VAR_13):\n", "VAR_39 = self._mathSafeEnv[arg]\n", "if arg in self._rpnEnv:\n", "VAR_3.reply(VAR_0('Stack: [%s]') % VAR_33)\n", "VAR_13 = abs(VAR_13)\n", "VAR_31.append(VAR_13)\n", "if callable(VAR_39):\n", "self._rpnEnv[arg](VAR_31)\n", "VAR_42 = VAR_31.pop()\n", "VAR_40 = False\n", "VAR_31.append(VAR_39)\n", "VAR_43 = VAR_31.pop()\n", "VAR_41 = []\n", "VAR_33 = '%s%s%s' % (VAR_43, arg, VAR_42)\n", "while not VAR_40 and VAR_31:\n", "VAR_31.append(eval(VAR_33, self._mathSafeEnv, self._mathSafeEnv))\n", "VAR_3.error(format(VAR_0('%q is not a defined function.'), arg))\n", "VAR_41.append(VAR_31.pop())\n", "if not VAR_40:\n", "return\n", "VAR_31.append(VAR_39(*VAR_41))\n", "VAR_3.error(VAR_0('Not enough arguments for %s') % arg)\n", "VAR_40 = True\n", "return\n" ]
[ "def rpn(self, irc, msg, args):...\n", "\"\"\"docstring\"\"\"\n", "stack = []\n", "for arg in args:\n", "if len(stack) == 1:\n", "x = complex(arg)\n", "if arg in self._mathSafeEnv:\n", "irc.reply(str(self._complexToString(complex(stack[0]))))\n", "s = ', '.join(map(self._complexToString, list(map(complex, stack))))\n", "if x == abs(x):\n", "f = self._mathSafeEnv[arg]\n", "if arg in self._rpnEnv:\n", "irc.reply(_('Stack: [%s]') % s)\n", "x = abs(x)\n", "stack.append(x)\n", "if callable(f):\n", "self._rpnEnv[arg](stack)\n", "arg2 = stack.pop()\n", "called = False\n", "stack.append(f)\n", "arg1 = stack.pop()\n", "arguments = []\n", "s = '%s%s%s' % (arg1, arg, arg2)\n", "while not called and stack:\n", "stack.append(eval(s, self._mathSafeEnv, self._mathSafeEnv))\n", "irc.error(format(_('%q is not a defined function.'), arg))\n", "arguments.append(stack.pop())\n", "if not called:\n", "return\n", "stack.append(f(*arguments))\n", "irc.error(_('Not enough arguments for %s') % arg)\n", "called = True\n", "return\n" ]
[ 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Condition", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Expr'", "Expr'", "Condition", "Return'", "Expr'", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_2(VAR_14: Headers) ->None:...\n", "\"\"\"docstring\"\"\"\n", "VAR_41 = VAR_14.getRawHeaders(b'Content-Type')\n", "if VAR_41 is None:\n", "VAR_41 = VAR_41[0].decode('ascii')\n", "VAR_42, VAR_43 = cgi.parse_header(VAR_41)\n", "if VAR_42 != 'application/json':\n" ]
[ "def check_content_type_is_json(headers: Headers) ->None:...\n", "\"\"\"docstring\"\"\"\n", "c_type = headers.getRawHeaders(b'Content-Type')\n", "if c_type is None:\n", "c_type = c_type[0].decode('ascii')\n", "val, options = cgi.parse_header(c_type)\n", "if val != 'application/json':\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Assign'", "Condition" ]
[ "def FUNC_77(self):...\n", "VAR_78 = frappe.get_all('ToDo', fields=['owner'], filters={'reference_type':\n self.doctype, 'reference_name': self.name, 'status': ('!=', 'Cancelled')})\n", "VAR_79 = set([assignment.owner for assignment in VAR_78])\n", "return VAR_79\n" ]
[ "def get_assigned_users(self):...\n", "assignments = frappe.get_all('ToDo', fields=['owner'], filters={\n 'reference_type': self.doctype, 'reference_name': self.name, 'status':\n ('!=', 'Cancelled')})\n", "users = set([assignment.owner for assignment in assignments])\n", "return users\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_12(self):...\n", "" ]
[ "def finalize_options(self):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def __call__(self, VAR_2):...\n", "VAR_4 = VAR_2.POST.getlist('s3file')\n", "for field_name in VAR_4:\n", "VAR_3 = VAR_2.POST.getlist(field_name)\n", "if local_dev and VAR_2.path == '/__s3_mock__/':\n", "VAR_2.FILES.setlist(field_name, list(self.get_files_from_storage(VAR_3)))\n", "return views.S3MockView.as_view()(VAR_2)\n", "return self.get_response(VAR_2)\n" ]
[ "def __call__(self, request):...\n", "file_fields = request.POST.getlist('s3file')\n", "for field_name in file_fields:\n", "paths = request.POST.getlist(field_name)\n", "if local_dev and request.path == '/__s3_mock__/':\n", "request.FILES.setlist(field_name, list(self.get_files_from_storage(paths)))\n", "return views.S3MockView.as_view()(request)\n", "return self.get_response(request)\n" ]
[ 0, 0, 0, 0, 0, 1, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Assign'", "Condition", "Expr'", "Return'", "Return'" ]
[ "@FUNC_0...\n", "return SsoHandler(self)\n" ]
[ "@cache_in_self...\n", "return SsoHandler(self)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_136(self, VAR_212=None):...\n", "if not self.auth.user:\n", "redirect(self.auth.settings.login_url)\n", "VAR_167 = self.settings.groups\n", "return 'wiki_editor' in VAR_167 or VAR_212 is None and 'wiki_author' in VAR_167 or VAR_212 is not None and (\n set(VAR_167).intersection(set(VAR_212.can_edit)) or VAR_212.created_by ==\n self.auth.user.id)\n" ]
[ "def can_edit(self, page=None):...\n", "if not self.auth.user:\n", "redirect(self.auth.settings.login_url)\n", "groups = self.settings.groups\n", "return 'wiki_editor' in groups or page is None and 'wiki_author' in groups or page is not None and (\n set(groups).intersection(set(page.can_edit)) or page.created_by == self\n .auth.user.id)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Assign'", "Return'" ]
[ "from __future__ import unicode_literals\n", "import json\n", "from django import template\n", "from django.core.serializers import serialize\n", "from django.db.models.query import QuerySet\n", "from django.utils import six\n", "from django.utils.safestring import mark_safe\n", "from djblets.util.serializers import DjbletsJSONEncoder\n", "VAR_0 = template.Library()\n", "@VAR_0.simple_tag...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = ''\n", "for field in VAR_1:\n", "VAR_6 += \"{ name: '%s', \" % field.name\n", "return '[ %s ]' % VAR_6[:-1]\n", "if field.is_hidden:\n", "VAR_6 += 'hidden: true, '\n", "VAR_6 += \"label: '%s', \" % field.label_tag(field.label + ':')\n", "VAR_6 += \"widget: '%s' },\" % six.text_type(field)\n", "if field.field.required:\n", "VAR_6 += 'required: true, '\n", "if field.field.help_text:\n", "VAR_6 += \"help_text: '%s', \" % field.field.help_text\n" ]
[ "from __future__ import unicode_literals\n", "import json\n", "from django import template\n", "from django.core.serializers import serialize\n", "from django.db.models.query import QuerySet\n", "from django.utils import six\n", "from django.utils.safestring import mark_safe\n", "from djblets.util.serializers import DjbletsJSONEncoder\n", "register = template.Library()\n", "@register.simple_tag...\n", "\"\"\"docstring\"\"\"\n", "s = ''\n", "for field in form:\n", "s += \"{ name: '%s', \" % field.name\n", "return '[ %s ]' % s[:-1]\n", "if field.is_hidden:\n", "s += 'hidden: true, '\n", "s += \"label: '%s', \" % field.label_tag(field.label + ':')\n", "s += \"widget: '%s' },\" % six.text_type(field)\n", "if field.field.required:\n", "s += 'required: true, '\n", "if field.field.help_text:\n", "s += \"help_text: '%s', \" % field.field.help_text\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Condition", "Docstring", "Assign'", "For", "AugAssign'", "Return'", "Condition", "AugAssign'", "AugAssign'", "AugAssign'", "Condition", "AugAssign'", "Condition", "AugAssign'" ]
[ "def FUNC_1(self, VAR_4, VAR_5, **VAR_3):...\n", "return 'string'\n" ]
[ "def generate_config_section(self, config_dir_path, server_name, **kwargs):...\n", "return \"\"\" ## Federation ##\n\n # Restrict federation to the following whitelist of domains.\n # N.B. we recommend also firewalling your federation listener to limit\n # inbound federation traffic as early as possible, rather than relying\n # purely on this application-layer restriction. If not specified, the\n # default is to whitelist everything.\n #\n #federation_domain_whitelist:\n # - lon.example.com\n # - nyc.example.com\n # - syd.example.com\n\n # Prevent federation requests from being sent to the following\n # blacklist IP address CIDR ranges. If this option is not specified, or\n # specified with an empty list, no ip range blacklist will be enforced.\n #\n # As of Synapse v1.4.0 this option also affects any outbound requests to identity\n # servers provided by user input.\n #\n # (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly\n # listed here, since they correspond to unroutable addresses.)\n #\n federation_ip_range_blacklist:\n - '127.0.0.0/8'\n - '10.0.0.0/8'\n - '172.16.0.0/12'\n - '192.168.0.0/16'\n - '100.64.0.0/10'\n - '169.254.0.0/16'\n - '::1/128'\n - 'fe80::/64'\n - 'fc00::/7'\n\n # Report prometheus metrics on the age of PDUs being sent to and received from\n # the following domains. This can be used to give an idea of \"delay\" on inbound\n # and outbound federation, though be aware that any delay can be due to problems\n # at either end or with the intermediate network.\n #\n # By default, no domains are monitored in this way.\n #\n #federation_metrics_domains:\n # - matrix.org\n # - example.com\n \"\"\"\n" ]
[ 0, 4 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_79(VAR_31, VAR_136, VAR_137=None, VAR_138=None, VAR_5=None,...\n", "\"\"\"docstring\"\"\"\n", "VAR_1.message_title = VAR_31\n", "VAR_1.message = VAR_136\n", "VAR_1.response['type'] = 'page'\n", "VAR_1.response['route'] = VAR_78\n", "VAR_1.no_cache = 1\n", "if VAR_138:\n", "VAR_1.response['http_status_code'] = VAR_138\n", "if not VAR_5:\n", "VAR_5 = {}\n", "if not VAR_139:\n", "if VAR_137:\n", "VAR_5['indicator_color'] = VAR_139\n", "VAR_139 = 'green'\n", "if VAR_138 and VAR_138 > 300:\n", "VAR_5['primary_label'] = VAR_140\n", "VAR_139 = 'red'\n", "VAR_139 = 'blue'\n", "VAR_5['primary_action'] = VAR_37\n", "VAR_5['error_code'] = VAR_138\n", "VAR_5['fullpage'] = VAR_141\n", "if VAR_142:\n", "VAR_5['card_width'] = VAR_142\n", "VAR_1.response['context'] = VAR_5\n" ]
[ "def respond_as_web_page(title, html, success=None, http_status_code=None,...\n", "\"\"\"docstring\"\"\"\n", "local.message_title = title\n", "local.message = html\n", "local.response['type'] = 'page'\n", "local.response['route'] = template\n", "local.no_cache = 1\n", "if http_status_code:\n", "local.response['http_status_code'] = http_status_code\n", "if not context:\n", "context = {}\n", "if not indicator_color:\n", "if success:\n", "context['indicator_color'] = indicator_color\n", "indicator_color = 'green'\n", "if http_status_code and http_status_code > 300:\n", "context['primary_label'] = primary_label\n", "indicator_color = 'red'\n", "indicator_color = 'blue'\n", "context['primary_action'] = primary_action\n", "context['error_code'] = http_status_code\n", "context['fullpage'] = fullpage\n", "if width:\n", "context['card_width'] = width\n", "local.response['context'] = context\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'" ]
[ "def FUNC_48(self):...\n", "\"\"\"docstring\"\"\"\n", "return {'url': self.connection_url, 'hostname': self.ip if self.ip else\n 'localhost', 'port': self.port, 'secure': bool(self.certfile),\n 'base_url': self.base_url, 'notebook_dir': os.path.abspath(self.\n notebook_dir), 'pid': os.getpid()}\n" ]
[ "def server_info(self):...\n", "\"\"\"docstring\"\"\"\n", "return {'url': self.connection_url, 'hostname': self.ip if self.ip else\n 'localhost', 'port': self.port, 'secure': bool(self.certfile),\n 'base_url': self.base_url, 'notebook_dir': os.path.abspath(self.\n notebook_dir), 'pid': os.getpid()}\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_36(self):...\n", "\"\"\"docstring\"\"\"\n", "self.helper.send(self.room, body='Hi!', VAR_52=self.other_access_token)\n", "self.helper.send(self.room, body='There!', VAR_52=self.other_access_token)\n", "VAR_22, VAR_23 = self.make_request('POST', '/search?access_token=%s' % (\n self.access_token,), {'search_categories': {'room_events': {'keys': [\n 'content.body'], 'search_term': 'Hi', 'event_context': {\n 'include_profile': True}}}})\n", "self.assertEqual(VAR_23.code, 200)\n", "VAR_49 = VAR_23.json_body['search_categories']['room_events']\n", "self.assertEqual(VAR_49['count'], 1)\n", "self.assertEqual(VAR_49['results'][0]['result']['content']['body'], 'Hi!')\n", "VAR_50 = VAR_49['results'][0]['context']\n", "self.assertEqual(len(VAR_50['profile_info'].keys()), 2)\n", "self.assertEqual(VAR_50['profile_info'][self.other_user_id]['displayname'],\n 'otheruser')\n" ]
[ "def test_include_context(self):...\n", "\"\"\"docstring\"\"\"\n", "self.helper.send(self.room, body='Hi!', tok=self.other_access_token)\n", "self.helper.send(self.room, body='There!', tok=self.other_access_token)\n", "request, channel = self.make_request('POST', '/search?access_token=%s' % (\n self.access_token,), {'search_categories': {'room_events': {'keys': [\n 'content.body'], 'search_term': 'Hi', 'event_context': {\n 'include_profile': True}}}})\n", "self.assertEqual(channel.code, 200)\n", "results = channel.json_body['search_categories']['room_events']\n", "self.assertEqual(results['count'], 1)\n", "self.assertEqual(results['results'][0]['result']['content']['body'], 'Hi!')\n", "context = results['results'][0]['context']\n", "self.assertEqual(len(context['profile_info'].keys()), 2)\n", "self.assertEqual(context['profile_info'][self.other_user_id]['displayname'],\n 'otheruser')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_3(VAR_13: IResponse, VAR_14: BinaryIO, VAR_15: Optional[int]...\n", "\"\"\"docstring\"\"\"\n", "VAR_37 = defer.Deferred()\n", "VAR_13.deliverBody(CLASS_3(VAR_14, VAR_37, VAR_15))\n", "return VAR_37\n" ]
[ "def readBodyToFile(response: IResponse, stream: BinaryIO, max_size:...\n", "\"\"\"docstring\"\"\"\n", "d = defer.Deferred()\n", "response.deliverBody(_ReadBodyToFileProtocol(stream, d, max_size))\n", "return d\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_4(VAR_2: Directive) ->tuple[str, str]:...\n", "\"\"\"docstring\"\"\"\n", "VAR_8 = file.readlines()\n", "VAR_20 = FUNC_3(VAR_8, VAR_2.meta['lineno'] - 1)\n", "VAR_21 = ''.join(VAR_20).rstrip('\\n')\n", "return VAR_21, FUNC_0(VAR_21)\n" ]
[ "def get_entry_slice(entry: Directive) ->tuple[str, str]:...\n", "\"\"\"docstring\"\"\"\n", "lines = file.readlines()\n", "entry_lines = find_entry_lines(lines, entry.meta['lineno'] - 1)\n", "entry_source = ''.join(entry_lines).rstrip('\\n')\n", "return entry_source, sha256_str(entry_source)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_30(self):...\n", "VAR_27 = widgets.FilteredSelectMultiple('test', False)\n", "self.assertHTMLEqual(conditional_escape(VAR_27.render('test', 'test')), \n 'string' % VAR_0())\n" ]
[ "def test_render(self):...\n", "w = widgets.FilteredSelectMultiple('test', False)\n", "self.assertHTMLEqual(conditional_escape(w.render('test', 'test')), \n \"\"\"<select multiple=\"multiple\" name=\"test\" class=\"selectfilter\">\n</select><script type=\"text/javascript\">addEvent(window, \"load\", function(e) {SelectFilter.init(\"id_test\", \"test\", 0, \"%(ADMIN_STATIC_PREFIX)s\"); });</script>\n\"\"\"\n % admin_static_prefix())\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "@def_function.function(input_signature=[tensor_spec.TensorSpec(shape=(),...\n", "self.write_var.assign(VAR_44 + self.var)\n", "return {'res': self.write_var}\n" ]
[ "@def_function.function(input_signature=[tensor_spec.TensorSpec(shape=(),...\n", "self.write_var.assign(x + self.var)\n", "return {'res': self.write_var}\n" ]
[ 0, 0, 0 ]
[ "Condition", "Expr'", "Return'" ]
[ "@parameterized.named_parameters(('non_tfrt', False))...\n", "self.parser = saved_model_cli.create_parser()\n", "VAR_10 = test.test_src_dir_path(VAR_0)\n", "VAR_45 = np.array([[1], [2]])\n", "VAR_46 = np.zeros((6, 3))\n", "VAR_32 = os.path.join(test.get_temp_dir(),\n 'testRunCommandOutOverwrite_inputs.npz')\n", "np.savez(VAR_32, VAR_27=x, VAR_28=x_notused)\n", "VAR_47 = os.path.join(test.get_temp_dir(), 'y.npy')\n", "open(VAR_47, 'a').close()\n", "VAR_11 = self.parser.parse_args(['run', '--dir', VAR_10, '--tag_set',\n 'serve', '--signature_def', 'serving_default', '--inputs', 'x=' +\n VAR_32 + '[x0]', '--outdir', test.get_temp_dir(), '--overwrite'] + ([\n '--use_tfrt'] if VAR_5 else []))\n", "saved_model_cli.run(VAR_11)\n", "VAR_43 = np.load(VAR_47)\n", "VAR_44 = np.array([[2.5], [3.0]])\n", "self.assertAllClose(VAR_44, VAR_43)\n" ]
[ "@parameterized.named_parameters(('non_tfrt', False))...\n", "self.parser = saved_model_cli.create_parser()\n", "base_path = test.test_src_dir_path(SAVED_MODEL_PATH)\n", "x = np.array([[1], [2]])\n", "x_notused = np.zeros((6, 3))\n", "input_path = os.path.join(test.get_temp_dir(),\n 'testRunCommandOutOverwrite_inputs.npz')\n", "np.savez(input_path, x0=x, x1=x_notused)\n", "output_file = os.path.join(test.get_temp_dir(), 'y.npy')\n", "open(output_file, 'a').close()\n", "args = self.parser.parse_args(['run', '--dir', base_path, '--tag_set',\n 'serve', '--signature_def', 'serving_default', '--inputs', 'x=' +\n input_path + '[x0]', '--outdir', test.get_temp_dir(), '--overwrite'] +\n (['--use_tfrt'] if use_tfrt else []))\n", "saved_model_cli.run(args)\n", "y_actual = np.load(output_file)\n", "y_expected = np.array([[2.5], [3.0]])\n", "self.assertAllClose(y_expected, y_actual)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_0(VAR_1: str, VAR_2: Optional[str]=None, VAR_3: Optional[str]=None...\n", "\"\"\"docstring\"\"\"\n", "VAR_9 = inspect.currentframe()\n", "assert VAR_9 is not None\n", "VAR_10 = VAR_9.f_back\n", "assert VAR_10 is not None\n", "VAR_11 = inspect.getmodule(VAR_10)\n", "assert VAR_11 is not None\n", "VAR_12 = VAR_11.__name__\n", "if VAR_12 == '__main__':\n", "VAR_26 = inspect.getfile(VAR_10)\n", "VAR_13 = f'{VAR_12}.{VAR_1}'\n", "VAR_27 = os.path.basename(VAR_26)\n", "VAR_14 = CLASS_1(VAR_1=component_name, VAR_2=path, VAR_3=url)\n", "VAR_12, VAR_28 = os.path.splitext(VAR_27)\n", "CLASS_3.instance().register_component(VAR_14)\n", "return VAR_14\n" ]
[ "def declare_component(name: str, path: Optional[str]=None, url: Optional[...\n", "\"\"\"docstring\"\"\"\n", "current_frame = inspect.currentframe()\n", "assert current_frame is not None\n", "caller_frame = current_frame.f_back\n", "assert caller_frame is not None\n", "module = inspect.getmodule(caller_frame)\n", "assert module is not None\n", "module_name = module.__name__\n", "if module_name == '__main__':\n", "file_path = inspect.getfile(caller_frame)\n", "component_name = f'{module_name}.{name}'\n", "filename = os.path.basename(file_path)\n", "component = CustomComponent(name=component_name, path=path, url=url)\n", "module_name, _ = os.path.splitext(filename)\n", "ComponentRegistry.instance().register_component(component)\n", "return component\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_4(self, VAR_13: Text, VAR_14: Text) ->None:...\n", "\"\"\"docstring\"\"\"\n", "self._container_client().upload_blob(VAR_1=file_key, data=data)\n" ]
[ "def _persist_tar(self, file_key: Text, tar_path: Text) ->None:...\n", "\"\"\"docstring\"\"\"\n", "self._container_client().upload_blob(name=file_key, data=data)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'" ]
[ "\"\"\"string\"\"\"\n", "import html\n", "import json\n", "import os\n", "import time\n", "import textwrap\n", "import mimetypes\n", "import urllib\n", "import collections\n", "import pkg_resources\n", "from PyQt5.QtCore import QUrlQuery, QUrl\n", "import qutebrowser\n", "from qutebrowser.config import config, configdata, configexc, configdiff\n", "from qutebrowser.utils import version, utils, jinja, log, message, docutils, objreg, urlutils\n", "from qutebrowser.misc import objects\n", "from qutebrowser.qt import sip\n", "VAR_0 = ':pyeval was never called'\n", "VAR_1 = ':spawn was never called'\n", "VAR_2 = {}\n", "\"\"\"Raised when no handler was found for the given URL.\"\"\"\n", "\"\"\"Called when there was an OSError inside a handler.\"\"\"\n", "\"\"\"string\"\"\"\n", "def __init__(self, VAR_7, VAR_8):...\n", "self.errorstring = VAR_7\n", "self.error = VAR_8\n", "super().__init__(VAR_7)\n", "\"\"\"string\"\"\"\n", "def __init__(self, VAR_3):...\n", "super().__init__(VAR_3.toDisplayString())\n", "self.url = VAR_3\n", "\"\"\"string\"\"\"\n", "def __init__(self, VAR_9, VAR_10=None):...\n", "self._name = VAR_9\n", "self._backend = VAR_10\n", "self._function = None\n", "def __call__(self, VAR_11):...\n", "self._function = VAR_11\n", "VAR_2[self._name] = self.wrapper\n", "return VAR_11\n" ]
[ "\"\"\"Backend-independent qute://* code.\n\nModule attributes:\n pyeval_output: The output of the last :pyeval command.\n _HANDLERS: The handlers registered via decorators.\n\"\"\"\n", "import html\n", "import json\n", "import os\n", "import time\n", "import textwrap\n", "import mimetypes\n", "import urllib\n", "import collections\n", "import pkg_resources\n", "from PyQt5.QtCore import QUrlQuery, QUrl\n", "import qutebrowser\n", "from qutebrowser.config import config, configdata, configexc, configdiff\n", "from qutebrowser.utils import version, utils, jinja, log, message, docutils, objreg, urlutils\n", "from qutebrowser.misc import objects\n", "from qutebrowser.qt import sip\n", "pyeval_output = ':pyeval was never called'\n", "spawn_output = ':spawn was never called'\n", "_HANDLERS = {}\n", "\"\"\"Raised when no handler was found for the given URL.\"\"\"\n", "\"\"\"Called when there was an OSError inside a handler.\"\"\"\n", "\"\"\"Exception to signal that a handler should return an ErrorReply.\n\n Attributes correspond to the arguments in\n networkreply.ErrorNetworkReply.\n\n Attributes:\n errorstring: Error string to print.\n error: Numerical error value.\n \"\"\"\n", "def __init__(self, errorstring, error):...\n", "self.errorstring = errorstring\n", "self.error = error\n", "super().__init__(errorstring)\n", "\"\"\"Exception to signal a redirect should happen.\n\n Attributes:\n url: The URL to redirect to, as a QUrl.\n \"\"\"\n", "def __init__(self, url):...\n", "super().__init__(url.toDisplayString())\n", "self.url = url\n", "\"\"\"Decorator to register a qute://* URL handler.\n\n Attributes:\n _name: The 'foo' part of qute://foo\n backend: Limit which backends the handler can run with.\n \"\"\"\n", "def __init__(self, name, backend=None):...\n", "self._name = name\n", "self._backend = backend\n", "self._function = None\n", "def __call__(self, function):...\n", "self._function = function\n", "_HANDLERS[self._name] = self.wrapper\n", "return function\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_12(self):...\n", "VAR_5 = self._makeContext()\n", "warnings.simplefilter('ignore')\n", "self.assertEqual(VAR_5.evaluate('d/_'), 'under')\n" ]
[ "def test_evaluate_dict_key_as_underscore(self):...\n", "ec = self._makeContext()\n", "warnings.simplefilter('ignore')\n", "self.assertEqual(ec.evaluate('d/_'), 'under')\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_83(*VAR_2, **VAR_3):...\n", "VAR_9 = VAR_2[0]\n", "VAR_65 = get_course_by_id(CourseKey.from_string(VAR_3['course_id']))\n", "if has_access(VAR_9.user, VAR_4, VAR_65):\n", "return VAR_1(*VAR_2, **kwargs)\n", "return HttpResponseForbidden()\n" ]
[ "def wrapped(*args, **kwargs):...\n", "request = args[0]\n", "course = get_course_by_id(CourseKey.from_string(kwargs['course_id']))\n", "if has_access(request.user, level, course):\n", "return func(*args, **kwargs)\n", "return HttpResponseForbidden()\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_3(*, VAR_0: Optional[str], VAR_1: Optional[Path]) ->Union[Dict[str,...\n", "VAR_35: bytes\n", "if VAR_0 is not None and VAR_1 is not None:\n", "return GeneratorError(header='Provide URL or Path, not both.')\n", "if VAR_0 is not None:\n", "if VAR_1 is not None:\n", "VAR_34 = httpx.get(VAR_0)\n", "return GeneratorError(header='Could not get OpenAPI document from provided URL'\n )\n", "return yaml.safe_load(VAR_35)\n", "return GeneratorError(header='Invalid YAML from provided source')\n", "VAR_35 = VAR_1.read_bytes()\n", "return GeneratorError(header='No URL or Path provided')\n", "VAR_35 = VAR_34.content\n" ]
[ "def _get_document(*, url: Optional[str], path: Optional[Path]) ->Union[Dict...\n", "yaml_bytes: bytes\n", "if url is not None and path is not None:\n", "return GeneratorError(header='Provide URL or Path, not both.')\n", "if url is not None:\n", "if path is not None:\n", "response = httpx.get(url)\n", "return GeneratorError(header='Could not get OpenAPI document from provided URL'\n )\n", "return yaml.safe_load(yaml_bytes)\n", "return GeneratorError(header='Invalid YAML from provided source')\n", "yaml_bytes = path.read_bytes()\n", "return GeneratorError(header='No URL or Path provided')\n", "yaml_bytes = response.content\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "AnnAssign'", "Condition", "Return'", "Condition", "Condition", "Assign'", "Return'", "Return'", "Return'", "Assign'", "Return'", "Assign'" ]
[ "@VAR_1.route('/')...\n", "\"\"\"docstring\"\"\"\n", "if not g.beancount_file_slug:\n", "g.beancount_file_slug = next(iter(VAR_1.config['LEDGERS']))\n", "VAR_25 = FUNC_6('index')\n", "VAR_26 = VAR_1.config['LEDGERS'][g.beancount_file_slug\n ].fava_options.default_page\n", "return redirect(f'{VAR_25}{VAR_26}')\n" ]
[ "@app.route('/')...\n", "\"\"\"docstring\"\"\"\n", "if not g.beancount_file_slug:\n", "g.beancount_file_slug = next(iter(app.config['LEDGERS']))\n", "index_url = url_for('index')\n", "default_path = app.config['LEDGERS'][g.beancount_file_slug\n ].fava_options.default_page\n", "return redirect(f'{index_url}{default_path}')\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_36(self):...\n", "if not test.is_built_with_xla():\n", "self.skipTest('Skipping test because XLA is not compiled in.')\n", "self.parser = saved_model_cli.create_parser()\n", "VAR_9 = test.test_src_dir_path(VAR_0)\n", "VAR_41 = os.path.join(test.get_temp_dir(), 'aot_compile_cpu_dir')\n", "VAR_10 = self.parser.parse_args(['aot_compile_cpu', '--dir', VAR_9,\n '--tag_set', 'serve', '--output_prefix', VAR_41, '--cpp_class',\n 'Compiled', '--signature_def_key', 'MISSING'])\n", "saved_model_cli.aot_compile_cpu(VAR_10)\n" ]
[ "def testAOTCompileCPUWrongSignatureDefKey(self):...\n", "if not test.is_built_with_xla():\n", "self.skipTest('Skipping test because XLA is not compiled in.')\n", "self.parser = saved_model_cli.create_parser()\n", "base_path = test.test_src_dir_path(SAVED_MODEL_PATH)\n", "output_dir = os.path.join(test.get_temp_dir(), 'aot_compile_cpu_dir')\n", "args = self.parser.parse_args(['aot_compile_cpu', '--dir', base_path,\n '--tag_set', 'serve', '--output_prefix', output_dir, '--cpp_class',\n 'Compiled', '--signature_def_key', 'MISSING'])\n", "saved_model_cli.aot_compile_cpu(args)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_57(VAR_17, VAR_18=None, VAR_10=None, VAR_19=None):...\n", "VAR_7 = VAR_10[0] if VAR_10 else [db.Books.sort]\n", "VAR_65 = None\n", "VAR_16 = get_cc_columns(filter_config_custom_read=True)\n", "calibre_db.session.connection().connection.connection.create_function('lower',\n 1, db.lcase)\n", "VAR_21 = calibre_db.session.query(db.Books).outerjoin(db.books_series_link,\n db.Books.id == db.books_series_link.c.book).outerjoin(db.Series).filter(\n calibre_db.common_filters(True))\n", "VAR_41 = dict()\n", "VAR_92 = ['tag', 'serie', 'shelf', 'language', 'extension']\n", "for VAR_112 in VAR_92:\n", "VAR_41['include_' + VAR_112] = VAR_17.get('include_' + VAR_112)\n", "VAR_36 = VAR_17.get('author_name')\n", "VAR_41['exclude_' + VAR_112] = VAR_17.get('exclude_' + VAR_112)\n", "VAR_37 = VAR_17.get('book_title')\n", "VAR_38 = VAR_17.get('publisher')\n", "VAR_39 = VAR_17.get('publishstart')\n", "VAR_40 = VAR_17.get('publishend')\n", "VAR_25 = VAR_17.get('ratinghigh')\n", "VAR_24 = VAR_17.get('ratinglow')\n", "VAR_93 = VAR_17.get('comment')\n", "VAR_26 = VAR_17.get('read_status')\n", "if VAR_36:\n", "VAR_36 = VAR_36.strip().lower().replace(',', '|')\n", "if VAR_37:\n", "VAR_37 = VAR_37.strip().lower()\n", "if VAR_38:\n", "VAR_38 = VAR_38.strip().lower()\n", "VAR_35 = []\n", "VAR_94 = False\n", "for c in VAR_16:\n", "if c.datatype == 'datetime':\n", "if any(VAR_41.values()\n", "VAR_132 = VAR_17.get('custom_column_' + str(c.id) + '_start')\n", "if VAR_17.get('custom_column_' + str(c.id)):\n", "VAR_35, VAR_39, VAR_40 = FUNC_56(VAR_35, VAR_36, VAR_37, VAR_38, VAR_39,\n VAR_40, VAR_41, VAR_24, VAR_25, VAR_26)\n", "VAR_21 = VAR_21.order_by(*VAR_7).all()\n", "VAR_133 = VAR_17.get('custom_column_' + str(c.id) + '_end')\n", "VAR_35.extend([u'{}: {}'.format(c.name, VAR_17.get('custom_column_' + str(c\n .id)))])\n", "VAR_21 = VAR_21.filter()\n", "VAR_91['query'] = json.dumps(VAR_17)\n", "if VAR_132:\n", "VAR_94 = True\n", "if VAR_36:\n", "ub.store_ids(VAR_21)\n", "VAR_35.extend([u'{} >= {}'.format(c.name, format_date(datetime.strptime(\n VAR_132, '%Y-%m-%d').date(), format='medium', locale=get_locale()))])\n", "if VAR_133:\n", "VAR_21 = VAR_21.filter(db.Books.authors.any(func.lower(db.Authors.name).\n ilike('%' + VAR_36 + '%')))\n", "if VAR_37:\n", "VAR_77 = len(VAR_21)\n", "VAR_94 = True\n", "VAR_35.extend([u'{} <= {}'.format(c.name, format_date(datetime.strptime(\n VAR_133, '%Y-%m-%d').date(), format='medium', locale=get_locale()))])\n", "VAR_21 = VAR_21.filter(func.lower(db.Books.title).ilike('%' + VAR_37 + '%'))\n", "if VAR_39:\n", "if VAR_18 is not None and VAR_19 is not None:\n", "VAR_94 = True\n", "VAR_21 = VAR_21.filter(func.datetime(db.Books.pubdate) > func.datetime(VAR_39))\n", "if VAR_40:\n", "VAR_18 = int(VAR_18)\n", "VAR_18 = 0\n", "VAR_21 = VAR_21.filter(func.datetime(db.Books.pubdate) < func.datetime(VAR_40))\n", "VAR_21 = FUNC_51(VAR_21, VAR_26)\n", "VAR_113 = VAR_18 + int(VAR_19)\n", "VAR_113 = VAR_77\n", "if VAR_38:\n", "VAR_65 = Pagination(VAR_18 / int(VAR_19) + 1, VAR_19, VAR_77)\n", "return render_title_template('search.html', adv_searchterm=searchterm,\n VAR_65=pagination, VAR_63=q[offset:limit_all], VAR_77=result_count,\n VAR_150=_(u'Advanced Search'), VAR_9='advsearch', VAR_10=order[1])\n", "VAR_21 = VAR_21.filter(db.Books.publishers.any(func.lower(db.Publishers.\n name).ilike('%' + VAR_38 + '%')))\n", "VAR_21 = FUNC_53(VAR_21, VAR_41['include_tag'], VAR_41['exclude_tag'])\n", "VAR_21 = FUNC_54(VAR_21, VAR_41['include_serie'], VAR_41['exclude_serie'])\n", "VAR_21 = FUNC_55(VAR_21, VAR_41['include_shelf'], VAR_41['exclude_shelf'])\n", "VAR_21 = FUNC_52(VAR_21, VAR_41['include_extension'], VAR_41[\n 'exclude_extension'])\n", "VAR_21 = FUNC_49(VAR_21, VAR_41['include_language'], VAR_41['exclude_language']\n )\n", "VAR_21 = FUNC_50(VAR_21, VAR_24, VAR_25)\n", "if VAR_93:\n", "VAR_21 = VAR_21.filter(db.Books.comments.any(func.lower(db.Comments.text).\n ilike('%' + VAR_93 + '%')))\n", "VAR_21 = FUNC_48(VAR_16, VAR_17, VAR_21)\n", "VAR_3.debug_or_exception(ex)\n", "flash(_('Error on search for custom columns, please restart Calibre-Web'),\n category='error')\n" ]
[ "def render_adv_search_results(term, offset=None, order=None, limit=None):...\n", "sort = order[0] if order else [db.Books.sort]\n", "pagination = None\n", "cc = get_cc_columns(filter_config_custom_read=True)\n", "calibre_db.session.connection().connection.connection.create_function('lower',\n 1, db.lcase)\n", "q = calibre_db.session.query(db.Books).outerjoin(db.books_series_link, db.\n Books.id == db.books_series_link.c.book).outerjoin(db.Series).filter(\n calibre_db.common_filters(True))\n", "tags = dict()\n", "elements = ['tag', 'serie', 'shelf', 'language', 'extension']\n", "for element in elements:\n", "tags['include_' + element] = term.get('include_' + element)\n", "author_name = term.get('author_name')\n", "tags['exclude_' + element] = term.get('exclude_' + element)\n", "book_title = term.get('book_title')\n", "publisher = term.get('publisher')\n", "pub_start = term.get('publishstart')\n", "pub_end = term.get('publishend')\n", "rating_low = term.get('ratinghigh')\n", "rating_high = term.get('ratinglow')\n", "description = term.get('comment')\n", "read_status = term.get('read_status')\n", "if author_name:\n", "author_name = author_name.strip().lower().replace(',', '|')\n", "if book_title:\n", "book_title = book_title.strip().lower()\n", "if publisher:\n", "publisher = publisher.strip().lower()\n", "searchterm = []\n", "cc_present = False\n", "for c in cc:\n", "if c.datatype == 'datetime':\n", "if any(tags.values()\n", "column_start = term.get('custom_column_' + str(c.id) + '_start')\n", "if term.get('custom_column_' + str(c.id)):\n", "searchterm, pub_start, pub_end = extend_search_term(searchterm, author_name,\n book_title, publisher, pub_start, pub_end, tags, rating_high,\n rating_low, read_status)\n", "q = q.order_by(*sort).all()\n", "column_end = term.get('custom_column_' + str(c.id) + '_end')\n", "searchterm.extend([u'{}: {}'.format(c.name, term.get('custom_column_' + str\n (c.id)))])\n", "q = q.filter()\n", "flask_session['query'] = json.dumps(term)\n", "if column_start:\n", "cc_present = True\n", "if author_name:\n", "ub.store_ids(q)\n", "searchterm.extend([u'{} >= {}'.format(c.name, format_date(datetime.strptime\n (column_start, '%Y-%m-%d').date(), format='medium', locale=get_locale()))])\n", "if column_end:\n", "q = q.filter(db.Books.authors.any(func.lower(db.Authors.name).ilike('%' +\n author_name + '%')))\n", "if book_title:\n", "result_count = len(q)\n", "cc_present = True\n", "searchterm.extend([u'{} <= {}'.format(c.name, format_date(datetime.strptime\n (column_end, '%Y-%m-%d').date(), format='medium', locale=get_locale()))])\n", "q = q.filter(func.lower(db.Books.title).ilike('%' + book_title + '%'))\n", "if pub_start:\n", "if offset is not None and limit is not None:\n", "cc_present = True\n", "q = q.filter(func.datetime(db.Books.pubdate) > func.datetime(pub_start))\n", "if pub_end:\n", "offset = int(offset)\n", "offset = 0\n", "q = q.filter(func.datetime(db.Books.pubdate) < func.datetime(pub_end))\n", "q = adv_search_read_status(q, read_status)\n", "limit_all = offset + int(limit)\n", "limit_all = result_count\n", "if publisher:\n", "pagination = Pagination(offset / int(limit) + 1, limit, result_count)\n", "return render_title_template('search.html', adv_searchterm=searchterm,\n pagination=pagination, entries=q[offset:limit_all], result_count=\n result_count, title=_(u'Advanced Search'), page='advsearch', order=order[1]\n )\n", "q = q.filter(db.Books.publishers.any(func.lower(db.Publishers.name).ilike(\n '%' + publisher + '%')))\n", "q = adv_search_tag(q, tags['include_tag'], tags['exclude_tag'])\n", "q = adv_search_serie(q, tags['include_serie'], tags['exclude_serie'])\n", "q = adv_search_shelf(q, tags['include_shelf'], tags['exclude_shelf'])\n", "q = adv_search_extension(q, tags['include_extension'], tags[\n 'exclude_extension'])\n", "q = adv_search_language(q, tags['include_language'], tags['exclude_language'])\n", "q = adv_search_ratings(q, rating_high, rating_low)\n", "if description:\n", "q = q.filter(db.Books.comments.any(func.lower(db.Comments.text).ilike('%' +\n description + '%')))\n", "q = adv_search_custom_columns(cc, term, q)\n", "log.debug_or_exception(ex)\n", "flash(_('Error on search for custom columns, please restart Calibre-Web'),\n category='error')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "For", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Expr'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_74(VAR_9, VAR_38):...\n", "\"\"\"docstring\"\"\"\n", "VAR_41 = FUNC_75(VAR_9)\n", "VAR_19 = VAR_41.get('user_name', '') or VAR_41.get('user_email', '')\n", "if not VAR_19:\n", "VAR_155 = FUNC_76(VAR_19, VAR_38)\n", "return VAR_41, VAR_155\n" ]
[ "def parse_request_data_and_get_user(request, course_key):...\n", "\"\"\"docstring\"\"\"\n", "certificate_exception = parse_request_data(request)\n", "user = certificate_exception.get('user_name', '') or certificate_exception.get(\n 'user_email', '')\n", "if not user:\n", "db_user = get_student(user, course_key)\n", "return certificate_exception, db_user\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def __init__(self):...\n", "self._components = {}\n", "self._lock = threading.Lock()\n" ]
[ "def __init__(self):...\n", "self._components = {}\n", "self._lock = threading.Lock()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "def FUNC_2(self, *VAR_4, **VAR_5):...\n", "VAR_7 = super().build_attrs(*VAR_4, **kwargs)\n", "VAR_6 = VAR_7.get('accept')\n", "VAR_8 = self.client.generate_presigned_post(self.bucket_name, str(pathlib.\n PurePosixPath(self.upload_folder, '${filename}')), Conditions=self.\n get_conditions(accept), ExpiresIn=self.expires)\n", "VAR_9 = {('data-fields-%s' % key): value for key, value in VAR_8['fields'].\n items()}\n", "VAR_9['data-url'] = VAR_8['url']\n", "VAR_9.update(VAR_7)\n", "VAR_9['class'] += ' s3file'\n", "VAR_9['class'] = 's3file'\n", "return VAR_9\n" ]
[ "def build_attrs(self, *args, **kwargs):...\n", "attrs = super().build_attrs(*args, **kwargs)\n", "accept = attrs.get('accept')\n", "response = self.client.generate_presigned_post(self.bucket_name, str(\n pathlib.PurePosixPath(self.upload_folder, '${filename}')), Conditions=\n self.get_conditions(accept), ExpiresIn=self.expires)\n", "defaults = {('data-fields-%s' % key): value for key, value in response[\n 'fields'].items()}\n", "defaults['data-url'] = response['url']\n", "defaults.update(attrs)\n", "defaults['class'] += ' s3file'\n", "defaults['class'] = 's3file'\n", "return defaults\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "AugAssign'", "Assign'", "Return'" ]
[ "\"\"\"Tests for SavedModelCLI tool.\"\"\"\n", "import contextlib\n", "import os\n", "import pickle\n", "import platform\n", "import shutil\n", "import sys\n", "from absl.testing import parameterized\n", "import numpy as np\n", "from six import StringIO\n", "from tensorflow.core.example import example_pb2\n", "from tensorflow.core.framework import types_pb2\n", "from tensorflow.core.protobuf import meta_graph_pb2\n", "from tensorflow.python.debug.wrappers import local_cli_wrapper\n", "from tensorflow.python.eager import def_function\n", "from tensorflow.python.framework import constant_op\n", "from tensorflow.python.framework import dtypes\n", "from tensorflow.python.framework import tensor_spec\n", "from tensorflow.python.lib.io import file_io\n", "from tensorflow.python.ops import variables\n", "from tensorflow.python.platform import test\n", "from tensorflow.python.platform import tf_logging as logging\n", "from tensorflow.python.saved_model import save\n", "from tensorflow.python.tools import saved_model_cli\n", "from tensorflow.python.training.tracking import tracking\n", "VAR_0 = 'cc/saved_model/testdata/half_plus_two/00000123'\n", "@contextlib.contextmanager...\n", "VAR_1, VAR_2 = StringIO(), StringIO()\n", "VAR_3, VAR_4 = VAR_8.stdout, VAR_8.stderr\n", "VAR_8.stdout, VAR_8.stderr = VAR_1, VAR_2\n", "VAR_8.stdout, VAR_8.stderr = VAR_3, VAR_4\n", "def FUNC_1(self):...\n", "yield VAR_8.stdout, VAR_8.stderr\n", "super(CLASS_0, self).setUp()\n", "if platform.system() == 'Windows':\n", "self.skipTest('Skipping failing tests on Windows.')\n", "def FUNC_2(self):...\n", "VAR_9 = test.test_src_dir_path(VAR_0)\n", "self.parser = saved_model_cli.create_parser()\n", "VAR_10 = self.parser.parse_args(['show', '--dir', VAR_9, '--all'])\n", "saved_model_cli.show(VAR_10)\n", "VAR_11 = out.getvalue().strip()\n", "VAR_12 = 'string'\n", "self.maxDiff = None\n", "self.assertMultiLineEqual(VAR_11, VAR_12)\n", "self.assertEqual(err.getvalue().strip(), '')\n", "def FUNC_3(self):...\n", "\"\"\"Model with callable polymorphic functions specified.\"\"\"\n", "@def_function.function...\n", "if VAR_55:\n", "return VAR_53 + VAR_54\n", "return VAR_53 * VAR_54\n" ]
[ "\"\"\"Tests for SavedModelCLI tool.\"\"\"\n", "import contextlib\n", "import os\n", "import pickle\n", "import platform\n", "import shutil\n", "import sys\n", "from absl.testing import parameterized\n", "import numpy as np\n", "from six import StringIO\n", "from tensorflow.core.example import example_pb2\n", "from tensorflow.core.framework import types_pb2\n", "from tensorflow.core.protobuf import meta_graph_pb2\n", "from tensorflow.python.debug.wrappers import local_cli_wrapper\n", "from tensorflow.python.eager import def_function\n", "from tensorflow.python.framework import constant_op\n", "from tensorflow.python.framework import dtypes\n", "from tensorflow.python.framework import tensor_spec\n", "from tensorflow.python.lib.io import file_io\n", "from tensorflow.python.ops import variables\n", "from tensorflow.python.platform import test\n", "from tensorflow.python.platform import tf_logging as logging\n", "from tensorflow.python.saved_model import save\n", "from tensorflow.python.tools import saved_model_cli\n", "from tensorflow.python.training.tracking import tracking\n", "SAVED_MODEL_PATH = 'cc/saved_model/testdata/half_plus_two/00000123'\n", "@contextlib.contextmanager...\n", "new_out, new_err = StringIO(), StringIO()\n", "old_out, old_err = sys.stdout, sys.stderr\n", "sys.stdout, sys.stderr = new_out, new_err\n", "sys.stdout, sys.stderr = old_out, old_err\n", "def setUp(self):...\n", "yield sys.stdout, sys.stderr\n", "super(SavedModelCLITestCase, self).setUp()\n", "if platform.system() == 'Windows':\n", "self.skipTest('Skipping failing tests on Windows.')\n", "def testShowCommandAll(self):...\n", "base_path = test.test_src_dir_path(SAVED_MODEL_PATH)\n", "self.parser = saved_model_cli.create_parser()\n", "args = self.parser.parse_args(['show', '--dir', base_path, '--all'])\n", "saved_model_cli.show(args)\n", "output = out.getvalue().strip()\n", "exp_out = \"\"\"MetaGraphDef with tag-set: 'serve' contains the following SignatureDefs:\n\nsignature_def['classify_x2_to_y3']:\n The given SavedModel SignatureDef contains the following input(s):\n inputs['inputs'] tensor_info:\n dtype: DT_FLOAT\n shape: (-1, 1)\n name: x2:0\n The given SavedModel SignatureDef contains the following output(s):\n outputs['scores'] tensor_info:\n dtype: DT_FLOAT\n shape: (-1, 1)\n name: y3:0\n Method name is: tensorflow/serving/classify\n\nsignature_def['classify_x_to_y']:\n The given SavedModel SignatureDef contains the following input(s):\n inputs['inputs'] tensor_info:\n dtype: DT_STRING\n shape: unknown_rank\n name: tf_example:0\n The given SavedModel SignatureDef contains the following output(s):\n outputs['scores'] tensor_info:\n dtype: DT_FLOAT\n shape: (-1, 1)\n name: y:0\n Method name is: tensorflow/serving/classify\n\nsignature_def['regress_x2_to_y3']:\n The given SavedModel SignatureDef contains the following input(s):\n inputs['inputs'] tensor_info:\n dtype: DT_FLOAT\n shape: (-1, 1)\n name: x2:0\n The given SavedModel SignatureDef contains the following output(s):\n outputs['outputs'] tensor_info:\n dtype: DT_FLOAT\n shape: (-1, 1)\n name: y3:0\n Method name is: tensorflow/serving/regress\n\nsignature_def['regress_x_to_y']:\n The given SavedModel SignatureDef contains the following input(s):\n inputs['inputs'] tensor_info:\n dtype: DT_STRING\n shape: unknown_rank\n name: tf_example:0\n The given SavedModel SignatureDef contains the following output(s):\n outputs['outputs'] tensor_info:\n dtype: DT_FLOAT\n shape: (-1, 1)\n name: y:0\n Method name is: tensorflow/serving/regress\n\nsignature_def['regress_x_to_y2']:\n The given SavedModel SignatureDef contains the following input(s):\n inputs['inputs'] tensor_info:\n dtype: DT_STRING\n shape: unknown_rank\n name: tf_example:0\n The given SavedModel SignatureDef contains the following output(s):\n outputs['outputs'] tensor_info:\n dtype: DT_FLOAT\n shape: (-1, 1)\n name: y2:0\n Method name is: tensorflow/serving/regress\n\nsignature_def['serving_default']:\n The given SavedModel SignatureDef contains the following input(s):\n inputs['x'] tensor_info:\n dtype: DT_FLOAT\n shape: (-1, 1)\n name: x:0\n The given SavedModel SignatureDef contains the following output(s):\n outputs['y'] tensor_info:\n dtype: DT_FLOAT\n shape: (-1, 1)\n name: y:0\n Method name is: tensorflow/serving/predict\"\"\"\n", "self.maxDiff = None\n", "self.assertMultiLineEqual(output, exp_out)\n", "self.assertEqual(err.getvalue().strip(), '')\n", "def testShowAllWithFunctions(self):...\n", "\"\"\"Model with callable polymorphic functions specified.\"\"\"\n", "@def_function.function...\n", "if c:\n", "return a + b\n", "return a * b\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "Expr'", "For", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Expr'", "Condition", "Condition", "Return'", "Return'" ]
[ "def FUNC_3(self):...\n", "\"\"\"docstring\"\"\"\n", "if self._private:\n", "return\n", "VAR_26 = QCoreApplication.instance()\n", "VAR_27 = objreg.get('cache')\n", "self.setCache(VAR_27)\n", "VAR_27.setParent(VAR_26)\n" ]
[ "def _set_cache(self):...\n", "\"\"\"docstring\"\"\"\n", "if self._private:\n", "return\n", "app = QCoreApplication.instance()\n", "cache = objreg.get('cache')\n", "self.setCache(cache)\n", "cache.setParent(app)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_12(self, VAR_10, VAR_11):...\n", "self.http_client = Mock()\n", "VAR_33 = self.setup_test_homeserver(http_client=self.http_client)\n", "return VAR_33\n" ]
[ "def make_homeserver(self, reactor, clock):...\n", "self.http_client = Mock()\n", "hs = self.setup_test_homeserver(http_client=self.http_client)\n", "return hs\n" ]
[ 0, 0, 4, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_6(self):...\n", "\"\"\"docstring\"\"\"\n", "self._test_push_unread_count()\n", "self.assertEqual(self.push_attempts[5][2]['notification']['counts'][\n 'unread'], 1)\n" ]
[ "def test_push_unread_count_group_by_room(self):...\n", "\"\"\"docstring\"\"\"\n", "self._test_push_unread_count()\n", "self.assertEqual(self.push_attempts[5][2]['notification']['counts'][\n 'unread'], 1)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Expr'" ]
[ "def FUNC_63(self):...\n", "VAR_66 = self._random_alias()\n", "self._set_alias_via_directory(VAR_66)\n", "self.helper.send_state(self.room_id, EventTypes.RoomHistoryVisibility, body\n ={'history_visibility': 'world_readable'}, VAR_52=self.room_owner_tok)\n", "self.register_user('user', 'test')\n", "VAR_65 = self.login('user', 'test')\n", "VAR_61 = self._get_aliases(VAR_65)\n", "self.assertEqual(VAR_61['aliases'], [VAR_66])\n" ]
[ "def test_peekable_room(self):...\n", "alias1 = self._random_alias()\n", "self._set_alias_via_directory(alias1)\n", "self.helper.send_state(self.room_id, EventTypes.RoomHistoryVisibility, body\n ={'history_visibility': 'world_readable'}, tok=self.room_owner_tok)\n", "self.register_user('user', 'test')\n", "user_tok = self.login('user', 'test')\n", "res = self._get_aliases(user_tok)\n", "self.assertEqual(res['aliases'], [alias1])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'" ]
[ "\"\"\" Generate modern Python clients from OpenAPI \"\"\"\n", "from __future__ import annotations\n", "import shutil\n", "import subprocess\n", "import sys\n", "from pathlib import Path\n", "from typing import Any, Dict, Optional, Sequence, Union\n", "import httpcore\n", "import httpx\n", "import yaml\n", "from jinja2 import Environment, PackageLoader\n", "from openapi_python_client import utils\n", "from .parser import GeneratorData, import_string_from_reference\n", "from .parser.errors import GeneratorError\n", "if sys.version_info.minor == 7:\n", "from importlib_metadata import version\n", "from importlib.metadata import version\n", "__version__ = version(__package__)\n", "def FUNC_0(VAR_0: Optional[str], VAR_1: Optional[Path]) ->Union[CLASS_0,...\n", "VAR_2 = FUNC_3(VAR_0=url, VAR_1=path)\n", "if isinstance(VAR_2, GeneratorError):\n", "return VAR_2\n", "VAR_3 = GeneratorData.from_dict(VAR_2)\n", "if isinstance(VAR_3, GeneratorError):\n", "return VAR_3\n", "return CLASS_0(VAR_3=openapi)\n" ]
[ "\"\"\" Generate modern Python clients from OpenAPI \"\"\"\n", "from __future__ import annotations\n", "import shutil\n", "import subprocess\n", "import sys\n", "from pathlib import Path\n", "from typing import Any, Dict, Optional, Sequence, Union\n", "import httpcore\n", "import httpx\n", "import yaml\n", "from jinja2 import Environment, PackageLoader\n", "from openapi_python_client import utils\n", "from .parser import GeneratorData, import_string_from_reference\n", "from .parser.errors import GeneratorError\n", "if sys.version_info.minor == 7:\n", "from importlib_metadata import version\n", "from importlib.metadata import version\n", "__version__ = version(__package__)\n", "def _get_project_for_url_or_path(url: Optional[str], path: Optional[Path]...\n", "data_dict = _get_document(url=url, path=path)\n", "if isinstance(data_dict, GeneratorError):\n", "return data_dict\n", "openapi = GeneratorData.from_dict(data_dict)\n", "if isinstance(openapi, GeneratorError):\n", "return openapi\n", "return Project(openapi=openapi)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "ImportFrom'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Condition", "ImportFrom'", "ImportFrom'", "Assign'", "Condition", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_38(self, VAR_119, VAR_120=None, VAR_121='%(tablename)s_archive',...\n", "\"\"\"docstring\"\"\"\n", "VAR_123 = VAR_123 or VAR_263.T(VAR_122.replace('_', ' ').title())\n", "for VAR_153 in VAR_119:\n", "VAR_359 = VAR_153.fields()\n", "if 'id' in VAR_359 and 'modified_on' in VAR_359 and VAR_122 not in VAR_359:\n", "VAR_153._enable_record_versioning(VAR_120=archive_db, archive_name=\n archive_names, VAR_122=current_record, VAR_123=current_record_label)\n" ]
[ "def enable_record_versioning(self, tables, archive_db=None, archive_names=...\n", "\"\"\"docstring\"\"\"\n", "current_record_label = current_record_label or current.T(current_record.\n replace('_', ' ').title())\n", "for table in tables:\n", "fieldnames = table.fields()\n", "if 'id' in fieldnames and 'modified_on' in fieldnames and current_record not in fieldnames:\n", "table._enable_record_versioning(archive_db=archive_db, archive_name=\n archive_names, current_record=current_record, current_record_label=\n current_record_label)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "For", "Assign'", "Condition", "Expr'" ]
[ "async def FUNC_20(VAR_23):...\n", "self.assertEquals(current_context().request, 'context_11')\n", "self.assertEqual(VAR_23, {'server10': {FUNC_0(VAR_20): 0}})\n", "await make_deferred_yieldable(VAR_22)\n", "return {'server10': {FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20),\n 100)}}\n" ]
[ "async def first_lookup_fetch(keys_to_fetch):...\n", "self.assertEquals(current_context().request, 'context_11')\n", "self.assertEqual(keys_to_fetch, {'server10': {get_key_id(key1): 0}})\n", "await make_deferred_yieldable(first_lookup_deferred)\n", "return {'server10': {get_key_id(key1): FetchKeyResult(get_verify_key(key1),\n 100)}}\n" ]
[ 0, 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_56(self, VAR_112=VAR_3, VAR_134=VAR_3, VAR_135=VAR_3, VAR_136=VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "VAR_254 = self.table_user()\n", "VAR_56 = VAR_263.request\n", "VAR_244 = VAR_263.response\n", "VAR_139 = VAR_263.session\n", "if not self.settings.mailer:\n", "VAR_244.flash = self.messages.function_disabled\n", "if VAR_112 is VAR_3:\n", "return ''\n", "VAR_112 = self.get_vars_next() or self.settings.retrieve_password_next\n", "if VAR_134 is VAR_3:\n", "VAR_134 = self.settings.retrieve_password_onvalidation\n", "if VAR_135 is VAR_3:\n", "VAR_135 = self.settings.retrieve_password_onaccept\n", "if VAR_136 is VAR_3:\n", "VAR_136 = self.messages['retrieve_password_log']\n", "VAR_271 = VAR_254.email.requires\n", "VAR_254.email.requires = [IS_IN_DB(self.db, VAR_254.email, VAR_59=self.\n messages.invalid_email)]\n", "VAR_7 = SQLFORM(VAR_254, VAR_133=['email'], hidden=dict(_next=next), showid\n =self.settings.showid, submit_button=self.messages.submit_button,\n delete_label=self.messages.delete_label, VAR_273=self.settings.\n formstyle, separator=self.settings.label_separator)\n", "if VAR_7.accepts(VAR_56, VAR_139 if self.csrf_prevention else None, VAR_171\n", "VAR_141 = VAR_254(email=form.vars.email)\n", "VAR_254.email.requires = VAR_271\n", "if not VAR_141:\n", "return VAR_7\n", "VAR_263.session.flash = self.messages.invalid_email\n", "VAR_199 = VAR_141.registration_key\n", "redirect(self.url(VAR_11=request.args))\n", "if VAR_199 in ('pending', 'disabled', 'blocked') or (VAR_199 or '').startswith(\n", "VAR_263.session.flash = self.messages.registration_pending\n", "VAR_132 = self.random_password()\n", "redirect(self.url(VAR_11=request.args))\n", "VAR_261 = self.settings.password_field\n", "VAR_27 = {VAR_261: str(VAR_254[VAR_261].validate(VAR_132)[0]),\n 'registration_key': ''}\n", "VAR_141.update_record(**d)\n", "if self.settings.mailer and self.settings.mailer.send(VAR_35=form.vars.\n", "VAR_139.flash = self.messages.email_sent\n", "VAR_139.flash = self.messages.unable_send_email\n", "self.log_event(VAR_136, VAR_141)\n", "VAR_26(VAR_135, VAR_7)\n", "if not VAR_112:\n", "VAR_112 = self.url(VAR_11=request.args)\n", "VAR_112 = FUNC_4(VAR_112, VAR_7)\n", "redirect(VAR_112)\n" ]
[ "def reset_password_deprecated(self, next=DEFAULT, onvalidation=DEFAULT,...\n", "\"\"\"docstring\"\"\"\n", "table_user = self.table_user()\n", "request = current.request\n", "response = current.response\n", "session = current.session\n", "if not self.settings.mailer:\n", "response.flash = self.messages.function_disabled\n", "if next is DEFAULT:\n", "return ''\n", "next = self.get_vars_next() or self.settings.retrieve_password_next\n", "if onvalidation is DEFAULT:\n", "onvalidation = self.settings.retrieve_password_onvalidation\n", "if onaccept is DEFAULT:\n", "onaccept = self.settings.retrieve_password_onaccept\n", "if log is DEFAULT:\n", "log = self.messages['retrieve_password_log']\n", "old_requires = table_user.email.requires\n", "table_user.email.requires = [IS_IN_DB(self.db, table_user.email,\n error_message=self.messages.invalid_email)]\n", "form = SQLFORM(table_user, fields=['email'], hidden=dict(_next=next),\n showid=self.settings.showid, submit_button=self.messages.submit_button,\n delete_label=self.messages.delete_label, formstyle=self.settings.\n formstyle, separator=self.settings.label_separator)\n", "if form.accepts(request, session if self.csrf_prevention else None,\n", "user = table_user(email=form.vars.email)\n", "table_user.email.requires = old_requires\n", "if not user:\n", "return form\n", "current.session.flash = self.messages.invalid_email\n", "key = user.registration_key\n", "redirect(self.url(args=request.args))\n", "if key in ('pending', 'disabled', 'blocked') or (key or '').startswith(\n", "current.session.flash = self.messages.registration_pending\n", "password = self.random_password()\n", "redirect(self.url(args=request.args))\n", "passfield = self.settings.password_field\n", "d = {passfield: str(table_user[passfield].validate(password)[0]),\n 'registration_key': ''}\n", "user.update_record(**d)\n", "if self.settings.mailer and self.settings.mailer.send(to=form.vars.email,\n", "session.flash = self.messages.email_sent\n", "session.flash = self.messages.unable_send_email\n", "self.log_event(log, user)\n", "callback(onaccept, form)\n", "if not next:\n", "next = self.url(args=request.args)\n", "next = replace_id(next, form)\n", "redirect(next)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Expr'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "For", "Assign'", "Assign'", "Expr'", "Expr'", "Condition", "Assign'", "Assign'", "Expr'" ]
[ "async def FUNC_6(self):...\n", "if not hasattr(self, 'resolved_spec'):\n", "await self.get_resolved_ref()\n", "return self.resolved_spec\n" ]
[ "async def get_resolved_spec(self):...\n", "if not hasattr(self, 'resolved_spec'):\n", "await self.get_resolved_ref()\n", "return self.resolved_spec\n" ]
[ 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Condition", "Expr'", "Return'" ]
[ "def FUNC_46(self):...\n", "return super(CLASS_18, self).get_queryset().order_by('ordering')\n" ]
[ "def get_queryset(self):...\n", "return super(CustomFieldManager, self).get_queryset().order_by('ordering')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@def_function.function...\n", "if VAR_56:\n", "return VAR_54 + VAR_55\n", "return VAR_54 * VAR_55\n" ]
[ "@def_function.function...\n", "if c:\n", "return a + b\n", "return a * b\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Return'" ]
[ "def FUNC_149(self, VAR_158):...\n", "VAR_56 = VAR_263.request\n", "if 'render' not in VAR_56.post_vars:\n", "VAR_56.post_vars.render = None\n", "return VAR_158(VAR_56.post_vars)\n" ]
[ "def preview(self, render):...\n", "request = current.request\n", "if 'render' not in request.post_vars:\n", "request.post_vars.render = None\n", "return render(request.post_vars)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_70(VAR_15, VAR_145):...\n", "VAR_107 = FUNC_12(VAR_145, 'inverted', VAR_15.getSizeC())\n", "VAR_50, VAR_51, VAR_52 = FUNC_2(VAR_145['c'])\n", "VAR_15.setActiveChannels(VAR_50, VAR_51, VAR_52, VAR_107)\n", "if VAR_145['m'] == 'g':\n", "VAR_15.setGreyscaleRenderingModel()\n", "VAR_15.setColorRenderingModel()\n", "if 'z' in VAR_145:\n", "VAR_15._re.setDefaultZ(VAR_178(VAR_145['z']) - 1)\n", "if 't' in VAR_145:\n", "VAR_15._re.setDefaultT(VAR_178(VAR_145['t']) - 1)\n", "VAR_15.saveDefaults()\n" ]
[ "def applyRenderingSettings(image, rdef):...\n", "invert_flags = _get_maps_enabled(rdef, 'inverted', image.getSizeC())\n", "channels, windows, colors = _split_channel_info(rdef['c'])\n", "image.setActiveChannels(channels, windows, colors, invert_flags)\n", "if rdef['m'] == 'g':\n", "image.setGreyscaleRenderingModel()\n", "image.setColorRenderingModel()\n", "if 'z' in rdef:\n", "image._re.setDefaultZ(long(rdef['z']) - 1)\n", "if 't' in rdef:\n", "image._re.setDefaultT(long(rdef['t']) - 1)\n", "image.saveDefaults()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Condition", "Expr'", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_39(self):...\n", "return CLASS_2.objects.filter(VAR_60=self, status__in=(1, 2)).count()\n" ]
[ "def num_open_tickets(self):...\n", "return Ticket.objects.filter(kbitem=self, status__in=(1, 2)).count()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_85(self, *VAR_0, **VAR_1):...\n", "VAR_73 = []\n", "VAR_25 = VAR_6.__name__\n", "VAR_91 = frappe.get_doc_hooks()\n", "for handler in (VAR_91.get(self.doctype, {}).get(VAR_25, []) + VAR_91.get(\n", "VAR_73.append(frappe.get_attr(handler))\n", "VAR_92 = FUNC_84(VAR_6, *VAR_73)\n", "return VAR_92(self, VAR_25, *VAR_0, **kwargs)\n" ]
[ "def composer(self, *args, **kwargs):...\n", "hooks = []\n", "method = f.__name__\n", "doc_events = frappe.get_doc_hooks()\n", "for handler in (doc_events.get(self.doctype, {}).get(method, []) +\n", "hooks.append(frappe.get_attr(handler))\n", "composed = compose(f, *hooks)\n", "return composed(self, method, *args, **kwargs)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "For", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_19(VAR_3, VAR_40=ValidationError, VAR_31=None, VAR_38=None, VAR_39...\n", "\"\"\"docstring\"\"\"\n", "FUNC_15(VAR_3, VAR_32=exc, VAR_31=title, VAR_35='red', VAR_38=\n is_minimizable, VAR_39=wide, VAR_34=as_list)\n" ]
[ "def throw(msg, exc=ValidationError, title=None, is_minimizable=None, wide=...\n", "\"\"\"docstring\"\"\"\n", "msgprint(msg, raise_exception=exc, title=title, indicator='red',\n is_minimizable=is_minimizable, wide=wide, as_list=as_list)\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Expr'" ]
[ "def FUNC_0(self, VAR_3, **VAR_4):...\n", "VAR_5 = VAR_3.get('user_consent')\n", "self.terms_template = self.read_templates(['terms.html'], autoescape=True)[0]\n", "if VAR_5 is None:\n", "return\n", "self.user_consent_version = str(VAR_5['version'])\n", "self.user_consent_template_dir = self.abspath(VAR_5['template_dir'])\n", "if not path.isdir(self.user_consent_template_dir):\n", "self.user_consent_server_notice_content = VAR_5.get('server_notice_content')\n", "self.block_events_without_consent_error = VAR_5.get('block_events_error')\n", "self.user_consent_server_notice_to_guests = bool(VAR_5.get(\n 'send_server_notice_to_guests', False))\n", "self.user_consent_at_registration = bool(VAR_5.get(\n 'require_at_registration', False))\n", "self.user_consent_policy_name = VAR_5.get('policy_name', 'Privacy Policy')\n" ]
[ "def read_config(self, config, **kwargs):...\n", "consent_config = config.get('user_consent')\n", "self.terms_template = self.read_templates(['terms.html'], autoescape=True)[0]\n", "if consent_config is None:\n", "return\n", "self.user_consent_version = str(consent_config['version'])\n", "self.user_consent_template_dir = self.abspath(consent_config['template_dir'])\n", "if not path.isdir(self.user_consent_template_dir):\n", "self.user_consent_server_notice_content = consent_config.get(\n 'server_notice_content')\n", "self.block_events_without_consent_error = consent_config.get(\n 'block_events_error')\n", "self.user_consent_server_notice_to_guests = bool(consent_config.get(\n 'send_server_notice_to_guests', False))\n", "self.user_consent_at_registration = bool(consent_config.get(\n 'require_at_registration', False))\n", "self.user_consent_policy_name = consent_config.get('policy_name',\n 'Privacy Policy')\n" ]
[ 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_100(self, VAR_10):...\n", "\"\"\"docstring\"\"\"\n", "self.jsonrpc2_procedures[VAR_10.__name__] = VAR_10\n", "return VAR_10\n" ]
[ "def jsonrpc2(self, f):...\n", "\"\"\"docstring\"\"\"\n", "self.jsonrpc2_procedures[f.__name__] = f\n", "return f\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_0(VAR_0, *VAR_1, **VAR_2):...\n", "return HttpResponse('Not here yet: %s (%r, %r)' % (VAR_0.path, VAR_1, VAR_2\n ), status=410)\n" ]
[ "def _not_here_yet(request, *args, **kwargs):...\n", "return HttpResponse('Not here yet: %s (%r, %r)' % (request.path, args,\n kwargs), status=410)\n" ]
[ 0, 2 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_13(self):...\n", "VAR_5 = self._makeContext()\n", "self.assertEqual(VAR_5.evaluate('d/?blank'), 'blank')\n" ]
[ "def test_evaluate_dict_with_key_from_expansion(self):...\n", "ec = self._makeContext()\n", "self.assertEqual(ec.evaluate('d/?blank'), 'blank')\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_40(self, VAR_21=0):...\n", "VAR_22 = 'dummy'\n", "if not VAR_21:\n", "VAR_22 = 'http://server/' + VAR_22\n", "return VAR_22\n" ]
[ "def absolute_url(self, relative=0):...\n", "url = 'dummy'\n", "if not relative:\n", "url = 'http://server/' + url\n", "return url\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "@default('private_token')...\n", "return os.getenv('GITLAB_PRIVATE_TOKEN', '')\n" ]
[ "@default('private_token')...\n", "return os.getenv('GITLAB_PRIVATE_TOKEN', '')\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "@app.after_request...\n", "VAR_14.set_cookie('XSRF-COOKIE', generate_csrf())\n", "return VAR_14\n" ]
[ "@app.after_request...\n", "response.set_cookie('XSRF-COOKIE', generate_csrf())\n", "return response\n" ]
[ 0, 0, 0 ]
[ "Condition", "Expr'", "Return'" ]
[ "def FUNC_2(VAR_4):...\n", "\"\"\"docstring\"\"\"\n", "VAR_50 = []\n", "VAR_51 = []\n", "VAR_52 = []\n", "for VAR_179 in VAR_4.split(','):\n", "VAR_179 = VAR_179.split('|', 1)\n", "VAR_1.debug(str(VAR_50) + ',' + str(VAR_51) + ',' + str(VAR_52))\n", "VAR_23 = VAR_179[0].strip()\n", "return VAR_50, VAR_51, VAR_52\n", "VAR_68 = None\n", "if VAR_23.find('$') >= 0:\n", "VAR_23, VAR_68 = VAR_23.split('$')\n", "VAR_50.append(int(VAR_23))\n", "VAR_262 = None, None\n", "if len(VAR_179) > 1:\n", "VAR_23 = VAR_179[1].strip()\n", "VAR_51.append(VAR_262)\n", "if VAR_23.find('$') >= 0:\n", "VAR_52.append(VAR_68)\n", "VAR_23, VAR_68 = VAR_23.split('$', 1)\n", "VAR_23 = VAR_23.split(':')\n", "if len(VAR_23) == 2:\n", "VAR_262 = [float(VAR_30) for VAR_30 in VAR_23]\n" ]
[ "def _split_channel_info(rchannels):...\n", "\"\"\"docstring\"\"\"\n", "channels = []\n", "windows = []\n", "colors = []\n", "for chan in rchannels.split(','):\n", "chan = chan.split('|', 1)\n", "logger.debug(str(channels) + ',' + str(windows) + ',' + str(colors))\n", "t = chan[0].strip()\n", "return channels, windows, colors\n", "color = None\n", "if t.find('$') >= 0:\n", "t, color = t.split('$')\n", "channels.append(int(t))\n", "ch_window = None, None\n", "if len(chan) > 1:\n", "t = chan[1].strip()\n", "windows.append(ch_window)\n", "if t.find('$') >= 0:\n", "colors.append(color)\n", "t, color = t.split('$', 1)\n", "t = t.split(':')\n", "if len(t) == 2:\n", "ch_window = [float(x) for x in t]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Expr'", "Assign'", "Return'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Expr'", "Assign'", "Assign'", "Condition", "Assign'" ]
[ "def FUNC_30():...\n", "VAR_82 = FUNC_13(VAR_1[VAR_14], FUNC_26)\n", "VAR_107 = FUNC_25(VAR_82)\n", "VAR_108 = userManager.has_been_customized()\n", "VAR_43 = FUNC_9(VAR_82, VAR_2, VAR_3, VAR_21)\n", "VAR_43.update({'enableWebcam': VAR_54, 'enableTemperatureGraph': VAR_55,\n 'enableAccessControl': True, 'accessControlActive': VAR_108,\n 'enableLoadingAnimation': VAR_52, 'enableSdSupport': VAR_53,\n 'sockJsConnectTimeout': VAR_56 * 1000, 'wizard': VAR_107, 'online':\n connectivityChecker.online, 'now': VAR_21})\n", "def FUNC_42():...\n", "VAR_129 = make_response(render_template('index.jinja2', **render_kwargs))\n", "if VAR_107:\n", "VAR_129 = util.flask.add_non_caching_response_headers(VAR_129)\n", "return VAR_129\n" ]
[ "def default_view():...\n", "filtered_templates = _filter_templates(_templates[locale],\n default_template_filter)\n", "wizard = wizard_active(filtered_templates)\n", "accesscontrol_active = userManager.has_been_customized()\n", "render_kwargs = _get_render_kwargs(filtered_templates, _plugin_names,\n _plugin_vars, now)\n", "render_kwargs.update({'enableWebcam': enable_webcam,\n 'enableTemperatureGraph': enable_temperature_graph,\n 'enableAccessControl': True, 'accessControlActive':\n accesscontrol_active, 'enableLoadingAnimation':\n enable_loading_animation, 'enableSdSupport': enable_sd_support,\n 'sockJsConnectTimeout': sockjs_connect_timeout * 1000, 'wizard': wizard,\n 'online': connectivityChecker.online, 'now': now})\n", "def make_default_ui():...\n", "r = make_response(render_template('index.jinja2', **render_kwargs))\n", "if wizard:\n", "r = util.flask.add_non_caching_response_headers(r)\n", "return r\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_3(self):...\n", "self.get_success(self.store.create_room_alias_association(self.your_room,\n '!8765asdf:test', ['test']))\n", "VAR_11 = self.get_success(self.handler.on_directory_query({'room_alias':\n '#your-room:test'}))\n", "self.assertEquals({'room_id': '!8765asdf:test', 'servers': ['test']}, VAR_11)\n" ]
[ "def test_incoming_fed_query(self):...\n", "self.get_success(self.store.create_room_alias_association(self.your_room,\n '!8765asdf:test', ['test']))\n", "response = self.get_success(self.handler.on_directory_query({'room_alias':\n '#your-room:test'}))\n", "self.assertEquals({'room_id': '!8765asdf:test', 'servers': ['test']}, response)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_34(self, VAR_40, VAR_3, *VAR_13, **VAR_14):...\n", "VAR_14.setdefault('timeout', self.timeout)\n", "return super(CLASS_4, self).request(VAR_40, VAR_3, *VAR_13, **kwargs)\n" ]
[ "def request(self, method, url, *args, **kwargs):...\n", "kwargs.setdefault('timeout', self.timeout)\n", "return super(PipSession, self).request(method, url, *args, **kwargs)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Return'" ]
[ "@FUNC_0...\n", "return Auth(self)\n" ]
[ "@cache_in_self...\n", "return Auth(self)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_18(VAR_14):...\n", "VAR_2 = VAR_14\n", "VAR_12 = create_engine(u'sqlite:///{0}'.format(VAR_14), echo=False)\n", "VAR_83 = scoped_session(sessionmaker())\n", "VAR_83.configure(bind=engine)\n", "VAR_1 = VAR_83()\n", "if os.path.exists(VAR_14):\n", "VAR_3.metadata.create_all(VAR_12)\n", "VAR_3.metadata.create_all(VAR_12)\n", "FUNC_12(VAR_1)\n", "FUNC_17(VAR_1)\n", "FUNC_13(VAR_1)\n", "FUNC_16(VAR_1)\n", "if cli.user_credentials:\n", "VAR_91, VAR_27 = cli.user_credentials.split(':', 1)\n", "VAR_6 = VAR_1.query(CLASS_1).filter(func.lower(CLASS_1.name) == VAR_91.lower()\n ).first()\n", "if VAR_6:\n", "if not VAR_27:\n", "print(\"Username '{}' not valid, can't change password\".format(VAR_91))\n", "print('Empty password is not allowed')\n", "VAR_6.password = generate_password_hash(VAR_27)\n", "sys.exit(3)\n", "sys.exit(4)\n", "if FUNC_20() == '':\n", "print(\"Password for user '{}' changed\".format(VAR_91))\n", "print('Failed changing password')\n", "sys.exit(0)\n", "sys.exit(3)\n" ]
[ "def init_db(app_db_path):...\n", "app_DB_path = app_db_path\n", "engine = create_engine(u'sqlite:///{0}'.format(app_db_path), echo=False)\n", "Session = scoped_session(sessionmaker())\n", "Session.configure(bind=engine)\n", "session = Session()\n", "if os.path.exists(app_db_path):\n", "Base.metadata.create_all(engine)\n", "Base.metadata.create_all(engine)\n", "migrate_Database(session)\n", "create_admin_user(session)\n", "clean_database(session)\n", "create_anonymous_user(session)\n", "if cli.user_credentials:\n", "username, password = cli.user_credentials.split(':', 1)\n", "user = session.query(User).filter(func.lower(User.name) == username.lower()\n ).first()\n", "if user:\n", "if not password:\n", "print(\"Username '{}' not valid, can't change password\".format(username))\n", "print('Empty password is not allowed')\n", "user.password = generate_password_hash(password)\n", "sys.exit(3)\n", "sys.exit(4)\n", "if session_commit() == '':\n", "print(\"Password for user '{}' changed\".format(username))\n", "print('Failed changing password')\n", "sys.exit(0)\n", "sys.exit(3)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_5(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_18 = event_from_pdu_json({'type': EventTypes.Message, 'content': {\n 'bool': True, 'null': None, 'int': 1, 'str': 'foobar'}, 'room_id':\n '!room:test', 'sender': '@user:test', 'depth': 1, 'prev_events': [],\n 'auth_events': [], 'origin_server_ts': 1234}, RoomVersions.V6)\n", "self.assertIsInstance(VAR_18, EventBase)\n" ]
[ "def test_valid_json(self):...\n", "\"\"\"docstring\"\"\"\n", "ev = event_from_pdu_json({'type': EventTypes.Message, 'content': {'bool': \n True, 'null': None, 'int': 1, 'str': 'foobar'}, 'room_id': '!room:test',\n 'sender': '@user:test', 'depth': 1, 'prev_events': [], 'auth_events': [\n ], 'origin_server_ts': 1234}, RoomVersions.V6)\n", "self.assertIsInstance(ev, EventBase)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'" ]
[ "@VAR_8.route('/shutdown', methods=['GET'])...\n", "VAR_44 = request.environ.get('werkzeug.server.shutdown')\n", "if VAR_44 is None:\n", "VAR_44()\n", "return 'Shutting down...'\n" ]
[ "@app.route('/shutdown', methods=['GET'])...\n", "shutdown_func = request.environ.get('werkzeug.server.shutdown')\n", "if shutdown_func is None:\n", "shutdown_func()\n", "return 'Shutting down...'\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Expr'", "Return'" ]
[ "def __call__(self, VAR_17=None, VAR_18={}, VAR_19=None, **VAR_20):...\n", "return 'dummy'\n" ]
[ "def __call__(self, client=None, REQUEST={}, RESPONSE=None, **kw):...\n", "return 'dummy'\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_15(self, VAR_0):...\n", "VAR_3 = VAR_0.MagicMock(title='My Test API')\n", "from openapi_python_client import Project\n", "Project.project_name_override = 'my-special-project-name'\n", "VAR_8 = Project(VAR_3=openapi)\n", "assert VAR_8.project_name == 'my-special-project-name'\n", "assert VAR_8.package_name == 'my_special_project_name'\n", "Project.package_name_override = 'my_special_package_name'\n", "VAR_8 = Project(VAR_3=openapi)\n", "assert VAR_8.project_name == 'my-special-project-name'\n", "assert VAR_8.package_name == 'my_special_package_name'\n" ]
[ "def test_project_and_package_name_overrides(self, mocker):...\n", "openapi = mocker.MagicMock(title='My Test API')\n", "from openapi_python_client import Project\n", "Project.project_name_override = 'my-special-project-name'\n", "project = Project(openapi=openapi)\n", "assert project.project_name == 'my-special-project-name'\n", "assert project.package_name == 'my_special_project_name'\n", "Project.package_name_override = 'my_special_package_name'\n", "project = Project(openapi=openapi)\n", "assert project.project_name == 'my-special-project-name'\n", "assert project.package_name == 'my_special_package_name'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "ImportFrom'", "Assign'", "Assign'", "Assert'", "Assert'", "Assign'", "Assign'", "Assert'", "Assert'" ]
[ "def FUNC_41(VAR_105):...\n", "return VAR_50 or VAR_104 != VAR_105.get_etag()[0]\n" ]
[ "def validate_cache(cached):...\n", "return force_refresh or current_etag != cached.get_etag()[0]\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "async def FUNC_11(self, VAR_28: Dict[str, Any], VAR_30: EventBase, VAR_27:...\n", "if VAR_30.type != EventTypes.Message and VAR_30.type != EventTypes.Encrypted:\n", "return None\n", "VAR_55 = VAR_27['m.room.member', VAR_30.sender]\n", "VAR_56 = await self.store.get_event(VAR_55)\n", "VAR_57 = name_from_member_event(VAR_56)\n", "VAR_58 = VAR_56.content.get('avatar_url')\n", "VAR_59 = FUNC_3(VAR_30.sender)\n", "VAR_34 = {'event_type': VAR_30.type, 'is_historical': VAR_30.event_id !=\n VAR_28['event_id'], 'id': VAR_30.event_id, 'ts': VAR_30.\n origin_server_ts, 'sender_name': VAR_57, 'sender_avatar_url': VAR_58,\n 'sender_hash': VAR_59}\n", "if VAR_30.type == EventTypes.Encrypted:\n", "return VAR_34\n", "VAR_60 = VAR_30.content.get('msgtype')\n", "VAR_34['msgtype'] = VAR_60\n", "if VAR_60 == 'm.text':\n", "self.add_text_message_vars(VAR_34, VAR_30)\n", "if VAR_60 == 'm.image':\n", "if 'body' in VAR_30.content:\n", "self.add_image_message_vars(VAR_34, VAR_30)\n", "VAR_34['body_text_plain'] = VAR_30.content['body']\n", "return VAR_34\n" ]
[ "async def get_message_vars(self, notif: Dict[str, Any], event: EventBase,...\n", "if event.type != EventTypes.Message and event.type != EventTypes.Encrypted:\n", "return None\n", "sender_state_event_id = room_state_ids['m.room.member', event.sender]\n", "sender_state_event = await self.store.get_event(sender_state_event_id)\n", "sender_name = name_from_member_event(sender_state_event)\n", "sender_avatar_url = sender_state_event.content.get('avatar_url')\n", "sender_hash = string_ordinal_total(event.sender)\n", "ret = {'event_type': event.type, 'is_historical': event.event_id != notif[\n 'event_id'], 'id': event.event_id, 'ts': event.origin_server_ts,\n 'sender_name': sender_name, 'sender_avatar_url': sender_avatar_url,\n 'sender_hash': sender_hash}\n", "if event.type == EventTypes.Encrypted:\n", "return ret\n", "msgtype = event.content.get('msgtype')\n", "ret['msgtype'] = msgtype\n", "if msgtype == 'm.text':\n", "self.add_text_message_vars(ret, event)\n", "if msgtype == 'm.image':\n", "if 'body' in event.content:\n", "self.add_image_message_vars(ret, event)\n", "ret['body_text_plain'] = event.content['body']\n", "return ret\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Condition", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_80():...\n", "VAR_84 = self.as_dict()\n", "for VAR_43, VAR_26 in iteritems(VAR_84):\n", "if VAR_26 == None:\n", "return VAR_84\n", "VAR_84[VAR_43] = ''\n" ]
[ "def get_values():...\n", "values = self.as_dict()\n", "for key, value in iteritems(values):\n", "if value == None:\n", "return values\n", "values[key] = ''\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Condition", "Return'", "Assign'" ]
[ "def FUNC_24(self):...\n", "def FUNC_81(VAR_19):...\n", "if self.parentfield:\n", "return '{} {} #{}: {} {}'.format(frappe.bold(_(self.doctype)), _('Row'),\n self.idx, _('Value cannot be negative for'), frappe.bold(_(VAR_19.label)))\n", "return _('Value cannot be negative for {0}: {1}').format(_(VAR_19.parent),\n frappe.bold(_(VAR_19.label)))\n" ]
[ "def _validate_non_negative(self):...\n", "def get_msg(df):...\n", "if self.parentfield:\n", "return '{} {} #{}: {} {}'.format(frappe.bold(_(self.doctype)), _('Row'),\n self.idx, _('Value cannot be negative for'), frappe.bold(_(df.label)))\n", "return _('Value cannot be negative for {0}: {1}').format(_(df.parent),\n frappe.bold(_(df.label)))\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "@frappe.whitelist()...\n", "return frappe.db.get_value('Report', VAR_0, 'disable_prepared_report') or 0\n" ]
[ "@frappe.whitelist()...\n", "return frappe.db.get_value('Report', report, 'disable_prepared_report') or 0\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_26(self, VAR_15, VAR_13):...\n", "VAR_25 = []\n", "for row in VAR_15:\n", "if isinstance(row, (list, tuple)):\n", "return VAR_25\n", "VAR_42 = frappe._dict()\n", "if isinstance(row, dict):\n", "for VAR_43, val in enumerate(row):\n", "VAR_42 = frappe._dict(row)\n", "VAR_25.append(VAR_42)\n", "VAR_42[VAR_13[VAR_43].get('fieldname')] = val\n" ]
[ "def build_data_dict(self, result, columns):...\n", "data = []\n", "for row in result:\n", "if isinstance(row, (list, tuple)):\n", "return data\n", "_row = frappe._dict()\n", "if isinstance(row, dict):\n", "for i, val in enumerate(row):\n", "_row = frappe._dict(row)\n", "data.append(_row)\n", "_row[columns[i].get('fieldname')] = val\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Condition", "Return'", "Assign'", "Condition", "For", "Assign'", "Expr'", "Assign'" ]