Columns (four parallel sequences per record, with sequence lengths ranging from 1 to 383):
- lines
- raw_lines
- label
- type
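The records that follow suggest the four columns are position-aligned per record: `lines` holds identifier-obfuscated source lines (VAR_n / FUNC_n placeholders), `raw_lines` the corresponding original source lines, `label` one integer per line (0 on almost every line in the samples below, with occasional non-zero values such as 5 or 2), and `type` the statement kind of each line. As a minimal sketch of how one such record might be consumed — assuming it has been loaded as a plain dict with these field names and that a non-zero label marks a flagged line, both of which are assumptions rather than documented facts — the snippet below pairs each original line with its label and statement kind:

```python
# Minimal sketch, not part of the dataset tooling. Assumptions: the record is a
# dict with the four fields listed above, the sequences are position-aligned,
# and a non-zero entry in "label" marks a flagged line.
import json

def flagged_lines(record):
    """Yield (raw_line, label, statement_kind) for every line whose label is non-zero."""
    for raw, label, kind in zip(record["raw_lines"], record["label"], record["type"]):
        if label != 0:
            yield raw.rstrip("\n"), label, kind

if __name__ == "__main__":
    # Hypothetical usage: "record.json" holds one record of this dataset saved as JSON.
    with open("record.json") as fh:
        record = json.load(fh)
    for raw, label, kind in flagged_lines(record):
        print(f"{label}\t{kind}\t{raw}")
```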
[ "@ensure_csrf_cookie...\n", "\"\"\"docstring\"\"\"\n", "VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)\n", "VAR_57 = VAR_9.POST.get('action')\n", "VAR_58 = VAR_9.POST.get('identifiers')\n", "VAR_59 = FUNC_64(VAR_58)\n", "VAR_61 = VAR_9.POST.get('email_students') in ['true', 'True', True]\n", "VAR_60 = VAR_9.POST.get('auto_enroll') in ['true', 'True', True]\n", "VAR_51 = []\n", "VAR_64 = 'beta'\n", "VAR_65 = get_course_by_id(VAR_10)\n", "VAR_25 = {}\n", "if VAR_61:\n", "VAR_168 = VAR_9.is_secure()\n", "for identifier in VAR_59:\n", "VAR_25 = get_email_params(VAR_65, VAR_60=auto_enroll, VAR_168=secure)\n", "VAR_63 = {'action': VAR_57, 'results': VAR_51}\n", "VAR_169 = False\n", "VAR_169 = True\n", "if VAR_61:\n", "VAR_51.append({'identifier': identifier, 'error': VAR_169,\n 'userDoesNotExist': VAR_195})\n", "return JsonResponse(VAR_63)\n", "VAR_195 = False\n", "VAR_195 = True\n", "send_beta_role_email(VAR_57, VAR_19, VAR_25)\n", "if VAR_60:\n", "VAR_19 = get_student_from_identifier(identifier)\n", "VAR_0.exception(u'Error while #{}ing student')\n", "if not CourseEnrollment.is_enrolled(VAR_19, VAR_10):\n", "if VAR_57 == 'add':\n", "VAR_0.exception(exc)\n", "CourseEnrollment.enroll(VAR_19, VAR_10)\n", "allow_access(VAR_65, VAR_19, VAR_64)\n", "if VAR_57 == 'remove':\n", "VAR_169 = True\n", "revoke_access(VAR_65, VAR_19, VAR_64)\n", "return HttpResponseBadRequest(strip_tags(\"Unrecognized action '{}'\".format(\n VAR_57)))\n" ]
[ "@ensure_csrf_cookie...\n", "\"\"\"docstring\"\"\"\n", "course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)\n", "action = request.POST.get('action')\n", "identifiers_raw = request.POST.get('identifiers')\n", "identifiers = _split_input_list(identifiers_raw)\n", "email_students = request.POST.get('email_students') in ['true', 'True', True]\n", "auto_enroll = request.POST.get('auto_enroll') in ['true', 'True', True]\n", "results = []\n", "rolename = 'beta'\n", "course = get_course_by_id(course_id)\n", "email_params = {}\n", "if email_students:\n", "secure = request.is_secure()\n", "for identifier in identifiers:\n", "email_params = get_email_params(course, auto_enroll=auto_enroll, secure=secure)\n", "response_payload = {'action': action, 'results': results}\n", "error = False\n", "error = True\n", "if email_students:\n", "results.append({'identifier': identifier, 'error': error,\n 'userDoesNotExist': user_does_not_exist})\n", "return JsonResponse(response_payload)\n", "user_does_not_exist = False\n", "user_does_not_exist = True\n", "send_beta_role_email(action, user, email_params)\n", "if auto_enroll:\n", "user = get_student_from_identifier(identifier)\n", "log.exception(u'Error while #{}ing student')\n", "if not CourseEnrollment.is_enrolled(user, course_id):\n", "if action == 'add':\n", "log.exception(exc)\n", "CourseEnrollment.enroll(user, course_id)\n", "allow_access(course, user, rolename)\n", "if action == 'remove':\n", "error = True\n", "revoke_access(course, user, rolename)\n", "return HttpResponseBadRequest(strip_tags(\"Unrecognized action '{}'\".format(\n action)))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Return'", "Assign'", "Assign'", "Expr'", "Condition", "Assign'", "Expr'", "Condition", "Condition", "Expr'", "Expr'", "Expr'", "Condition", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_2(VAR_7, VAR_8):...\n", "" ]
[ "def delete_user_session(user_id, session_key):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_15(self, VAR_5, VAR_10, VAR_16):...\n", "VAR_16 = ElementTree.fromstring(VAR_16)\n", "VAR_29 = 'ami'\n", "VAR_30 = 'machine'\n", "VAR_43 = VAR_16.find('machine_configuration/kernel_id').text\n", "VAR_43 = None\n", "VAR_44 = VAR_16.find('machine_configuration/ramdisk_id').text\n", "VAR_44 = None\n", "VAR_45 = VAR_16.find('machine_configuration/architecture').text\n", "VAR_45 = 'x86_64'\n", "VAR_31 = []\n", "if VAR_43 == 'true':\n", "if VAR_44 == 'true':\n", "VAR_46 = VAR_16.findall('machine_configuration/block_device_mapping/mapping')\n", "VAR_31 = []\n", "VAR_32 = VAR_10['properties']\n", "VAR_29 = 'aki'\n", "VAR_29 = 'ari'\n", "for bdm in VAR_46:\n", "VAR_32['project_id'] = VAR_5.project_id\n", "VAR_30 = 'kernel'\n", "VAR_30 = 'ramdisk'\n", "VAR_31.append({'virtual': bdm.find('virtual').text, 'device': bdm.find(\n 'device').text})\n", "VAR_32['architecture'] = VAR_45\n", "VAR_43 = None\n", "VAR_44 = None\n", "def FUNC_20(VAR_33, VAR_6):...\n", "VAR_6 = ec2utils.ec2_id_to_id(VAR_6)\n", "VAR_7 = self.get_image_uuid(VAR_5, VAR_6)\n", "VAR_32['image_id'] = VAR_7\n", "if VAR_43:\n", "FUNC_20('kernel_id', VAR_43)\n", "if VAR_44:\n", "FUNC_20('ramdisk_id', VAR_44)\n", "if VAR_31:\n", "VAR_32['mappings'] = VAR_31\n", "VAR_10.update({'disk_format': VAR_29, 'container_format': VAR_29, 'status':\n 'queued', 'is_public': False, 'properties': VAR_32})\n", "VAR_10['properties']['image_state'] = 'pending'\n", "VAR_6 = VAR_10.pop('id', None)\n", "VAR_9 = self.service.create(VAR_5, VAR_10)\n", "VAR_7 = VAR_9['id']\n", "VAR_9['id'] = self._create_image_id(VAR_5, VAR_7)\n", "return VAR_16, VAR_9, VAR_7\n" ]
[ "def _s3_parse_manifest(self, context, metadata, manifest):...\n", "manifest = ElementTree.fromstring(manifest)\n", "image_format = 'ami'\n", "image_type = 'machine'\n", "kernel_id = manifest.find('machine_configuration/kernel_id').text\n", "kernel_id = None\n", "ramdisk_id = manifest.find('machine_configuration/ramdisk_id').text\n", "ramdisk_id = None\n", "arch = manifest.find('machine_configuration/architecture').text\n", "arch = 'x86_64'\n", "mappings = []\n", "if kernel_id == 'true':\n", "if ramdisk_id == 'true':\n", "block_device_mapping = manifest.findall(\n 'machine_configuration/block_device_mapping/mapping')\n", "mappings = []\n", "properties = metadata['properties']\n", "image_format = 'aki'\n", "image_format = 'ari'\n", "for bdm in block_device_mapping:\n", "properties['project_id'] = context.project_id\n", "image_type = 'kernel'\n", "image_type = 'ramdisk'\n", "mappings.append({'virtual': bdm.find('virtual').text, 'device': bdm.find(\n 'device').text})\n", "properties['architecture'] = arch\n", "kernel_id = None\n", "ramdisk_id = None\n", "def _translate_dependent_image_id(image_key, image_id):...\n", "image_id = ec2utils.ec2_id_to_id(image_id)\n", "image_uuid = self.get_image_uuid(context, image_id)\n", "properties['image_id'] = image_uuid\n", "if kernel_id:\n", "_translate_dependent_image_id('kernel_id', kernel_id)\n", "if ramdisk_id:\n", "_translate_dependent_image_id('ramdisk_id', ramdisk_id)\n", "if mappings:\n", "properties['mappings'] = mappings\n", "metadata.update({'disk_format': image_format, 'container_format':\n image_format, 'status': 'queued', 'is_public': False, 'properties':\n properties})\n", "metadata['properties']['image_state'] = 'pending'\n", "image_id = metadata.pop('id', None)\n", "image = self.service.create(context, metadata)\n", "image_uuid = image['id']\n", "image['id'] = self._create_image_id(context, image_uuid)\n", "return manifest, image, image_uuid\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_16():...\n", "VAR_46 = FORM.confirm(T('Upgrade'), {T('Cancel'): URL('site')})\n", "if VAR_46.accepted:\n", "VAR_124, VAR_125 = upgrade(request)\n", "return dict(VAR_46=dialog)\n", "if VAR_124:\n", "session.flash = T('web2py upgraded; please restart it')\n", "session.flash = T('unable to upgrade because \"%s\"', VAR_125)\n", "redirect(URL('site'))\n" ]
[ "def upgrade_web2py():...\n", "dialog = FORM.confirm(T('Upgrade'), {T('Cancel'): URL('site')})\n", "if dialog.accepted:\n", "success, error = upgrade(request)\n", "return dict(dialog=dialog)\n", "if success:\n", "session.flash = T('web2py upgraded; please restart it')\n", "session.flash = T('unable to upgrade because \"%s\"', error)\n", "redirect(URL('site'))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Return'", "Condition", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_89(*VAR_79, **VAR_42):...\n", "\"\"\"docstring\"\"\"\n", "import frappe.utils.formatters\n", "return frappe.utils.formatters.format_value(*VAR_79, **kwargs)\n" ]
[ "def format(*args, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "import frappe.utils.formatters\n", "return frappe.utils.formatters.format_value(*args, **kwargs)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Import'", "Return'" ]
[ "@VAR_0.route('/pid_mod_unique_id/<unique_id>/<state>')...\n", "\"\"\"docstring\"\"\"\n", "if not utils_general.user_has_permission('edit_controllers'):\n", "return 'Insufficient user permissions to manipulate PID'\n", "VAR_70 = PID.query.filter(PID.unique_id == VAR_9).first()\n", "VAR_69 = DaemonControl()\n", "if VAR_24 == 'activate_pid':\n", "VAR_70.is_activated = True\n", "if VAR_24 == 'deactivate_pid':\n", "VAR_70.save()\n", "VAR_70.is_activated = False\n", "if VAR_24 == 'pause_pid':\n", "VAR_37, VAR_105 = VAR_69.controller_activate(VAR_70.unique_id)\n", "VAR_70.is_paused = False\n", "VAR_70.is_paused = True\n", "if VAR_24 == 'hold_pid':\n", "return VAR_105\n", "VAR_70.is_held = False\n", "VAR_70.save()\n", "VAR_70.is_held = True\n", "if VAR_24 == 'resume_pid':\n", "VAR_70.save()\n", "if VAR_70.is_activated:\n", "VAR_70.save()\n", "VAR_70.is_held = False\n", "if 'set_setpoint_pid' in VAR_24:\n", "VAR_37, VAR_105 = VAR_69.controller_deactivate(VAR_70.unique_id)\n", "VAR_105 = VAR_69.pid_pause(VAR_70.unique_id)\n", "VAR_105 = 'PID Paused (Note: PID is not currently active)'\n", "if VAR_70.is_activated:\n", "VAR_70.is_paused = False\n", "VAR_70.setpoint = VAR_24.split('|')[1]\n", "return VAR_105\n", "return VAR_105\n", "VAR_105 = VAR_69.pid_hold(VAR_70.unique_id)\n", "VAR_105 = 'PID Held (Note: PID is not currently active)'\n", "VAR_70.save()\n", "VAR_70.save()\n", "return VAR_105\n", "if VAR_70.is_activated:\n", "if VAR_70.is_activated:\n", "VAR_105 = VAR_69.pid_resume(VAR_70.unique_id)\n", "VAR_105 = 'PID Resumed (Note: PID is not currently active)'\n", "VAR_105 = VAR_69.pid_set(VAR_70.unique_id, 'setpoint', float(VAR_24.split(\n '|')[1]))\n", "VAR_105 = 'PID Setpoint changed (Note: PID is not currently active)'\n", "return VAR_105\n", "return VAR_105\n" ]
[ "@blueprint.route('/pid_mod_unique_id/<unique_id>/<state>')...\n", "\"\"\"docstring\"\"\"\n", "if not utils_general.user_has_permission('edit_controllers'):\n", "return 'Insufficient user permissions to manipulate PID'\n", "pid = PID.query.filter(PID.unique_id == unique_id).first()\n", "daemon = DaemonControl()\n", "if state == 'activate_pid':\n", "pid.is_activated = True\n", "if state == 'deactivate_pid':\n", "pid.save()\n", "pid.is_activated = False\n", "if state == 'pause_pid':\n", "_, return_str = daemon.controller_activate(pid.unique_id)\n", "pid.is_paused = False\n", "pid.is_paused = True\n", "if state == 'hold_pid':\n", "return return_str\n", "pid.is_held = False\n", "pid.save()\n", "pid.is_held = True\n", "if state == 'resume_pid':\n", "pid.save()\n", "if pid.is_activated:\n", "pid.save()\n", "pid.is_held = False\n", "if 'set_setpoint_pid' in state:\n", "_, return_str = daemon.controller_deactivate(pid.unique_id)\n", "return_str = daemon.pid_pause(pid.unique_id)\n", "return_str = 'PID Paused (Note: PID is not currently active)'\n", "if pid.is_activated:\n", "pid.is_paused = False\n", "pid.setpoint = state.split('|')[1]\n", "return return_str\n", "return return_str\n", "return_str = daemon.pid_hold(pid.unique_id)\n", "return_str = 'PID Held (Note: PID is not currently active)'\n", "pid.save()\n", "pid.save()\n", "return return_str\n", "if pid.is_activated:\n", "if pid.is_activated:\n", "return_str = daemon.pid_resume(pid.unique_id)\n", "return_str = 'PID Resumed (Note: PID is not currently active)'\n", "return_str = daemon.pid_set(pid.unique_id, 'setpoint', float(state.split(\n '|')[1]))\n", "return_str = 'PID Setpoint changed (Note: PID is not currently active)'\n", "return return_str\n", "return return_str\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Expr'", "Assign'", "Condition", "Expr'", "Condition", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Return'", "Return'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Return'", "Return'" ]
[ "def FUNC_15(self):...\n", "VAR_230 = self.API_URI\n", "VAR_231 = self.options.pop('hl', None)\n", "if VAR_231:\n", "VAR_230 = self.API_URI + '?hl=%s' % VAR_231\n", "VAR_57 = self.public_key\n", "self.options['sitekey'] = VAR_57\n", "VAR_232 = DIV(SCRIPT(_src=api_uri, _async='', _defer=''), DIV(_class=\n 'g-recaptcha', VAR_17=self.options), TAG.noscript(XML('string' % dict(\n VAR_57=public_key))))\n", "if not self.errors.captcha:\n", "return XML(VAR_232).xml()\n", "VAR_232.append(DIV(self.errors['captcha'], _class='error'))\n", "return XML(VAR_232).xml()\n" ]
[ "def xml(self):...\n", "api_uri = self.API_URI\n", "hl = self.options.pop('hl', None)\n", "if hl:\n", "api_uri = self.API_URI + '?hl=%s' % hl\n", "public_key = self.public_key\n", "self.options['sitekey'] = public_key\n", "captcha = DIV(SCRIPT(_src=api_uri, _async='', _defer=''), DIV(_class=\n 'g-recaptcha', data=self.options), TAG.noscript(XML(\n \"\"\"\n<div style=\"width: 302px; height: 352px;\">\n<div style=\"width: 302px; height: 352px; position: relative;\">\n <div style=\"width: 302px; height: 352px; position: absolute;\">\n <iframe src=\"https://www.google.com/recaptcha/api/fallback?k=%(public_key)s\"\n frameborder=\"0\" scrolling=\"no\"\n style=\"width: 302px; height:352px; border-style: none;\">\n </iframe>\n </div>\n <div style=\"width: 250px; height: 80px; position: absolute; border-style: none;\n bottom: 21px; left: 25px; margin: 0px; padding: 0px; right: 25px;\">\n <textarea id=\"g-recaptcha-response\" name=\"g-recaptcha-response\"\n class=\"g-recaptcha-response\"\n style=\"width: 250px; height: 80px; border: 1px solid #c1c1c1;\n margin: 0px; padding: 0px; resize: none;\" value=\"\">\n </textarea>\n </div>\n</div>\n</div>\"\"\"\n % dict(public_key=public_key))))\n", "if not self.errors.captcha:\n", "return XML(captcha).xml()\n", "captcha.append(DIV(self.errors['captcha'], _class='error'))\n", "return XML(captcha).xml()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Return'", "Expr'", "Return'" ]
[ "def FUNC_4(self, VAR_0, VAR_1, VAR_3):...\n", "VAR_19 = self.helper.create_room_as(self.user_id)\n", "VAR_20, VAR_21 = self.make_request('PUT', b'directory/list/room/%s' % (\n VAR_19.encode('ascii'),), b'{}')\n", "self.assertEquals(200, VAR_21.code, VAR_21.result)\n", "self.room_list_handler = VAR_3.get_room_list_handler()\n", "self.directory_handler = VAR_3.get_directory_handler()\n", "return VAR_3\n" ]
[ "def prepare(self, reactor, clock, hs):...\n", "room_id = self.helper.create_room_as(self.user_id)\n", "request, channel = self.make_request('PUT', b'directory/list/room/%s' % (\n room_id.encode('ascii'),), b'{}')\n", "self.assertEquals(200, channel.code, channel.result)\n", "self.room_list_handler = hs.get_room_list_handler()\n", "self.directory_handler = hs.get_directory_handler()\n", "return hs\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_12(self):...\n", "async def FUNC_18(VAR_3):...\n", "VAR_3.write(b'response')\n", "VAR_3.finish()\n", "VAR_5 = CLASS_2.TestResource()\n", "VAR_5.callback = VAR_10\n", "VAR_7, VAR_6 = make_request(self.reactor, FakeSite(VAR_5), b'GET', b'/path')\n", "self.assertEqual(VAR_6.result['code'], b'200')\n", "VAR_11 = VAR_6.result['body']\n", "self.assertEqual(VAR_11, b'response')\n" ]
[ "def test_good_response(self):...\n", "async def callback(request):...\n", "request.write(b'response')\n", "request.finish()\n", "res = WrapHtmlRequestHandlerTests.TestResource()\n", "res.callback = callback\n", "_, channel = make_request(self.reactor, FakeSite(res), b'GET', b'/path')\n", "self.assertEqual(channel.result['code'], b'200')\n", "body = channel.result['body']\n", "self.assertEqual(body, b'response')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "AsyncFunctionDef'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_86():...\n", "from frappe.utils import cint\n", "return VAR_19.mute_emails or cint(VAR_13.get('mute_emails') or 0) or False\n" ]
[ "def are_emails_muted():...\n", "from frappe.utils import cint\n", "return flags.mute_emails or cint(conf.get('mute_emails') or 0) or False\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "ImportFrom'", "Return'" ]
[ "def FUNC_13(self):...\n", "VAR_20 = 'inputx=file[[v1]v2'\n", "saved_model_cli.preprocess_inputs_arg_string(VAR_20)\n", "VAR_20 = 'inputx:file'\n", "saved_model_cli.preprocess_inputs_arg_string(VAR_20)\n", "VAR_20 = 'inputx:np.zeros((5))'\n", "saved_model_cli.preprocess_input_exprs_arg_string(VAR_20)\n" ]
[ "def testInputPreProcessErrorBadFormat(self):...\n", "input_str = 'inputx=file[[v1]v2'\n", "saved_model_cli.preprocess_inputs_arg_string(input_str)\n", "input_str = 'inputx:file'\n", "saved_model_cli.preprocess_inputs_arg_string(input_str)\n", "input_str = 'inputx:np.zeros((5))'\n", "saved_model_cli.preprocess_input_exprs_arg_string(input_str)\n" ]
[ 0, 0, 0, 0, 0, 0, 5 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "@VAR_0.route('/query/runs/all')...\n", "VAR_23 = g.conn.session.query(Query).join(Query.latest_rev).join(QueryRevision\n .latest_run)\n", "VAR_24 = 'all'\n", "if request.args.get('published') == 'true':\n", "VAR_23 = VAR_23.filter(Query.published)\n", "VAR_13 = int(request.args.get('limit', VAR_0.config.get(\n 'QUERY_RESULTS_PER_PAGE', 50)))\n", "VAR_24 = 'published'\n", "VAR_23, VAR_25, VAR_26 = CLASS_0(VAR_23, request.args.get('from'), VAR_13,\n request.path, request.referrer, dict(request.args)).paginate()\n", "return render_template('query/list.html', user=get_user(), VAR_23=queries,\n VAR_25=prev_link, VAR_26=next_link, VAR_24=queries_filter)\n" ]
[ "@app.route('/query/runs/all')...\n", "queries = g.conn.session.query(Query).join(Query.latest_rev).join(QueryRevision\n .latest_run)\n", "queries_filter = 'all'\n", "if request.args.get('published') == 'true':\n", "queries = queries.filter(Query.published)\n", "limit = int(request.args.get('limit', app.config.get(\n 'QUERY_RESULTS_PER_PAGE', 50)))\n", "queries_filter = 'published'\n", "queries, prev_link, next_link = QueriesRangeBasedPagination(queries,\n request.args.get('from'), limit, request.path, request.referrer, dict(\n request.args)).paginate()\n", "return render_template('query/list.html', user=get_user(), queries=queries,\n prev_link=prev_link, next_link=next_link, queries_filter=queries_filter)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_19(self):...\n", "\"\"\"docstring\"\"\"\n", "def FUNC_80():...\n", "VAR_84 = self.as_dict()\n", "for VAR_43, VAR_26 in iteritems(VAR_84):\n", "if VAR_26 == None:\n", "return VAR_84\n", "VAR_84[VAR_43] = ''\n" ]
[ "def set_title_field(self):...\n", "\"\"\"docstring\"\"\"\n", "def get_values():...\n", "values = self.as_dict()\n", "for key, value in iteritems(values):\n", "if value == None:\n", "return values\n", "values[key] = ''\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "FunctionDef'", "Assign'", "For", "Condition", "Return'", "Assign'" ]
[ "def FUNC_110(VAR_171, VAR_172=1, VAR_173='en'):...\n", "VAR_203 = []\n", "VAR_204 = VAR_0.Faker(VAR_173)\n", "if VAR_171 not in dir(VAR_204):\n", "for i in range(VAR_172):\n", "from frappe.chat.util import squashify\n", "VAR_224 = getattr(VAR_204, VAR_171)()\n", "return squashify(VAR_203)\n", "VAR_203.append(VAR_224)\n" ]
[ "def mock(type, size=1, locale='en'):...\n", "results = []\n", "fake = faker.Faker(locale)\n", "if type not in dir(fake):\n", "for i in range(size):\n", "from frappe.chat.util import squashify\n", "data = getattr(fake, type)()\n", "return squashify(results)\n", "results.append(data)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "For", "ImportFrom'", "Assign'", "Return'", "Expr'" ]
[ "def FUNC_85(self, *VAR_0, **VAR_1):...\n", "VAR_73 = []\n", "VAR_25 = VAR_6.__name__\n", "VAR_91 = frappe.get_doc_hooks()\n", "for handler in (VAR_91.get(self.doctype, {}).get(VAR_25, []) + VAR_91.get(\n", "VAR_73.append(frappe.get_attr(handler))\n", "VAR_92 = FUNC_84(VAR_6, *VAR_73)\n", "return VAR_92(self, VAR_25, *VAR_0, **kwargs)\n" ]
[ "def composer(self, *args, **kwargs):...\n", "hooks = []\n", "method = f.__name__\n", "doc_events = frappe.get_doc_hooks()\n", "for handler in (doc_events.get(self.doctype, {}).get(method, []) +\n", "hooks.append(frappe.get_attr(handler))\n", "composed = compose(f, *hooks)\n", "return composed(self, method, *args, **kwargs)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "For", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_39(self):...\n", "VAR_53.lib.sessions.expire()\n" ]
[ "def api_logout(self):...\n", "cherrypy.lib.sessions.expire()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "@parameterized.named_parameters(('non_tfrt', False))...\n", "self.parser = saved_model_cli.create_parser()\n", "VAR_10 = test.test_src_dir_path(VAR_0)\n", "VAR_42 = os.path.join(test.get_temp_dir(), 'new_dir' + ('tfrt' if VAR_5 else\n ''))\n", "VAR_11 = self.parser.parse_args(['run', '--dir', VAR_10, '--tag_set',\n 'serve', '--signature_def', 'regress_x_to_y', '--input_examples',\n 'inputs=[{\"x\":[8.0],\"x2\":[5.0]}, {\"x\":[4.0],\"x2\":[3.0]}]', '--outdir',\n VAR_42] + (['--use_tfrt'] if VAR_5 else []))\n", "saved_model_cli.run(VAR_11)\n", "VAR_43 = np.load(os.path.join(VAR_42, 'outputs.npy'))\n", "VAR_44 = np.array([[6.0], [4.0]])\n", "self.assertAllEqual(VAR_44, VAR_43)\n" ]
[ "@parameterized.named_parameters(('non_tfrt', False))...\n", "self.parser = saved_model_cli.create_parser()\n", "base_path = test.test_src_dir_path(SAVED_MODEL_PATH)\n", "output_dir = os.path.join(test.get_temp_dir(), 'new_dir' + ('tfrt' if\n use_tfrt else ''))\n", "args = self.parser.parse_args(['run', '--dir', base_path, '--tag_set',\n 'serve', '--signature_def', 'regress_x_to_y', '--input_examples',\n 'inputs=[{\"x\":[8.0],\"x2\":[5.0]}, {\"x\":[4.0],\"x2\":[3.0]}]', '--outdir',\n output_dir] + (['--use_tfrt'] if use_tfrt else []))\n", "saved_model_cli.run(args)\n", "y_actual = np.load(os.path.join(output_dir, 'outputs.npy'))\n", "y_expected = np.array([[6.0], [4.0]])\n", "self.assertAllEqual(y_expected, y_actual)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_28(self, VAR_33, VAR_34=None, VAR_35=None, VAR_36=None, VAR_37=...\n", "VAR_79 = FUNC_5(VAR_33.url)\n", "VAR_15 = Response()\n", "VAR_15.status_code = 200\n", "VAR_15.url = VAR_33.url\n", "VAR_98 = os.stat(VAR_79)\n", "VAR_15.status_code = 404\n", "VAR_99 = email.utils.formatdate(VAR_98.st_mtime, usegmt=True)\n", "return VAR_15\n", "VAR_15.raw = exc\n", "VAR_57 = mimetypes.guess_type(VAR_79)[0] or 'text/plain'\n", "VAR_15.headers = CaseInsensitiveDict({'Content-Type': VAR_57,\n 'Content-Length': VAR_98.st_size, 'Last-Modified': VAR_99})\n", "VAR_15.raw = open(VAR_79, 'rb')\n", "VAR_15.close = VAR_15.raw.close\n" ]
[ "def send(self, request, stream=None, timeout=None, verify=None, cert=None,...\n", "pathname = url_to_path(request.url)\n", "resp = Response()\n", "resp.status_code = 200\n", "resp.url = request.url\n", "stats = os.stat(pathname)\n", "resp.status_code = 404\n", "modified = email.utils.formatdate(stats.st_mtime, usegmt=True)\n", "return resp\n", "resp.raw = exc\n", "content_type = mimetypes.guess_type(pathname)[0] or 'text/plain'\n", "resp.headers = CaseInsensitiveDict({'Content-Type': content_type,\n 'Content-Length': stats.st_size, 'Last-Modified': modified})\n", "resp.raw = open(pathname, 'rb')\n", "resp.close = resp.raw.close\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_5(VAR_0):...\n", "VAR_0.DEBUG = True\n", "VAR_6 = Token(TokenType.TEXT,\n \"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view pk=99\"\n )\n", "VAR_7 = unicorn(None, VAR_6)\n", "VAR_10 = CLASS_0(component_name='test', component_id='asdf')\n", "VAR_8 = {'view': VAR_10}\n", "VAR_7.render(VAR_8)\n", "assert VAR_7.component_id == 'asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:99'\n" ]
[ "def test_unicorn_render_parent_with_pk(settings):...\n", "settings.DEBUG = True\n", "token = Token(TokenType.TEXT,\n \"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view pk=99\"\n )\n", "unicorn_node = unicorn(None, token)\n", "view = FakeComponentParent(component_name='test', component_id='asdf')\n", "context = {'view': view}\n", "unicorn_node.render(context)\n", "assert unicorn_node.component_id == 'asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:99'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assert'" ]
[ "def FUNC_11(self):...\n", "VAR_15 = ClearableFileInput()\n", "assert 'accept' not in VAR_15.render(name='file', value='test.jpg')\n", "assert ['starts-with', '$Content-Type', ''] in VAR_15.get_conditions(None)\n", "VAR_15 = ClearableFileInput(attrs={'accept': 'image/*'})\n", "assert 'accept=\"image/*\"' in VAR_15.render(name='file', value='test.jpg')\n", "assert ['starts-with', '$Content-Type', 'image/'] in VAR_15.get_conditions(\n 'image/*')\n", "VAR_15 = ClearableFileInput(attrs={'accept': 'image/jpeg'})\n", "assert 'accept=\"image/jpeg\"' in VAR_15.render(name='file', value='test.jpg')\n", "assert {'Content-Type': 'image/jpeg'} in VAR_15.get_conditions('image/jpeg')\n", "VAR_15 = ClearableFileInput(attrs={'accept': 'application/pdf,image/*'})\n", "assert 'accept=\"application/pdf,image/*\"' in VAR_15.render(name='file',\n value='test.jpg')\n", "assert ['starts-with', '$Content-Type', ''] in VAR_15.get_conditions(\n 'application/pdf,image/*')\n", "assert {'Content-Type': 'application/pdf'} not in VAR_15.get_conditions(\n 'application/pdf,image/*')\n" ]
[ "def test_accept(self):...\n", "widget = ClearableFileInput()\n", "assert 'accept' not in widget.render(name='file', value='test.jpg')\n", "assert ['starts-with', '$Content-Type', ''] in widget.get_conditions(None)\n", "widget = ClearableFileInput(attrs={'accept': 'image/*'})\n", "assert 'accept=\"image/*\"' in widget.render(name='file', value='test.jpg')\n", "assert ['starts-with', '$Content-Type', 'image/'] in widget.get_conditions(\n 'image/*')\n", "widget = ClearableFileInput(attrs={'accept': 'image/jpeg'})\n", "assert 'accept=\"image/jpeg\"' in widget.render(name='file', value='test.jpg')\n", "assert {'Content-Type': 'image/jpeg'} in widget.get_conditions('image/jpeg')\n", "widget = ClearableFileInput(attrs={'accept': 'application/pdf,image/*'})\n", "assert 'accept=\"application/pdf,image/*\"' in widget.render(name='file',\n value='test.jpg')\n", "assert ['starts-with', '$Content-Type', ''] in widget.get_conditions(\n 'application/pdf,image/*')\n", "assert {'Content-Type': 'application/pdf'} not in widget.get_conditions(\n 'application/pdf,image/*')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assert'", "Assert'", "Assign'", "Assert'", "Assert'", "Assign'", "Assert'", "Assert'", "Assign'", "Assert'", "Assert'", "Assert'" ]
[ "async def FUNC_14(VAR_14, VAR_15, VAR_16, VAR_17=None, VAR_18=None):...\n", "\"\"\"docstring\"\"\"\n", "def FUNC_15(VAR_37):...\n", "VAR_39, VAR_23 = VAR_37\n", "VAR_16.write(VAR_39)\n", "return VAR_23\n" ]
[ "async def get_file(destination, path, output_stream, args=None, max_size=None):...\n", "\"\"\"docstring\"\"\"\n", "def write_to(r):...\n", "data, response = r\n", "output_stream.write(data)\n", "return response\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Docstring", "FunctionDef'", "Assign'", "Expr'", "Return'" ]
[ "def __call__(self):...\n", "" ]
[ "def __call__(self):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "@require_POST...\n", "\"\"\"docstring\"\"\"\n", "VAR_142 = VAR_5.getObject('Image', VAR_26)\n", "VAR_169 = VAR_142.getSizeC()\n", "VAR_170 = {}\n", "VAR_171 = {}\n", "for VAR_318 in range(VAR_169):\n", "VAR_297 = VAR_2.POST.get('channel%d' % VAR_318, None)\n", "if VAR_2.POST.get('confirm_apply', None) is not None:\n", "if VAR_297 is not None:\n", "VAR_298 = VAR_2.POST.get('parentId', None)\n", "VAR_299 = VAR_5.setChannelNames('Image', [VAR_142.getId()], VAR_171)\n", "VAR_297 = smart_str(VAR_297)[:255]\n", "if VAR_298 is not None:\n", "VAR_172 = {'channelNames': VAR_170}\n", "VAR_170['channel%d' % VAR_318] = VAR_297\n", "VAR_255 = VAR_298.split('-')[0].title()\n", "if VAR_299:\n", "VAR_171[VAR_318 + 1] = VAR_297\n", "VAR_359 = VAR_241(VAR_298.split('-')[1])\n", "VAR_172['imageCount'] = VAR_299['imageCount']\n", "return {'error': 'No parent found to apply Channel Names'}\n", "VAR_299 = VAR_5.setChannelNames(VAR_255, [VAR_359], VAR_171, channelCount=sizeC\n )\n", "VAR_172['updateCount'] = VAR_299['updateCount']\n", "return VAR_172\n" ]
[ "@require_POST...\n", "\"\"\"docstring\"\"\"\n", "image = conn.getObject('Image', imageId)\n", "sizeC = image.getSizeC()\n", "channelNames = {}\n", "nameDict = {}\n", "for i in range(sizeC):\n", "cname = request.POST.get('channel%d' % i, None)\n", "if request.POST.get('confirm_apply', None) is not None:\n", "if cname is not None:\n", "parentId = request.POST.get('parentId', None)\n", "counts = conn.setChannelNames('Image', [image.getId()], nameDict)\n", "cname = smart_str(cname)[:255]\n", "if parentId is not None:\n", "rv = {'channelNames': channelNames}\n", "channelNames['channel%d' % i] = cname\n", "ptype = parentId.split('-')[0].title()\n", "if counts:\n", "nameDict[i + 1] = cname\n", "pid = long(parentId.split('-')[1])\n", "rv['imageCount'] = counts['imageCount']\n", "return {'error': 'No parent found to apply Channel Names'}\n", "counts = conn.setChannelNames(ptype, [pid], nameDict, channelCount=sizeC)\n", "rv['updateCount'] = counts['updateCount']\n", "return rv\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Return'", "Assign'", "Assign'", "Return'" ]
[ "@pytest.mark.not_frozen...\n", "\"\"\"docstring\"\"\"\n", "VAR_11 = ['-m', 'qutebrowser', '--version'] + FUNC_0(VAR_4.config)\n", "VAR_19 = QProcess()\n", "VAR_19.setProcessChannelMode(QProcess.SeparateChannels)\n", "VAR_19.start(sys.executable, VAR_11)\n", "VAR_20 = VAR_19.waitForStarted(2000)\n", "assert VAR_20\n", "VAR_20 = VAR_19.waitForFinished(10000)\n", "VAR_21 = bytes(VAR_19.readAllStandardOutput()).decode('utf-8')\n", "print(VAR_21)\n", "VAR_22 = bytes(VAR_19.readAllStandardError()).decode('utf-8')\n", "print(VAR_22)\n", "assert VAR_20\n", "assert VAR_19.exitStatus() == QProcess.NormalExit\n", "assert re.search('^qutebrowser\\\\s+v\\\\d+(\\\\.\\\\d+)', VAR_21) is not None\n" ]
[ "@pytest.mark.not_frozen...\n", "\"\"\"docstring\"\"\"\n", "args = ['-m', 'qutebrowser', '--version'] + _base_args(request.config)\n", "proc = QProcess()\n", "proc.setProcessChannelMode(QProcess.SeparateChannels)\n", "proc.start(sys.executable, args)\n", "ok = proc.waitForStarted(2000)\n", "assert ok\n", "ok = proc.waitForFinished(10000)\n", "stdout = bytes(proc.readAllStandardOutput()).decode('utf-8')\n", "print(stdout)\n", "stderr = bytes(proc.readAllStandardError()).decode('utf-8')\n", "print(stderr)\n", "assert ok\n", "assert proc.exitStatus() == QProcess.NormalExit\n", "assert re.search('^qutebrowser\\\\s+v\\\\d+(\\\\.\\\\d+)', stdout) is not None\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assert'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assert'", "Assert'", "Assert'" ]
[ "def FUNC_69(self):...\n", "if not self.tokens:\n", "return None\n", "self.token = self.tokens.pop()\n", "return self.token\n" ]
[ "def pop(self):...\n", "if not self.tokens:\n", "return None\n", "self.token = self.tokens.pop()\n", "return self.token\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "Return'" ]
[ "def FUNC_15(self):...\n", "self.assertFormfield(models.Band, 'members', widgets.FilteredSelectMultiple,\n VAR_55=['members'])\n" ]
[ "def testFilteredManyToMany(self):...\n", "self.assertFormfield(models.Band, 'members', widgets.FilteredSelectMultiple,\n filter_vertical=['members'])\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_0(*VAR_0, **VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_0:\n", "if isinstance(VAR_0[0], BaseDocument):\n", "if len(VAR_0) < 2 and VAR_1:\n", "return VAR_0[0]\n", "if isinstance(VAR_0[0], string_types):\n", "if 'doctype' in VAR_1:\n", "VAR_5 = get_controller(VAR_2)\n", "VAR_2 = VAR_0[0]\n", "if isinstance(VAR_0[0], dict):\n", "VAR_2 = VAR_1['doctype']\n", "if VAR_5:\n", "VAR_1 = VAR_0[0]\n", "return VAR_5(*VAR_0, **kwargs)\n" ]
[ "def get_doc(*args, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "if args:\n", "if isinstance(args[0], BaseDocument):\n", "if len(args) < 2 and kwargs:\n", "return args[0]\n", "if isinstance(args[0], string_types):\n", "if 'doctype' in kwargs:\n", "controller = get_controller(doctype)\n", "doctype = args[0]\n", "if isinstance(args[0], dict):\n", "doctype = kwargs['doctype']\n", "if controller:\n", "kwargs = args[0]\n", "return controller(*args, **kwargs)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Condition", "Condition", "Return'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Return'" ]
[ "@property...\n", "VAR_26 = ['calendar.js', 'admin/DateTimeShortcuts.js']\n", "return forms.Media(VAR_26=[static('admin/js/%s' % path) for path in js])\n" ]
[ "@property...\n", "js = ['calendar.js', 'admin/DateTimeShortcuts.js']\n", "return forms.Media(js=[static('admin/js/%s' % path) for path in js])\n" ]
[ 0, 0, 0 ]
[ "Condition", "Assign'", "Return'" ]
[ "def FUNC_0(VAR_0):...\n", "VAR_1 = VAR_0.MagicMock()\n", "VAR_2 = VAR_0.patch('openapi_python_client._get_document', return_value=\n data_dict)\n", "VAR_3 = VAR_0.MagicMock()\n", "VAR_4 = VAR_0.patch('openapi_python_client.parser.GeneratorData.from_dict',\n return_value=openapi)\n", "VAR_5 = VAR_0.patch('openapi_python_client.Project')\n", "VAR_6 = VAR_0.MagicMock()\n", "VAR_7 = VAR_0.MagicMock()\n", "from openapi_python_client import _get_project_for_url_or_path\n", "VAR_8 = VAR_10(VAR_6=url, VAR_7=path)\n", "VAR_2.assert_called_once_with(VAR_6=url, VAR_7=path)\n", "VAR_4.assert_called_once_with(VAR_1)\n", "VAR_5.assert_called_once_with(VAR_3=openapi)\n", "assert VAR_8 == VAR_5()\n" ]
[ "def test__get_project_for_url_or_path(mocker):...\n", "data_dict = mocker.MagicMock()\n", "_get_document = mocker.patch('openapi_python_client._get_document',\n return_value=data_dict)\n", "openapi = mocker.MagicMock()\n", "from_dict = mocker.patch('openapi_python_client.parser.GeneratorData.from_dict'\n , return_value=openapi)\n", "_Project = mocker.patch('openapi_python_client.Project')\n", "url = mocker.MagicMock()\n", "path = mocker.MagicMock()\n", "from openapi_python_client import _get_project_for_url_or_path\n", "project = _get_project_for_url_or_path(url=url, path=path)\n", "_get_document.assert_called_once_with(url=url, path=path)\n", "from_dict.assert_called_once_with(data_dict)\n", "_Project.assert_called_once_with(openapi=openapi)\n", "assert project == _Project()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Expr'", "Expr'", "Expr'", "Assert'" ]
[ "def FUNC_7(self, VAR_6):...\n", "VAR_13 = UploadForm(data={'file': ''}, instance=filemodel)\n", "assert VAR_13.is_valid()\n", "assert VAR_13.cleaned_data['file'] == VAR_6.file\n" ]
[ "def test_initial_fallback(self, filemodel):...\n", "form = UploadForm(data={'file': ''}, instance=filemodel)\n", "assert form.is_valid()\n", "assert form.cleaned_data['file'] == filemodel.file\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assert'", "Assert'" ]
[ "def FUNC_12(self):...\n", "VAR_5 = self.get_counts('json')\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertJSONEqual(VAR_5.content.decode(), VAR_0)\n" ]
[ "def test_counts_view_json(self):...\n", "response = self.get_counts('json')\n", "self.assertEqual(response.status_code, 200)\n", "self.assertJSONEqual(response.content.decode(), COUNTS_DATA)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_50(self):...\n", "\"\"\"docstring\"\"\"\n", "self.load_doc_before_save()\n", "self.reset_seen()\n", "if self._action in ('save', 'submit'):\n", "self.run_method('before_validate')\n", "if self.flags.ignore_validate:\n", "return\n", "if self._action == 'save':\n", "self.run_method('validate')\n", "if self._action == 'submit':\n", "self.run_method('before_save')\n", "self.run_method('validate')\n", "if self._action == 'cancel':\n", "self.set_title_field()\n", "self.run_method('before_submit')\n", "self.run_method('before_cancel')\n", "if self._action == 'update_after_submit':\n", "self.run_method('before_update_after_submit')\n" ]
[ "def run_before_save_methods(self):...\n", "\"\"\"docstring\"\"\"\n", "self.load_doc_before_save()\n", "self.reset_seen()\n", "if self._action in ('save', 'submit'):\n", "self.run_method('before_validate')\n", "if self.flags.ignore_validate:\n", "return\n", "if self._action == 'save':\n", "self.run_method('validate')\n", "if self._action == 'submit':\n", "self.run_method('before_save')\n", "self.run_method('validate')\n", "if self._action == 'cancel':\n", "self.set_title_field()\n", "self.run_method('before_submit')\n", "self.run_method('before_cancel')\n", "if self._action == 'update_after_submit':\n", "self.run_method('before_update_after_submit')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Expr'", "Condition", "Expr'", "Condition", "Return'", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Condition", "Expr'", "Expr'", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_13(self):...\n", "VAR_20 = 'inputx=file[[v1]v2'\n", "saved_model_cli.preprocess_inputs_arg_string(VAR_20)\n", "VAR_20 = 'inputx:file'\n", "saved_model_cli.preprocess_inputs_arg_string(VAR_20)\n", "VAR_20 = 'inputx:np.zeros((5))'\n", "saved_model_cli.preprocess_input_exprs_arg_string(VAR_20)\n" ]
[ "def testInputPreProcessErrorBadFormat(self):...\n", "input_str = 'inputx=file[[v1]v2'\n", "saved_model_cli.preprocess_inputs_arg_string(input_str)\n", "input_str = 'inputx:file'\n", "saved_model_cli.preprocess_inputs_arg_string(input_str)\n", "input_str = 'inputx:np.zeros((5))'\n", "saved_model_cli.preprocess_input_exprs_arg_string(input_str)\n" ]
[ 0, 0, 0, 0, 0, 0, 5 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "@wraps(VAR_3)...\n", "if current_user.role_upload() or current_user.role_admin():\n", "return VAR_3(*VAR_46, **kwargs)\n", "abort(403)\n" ]
[ "@wraps(f)...\n", "if current_user.role_upload() or current_user.role_admin():\n", "return f(*args, **kwargs)\n", "abort(403)\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Expr'" ]
[ "def FUNC_78(VAR_259, VAR_11):...\n", "VAR_300 = 0\n", "VAR_301 = 1000\n", "while VAR_300 < len(VAR_11):\n", "VAR_301 = min(VAR_301, len(VAR_11) - VAR_300)\n", "VAR_322 = VAR_259.slice(VAR_252, VAR_11[VAR_300:VAR_300 + VAR_301])\n", "VAR_300 += VAR_301\n", "yield [[col.values[row] for col in VAR_322.columns] for row in range(0, len\n (VAR_322.rowNumbers))]\n" ]
[ "def row_generator(table, h):...\n", "idx = 0\n", "batch = 1000\n", "while idx < len(h):\n", "batch = min(batch, len(h) - idx)\n", "res = table.slice(col_indices, h[idx:idx + batch])\n", "idx += batch\n", "yield [[col.values[row] for col in res.columns] for row in range(0, len(res\n .rowNumbers))]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "AugAssign'", "Expr'" ]
[ "from html import escape as html_escape\n", "import re\n", "from typing import Union\n", "from six import ensure_str\n", "from cmk.gui.utils.html import HTML\n", "VAR_0 = Union[None, int, HTML, str]\n", "VAR_1 = re.compile(\n '&lt;(/?)(h1|h2|b|tt|i|u|br(?: /)?|nobr(?: /)?|pre|a|sup|p|li|ul|ol)&gt;')\n", "VAR_2 = re.compile('(?:&quot;|&#x27;)')\n", "VAR_3 = re.compile('&lt;a href=((?:&quot;|&#x27;).*?(?:&quot;|&#x27;))&gt;')\n", "def FUNC_0(VAR_4: EscapableEntity) ->str:...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = type(VAR_4)\n", "if VAR_4 is None:\n", "return u''\n", "if VAR_7 == int:\n", "return str(VAR_4)\n", "if isinstance(VAR_4, HTML):\n", "return VAR_4.__html__()\n", "if isinstance(VAR_7, str):\n", "return html_escape(VAR_4, quote=True)\n", "if isinstance(VAR_7, bytes):\n", "return html_escape(ensure_str(VAR_4), quote=True)\n", "return html_escape(u'%s' % VAR_4, quote=True)\n" ]
[ "from html import escape as html_escape\n", "import re\n", "from typing import Union\n", "from six import ensure_str\n", "from cmk.gui.utils.html import HTML\n", "EscapableEntity = Union[None, int, HTML, str]\n", "_UNESCAPER_TEXT = re.compile(\n '&lt;(/?)(h1|h2|b|tt|i|u|br(?: /)?|nobr(?: /)?|pre|a|sup|p|li|ul|ol)&gt;')\n", "_QUOTE = re.compile('(?:&quot;|&#x27;)')\n", "_A_HREF = re.compile('&lt;a href=((?:&quot;|&#x27;).*?(?:&quot;|&#x27;))&gt;')\n", "def escape_attribute(value: EscapableEntity) ->str:...\n", "\"\"\"docstring\"\"\"\n", "attr_type = type(value)\n", "if value is None:\n", "return u''\n", "if attr_type == int:\n", "return str(value)\n", "if isinstance(value, HTML):\n", "return value.__html__()\n", "if isinstance(attr_type, str):\n", "return html_escape(value, quote=True)\n", "if isinstance(attr_type, bytes):\n", "return html_escape(ensure_str(value), quote=True)\n", "return html_escape(u'%s' % value, quote=True)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Docstring", "Assign'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_10(self):...\n", "\"\"\"docstring\"\"\"\n", "from frappe.utils import has_common\n", "VAR_17 = [d.role for d in frappe.get_all('Has Role', fields=['role'], VAR_6\n ={'parent': self.name})]\n", "VAR_18 = get_custom_allowed_roles('report', self.name)\n", "VAR_17.extend(VAR_18)\n", "if not VAR_17:\n", "return True\n", "if has_common(frappe.get_roles(), VAR_17):\n", "return True\n" ]
[ "def is_permitted(self):...\n", "\"\"\"docstring\"\"\"\n", "from frappe.utils import has_common\n", "allowed = [d.role for d in frappe.get_all('Has Role', fields=['role'],\n filters={'parent': self.name})]\n", "custom_roles = get_custom_allowed_roles('report', self.name)\n", "allowed.extend(custom_roles)\n", "if not allowed:\n", "return True\n", "if has_common(frappe.get_roles(), allowed):\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "ImportFrom'", "Assign'", "Assign'", "Expr'", "Condition", "Return'", "Condition", "Return'" ]
[ "def __call__(self, *VAR_14, **VAR_9):...\n", "assert self.request is None\n", "self.request = SynapseRequest(*VAR_14, **kwargs)\n", "return self.request\n" ]
[ "def __call__(self, *args, **kwargs):...\n", "assert self.request is None\n", "self.request = SynapseRequest(*args, **kwargs)\n", "return self.request\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assert'", "Assign'", "Return'" ]
[ "import os\n", "import attr\n", "import pytest\n", "import bs4\n", "from PyQt5.QtCore import QUrl\n", "from PyQt5.QtNetwork import QNetworkRequest\n", "from qutebrowser.browser.webkit.network import filescheme\n", "from qutebrowser.utils import urlutils, utils\n", "@pytest.mark.parametrize('create_file, create_dir, filterfunc, expected', [...\n", "\"\"\"docstring\"\"\"\n", "VAR_5 = VAR_0 / 'foo'\n", "if VAR_1 or VAR_2:\n", "VAR_5.ensure(dir=create_dir)\n", "VAR_6 = os.listdir(str(VAR_0))\n", "VAR_7 = filescheme.get_file_list(str(VAR_0), VAR_6, VAR_3)\n", "VAR_8 = {'name': 'foo', 'absname': str(VAR_5)}\n", "assert (VAR_8 in VAR_7) == VAR_4\n", "@pytest.mark.windows...\n", "assert filescheme.is_root(VAR_9) == VAR_10\n", "@pytest.mark.posix...\n", "assert filescheme.is_root(VAR_9) == VAR_10\n", "@pytest.mark.windows...\n", "assert filescheme.parent_dir(VAR_9) == VAR_11\n", "@pytest.mark.posix...\n", "assert filescheme.parent_dir(VAR_9) == VAR_11\n", "def FUNC_1(VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "return urlutils.file_url(str(VAR_5))\n" ]
[ "import os\n", "import attr\n", "import pytest\n", "import bs4\n", "from PyQt5.QtCore import QUrl\n", "from PyQt5.QtNetwork import QNetworkRequest\n", "from qutebrowser.browser.webkit.network import filescheme\n", "from qutebrowser.utils import urlutils, utils\n", "@pytest.mark.parametrize('create_file, create_dir, filterfunc, expected', [...\n", "\"\"\"docstring\"\"\"\n", "path = tmpdir / 'foo'\n", "if create_file or create_dir:\n", "path.ensure(dir=create_dir)\n", "all_files = os.listdir(str(tmpdir))\n", "result = filescheme.get_file_list(str(tmpdir), all_files, filterfunc)\n", "item = {'name': 'foo', 'absname': str(path)}\n", "assert (item in result) == expected\n", "@pytest.mark.windows...\n", "assert filescheme.is_root(directory) == is_root\n", "@pytest.mark.posix...\n", "assert filescheme.is_root(directory) == is_root\n", "@pytest.mark.windows...\n", "assert filescheme.parent_dir(directory) == parent\n", "@pytest.mark.posix...\n", "assert filescheme.parent_dir(directory) == parent\n", "def _file_url(path):...\n", "\"\"\"docstring\"\"\"\n", "return urlutils.file_url(str(path))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Condition", "Docstring", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Assert'", "Condition", "Assert'", "Condition", "Assert'", "Condition", "Assert'", "Condition", "Assert'", "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_28(self):...\n", "VAR_13 = self.cleaned_data['instructions']\n", "self.spam_check(VAR_13)\n", "return VAR_13\n" ]
[ "def clean_instructions(self):...\n", "value = self.cleaned_data['instructions']\n", "self.spam_check(value)\n", "return value\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_37(self):...\n", "\"\"\"docstring\"\"\"\n", "if self.votes > 0:\n", "return self.recommendations / self.votes * 10\n", "return _('Unrated')\n" ]
[ "def _score(self):...\n", "\"\"\"docstring\"\"\"\n", "if self.votes > 0:\n", "return self.recommendations / self.votes * 10\n", "return _('Unrated')\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Return'" ]
[ "def FUNC_59(self):...\n", "\"\"\"docstring\"\"\"\n", "return self._blitzcon.canBeAdmin()\n" ]
[ "def canBeAdmin(self):...\n", "\"\"\"docstring\"\"\"\n", "return self._blitzcon.canBeAdmin()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def __init__(self, *VAR_6, **VAR_7):...\n", "super().__init__(*VAR_6, **kwargs)\n", "self.helper = FormHelper(self)\n", "self.helper.form_tag = False\n", "self.helper.layout = Layout(Field('style'), Field('period'), Div(\n 'start_date', 'end_date', css_class='input-group input-daterange',\n data_provide='datepicker', data_date_format='yyyy-mm-dd'))\n" ]
[ "def __init__(self, *args, **kwargs):...\n", "super().__init__(*args, **kwargs)\n", "self.helper = FormHelper(self)\n", "self.helper.form_tag = False\n", "self.helper.layout = Layout(Field('style'), Field('period'), Div(\n 'start_date', 'end_date', css_class='input-group input-daterange',\n data_provide='datepicker', data_date_format='yyyy-mm-dd'))\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_3(VAR_9):...\n", "\"\"\"docstring\"\"\"\n", "print('checking package data')\n", "for pkg, data in VAR_9.items():\n", "VAR_45 = VAR_1(*pkg.split('.'))\n", "for d in data:\n", "VAR_14 = VAR_1(VAR_45, d)\n", "if '*' in VAR_14:\n", "assert len(glob(VAR_14)) > 0, 'No files match pattern %s' % VAR_14\n", "assert os.path.exists(VAR_14), 'Missing package data: %s' % VAR_14\n" ]
[ "def check_package_data(package_data):...\n", "\"\"\"docstring\"\"\"\n", "print('checking package data')\n", "for pkg, data in package_data.items():\n", "pkg_root = pjoin(*pkg.split('.'))\n", "for d in data:\n", "path = pjoin(pkg_root, d)\n", "if '*' in path:\n", "assert len(glob(path)) > 0, 'No files match pattern %s' % path\n", "assert os.path.exists(path), 'Missing package data: %s' % path\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "For", "Assign'", "For", "Assign'", "Condition", "Assert'", "Assert'" ]
[ "def FUNC_13(self):...\n", "VAR_62 = self.useroptions.forUser(self.getUserId())\n", "VAR_63 = VAR_62.getChangableOptions()\n", "if VAR_53.session['admin']:\n", "VAR_63['media'].update({'may_download': True})\n", "VAR_63['media'].update({'may_download': VAR_62.getOptionValue(\n 'media.may_download')})\n", "return VAR_63\n" ]
[ "def api_getuseroptions(self):...\n", "uo = self.useroptions.forUser(self.getUserId())\n", "uco = uo.getChangableOptions()\n", "if cherrypy.session['admin']:\n", "uco['media'].update({'may_download': True})\n", "uco['media'].update({'may_download': uo.getOptionValue('media.may_download')})\n", "return uco\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Expr'", "Expr'", "Return'" ]
[ "@VAR_0.simple_tag...\n", "\"\"\"docstring\"\"\"\n", "VAR_52 = reverse('rest_framework:login')\n", "return ''\n", "VAR_41 = \"<li><a href='{href}?next={next}'>Log in</a></li>\"\n", "VAR_41 = format_html(VAR_41, href=login_url, next=escape(request.path))\n", "return mark_safe(VAR_41)\n" ]
[ "@register.simple_tag...\n", "\"\"\"docstring\"\"\"\n", "login_url = reverse('rest_framework:login')\n", "return ''\n", "snippet = \"<li><a href='{href}?next={next}'>Log in</a></li>\"\n", "snippet = format_html(snippet, href=login_url, next=escape(request.path))\n", "return mark_safe(snippet)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Return'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_3(VAR_15):...\n", "\"\"\"docstring\"\"\"\n", "return force_text(urllib_parse.unquote_plus(force_str(VAR_15)))\n" ]
[ "def urlunquote_plus(quoted_url):...\n", "\"\"\"docstring\"\"\"\n", "return force_text(urllib_parse.unquote_plus(force_str(quoted_url)))\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def __init__(self, VAR_2: IReactorTime):...\n", "super().__init__()\n", "self.reactor = VAR_2\n", "self._pull_to_push_producer = None\n" ]
[ "def __init__(self, reactor: IReactorTime):...\n", "super().__init__()\n", "self.reactor = reactor\n", "self._pull_to_push_producer = None\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'" ]
[ "\"\"\"string\"\"\"\n", "import re\n", "import inspect\n", "__version__ = '0.8'\n", "__author__ = 'Hsiaoming Yang <[email protected]>'\n", "__all__ = ['BlockGrammar', 'BlockLexer', 'InlineGrammar', 'InlineLexer',\n 'Renderer', 'Markdown', 'markdown', 'escape']\n", "VAR_0 = re.compile('\\\\s+')\n", "VAR_1 = re.compile('\\\\W')\n", "VAR_2 = re.compile('&(?!#?\\\\w+;)')\n", "VAR_3 = re.compile('\\\\r\\\\n|\\\\r')\n", "VAR_4 = re.compile('^ *> ?', VAR_71=re.M)\n", "VAR_5 = re.compile('^ {4}', re.M)\n", "VAR_6 = ['a', 'em', 'strong', 'small', 's', 'cite', 'q', 'dfn', 'abbr',\n 'data', 'time', 'code', 'var', 'samp', 'kbd', 'sub', 'sup', 'i', 'b',\n 'u', 'mark', 'ruby', 'rt', 'rp', 'bdi', 'bdo', 'span', 'br', 'wbr',\n 'ins', 'del', 'img', 'font']\n", "VAR_7 = ['pre', 'script', 'style']\n", "VAR_8 = '(?!:/|[^\\\\w\\\\s@]*@)\\\\b'\n", "VAR_9 = '\\\\s*[a-zA-Z\\\\-](?:\\\\=(?:\"[^\"]*\"|\\'[^\\']*\\'|[^\\\\s\\'\">]+))?'\n", "VAR_10 = '(?!(?:%s)\\\\b)\\\\w+%s' % ('|'.join(VAR_6), VAR_8)\n", "VAR_11 = 'javascript:', 'vbscript:'\n", "def FUNC_0(VAR_12):...\n", "VAR_21 = VAR_12.pattern\n", "if VAR_21.startswith('^'):\n", "VAR_21 = VAR_21[1:]\n", "return VAR_21\n" ]
[ "\"\"\"\n mistune\n ~~~~~~~\n\n The fastest markdown parser in pure Python with renderer feature.\n\n :copyright: (c) 2014 - 2017 by Hsiaoming Yang.\n\"\"\"\n", "import re\n", "import inspect\n", "__version__ = '0.8'\n", "__author__ = 'Hsiaoming Yang <[email protected]>'\n", "__all__ = ['BlockGrammar', 'BlockLexer', 'InlineGrammar', 'InlineLexer',\n 'Renderer', 'Markdown', 'markdown', 'escape']\n", "_key_pattern = re.compile('\\\\s+')\n", "_nonalpha_pattern = re.compile('\\\\W')\n", "_escape_pattern = re.compile('&(?!#?\\\\w+;)')\n", "_newline_pattern = re.compile('\\\\r\\\\n|\\\\r')\n", "_block_quote_leading_pattern = re.compile('^ *> ?', flags=re.M)\n", "_block_code_leading_pattern = re.compile('^ {4}', re.M)\n", "_inline_tags = ['a', 'em', 'strong', 'small', 's', 'cite', 'q', 'dfn',\n 'abbr', 'data', 'time', 'code', 'var', 'samp', 'kbd', 'sub', 'sup', 'i',\n 'b', 'u', 'mark', 'ruby', 'rt', 'rp', 'bdi', 'bdo', 'span', 'br', 'wbr',\n 'ins', 'del', 'img', 'font']\n", "_pre_tags = ['pre', 'script', 'style']\n", "_valid_end = '(?!:/|[^\\\\w\\\\s@]*@)\\\\b'\n", "_valid_attr = '\\\\s*[a-zA-Z\\\\-](?:\\\\=(?:\"[^\"]*\"|\\'[^\\']*\\'|[^\\\\s\\'\">]+))?'\n", "_block_tag = '(?!(?:%s)\\\\b)\\\\w+%s' % ('|'.join(_inline_tags), _valid_end)\n", "_scheme_blacklist = 'javascript:', 'vbscript:'\n", "def _pure_pattern(regex):...\n", "pattern = regex.pattern\n", "if pattern.startswith('^'):\n", "pattern = pattern[1:]\n", "return pattern\n" ]
[ 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_1(VAR_1):...\n", "return open(FUNC_0(VAR_1)).read()\n" ]
[ "def read(fname):...\n", "return open(fpath(fname)).read()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2(*, VAR_0: Optional[str], VAR_1: Optional[Path]) ->Sequence[...\n", "\"\"\"docstring\"\"\"\n", "VAR_4 = FUNC_0(VAR_0=url, VAR_1=path)\n", "if isinstance(VAR_4, GeneratorError):\n", "return [VAR_4]\n", "return VAR_4.update()\n" ]
[ "def update_existing_client(*, url: Optional[str], path: Optional[Path]...\n", "\"\"\"docstring\"\"\"\n", "project = _get_project_for_url_or_path(url=url, path=path)\n", "if isinstance(project, GeneratorError):\n", "return [project]\n", "return project.update()\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_1(self, VAR_2):...\n", "return parse.urlparse(VAR_2).scheme == 'https'\n" ]
[ "def issecure(self, url):...\n", "return parse.urlparse(url).scheme == 'https'\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2():...\n", "\"\"\"docstring\"\"\"\n", "VAR_21 = [VAR_1('static', 'components'), VAR_1('static', '*', 'less'),\n VAR_1('static', '*', 'node_modules')]\n", "VAR_22 = os.getcwd()\n", "os.chdir('notebook')\n", "VAR_23 = []\n", "for parent, VAR_54, VAR_65 in os.walk('static'):\n", "if any(fnmatch(parent, pat) for pat in VAR_21):\n", "for app in ['auth', 'edit', 'notebook', 'terminal', 'tree']:\n", "VAR_54[:] = []\n", "for f in VAR_65:\n", "VAR_23.append(VAR_1('static', app, 'js', 'main.min.js'))\n", "VAR_24 = VAR_1('static', 'components')\n", "VAR_23.append(VAR_1(parent, f))\n", "VAR_23.extend([VAR_1(VAR_24, 'backbone', 'backbone-min.js'), VAR_1(VAR_24,\n 'bootstrap', 'dist', 'js', 'bootstrap.min.js'), VAR_1(VAR_24,\n 'bootstrap-tour', 'build', 'css', 'bootstrap-tour.min.css'), VAR_1(\n VAR_24, 'bootstrap-tour', 'build', 'js', 'bootstrap-tour.min.js'),\n VAR_1(VAR_24, 'create-react-class', 'index.js'), VAR_1(VAR_24,\n 'font-awesome', 'css', '*.css'), VAR_1(VAR_24, 'es6-promise', '*.js'),\n VAR_1(VAR_24, 'font-awesome', 'fonts', '*.*'), VAR_1(VAR_24,\n 'google-caja', 'html-css-sanitizer-minified.js'), VAR_1(VAR_24, 'jed',\n 'jed.js'), VAR_1(VAR_24, 'jquery', 'jquery.min.js'), VAR_1(VAR_24,\n 'jquery-typeahead', 'dist', 'jquery.typeahead.min.js'), VAR_1(VAR_24,\n 'jquery-typeahead', 'dist', 'jquery.typeahead.min.css'), VAR_1(VAR_24,\n 'jquery-ui', 'jquery-ui.min.js'), VAR_1(VAR_24, 'jquery-ui', 'themes',\n 'smoothness', 'jquery-ui.min.css'), VAR_1(VAR_24, 'jquery-ui', 'themes',\n 'smoothness', 'images', '*'), VAR_1(VAR_24, 'marked', 'lib',\n 'marked.js'), VAR_1(VAR_24, 'react', 'react.production.min.js'), VAR_1(\n VAR_24, 'react', 'react-dom.production.min.js'), VAR_1(VAR_24,\n 'requirejs', 'require.js'), VAR_1(VAR_24, 'requirejs-plugins', 'src',\n 'json.js'), VAR_1(VAR_24, 'requirejs-text', 'text.js'), VAR_1(VAR_24,\n 'underscore', 'underscore-min.js'), VAR_1(VAR_24, 'moment', 'moment.js'\n ), VAR_1(VAR_24, 'moment', 'min', '*.js'), VAR_1(VAR_24, 'xterm.js',\n 'index.js'), VAR_1(VAR_24, 'xterm.js-css', 'index.css'), VAR_1(VAR_24,\n 'xterm.js-fit', 'index.js'), VAR_1(VAR_24, 'text-encoding', 'lib',\n 'encoding.js')])\n", "for parent, VAR_54, VAR_65 in os.walk(VAR_1(VAR_24, 'codemirror')):\n", "for f in VAR_65:\n", "VAR_25 = lambda *VAR_14: VAR_1(VAR_24, 'MathJax', *VAR_14)\n", "if f.endswith(('.js', '.css')):\n", "VAR_23.extend([VAR_25('MathJax.js'), VAR_25('config',\n 'TeX-AMS-MML_HTMLorMML-full.js'), VAR_25('config', 'Safe.js')])\n", "VAR_23.append(VAR_1(parent, f))\n", "VAR_26 = []\n", "VAR_27 = VAR_25('jax', 'output')\n", "if os.path.exists(VAR_27):\n", "for output in os.listdir(VAR_27):\n", "for tree in (VAR_26 + [VAR_25('localization'), VAR_25('fonts', 'HTML-CSS',\n", "VAR_14 = VAR_1(VAR_27, output)\n", "for parent, VAR_54, VAR_65 in os.walk(tree):\n", "os.chdir(os.path.join('tests'))\n", "VAR_23.append(VAR_1(VAR_14, '*.js'))\n", "for f in VAR_65:\n", "VAR_28 = glob('*.js') + glob('*/*.js')\n", "VAR_55 = VAR_1(VAR_14, 'autoload')\n", "VAR_23.append(VAR_1(parent, f))\n", "os.chdir(VAR_22)\n", "if os.path.isdir(VAR_55):\n", "VAR_9 = {'notebook': ['templates/*'] + VAR_23, 'notebook.tests': VAR_28,\n 'notebook.bundler.tests': ['resources/*', 'resources/*/*',\n 'resources/*/*/.*'], 'notebook.services.api': ['api.yaml'],\n 'notebook.i18n': ['*/LC_MESSAGES/*.*']}\n", "VAR_26.append(VAR_55)\n", "return VAR_9\n" ]
[ "def find_package_data():...\n", "\"\"\"docstring\"\"\"\n", "excludes = [pjoin('static', 'components'), pjoin('static', '*', 'less'),\n pjoin('static', '*', 'node_modules')]\n", "cwd = os.getcwd()\n", "os.chdir('notebook')\n", "static_data = []\n", "for parent, dirs, files in os.walk('static'):\n", "if any(fnmatch(parent, pat) for pat in excludes):\n", "for app in ['auth', 'edit', 'notebook', 'terminal', 'tree']:\n", "dirs[:] = []\n", "for f in files:\n", "static_data.append(pjoin('static', app, 'js', 'main.min.js'))\n", "components = pjoin('static', 'components')\n", "static_data.append(pjoin(parent, f))\n", "static_data.extend([pjoin(components, 'backbone', 'backbone-min.js'), pjoin\n (components, 'bootstrap', 'dist', 'js', 'bootstrap.min.js'), pjoin(\n components, 'bootstrap-tour', 'build', 'css', 'bootstrap-tour.min.css'),\n pjoin(components, 'bootstrap-tour', 'build', 'js',\n 'bootstrap-tour.min.js'), pjoin(components, 'create-react-class',\n 'index.js'), pjoin(components, 'font-awesome', 'css', '*.css'), pjoin(\n components, 'es6-promise', '*.js'), pjoin(components, 'font-awesome',\n 'fonts', '*.*'), pjoin(components, 'google-caja',\n 'html-css-sanitizer-minified.js'), pjoin(components, 'jed', 'jed.js'),\n pjoin(components, 'jquery', 'jquery.min.js'), pjoin(components,\n 'jquery-typeahead', 'dist', 'jquery.typeahead.min.js'), pjoin(\n components, 'jquery-typeahead', 'dist', 'jquery.typeahead.min.css'),\n pjoin(components, 'jquery-ui', 'jquery-ui.min.js'), pjoin(components,\n 'jquery-ui', 'themes', 'smoothness', 'jquery-ui.min.css'), pjoin(\n components, 'jquery-ui', 'themes', 'smoothness', 'images', '*'), pjoin(\n components, 'marked', 'lib', 'marked.js'), pjoin(components, 'react',\n 'react.production.min.js'), pjoin(components, 'react',\n 'react-dom.production.min.js'), pjoin(components, 'requirejs',\n 'require.js'), pjoin(components, 'requirejs-plugins', 'src', 'json.js'),\n pjoin(components, 'requirejs-text', 'text.js'), pjoin(components,\n 'underscore', 'underscore-min.js'), pjoin(components, 'moment',\n 'moment.js'), pjoin(components, 'moment', 'min', '*.js'), pjoin(\n components, 'xterm.js', 'index.js'), pjoin(components, 'xterm.js-css',\n 'index.css'), pjoin(components, 'xterm.js-fit', 'index.js'), pjoin(\n components, 'text-encoding', 'lib', 'encoding.js')])\n", "for parent, dirs, files in os.walk(pjoin(components, 'codemirror')):\n", "for f in files:\n", "mj = lambda *path: pjoin(components, 'MathJax', *path)\n", "if f.endswith(('.js', '.css')):\n", "static_data.extend([mj('MathJax.js'), mj('config',\n 'TeX-AMS-MML_HTMLorMML-full.js'), mj('config', 'Safe.js')])\n", "static_data.append(pjoin(parent, f))\n", "trees = []\n", "mj_out = mj('jax', 'output')\n", "if os.path.exists(mj_out):\n", "for output in os.listdir(mj_out):\n", "for tree in (trees + [mj('localization'), mj('fonts', 'HTML-CSS',\n", "path = pjoin(mj_out, output)\n", "for parent, dirs, files in os.walk(tree):\n", "os.chdir(os.path.join('tests'))\n", "static_data.append(pjoin(path, '*.js'))\n", "for f in files:\n", "js_tests = glob('*.js') + glob('*/*.js')\n", "autoload = pjoin(path, 'autoload')\n", "static_data.append(pjoin(parent, f))\n", "os.chdir(cwd)\n", "if os.path.isdir(autoload):\n", "package_data = {'notebook': ['templates/*'] + static_data, 'notebook.tests':\n js_tests, 'notebook.bundler.tests': ['resources/*', 'resources/*/*',\n 'resources/*/*/.*'], 'notebook.services.api': ['api.yaml'],\n 'notebook.i18n': ['*/LC_MESSAGES/*.*']}\n", "trees.append(autoload)\n", "return package_data\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Assign'", "For", "For", "For", "Assign'", "For", "Expr'", "Assign'", "Expr'", "Expr'", "For", "For", "Assign'", "Condition", "Expr'", "Expr'", "Assign'", "Assign'", "Condition", "For", "For", "Assign'", "For", "Expr'", "Expr'", "For", "Assign'", "Assign'", "Expr'", "Expr'", "Condition", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_88(*VAR_79, **VAR_42):...\n", "\"\"\"docstring\"\"\"\n", "import frappe.utils.formatters\n", "return frappe.utils.formatters.format_value(*VAR_79, **kwargs)\n" ]
[ "def format_value(*args, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "import frappe.utils.formatters\n", "return frappe.utils.formatters.format_value(*args, **kwargs)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Import'", "Return'" ]
[ "def FUNC_9(self):...\n", "self._record_users()\n", "VAR_15 = {'display_name': 'new display'}\n", "self.get_success(self.handler.update_device(VAR_0, 'abc', VAR_15))\n", "VAR_10 = self.get_success(self.handler.get_device(VAR_0, 'abc'))\n", "self.assertEqual(VAR_10['display_name'], 'new display')\n" ]
[ "def test_update_device(self):...\n", "self._record_users()\n", "update = {'display_name': 'new display'}\n", "self.get_success(self.handler.update_device(user1, 'abc', update))\n", "res = self.get_success(self.handler.get_device(user1, 'abc'))\n", "self.assertEqual(res['display_name'], 'new display')\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_3(VAR_3, VAR_5=None, VAR_6=None):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_6 is None:\n", "VAR_41 = VAR_7.search(VAR_3)\n", "if VAR_41:\n", "VAR_42 = VAR_41.group(1).lower()\n", "VAR_101 = auto_decode(f.read())\n", "return VAR_3, VAR_101\n", "if VAR_42 == 'file' and VAR_5 and VAR_5.startswith('http'):\n", "if VAR_42 == 'file':\n", "VAR_44 = VAR_3.split(':', 1)[1]\n", "VAR_15 = VAR_6.get(VAR_3)\n", "VAR_44 = VAR_44.replace('\\\\', '/')\n", "VAR_15.raise_for_status()\n", "VAR_41 = VAR_8.match(VAR_44)\n", "return VAR_15.url, VAR_15.text\n", "if VAR_41:\n", "VAR_44 = VAR_41.group(1) + ':' + VAR_44.split('|', 1)[1]\n", "VAR_44 = urllib_parse.unquote(VAR_44)\n", "if VAR_44.startswith('/'):\n", "VAR_44 = '/' + VAR_44.lstrip('/')\n", "VAR_3 = VAR_44\n" ]
[ "def get_file_content(url, comes_from=None, session=None):...\n", "\"\"\"docstring\"\"\"\n", "if session is None:\n", "match = _scheme_re.search(url)\n", "if match:\n", "scheme = match.group(1).lower()\n", "content = auto_decode(f.read())\n", "return url, content\n", "if scheme == 'file' and comes_from and comes_from.startswith('http'):\n", "if scheme == 'file':\n", "path = url.split(':', 1)[1]\n", "resp = session.get(url)\n", "path = path.replace('\\\\', '/')\n", "resp.raise_for_status()\n", "match = _url_slash_drive_re.match(path)\n", "return resp.url, resp.text\n", "if match:\n", "path = match.group(1) + ':' + path.split('|', 1)[1]\n", "path = urllib_parse.unquote(path)\n", "if path.startswith('/'):\n", "path = '/' + path.lstrip('/')\n", "url = path\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Return'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Return'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'" ]
[ "def FUNC_3(self):...\n", "if self.request.protocol == 'http':\n", "if self.request.host == 'rtxcomplete.ixlab.org':\n", "self.redirect('https://' + self.request.host, permanent=False)\n" ]
[ "def prepare(self):...\n", "if self.request.protocol == 'http':\n", "if self.request.host == 'rtxcomplete.ixlab.org':\n", "self.redirect('https://' + self.request.host, permanent=False)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Condition", "Expr'" ]
[ "@VAR_2.route('/search', methods=['GET'])...\n", "VAR_17 = request.args.get('query')\n", "if VAR_17:\n", "return redirect(url_for('web.books_list', VAR_8='search', VAR_20='stored',\n VAR_56=term))\n", "return render_title_template('search.html', VAR_35='', VAR_77=0, VAR_150=_(\n u'Search'), VAR_9='search')\n" ]
[ "@web.route('/search', methods=['GET'])...\n", "term = request.args.get('query')\n", "if term:\n", "return redirect(url_for('web.books_list', data='search', sort_param=\n 'stored', query=term))\n", "return render_title_template('search.html', searchterm='', result_count=0,\n title=_(u'Search'), page='search')\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_26(self, VAR_22):...\n", "return CLASS_7(self)\n" ]
[ "def buildProtocol(self, addr):...\n", "return FakeRedisPubSubProtocol(self)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __init__(self, VAR_40):...\n", "self.is_authenticated = True\n", "self.is_active = True\n", "self.is_anonymous = False\n", "self.id = VAR_40\n" ]
[ "def __init__(self, id):...\n", "self.is_authenticated = True\n", "self.is_active = True\n", "self.is_anonymous = False\n", "self.id = id\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_13(VAR_22, VAR_23: Element) ->Union[Any, Type[NoValue]]:...\n", "VAR_23.component_instance.component_name = self.name\n", "VAR_23.component_instance.form_id = current_form_id(VAR_22)\n", "if self.url is not None:\n", "VAR_23.component_instance.url = self.url\n", "def FUNC_14():...\n", "VAR_23.component_instance.json_args = VAR_34\n", "VAR_23.component_instance.special_args.extend(VAR_21)\n", "if VAR_6 is None:\n", "FUNC_14()\n", "def FUNC_15(VAR_35, VAR_36=''):...\n", "return VAR_35\n" ]
[ "def marshall_component(dg, element: Element) ->Union[Any, Type[NoValue]]:...\n", "element.component_instance.component_name = self.name\n", "element.component_instance.form_id = current_form_id(dg)\n", "if self.url is not None:\n", "element.component_instance.url = self.url\n", "def marshall_element_args():...\n", "element.component_instance.json_args = serialized_json_args\n", "element.component_instance.special_args.extend(special_args)\n", "if key is None:\n", "marshall_element_args()\n", "def deserialize_component(ui_value, widget_id=''):...\n", "return ui_value\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "FunctionDef'", "Assign'", "Expr'", "Condition", "Expr'", "FunctionDef'", "Return'" ]
[ "@def_function.function...\n", "if VAR_56:\n", "return VAR_54 + VAR_55\n", "return VAR_54 * VAR_55\n" ]
[ "@def_function.function...\n", "if c:\n", "return a + b\n", "return a * b\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Return'" ]
[ "def FUNC_1(self):...\n", "\"\"\"docstring\"\"\"\n", "async def FUNC_4(VAR_5, VAR_6, VAR_7, VAR_8=None, VAR_9=0):...\n", "if VAR_6.startswith('/_matrix/federation/v1/get_missing_events/'):\n", "return {'events': []}\n", "self.http_client.post_json = FUNC_4\n", "VAR_3 = self.get_success(self.store.get_latest_event_ids_in_room(self.room_id)\n )[0]\n", "VAR_10 = make_event_from_dict({'room_id': self.room_id, 'sender':\n '@baduser:test.serv', 'event_id': 'one:test.serv', 'depth': 1000,\n 'origin_server_ts': 1, 'type': 'm.room.message', 'origin': 'test.serv',\n 'content': {'body': 'hewwo?'}, 'auth_events': [], 'prev_events': [(\n 'two:test.serv', {}), (VAR_3, {})]})\n", "VAR_24 = self.get_failure(self.handler.on_receive_pdu('test.serv', VAR_10,\n sent_to_us_directly=True), FederationError)\n", "self.assertEqual(VAR_24.value.args[0],\n \"ERROR 403: Your server isn't divulging details about prev_events referenced in this event.\"\n )\n", "VAR_11 = self.get_success(self.store.get_latest_event_ids_in_room(self.room_id)\n )\n", "self.assertEqual(VAR_11[0], '$join:test.serv')\n" ]
[ "def test_cant_hide_direct_ancestors(self):...\n", "\"\"\"docstring\"\"\"\n", "async def post_json(destination, path, data, headers=None, timeout=0):...\n", "if path.startswith('/_matrix/federation/v1/get_missing_events/'):\n", "return {'events': []}\n", "self.http_client.post_json = post_json\n", "most_recent = self.get_success(self.store.get_latest_event_ids_in_room(self\n .room_id))[0]\n", "lying_event = make_event_from_dict({'room_id': self.room_id, 'sender':\n '@baduser:test.serv', 'event_id': 'one:test.serv', 'depth': 1000,\n 'origin_server_ts': 1, 'type': 'm.room.message', 'origin': 'test.serv',\n 'content': {'body': 'hewwo?'}, 'auth_events': [], 'prev_events': [(\n 'two:test.serv', {}), (most_recent, {})]})\n", "failure = self.get_failure(self.handler.on_receive_pdu('test.serv',\n lying_event, sent_to_us_directly=True), FederationError)\n", "self.assertEqual(failure.value.args[0],\n \"ERROR 403: Your server isn't divulging details about prev_events referenced in this event.\"\n )\n", "extrem = self.get_success(self.store.get_latest_event_ids_in_room(self.room_id)\n )\n", "self.assertEqual(extrem[0], '$join:test.serv')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "AsyncFunctionDef'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def __init__(self, **VAR_24):...\n", "for name, value in VAR_24.items():\n", "if not hasattr(self, name):\n", "if self.inline_style is None and 'inline_style' not in VAR_24:\n", "setattr(self, name, value)\n", "self.inline_style = self.style\n" ]
[ "def __init__(self, **kw):...\n", "for name, value in kw.items():\n", "if not hasattr(self, name):\n", "if self.inline_style is None and 'inline_style' not in kw:\n", "setattr(self, name, value)\n", "self.inline_style = self.style\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Condition", "Condition", "Expr'", "Assign'" ]
[ "def FUNC_32(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_27 = widgets.AdminDateWidget()\n", "self.assertHTMLEqual(conditional_escape(VAR_27.render('test', datetime(2007,\n 12, 1, 9, 30))),\n '<input value=\"2007-12-01\" type=\"text\" class=\"vDateField\" name=\"test\" size=\"10\" />'\n )\n", "VAR_27 = widgets.AdminDateWidget(attrs={'size': 20, 'class': 'myDateField'})\n", "self.assertHTMLEqual(conditional_escape(VAR_27.render('test', datetime(2007,\n 12, 1, 9, 30))),\n '<input value=\"2007-12-01\" type=\"text\" class=\"myDateField\" name=\"test\" size=\"20\" />'\n )\n" ]
[ "def test_attrs(self):...\n", "\"\"\"docstring\"\"\"\n", "w = widgets.AdminDateWidget()\n", "self.assertHTMLEqual(conditional_escape(w.render('test', datetime(2007, 12,\n 1, 9, 30))),\n '<input value=\"2007-12-01\" type=\"text\" class=\"vDateField\" name=\"test\" size=\"10\" />'\n )\n", "w = widgets.AdminDateWidget(attrs={'size': 20, 'class': 'myDateField'})\n", "self.assertHTMLEqual(conditional_escape(w.render('test', datetime(2007, 12,\n 1, 9, 30))),\n '<input value=\"2007-12-01\" type=\"text\" class=\"myDateField\" name=\"test\" size=\"20\" />'\n )\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "@cached_property...\n", "return str(pathlib.PurePosixPath(self.upload_path, base64.urlsafe_b64encode\n (uuid.uuid4().bytes).decode('utf-8').rstrip('=\\n')))\n" ]
[ "@cached_property...\n", "return str(pathlib.PurePosixPath(self.upload_path, base64.urlsafe_b64encode\n (uuid.uuid4().bytes).decode('utf-8').rstrip('=\\n')))\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_36(self):...\n", "VAR_30 = models.Inventory.objects.create(barcode=86, name='Apple')\n", "models.Inventory.objects.create(barcode=22, name='Pear')\n", "VAR_31 = models.Inventory.objects.create(barcode=87, name='Core', parent=apple)\n", "VAR_29 = models.Inventory._meta.get_field('parent').rel\n", "VAR_27 = widgets.ForeignKeyRawIdWidget(VAR_29, widget_admin_site)\n", "self.assertHTMLEqual(VAR_27.render('test', VAR_31.parent_id, attrs={}), \n 'string' % VAR_0())\n" ]
[ "def test_relations_to_non_primary_key(self):...\n", "apple = models.Inventory.objects.create(barcode=86, name='Apple')\n", "models.Inventory.objects.create(barcode=22, name='Pear')\n", "core = models.Inventory.objects.create(barcode=87, name='Core', parent=apple)\n", "rel = models.Inventory._meta.get_field('parent').rel\n", "w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)\n", "self.assertHTMLEqual(w.render('test', core.parent_id, attrs={}), \n '<input type=\"text\" name=\"test\" value=\"86\" class=\"vForeignKeyRawIdAdminField\" /><a href=\"/widget_admin/admin_widgets/inventory/?t=barcode\" class=\"related-lookup\" id=\"lookup_id_test\" onclick=\"return showRelatedObjectLookupPopup(this);\"> <img src=\"%(ADMIN_STATIC_PREFIX)simg/selector-search.gif\" width=\"16\" height=\"16\" alt=\"Lookup\" /></a>&nbsp;<strong>Apple</strong>'\n % admin_static_prefix())\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_3(VAR_4):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_4 not in ['instructor', 'staff']:\n", "def FUNC_84(VAR_1):...\n", "def FUNC_83(*VAR_2, **VAR_3):...\n", "VAR_9 = VAR_2[0]\n", "VAR_65 = get_course_by_id(CourseKey.from_string(VAR_3['course_id']))\n", "if has_access(VAR_9.user, VAR_4, VAR_65):\n", "return VAR_1(*VAR_2, **kwargs)\n", "return HttpResponseForbidden()\n" ]
[ "def require_level(level):...\n", "\"\"\"docstring\"\"\"\n", "if level not in ['instructor', 'staff']:\n", "def decorator(func):...\n", "def wrapped(*args, **kwargs):...\n", "request = args[0]\n", "course = get_course_by_id(CourseKey.from_string(kwargs['course_id']))\n", "if has_access(request.user, level, course):\n", "return func(*args, **kwargs)\n", "return HttpResponseForbidden()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "FunctionDef'", "FunctionDef'", "Assign'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_3(VAR_10: Dependant, VAR_12: Optional[ModelField]=None, VAR_13:...\n", "assert VAR_10.call is not None, 'dependant.call must be a function'\n", "VAR_9 = asyncio.iscoroutinefunction(VAR_10.call)\n", "VAR_23 = VAR_12 and isinstance(VAR_12.field_info, params.Form)\n", "if isinstance(VAR_14, DefaultPlaceholder):\n", "VAR_52: Type[Response] = VAR_14.value\n", "VAR_52 = VAR_14\n", "async def FUNC_5(VAR_24: Request) ->Response:...\n", "VAR_69 = None\n", "VAR_53 = await solve_dependencies(VAR_24=request, VAR_10=dependant, VAR_69=\n body, VAR_22=dependency_overrides_provider)\n", "if VAR_12:\n", "VAR_11, VAR_51, VAR_54, VAR_55, VAR_56 = VAR_53\n", "if VAR_23:\n", "if VAR_51:\n", "VAR_69 = await VAR_24.form()\n", "VAR_77 = await VAR_24.body()\n", "VAR_70 = await FUNC_2(VAR_10=dependant, VAR_11=values, VAR_9=is_coroutine)\n", "return FUNC_5\n", "if VAR_77:\n", "if isinstance(VAR_70, Response):\n", "VAR_69 = await VAR_24.json()\n", "if VAR_70.background is None:\n", "VAR_71 = await FUNC_1(VAR_4=response_field, VAR_5=raw_response, VAR_6=\n response_model_include, VAR_7=response_model_exclude, VAR_8=\n response_model_by_alias, VAR_1=response_model_exclude_unset, VAR_2=\n response_model_exclude_defaults, VAR_3=response_model_exclude_none,\n VAR_9=is_coroutine)\n", "VAR_70.background = VAR_54\n", "return VAR_70\n", "VAR_72 = VAR_52(content=response_data, VAR_13=status_code, background=\n background_tasks)\n", "VAR_72.headers.raw.extend(VAR_55.headers.raw)\n", "if VAR_55.status_code:\n", "VAR_72.status_code = VAR_55.status_code\n", "return VAR_72\n" ]
[ "def get_request_handler(dependant: Dependant, body_field: Optional[...\n", "assert dependant.call is not None, 'dependant.call must be a function'\n", "is_coroutine = asyncio.iscoroutinefunction(dependant.call)\n", "is_body_form = body_field and isinstance(body_field.field_info, params.Form)\n", "if isinstance(response_class, DefaultPlaceholder):\n", "actual_response_class: Type[Response] = response_class.value\n", "actual_response_class = response_class\n", "async def app(request: Request) ->Response:...\n", "body = None\n", "solved_result = await solve_dependencies(request=request, dependant=\n dependant, body=body, dependency_overrides_provider=\n dependency_overrides_provider)\n", "if body_field:\n", "values, errors, background_tasks, sub_response, _ = solved_result\n", "if is_body_form:\n", "if errors:\n", "body = await request.form()\n", "body_bytes = await request.body()\n", "raw_response = await run_endpoint_function(dependant=dependant, values=\n values, is_coroutine=is_coroutine)\n", "return app\n", "if body_bytes:\n", "if isinstance(raw_response, Response):\n", "body = await request.json()\n", "if raw_response.background is None:\n", "response_data = await serialize_response(field=response_field,\n response_content=raw_response, include=response_model_include, exclude=\n response_model_exclude, by_alias=response_model_by_alias, exclude_unset\n =response_model_exclude_unset, exclude_defaults=\n response_model_exclude_defaults, exclude_none=\n response_model_exclude_none, is_coroutine=is_coroutine)\n", "raw_response.background = background_tasks\n", "return raw_response\n", "response = actual_response_class(content=response_data, status_code=\n status_code, background=background_tasks)\n", "response.headers.raw.extend(sub_response.headers.raw)\n", "if sub_response.status_code:\n", "response.status_code = sub_response.status_code\n", "return response\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assert'", "Assign'", "Assign'", "Condition", "AnnAssign'", "Assign'", "AsyncFunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Return'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Return'", "Assign'", "Expr'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_14():...\n", "VAR_43 = f.readline()\n", "VAR_43 = '/'.join(VAR_43.split('/')[:-2])\n", "VAR_33 = []\n", "for c in VAR_32:\n", "VAR_33.append(FUNC_10(VAR_43 + '/' + c.strip()))\n", "return VAR_33\n" ]
[ "def get_class_path():...\n", "dataset_path = f.readline()\n", "dataset_path = '/'.join(dataset_path.split('/')[:-2])\n", "class_paths = []\n", "for c in classes_list:\n", "class_paths.append(_get_obj_absolute_path(dataset_path + '/' + c.strip()))\n", "return class_paths\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "For", "Expr'", "Return'" ]
[ "def FUNC_26(self, VAR_15, VAR_13):...\n", "VAR_25 = []\n", "for row in VAR_15:\n", "if isinstance(row, (list, tuple)):\n", "return VAR_25\n", "VAR_42 = frappe._dict()\n", "if isinstance(row, dict):\n", "for VAR_43, val in enumerate(row):\n", "VAR_42 = frappe._dict(row)\n", "VAR_25.append(VAR_42)\n", "VAR_42[VAR_13[VAR_43].get('fieldname')] = val\n" ]
[ "def build_data_dict(self, result, columns):...\n", "data = []\n", "for row in result:\n", "if isinstance(row, (list, tuple)):\n", "return data\n", "_row = frappe._dict()\n", "if isinstance(row, dict):\n", "for i, val in enumerate(row):\n", "_row = frappe._dict(row)\n", "data.append(_row)\n", "_row[columns[i].get('fieldname')] = val\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Condition", "Return'", "Assign'", "Condition", "For", "Assign'", "Expr'", "Assign'" ]
[ "@VAR_2.route('/tasks')...\n", "VAR_51 = WorkerThread.getInstance().tasks\n", "VAR_88 = render_task_status(VAR_51)\n", "return render_title_template('tasks.html', VAR_63=answer, VAR_149=_(\n u'Tasks'), VAR_9='tasks')\n" ]
[ "@web.route('/tasks')...\n", "tasks = WorkerThread.getInstance().tasks\n", "answer = render_task_status(tasks)\n", "return render_title_template('tasks.html', entries=answer, title=_(u'Tasks'\n ), page='tasks')\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_2(VAR_10, VAR_11='detail', VAR_12=None, VAR_13=None, VAR_14=None,...\n", "\"\"\"docstring\"\"\"\n", "for module in get_modules():\n", "VAR_18 = module.get_model_url(VAR_10, VAR_11, VAR_14)\n", "if not VAR_18:\n", "if VAR_12 is None:\n", "return VAR_18\n", "from shuup.utils.django_compat import Resolver404, resolve\n", "if VAR_13 is not None:\n", "return VAR_18\n", "warnings.warn(\n 'Warning! `required_permissions` parameter will be deprecated in Shuup 2.0 as unused for this util.'\n , DeprecationWarning)\n", "VAR_39 = resolve(VAR_18)\n", "VAR_6 = VAR_13\n", "from shuup.admin.utils.permissions import get_permissions_for_module_url\n", "VAR_35 = get_missing_permissions(VAR_12, VAR_6)\n", "VAR_6 = get_permissions_for_module_url(module, VAR_39.url_name)\n", "if not VAR_35:\n", "return VAR_18\n", "if VAR_15:\n", "from django.core.exceptions import PermissionDenied\n", "VAR_24 = _(\n \"Can't view this page. You do not have the required permission(s): `{permissions}`.\"\n ).format(VAR_6=', '.join(missing_permissions))\n" ]
[ "def get_model_url(object, kind='detail', user=None, required_permissions=...\n", "\"\"\"docstring\"\"\"\n", "for module in get_modules():\n", "url = module.get_model_url(object, kind, shop)\n", "if not url:\n", "if user is None:\n", "return url\n", "from shuup.utils.django_compat import Resolver404, resolve\n", "if required_permissions is not None:\n", "return url\n", "warnings.warn(\n 'Warning! `required_permissions` parameter will be deprecated in Shuup 2.0 as unused for this util.'\n , DeprecationWarning)\n", "resolved = resolve(url)\n", "permissions = required_permissions\n", "from shuup.admin.utils.permissions import get_permissions_for_module_url\n", "missing_permissions = get_missing_permissions(user, permissions)\n", "permissions = get_permissions_for_module_url(module, resolved.url_name)\n", "if not missing_permissions:\n", "return url\n", "if raise_permission_denied:\n", "from django.core.exceptions import PermissionDenied\n", "reason = _(\n \"Can't view this page. You do not have the required permission(s): `{permissions}`.\"\n ).format(permissions=', '.join(missing_permissions))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "For", "Assign'", "Condition", "Condition", "Return'", "ImportFrom'", "Condition", "Return'", "Expr'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Assign'", "Condition", "Return'", "Condition", "ImportFrom'", "Assign'" ]
[ "@VAR_2.route('/advsearch', methods=['POST'])...\n", "VAR_89 = dict(request.form)\n", "VAR_90 = ['include_tag', 'exclude_tag', 'include_serie', 'exclude_serie',\n 'include_shelf', 'exclude_shelf', 'include_language',\n 'exclude_language', 'include_extension', 'exclude_extension']\n", "for VAR_110 in VAR_90:\n", "VAR_89[VAR_110] = list(request.form.getlist(VAR_110))\n", "VAR_91['query'] = json.dumps(VAR_89)\n", "return redirect(url_for('web.books_list', VAR_8='advsearch', VAR_20=\n 'stored', VAR_56=''))\n" ]
[ "@web.route('/advsearch', methods=['POST'])...\n", "values = dict(request.form)\n", "params = ['include_tag', 'exclude_tag', 'include_serie', 'exclude_serie',\n 'include_shelf', 'exclude_shelf', 'include_language',\n 'exclude_language', 'include_extension', 'exclude_extension']\n", "for param in params:\n", "values[param] = list(request.form.getlist(param))\n", "flask_session['query'] = json.dumps(values)\n", "return redirect(url_for('web.books_list', data='advsearch', sort_param=\n 'stored', query=''))\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_6(self):...\n", "self.get_success(self.inject_room_member(self.room1, self.u_alice,\n Membership.JOIN))\n", "VAR_16 = self.get_success(self.inject_room_member(self.room1, self.u_bob,\n Membership.JOIN, VAR_7={'blue': 'red'}))\n", "VAR_14 = self.get_success(self.store.get_event(VAR_16.event_id))\n", "self.assertObjectHasAttributes({'type': EventTypes.Member, 'user_id': self.\n u_bob.to_string(), 'content': {'membership': Membership.JOIN, 'blue':\n 'red'}}, VAR_14)\n", "self.assertFalse(hasattr(VAR_14, 'redacted_because'))\n", "VAR_10 = 'Because I said so'\n", "self.get_success(self.inject_redaction(self.room1, VAR_16.event_id, self.\n u_alice, VAR_10))\n", "VAR_14 = self.get_success(self.store.get_event(VAR_16.event_id))\n", "self.assertTrue('redacted_because' in VAR_14.unsigned)\n", "self.assertObjectHasAttributes({'type': EventTypes.Member, 'user_id': self.\n u_bob.to_string(), 'content': {'membership': Membership.JOIN}}, VAR_14)\n", "self.assertObjectHasAttributes({'type': EventTypes.Redaction, 'user_id':\n self.u_alice.to_string(), 'content': {'reason': VAR_10}}, VAR_14.\n unsigned['redacted_because'])\n" ]
[ "def test_redact_join(self):...\n", "self.get_success(self.inject_room_member(self.room1, self.u_alice,\n Membership.JOIN))\n", "msg_event = self.get_success(self.inject_room_member(self.room1, self.u_bob,\n Membership.JOIN, extra_content={'blue': 'red'}))\n", "event = self.get_success(self.store.get_event(msg_event.event_id))\n", "self.assertObjectHasAttributes({'type': EventTypes.Member, 'user_id': self.\n u_bob.to_string(), 'content': {'membership': Membership.JOIN, 'blue':\n 'red'}}, event)\n", "self.assertFalse(hasattr(event, 'redacted_because'))\n", "reason = 'Because I said so'\n", "self.get_success(self.inject_redaction(self.room1, msg_event.event_id, self\n .u_alice, reason))\n", "event = self.get_success(self.store.get_event(msg_event.event_id))\n", "self.assertTrue('redacted_because' in event.unsigned)\n", "self.assertObjectHasAttributes({'type': EventTypes.Member, 'user_id': self.\n u_bob.to_string(), 'content': {'membership': Membership.JOIN}}, event)\n", "self.assertObjectHasAttributes({'type': EventTypes.Redaction, 'user_id':\n self.u_alice.to_string(), 'content': {'reason': reason}}, event.\n unsigned['redacted_because'])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_19():...\n", "VAR_84 = VAR_1\n", "VAR_1 = None\n", "if VAR_84:\n", "VAR_84.close()\n", "if VAR_84.bind:\n", "VAR_84.bind.dispose()\n" ]
[ "def dispose():...\n", "old_session = session\n", "session = None\n", "if old_session:\n", "old_session.close()\n", "if old_session.bind:\n", "old_session.bind.dispose()\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Expr'" ]
[ "from __future__ import division, print_function, unicode_literals\n", "import os\n", "from datetime import datetime\n", "import json\n", "import mimetypes\n", "import chardet\n", "import copy\n", "from babel.dates import format_date\n", "from babel import Locale as LC\n", "from babel.core import UnknownLocaleError\n", "from flask import Blueprint, jsonify\n", "from flask import request, redirect, send_from_directory, make_response, flash, abort, url_for\n", "from flask import session as flask_session\n", "from flask_babel import gettext as _\n", "from flask_login import login_user, logout_user, login_required, current_user\n", "from sqlalchemy.exc import IntegrityError, InvalidRequestError, OperationalError\n", "from sqlalchemy.sql.expression import text, func, false, not_, and_, or_\n", "from sqlalchemy.orm.attributes import flag_modified\n", "from sqlalchemy.sql.functions import coalesce\n", "from .services.worker import WorkerThread\n", "from werkzeug.datastructures import Headers\n", "from werkzeug.security import generate_password_hash, check_password_hash\n", "from . import constants, logger, isoLanguages, services\n", "from . import babel, db, ub, config, get_locale, app\n", "from . import calibre_db\n", "from .gdriveutils import getFileFromEbooksFolder, do_gdrive_download\n", "from .helper import check_valid_domain, render_task_status, check_email, check_username, get_cc_columns, get_book_cover, get_download_link, send_mail, generate_random_password, send_registration_mail, check_send_to_kindle, check_read_formats, tags_filters, reset_password, valid_email\n", "from .pagination import Pagination\n", "from .redirect import redirect_back\n", "from .usermanagement import login_required_if_no_ano\n", "from .render_template import render_title_template\n", "VAR_0 = {'ldap': bool(services.ldap), 'goodreads': bool(services.\n goodreads_support), 'kobo': bool(services.kobo)}\n", "from .oauth_bb import oauth_check, register_user_with_oauth, logout_oauth_user, get_oauth_status\n", "VAR_0['oauth'] = False\n", "from functools import wraps\n", "from natsort import natsorted as sort\n", "VAR_7 = sorted\n", "@app.after_request...\n", "VAR_0['oauth'] = True\n", "VAR_100 = {}\n", "VAR_1.headers['Content-Security-Policy'\n ] = \"default-src 'self' 'unsafe-inline' 'unsafe-eval';\"\n", "if request.endpoint == 'editbook.edit_book':\n", "VAR_1.headers['Content-Security-Policy'] += 'img-src * data:'\n", "VAR_1.headers['X-Content-Type-Options'] = 'nosniff'\n", "VAR_1.headers['X-Frame-Options'] = 'SAMEORIGIN'\n", "VAR_1.headers['X-XSS-Protection'] = '1; mode=block'\n", "VAR_1.headers['Strict-Transport-Security'\n ] = 'max-age=31536000; includeSubDomains'\n", "return VAR_1\n" ]
[ "from __future__ import division, print_function, unicode_literals\n", "import os\n", "from datetime import datetime\n", "import json\n", "import mimetypes\n", "import chardet\n", "import copy\n", "from babel.dates import format_date\n", "from babel import Locale as LC\n", "from babel.core import UnknownLocaleError\n", "from flask import Blueprint, jsonify\n", "from flask import request, redirect, send_from_directory, make_response, flash, abort, url_for\n", "from flask import session as flask_session\n", "from flask_babel import gettext as _\n", "from flask_login import login_user, logout_user, login_required, current_user\n", "from sqlalchemy.exc import IntegrityError, InvalidRequestError, OperationalError\n", "from sqlalchemy.sql.expression import text, func, false, not_, and_, or_\n", "from sqlalchemy.orm.attributes import flag_modified\n", "from sqlalchemy.sql.functions import coalesce\n", "from .services.worker import WorkerThread\n", "from werkzeug.datastructures import Headers\n", "from werkzeug.security import generate_password_hash, check_password_hash\n", "from . import constants, logger, isoLanguages, services\n", "from . import babel, db, ub, config, get_locale, app\n", "from . import calibre_db\n", "from .gdriveutils import getFileFromEbooksFolder, do_gdrive_download\n", "from .helper import check_valid_domain, render_task_status, check_email, check_username, get_cc_columns, get_book_cover, get_download_link, send_mail, generate_random_password, send_registration_mail, check_send_to_kindle, check_read_formats, tags_filters, reset_password, valid_email\n", "from .pagination import Pagination\n", "from .redirect import redirect_back\n", "from .usermanagement import login_required_if_no_ano\n", "from .render_template import render_title_template\n", "feature_support = {'ldap': bool(services.ldap), 'goodreads': bool(services.\n goodreads_support), 'kobo': bool(services.kobo)}\n", "from .oauth_bb import oauth_check, register_user_with_oauth, logout_oauth_user, get_oauth_status\n", "feature_support['oauth'] = False\n", "from functools import wraps\n", "from natsort import natsorted as sort\n", "sort = sorted\n", "@app.after_request...\n", "feature_support['oauth'] = True\n", "oauth_check = {}\n", "resp.headers['Content-Security-Policy'\n ] = \"default-src 'self' 'unsafe-inline' 'unsafe-eval';\"\n", "if request.endpoint == 'editbook.edit_book':\n", "resp.headers['Content-Security-Policy'] += 'img-src * data:'\n", "resp.headers['X-Content-Type-Options'] = 'nosniff'\n", "resp.headers['X-Frame-Options'] = 'SAMEORIGIN'\n", "resp.headers['X-XSS-Protection'] = '1; mode=block'\n", "resp.headers['Strict-Transport-Security'\n ] = 'max-age=31536000; includeSubDomains'\n", "return resp\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 3, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "ImportFrom'", "Assign'", "ImportFrom'", "ImportFrom'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "AugAssign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "async def FUNC_9(self, VAR_23: str, VAR_31: Any, VAR_24: Optional[...\n", "\"\"\"docstring\"\"\"\n", "VAR_48 = encode_canonical_json(VAR_31)\n", "VAR_0.debug('HTTP POST %s -> %s', VAR_48, VAR_23)\n", "VAR_46 = {b'Content-Type': [b'application/json'], b'User-Agent': [self.\n user_agent], b'Accept': [b'application/json']}\n", "if VAR_24:\n", "VAR_46.update(VAR_24)\n", "VAR_13 = await self.request('POST', VAR_23, VAR_24=Headers(actual_headers),\n VAR_30=json_str)\n", "VAR_47 = await make_deferred_yieldable(readBody(VAR_13))\n", "if 200 <= VAR_13.code < 300:\n", "return json_decoder.decode(VAR_47.decode('utf-8'))\n" ]
[ "async def post_json_get_json(self, uri: str, post_json: Any, headers:...\n", "\"\"\"docstring\"\"\"\n", "json_str = encode_canonical_json(post_json)\n", "logger.debug('HTTP POST %s -> %s', json_str, uri)\n", "actual_headers = {b'Content-Type': [b'application/json'], b'User-Agent': [\n self.user_agent], b'Accept': [b'application/json']}\n", "if headers:\n", "actual_headers.update(headers)\n", "response = await self.request('POST', uri, headers=Headers(actual_headers),\n data=json_str)\n", "body = await make_deferred_yieldable(readBody(response))\n", "if 200 <= response.code < 300:\n", "return json_decoder.decode(body.decode('utf-8'))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Expr'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Condition", "Return'" ]
[ "def FUNC_3(self, *, VAR_4: str) ->Set[str]:...\n", "\"\"\"docstring\"\"\"\n", "VAR_8 = super().get_imports(VAR_4=prefix)\n", "VAR_8.update(self.inner_property.get_imports(VAR_4=prefix))\n", "VAR_8.add('from typing import List')\n", "if self.default is not None:\n", "VAR_8.add('from dataclasses import field')\n", "return VAR_8\n", "VAR_8.add('from typing import cast')\n" ]
[ "def get_imports(self, *, prefix: str) ->Set[str]:...\n", "\"\"\"docstring\"\"\"\n", "imports = super().get_imports(prefix=prefix)\n", "imports.update(self.inner_property.get_imports(prefix=prefix))\n", "imports.add('from typing import List')\n", "if self.default is not None:\n", "imports.add('from dataclasses import field')\n", "return imports\n", "imports.add('from typing import cast')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Expr'", "Condition", "Expr'", "Return'", "Expr'" ]
[ "def __call__(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_56 = VAR_263.request\n", "VAR_11 = VAR_56.args\n", "if not VAR_11:\n", "redirect(self.url(VAR_11='login', VAR_96=request.vars))\n", "if VAR_11[0] in self.settings.actions_disabled:\n", "if VAR_11[0] in ('login', 'logout', 'register', 'verify_email',\n", "if len(VAR_56.args) >= 2 and VAR_11[0] == 'impersonate':\n", "if VAR_11[0] == 'cas' and not self.settings.cas_provider:\n", "return getattr(self, VAR_11[0])(VAR_56.args[1])\n", "return getattr(self, VAR_11[0])()\n", "if VAR_11(1) == self.settings.cas_actions['login']:\n", "return self.cas_login(VAR_137=2)\n", "if VAR_11(1) == self.settings.cas_actions['validate']:\n", "return self.cas_validate(VAR_137=1)\n", "if VAR_11(1) == self.settings.cas_actions['servicevalidate']:\n", "return self.cas_validate(VAR_137=2, VAR_138=False)\n", "if VAR_11(1) == self.settings.cas_actions['proxyvalidate']:\n", "return self.cas_validate(VAR_137=2, VAR_138=True)\n", "if VAR_11(1) == 'p3' and VAR_11(2) == self.settings.cas_actions[\n", "return self.cas_validate(VAR_137=3, VAR_138=False)\n", "if VAR_11(1) == 'p3' and VAR_11(2) == self.settings.cas_actions['proxyvalidate'\n", "return self.cas_validate(VAR_137=3, VAR_138=True)\n", "if VAR_11(1) == self.settings.cas_actions['logout']:\n", "return self.logout(VAR_112=request.vars.service or DEFAULT)\n" ]
[ "def __call__(self):...\n", "\"\"\"docstring\"\"\"\n", "request = current.request\n", "args = request.args\n", "if not args:\n", "redirect(self.url(args='login', vars=request.vars))\n", "if args[0] in self.settings.actions_disabled:\n", "if args[0] in ('login', 'logout', 'register', 'verify_email',\n", "if len(request.args) >= 2 and args[0] == 'impersonate':\n", "if args[0] == 'cas' and not self.settings.cas_provider:\n", "return getattr(self, args[0])(request.args[1])\n", "return getattr(self, args[0])()\n", "if args(1) == self.settings.cas_actions['login']:\n", "return self.cas_login(version=2)\n", "if args(1) == self.settings.cas_actions['validate']:\n", "return self.cas_validate(version=1)\n", "if args(1) == self.settings.cas_actions['servicevalidate']:\n", "return self.cas_validate(version=2, proxy=False)\n", "if args(1) == self.settings.cas_actions['proxyvalidate']:\n", "return self.cas_validate(version=2, proxy=True)\n", "if args(1) == 'p3' and args(2) == self.settings.cas_actions['servicevalidate']:\n", "return self.cas_validate(version=3, proxy=False)\n", "if args(1) == 'p3' and args(2) == self.settings.cas_actions['proxyvalidate']:\n", "return self.cas_validate(version=3, proxy=True)\n", "if args(1) == self.settings.cas_actions['logout']:\n", "return self.logout(next=request.vars.service or DEFAULT)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Condition", "Condition", "Condition", "Return'", "Return'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'" ]
[ "def FUNC_45(VAR_136):...\n", "VAR_28 = VAR_18[VAR_117]['entries'][VAR_136]\n", "VAR_140 = FUNC_44(VAR_28, 'order', VAR_122=None)\n", "return VAR_140 is None, sv(VAR_140), sv(VAR_134(VAR_28, VAR_135))\n" ]
[ "def key_func(x):...\n", "config = templates[t]['entries'][x]\n", "entry_order = config_extractor(config, 'order', default_value=None)\n", "return entry_order is None, sv(entry_order), sv(extractor(config, sort_key))\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_1(self, VAR_3, VAR_4, VAR_6):...\n", "self.user_id = self.register_user('user', 'pass')\n", "self.access_token = self.login('user', 'pass')\n", "self.other_user_id = self.register_user('otheruser', 'pass')\n", "self.other_access_token = self.login('otheruser', 'pass')\n", "self.room = self.helper.create_room_as(self.user_id, VAR_52=self.access_token)\n", "self.helper.invite(VAR_7=self.room, src=self.user_id, VAR_52=self.\n access_token, targ=self.other_user_id)\n", "self.helper.join(VAR_7=self.room, user=self.other_user_id, VAR_52=self.\n other_access_token)\n" ]
[ "def prepare(self, reactor, clock, hs):...\n", "self.user_id = self.register_user('user', 'pass')\n", "self.access_token = self.login('user', 'pass')\n", "self.other_user_id = self.register_user('otheruser', 'pass')\n", "self.other_access_token = self.login('otheruser', 'pass')\n", "self.room = self.helper.create_room_as(self.user_id, tok=self.access_token)\n", "self.helper.invite(room=self.room, src=self.user_id, tok=self.access_token,\n targ=self.other_user_id)\n", "self.helper.join(room=self.room, user=self.other_user_id, tok=self.\n other_access_token)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "@VAR_1.route('/<bfile>/help/', defaults={'page_slug': '_index'})...\n", "\"\"\"docstring\"\"\"\n", "if VAR_21 not in HELP_PAGES:\n", "abort(404)\n", "VAR_32 = markdown2.markdown_path(resource_path('help') / (VAR_21 + '.md'),\n extras=['fenced-code-blocks', 'tables', 'header-ids'])\n", "return render_template('_layout.html', active_page='help', VAR_21=page_slug,\n help_html=render_template_string(html, beancount_version=\n beancount_version, fava_version=fava_version), HELP_PAGES=HELP_PAGES)\n" ]
[ "@app.route('/<bfile>/help/', defaults={'page_slug': '_index'})...\n", "\"\"\"docstring\"\"\"\n", "if page_slug not in HELP_PAGES:\n", "abort(404)\n", "html = markdown2.markdown_path(resource_path('help') / (page_slug + '.md'),\n extras=['fenced-code-blocks', 'tables', 'header-ids'])\n", "return render_template('_layout.html', active_page='help', page_slug=\n page_slug, help_html=render_template_string(html, beancount_version=\n beancount_version, fava_version=fava_version), HELP_PAGES=HELP_PAGES)\n" ]
[ 0, 0, 0, 0, 0, 2 ]
[ "Condition", "Docstring", "Condition", "Expr'", "Assign'", "Return'" ]
[ "\"\"\"Proxy AMI-related calls from cloud controller to objectstore service.\"\"\"\n", "import binascii\n", "import os\n", "import shutil\n", "import tarfile\n", "import tempfile\n", "from xml.etree import ElementTree\n", "import boto.s3.connection\n", "import eventlet\n", "from nova import crypto\n", "import nova.db.api\n", "from nova import exception\n", "from nova import flags\n", "from nova import image\n", "from nova import log as logging\n", "from nova import utils\n", "from nova.api.ec2 import ec2utils\n", "VAR_0 = logging.getLogger('nova.image.s3')\n", "VAR_1 = flags.FLAGS\n", "flags.DEFINE_string('image_decryption_dir', '/tmp',\n 'parent dir for tempdir used for image decryption')\n", "flags.DEFINE_string('s3_access_key', 'notchecked',\n 'access key to use for s3 server for images')\n", "flags.DEFINE_string('s3_secret_key', 'notchecked',\n 'secret key to use for s3 server for images')\n", "\"\"\"Wraps an existing image service to support s3 based register.\"\"\"\n", "def __init__(self, VAR_2=None, *VAR_3, **VAR_4):...\n", "self.service = VAR_2 or VAR_9.get_default_image_service()\n", "self.service.__init__(*VAR_3, **kwargs)\n", "def FUNC_0(self, VAR_5, VAR_6):...\n", "return nova.db.api.s3_image_get(VAR_5, VAR_6)['uuid']\n" ]
[ "\"\"\"Proxy AMI-related calls from cloud controller to objectstore service.\"\"\"\n", "import binascii\n", "import os\n", "import shutil\n", "import tarfile\n", "import tempfile\n", "from xml.etree import ElementTree\n", "import boto.s3.connection\n", "import eventlet\n", "from nova import crypto\n", "import nova.db.api\n", "from nova import exception\n", "from nova import flags\n", "from nova import image\n", "from nova import log as logging\n", "from nova import utils\n", "from nova.api.ec2 import ec2utils\n", "LOG = logging.getLogger('nova.image.s3')\n", "FLAGS = flags.FLAGS\n", "flags.DEFINE_string('image_decryption_dir', '/tmp',\n 'parent dir for tempdir used for image decryption')\n", "flags.DEFINE_string('s3_access_key', 'notchecked',\n 'access key to use for s3 server for images')\n", "flags.DEFINE_string('s3_secret_key', 'notchecked',\n 'secret key to use for s3 server for images')\n", "\"\"\"Wraps an existing image service to support s3 based register.\"\"\"\n", "def __init__(self, service=None, *args, **kwargs):...\n", "self.service = service or image.get_default_image_service()\n", "self.service.__init__(*args, **kwargs)\n", "def get_image_uuid(self, context, image_id):...\n", "return nova.db.api.s3_image_get(context, image_id)['uuid']\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "Import'", "Import'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "FunctionDef'", "Return'" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "VAR_19 = VAR_2.session['connector'].server_id\n", "VAR_64 = FUNC_13(VAR_2, VAR_6, VAR_19=server_id, VAR_8=conn)\n", "if VAR_64 is None:\n", "VAR_92, VAR_16 = VAR_64\n", "VAR_60 = webgateway_cache.getImage(VAR_2, VAR_19, VAR_92, VAR_22, VAR_23)\n", "if VAR_60 is None:\n", "VAR_60 = VAR_92.renderJpeg(VAR_22, VAR_23, VAR_98=compress_quality)\n", "VAR_114 = VAR_2.GET.get('format', 'jpeg')\n", "if VAR_60 is None:\n", "VAR_61 = HttpResponse(VAR_60, content_type='image/jpeg')\n", "webgateway_cache.setImage(VAR_2, VAR_19, VAR_92, VAR_22, VAR_23, VAR_60)\n", "if 'download' in VAR_9 and VAR_9['download']:\n", "if VAR_114 == 'png':\n", "return VAR_61\n", "VAR_212 = Image.open(BytesIO(VAR_60))\n", "if VAR_114 == 'tif':\n", "VAR_272 = BytesIO()\n", "VAR_212 = Image.open(BytesIO(VAR_60))\n", "VAR_201 = VAR_92.getName()\n", "VAR_212.save(VAR_272, 'png')\n", "VAR_272 = BytesIO()\n", "VAR_201 = VAR_201.decode('utf8')\n", "VAR_201 = VAR_201.replace(',', '.').replace(' ', '_')\n", "VAR_60 = VAR_272.getvalue()\n", "VAR_212.save(VAR_272, 'tiff')\n", "VAR_61['Content-Type'] = 'application/force-download'\n", "VAR_272.close()\n", "VAR_60 = VAR_272.getvalue()\n", "VAR_61['Content-Length'] = len(VAR_60)\n", "VAR_61 = HttpResponse(VAR_60, content_type='image/png')\n", "VAR_272.close()\n", "VAR_61['Content-Disposition'] = 'attachment; filename=%s.%s' % (VAR_201,\n VAR_114)\n", "VAR_61 = HttpResponse(VAR_60, content_type='image/tiff')\n" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "server_id = request.session['connector'].server_id\n", "pi = _get_prepared_image(request, iid, server_id=server_id, conn=conn)\n", "if pi is None:\n", "img, compress_quality = pi\n", "jpeg_data = webgateway_cache.getImage(request, server_id, img, z, t)\n", "if jpeg_data is None:\n", "jpeg_data = img.renderJpeg(z, t, compression=compress_quality)\n", "format = request.GET.get('format', 'jpeg')\n", "if jpeg_data is None:\n", "rsp = HttpResponse(jpeg_data, content_type='image/jpeg')\n", "webgateway_cache.setImage(request, server_id, img, z, t, jpeg_data)\n", "if 'download' in kwargs and kwargs['download']:\n", "if format == 'png':\n", "return rsp\n", "i = Image.open(BytesIO(jpeg_data))\n", "if format == 'tif':\n", "output = BytesIO()\n", "i = Image.open(BytesIO(jpeg_data))\n", "fileName = img.getName()\n", "i.save(output, 'png')\n", "output = BytesIO()\n", "fileName = fileName.decode('utf8')\n", "fileName = fileName.replace(',', '.').replace(' ', '_')\n", "jpeg_data = output.getvalue()\n", "i.save(output, 'tiff')\n", "rsp['Content-Type'] = 'application/force-download'\n", "output.close()\n", "jpeg_data = output.getvalue()\n", "rsp['Content-Length'] = len(jpeg_data)\n", "rsp = HttpResponse(jpeg_data, content_type='image/png')\n", "output.close()\n", "rsp['Content-Disposition'] = 'attachment; filename=%s.%s' % (fileName, format)\n", "rsp = HttpResponse(jpeg_data, content_type='image/tiff')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Condition", "Return'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'" ]
[ "def FUNC_3(self, VAR_13):...\n", "\"\"\"docstring\"\"\"\n", "return VAR_13\n" ]
[ "def to_python(self, value):...\n", "\"\"\"docstring\"\"\"\n", "return value\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_34(VAR_38, VAR_39):...\n", "if VAR_38 != _(u'Unknown') and VAR_39 != _(u'Unknown'):\n", "VAR_96 = calibre_db.check_exists_book(VAR_39, VAR_38)\n", "VAR_63 = VAR_39.split('&')\n", "if VAR_96:\n", "VAR_63 = list(map(lambda it: it.strip().replace(',', '|'), VAR_63))\n", "VAR_1.info('Uploaded book probably exists in library')\n", "VAR_63 = helper.uniq(VAR_63)\n", "flash(_(\n u'Uploaded book probably exists in the library, consider to change before upload new: '\n ) + Markup(render_title_template('book_exists_flash.html', VAR_96=entry\n )), category='warning')\n", "if VAR_63 == ['']:\n", "VAR_63 = [_(u'Unknown')]\n", "VAR_65 = list()\n", "VAR_70 = None\n", "for inp in VAR_63:\n", "VAR_90 = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp\n ).first()\n", "VAR_66 = ' & '.join(VAR_65)\n", "if not VAR_90:\n", "return VAR_66, VAR_63, VAR_70\n", "if not VAR_70:\n", "if not VAR_70:\n", "VAR_70 = db.Authors(inp, helper.get_sorted_author(inp), '')\n", "VAR_113 = helper.get_sorted_author(inp)\n", "VAR_70 = VAR_90\n", "VAR_113 = VAR_90.sort\n", "calibre_db.session.add(VAR_70)\n", "VAR_65.append(VAR_113)\n", "calibre_db.session.commit()\n" ]
[ "def prepare_authors_on_upload(title, authr):...\n", "if title != _(u'Unknown') and authr != _(u'Unknown'):\n", "entry = calibre_db.check_exists_book(authr, title)\n", "input_authors = authr.split('&')\n", "if entry:\n", "input_authors = list(map(lambda it: it.strip().replace(',', '|'),\n input_authors))\n", "log.info('Uploaded book probably exists in library')\n", "input_authors = helper.uniq(input_authors)\n", "flash(_(\n u'Uploaded book probably exists in the library, consider to change before upload new: '\n ) + Markup(render_title_template('book_exists_flash.html', entry=entry)\n ), category='warning')\n", "if input_authors == ['']:\n", "input_authors = [_(u'Unknown')]\n", "sort_authors_list = list()\n", "db_author = None\n", "for inp in input_authors:\n", "stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name ==\n inp).first()\n", "sort_authors = ' & '.join(sort_authors_list)\n", "if not stored_author:\n", "return sort_authors, input_authors, db_author\n", "if not db_author:\n", "if not db_author:\n", "db_author = db.Authors(inp, helper.get_sorted_author(inp), '')\n", "sort_author = helper.get_sorted_author(inp)\n", "db_author = stored_author\n", "sort_author = stored_author.sort\n", "calibre_db.session.add(db_author)\n", "sort_authors_list.append(sort_author)\n", "calibre_db.session.commit()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Expr'", "Condition", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Condition", "Return'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_0(self, VAR_1, VAR_2, VAR_3, **VAR_4):...\n", "\"\"\"docstring\"\"\"\n", "for k in VAR_4:\n", "setattr(CLASS_25, k, VAR_4[k])\n", "VAR_16 = CLASS_25(VAR_1, admin.site)\n", "VAR_17 = VAR_16.formfield_for_dbfield(VAR_1._meta.get_field(VAR_2), request\n =None)\n", "if isinstance(VAR_17.widget, widgets.RelatedFieldWidgetWrapper):\n", "VAR_53 = VAR_17.widget.widget\n", "VAR_53 = VAR_17.widget\n", "self.assertTrue(isinstance(VAR_53, VAR_3), \n 'Wrong widget for %s.%s: expected %s, got %s' % (VAR_1.__class__.\n __name__, VAR_2, VAR_3, type(VAR_53)))\n", "return VAR_17\n" ]
[ "def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides):...\n", "\"\"\"docstring\"\"\"\n", "for k in admin_overrides:\n", "setattr(MyModelAdmin, k, admin_overrides[k])\n", "ma = MyModelAdmin(model, admin.site)\n", "ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None)\n", "if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper):\n", "widget = ff.widget.widget\n", "widget = ff.widget\n", "self.assertTrue(isinstance(widget, widgetclass), \n 'Wrong widget for %s.%s: expected %s, got %s' % (model.__class__.\n __name__, fieldname, widgetclass, type(widget)))\n", "return ff\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "For", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_2(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "VAR_5 = wikiutil.getLocalizedPage(VAR_0, 'HelpOnMacros')\n", "VAR_6 = VAR_5.get_raw_body()\n", "VAR_7 = re.compile('\\\\|\\\\|(<.*?>)?\\\\{\\\\{\\\\{' +\n '<<(?P<prototype>(?P<macro>\\\\w*).*)>>' + '\\\\}\\\\}\\\\}\\\\s*\\\\|\\\\|' +\n '[^|]*\\\\|\\\\|[^|]*\\\\|\\\\|<[^>]*>' +\n '\\\\s*(?P<help>.*?)\\\\s*\\\\|\\\\|\\\\s*(?P<example>.*?)\\\\s*(<<[^>]*>>)*\\\\s*\\\\|\\\\|$'\n , re.U | re.M)\n", "VAR_2 = {}\n", "for VAR_29 in VAR_7.finditer(VAR_6):\n", "VAR_2[VAR_29.group('macro')] = VAR_29\n", "return VAR_2\n" ]
[ "def get_macro_help(request):...\n", "\"\"\"docstring\"\"\"\n", "helppage = wikiutil.getLocalizedPage(request, 'HelpOnMacros')\n", "content = helppage.get_raw_body()\n", "macro_re = re.compile('\\\\|\\\\|(<.*?>)?\\\\{\\\\{\\\\{' +\n '<<(?P<prototype>(?P<macro>\\\\w*).*)>>' + '\\\\}\\\\}\\\\}\\\\s*\\\\|\\\\|' +\n '[^|]*\\\\|\\\\|[^|]*\\\\|\\\\|<[^>]*>' +\n '\\\\s*(?P<help>.*?)\\\\s*\\\\|\\\\|\\\\s*(?P<example>.*?)\\\\s*(<<[^>]*>>)*\\\\s*\\\\|\\\\|$'\n , re.U | re.M)\n", "help = {}\n", "for match in macro_re.finditer(content):\n", "help[match.group('macro')] = match\n", "return help\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Return'" ]
[ "def FUNC_8(VAR_20):...\n", "\"\"\"docstring\"\"\"\n", "return FUNC_7(VAR_20)\n" ]
[ "def parse_http_date_safe(date):...\n", "\"\"\"docstring\"\"\"\n", "return parse_http_date(date)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_81(VAR_62, VAR_143=True):...\n", "\"\"\"docstring\"\"\"\n", "import frappe.desk.reportview\n", "return frappe.desk.reportview.build_match_conditions(VAR_62, VAR_143=\n as_condition)\n" ]
[ "def build_match_conditions(doctype, as_condition=True):...\n", "\"\"\"docstring\"\"\"\n", "import frappe.desk.reportview\n", "return frappe.desk.reportview.build_match_conditions(doctype, as_condition=\n as_condition)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Import'", "Return'" ]
[ "def FUNC_146(self, VAR_215):...\n", "VAR_56, VAR_244, VAR_101 = VAR_263.request, VAR_263.response, self.auth.db\n", "VAR_340 = VAR_101.wiki_media(VAR_215)\n", "if VAR_340:\n", "if self.settings.manage_permissions:\n", "VAR_212 = VAR_101.wiki_page(VAR_340.wiki_page)\n", "VAR_56.args = [VAR_340.filename]\n", "if not self.can_read(VAR_212):\n", "VAR_410 = VAR_244.download(VAR_56, VAR_101)\n", "return self.not_authorized(VAR_212)\n", "VAR_263.session.forget()\n", "VAR_244.headers['Last-Modified'] = VAR_56.utcnow.strftime(\n '%a, %d %b %Y %H:%M:%S GMT')\n", "if 'Content-Disposition' in VAR_244.headers:\n", "VAR_244.headers['Pragma'] = 'cache'\n", "VAR_244.headers['Cache-Control'] = 'private'\n", "return VAR_410\n" ]
[ "def media(self, id):...\n", "request, response, db = current.request, current.response, self.auth.db\n", "media = db.wiki_media(id)\n", "if media:\n", "if self.settings.manage_permissions:\n", "page = db.wiki_page(media.wiki_page)\n", "request.args = [media.filename]\n", "if not self.can_read(page):\n", "m = response.download(request, db)\n", "return self.not_authorized(page)\n", "current.session.forget()\n", "response.headers['Last-Modified'] = request.utcnow.strftime(\n '%a, %d %b %Y %H:%M:%S GMT')\n", "if 'Content-Disposition' in response.headers:\n", "response.headers['Pragma'] = 'cache'\n", "response.headers['Cache-Control'] = 'private'\n", "return m\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Return'", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_5(self, VAR_7):...\n", "signedjson.sign.sign_json(VAR_7, self.server_name, self.key)\n" ]
[ "def sign_response(self, res):...\n", "signedjson.sign.sign_json(res, self.server_name, self.key)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "\"\"\"string\"\"\"\n", "import os\n", "import re\n", "import pipes\n", "import shutil\n", "import sys\n", "from distutils import log\n", "from distutils.cmd import Command\n", "from fnmatch import fnmatch\n", "from glob import glob\n", "from multiprocessing.pool import ThreadPool\n", "from subprocess import check_call\n", "if sys.platform == 'win32':\n", "from subprocess import list2cmdline\n", "def FUNC_10(VAR_19):...\n", "VAR_0 = os.path.isfile\n", "return ' '.join(map(pipes.quote, VAR_19))\n", "VAR_1 = os.path.join\n", "VAR_2 = os.path.dirname(os.path.abspath(__file__))\n", "VAR_3 = os.path.isdir(VAR_1(VAR_2, '.git'))\n", "def FUNC_0(VAR_4):...\n", "print('>', VAR_4)\n", "os.system(VAR_4)\n", "FUNC_18\n", "def FUNC_18(VAR_41, VAR_42, VAR_43=None):...\n", "VAR_5 = 'notebook'\n", "VAR_43 = VAR_43 or VAR_42\n", "VAR_6 = {}\n", "exec(compile(open(VAR_41).read(), VAR_41, 'exec'), VAR_42, VAR_43)\n", "FUNC_18(VAR_1(VAR_2, VAR_5, '_version.py'), VAR_6)\n", "VAR_7 = VAR_6['__version__']\n", "VAR_8 = re.compile('string')\n", "if not VAR_8.match(VAR_7):\n", "def FUNC_1():...\n", "\"\"\"docstring\"\"\"\n", "VAR_20 = []\n", "for dir, subdirs, VAR_65 in os.walk(VAR_5):\n", "VAR_44 = dir.replace(os.path.sep, '.')\n", "return VAR_20\n", "if '__init__.py' not in VAR_65:\n", "VAR_20.append(VAR_44)\n" ]
[ "\"\"\"\nThis module defines the things that are used in setup.py for building the notebook\n\nThis includes:\n\n * Functions for finding things like packages, package data, etc.\n * A function for checking dependencies.\n\"\"\"\n", "import os\n", "import re\n", "import pipes\n", "import shutil\n", "import sys\n", "from distutils import log\n", "from distutils.cmd import Command\n", "from fnmatch import fnmatch\n", "from glob import glob\n", "from multiprocessing.pool import ThreadPool\n", "from subprocess import check_call\n", "if sys.platform == 'win32':\n", "from subprocess import list2cmdline\n", "def list2cmdline(cmd_list):...\n", "isfile = os.path.isfile\n", "return ' '.join(map(pipes.quote, cmd_list))\n", "pjoin = os.path.join\n", "repo_root = os.path.dirname(os.path.abspath(__file__))\n", "is_repo = os.path.isdir(pjoin(repo_root, '.git'))\n", "def oscmd(s):...\n", "print('>', s)\n", "os.system(s)\n", "execfile\n", "def execfile(fname, globs, locs=None):...\n", "name = 'notebook'\n", "locs = locs or globs\n", "version_ns = {}\n", "exec(compile(open(fname).read(), fname, 'exec'), globs, locs)\n", "execfile(pjoin(repo_root, name, '_version.py'), version_ns)\n", "version = version_ns['__version__']\n", "loose_pep440re = re.compile(\n '^([1-9]\\\\d*!)?(0|[1-9]\\\\d*)(\\\\.(0|[1-9]\\\\d*))*((a|b|rc)(0|[1-9]\\\\d*))?(\\\\.post(0|[1-9]\\\\d*))?(\\\\.dev(0|[1-9]\\\\d*)?)?$'\n )\n", "if not loose_pep440re.match(version):\n", "def find_packages():...\n", "\"\"\"docstring\"\"\"\n", "packages = []\n", "for dir, subdirs, files in os.walk(name):\n", "package = dir.replace(os.path.sep, '.')\n", "return packages\n", "if '__init__.py' not in files:\n", "packages.append(package)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "For", "ImportFrom'", "FunctionDef'", "Assign'", "Return'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Condition", "FunctionDef'", "Docstring", "Assign'", "For", "Assign'", "Return'", "Condition", "Expr'" ]
[ "def FUNC_15(VAR_18):...\n", "\"\"\"docstring\"\"\"\n", "VAR_39 = FUNC_14(VAR_18)\n", "for VAR_68, example_list in VAR_39.items():\n", "if not isinstance(example_list, list):\n", "return VAR_39\n", "VAR_39[VAR_68] = [FUNC_16(VAR_41) for VAR_41 in example_list]\n" ]
[ "def preprocess_input_examples_arg_string(input_examples_str):...\n", "\"\"\"docstring\"\"\"\n", "input_dict = preprocess_input_exprs_arg_string(input_examples_str)\n", "for input_key, example_list in input_dict.items():\n", "if not isinstance(example_list, list):\n", "return input_dict\n", "input_dict[input_key] = [_create_example_string(example) for example in\n example_list]\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Condition", "Return'", "Assign'" ]
[ "def FUNC_15(self, VAR_44):...\n", "self.tokens.append({'type': 'block_quote_start'})\n", "VAR_45 = VAR_4.sub('', VAR_44.group(0))\n", "self.parse(VAR_45)\n", "self.tokens.append({'type': 'block_quote_end'})\n" ]
[ "def parse_block_quote(self, m):...\n", "self.tokens.append({'type': 'block_quote_start'})\n", "cap = _block_quote_leading_pattern.sub('', m.group(0))\n", "self.parse(cap)\n", "self.tokens.append({'type': 'block_quote_end'})\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_83(VAR_62, *VAR_79, **VAR_42):...\n", "\"\"\"docstring\"\"\"\n", "VAR_42['ignore_permissions'] = True\n", "if not 'limit_page_length' in VAR_42:\n", "VAR_42['limit_page_length'] = 0\n", "return FUNC_82(VAR_62, *VAR_79, **kwargs)\n" ]
[ "def get_all(doctype, *args, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "kwargs['ignore_permissions'] = True\n", "if not 'limit_page_length' in kwargs:\n", "kwargs['limit_page_length'] = 0\n", "return get_list(doctype, *args, **kwargs)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Return'" ]
[ "@VAR_1.route('/admin/book/<int:book_id>', methods=['GET', 'POST'])...\n", "VAR_39 = False\n", "calibre_db.update_title_sort(config)\n", "VAR_2.debug_or_exception(e)\n", "if VAR_32.method != 'POST':\n", "calibre_db.session.rollback()\n", "return FUNC_14(VAR_15)\n", "VAR_17 = calibre_db.get_filtered_book(VAR_15, allow_show_archived=True)\n", "if not VAR_17:\n", "flash(_(\n u'Oops! Selected book title is unavailable. File does not exist or is not accessible'\n ), category='error')\n", "VAR_36 = FUNC_25(VAR_32, VAR_17, VAR_15)\n", "return redirect(url_for('web.index'))\n", "if FUNC_26(VAR_32, VAR_17) is True:\n", "VAR_17.has_cover = 1\n", "VAR_20 = VAR_32.form.to_dict()\n", "VAR_2.debug_or_exception(ex)\n", "VAR_39 = True\n", "FUNC_30(VAR_20, VAR_36)\n", "calibre_db.session.rollback()\n", "VAR_90 = None\n", "flash(_('Error editing book, please check logfile for details'), category=\n 'error')\n", "VAR_91 = FUNC_27(VAR_17, VAR_20['book_title'])\n", "return redirect(url_for('web.show_book', VAR_15=book.id))\n", "VAR_62, VAR_92 = FUNC_28(VAR_17, VAR_20['author_name'])\n", "if VAR_92 or VAR_91:\n", "VAR_90 = VAR_17.id\n", "if config.config_use_google_drive:\n", "VAR_39 = True\n", "gdriveutils.updateGdriveCalibreFromLocal()\n", "VAR_49 = False\n", "if VAR_90:\n", "VAR_49 = helper.update_dir_stucture(VAR_90, config.config_calibre_dir,\n VAR_62[0])\n", "if not VAR_49:\n", "if 'cover_url' in VAR_20:\n", "calibre_db.session.rollback()\n", "if VAR_20['cover_url']:\n", "VAR_39 |= FUNC_18(VAR_20['series_index'], VAR_17)\n", "flash(VAR_49, category='error')\n", "if not current_user.role_upload():\n", "VAR_39 |= FUNC_19(Markup(VAR_20['description']).unescape(), VAR_17)\n", "return FUNC_14(VAR_15)\n", "return '', 403\n", "if VAR_20['cover_url'].endswith('/static/generic_cover.jpg'):\n", "VAR_13 = FUNC_31(VAR_20, VAR_17)\n", "VAR_17.has_cover = 0\n", "VAR_68, VAR_49 = helper.save_cover_from_url(VAR_20['cover_url'], VAR_17.path)\n", "VAR_112, VAR_19 = FUNC_8(VAR_13, VAR_17.identifiers, calibre_db.session)\n", "if VAR_68 is True:\n", "if VAR_19:\n", "VAR_17.has_cover = 1\n", "flash(VAR_49, category='error')\n", "flash(_('Identifiers are not Case Sensitive, Overwriting Old Identifier'),\n category='warning')\n", "VAR_39 |= VAR_112\n", "VAR_39 = True\n", "VAR_39 |= FUNC_16(VAR_20['tags'], VAR_17)\n", "VAR_39 |= FUNC_17(VAR_20['series'], VAR_17)\n", "VAR_39 |= FUNC_21(VAR_20['publisher'], VAR_17)\n", "VAR_39 |= FUNC_20(VAR_20['languages'], VAR_17)\n", "VAR_39 |= FUNC_15(VAR_20, VAR_17)\n", "VAR_39 |= FUNC_24(VAR_15, VAR_17, VAR_20)\n", "if VAR_20['pubdate']:\n", "VAR_17.pubdate = db.Books.DEFAULT_PUBDATE\n", "VAR_17.pubdate = datetime.strptime(VAR_20['pubdate'], '%Y-%m-%d')\n", "VAR_17.pubdate = db.Books.DEFAULT_PUBDATE\n", "if VAR_39:\n", "VAR_17.last_modified = datetime.utcnow()\n", "calibre_db.session.merge(VAR_17)\n", "calibre_db.session.commit()\n", "if config.config_use_google_drive:\n", "gdriveutils.updateGdriveCalibreFromLocal()\n", "if 'detail_view' in VAR_20:\n", "return redirect(url_for('web.show_book', VAR_15=book.id))\n", "flash(_('Metadata successfully updated'), category='success')\n", "return FUNC_14(VAR_15)\n" ]
[ "@editbook.route('/admin/book/<int:book_id>', methods=['GET', 'POST'])...\n", "modif_date = False\n", "calibre_db.update_title_sort(config)\n", "log.debug_or_exception(e)\n", "if request.method != 'POST':\n", "calibre_db.session.rollback()\n", "return render_edit_book(book_id)\n", "book = calibre_db.get_filtered_book(book_id, allow_show_archived=True)\n", "if not book:\n", "flash(_(\n u'Oops! Selected book title is unavailable. File does not exist or is not accessible'\n ), category='error')\n", "meta = upload_single_file(request, book, book_id)\n", "return redirect(url_for('web.index'))\n", "if upload_cover(request, book) is True:\n", "book.has_cover = 1\n", "to_save = request.form.to_dict()\n", "log.debug_or_exception(ex)\n", "modif_date = True\n", "merge_metadata(to_save, meta)\n", "calibre_db.session.rollback()\n", "edited_books_id = None\n", "flash(_('Error editing book, please check logfile for details'), category=\n 'error')\n", "title_change = handle_title_on_edit(book, to_save['book_title'])\n", "return redirect(url_for('web.show_book', book_id=book.id))\n", "input_authors, authorchange = handle_author_on_edit(book, to_save[\n 'author_name'])\n", "if authorchange or title_change:\n", "edited_books_id = book.id\n", "if config.config_use_google_drive:\n", "modif_date = True\n", "gdriveutils.updateGdriveCalibreFromLocal()\n", "error = False\n", "if edited_books_id:\n", "error = helper.update_dir_stucture(edited_books_id, config.\n config_calibre_dir, input_authors[0])\n", "if not error:\n", "if 'cover_url' in to_save:\n", "calibre_db.session.rollback()\n", "if to_save['cover_url']:\n", "modif_date |= edit_book_series_index(to_save['series_index'], book)\n", "flash(error, category='error')\n", "if not current_user.role_upload():\n", "modif_date |= edit_book_comments(Markup(to_save['description']).unescape(),\n book)\n", "return render_edit_book(book_id)\n", "return '', 403\n", "if to_save['cover_url'].endswith('/static/generic_cover.jpg'):\n", "input_identifiers = identifier_list(to_save, book)\n", "book.has_cover = 0\n", "result, error = helper.save_cover_from_url(to_save['cover_url'], book.path)\n", "modification, warning = modify_identifiers(input_identifiers, book.\n identifiers, calibre_db.session)\n", "if result is True:\n", "if warning:\n", "book.has_cover = 1\n", "flash(error, category='error')\n", "flash(_('Identifiers are not Case Sensitive, Overwriting Old Identifier'),\n category='warning')\n", "modif_date |= modification\n", "modif_date = True\n", "modif_date |= edit_book_tags(to_save['tags'], book)\n", "modif_date |= edit_book_series(to_save['series'], book)\n", "modif_date |= edit_book_publisher(to_save['publisher'], book)\n", "modif_date |= edit_book_languages(to_save['languages'], book)\n", "modif_date |= edit_book_ratings(to_save, book)\n", "modif_date |= edit_cc_data(book_id, book, to_save)\n", "if to_save['pubdate']:\n", "book.pubdate = db.Books.DEFAULT_PUBDATE\n", "book.pubdate = datetime.strptime(to_save['pubdate'], '%Y-%m-%d')\n", "book.pubdate = db.Books.DEFAULT_PUBDATE\n", "if modif_date:\n", "book.last_modified = datetime.utcnow()\n", "calibre_db.session.merge(book)\n", "calibre_db.session.commit()\n", "if config.config_use_google_drive:\n", "gdriveutils.updateGdriveCalibreFromLocal()\n", "if 'detail_view' in to_save:\n", "return redirect(url_for('web.show_book', book_id=book.id))\n", "flash(_('Metadata successfully updated'), category='success')\n", "return render_edit_book(book_id)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Expr'", "Expr'", "Condition", "Expr'", "Return'", "Assign'", "Condition", "Expr'", "Assign'", "Return'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Return'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Expr'", "Condition", "AugAssign'", "Expr'", "Condition", "AugAssign'", "Return'", "Return'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Expr'", "Expr'", "AugAssign'", "Assign'", "AugAssign'", "AugAssign'", "AugAssign'", "AugAssign'", "AugAssign'", "AugAssign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Expr'", "Condition", "Expr'", "Condition", "Return'", "Expr'", "Return'" ]
[ "def FUNC_2(self, VAR_5, VAR_7):...\n", "return nova.db.api.s3_image_create(VAR_5, VAR_7)['id']\n" ]
[ "def _create_image_id(self, context, image_uuid):...\n", "return nova.db.api.s3_image_create(context, image_uuid)['id']\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_20(VAR_41, VAR_10=False, **VAR_42):...\n", "if VAR_10 == False:\n", "VAR_10 = VAR_18.user\n", "FUNC_93('eval_js', VAR_41, VAR_10=user, **kwargs)\n" ]
[ "def emit_js(js, user=False, **kwargs):...\n", "if user == False:\n", "user = session.user\n", "publish_realtime('eval_js', js, user=user, **kwargs)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Expr'" ]