lines (sequence, lengths 1–383) | raw_lines (sequence, lengths 1–383) | label (sequence, lengths 1–383) | type (sequence, lengths 1–383)
---|---|---|---|
[
"def FUNC_40(self):...\n",
"return CLASS_2.objects.filter(VAR_60=self, status__in=(1, 2),\n assigned_to__isnull=True)\n"
] | [
"def unassigned_tickets(self):...\n",
"return Ticket.objects.filter(kbitem=self, status__in=(1, 2),\n assigned_to__isnull=True)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@wraps(VAR_3)...\n",
"if VAR_62.role_admin():\n",
"return VAR_3(*VAR_30, **kwargs)\n",
"abort(403)\n"
] | [
"@wraps(f)...\n",
"if current_user.role_admin():\n",
"return f(*args, **kwargs)\n",
"abort(403)\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Return'",
"Expr'"
] |
[
"@app.route('/i18n/<string:locale>/<string:domain>.js')...\n",
"VAR_83 = {}\n",
"VAR_84 = None\n",
"if VAR_14 != 'en':\n",
"VAR_83, VAR_84 = FUNC_24(VAR_14, VAR_31)\n",
"VAR_85 = {'messages': VAR_83, 'plural_expr': VAR_84, 'locale': VAR_14,\n 'domain': VAR_31}\n",
"from flask import Response\n",
"return Response(render_template('i18n.js.jinja2', VAR_85=catalog),\n content_type='application/x-javascript; charset=utf-8')\n"
] | [
"@app.route('/i18n/<string:locale>/<string:domain>.js')...\n",
"messages = {}\n",
"plural_expr = None\n",
"if locale != 'en':\n",
"messages, plural_expr = _get_translations(locale, domain)\n",
"catalog = {'messages': messages, 'plural_expr': plural_expr, 'locale':\n locale, 'domain': domain}\n",
"from flask import Response\n",
"return Response(render_template('i18n.js.jinja2', catalog=catalog),\n content_type='application/x-javascript; charset=utf-8')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"ImportFrom'",
"Return'"
] |
[
"def FUNC_14(self):...\n",
"VAR_21 = 'inputx=file[[v1]v2'\n",
"saved_model_cli.preprocess_inputs_arg_string(VAR_21)\n",
"VAR_21 = 'inputx:file'\n",
"saved_model_cli.preprocess_inputs_arg_string(VAR_21)\n",
"VAR_21 = 'inputx:np.zeros((5))'\n",
"saved_model_cli.preprocess_input_exprs_arg_string(VAR_21, safe=False)\n"
] | [
"def testInputPreProcessErrorBadFormat(self):...\n",
"input_str = 'inputx=file[[v1]v2'\n",
"saved_model_cli.preprocess_inputs_arg_string(input_str)\n",
"input_str = 'inputx:file'\n",
"saved_model_cli.preprocess_inputs_arg_string(input_str)\n",
"input_str = 'inputx:np.zeros((5))'\n",
"saved_model_cli.preprocess_input_exprs_arg_string(input_str, safe=False)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_10(VAR_0, VAR_1):...\n",
"self.query_handlers[VAR_0] = VAR_1\n"
] | [
"def register_query_handler(query_type, handler):...\n",
"self.query_handlers[query_type] = handler\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_53(VAR_21, VAR_29, VAR_30):...\n",
"for tag in VAR_29:\n",
"VAR_21 = VAR_21.filter(db.Books.tags.any(db.Tags.id == tag))\n",
"for tag in VAR_30:\n",
"VAR_21 = VAR_21.filter(not_(db.Books.tags.any(db.Tags.id == tag)))\n",
"return VAR_21\n"
] | [
"def adv_search_tag(q, include_tag_inputs, exclude_tag_inputs):...\n",
"for tag in include_tag_inputs:\n",
"q = q.filter(db.Books.tags.any(db.Tags.id == tag))\n",
"for tag in exclude_tag_inputs:\n",
"q = q.filter(not_(db.Books.tags.any(db.Tags.id == tag)))\n",
"return q\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Assign'",
"For",
"Assign'",
"Return'"
] |
[
"def FUNC_4(VAR_18, **VAR_19):...\n",
"if '?' in VAR_18:\n",
"VAR_18, VAR_37 = VAR_18.split('?', 1)\n",
"VAR_19 = [(key, VAR_26) for key, VAR_26 in VAR_19.items() if VAR_26 is not None\n ]\n",
"VAR_19 = dict(parse_qsl(VAR_37), **qs)\n",
"if VAR_19:\n",
"return '%s?%s' % (VAR_18, urlencode(VAR_19))\n",
"return VAR_18\n"
] | [
"def manipulate_query_string(url, **qs):...\n",
"if '?' in url:\n",
"url, current_qs = url.split('?', 1)\n",
"qs = [(key, value) for key, value in qs.items() if value is not None]\n",
"qs = dict(parse_qsl(current_qs), **qs)\n",
"if qs:\n",
"return '%s?%s' % (url, urlencode(qs))\n",
"return url\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_19 = VAR_2.session['connector'].server_id\n",
"VAR_62 = VAR_8.getRoiService().findByRoi(VAR_178(VAR_13), None, VAR_8.\n SERVICE_OPTS)\n",
"if VAR_62 is None or VAR_62.rois is None or len(VAR_62.rois) == 0:\n",
"for VAR_241 in VAR_62.rois:\n",
"VAR_36 = VAR_241.image.id.val\n",
"VAR_63 = [VAR_3 for VAR_3 in VAR_63 if VAR_3 is not None]\n",
"VAR_63 = VAR_241.copyShapes()\n",
"if len(VAR_63) == 0:\n",
"VAR_64 = FUNC_13(VAR_2, VAR_36, VAR_19=server_id, VAR_8=conn)\n",
"if VAR_64 is None:\n",
"VAR_15, VAR_16 = VAR_64\n",
"VAR_65 = None\n",
"if len(VAR_63) == 1:\n",
"VAR_65 = VAR_63[0]\n",
"VAR_182 = VAR_15.getDefaultT()\n",
"return FUNC_9(VAR_2, VAR_8, VAR_15, VAR_65, VAR_16)\n",
"VAR_183 = VAR_15.getDefaultZ()\n",
"VAR_184 = [VAR_3 for VAR_3 in VAR_63 if unwrap(VAR_3.getTheT()) is None or \n unwrap(VAR_3.getTheT()) == VAR_182]\n",
"if len(VAR_184) == 1:\n",
"VAR_65 = VAR_184[0]\n",
"VAR_184 = [VAR_3 for VAR_3 in VAR_184 if unwrap(VAR_3.getTheZ()) is None or\n unwrap(VAR_3.getTheZ()) == VAR_183]\n",
"if VAR_65 is None and len(VAR_63) > 0:\n",
"if len(VAR_184) > 0:\n",
"VAR_65 = VAR_63[0]\n",
"VAR_65 = VAR_184[0]\n"
] | [
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"server_id = request.session['connector'].server_id\n",
"result = conn.getRoiService().findByRoi(long(roiId), None, conn.SERVICE_OPTS)\n",
"if result is None or result.rois is None or len(result.rois) == 0:\n",
"for roi in result.rois:\n",
"imageId = roi.image.id.val\n",
"shapes = [s for s in shapes if s is not None]\n",
"shapes = roi.copyShapes()\n",
"if len(shapes) == 0:\n",
"pi = _get_prepared_image(request, imageId, server_id=server_id, conn=conn)\n",
"if pi is None:\n",
"image, compress_quality = pi\n",
"shape = None\n",
"if len(shapes) == 1:\n",
"shape = shapes[0]\n",
"default_t = image.getDefaultT()\n",
"return get_shape_thumbnail(request, conn, image, shape, compress_quality)\n",
"default_z = image.getDefaultZ()\n",
"def_shapes = [s for s in shapes if unwrap(s.getTheT()) is None or unwrap(s.\n getTheT()) == default_t]\n",
"if len(def_shapes) == 1:\n",
"shape = def_shapes[0]\n",
"def_shapes = [s for s in def_shapes if unwrap(s.getTheZ()) is None or \n unwrap(s.getTheZ()) == default_z]\n",
"if shape is None and len(shapes) > 0:\n",
"if len(def_shapes) > 0:\n",
"shape = shapes[0]\n",
"shape = def_shapes[0]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"For",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_74(VAR_129, VAR_42):...\n",
"if hasattr(VAR_129, 'fnargs'):\n",
"VAR_210 = VAR_129.fnargs\n",
"VAR_210, VAR_220, VAR_221, VAR_222 = inspect.getargspec(VAR_129)\n",
"VAR_210 = inspect.getfullargspec(VAR_129).args\n",
"VAR_194 = {}\n",
"VAR_220 = inspect.getfullargspec(VAR_129).varargs\n",
"for VAR_223 in VAR_42:\n",
"VAR_221 = inspect.getfullargspec(VAR_129).varkw\n",
"if VAR_223 in VAR_210 or VAR_221:\n",
"VAR_194.pop('ignore_permissions', None)\n",
"VAR_222 = inspect.getfullargspec(VAR_129).defaults\n",
"VAR_194[VAR_223] = VAR_42.get(VAR_223)\n",
"VAR_194.pop('flags', None)\n",
"return VAR_194\n"
] | [
"def get_newargs(fn, kwargs):...\n",
"if hasattr(fn, 'fnargs'):\n",
"fnargs = fn.fnargs\n",
"fnargs, varargs, varkw, defaults = inspect.getargspec(fn)\n",
"fnargs = inspect.getfullargspec(fn).args\n",
"newargs = {}\n",
"varargs = inspect.getfullargspec(fn).varargs\n",
"for a in kwargs:\n",
"varkw = inspect.getfullargspec(fn).varkw\n",
"if a in fnargs or varkw:\n",
"newargs.pop('ignore_permissions', None)\n",
"defaults = inspect.getfullargspec(fn).defaults\n",
"newargs[a] = kwargs.get(a)\n",
"newargs.pop('flags', None)\n",
"return newargs\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def __init__(self, VAR_9, *VAR_6, **VAR_7):...\n",
"super().__init__(*VAR_6, **kwargs)\n",
"self.component = VAR_9\n",
"VAR_118 = self.get_lang_objects()\n",
"self.fields['lang'].choices = VAR_118.as_choices()\n"
] | [
"def __init__(self, component, *args, **kwargs):...\n",
"super().__init__(*args, **kwargs)\n",
"self.component = component\n",
"languages = self.get_lang_objects()\n",
"self.fields['lang'].choices = languages.as_choices()\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_21(self):...\n",
"if self.direction == 'next':\n",
"self.queryset = self.queryset.order_by(desc(QueryRun.timestamp))\n",
"self.queryset = self.queryset.order_by(QueryRun.timestamp)\n"
] | [
"def order_queryset(self):...\n",
"if self.direction == 'next':\n",
"self.queryset = self.queryset.order_by(desc(QueryRun.timestamp))\n",
"self.queryset = self.queryset.order_by(QueryRun.timestamp)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'"
] |
[
"@login_required(setGroupContext=True)...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_202 = VAR_5.getScriptService()\n",
"VAR_50 = {}\n",
"VAR_49 = VAR_241(VAR_42)\n",
"VAR_73 = VAR_202.getParams(VAR_49)\n",
"if x.message and x.message.startswith('No processor available'):\n",
"VAR_73 = VAR_202.getParams(VAR_49)\n",
"VAR_174 = FUNC_78(VAR_2, VAR_5, VAR_49, VAR_50, VAR_43='Script')\n",
"VAR_43 = VAR_73.name.replace('_', ' ').replace('.py', '')\n",
"return JsonResponse(VAR_174)\n",
"VAR_0.debug('Script: run with request.POST: %s' % VAR_2.POST)\n",
"VAR_234 = 'file_annotation' in VAR_2.FILES and VAR_2.FILES['file_annotation'\n ] or None\n",
"VAR_235 = None\n",
"if VAR_234 is not None and VAR_234 != '':\n",
"VAR_104 = BaseContainer(VAR_5)\n",
"for VAR_310, VAR_320 in VAR_73.inputs.items():\n",
"VAR_235 = VAR_104.createFileAnnotations(VAR_234, [])\n",
"VAR_333 = VAR_320.prototype\n",
"if 'IDs' in VAR_50 and 'Data_Type' in VAR_50:\n",
"VAR_334 = VAR_333.__class__\n",
"VAR_335 = VAR_5.SERVICE_OPTS.getOmeroGroup()\n",
"VAR_0.debug('Running script %s with params %s' % (VAR_43, VAR_50))\n",
"VAR_174 = FUNC_78(VAR_2, VAR_5, VAR_49, VAR_50, VAR_43)\n",
"if VAR_310 == 'File_Annotation' and VAR_235 is not None:\n",
"VAR_5.SERVICE_OPTS.setOmeroGroup('-1')\n",
"return JsonResponse(VAR_174)\n",
"VAR_50[VAR_310] = VAR_334(VAR_345(VAR_235))\n",
"if VAR_334 == omero.rtypes.RBoolI:\n",
"VAR_380 = VAR_5.getObject(VAR_50['Data_Type'].val, unwrap(VAR_50['IDs'])[0])\n",
"VAR_0.debug(traceback.format_exc())\n",
"VAR_375 = VAR_310 in VAR_2.POST\n",
"if VAR_334.__name__ == 'RMapI':\n",
"VAR_381 = VAR_380.getDetails().group.id.val\n",
"VAR_5.SERVICE_OPTS.setOmeroGroup(VAR_335)\n",
"VAR_50[VAR_310] = VAR_334(VAR_375)\n",
"VAR_376 = '%s_key0' % VAR_310\n",
"if VAR_310 in VAR_2.POST:\n",
"VAR_5.SERVICE_OPTS.setOmeroGroup(VAR_381)\n",
"VAR_377 = '%s_value0' % VAR_310\n",
"if VAR_334 == omero.rtypes.RListI:\n",
"VAR_378 = 0\n",
"VAR_417 = VAR_2.POST.getlist(VAR_310)\n",
"VAR_375 = VAR_2.POST[VAR_310]\n",
"VAR_379 = {}\n",
"if len(VAR_417) == 0:\n",
"if len(VAR_375) == 0:\n",
"while VAR_376 in VAR_2.POST:\n",
"if len(VAR_417) == 1:\n",
"VAR_50[VAR_310] = VAR_334(VAR_375)\n",
"VAR_0.debug(\"Invalid entry for '%s' : %s\" % (VAR_310, VAR_375))\n",
"VAR_415 = VAR_345(VAR_2.POST[VAR_376])\n",
"if len(VAR_379) > 0:\n",
"if len(VAR_417[0]) == 0:\n",
"VAR_418 = omero.rtypes.RStringI\n",
"VAR_416 = VAR_2.POST[VAR_377]\n",
"VAR_50[VAR_310] = wrap(VAR_379)\n",
"VAR_417 = VAR_417[0].split(',')\n",
"VAR_419 = VAR_333.val\n",
"if len(VAR_415) > 0 and len(VAR_416) > 0:\n",
"if len(VAR_419) > 0:\n",
"VAR_379[VAR_345(VAR_415)] = VAR_416\n",
"VAR_378 += 1\n",
"VAR_418 = VAR_419[0].__class__\n",
"VAR_420 = []\n",
"VAR_376 = '%s_key%d' % (VAR_310, VAR_378)\n",
"if VAR_418 == int(1).__class__:\n",
"for VAR_416 in VAR_417:\n",
"VAR_377 = '%s_value%d' % (VAR_310, VAR_378)\n",
"VAR_418 = omero.rtypes.rint\n",
"if VAR_418 == VAR_241(1).__class__:\n",
"VAR_50[VAR_310] = omero.rtypes.rlist(VAR_420)\n",
"VAR_38 = VAR_418(VAR_416.strip())\n",
"VAR_0.debug(\"Invalid entry for '%s' : %s\" % (VAR_310, VAR_416))\n",
"if isinstance(VAR_38, omero.model.IObject):\n",
"VAR_418 = omero.rtypes.rlong\n",
"VAR_420.append(omero.rtypes.robject(VAR_38))\n",
"VAR_420.append(VAR_38)\n"
] | [
"@login_required(setGroupContext=True)...\n",
"\"\"\"docstring\"\"\"\n",
"scriptService = conn.getScriptService()\n",
"inputMap = {}\n",
"sId = long(scriptId)\n",
"params = scriptService.getParams(sId)\n",
"if x.message and x.message.startswith('No processor available'):\n",
"params = scriptService.getParams(sId)\n",
"rsp = run_script(request, conn, sId, inputMap, scriptName='Script')\n",
"scriptName = params.name.replace('_', ' ').replace('.py', '')\n",
"return JsonResponse(rsp)\n",
"logger.debug('Script: run with request.POST: %s' % request.POST)\n",
"fileupload = 'file_annotation' in request.FILES and request.FILES[\n 'file_annotation'] or None\n",
"fileAnnId = None\n",
"if fileupload is not None and fileupload != '':\n",
"manager = BaseContainer(conn)\n",
"for key, param in params.inputs.items():\n",
"fileAnnId = manager.createFileAnnotations(fileupload, [])\n",
"prototype = param.prototype\n",
"if 'IDs' in inputMap and 'Data_Type' in inputMap:\n",
"pclass = prototype.__class__\n",
"gid = conn.SERVICE_OPTS.getOmeroGroup()\n",
"logger.debug('Running script %s with params %s' % (scriptName, inputMap))\n",
"rsp = run_script(request, conn, sId, inputMap, scriptName)\n",
"if key == 'File_Annotation' and fileAnnId is not None:\n",
"conn.SERVICE_OPTS.setOmeroGroup('-1')\n",
"return JsonResponse(rsp)\n",
"inputMap[key] = pclass(str(fileAnnId))\n",
"if pclass == omero.rtypes.RBoolI:\n",
"firstObj = conn.getObject(inputMap['Data_Type'].val, unwrap(inputMap['IDs'])[0]\n )\n",
"logger.debug(traceback.format_exc())\n",
"value = key in request.POST\n",
"if pclass.__name__ == 'RMapI':\n",
"newGid = firstObj.getDetails().group.id.val\n",
"conn.SERVICE_OPTS.setOmeroGroup(gid)\n",
"inputMap[key] = pclass(value)\n",
"keyName = '%s_key0' % key\n",
"if key in request.POST:\n",
"conn.SERVICE_OPTS.setOmeroGroup(newGid)\n",
"valueName = '%s_value0' % key\n",
"if pclass == omero.rtypes.RListI:\n",
"row = 0\n",
"values = request.POST.getlist(key)\n",
"value = request.POST[key]\n",
"paramMap = {}\n",
"if len(values) == 0:\n",
"if len(value) == 0:\n",
"while keyName in request.POST:\n",
"if len(values) == 1:\n",
"inputMap[key] = pclass(value)\n",
"logger.debug(\"Invalid entry for '%s' : %s\" % (key, value))\n",
"k = str(request.POST[keyName])\n",
"if len(paramMap) > 0:\n",
"if len(values[0]) == 0:\n",
"listClass = omero.rtypes.RStringI\n",
"v = request.POST[valueName]\n",
"inputMap[key] = wrap(paramMap)\n",
"values = values[0].split(',')\n",
"pval = prototype.val\n",
"if len(k) > 0 and len(v) > 0:\n",
"if len(pval) > 0:\n",
"paramMap[str(k)] = v\n",
"row += 1\n",
"listClass = pval[0].__class__\n",
"valueList = []\n",
"keyName = '%s_key%d' % (key, row)\n",
"if listClass == int(1).__class__:\n",
"for v in values:\n",
"valueName = '%s_value%d' % (key, row)\n",
"listClass = omero.rtypes.rint\n",
"if listClass == long(1).__class__:\n",
"inputMap[key] = omero.rtypes.rlist(valueList)\n",
"obj = listClass(v.strip())\n",
"logger.debug(\"Invalid entry for '%s' : %s\" % (key, v))\n",
"if isinstance(obj, omero.model.IObject):\n",
"listClass = omero.rtypes.rlong\n",
"valueList.append(omero.rtypes.robject(obj))\n",
"valueList.append(obj)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"AugAssign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Expr'"
] |
[
"async def FUNC_10(VAR_16, VAR_17):...\n",
"if VAR_17 not in [u.to_string() for u in self.room_members]:\n",
"return None\n"
] | [
"async def check_user_in_room(room_id, user_id):...\n",
"if user_id not in [u.to_string() for u in self.room_members]:\n",
"return None\n"
] | [
0,
0,
0
] | [
"AsyncFunctionDef'",
"For",
"Return'"
] |
[
"@staticmethod...\n",
"VAR_42.append(VAR_53)\n"
] | [
"@staticmethod...\n",
"addresses.append(address)\n"
] | [
0,
0
] | [
"Condition",
"Expr'"
] |
[
"@VAR_25.whitelist(allow_guest=True)...\n",
"return VAR_25.__version__\n"
] | [
"@frappe.whitelist(allow_guest=True)...\n",
"return frappe.__version__\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_32(VAR_87):...\n",
"VAR_86.update(to_bytes(VAR_87, encoding='utf-8', errors='replace'))\n"
] | [
"def hash_update(value):...\n",
"hash.update(to_bytes(value, encoding='utf-8', errors='replace'))\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_7(self, VAR_5) ->bool:...\n",
"return os.access(self.relative(VAR_5).sourceName, os.R_OK)\n"
] | [
"def cheaplyExists(self, relativePath) ->bool:...\n",
"return os.access(self.relative(relativePath).sourceName, os.R_OK)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_7(self):...\n",
"VAR_3 = '@foo:bar'\n",
"VAR_7 = 5000000\n",
"VAR_10 = UserPresenceState.default(VAR_3)\n",
"VAR_10 = VAR_10.copy_and_replace(VAR_10=PresenceState.ONLINE,\n last_active_ts=now - IDLE_TIMER - 1, last_user_sync_ts=now)\n",
"VAR_9 = handle_timeout(VAR_10, is_mine=True, syncing_user_ids=set(), VAR_7=now)\n",
"self.assertIsNotNone(VAR_9)\n",
"self.assertEquals(VAR_9.state, PresenceState.UNAVAILABLE)\n"
] | [
"def test_idle_timer(self):...\n",
"user_id = '@foo:bar'\n",
"now = 5000000\n",
"state = UserPresenceState.default(user_id)\n",
"state = state.copy_and_replace(state=PresenceState.ONLINE, last_active_ts=\n now - IDLE_TIMER - 1, last_user_sync_ts=now)\n",
"new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now\n )\n",
"self.assertIsNotNone(new_state)\n",
"self.assertEquals(new_state.state, PresenceState.UNAVAILABLE)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_16(VAR_25, VAR_26):...\n",
"\"\"\"docstring\"\"\"\n",
"if not VAR_26:\n",
"return False\n",
"VAR_26 = VAR_26.lower()\n",
"return VAR_26[0] == '.' and (VAR_25.endswith(VAR_26) or VAR_25 == VAR_26[1:]\n ) or VAR_26 == VAR_25\n"
] | [
"def is_same_domain(host, pattern):...\n",
"\"\"\"docstring\"\"\"\n",
"if not pattern:\n",
"return False\n",
"pattern = pattern.lower()\n",
"return pattern[0] == '.' and (host.endswith(pattern) or host == pattern[1:]\n ) or pattern == host\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Return'"
] |
[
"def FUNC_43(self, VAR_53, VAR_54):...\n",
"return VAR_53 * VAR_54\n"
] | [
"def multiply(self, a, b):...\n",
"return a * b\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@action('userinfoedit', lazy_gettext('Edit User'), '', 'fa-edit', multiple=...\n",
"return redirect(url_for(self.appbuilder.sm.userinfoeditview.__name__ +\n '.this_form_get'))\n"
] | [
"@action('userinfoedit', lazy_gettext('Edit User'), '', 'fa-edit', multiple=...\n",
"return redirect(url_for(self.appbuilder.sm.userinfoeditview.__name__ +\n '.this_form_get'))\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_3(VAR_8: list[str], VAR_4: int) ->list[str]:...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_20 = [VAR_8[VAR_4]]\n",
"while True:\n",
"VAR_4 += 1\n",
"VAR_33 = VAR_8[VAR_4]\n",
"return VAR_20\n",
"if not VAR_33.strip() or re.match('\\\\S', VAR_33[0]):\n",
"return VAR_20\n",
"VAR_20.append(VAR_33)\n"
] | [
"def find_entry_lines(lines: list[str], lineno: int) ->list[str]:...\n",
"\"\"\"docstring\"\"\"\n",
"entry_lines = [lines[lineno]]\n",
"while True:\n",
"lineno += 1\n",
"line = lines[lineno]\n",
"return entry_lines\n",
"if not line.strip() or re.match('\\\\S', line[0]):\n",
"return entry_lines\n",
"entry_lines.append(line)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"AugAssign'",
"Assign'",
"Return'",
"Condition",
"Return'",
"Expr'"
] |
[
"from __future__ import division\n",
"import re\n",
"import math\n",
"import cmath\n",
"import types\n",
"import string\n",
"import supybot.utils as utils\n",
"from supybot.commands import *\n",
"import supybot.utils.minisix as minisix\n",
"import supybot.callbacks as callbacks\n",
"from supybot.i18n import PluginInternationalization, internationalizeDocstring\n",
"VAR_0 = PluginInternationalization('Math')\n",
"from .local import convertcore\n",
"VAR_1 = 'int', 'base', lambda VAR_36: VAR_36 <= 36\n",
"\"\"\"Provides commands to work with math, such as a calculator and\n a unit converter.\"\"\"\n",
"@internationalizeDocstring...\n",
"\"\"\"docstring\"\"\"\n",
"if not VAR_8:\n",
"VAR_8 = str(VAR_7)\n",
"VAR_3.reply(self._convertBaseToBase(VAR_8, VAR_7, VAR_6))\n",
"VAR_3.error(VAR_0('Invalid <number> for base %s: %s') % (VAR_6, VAR_8))\n",
"VAR_9 = wrap(VAR_9, [('int', 'base', lambda VAR_36: 2 <= VAR_36 <= 36),\n optional(('int', 'base', lambda VAR_36: 2 <= VAR_36 <= 36), 10),\n additional('something')])\n",
"VAR_7 = 10\n",
"def FUNC_1(self, VAR_8, VAR_9):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_8 == 0:\n",
"return '0'\n",
"if VAR_8 < 0:\n",
"VAR_35 = True\n",
"VAR_35 = False\n",
"VAR_8 = -VAR_8\n",
"VAR_27 = []\n",
"while VAR_8 != 0:\n",
"VAR_32 = VAR_8 % VAR_9\n",
"VAR_27.reverse()\n",
"if VAR_32 >= 10:\n",
"return '-' * VAR_35 + ''.join(VAR_27)\n",
"VAR_32 = string.ascii_uppercase[VAR_32 - 10]\n",
"VAR_32 = str(VAR_32)\n",
"VAR_27.append(VAR_32)\n",
"VAR_8 = VAR_8 // VAR_9\n"
] | [
"from __future__ import division\n",
"import re\n",
"import math\n",
"import cmath\n",
"import types\n",
"import string\n",
"import supybot.utils as utils\n",
"from supybot.commands import *\n",
"import supybot.utils.minisix as minisix\n",
"import supybot.callbacks as callbacks\n",
"from supybot.i18n import PluginInternationalization, internationalizeDocstring\n",
"_ = PluginInternationalization('Math')\n",
"from .local import convertcore\n",
"baseArg = 'int', 'base', lambda i: i <= 36\n",
"\"\"\"Provides commands to work with math, such as a calculator and\n a unit converter.\"\"\"\n",
"@internationalizeDocstring...\n",
"\"\"\"docstring\"\"\"\n",
"if not number:\n",
"number = str(to)\n",
"irc.reply(self._convertBaseToBase(number, to, frm))\n",
"irc.error(_('Invalid <number> for base %s: %s') % (frm, number))\n",
"base = wrap(base, [('int', 'base', lambda i: 2 <= i <= 36), optional(('int',\n 'base', lambda i: 2 <= i <= 36), 10), additional('something')])\n",
"to = 10\n",
"def _convertDecimalToBase(self, number, base):...\n",
"\"\"\"docstring\"\"\"\n",
"if number == 0:\n",
"return '0'\n",
"if number < 0:\n",
"negative = True\n",
"negative = False\n",
"number = -number\n",
"digits = []\n",
"while number != 0:\n",
"digit = number % base\n",
"digits.reverse()\n",
"if digit >= 10:\n",
"return '-' * negative + ''.join(digits)\n",
"digit = string.ascii_uppercase[digit - 10]\n",
"digit = str(digit)\n",
"digits.append(digit)\n",
"number = number // base\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"Import'",
"Import'",
"ImportFrom'",
"Assign'",
"ImportFrom'",
"Assign'",
"Expr'",
"Condition",
"Docstring",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Assign'"
] |
[
"def FUNC_70(self):...\n",
"VAR_76 = getattr(self, '_liked_by', None)\n",
"if VAR_76:\n",
"return json.loads(VAR_76)\n",
"return []\n"
] | [
"def get_liked_by(self):...\n",
"liked_by = getattr(self, '_liked_by', None)\n",
"if liked_by:\n",
"return json.loads(liked_by)\n",
"return []\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"@wraps(VAR_3)...\n",
"VAR_12 = FUNC_2()\n",
"if VAR_12 is not None:\n",
"VAR_16 = ub.session.query(ub.User).join(ub.RemoteAuthToken).filter(ub.\n RemoteAuthToken.auth_token == VAR_12).filter(ub.RemoteAuthToken.\n token_type == 1).first()\n",
"if VAR_16 is not None:\n",
"login_user(VAR_16)\n",
"VAR_0.debug('Received Kobo request without a recognizable auth token.')\n",
"return VAR_3(*VAR_9, **kwargs)\n",
"return abort(401)\n"
] | [
"@wraps(f)...\n",
"auth_token = get_auth_token()\n",
"if auth_token is not None:\n",
"user = ub.session.query(ub.User).join(ub.RemoteAuthToken).filter(ub.\n RemoteAuthToken.auth_token == auth_token).filter(ub.RemoteAuthToken.\n token_type == 1).first()\n",
"if user is not None:\n",
"login_user(user)\n",
"log.debug('Received Kobo request without a recognizable auth token.')\n",
"return f(*args, **kwargs)\n",
"return abort(401)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Return'",
"Return'"
] |
[
"def FUNC_10(VAR_10):...\n",
"return VAR_10.url.lower().startswith('file:')\n"
] | [
"def is_file_url(link):...\n",
"return link.url.lower().startswith('file:')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_162(VAR_10):...\n",
"if VAR_187:\n",
"self.amfrpc3_procedures[VAR_187 + '.' + VAR_10.__name__] = VAR_10\n",
"self.amfrpc3_procedures[VAR_10.__name__] = VAR_10\n",
"return VAR_10\n"
] | [
"def _amfrpc3(f):...\n",
"if domain:\n",
"self.amfrpc3_procedures[domain + '.' + f.__name__] = f\n",
"self.amfrpc3_procedures[f.__name__] = f\n",
"return f\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_8(self, VAR_44):...\n",
"VAR_54 = VAR_5.sub('', VAR_44.group(0))\n",
"self.tokens.append({'type': 'code', 'lang': None, 'text': VAR_54})\n"
] | [
"def parse_block_code(self, m):...\n",
"code = _block_code_leading_pattern.sub('', m.group(0))\n",
"self.tokens.append({'type': 'code', 'lang': None, 'text': code})\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_14(self):...\n",
"VAR_5 = {'not_senders': ['@flibble:wibble']}\n",
"VAR_6 = FUNC_0(sender='@flibble:wibble', type='com.nom.nom.nom', room_id=\n '!foo:bar')\n",
"self.assertFalse(Filter(VAR_5).check(VAR_6))\n"
] | [
"def test_definition_not_senders_works_with_literals(self):...\n",
"definition = {'not_senders': ['@flibble:wibble']}\n",
"event = MockEvent(sender='@flibble:wibble', type='com.nom.nom.nom', room_id\n ='!foo:bar')\n",
"self.assertFalse(Filter(definition).check(event))\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"async def FUNC_0(self, VAR_5, VAR_6, VAR_7=False) ->None:...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = VAR_6.room_id\n",
"VAR_12 = VAR_6.event_id\n",
"VAR_0.info('handling received PDU: %s', VAR_6)\n",
"VAR_51 = await self.store.get_event(VAR_12, allow_none=True, allow_rejected\n =True)\n",
"VAR_52 = VAR_51 and (not VAR_51.internal_metadata.is_outlier() or VAR_6.\n internal_metadata.is_outlier())\n",
"if VAR_52:\n",
"VAR_0.debug('[%s %s]: Already seen pdu', VAR_11, VAR_12)\n",
"self._sanity_check_event(VAR_6)\n",
"VAR_0.warning('[%s %s] Received event failed sanity checks', VAR_11, VAR_12)\n",
"if VAR_11 in self.room_queues:\n",
"return\n",
"VAR_0.info('[%s %s] Queuing PDU from %s for now: join in progress', VAR_11,\n VAR_12, VAR_5)\n",
"VAR_53 = await self.auth.check_host_in_room(VAR_11, self.server_name)\n",
"self.room_queues[VAR_11].append((VAR_6, VAR_5))\n",
"if not VAR_53:\n",
"return\n",
"VAR_0.info(\"[%s %s] Ignoring PDU from %s as we're not in the room\", VAR_11,\n VAR_12, VAR_5)\n",
"VAR_2 = None\n",
"return None\n",
"if not VAR_6.internal_metadata.is_outlier():\n",
"VAR_9 = await self.get_min_depth_for_context(VAR_6.room_id)\n",
"await self._process_received_pdu(VAR_5, VAR_6, VAR_2=state)\n",
"VAR_0.debug('[%s %s] min_depth: %d', VAR_11, VAR_12, VAR_9)\n",
"VAR_8 = set(VAR_6.prev_event_ids())\n",
"VAR_54 = await self.store.have_events_in_timeline(VAR_8)\n",
"if VAR_9 is not None and VAR_6.depth < VAR_9:\n",
"VAR_6.internal_metadata.outlier = True\n",
"if VAR_9 is not None and VAR_6.depth > VAR_9:\n",
"if VAR_8 - VAR_54:\n",
"VAR_191 = VAR_8 - VAR_54\n",
"if VAR_7:\n",
"if VAR_7 and VAR_191:\n",
"VAR_0.warning('[%s %s] Rejecting: failed to fetch %d prev events: %s',\n VAR_11, VAR_12, len(VAR_8 - VAR_54), shortstr(VAR_8 - VAR_54))\n",
"VAR_0.info(\n 'Event %s is missing prev_events: calculating state for a backwards extremity'\n , VAR_12)\n",
"VAR_0.info('[%s %s] Acquiring room lock to fetch %d missing prev_events: %s',\n VAR_11, VAR_12, len(VAR_191), shortstr(VAR_191))\n",
"VAR_60 = {VAR_12: VAR_6}\n",
"VAR_0.info('[%s %s] Acquired room lock to fetch %d missing prev_events',\n VAR_11, VAR_12, len(VAR_191))\n",
"VAR_192 = await self.state_store.get_state_groups_ids(VAR_11, VAR_54)\n",
"VAR_0.warning(\n '[%s %s] Error attempting to resolve state at missing prev_events',\n VAR_11, VAR_12, exc_info=True)\n",
"await self._get_missing_events_for_pdu(VAR_5, VAR_6, VAR_8, VAR_9)\n",
"VAR_54 = await self.store.have_events_in_timeline(VAR_8)\n",
"VAR_193 = list(VAR_192.values())\n",
"if not VAR_8 - VAR_54:\n",
"for p in (VAR_8 - VAR_54):\n",
"VAR_0.info('[%s %s] Found all missing prev_events', VAR_11, VAR_12)\n",
"VAR_0.info('Requesting state at missing prev_event %s', VAR_12)\n",
"VAR_28 = await self.store.get_room_version_id(VAR_11)\n",
"VAR_62, VAR_166 = await self._get_state_for_room(VAR_5, VAR_11, p, VAR_13=True)\n",
"VAR_87 = await self._state_resolution_handler.resolve_events_with_store(VAR_11,\n VAR_28, VAR_193, VAR_60, state_res_store=StateResolutionStore(self.store))\n",
"VAR_202 = {(VAR_204.type, VAR_204.state_key): VAR_204.event_id for VAR_204 in\n VAR_62}\n",
"VAR_194 = await self.store.get_events(list(VAR_87.values()),\n get_prev_content=False, redact_behaviour=EventRedactBehaviour.AS_IS)\n",
"VAR_193.append(VAR_202)\n",
"VAR_60.update(VAR_194)\n",
"for VAR_204 in VAR_62:\n",
"VAR_2 = [VAR_60[VAR_170] for VAR_170 in VAR_87.values()]\n",
"VAR_60[VAR_204.event_id] = VAR_204\n"
] | [
"async def on_receive_pdu(self, origin, pdu, sent_to_us_directly=False) ->None:...\n",
"\"\"\"docstring\"\"\"\n",
"room_id = pdu.room_id\n",
"event_id = pdu.event_id\n",
"logger.info('handling received PDU: %s', pdu)\n",
"existing = await self.store.get_event(event_id, allow_none=True,\n allow_rejected=True)\n",
"already_seen = existing and (not existing.internal_metadata.is_outlier() or\n pdu.internal_metadata.is_outlier())\n",
"if already_seen:\n",
"logger.debug('[%s %s]: Already seen pdu', room_id, event_id)\n",
"self._sanity_check_event(pdu)\n",
"logger.warning('[%s %s] Received event failed sanity checks', room_id, event_id\n )\n",
"if room_id in self.room_queues:\n",
"return\n",
"logger.info('[%s %s] Queuing PDU from %s for now: join in progress',\n room_id, event_id, origin)\n",
"is_in_room = await self.auth.check_host_in_room(room_id, self.server_name)\n",
"self.room_queues[room_id].append((pdu, origin))\n",
"if not is_in_room:\n",
"return\n",
"logger.info(\"[%s %s] Ignoring PDU from %s as we're not in the room\",\n room_id, event_id, origin)\n",
"state = None\n",
"return None\n",
"if not pdu.internal_metadata.is_outlier():\n",
"min_depth = await self.get_min_depth_for_context(pdu.room_id)\n",
"await self._process_received_pdu(origin, pdu, state=state)\n",
"logger.debug('[%s %s] min_depth: %d', room_id, event_id, min_depth)\n",
"prevs = set(pdu.prev_event_ids())\n",
"seen = await self.store.have_events_in_timeline(prevs)\n",
"if min_depth is not None and pdu.depth < min_depth:\n",
"pdu.internal_metadata.outlier = True\n",
"if min_depth is not None and pdu.depth > min_depth:\n",
"if prevs - seen:\n",
"missing_prevs = prevs - seen\n",
"if sent_to_us_directly:\n",
"if sent_to_us_directly and missing_prevs:\n",
"logger.warning('[%s %s] Rejecting: failed to fetch %d prev events: %s',\n room_id, event_id, len(prevs - seen), shortstr(prevs - seen))\n",
"logger.info(\n 'Event %s is missing prev_events: calculating state for a backwards extremity'\n , event_id)\n",
"logger.info('[%s %s] Acquiring room lock to fetch %d missing prev_events: %s',\n room_id, event_id, len(missing_prevs), shortstr(missing_prevs))\n",
"event_map = {event_id: pdu}\n",
"logger.info('[%s %s] Acquired room lock to fetch %d missing prev_events',\n room_id, event_id, len(missing_prevs))\n",
"ours = await self.state_store.get_state_groups_ids(room_id, seen)\n",
"logger.warning(\n '[%s %s] Error attempting to resolve state at missing prev_events',\n room_id, event_id, exc_info=True)\n",
"await self._get_missing_events_for_pdu(origin, pdu, prevs, min_depth)\n",
"seen = await self.store.have_events_in_timeline(prevs)\n",
"state_maps = list(ours.values())\n",
"if not prevs - seen:\n",
"for p in (prevs - seen):\n",
"logger.info('[%s %s] Found all missing prev_events', room_id, event_id)\n",
"logger.info('Requesting state at missing prev_event %s', event_id)\n",
"room_version = await self.store.get_room_version_id(room_id)\n",
"remote_state, _ = await self._get_state_for_room(origin, room_id, p,\n include_event_in_state=True)\n",
"state_map = await self._state_resolution_handler.resolve_events_with_store(\n room_id, room_version, state_maps, event_map, state_res_store=\n StateResolutionStore(self.store))\n",
"remote_state_map = {(x.type, x.state_key): x.event_id for x in remote_state}\n",
"evs = await self.store.get_events(list(state_map.values()),\n get_prev_content=False, redact_behaviour=EventRedactBehaviour.AS_IS)\n",
"state_maps.append(remote_state_map)\n",
"event_map.update(evs)\n",
"for x in remote_state:\n",
"state = [event_map[e] for e in state_map.values()]\n",
"event_map[x.event_id] = x\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Return'",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Return'",
"Expr'",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"For",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"For",
"Assign'",
"Assign'"
] |
[
"from collections import OrderedDict\n",
"import django.forms\n",
"from django.utils.translation import gettext_lazy as _\n",
"from wagtail.admin.forms import WagtailAdminPageForm\n",
"from wagtail.contrib.forms.utils import get_field_clean_name\n",
"def __init__(self, *VAR_0, **VAR_1):...\n",
"VAR_1.setdefault('label_suffix', '')\n",
"self.user = VAR_1.pop('user', None)\n",
"self.page = VAR_1.pop('page', None)\n",
"super().__init__(*VAR_0, **kwargs)\n",
"def __init__(self, VAR_2):...\n",
"self.fields = VAR_2\n",
"def FUNC_0(self, VAR_3, VAR_4):...\n",
"VAR_4['max_length'] = 255\n",
"return django.forms.CharField(**options)\n"
] | [
"from collections import OrderedDict\n",
"import django.forms\n",
"from django.utils.translation import gettext_lazy as _\n",
"from wagtail.admin.forms import WagtailAdminPageForm\n",
"from wagtail.contrib.forms.utils import get_field_clean_name\n",
"def __init__(self, *args, **kwargs):...\n",
"kwargs.setdefault('label_suffix', '')\n",
"self.user = kwargs.pop('user', None)\n",
"self.page = kwargs.pop('page', None)\n",
"super().__init__(*args, **kwargs)\n",
"def __init__(self, fields):...\n",
"self.fields = fields\n",
"def create_singleline_field(self, field, options):...\n",
"options['max_length'] = 255\n",
"return django.forms.CharField(**options)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_13(self, VAR_20):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_30 = self.evaluate(VAR_20)\n",
"return self._handleText(VAR_30, VAR_20)\n"
] | [
"def evaluateText(self, expr):...\n",
"\"\"\"docstring\"\"\"\n",
"text = self.evaluate(expr)\n",
"return self._handleText(text, expr)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_5(self, VAR_6):...\n",
"VAR_13 = UploadForm(data={'file': 'foo.bar', 's3file': 'file'}, instance=\n filemodel)\n",
"assert VAR_13.is_valid()\n",
"assert VAR_13.cleaned_data['file'] == VAR_6.file\n"
] | [
"def test_file_does_not_exist_no_fallback(self, filemodel):...\n",
"form = UploadForm(data={'file': 'foo.bar', 's3file': 'file'}, instance=\n filemodel)\n",
"assert form.is_valid()\n",
"assert form.cleaned_data['file'] == filemodel.file\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assert'",
"Assert'"
] |
[
"def FUNC_18(self):...\n",
"assert 'custom/location/tmp/s3file/' in ClearableFileInput().upload_folder\n",
"assert len(os.path.basename(ClearableFileInput().upload_folder)) == 22\n"
] | [
"def test_upload_folder(self):...\n",
"assert 'custom/location/tmp/s3file/' in ClearableFileInput().upload_folder\n",
"assert len(os.path.basename(ClearableFileInput().upload_folder)) == 22\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assert'",
"Assert'"
] |
[
"def FUNC_6(self):...\n",
"if self._is_processing:\n",
"return\n",
"run_as_background_process('httppush.process', self._process)\n"
] | [
"def _start_processing(self):...\n",
"if self._is_processing:\n",
"return\n",
"run_as_background_process('httppush.process', self._process)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Expr'"
] |
[
"def __init__(self, VAR_26: 'HomeServer', VAR_27: Dict[str, Any]={}, VAR_8:...\n",
"\"\"\"docstring\"\"\"\n",
"self.hs = VAR_26\n",
"self._ip_whitelist = VAR_8\n",
"self._ip_blacklist = VAR_9\n",
"self._extra_treq_args = VAR_27\n",
"self.user_agent = VAR_26.version_string\n",
"self.clock = VAR_26.get_clock()\n",
"if VAR_26.config.user_agent_suffix:\n",
"self.user_agent = '%s %s' % (self.user_agent, VAR_26.config.user_agent_suffix)\n",
"self._cooperator = Cooperator(scheduler=_make_scheduler(hs.get_reactor()))\n",
"self.user_agent = self.user_agent.encode('ascii')\n",
"if self._ip_blacklist:\n",
"VAR_54 = VAR_26.get_reactor()\n",
"self.reactor = VAR_26.get_reactor()\n",
"VAR_55 = CLASS_0(VAR_54, self._ip_whitelist, self._ip_blacklist)\n",
"VAR_44 = HTTPConnectionPool(self.reactor)\n",
"def __getattr__(VAR_59, VAR_60):...\n",
"VAR_44.maxPersistentPerHost = max((100 * VAR_26.config.caches.global_factor, 5)\n )\n",
"if VAR_60 == 'nameResolver':\n",
"VAR_44.cachedConnectionTimeout = 2 * 60\n",
"return VAR_55\n",
"return getattr(VAR_54, VAR_60)\n",
"self.agent = ProxyAgent(self.reactor, connectTimeout=15, contextFactory=\n self.hs.get_http_client_context_factory(), VAR_44=pool, VAR_28=\n http_proxy, VAR_29=https_proxy)\n",
"if self._ip_blacklist:\n",
"self.agent = CLASS_1(self.agent, VAR_8=self._ip_whitelist, VAR_9=self.\n _ip_blacklist)\n"
] | [
"def __init__(self, hs: 'HomeServer', treq_args: Dict[str, Any]={},...\n",
"\"\"\"docstring\"\"\"\n",
"self.hs = hs\n",
"self._ip_whitelist = ip_whitelist\n",
"self._ip_blacklist = ip_blacklist\n",
"self._extra_treq_args = treq_args\n",
"self.user_agent = hs.version_string\n",
"self.clock = hs.get_clock()\n",
"if hs.config.user_agent_suffix:\n",
"self.user_agent = '%s %s' % (self.user_agent, hs.config.user_agent_suffix)\n",
"self._cooperator = Cooperator(scheduler=_make_scheduler(hs.get_reactor()))\n",
"self.user_agent = self.user_agent.encode('ascii')\n",
"if self._ip_blacklist:\n",
"real_reactor = hs.get_reactor()\n",
"self.reactor = hs.get_reactor()\n",
"nameResolver = IPBlacklistingResolver(real_reactor, self._ip_whitelist,\n self._ip_blacklist)\n",
"pool = HTTPConnectionPool(self.reactor)\n",
"def __getattr__(_self, attr):...\n",
"pool.maxPersistentPerHost = max((100 * hs.config.caches.global_factor, 5))\n",
"if attr == 'nameResolver':\n",
"pool.cachedConnectionTimeout = 2 * 60\n",
"return nameResolver\n",
"return getattr(real_reactor, attr)\n",
"self.agent = ProxyAgent(self.reactor, connectTimeout=15, contextFactory=\n self.hs.get_http_client_context_factory(), pool=pool, http_proxy=\n http_proxy, https_proxy=https_proxy)\n",
"if self._ip_blacklist:\n",
"self.agent = BlacklistingAgentWrapper(self.agent, ip_whitelist=self.\n _ip_whitelist, ip_blacklist=self._ip_blacklist)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
4,
0,
0,
0,
4,
0,
4,
4,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Return'",
"Assign'",
"Condition",
"Assign'"
] |
[
"def FUNC_4(self):...\n",
"self.assertFormfield(models.Event, 'description', widgets.AdminTextareaWidget)\n"
] | [
"def testTextField(self):...\n",
"self.assertFormfield(models.Event, 'description', widgets.AdminTextareaWidget)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_4(VAR_18, VAR_14=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_31 = FUNC_11('GIDIDX' + VAR_18.upper())\n",
"VAR_32 = []\n",
"if VAR_14:\n",
"VAR_14.replace('%2C', ',')\n",
"def FUNC_14():...\n",
"VAR_14.replace('%20', ' ')\n",
"VAR_43 = f.readline()\n",
"VAR_32 = VAR_14.split(',')\n",
"VAR_43 = '/'.join(VAR_43.split('/')[:-2])\n",
"VAR_33 = []\n",
"for c in VAR_32:\n",
"VAR_33.append(FUNC_10(VAR_43 + '/' + c.strip()))\n",
"return VAR_33\n"
] | [
"def get_mixer_list(idx, classes=None):...\n",
"\"\"\"docstring\"\"\"\n",
"mixer_index = _get_index_absolute_path('GIDIDX' + idx.upper())\n",
"classes_list = []\n",
"if classes:\n",
"classes.replace('%2C', ',')\n",
"def get_class_path():...\n",
"classes.replace('%20', ' ')\n",
"dataset_path = f.readline()\n",
"classes_list = classes.split(',')\n",
"dataset_path = '/'.join(dataset_path.split('/')[:-2])\n",
"class_paths = []\n",
"for c in classes_list:\n",
"class_paths.append(_get_obj_absolute_path(dataset_path + '/' + c.strip()))\n",
"return class_paths\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Return'"
] |
[
"def __init__(self, VAR_55, *VAR_6, **VAR_7):...\n",
"\"\"\"docstring\"\"\"\n",
"super().__init__(*VAR_6, **kwargs)\n",
"self.obj = VAR_55\n",
"self.components = VAR_55.project.component_set.filter(VAR_72=obj.\n source_language) | Component.objects.filter(source_language_id=obj.\n source_language_id, project__contribute_shared_tm=True).exclude(VAR_58=\n obj.project)\n",
"if len(self.components.values_list('id')[:30]) == 30:\n",
"self.fields['component'] = forms.CharField(VAR_113=False, VAR_134=_(\n 'Components'), help_text=_(\n 'Enter component to use as source, keep blank to use all components in current project.'\n ))\n",
"VAR_68 = [(s.id, str(s)) for s in self.components.order_project().\n prefetch_related('project')]\n",
"self.fields['engines'].choices = [(VAR_148, mt.name) for VAR_148, mt in\n MACHINE_TRANSLATION_SERVICES.items()]\n",
"self.fields['component'].choices = [('', _(\n 'All components in current project'))] + VAR_68\n",
"if 'weblate' in MACHINE_TRANSLATION_SERVICES.keys():\n",
"self.fields['engines'].initial = 'weblate'\n",
"VAR_117 = {'all', 'nottranslated', 'todo', 'fuzzy', 'check:inconsistent'}\n",
"self.fields['filter_type'].choices = [x for x in self.fields['filter_type']\n .choices if x[0] in VAR_117]\n",
"self.helper = FormHelper(self)\n",
"self.helper.layout = Layout(Field('mode'), Field('filter_type'),\n InlineRadios('auto_source', id='select_auto_source'), Div('component',\n css_id='auto_source_others'), Div('engines', 'threshold', css_id=\n 'auto_source_mt'))\n"
] | [
"def __init__(self, obj, *args, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"super().__init__(*args, **kwargs)\n",
"self.obj = obj\n",
"self.components = obj.project.component_set.filter(source_language=obj.\n source_language) | Component.objects.filter(source_language_id=obj.\n source_language_id, project__contribute_shared_tm=True).exclude(project\n =obj.project)\n",
"if len(self.components.values_list('id')[:30]) == 30:\n",
"self.fields['component'] = forms.CharField(required=False, label=_(\n 'Components'), help_text=_(\n 'Enter component to use as source, keep blank to use all components in current project.'\n ))\n",
"choices = [(s.id, str(s)) for s in self.components.order_project().\n prefetch_related('project')]\n",
"self.fields['engines'].choices = [(key, mt.name) for key, mt in\n MACHINE_TRANSLATION_SERVICES.items()]\n",
"self.fields['component'].choices = [('', _(\n 'All components in current project'))] + choices\n",
"if 'weblate' in MACHINE_TRANSLATION_SERVICES.keys():\n",
"self.fields['engines'].initial = 'weblate'\n",
"use_types = {'all', 'nottranslated', 'todo', 'fuzzy', 'check:inconsistent'}\n",
"self.fields['filter_type'].choices = [x for x in self.fields['filter_type']\n .choices if x[0] in use_types]\n",
"self.helper = FormHelper(self)\n",
"self.helper.layout = Layout(Field('mode'), Field('filter_type'),\n InlineRadios('auto_source', id='select_auto_source'), Div('component',\n css_id='auto_source_others'), Div('engines', 'threshold', css_id=\n 'auto_source_mt'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"from app import apfell, links, use_ssl\n",
"from sanic import response\n",
"from jinja2 import Environment, PackageLoader\n",
"from sanic_jwt.decorators import scoped, inject_user\n",
"from app.routes.routes import respect_pivot\n",
"import urllib.parse\n",
"VAR_0 = Environment(loader=PackageLoader('app', 'templates'))\n",
"@apfell.route('/apiui/command_help')...\n",
"VAR_4 = VAR_0.get_template('apiui_command_help.html')\n",
"if len(VAR_1.query_args) != 0:\n",
"VAR_5 = urllib.parse.unquote(VAR_1.query_args[0][1])\n",
"VAR_5 = ''\n",
"print(VAR_5)\n",
"if use_ssl:\n",
"VAR_6 = VAR_4.render(VAR_3=await respect_pivot(links, request), name=user[\n 'username'], http='https', ws='wss', config=user['ui_config'],\n view_utc_time=user['view_utc_time'], agent=data)\n",
"VAR_6 = VAR_4.render(VAR_3=await respect_pivot(links, request), name=user[\n 'username'], http='http', ws='ws', config=user['ui_config'],\n view_utc_time=user['view_utc_time'], agent=data)\n",
"return response.html(VAR_6)\n"
] | [
"from app import apfell, links, use_ssl\n",
"from sanic import response\n",
"from jinja2 import Environment, PackageLoader\n",
"from sanic_jwt.decorators import scoped, inject_user\n",
"from app.routes.routes import respect_pivot\n",
"import urllib.parse\n",
"env = Environment(loader=PackageLoader('app', 'templates'))\n",
"@apfell.route('/apiui/command_help')...\n",
"template = env.get_template('apiui_command_help.html')\n",
"if len(request.query_args) != 0:\n",
"data = urllib.parse.unquote(request.query_args[0][1])\n",
"data = ''\n",
"print(data)\n",
"if use_ssl:\n",
"content = template.render(links=await respect_pivot(links, request), name=\n user['username'], http='https', ws='wss', config=user['ui_config'],\n view_utc_time=user['view_utc_time'], agent=data)\n",
"content = template.render(links=await respect_pivot(links, request), name=\n user['username'], http='http', ws='ws', config=user['ui_config'],\n view_utc_time=user['view_utc_time'], agent=data)\n",
"return response.html(content)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_65(self):...\n",
"VAR_3 = self.client.post('/admin/auth/user/%s/' % self.admin.pk, self.\n get_user_data(self.admin))\n",
"self.assertRedirects(VAR_3, '/admin/auth/user/')\n",
"VAR_28 = LogEntry.objects.latest('id')\n",
"self.assertEqual(VAR_28.change_message, 'No fields changed.')\n"
] | [
"def test_user_not_change(self):...\n",
"response = self.client.post('/admin/auth/user/%s/' % self.admin.pk, self.\n get_user_data(self.admin))\n",
"self.assertRedirects(response, '/admin/auth/user/')\n",
"row = LogEntry.objects.latest('id')\n",
"self.assertEqual(row.change_message, 'No fields changed.')\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"from dataclasses import asdict\n",
"from datetime import date, datetime\n",
"from typing import Any, Dict, List, Optional, Union, cast\n",
"import httpx\n",
"from ..client import AuthenticatedClient, Client\n",
"from ..errors import ApiResponseError\n",
"from ..models.a_model import AModel\n",
"from ..models.an_enum import AnEnum\n",
"from ..models.body_upload_file_tests_upload_post import BodyUploadFileTestsUploadPost\n",
"from ..models.http_validation_error import HTTPValidationError\n",
"async def FUNC_0(*, VAR_0: Client, VAR_1: List[AnEnum], VAR_2: Union[date,...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = '{}/tests/'.format(VAR_0.base_url)\n",
"VAR_12: Dict[str, Any] = VAR_0.get_headers()\n",
"VAR_7 = []\n",
"for an_enum_value_item_data in VAR_1:\n",
"VAR_9 = an_enum_value_item_data.value\n",
"if isinstance(VAR_2, date):\n",
"VAR_7.append(VAR_9)\n",
"VAR_10 = VAR_2.isoformat()\n",
"VAR_10 = VAR_2.isoformat()\n",
"params: Dict[str, Any] = {'an_enum_value': VAR_7, 'some_date': VAR_10}\n",
"VAR_11 = await _client.get(VAR_6=url, VAR_12=headers, params=params)\n",
"if VAR_11.status_code == 200:\n",
"return [AModel.from_dict(item) for item in cast(List[Dict[str, Any]],\n VAR_11.json())]\n",
"if VAR_11.status_code == 422:\n",
"return HTTPValidationError.from_dict(cast(Dict[str, Any], VAR_11.json()))\n",
"async def FUNC_1(*, VAR_0: Client, VAR_3: BodyUploadFileTestsUploadPost,...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = '{}/tests/upload'.format(VAR_0.base_url)\n",
"VAR_12: Dict[str, Any] = VAR_0.get_headers()\n",
"if VAR_4 is not None:\n",
"VAR_12['keep-alive'] = VAR_4\n",
"VAR_11 = await _client.post(VAR_6=url, VAR_12=headers, files=multipart_data\n .to_dict())\n",
"if VAR_11.status_code == 200:\n",
"return None\n",
"if VAR_11.status_code == 422:\n",
"return HTTPValidationError.from_dict(cast(Dict[str, Any], VAR_11.json()))\n",
"async def FUNC_2(*, VAR_0: Client, VAR_5: AModel) ->Union[None,...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = '{}/tests/json_body'.format(VAR_0.base_url)\n",
"VAR_12: Dict[str, Any] = VAR_0.get_headers()\n",
"VAR_8 = VAR_5.to_dict()\n",
"VAR_11 = await _client.post(VAR_6=url, VAR_12=headers, json=json_json_body)\n",
"if VAR_11.status_code == 200:\n",
"return None\n",
"if VAR_11.status_code == 422:\n",
"return HTTPValidationError.from_dict(cast(Dict[str, Any], VAR_11.json()))\n"
] | [
"from dataclasses import asdict\n",
"from datetime import date, datetime\n",
"from typing import Any, Dict, List, Optional, Union, cast\n",
"import httpx\n",
"from ..client import AuthenticatedClient, Client\n",
"from ..errors import ApiResponseError\n",
"from ..models.a_model import AModel\n",
"from ..models.an_enum import AnEnum\n",
"from ..models.body_upload_file_tests_upload_post import BodyUploadFileTestsUploadPost\n",
"from ..models.http_validation_error import HTTPValidationError\n",
"async def get_user_list(*, client: Client, an_enum_value: List[AnEnum],...\n",
"\"\"\"docstring\"\"\"\n",
"url = '{}/tests/'.format(client.base_url)\n",
"headers: Dict[str, Any] = client.get_headers()\n",
"json_an_enum_value = []\n",
"for an_enum_value_item_data in an_enum_value:\n",
"an_enum_value_item = an_enum_value_item_data.value\n",
"if isinstance(some_date, date):\n",
"json_an_enum_value.append(an_enum_value_item)\n",
"json_some_date = some_date.isoformat()\n",
"json_some_date = some_date.isoformat()\n",
"params: Dict[str, Any] = {'an_enum_value': json_an_enum_value, 'some_date':\n json_some_date}\n",
"response = await _client.get(url=url, headers=headers, params=params)\n",
"if response.status_code == 200:\n",
"return [AModel.from_dict(item) for item in cast(List[Dict[str, Any]],\n response.json())]\n",
"if response.status_code == 422:\n",
"return HTTPValidationError.from_dict(cast(Dict[str, Any], response.json()))\n",
"async def upload_file_tests_upload_post(*, client: Client, multipart_data:...\n",
"\"\"\"docstring\"\"\"\n",
"url = '{}/tests/upload'.format(client.base_url)\n",
"headers: Dict[str, Any] = client.get_headers()\n",
"if keep_alive is not None:\n",
"headers['keep-alive'] = keep_alive\n",
"response = await _client.post(url=url, headers=headers, files=\n multipart_data.to_dict())\n",
"if response.status_code == 200:\n",
"return None\n",
"if response.status_code == 422:\n",
"return HTTPValidationError.from_dict(cast(Dict[str, Any], response.json()))\n",
"async def json_body_tests_json_body_post(*, client: Client, json_body: AModel...\n",
"\"\"\"docstring\"\"\"\n",
"url = '{}/tests/json_body'.format(client.base_url)\n",
"headers: Dict[str, Any] = client.get_headers()\n",
"json_json_body = json_body.to_dict()\n",
"response = await _client.post(url=url, headers=headers, json=json_json_body)\n",
"if response.status_code == 200:\n",
"return None\n",
"if response.status_code == 422:\n",
"return HTTPValidationError.from_dict(cast(Dict[str, Any], response.json()))\n"
] | [
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Condition",
"Docstring",
"Assign'",
"AnnAssign'",
"Assign'",
"For",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"AnnAssign'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Docstring",
"Assign'",
"AnnAssign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Docstring",
"Assign'",
"AnnAssign'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'"
] |
[
"def FUNC_27(VAR_21):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_58 = '\\n'.join(['Usage example:',\n 'To compile a SavedModel signature via (CPU) XLA AOT:',\n '$saved_model_cli aot_compile_cpu \\\\', ' --dir /tmp/saved_model \\\\',\n ' --tag_set serve \\\\', ' --output_dir /tmp/saved_model_xla_aot', '',\n '',\n 'Note: Additional XLA compilation options are available by setting the ',\n 'XLA_FLAGS environment variable. See the XLA debug options flags for ',\n 'all the options: ', ' {}'.format(VAR_0), '',\n 'For example, to disable XLA fast math when compiling:', '',\n 'XLA_FLAGS=\"--xla_cpu_enable_fast_math=false\" $saved_model_cli aot_compile_cpu ...'\n , '', 'Some possibly useful flags:',\n ' --xla_cpu_enable_fast_math=false',\n ' --xla_force_host_platform_device_count=<num threads>',\n ' (useful in conjunction with disabling multi threading)'])\n",
"VAR_59 = VAR_21.add_parser('aot_compile_cpu', description=compile_msg,\n formatter_class=argparse.RawTextHelpFormatter)\n",
"VAR_59.add_argument('--dir', type=str, required=True, help=\n 'directory containing the SavedModel to convert')\n",
"VAR_59.add_argument('--output_prefix', type=str, required=True, help=\n 'output directory + filename prefix for the resulting header(s) and object file(s)'\n )\n",
"VAR_59.add_argument('--tag_set', type=str, required=True, help=\n \"tag-set of graph in SavedModel to convert, separated by ','\")\n",
"VAR_59.add_argument('--signature_def_key', type=str, default=\n signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY, help=\n 'signature_def key to use. default: DEFAULT_SERVING_SIGNATURE_DEF_KEY')\n",
"VAR_59.add_argument('--target_triple', type=str, default='x86_64-pc-linux',\n help=\n 'Target triple for LLVM during AOT compilation. Examples: x86_64-none-darwin, x86_64-apple-ios, arm64-none-ios, armv7-none-android. More examples are available in tfcompile.bzl in the tensorflow codebase.'\n )\n",
"VAR_59.add_argument('--target_cpu', type=str, default='', help=\n 'Target cpu name for LLVM during AOT compilation. Examples: x86_64, skylake, haswell, westmere, <empty> (unknown). For a complete list of options, run (for x86 targets): `llc -march=x86 -mcpu=help`'\n )\n",
"VAR_59.add_argument('--checkpoint_path', type=str, default=None, help=\n 'Custom checkpoint to use (default: use the SavedModel variables)')\n",
"VAR_59.add_argument('--cpp_class', type=str, required=True, help=\n 'The name of the generated C++ class, wrapping the generated function. The syntax of this flag is [[<optional_namespace>::],...]<class_name>. This mirrors the C++ syntax for referring to a class, where multiple namespaces may precede the class name, separated by double-colons. The class will be generated in the given namespace(s), or if no namespaces are given, within the global namespace.'\n )\n",
"VAR_59.add_argument('--variables_to_feed', type=str, default='', help=\n \"The names of variables that will be fed into the network. Options are: empty (default; all variables are frozen, none may be fed), 'all' (all variables may be fed), or a comma-delimited list of names of variables that may be fed. In the last case, the non-fed variables will be frozen in the graph.**NOTE** Any variables passed to `variables_to_feed` *must be set by the user*. These variables will NOT be frozen and their values will be uninitialized in the compiled object (this applies to all input arguments from the signature as well).\"\n )\n",
"VAR_59.add_argument('--multithreading', type=str, default='False', help=\n 'Enable multithreading in the compiled computation. Note that if using this option, the resulting object files may have external dependencies on multithreading libraries like nsync.'\n )\n",
"VAR_59.set_defaults(func=aot_compile_cpu)\n"
] | [
"def add_aot_compile_cpu_subparser(subparsers):...\n",
"\"\"\"docstring\"\"\"\n",
"compile_msg = '\\n'.join(['Usage example:',\n 'To compile a SavedModel signature via (CPU) XLA AOT:',\n '$saved_model_cli aot_compile_cpu \\\\', ' --dir /tmp/saved_model \\\\',\n ' --tag_set serve \\\\', ' --output_dir /tmp/saved_model_xla_aot', '',\n '',\n 'Note: Additional XLA compilation options are available by setting the ',\n 'XLA_FLAGS environment variable. See the XLA debug options flags for ',\n 'all the options: ', ' {}'.format(_XLA_DEBUG_OPTIONS_URL), '',\n 'For example, to disable XLA fast math when compiling:', '',\n 'XLA_FLAGS=\"--xla_cpu_enable_fast_math=false\" $saved_model_cli aot_compile_cpu ...'\n , '', 'Some possibly useful flags:',\n ' --xla_cpu_enable_fast_math=false',\n ' --xla_force_host_platform_device_count=<num threads>',\n ' (useful in conjunction with disabling multi threading)'])\n",
"parser_compile = subparsers.add_parser('aot_compile_cpu', description=\n compile_msg, formatter_class=argparse.RawTextHelpFormatter)\n",
"parser_compile.add_argument('--dir', type=str, required=True, help=\n 'directory containing the SavedModel to convert')\n",
"parser_compile.add_argument('--output_prefix', type=str, required=True,\n help=\n 'output directory + filename prefix for the resulting header(s) and object file(s)'\n )\n",
"parser_compile.add_argument('--tag_set', type=str, required=True, help=\n \"tag-set of graph in SavedModel to convert, separated by ','\")\n",
"parser_compile.add_argument('--signature_def_key', type=str, default=\n signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY, help=\n 'signature_def key to use. default: DEFAULT_SERVING_SIGNATURE_DEF_KEY')\n",
"parser_compile.add_argument('--target_triple', type=str, default=\n 'x86_64-pc-linux', help=\n 'Target triple for LLVM during AOT compilation. Examples: x86_64-none-darwin, x86_64-apple-ios, arm64-none-ios, armv7-none-android. More examples are available in tfcompile.bzl in the tensorflow codebase.'\n )\n",
"parser_compile.add_argument('--target_cpu', type=str, default='', help=\n 'Target cpu name for LLVM during AOT compilation. Examples: x86_64, skylake, haswell, westmere, <empty> (unknown). For a complete list of options, run (for x86 targets): `llc -march=x86 -mcpu=help`'\n )\n",
"parser_compile.add_argument('--checkpoint_path', type=str, default=None,\n help='Custom checkpoint to use (default: use the SavedModel variables)')\n",
"parser_compile.add_argument('--cpp_class', type=str, required=True, help=\n 'The name of the generated C++ class, wrapping the generated function. The syntax of this flag is [[<optional_namespace>::],...]<class_name>. This mirrors the C++ syntax for referring to a class, where multiple namespaces may precede the class name, separated by double-colons. The class will be generated in the given namespace(s), or if no namespaces are given, within the global namespace.'\n )\n",
"parser_compile.add_argument('--variables_to_feed', type=str, default='',\n help=\n \"The names of variables that will be fed into the network. Options are: empty (default; all variables are frozen, none may be fed), 'all' (all variables may be fed), or a comma-delimited list of names of variables that may be fed. In the last case, the non-fed variables will be frozen in the graph.**NOTE** Any variables passed to `variables_to_feed` *must be set by the user*. These variables will NOT be frozen and their values will be uninitialized in the compiled object (this applies to all input arguments from the signature as well).\"\n )\n",
"parser_compile.add_argument('--multithreading', type=str, default='False',\n help=\n 'Enable multithreading in the compiled computation. Note that if using this option, the resulting object files may have external dependencies on multithreading libraries like nsync.'\n )\n",
"parser_compile.set_defaults(func=aot_compile_cpu)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_31(self, VAR_18, VAR_19=None, VAR_22='read'):...\n",
"if not VAR_19:\n",
"VAR_19 = self.meta.get_field(VAR_18)\n",
"return VAR_19.permlevel in self.get_permlevel_access(VAR_22)\n"
] | [
"def has_permlevel_access_to(self, fieldname, df=None, permission_type='read'):...\n",
"if not df:\n",
"df = self.meta.get_field(fieldname)\n",
"return df.permlevel in self.get_permlevel_access(permission_type)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_0(VAR_1):...\n",
"VAR_19 = ['select', 'delete', 'drop', 'update', 'case', 'and', 'or', 'like']\n",
"def FUNC_7(VAR_1):...\n",
"VAR_20.throw(_('Invalid Search Field {0}').format(VAR_1), VAR_20.DataError)\n",
"if len(VAR_1) == 1:\n",
"VAR_24 = re.compile('^.*[=;*,\\\\\\'\"$\\\\-+%#@()_].*')\n",
"if len(VAR_1) >= 3:\n",
"if VAR_24.match(VAR_1):\n",
"if '=' in VAR_1:\n",
"FUNC_7(VAR_1)\n",
"FUNC_7(VAR_1)\n",
"if ' --' in VAR_1:\n",
"FUNC_7(VAR_1)\n",
"if any(' {0} '.format(keyword) in VAR_1.split() for keyword in VAR_19):\n",
"FUNC_7(VAR_1)\n",
"if any(keyword in VAR_1.split() for keyword in VAR_19):\n",
"FUNC_7(VAR_1)\n",
"VAR_24 = re.compile('^.*[=;*,\\\\\\'\"$\\\\-+%#@()].*')\n",
"if any(VAR_24.match(f) for f in VAR_1.split()):\n",
"FUNC_7(VAR_1)\n"
] | [
"def sanitize_searchfield(searchfield):...\n",
"blacklisted_keywords = ['select', 'delete', 'drop', 'update', 'case', 'and',\n 'or', 'like']\n",
"def _raise_exception(searchfield):...\n",
"frappe.throw(_('Invalid Search Field {0}').format(searchfield), frappe.\n DataError)\n",
"if len(searchfield) == 1:\n",
"regex = re.compile('^.*[=;*,\\\\\\'\"$\\\\-+%#@()_].*')\n",
"if len(searchfield) >= 3:\n",
"if regex.match(searchfield):\n",
"if '=' in searchfield:\n",
"_raise_exception(searchfield)\n",
"_raise_exception(searchfield)\n",
"if ' --' in searchfield:\n",
"_raise_exception(searchfield)\n",
"if any(' {0} '.format(keyword) in searchfield.split() for keyword in\n",
"_raise_exception(searchfield)\n",
"if any(keyword in searchfield.split() for keyword in blacklisted_keywords):\n",
"_raise_exception(searchfield)\n",
"regex = re.compile('^.*[=;*,\\\\\\'\"$\\\\-+%#@()].*')\n",
"if any(regex.match(f) for f in searchfield.split()):\n",
"_raise_exception(searchfield)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"FunctionDef'",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"For",
"Expr'",
"For",
"Expr'",
"Assign'",
"For",
"Expr'"
] |
[
"def FUNC_20(VAR_27, VAR_28=False):...\n",
"while True:\n",
"VAR_96 = queue.pop()\n",
"time.sleep(1)\n",
"if VAR_28:\n",
"if VAR_96 is not None:\n",
"VAR_101, VAR_70, VAR_21, VAR_102 = VAR_96\n",
"time.sleep(1)\n",
"queue.start_job(VAR_70)\n",
"VAR_76 = requests.post(VAR_27 + '/api/' + VAR_102 + '/', json=input_data)\n",
"if VAR_76.status_code == 200:\n",
"queue.pass_job(VAR_70, VAR_76.json())\n",
"queue.fail_job(VAR_70, VAR_76.text)\n"
] | [
"def queue_thread(path_to_local_server, test_mode=False):...\n",
"while True:\n",
"next_job = queue.pop()\n",
"time.sleep(1)\n",
"if test_mode:\n",
"if next_job is not None:\n",
"_, hash, input_data, task_type = next_job\n",
"time.sleep(1)\n",
"queue.start_job(hash)\n",
"response = requests.post(path_to_local_server + '/api/' + task_type + '/',\n json=input_data)\n",
"if response.status_code == 200:\n",
"queue.pass_job(hash, response.json())\n",
"queue.fail_job(hash, response.text)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_3(self):...\n",
"VAR_12, VAR_13 = self.make_request('POST', '/create_group'.encode('ascii'),\n VAR_7=self.admin_user_tok, content={'localpart': 'test'})\n",
"self.assertEqual(200, int(VAR_13.result['code']), msg=channel.result['body'])\n",
"VAR_5 = VAR_13.json_body['group_id']\n",
"self._check_group(VAR_5, VAR_6=200)\n",
"VAR_0 = '/groups/%s/admin/users/invite/%s' % (VAR_5, self.other_user)\n",
"VAR_12, VAR_13 = self.make_request('PUT', VAR_0.encode('ascii'), VAR_7=self\n .admin_user_tok, content={})\n",
"self.assertEqual(200, int(VAR_13.result['code']), msg=channel.result['body'])\n",
"VAR_0 = '/groups/%s/self/accept_invite' % (VAR_5,)\n",
"VAR_12, VAR_13 = self.make_request('PUT', VAR_0.encode('ascii'), VAR_7=self\n .other_user_token, content={})\n",
"self.assertEqual(200, int(VAR_13.result['code']), msg=channel.result['body'])\n",
"self.assertIn(VAR_5, self._get_groups_user_is_in(self.admin_user_tok))\n",
"self.assertIn(VAR_5, self._get_groups_user_is_in(self.other_user_token))\n",
"VAR_0 = '/_synapse/admin/v1/delete_group/' + VAR_5\n",
"VAR_12, VAR_13 = self.make_request('POST', VAR_0.encode('ascii'), VAR_7=\n self.admin_user_tok, content={'localpart': 'test'})\n",
"self.assertEqual(200, int(VAR_13.result['code']), msg=channel.result['body'])\n",
"self._check_group(VAR_5, VAR_6=404)\n",
"self.assertNotIn(VAR_5, self._get_groups_user_is_in(self.admin_user_tok))\n",
"self.assertNotIn(VAR_5, self._get_groups_user_is_in(self.other_user_token))\n"
] | [
"def test_delete_group(self):...\n",
"request, channel = self.make_request('POST', '/create_group'.encode('ascii'\n ), access_token=self.admin_user_tok, content={'localpart': 'test'})\n",
"self.assertEqual(200, int(channel.result['code']), msg=channel.result['body'])\n",
"group_id = channel.json_body['group_id']\n",
"self._check_group(group_id, expect_code=200)\n",
"url = '/groups/%s/admin/users/invite/%s' % (group_id, self.other_user)\n",
"request, channel = self.make_request('PUT', url.encode('ascii'),\n access_token=self.admin_user_tok, content={})\n",
"self.assertEqual(200, int(channel.result['code']), msg=channel.result['body'])\n",
"url = '/groups/%s/self/accept_invite' % (group_id,)\n",
"request, channel = self.make_request('PUT', url.encode('ascii'),\n access_token=self.other_user_token, content={})\n",
"self.assertEqual(200, int(channel.result['code']), msg=channel.result['body'])\n",
"self.assertIn(group_id, self._get_groups_user_is_in(self.admin_user_tok))\n",
"self.assertIn(group_id, self._get_groups_user_is_in(self.other_user_token))\n",
"url = '/_synapse/admin/v1/delete_group/' + group_id\n",
"request, channel = self.make_request('POST', url.encode('ascii'),\n access_token=self.admin_user_tok, content={'localpart': 'test'})\n",
"self.assertEqual(200, int(channel.result['code']), msg=channel.result['body'])\n",
"self._check_group(group_id, expect_code=404)\n",
"self.assertNotIn(group_id, self._get_groups_user_is_in(self.admin_user_tok))\n",
"self.assertNotIn(group_id, self._get_groups_user_is_in(self.other_user_token))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_0(VAR_0: Optional[str], VAR_1: Optional[Path]) ->Union[CLASS_0,...\n",
"VAR_2 = FUNC_3(VAR_0=url, VAR_1=path)\n",
"if isinstance(VAR_2, GeneratorError):\n",
"return VAR_2\n",
"VAR_3 = GeneratorData.from_dict(VAR_2)\n",
"if isinstance(VAR_3, GeneratorError):\n",
"return VAR_3\n",
"return CLASS_0(VAR_3=openapi)\n"
] | [
"def _get_project_for_url_or_path(url: Optional[str], path: Optional[Path]...\n",
"data_dict = _get_document(url=url, path=path)\n",
"if isinstance(data_dict, GeneratorError):\n",
"return data_dict\n",
"openapi = GeneratorData.from_dict(data_dict)\n",
"if isinstance(openapi, GeneratorError):\n",
"return openapi\n",
"return Project(openapi=openapi)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"@property...\n",
"return self._base_builder.room_id\n"
] | [
"@property...\n",
"return self._base_builder.room_id\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def __init__(self, VAR_8, VAR_9=None):...\n",
"self.clock = VAR_8.get_clock()\n",
"if VAR_9 is None:\n",
"VAR_9 = CLASS_4(VAR_8), CLASS_6(VAR_8), CLASS_7(VAR_8)\n",
"self._key_fetchers = VAR_9\n",
"self.key_downloads = {}\n"
] | [
"def __init__(self, hs, key_fetchers=None):...\n",
"self.clock = hs.get_clock()\n",
"if key_fetchers is None:\n",
"key_fetchers = StoreKeyFetcher(hs), PerspectivesKeyFetcher(hs\n ), ServerKeyFetcher(hs)\n",
"self._key_fetchers = key_fetchers\n",
"self.key_downloads = {}\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_13(self, VAR_19):...\n",
"if VAR_19.tag in self.md.opaqueElements:\n",
"return True\n",
"if VAR_19.get('data-opaque') is not None:\n",
"return True\n",
"return False\n"
] | [
"def isOpaqueElement(self, el):...\n",
"if el.tag in self.md.opaqueElements:\n",
"return True\n",
"if el.get('data-opaque') is not None:\n",
"return True\n",
"return False\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"@CLASS_4('tabs')...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_21 = collections.defaultdict(list)\n",
"for win_id, window in objreg.window_registry.items():\n",
"if sip.isdeleted(window):\n",
"VAR_20 = jinja.render('tabs.html', title='Tabs', tab_list_by_window=tabs)\n",
"VAR_33 = objreg.get('tabbed-browser', scope='window', window=win_id)\n",
"return 'text/html', VAR_20\n",
"for tab in VAR_33.widgets():\n",
"if tab.url() not in [QUrl('qute://tabs/'), QUrl('qute://tabs')]:\n",
"VAR_45 = tab.url().toDisplayString()\n",
"VAR_21[str(win_id)].append((tab.title(), VAR_45))\n"
] | [
"@add_handler('tabs')...\n",
"\"\"\"docstring\"\"\"\n",
"tabs = collections.defaultdict(list)\n",
"for win_id, window in objreg.window_registry.items():\n",
"if sip.isdeleted(window):\n",
"src = jinja.render('tabs.html', title='Tabs', tab_list_by_window=tabs)\n",
"tabbed_browser = objreg.get('tabbed-browser', scope='window', window=win_id)\n",
"return 'text/html', src\n",
"for tab in tabbed_browser.widgets():\n",
"if tab.url() not in [QUrl('qute://tabs/'), QUrl('qute://tabs')]:\n",
"urlstr = tab.url().toDisplayString()\n",
"tabs[str(win_id)].append((tab.title(), urlstr))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"For",
"Condition",
"Assign'",
"Assign'",
"Return'",
"For",
"Condition",
"Assign'",
"Expr'"
] |
[
"@login_required(doConnectionCleanup=False)...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5.SERVICE_OPTS.setOmeroGroup(-1)\n",
"VAR_173 = VAR_5.getObject('OriginalFile', VAR_30)\n",
"if VAR_173 is None:\n",
"return handlerInternalError(VAR_2, 'Original File does not exist (id:%s).' %\n VAR_30)\n",
"VAR_174 = ConnCleaningHttpResponse(VAR_173.getFileInChunks(buf=settings.\n CHUNK_SIZE))\n",
"VAR_174.conn = VAR_5\n",
"VAR_175 = VAR_173.mimetype\n",
"if VAR_175 == 'text/x-python':\n",
"VAR_175 = 'text/plain'\n",
"VAR_174['Content-Type'] = VAR_175\n",
"VAR_174['Content-Length'] = VAR_173.getSize()\n",
"if VAR_31:\n",
"VAR_300 = VAR_173.name.replace(' ', '_')\n",
"return VAR_174\n",
"VAR_300 = VAR_300.replace(',', '.')\n",
"VAR_174['Content-Disposition'] = 'attachment; filename=%s' % VAR_300\n"
] | [
"@login_required(doConnectionCleanup=False)...\n",
"\"\"\"docstring\"\"\"\n",
"conn.SERVICE_OPTS.setOmeroGroup(-1)\n",
"orig_file = conn.getObject('OriginalFile', fileId)\n",
"if orig_file is None:\n",
"return handlerInternalError(request, \n 'Original File does not exist (id:%s).' % fileId)\n",
"rsp = ConnCleaningHttpResponse(orig_file.getFileInChunks(buf=settings.\n CHUNK_SIZE))\n",
"rsp.conn = conn\n",
"mimetype = orig_file.mimetype\n",
"if mimetype == 'text/x-python':\n",
"mimetype = 'text/plain'\n",
"rsp['Content-Type'] = mimetype\n",
"rsp['Content-Length'] = orig_file.getSize()\n",
"if download:\n",
"downloadName = orig_file.name.replace(' ', '_')\n",
"return rsp\n",
"downloadName = downloadName.replace(',', '.')\n",
"rsp['Content-Disposition'] = 'attachment; filename=%s' % downloadName\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'"
] |
[
"def FUNC_7(self, VAR_20, VAR_24):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_20.is_ajax():\n",
"return HttpResponseForbidden(json.dumps({'error': force_text(VAR_24)}))\n",
"VAR_32 = urlencode({'error': force_text(VAR_24)})\n",
"VAR_33 = force_str(reverse('shuup_admin:login') + '?' + VAR_32)\n",
"VAR_34 = redirect_to_login(next=request.path, VAR_33=login_url)\n",
"if is_authenticated(VAR_20.user):\n",
"return VAR_34\n"
] | [
"def _get_unauth_response(self, request, reason):...\n",
"\"\"\"docstring\"\"\"\n",
"if request.is_ajax():\n",
"return HttpResponseForbidden(json.dumps({'error': force_text(reason)}))\n",
"error_params = urlencode({'error': force_text(reason)})\n",
"login_url = force_str(reverse('shuup_admin:login') + '?' + error_params)\n",
"resp = redirect_to_login(next=request.path, login_url=login_url)\n",
"if is_authenticated(request.user):\n",
"return resp\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'"
] |
[
"\"\"\"string\"\"\"\n",
"from __future__ import annotations\n",
"import datetime\n",
"import functools\n",
"import threading\n",
"from io import BytesIO\n",
"from typing import Any\n",
"import flask\n",
"import markdown2\n",
"import werkzeug.urls\n",
"from beancount import __version__ as beancount_version\n",
"from beancount.core.account import ACCOUNT_RE\n",
"from beancount.utils.text_utils import replace_numbers\n",
"from flask import abort\n",
"from flask import Flask\n",
"from flask import redirect\n",
"from flask import render_template\n",
"from flask import render_template_string\n",
"from flask import request\n",
"from flask import send_file\n",
"from flask.wrappers import Response\n",
"from flask_babel import Babel\n",
"from flask_babel import get_translations\n",
"from werkzeug.utils import secure_filename\n",
"from fava import __version__ as fava_version\n",
"from fava import LANGUAGES\n",
"from fava import template_filters\n",
"from fava.context import g\n",
"from fava.core import FavaLedger\n",
"from fava.core.charts import FavaJSONEncoder\n",
"from fava.core.documents import is_document_or_import_file\n",
"from fava.help import HELP_PAGES\n",
"from fava.helpers import FavaAPIException\n",
"from fava.json_api import json_api\n",
"from fava.serialisation import serialise\n",
"from fava.util import next_key\n",
"from fava.util import resource_path\n",
"from fava.util import send_file_inline\n",
"from fava.util import setup_logging\n",
"from fava.util import slugify\n",
"from fava.util.date import Interval\n",
"from fava.util.excel import HAVE_EXCEL\n",
"VAR_0 = resource_path('static')\n",
"setup_logging()\n",
"VAR_1 = Flask(__name__, template_folder=str(resource_path('templates')),\n static_folder=str(STATIC_FOLDER))\n",
"VAR_1.register_blueprint(json_api, url_prefix='/<bfile>/api')\n",
"VAR_1.json_encoder = FavaJSONEncoder\n",
"VAR_2 = VAR_1.jinja_options.setdefault('extensions', [])\n",
"VAR_2.append('jinja2.ext.do')\n",
"VAR_2.append('jinja2.ext.loopcontrols')\n",
"VAR_1.jinja_env.trim_blocks = True\n",
"VAR_1.jinja_env.lstrip_blocks = True\n",
"VAR_1.config['HAVE_EXCEL'] = HAVE_EXCEL\n",
"VAR_1.config['ACCOUNT_RE'] = ACCOUNT_RE\n",
"VAR_3 = ['balance_sheet', 'commodities', 'documents', 'events', 'editor',\n 'errors', 'holdings', 'import', 'income_statement', 'journal',\n 'options', 'query', 'statistics', 'trial_balance']\n",
"VAR_4 = threading.Lock()\n",
"def FUNC_0(VAR_5: FavaLedger) ->str:...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_22 = slugify(VAR_5.options['title'])\n",
"return VAR_22 or slugify(VAR_5.beancount_file_path)\n"
] | [
"\"\"\"Fava's main WSGI application.\n\nwhen using Fava's WSGI app, make sure to set ``app.config['BEANCOUNT_FILES']``.\nTo start a simple server::\n\n from fava.application import app\n app.config['BEANCOUNT_FILES'] = ['/path/to/file.beancount']\n app.run('localhost', 5000)\n\nAttributes:\n app: An instance of :class:`flask.Flask`, this is Fava's WSGI application.\n\n\"\"\"\n",
"from __future__ import annotations\n",
"import datetime\n",
"import functools\n",
"import threading\n",
"from io import BytesIO\n",
"from typing import Any\n",
"import flask\n",
"import markdown2\n",
"import werkzeug.urls\n",
"from beancount import __version__ as beancount_version\n",
"from beancount.core.account import ACCOUNT_RE\n",
"from beancount.utils.text_utils import replace_numbers\n",
"from flask import abort\n",
"from flask import Flask\n",
"from flask import redirect\n",
"from flask import render_template\n",
"from flask import render_template_string\n",
"from flask import request\n",
"from flask import send_file\n",
"from flask.wrappers import Response\n",
"from flask_babel import Babel\n",
"from flask_babel import get_translations\n",
"from werkzeug.utils import secure_filename\n",
"from fava import __version__ as fava_version\n",
"from fava import LANGUAGES\n",
"from fava import template_filters\n",
"from fava.context import g\n",
"from fava.core import FavaLedger\n",
"from fava.core.charts import FavaJSONEncoder\n",
"from fava.core.documents import is_document_or_import_file\n",
"from fava.help import HELP_PAGES\n",
"from fava.helpers import FavaAPIException\n",
"from fava.json_api import json_api\n",
"from fava.serialisation import serialise\n",
"from fava.util import next_key\n",
"from fava.util import resource_path\n",
"from fava.util import send_file_inline\n",
"from fava.util import setup_logging\n",
"from fava.util import slugify\n",
"from fava.util.date import Interval\n",
"from fava.util.excel import HAVE_EXCEL\n",
"STATIC_FOLDER = resource_path('static')\n",
"setup_logging()\n",
"app = Flask(__name__, template_folder=str(resource_path('templates')),\n static_folder=str(STATIC_FOLDER))\n",
"app.register_blueprint(json_api, url_prefix='/<bfile>/api')\n",
"app.json_encoder = FavaJSONEncoder\n",
"jinja_extensions = app.jinja_options.setdefault('extensions', [])\n",
"jinja_extensions.append('jinja2.ext.do')\n",
"jinja_extensions.append('jinja2.ext.loopcontrols')\n",
"app.jinja_env.trim_blocks = True\n",
"app.jinja_env.lstrip_blocks = True\n",
"app.config['HAVE_EXCEL'] = HAVE_EXCEL\n",
"app.config['ACCOUNT_RE'] = ACCOUNT_RE\n",
"REPORTS = ['balance_sheet', 'commodities', 'documents', 'events', 'editor',\n 'errors', 'holdings', 'import', 'income_statement', 'journal',\n 'options', 'query', 'statistics', 'trial_balance']\n",
"LOAD_FILE_LOCK = threading.Lock()\n",
"def ledger_slug(ledger: FavaLedger) ->str:...\n",
"\"\"\"docstring\"\"\"\n",
"title_slug = slugify(ledger.options['title'])\n",
"return title_slug or slugify(ledger.beancount_file_path)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"@CLASS_4('version')...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_20 = jinja.render('version.html', title='Version info', version=version\n .version(), copyright=qutebrowser.__copyright__)\n",
"return 'text/html', VAR_20\n"
] | [
"@add_handler('version')...\n",
"\"\"\"docstring\"\"\"\n",
"src = jinja.render('version.html', title='Version info', version=version.\n version(), copyright=qutebrowser.__copyright__)\n",
"return 'text/html', src\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Return'"
] |
[
"def __str__(self):...\n",
"return '%s / %s' % (self.ticket, self.depends_on)\n"
] | [
"def __str__(self):...\n",
"return '%s / %s' % (self.ticket, self.depends_on)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_36(self):...\n",
"if not test.is_built_with_xla():\n",
"self.skipTest('Skipping test because XLA is not compiled in.')\n",
"self.parser = saved_model_cli.create_parser()\n",
"VAR_9 = test.test_src_dir_path(VAR_0)\n",
"VAR_41 = os.path.join(test.get_temp_dir(), 'aot_compile_cpu_dir')\n",
"VAR_10 = self.parser.parse_args(['aot_compile_cpu', '--dir', VAR_9,\n '--tag_set', 'serve', '--output_prefix', VAR_41, '--cpp_class',\n 'Compiled', '--signature_def_key', 'MISSING'])\n",
"saved_model_cli.aot_compile_cpu(VAR_10)\n"
] | [
"def testAOTCompileCPUWrongSignatureDefKey(self):...\n",
"if not test.is_built_with_xla():\n",
"self.skipTest('Skipping test because XLA is not compiled in.')\n",
"self.parser = saved_model_cli.create_parser()\n",
"base_path = test.test_src_dir_path(SAVED_MODEL_PATH)\n",
"output_dir = os.path.join(test.get_temp_dir(), 'aot_compile_cpu_dir')\n",
"args = self.parser.parse_args(['aot_compile_cpu', '--dir', base_path,\n '--tag_set', 'serve', '--output_prefix', output_dir, '--cpp_class',\n 'Compiled', '--signature_def_key', 'MISSING'])\n",
"saved_model_cli.aot_compile_cpu(args)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def __init__(self):...\n",
"super(CLASS_20, self).__init__()\n"
] | [
"def __init__(self):...\n",
"super(AuthOIDView, self).__init__()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"@VAR_7.route('/base/<baseidx>/keywords/<params>')...\n",
"VAR_8.info('Enter Scope baseIdx {}'.format(VAR_9))\n",
"sys.stdout.flush()\n",
"VAR_23 = []\n",
"VAR_24 = None\n",
"VAR_25 = 0.0\n",
"if VAR_13:\n",
"VAR_24, VAR_25 = FUNC_3(VAR_13)\n",
"VAR_26 = VAR_24\n",
"if VAR_26 == None:\n",
"VAR_26 = random.randrange(10000)\n",
"if VAR_9 != '0':\n",
"VAR_40 = FUNC_11(VAR_9)\n",
"VAR_27 = bool(VAR_17 and VAR_23)\n",
"VAR_41, VAR_42 = VAR_40.split('_')\n",
"if VAR_23:\n",
"VAR_23 = [VAR_41, str(VAR_26), VAR_42, '{:.2f}'.format(VAR_25)]\n",
"VAR_43 = len(VAR_23)\n",
"VAR_43 = len(VAR_17)\n",
"VAR_40 = '_'.join(VAR_23)\n",
"random.seed(VAR_24)\n",
"VAR_23 = VAR_17.copy()\n",
"print(VAR_40)\n",
"VAR_28 = 0\n",
"if not os.path.exists(VAR_40):\n",
"if VAR_27:\n",
"split_data(VAR_3, VAR_25, VAR_26)\n",
"VAR_23 = list(f.readlines())\n",
"random.Random(VAR_24).shuffle(VAR_17)\n",
"def FUNC_13():...\n",
"if VAR_15 > 0:\n",
"VAR_28 = int(VAR_25 * VAR_43)\n",
"yield '<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\\n'\n",
"VAR_23 = VAR_23[VAR_15:]\n",
"if VAR_16 > 0:\n",
"VAR_43 = VAR_43 + VAR_28\n",
"if VAR_1:\n",
"VAR_48 = len(VAR_23)\n",
"VAR_43 = len(VAR_23)\n",
"yield '<?xml-stylesheet type=\"text/xsl\" href=\"/scopelist.xsl\" ?>\\n'\n",
"yield '<objectlist count=\"{:d}\">\\n'.format(VAR_43)\n",
"if VAR_16 > VAR_48:\n",
"VAR_44 = 0\n",
"VAR_16 = VAR_48\n",
"VAR_23 = VAR_23[:VAR_16]\n",
"VAR_45 = 0\n",
"if VAR_27:\n",
"VAR_44 = int(VAR_25 * VAR_5)\n",
"VAR_46 = []\n",
"VAR_49 = cycle(VAR_17)\n",
"def FUNC_15():...\n",
"random.seed(VAR_24)\n",
"return list(map(lambda x: x + VAR_5 * VAR_45, sorted(random.sample(list(\n range(VAR_5)), VAR_44))))\n"
] | [
"@scope_blueprint.route('/base/<baseidx>/keywords/<params>')...\n",
"_log.info('Enter Scope baseIdx {}'.format(baseidx))\n",
"sys.stdout.flush()\n",
"base_list = []\n",
"seed = None\n",
"percentage = 0.0\n",
"if params:\n",
"seed, percentage = decode_params(params)\n",
"s_seed = seed\n",
"if s_seed == None:\n",
"s_seed = random.randrange(10000)\n",
"if baseidx != '0':\n",
"base_index = _get_index_absolute_path(baseidx)\n",
"make_cocktail = bool(mixer_list and base_list)\n",
"data_type, pos_file = base_index.split('_')\n",
"if base_list:\n",
"base_list = [data_type, str(s_seed), pos_file, '{:.2f}'.format(percentage)]\n",
"total_entries = len(base_list)\n",
"total_entries = len(mixer_list)\n",
"base_index = '_'.join(base_list)\n",
"random.seed(seed)\n",
"base_list = mixer_list.copy()\n",
"print(base_index)\n",
"total_sample = 0\n",
"if not os.path.exists(base_index):\n",
"if make_cocktail:\n",
"split_data(INDEXDIR, percentage, s_seed)\n",
"base_list = list(f.readlines())\n",
"random.Random(seed).shuffle(mixer_list)\n",
"def generate():...\n",
"if start > 0:\n",
"total_sample = int(percentage * total_entries)\n",
"yield '<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\\n'\n",
"base_list = base_list[start:]\n",
"if limit > 0:\n",
"total_entries = total_entries + total_sample\n",
"if STYLE:\n",
"end_ = len(base_list)\n",
"total_entries = len(base_list)\n",
"yield '<?xml-stylesheet type=\"text/xsl\" href=\"/scopelist.xsl\" ?>\\n'\n",
"yield '<objectlist count=\"{:d}\">\\n'.format(total_entries)\n",
"if limit > end_:\n",
"mix_per_iteration = 0\n",
"limit = end_\n",
"base_list = base_list[:limit]\n",
"iteration_count = 0\n",
"if make_cocktail:\n",
"mix_per_iteration = int(percentage * ITEMS_PER_ITERATION)\n",
"mix_indices = []\n",
"pool = cycle(mixer_list)\n",
"def generate_mix_indices():...\n",
"random.seed(seed)\n",
"return list(map(lambda x: x + ITEMS_PER_ITERATION * iteration_count, sorted\n (random.sample(list(range(ITEMS_PER_ITERATION)), mix_per_iteration))))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Expr'",
"Return'"
] |
[
"from django.core.exceptions import ImproperlyConfigured\n",
"from django.core.signals import setting_changed\n",
"from django.http.response import HttpResponseNotFound\n",
"from shuup.xtheme._theme import get_current_theme\n",
"VAR_0 = {}\n",
"def FUNC_0(**VAR_1):...\n",
"VAR_0.clear()\n",
"setting_changed.connect(FUNC_0, dispatch_uid=\n 'shuup.xtheme.views.extra.clear_view_cache')\n",
"def FUNC_1(VAR_2, VAR_3):...\n",
"VAR_5 = VAR_2.get_view(VAR_3)\n",
"if hasattr(VAR_5, 'as_view'):\n",
"VAR_5 = VAR_5.as_view()\n",
"if VAR_5 and not callable(VAR_5):\n",
"return VAR_5\n"
] | [
"from django.core.exceptions import ImproperlyConfigured\n",
"from django.core.signals import setting_changed\n",
"from django.http.response import HttpResponseNotFound\n",
"from shuup.xtheme._theme import get_current_theme\n",
"_VIEW_CACHE = {}\n",
"def clear_view_cache(**kwargs):...\n",
"_VIEW_CACHE.clear()\n",
"setting_changed.connect(clear_view_cache, dispatch_uid=\n 'shuup.xtheme.views.extra.clear_view_cache')\n",
"def _get_view_by_name(theme, view_name):...\n",
"view = theme.get_view(view_name)\n",
"if hasattr(view, 'as_view'):\n",
"view = view.as_view()\n",
"if view and not callable(view):\n",
"return view\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"FunctionDef'",
"Expr'",
"Expr'",
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Return'"
] |
[
"def __init__(self, VAR_1: str):...\n",
"assert VAR_1.startswith('https:')\n",
"self.sourceName = VAR_1\n",
"self.type = 'url'\n"
] | [
"def __init__(self, sourceName: str):...\n",
"assert sourceName.startswith('https:')\n",
"self.sourceName = sourceName\n",
"self.type = 'url'\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assert'",
"Assign'",
"Assign'"
] |
[
"def FUNC_51(self, VAR_69, VAR_67):...\n",
"\"\"\"docstring\"\"\"\n",
"return \"\"\"<table>\n<thead>%s</thead>\n<tbody>\n%s</tbody>\n</table>\n\"\"\" % (VAR_69,\n VAR_67)\n"
] | [
"def table(self, header, body):...\n",
"\"\"\"docstring\"\"\"\n",
"return \"\"\"<table>\n<thead>%s</thead>\n<tbody>\n%s</tbody>\n</table>\n\"\"\" % (header,\n body)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"@VAR_2.route('/ajax/getlocale')...\n",
"VAR_58 = babel.list_translations() + [LC('en')]\n",
"VAR_59 = list()\n",
"VAR_60 = get_locale()\n",
"for loc in VAR_58:\n",
"VAR_59.append({'value': str(loc), 'text': loc.get_language_name(VAR_60)})\n",
"return json.dumps(VAR_59)\n"
] | [
"@admi.route('/ajax/getlocale')...\n",
"locale = babel.list_translations() + [LC('en')]\n",
"ret = list()\n",
"current_locale = get_locale()\n",
"for loc in locale:\n",
"ret.append({'value': str(loc), 'text': loc.get_language_name(current_locale)})\n",
"return json.dumps(ret)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Return'"
] |
[
"def __repr__(self) ->str:...\n",
"return util.repr_(self)\n"
] | [
"def __repr__(self) ->str:...\n",
"return util.repr_(self)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_40(VAR_16, VAR_17):...\n",
"return config.set_from_dictionary(VAR_16, VAR_17, lambda y: 1 if y == 'on' else\n 0, 0)\n"
] | [
"def _config_checkbox_int(to_save, x):...\n",
"return config.set_from_dictionary(to_save, x, lambda y: 1 if y == 'on' else\n 0, 0)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"async def FUNC_6(self):...\n",
"if not hasattr(self, 'resolved_ref'):\n",
"self.resolved_ref = await self.get_resolved_ref()\n",
"return f'{self.url}/{self.resolved_ref}'\n"
] | [
"async def get_resolved_spec(self):...\n",
"if not hasattr(self, 'resolved_ref'):\n",
"self.resolved_ref = await self.get_resolved_ref()\n",
"return f'{self.url}/{self.resolved_ref}'\n"
] | [
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_40(self, VAR_2='password'):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_18 = HttpRequest()\n",
"CsrfViewMiddleware().process_view(VAR_18, login_view, (), {})\n",
"VAR_18.META['CSRF_COOKIE_USED'] = True\n",
"VAR_19 = login_view(VAR_18)\n",
"VAR_20 = CsrfViewMiddleware().process_response(VAR_18, VAR_19)\n",
"VAR_21 = VAR_20.cookies.get(VAR_23.CSRF_COOKIE_NAME, None)\n",
"VAR_22 = VAR_21.coded_value\n",
"VAR_18 = HttpRequest()\n",
"VAR_18.COOKIES[VAR_23.CSRF_COOKIE_NAME] = VAR_22\n",
"VAR_18.method = 'POST'\n",
"VAR_18.POST = {'username': 'testclient', 'password': VAR_2,\n 'csrfmiddlewaretoken': VAR_22}\n",
"VAR_18.REQUEST = VAR_18.POST\n",
"SessionMiddleware().process_request(VAR_18)\n",
"CsrfViewMiddleware().process_view(VAR_18, login_view, (), {})\n",
"VAR_18.META['SERVER_NAME'] = 'testserver'\n",
"VAR_18.META['SERVER_PORT'] = 80\n",
"VAR_18.META['CSRF_COOKIE_USED'] = True\n",
"VAR_19 = login_view(VAR_18)\n",
"VAR_20 = CsrfViewMiddleware().process_response(VAR_18, VAR_19)\n",
"VAR_21 = VAR_20.cookies.get(VAR_23.CSRF_COOKIE_NAME, None)\n",
"VAR_24 = VAR_21.coded_value\n",
"self.assertNotEqual(VAR_22, VAR_24)\n"
] | [
"def test_login_csrf_rotate(self, password='password'):...\n",
"\"\"\"docstring\"\"\"\n",
"req = HttpRequest()\n",
"CsrfViewMiddleware().process_view(req, login_view, (), {})\n",
"req.META['CSRF_COOKIE_USED'] = True\n",
"resp = login_view(req)\n",
"resp2 = CsrfViewMiddleware().process_response(req, resp)\n",
"csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)\n",
"token1 = csrf_cookie.coded_value\n",
"req = HttpRequest()\n",
"req.COOKIES[settings.CSRF_COOKIE_NAME] = token1\n",
"req.method = 'POST'\n",
"req.POST = {'username': 'testclient', 'password': password,\n 'csrfmiddlewaretoken': token1}\n",
"req.REQUEST = req.POST\n",
"SessionMiddleware().process_request(req)\n",
"CsrfViewMiddleware().process_view(req, login_view, (), {})\n",
"req.META['SERVER_NAME'] = 'testserver'\n",
"req.META['SERVER_PORT'] = 80\n",
"req.META['CSRF_COOKIE_USED'] = True\n",
"resp = login_view(req)\n",
"resp2 = CsrfViewMiddleware().process_response(req, resp)\n",
"csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)\n",
"token2 = csrf_cookie.coded_value\n",
"self.assertNotEqual(token1, token2)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"@VAR_2.route('/author')...\n",
"if VAR_87.check_visibility(constants.SIDEBAR_AUTHOR):\n",
"if VAR_87.get_view_property('author', 'dir') == 'desc':\n",
"abort(404)\n",
"VAR_10 = db.Authors.sort.desc()\n",
"VAR_10 = db.Authors.sort.asc()\n",
"VAR_109 = 0\n",
"VAR_109 = 1\n",
"VAR_63 = calibre_db.session.query(db.Authors, func.count(\n 'books_authors_link.book').label('count')).join(db.books_authors_link\n ).join(db.Books).filter(calibre_db.common_filters()).group_by(text(\n 'books_authors_link.author')).order_by(VAR_10).all()\n",
"VAR_107 = calibre_db.session.query(func.upper(func.substr(db.Authors.sort, \n 1, 1)).label('char')).join(db.books_authors_link).join(db.Books).filter(\n calibre_db.common_filters()).group_by(func.upper(func.substr(db.Authors\n .sort, 1, 1))).all()\n",
"VAR_108 = copy.deepcopy(VAR_63)\n",
"for entry in VAR_108:\n",
"entry.Authors.name = entry.Authors.name.replace('|', ',')\n",
"return render_title_template('list.html', VAR_63=autor_copy, folder=\n 'web.books_list', VAR_107=charlist, VAR_150=u'Authors', VAR_9=\n 'authorlist', VAR_8='author', VAR_10=order_no)\n"
] | [
"@web.route('/author')...\n",
"if current_user.check_visibility(constants.SIDEBAR_AUTHOR):\n",
"if current_user.get_view_property('author', 'dir') == 'desc':\n",
"abort(404)\n",
"order = db.Authors.sort.desc()\n",
"order = db.Authors.sort.asc()\n",
"order_no = 0\n",
"order_no = 1\n",
"entries = calibre_db.session.query(db.Authors, func.count(\n 'books_authors_link.book').label('count')).join(db.books_authors_link\n ).join(db.Books).filter(calibre_db.common_filters()).group_by(text(\n 'books_authors_link.author')).order_by(order).all()\n",
"charlist = calibre_db.session.query(func.upper(func.substr(db.Authors.sort,\n 1, 1)).label('char')).join(db.books_authors_link).join(db.Books).filter(\n calibre_db.common_filters()).group_by(func.upper(func.substr(db.Authors\n .sort, 1, 1))).all()\n",
"autor_copy = copy.deepcopy(entries)\n",
"for entry in autor_copy:\n",
"entry.Authors.name = entry.Authors.name.replace('|', ',')\n",
"return render_title_template('list.html', entries=autor_copy, folder=\n 'web.books_list', charlist=charlist, title=u'Authors', page=\n 'authorlist', data='author', order=order_no)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Return'"
] |
[
"def FUNC_6(self) ->Optional[float]:...\n",
"\"\"\"docstring\"\"\"\n",
"return os.stat(self.sourceName).st_mtime\n",
"return None\n"
] | [
"def mtime(self) ->Optional[float]:...\n",
"\"\"\"docstring\"\"\"\n",
"return os.stat(self.sourceName).st_mtime\n",
"return None\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'",
"Return'"
] |
[
"def FUNC_0(self, VAR_3, VAR_4):...\n",
"self.url = b'/_matrix/client/r0/publicRooms'\n",
"VAR_51 = self.default_config()\n",
"VAR_51['allow_public_rooms_without_auth'] = False\n",
"self.hs = self.setup_test_homeserver(VAR_51=config)\n",
"return self.hs\n"
] | [
"def make_homeserver(self, reactor, clock):...\n",
"self.url = b'/_matrix/client/r0/publicRooms'\n",
"config = self.default_config()\n",
"config['allow_public_rooms_without_auth'] = False\n",
"self.hs = self.setup_test_homeserver(config=config)\n",
"return self.hs\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"async def FUNC_22(VAR_23):...\n",
"self.assertEquals(current_context().request, 'context_12')\n",
"return {'server10': {FUNC_0(VAR_20): FetchKeyResult(get_verify_key(VAR_20),\n 100)}}\n"
] | [
"async def second_lookup_fetch(keys_to_fetch):...\n",
"self.assertEquals(current_context().request, 'context_12')\n",
"return {'server10': {get_key_id(key1): FetchKeyResult(get_verify_key(key1),\n 100)}}\n"
] | [
0,
0,
0
] | [
"AsyncFunctionDef'",
"Expr'",
"Return'"
] |
[
"def FUNC_9(self, VAR_10, VAR_23, VAR_20):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_110 = []\n",
"for VAR_18 in range(0, 10):\n",
"VAR_15 = f'{VAR_20}_{VAR_18:d}'\n",
"return [r.replace('\\r', '') for r in VAR_110]\n",
"if VAR_15 not in VAR_10:\n",
"VAR_110.append(VAR_10.get(VAR_15, ''))\n"
] | [
"def value_from_datadict(self, data, files, name):...\n",
"\"\"\"docstring\"\"\"\n",
"ret = []\n",
"for idx in range(0, 10):\n",
"fieldname = f'{name}_{idx:d}'\n",
"return [r.replace('\\r', '') for r in ret]\n",
"if fieldname not in data:\n",
"ret.append(data.get(fieldname, ''))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Assign'",
"Return'",
"Condition",
"Expr'"
] |
[
"async def FUNC_8(self):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
"async def get_resolved_ref_url(self):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
0,
0
] | [
"AsyncFunctionDef'",
"Docstring"
] |
[
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return VAR_14\n"
] | [
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return _existing_enums\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"@parameterized.named_parameters(('VariablesToFeedNone', '', 'func2', None),...\n",
"if not test.is_built_with_xla():\n",
"self.skipTest('Skipping test because XLA is not compiled in.')\n",
"VAR_14 = os.path.join(test.get_temp_dir(), 'dummy_model')\n",
"VAR_15 = self.AOTCompileDummyModel()\n",
"VAR_7 = getattr(VAR_15, VAR_7)\n",
"self.evaluate(VAR_15.var.initializer)\n",
"self.evaluate(VAR_15.write_var.initializer)\n",
"save.save(VAR_15, VAR_14, signatures={'func': func})\n",
"self.parser = saved_model_cli.create_parser()\n",
"VAR_51 = os.path.join(test.get_temp_dir(), 'aot_compile_cpu_dir/out')\n",
"VAR_11 = ['aot_compile_cpu', '--dir', VAR_14, '--tag_set', 'serve',\n '--signature_def_key', 'func', '--output_prefix', VAR_51,\n '--variables_to_feed', VAR_6, '--cpp_class', 'Generated']\n",
"if VAR_8:\n",
"VAR_11.extend(['--target_triple', VAR_8])\n",
"VAR_11 = self.parser.parse_args(VAR_11)\n",
"saved_model_cli.aot_compile_cpu(VAR_11)\n",
"self.assertRegex(str(captured_warn.call_args),\n \"Signature input key 'y'.*has been pruned while freezing the graph.\")\n",
"self.assertTrue(file_io.file_exists('{}.o'.format(VAR_51)))\n",
"self.assertTrue(file_io.file_exists('{}.h'.format(VAR_51)))\n",
"self.assertTrue(file_io.file_exists('{}_metadata.o'.format(VAR_51)))\n",
"self.assertTrue(file_io.file_exists('{}_makefile.inc'.format(VAR_51)))\n",
"VAR_52 = file_io.read_file_to_string('{}.h'.format(VAR_51))\n",
"self.assertIn('class Generated', VAR_52)\n",
"self.assertIn('arg_feed_x_data', VAR_52)\n",
"self.assertIn('result_fetch_res_data', VAR_52)\n",
"self.assertNotIn('arg_feed_y_data', VAR_52)\n",
"if VAR_6:\n",
"self.assertIn('set_var_param_my_var_data(const float', VAR_52)\n",
"if VAR_7 == VAR_15.func_write:\n",
"self.assertNotIn('set_var_param_my_var_data(float', VAR_52)\n",
"self.assertIn('set_var_param_write_var_data(float', VAR_52)\n",
"VAR_53 = file_io.read_file_to_string('{}_makefile.inc'.format(VAR_51))\n",
"self.assertNotIn('set_var_param_write_var_data(const float', VAR_52)\n",
"self.assertIn('-D_GLIBCXX_USE_CXX11_ABI=', VAR_53)\n"
] | [
"@parameterized.named_parameters(('VariablesToFeedNone', '', 'func2', None),...\n",
"if not test.is_built_with_xla():\n",
"self.skipTest('Skipping test because XLA is not compiled in.')\n",
"saved_model_dir = os.path.join(test.get_temp_dir(), 'dummy_model')\n",
"dummy_model = self.AOTCompileDummyModel()\n",
"func = getattr(dummy_model, func)\n",
"self.evaluate(dummy_model.var.initializer)\n",
"self.evaluate(dummy_model.write_var.initializer)\n",
"save.save(dummy_model, saved_model_dir, signatures={'func': func})\n",
"self.parser = saved_model_cli.create_parser()\n",
"output_prefix = os.path.join(test.get_temp_dir(), 'aot_compile_cpu_dir/out')\n",
"args = ['aot_compile_cpu', '--dir', saved_model_dir, '--tag_set', 'serve',\n '--signature_def_key', 'func', '--output_prefix', output_prefix,\n '--variables_to_feed', variables_to_feed, '--cpp_class', 'Generated']\n",
"if target_triple:\n",
"args.extend(['--target_triple', target_triple])\n",
"args = self.parser.parse_args(args)\n",
"saved_model_cli.aot_compile_cpu(args)\n",
"self.assertRegex(str(captured_warn.call_args),\n \"Signature input key 'y'.*has been pruned while freezing the graph.\")\n",
"self.assertTrue(file_io.file_exists('{}.o'.format(output_prefix)))\n",
"self.assertTrue(file_io.file_exists('{}.h'.format(output_prefix)))\n",
"self.assertTrue(file_io.file_exists('{}_metadata.o'.format(output_prefix)))\n",
"self.assertTrue(file_io.file_exists('{}_makefile.inc'.format(output_prefix)))\n",
"header_contents = file_io.read_file_to_string('{}.h'.format(output_prefix))\n",
"self.assertIn('class Generated', header_contents)\n",
"self.assertIn('arg_feed_x_data', header_contents)\n",
"self.assertIn('result_fetch_res_data', header_contents)\n",
"self.assertNotIn('arg_feed_y_data', header_contents)\n",
"if variables_to_feed:\n",
"self.assertIn('set_var_param_my_var_data(const float', header_contents)\n",
"if func == dummy_model.func_write:\n",
"self.assertNotIn('set_var_param_my_var_data(float', header_contents)\n",
"self.assertIn('set_var_param_write_var_data(float', header_contents)\n",
"makefile_contents = file_io.read_file_to_string('{}_makefile.inc'.format(\n output_prefix))\n",
"self.assertNotIn('set_var_param_write_var_data(const float', header_contents)\n",
"self.assertIn('-D_GLIBCXX_USE_CXX11_ABI=', makefile_contents)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"\"\"\"``chameleon.tales`` expressions.\"\"\"\n",
"import warnings\n",
"from ast import NodeTransformer\n",
"from ast import parse\n",
"from chameleon.astutil import Static\n",
"from chameleon.astutil import Symbol\n",
"from chameleon.codegen import template\n",
"from chameleon.tales import NotExpr\n",
"from chameleon.tales import StringExpr\n",
"from AccessControl.ZopeGuards import guarded_apply\n",
"from AccessControl.ZopeGuards import guarded_getattr\n",
"from AccessControl.ZopeGuards import guarded_getitem\n",
"from AccessControl.ZopeGuards import guarded_iter\n",
"from AccessControl.ZopeGuards import protected_inplacevar\n",
"from OFS.interfaces import ITraversable\n",
"from RestrictedPython import RestrictingNodeTransformer\n",
"from RestrictedPython.Utilities import utility_builtins\n",
"from z3c.pt import expressions\n",
"from zExceptions import NotFound\n",
"from zExceptions import Unauthorized\n",
"from zope.interface import implementer\n",
"from zope.tales.tales import ExpressionEngine\n",
"from zope.traversing.adapters import traversePathElement\n",
"from zope.traversing.interfaces import TraversalError\n",
"from .Expressions import render\n",
"from .interfaces import IZopeAwareEngine\n",
"VAR_0 = object()\n",
"VAR_1 = (AttributeError, LookupError, NameError, TypeError, ValueError,\n NotFound, Unauthorized, TraversalError)\n",
"def FUNC_0(VAR_2):...\n",
"return Static(template('obj', VAR_2=Symbol(obj), mode='eval'))\n"
] | [
"\"\"\"``chameleon.tales`` expressions.\"\"\"\n",
"import warnings\n",
"from ast import NodeTransformer\n",
"from ast import parse\n",
"from chameleon.astutil import Static\n",
"from chameleon.astutil import Symbol\n",
"from chameleon.codegen import template\n",
"from chameleon.tales import NotExpr\n",
"from chameleon.tales import StringExpr\n",
"from AccessControl.ZopeGuards import guarded_apply\n",
"from AccessControl.ZopeGuards import guarded_getattr\n",
"from AccessControl.ZopeGuards import guarded_getitem\n",
"from AccessControl.ZopeGuards import guarded_iter\n",
"from AccessControl.ZopeGuards import protected_inplacevar\n",
"from OFS.interfaces import ITraversable\n",
"from RestrictedPython import RestrictingNodeTransformer\n",
"from RestrictedPython.Utilities import utility_builtins\n",
"from z3c.pt import expressions\n",
"from zExceptions import NotFound\n",
"from zExceptions import Unauthorized\n",
"from zope.interface import implementer\n",
"from zope.tales.tales import ExpressionEngine\n",
"from zope.traversing.adapters import traversePathElement\n",
"from zope.traversing.interfaces import TraversalError\n",
"from .Expressions import render\n",
"from .interfaces import IZopeAwareEngine\n",
"_marker = object()\n",
"zope2_exceptions = (AttributeError, LookupError, NameError, TypeError,\n ValueError, NotFound, Unauthorized, TraversalError)\n",
"def static(obj):...\n",
"return Static(template('obj', obj=Symbol(obj), mode='eval'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"FunctionDef'",
"Return'"
] |
[
"@VAR_0.get('/ping', response_model=bool)...\n",
"\"\"\"docstring\"\"\"\n",
"return True\n"
] | [
"@app.get('/ping', response_model=bool)...\n",
"\"\"\"docstring\"\"\"\n",
"return True\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_6():...\n",
"return VAR_11\n"
] | [
"def getTrustedEngine():...\n",
"return _trusted_engine\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_20(self, VAR_1):...\n",
"VAR_5 = VAR_1.MagicMock()\n",
"VAR_26 = VAR_1.MagicMock()\n",
"VAR_23 = oai.Schema.construct(type='string')\n",
"VAR_35 = VAR_1.patch(f'{VAR_0}.StringProperty')\n",
"from openapi_python_client.parser.properties import _string_based_property\n",
"VAR_4 = VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)\n",
"VAR_35.assert_called_once_with(VAR_5=name, VAR_26=required, pattern=None,\n default=None)\n",
"assert VAR_4 == VAR_35.return_value\n",
"VAR_35.reset_mock()\n",
"VAR_23.default = VAR_1.MagicMock()\n",
"VAR_23.pattern = VAR_1.MagicMock()\n",
"VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)\n",
"VAR_35.assert_called_once_with(VAR_5=name, VAR_26=required, pattern=data.\n pattern, default=data.default)\n"
] | [
"def test__string_based_property_no_format(self, mocker):...\n",
"name = mocker.MagicMock()\n",
"required = mocker.MagicMock()\n",
"data = oai.Schema.construct(type='string')\n",
"StringProperty = mocker.patch(f'{MODULE_NAME}.StringProperty')\n",
"from openapi_python_client.parser.properties import _string_based_property\n",
"p = _string_based_property(name=name, required=required, data=data)\n",
"StringProperty.assert_called_once_with(name=name, required=required,\n pattern=None, default=None)\n",
"assert p == StringProperty.return_value\n",
"StringProperty.reset_mock()\n",
"data.default = mocker.MagicMock()\n",
"data.pattern = mocker.MagicMock()\n",
"_string_based_property(name=name, required=required, data=data)\n",
"StringProperty.assert_called_once_with(name=name, required=required,\n pattern=data.pattern, default=data.default)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"ImportFrom'",
"Assign'",
"Expr'",
"Assert'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_4(self, VAR_12):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_32 = {}\n",
"if self.has_higher_quota():\n",
"VAR_32['quota'] = VAR_12.get('per_repo_quota_higher')\n",
"VAR_32['quota'] = VAR_12.get('per_repo_quota')\n",
"for item in self.spec_config:\n",
"VAR_52 = item.get('pattern', None)\n",
"return VAR_32\n",
"VAR_53 = item.get('config', None)\n",
"if not isinstance(VAR_52, str):\n",
"if not isinstance(VAR_53, dict):\n",
"if re.match(VAR_52, self.spec, re.IGNORECASE):\n",
"VAR_32.update(VAR_53)\n"
] | [
"def repo_config(self, settings):...\n",
"\"\"\"docstring\"\"\"\n",
"repo_config = {}\n",
"if self.has_higher_quota():\n",
"repo_config['quota'] = settings.get('per_repo_quota_higher')\n",
"repo_config['quota'] = settings.get('per_repo_quota')\n",
"for item in self.spec_config:\n",
"pattern = item.get('pattern', None)\n",
"return repo_config\n",
"config = item.get('config', None)\n",
"if not isinstance(pattern, str):\n",
"if not isinstance(config, dict):\n",
"if re.match(pattern, self.spec, re.IGNORECASE):\n",
"repo_config.update(config)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"For",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Expr'"
] |
[
"def FUNC_8(self, VAR_10: Text, VAR_12: Optional[Text]=None) ->None:...\n",
"import boto3\n",
"import botocore\n",
"if not VAR_12:\n",
"VAR_12 = boto3.DEFAULT_SESSION.region_name\n",
"VAR_25 = {'LocationConstraint': VAR_12}\n",
"self.s3.create_bucket(Bucket=bucket_name, CreateBucketConfiguration=\n bucket_config)\n"
] | [
"def _ensure_bucket_exists(self, bucket_name: Text, region_name: Optional[...\n",
"import boto3\n",
"import botocore\n",
"if not region_name:\n",
"region_name = boto3.DEFAULT_SESSION.region_name\n",
"bucket_config = {'LocationConstraint': region_name}\n",
"self.s3.create_bucket(Bucket=bucket_name, CreateBucketConfiguration=\n bucket_config)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Import'",
"Import'",
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_16(self, VAR_26: str, *, VAR_29: Optional[Type[Any]]=None, VAR_13:...\n",
"return self.api_route(VAR_26=path, VAR_29=response_model, VAR_13=\n status_code, VAR_30=tags, VAR_31=dependencies, VAR_32=summary, VAR_33=\n description, VAR_34=response_description, VAR_35=responses, VAR_36=\n deprecated, VAR_37=['OPTIONS'], VAR_38=operation_id, VAR_16=\n response_model_include, VAR_17=response_model_exclude, VAR_18=\n response_model_by_alias, VAR_19=response_model_exclude_unset, VAR_20=\n response_model_exclude_defaults, VAR_21=response_model_exclude_none,\n VAR_39=include_in_schema, VAR_14=response_class, VAR_28=name, VAR_40=\n callbacks)\n"
] | [
"def options(self, path: str, *, response_model: Optional[Type[Any]]=None,...\n",
"return self.api_route(path=path, response_model=response_model, status_code\n =status_code, tags=tags, dependencies=dependencies, summary=summary,\n description=description, response_description=response_description,\n responses=responses, deprecated=deprecated, methods=['OPTIONS'],\n operation_id=operation_id, response_model_include=\n response_model_include, response_model_exclude=response_model_exclude,\n response_model_by_alias=response_model_by_alias,\n response_model_exclude_unset=response_model_exclude_unset,\n response_model_exclude_defaults=response_model_exclude_defaults,\n response_model_exclude_none=response_model_exclude_none,\n include_in_schema=include_in_schema, response_class=response_class,\n name=name, callbacks=callbacks)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_38(VAR_33):...\n",
"if callable(VAR_65):\n",
"return FUNC_19(VAR_33)\n",
"VAR_34 = VAR_65()\n",
"VAR_0.exception(\n 'Error while trying to retrieve custom LastModified value for plugin {}'\n .format(VAR_8))\n",
"if VAR_34:\n",
"return VAR_34\n"
] | [
"def compute_lastmodified(files):...\n",
"if callable(custom_lastmodified):\n",
"return _compute_date(files)\n",
"lastmodified = custom_lastmodified()\n",
"_logger.exception(\n 'Error while trying to retrieve custom LastModified value for plugin {}'\n .format(key))\n",
"if lastmodified:\n",
"return lastmodified\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Expr'",
"Condition",
"Return'"
] |
[
"def FUNC_14():...\n",
"VAR_47 = f.readline()\n",
"VAR_47 = '/'.join(VAR_47.split('/')[:-2])\n",
"VAR_35 = []\n",
"for c in VAR_34:\n",
"VAR_35.append(FUNC_10(VAR_47 + '/' + c.strip()))\n",
"return VAR_35\n"
] | [
"def get_class_path():...\n",
"dataset_path = f.readline()\n",
"dataset_path = '/'.join(dataset_path.split('/')[:-2])\n",
"class_paths = []\n",
"for c in classes_list:\n",
"class_paths.append(_get_obj_absolute_path(dataset_path + '/' + c.strip()))\n",
"return class_paths\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Return'"
] |
[
"def FUNC_0(self, VAR_3, VAR_4):...\n",
"self.hs = self.setup_test_homeserver('red', http_client=None,\n federation_client=Mock())\n",
"self.hs.get_federation_handler = Mock()\n",
"self.hs.get_federation_handler.return_value.maybe_backfill = Mock(return_value\n =make_awaitable(None))\n",
"async def FUNC_76(*VAR_20, **VAR_21):...\n",
"return None\n"
] | [
"def make_homeserver(self, reactor, clock):...\n",
"self.hs = self.setup_test_homeserver('red', http_client=None,\n federation_client=Mock())\n",
"self.hs.get_federation_handler = Mock()\n",
"self.hs.get_federation_handler.return_value.maybe_backfill = Mock(return_value\n =make_awaitable(None))\n",
"async def _insert_client_ip(*args, **kwargs):...\n",
"return None\n"
] | [
0,
4,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"AsyncFunctionDef'",
"Return'"
] |
[
"@ensure_csrf_cookie...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)\n",
"VAR_131 = 'bulk_course_email'\n",
"VAR_132 = instructor_task.api.get_instructor_task_history(VAR_10, VAR_131=\n task_type)\n",
"VAR_63 = {'tasks': map(extract_task_features, VAR_132)}\n",
"return JsonResponse(VAR_63)\n"
] | [
"@ensure_csrf_cookie...\n",
"\"\"\"docstring\"\"\"\n",
"course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)\n",
"task_type = 'bulk_course_email'\n",
"tasks = instructor_task.api.get_instructor_task_history(course_id,\n task_type=task_type)\n",
"response_payload = {'tasks': map(extract_task_features, tasks)}\n",
"return JsonResponse(response_payload)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_22(self, VAR_12):...\n",
"if VAR_12.get('fields'):\n",
"VAR_13 = VAR_12.get('fields')\n",
"if VAR_12.get('columns'):\n",
"return VAR_13\n",
"VAR_13 = VAR_12.get('columns')\n",
"if VAR_12.get('fields'):\n",
"VAR_13 = VAR_12.get('fields')\n",
"VAR_13 = [['name', self.ref_doctype]]\n",
"for df in frappe.get_meta(self.ref_doctype).fields:\n",
"if df.in_list_view:\n",
"VAR_13.append([df.fieldname, self.ref_doctype])\n"
] | [
"def get_standard_report_columns(self, params):...\n",
"if params.get('fields'):\n",
"columns = params.get('fields')\n",
"if params.get('columns'):\n",
"return columns\n",
"columns = params.get('columns')\n",
"if params.get('fields'):\n",
"columns = params.get('fields')\n",
"columns = [['name', self.ref_doctype]]\n",
"for df in frappe.get_meta(self.ref_doctype).fields:\n",
"if df.in_list_view:\n",
"columns.append([df.fieldname, self.ref_doctype])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"For",
"Condition",
"Expr'"
] |
[
"def FUNC_44(self, VAR_10, VAR_11, VAR_12, VAR_13, VAR_14=None, VAR_15=None):...\n",
"VAR_45 = '#id_%s_add_link' % VAR_11\n",
"VAR_46 = '#id_%s_add_all_link' % VAR_11\n",
"VAR_47 = '#id_%s_remove_link' % VAR_11\n",
"VAR_48 = '#id_%s_remove_all_link' % VAR_11\n",
"self.assertEqual(self.has_css_class(VAR_45, 'active'), VAR_12)\n",
"self.assertEqual(self.has_css_class(VAR_47, 'active'), VAR_13)\n",
"if VAR_10 == 'horizontal':\n",
"self.assertEqual(self.has_css_class(VAR_46, 'active'), VAR_14)\n",
"self.assertEqual(self.has_css_class(VAR_48, 'active'), VAR_15)\n"
] | [
"def assertActiveButtons(self, mode, field_name, choose, remove, choose_all=...\n",
"choose_link = '#id_%s_add_link' % field_name\n",
"choose_all_link = '#id_%s_add_all_link' % field_name\n",
"remove_link = '#id_%s_remove_link' % field_name\n",
"remove_all_link = '#id_%s_remove_all_link' % field_name\n",
"self.assertEqual(self.has_css_class(choose_link, 'active'), choose)\n",
"self.assertEqual(self.has_css_class(remove_link, 'active'), remove)\n",
"if mode == 'horizontal':\n",
"self.assertEqual(self.has_css_class(choose_all_link, 'active'), choose_all)\n",
"self.assertEqual(self.has_css_class(remove_all_link, 'active'), remove_all)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'"
] |
[
"@FUNC_0...\n",
"return ReplicationDataHandler(self)\n"
] | [
"@cache_in_self...\n",
"return ReplicationDataHandler(self)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_152(VAR_213):...\n",
"if VAR_43:\n",
"VAR_213 = FUNC_151(VAR_213)\n",
"return VAR_213\n"
] | [
"def encoded_or_raw(text):...\n",
"if raw:\n",
"text = encode_header(text)\n",
"return text\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_16(self, VAR_19, VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_27 = self.get_authenticated_connection(VAR_19, VAR_2)\n",
"VAR_28 = self.is_valid_public_url(VAR_19, VAR_2)\n",
"VAR_0.debug('Is valid public URL? %s' % VAR_28)\n",
"if VAR_27 is None and VAR_28:\n",
"VAR_0.debug(\n 'OMERO.webpublic enabled, attempting to login with configuration supplied credentials.'\n )\n",
"if VAR_27 is not None:\n",
"if VAR_19 is None:\n",
"VAR_38 = VAR_27.isAnonymous()\n",
"return VAR_27\n",
"VAR_19 = settings.PUBLIC_SERVER_ID\n",
"VAR_32 = settings.PUBLIC_USER\n",
"VAR_0.debug('Is anonymous? %s' % VAR_38)\n",
"VAR_33 = settings.PUBLIC_PASSWORD\n",
"if VAR_38 and not VAR_28:\n",
"VAR_31 = settings.SECURE\n",
"if VAR_27.c is not None:\n",
"VAR_0.debug('Is SSL? %s' % VAR_31)\n",
"VAR_0.debug('Closing anonymous connection')\n",
"return None\n",
"VAR_34 = self.get_public_user_connector()\n",
"VAR_27.close(hard=False)\n",
"if VAR_34 is not None:\n",
"VAR_0.debug('Attempting to use cached OMERO.webpublic connector: %r' % VAR_34)\n",
"VAR_5 = Connector(VAR_19, VAR_31)\n",
"VAR_27 = VAR_34.join_connection(self.useragent)\n",
"VAR_27 = VAR_5.create_connection(self.useragent, VAR_32, VAR_33, is_public=\n True, VAR_29=get_client_ip(request))\n",
"if VAR_27 is not None:\n",
"VAR_2.session['connector'] = VAR_5\n",
"VAR_2.session['connector'] = VAR_34\n",
"VAR_0.debug('Attempt to use cached OMERO.web public session key failed.')\n",
"if 'active_group' in VAR_2.session:\n",
"VAR_0.debug('Attempt to use cached OMERO.web public session key successful!')\n",
"if 'user_id' in VAR_2.session:\n",
"return VAR_27\n",
"VAR_2.session.modified = True\n",
"self.set_public_user_connector(VAR_5)\n"
] | [
"def get_connection(self, server_id, request):...\n",
"\"\"\"docstring\"\"\"\n",
"connection = self.get_authenticated_connection(server_id, request)\n",
"is_valid_public_url = self.is_valid_public_url(server_id, request)\n",
"logger.debug('Is valid public URL? %s' % is_valid_public_url)\n",
"if connection is None and is_valid_public_url:\n",
"logger.debug(\n 'OMERO.webpublic enabled, attempting to login with configuration supplied credentials.'\n )\n",
"if connection is not None:\n",
"if server_id is None:\n",
"is_anonymous = connection.isAnonymous()\n",
"return connection\n",
"server_id = settings.PUBLIC_SERVER_ID\n",
"username = settings.PUBLIC_USER\n",
"logger.debug('Is anonymous? %s' % is_anonymous)\n",
"password = settings.PUBLIC_PASSWORD\n",
"if is_anonymous and not is_valid_public_url:\n",
"is_secure = settings.SECURE\n",
"if connection.c is not None:\n",
"logger.debug('Is SSL? %s' % is_secure)\n",
"logger.debug('Closing anonymous connection')\n",
"return None\n",
"public_user_connector = self.get_public_user_connector()\n",
"connection.close(hard=False)\n",
"if public_user_connector is not None:\n",
"logger.debug('Attempting to use cached OMERO.webpublic connector: %r' %\n public_user_connector)\n",
"connector = Connector(server_id, is_secure)\n",
"connection = public_user_connector.join_connection(self.useragent)\n",
"connection = connector.create_connection(self.useragent, username, password,\n is_public=True, userip=get_client_ip(request))\n",
"if connection is not None:\n",
"request.session['connector'] = connector\n",
"request.session['connector'] = public_user_connector\n",
"logger.debug('Attempt to use cached OMERO.web public session key failed.')\n",
"if 'active_group' in request.session:\n",
"logger.debug('Attempt to use cached OMERO.web public session key successful!')\n",
"if 'user_id' in request.session:\n",
"return connection\n",
"request.session.modified = True\n",
"self.set_public_user_connector(connector)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Return'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Return'",
"Assign'",
"Expr'"
] |
[
"def FUNC_159(VAR_82):...\n",
"VAR_366 = to_native(TAG['cas:serviceResponse'](VAR_82, **{'_xmlns:cas':\n 'http://www.yale.edu/tp/cas'}).xml())\n",
"return '<?xml version=\"1.0\" encoding=\"UTF-8\"?>\\n' + VAR_366\n"
] | [
"def build_response(body):...\n",
"xml_body = to_native(TAG['cas:serviceResponse'](body, **{'_xmlns:cas':\n 'http://www.yale.edu/tp/cas'}).xml())\n",
"return '<?xml version=\"1.0\" encoding=\"UTF-8\"?>\\n' + xml_body\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_16(VAR_21, VAR_17):...\n",
"VAR_57 = VAR_21.split(',')\n",
"VAR_57 = list(map(lambda it: it.strip(), VAR_57))\n",
"VAR_57 = helper.uniq(VAR_57)\n",
"return FUNC_7(VAR_57, VAR_17.tags, db.Tags, calibre_db.session, 'tags')\n"
] | [
"def edit_book_tags(tags, book):...\n",
"input_tags = tags.split(',')\n",
"input_tags = list(map(lambda it: it.strip(), input_tags))\n",
"input_tags = helper.uniq(input_tags)\n",
"return modify_database_object(input_tags, book.tags, db.Tags, calibre_db.\n session, 'tags')\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@FUNC_0...\n",
"return GroupAttestionRenewer(self)\n"
] | [
"@cache_in_self...\n",
"return GroupAttestionRenewer(self)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"@require_POST...\n",
"VAR_3 = get_object_or_404(CommentPoll.objects.unremoved(), VAR_1=pk)\n",
"if not VAR_0.user.is_authenticated:\n",
"return redirect_to_login(next=poll.get_absolute_url())\n",
"VAR_4 = PollVoteManyForm(user=request.user, VAR_3=poll, data=request.POST)\n",
"if VAR_4.is_valid():\n",
"CommentPollChoice.decrease_vote_count(VAR_3=poll, voter=request.user)\n",
"messages.error(VAR_0, utils.render_form_errors(VAR_4))\n",
"VAR_4.save_m2m()\n",
"return redirect(VAR_0.POST.get('next', VAR_3.get_absolute_url()))\n",
"CommentPollChoice.increase_vote_count(VAR_3=poll, voter=request.user)\n",
"return redirect(VAR_0.POST.get('next', VAR_3.get_absolute_url()))\n"
] | [
"@require_POST...\n",
"poll = get_object_or_404(CommentPoll.objects.unremoved(), pk=pk)\n",
"if not request.user.is_authenticated:\n",
"return redirect_to_login(next=poll.get_absolute_url())\n",
"form = PollVoteManyForm(user=request.user, poll=poll, data=request.POST)\n",
"if form.is_valid():\n",
"CommentPollChoice.decrease_vote_count(poll=poll, voter=request.user)\n",
"messages.error(request, utils.render_form_errors(form))\n",
"form.save_m2m()\n",
"return redirect(request.POST.get('next', poll.get_absolute_url()))\n",
"CommentPollChoice.increase_vote_count(poll=poll, voter=request.user)\n",
"return redirect(request.POST.get('next', poll.get_absolute_url()))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
4
] | [
"Condition",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Return'",
"Expr'",
"Return'"
] |
[
"async def FUNC_18(self, VAR_28, VAR_29=None):...\n",
"VAR_33 = AsyncHTTPClient()\n",
"VAR_46 = {}\n",
"if self.client_id and self.client_secret:\n",
"VAR_46.update(dict(auth_username=self.client_id, auth_password=self.\n client_secret))\n",
"VAR_47 = {}\n",
"if self.access_token:\n",
"VAR_47['Authorization'] = 'token {token}'.format(token=self.access_token)\n",
"if VAR_29:\n",
"VAR_47['If-None-Match'] = VAR_29\n",
"VAR_34 = HTTPRequest(VAR_28, VAR_47=headers, user_agent='BinderHub', **\n request_kwargs)\n",
"VAR_41 = await VAR_33.fetch(VAR_34)\n",
"if e.code == 304:\n",
"if 'x-ratelimit-remaining' in VAR_41.headers:\n",
"VAR_41 = e.response\n",
"if e.code == 403 and e.response and 'x-ratelimit-remaining' in e.response.headers and e.response.headers.get(\n",
"VAR_59 = int(VAR_41.headers['x-ratelimit-remaining'])\n",
"return VAR_41\n",
"VAR_60 = e.response.headers['x-ratelimit-limit']\n",
"if e.code in (404, 422):\n",
"VAR_60 = int(VAR_41.headers['x-ratelimit-limit'])\n",
"VAR_61 = int(e.response.headers['x-ratelimit-reset'])\n",
"return None\n",
"VAR_61 = int(VAR_41.headers['x-ratelimit-reset'])\n",
"VAR_69 = int(VAR_61 - time.time())\n",
"VAR_0.set(VAR_59)\n",
"self.log.error('GitHub Rate limit ({limit}) exceeded. Reset in {delta}.'.\n format(limit=rate_limit, VAR_63=timedelta(seconds=reset_seconds)))\n",
"VAR_62 = VAR_59 / VAR_60\n",
"VAR_70 = 5 * (1 + VAR_69 // 60 // 5)\n",
"if VAR_62 < 0.2:\n",
"VAR_68 = self.log.warning\n",
"if VAR_62 < 0.5:\n",
"VAR_63 = timedelta(seconds=int(reset_timestamp - time.time()))\n",
"VAR_68 = self.log.info\n",
"VAR_68 = self.log.debug\n",
"VAR_68('GitHub rate limit remaining {remaining}/{limit}. Reset in {delta}.'\n .format(VAR_59=remaining, limit=rate_limit, VAR_63=delta))\n"
] | [
"async def github_api_request(self, api_url, etag=None):...\n",
"client = AsyncHTTPClient()\n",
"request_kwargs = {}\n",
"if self.client_id and self.client_secret:\n",
"request_kwargs.update(dict(auth_username=self.client_id, auth_password=self\n .client_secret))\n",
"headers = {}\n",
"if self.access_token:\n",
"headers['Authorization'] = 'token {token}'.format(token=self.access_token)\n",
"if etag:\n",
"headers['If-None-Match'] = etag\n",
"req = HTTPRequest(api_url, headers=headers, user_agent='BinderHub', **\n request_kwargs)\n",
"resp = await client.fetch(req)\n",
"if e.code == 304:\n",
"if 'x-ratelimit-remaining' in resp.headers:\n",
"resp = e.response\n",
"if e.code == 403 and e.response and 'x-ratelimit-remaining' in e.response.headers and e.response.headers.get(\n",
"remaining = int(resp.headers['x-ratelimit-remaining'])\n",
"return resp\n",
"rate_limit = e.response.headers['x-ratelimit-limit']\n",
"if e.code in (404, 422):\n",
"rate_limit = int(resp.headers['x-ratelimit-limit'])\n",
"reset_timestamp = int(e.response.headers['x-ratelimit-reset'])\n",
"return None\n",
"reset_timestamp = int(resp.headers['x-ratelimit-reset'])\n",
"reset_seconds = int(reset_timestamp - time.time())\n",
"GITHUB_RATE_LIMIT.set(remaining)\n",
"self.log.error('GitHub Rate limit ({limit}) exceeded. Reset in {delta}.'.\n format(limit=rate_limit, delta=timedelta(seconds=reset_seconds)))\n",
"fraction = remaining / rate_limit\n",
"minutes_until_reset = 5 * (1 + reset_seconds // 60 // 5)\n",
"if fraction < 0.2:\n",
"log = self.log.warning\n",
"if fraction < 0.5:\n",
"delta = timedelta(seconds=int(reset_timestamp - time.time()))\n",
"log = self.log.info\n",
"log = self.log.debug\n",
"log('GitHub rate limit remaining {remaining}/{limit}. Reset in {delta}.'.\n format(remaining=remaining, limit=rate_limit, delta=delta))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_42():...\n",
"VAR_129 = make_response(render_template('index.jinja2', **render_kwargs))\n",
"if VAR_107:\n",
"VAR_129 = util.flask.add_non_caching_response_headers(VAR_129)\n",
"return VAR_129\n"
] | [
"def make_default_ui():...\n",
"r = make_response(render_template('index.jinja2', **render_kwargs))\n",
"if wizard:\n",
"r = util.flask.add_non_caching_response_headers(r)\n",
"return r\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_19(self, VAR_17):...\n",
"FUNC_0()\n",
"VAR_68 = '../res/img/folder.png'\n",
"log.i('Fetching album art for: %s' % VAR_17)\n",
"VAR_69 = os.path.join(cherry.config['media.basedir'], VAR_17)\n",
"if os.path.isfile(VAR_69):\n",
"VAR_99 = TinyTag.get(VAR_69, image=True)\n",
"VAR_66 = albumArtFilePath(VAR_17)\n",
"VAR_100 = VAR_99.get_image()\n",
"VAR_70 = self.albumartcache_load(VAR_66)\n",
"if VAR_100:\n",
"if VAR_70:\n",
"log.d('Image found in tag.')\n",
"VAR_17 = os.path.dirname(VAR_17)\n",
"VAR_53.response.headers['Content-Length'] = len(VAR_70)\n",
"VAR_64 = albumartfetcher.AlbumArtFetcher()\n",
"VAR_67 = {'Content-Type': 'image/jpg', 'Content-Length': len(VAR_100)}\n",
"return VAR_70\n",
"VAR_71 = os.path.join(cherry.config['media.basedir'], VAR_17)\n",
"VAR_53.response.headers.update(VAR_67)\n",
"VAR_67, VAR_20, VAR_72 = VAR_64.fetchLocal(VAR_71)\n",
"return VAR_100\n",
"if VAR_67:\n",
"if VAR_72:\n",
"if cherry.config['media.fetch_album_art']:\n",
"self.albumartcache_save(VAR_66, VAR_20)\n",
"VAR_53.response.headers.update(VAR_67)\n",
"VAR_123 = os.path.basename(VAR_17)\n",
"return VAR_20\n",
"VAR_124 = VAR_123\n",
"log.i(_('Fetching album art for keywords {keywords!r}').format(VAR_124=\n keywords))\n",
"VAR_67, VAR_20 = VAR_64.fetch(VAR_124)\n",
"if VAR_67:\n",
"VAR_53.response.headers.update(VAR_67)\n",
"self.albumartcache_save(VAR_66, VAR_20)\n",
"return VAR_20\n"
] | [
"def api_fetchalbumart(self, directory):...\n",
"_save_and_release_session()\n",
"default_folder_image = '../res/img/folder.png'\n",
"log.i('Fetching album art for: %s' % directory)\n",
"filepath = os.path.join(cherry.config['media.basedir'], directory)\n",
"if os.path.isfile(filepath):\n",
"tag = TinyTag.get(filepath, image=True)\n",
"b64imgpath = albumArtFilePath(directory)\n",
"image_data = tag.get_image()\n",
"img_data = self.albumartcache_load(b64imgpath)\n",
"if image_data:\n",
"if img_data:\n",
"log.d('Image found in tag.')\n",
"directory = os.path.dirname(directory)\n",
"cherrypy.response.headers['Content-Length'] = len(img_data)\n",
"fetcher = albumartfetcher.AlbumArtFetcher()\n",
"header = {'Content-Type': 'image/jpg', 'Content-Length': len(image_data)}\n",
"return img_data\n",
"localpath = os.path.join(cherry.config['media.basedir'], directory)\n",
"cherrypy.response.headers.update(header)\n",
"header, data, resized = fetcher.fetchLocal(localpath)\n",
"return image_data\n",
"if header:\n",
"if resized:\n",
"if cherry.config['media.fetch_album_art']:\n",
"self.albumartcache_save(b64imgpath, data)\n",
"cherrypy.response.headers.update(header)\n",
"foldername = os.path.basename(directory)\n",
"return data\n",
"keywords = foldername\n",
"log.i(_('Fetching album art for keywords {keywords!r}').format(keywords=\n keywords))\n",
"header, data = fetcher.fetch(keywords)\n",
"if header:\n",
"cherrypy.response.headers.update(header)\n",
"self.albumartcache_save(b64imgpath, data)\n",
"return data\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Expr'",
"Assign'",
"Return'",
"Condition",
"Condition",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Return'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_20(self):...\n",
"self.assertFormfield(models.Album, 'backside_art', widgets.AdminFileWidget)\n"
] | [
"def testInheritance(self):...\n",
"self.assertFormfield(models.Album, 'backside_art', widgets.AdminFileWidget)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |