lines: sequence, lengths 1–444
raw_lines: sequence, lengths 1–444
label: sequence, lengths 1–444
type: sequence, lengths 1–444
[ "@login_required(redirect_field_name='', login_url='/403')...\n", "\"\"\"docstring\"\"\"\n", "VAR_11 = forms.BatchActionsForm(VAR_6.POST)\n", "if not VAR_11.is_valid():\n", "return HttpResponseBadRequest(VAR_11.errors.as_json())\n", "VAR_2 = get_object_or_404(Locale, code=form.cleaned_data['locale'])\n", "VAR_12 = Entity.objects.filter(pk__in=form.cleaned_data['entities'])\n", "if not VAR_12.exists():\n", "return JsonResponse({'count': 0})\n", "VAR_13 = VAR_12.values_list('resource__project__pk', flat=True)\n", "VAR_7 = Project.objects.filter(pk__in=projects_pk.distinct())\n", "for VAR_5 in VAR_7:\n", "if not VAR_6.user.can_translate(VAR_5=project, VAR_2=locale\n", "VAR_14 = Translation.objects.filter(active=True, VAR_2=locale, entity__in=\n entities)\n", "return HttpResponseForbidden(\n \"Forbidden: You don't have permission for batch editing\")\n", "VAR_15 = ACTIONS_FN_MAP[VAR_11.cleaned_data['action']]\n", "VAR_16 = VAR_15(VAR_11, VAR_6.user, VAR_14, VAR_2)\n", "if VAR_16.get('error'):\n", "return JsonResponse(VAR_16)\n", "VAR_17 = len(VAR_16.get('invalid_translation_pks', []))\n", "if VAR_16['count'] == 0:\n", "return JsonResponse({'count': 0, 'invalid_translation_count': VAR_17})\n", "FUNC_0(VAR_16['translated_resources'], VAR_2)\n", "FUNC_1(VAR_16['changed_entities'], VAR_2)\n", "if VAR_16['latest_translation_pk']:\n", "Translation.objects.get(pk=action_status['latest_translation_pk']\n ).update_latest_translation()\n", "FUNC_2(VAR_16['changed_translation_pks'], VAR_5, VAR_2)\n", "return JsonResponse({'count': VAR_16['count'], 'invalid_translation_count':\n VAR_17})\n" ]
[ "@login_required(redirect_field_name='', login_url='/403')...\n", "\"\"\"docstring\"\"\"\n", "form = forms.BatchActionsForm(request.POST)\n", "if not form.is_valid():\n", "return HttpResponseBadRequest(form.errors.as_json())\n", "locale = get_object_or_404(Locale, code=form.cleaned_data['locale'])\n", "entities = Entity.objects.filter(pk__in=form.cleaned_data['entities'])\n", "if not entities.exists():\n", "return JsonResponse({'count': 0})\n", "projects_pk = entities.values_list('resource__project__pk', flat=True)\n", "projects = Project.objects.filter(pk__in=projects_pk.distinct())\n", "for project in projects:\n", "if not request.user.can_translate(project=project, locale=locale\n", "active_translations = Translation.objects.filter(active=True, locale=locale,\n entity__in=entities)\n", "return HttpResponseForbidden(\n \"Forbidden: You don't have permission for batch editing\")\n", "action_function = ACTIONS_FN_MAP[form.cleaned_data['action']]\n", "action_status = action_function(form, request.user, active_translations, locale\n )\n", "if action_status.get('error'):\n", "return JsonResponse(action_status)\n", "invalid_translation_count = len(action_status.get('invalid_translation_pks',\n []))\n", "if action_status['count'] == 0:\n", "return JsonResponse({'count': 0, 'invalid_translation_count':\n invalid_translation_count})\n", "update_stats(action_status['translated_resources'], locale)\n", "mark_changed_translation(action_status['changed_entities'], locale)\n", "if action_status['latest_translation_pk']:\n", "Translation.objects.get(pk=action_status['latest_translation_pk']\n ).update_latest_translation()\n", "update_translation_memory(action_status['changed_translation_pks'], project,\n locale)\n", "return JsonResponse({'count': action_status['count'],\n 'invalid_translation_count': invalid_translation_count})\n" ]
[ 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "For", "Condition", "Assign'", "Return'", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Return'", "Expr'", "Expr'", "Condition", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_0(self, VAR_1):...\n", "VAR_2 = VAR_1.query_params.get('item')\n", "VAR_3 = VAR_1.query_params.get('city')\n", "if not VAR_2 and not VAR_3:\n", "return Response(VAR_0)\n", "VAR_4 = 'string'\n", "if VAR_2 and VAR_3:\n", "VAR_4 = \"{} WHERE city = '{}' and title = '{}'\".format(VAR_4, VAR_3, VAR_2)\n", "if VAR_2:\n", "c.execute(VAR_4)\n", "VAR_4 = \"{} WHERE title = '{}'\".format(VAR_4, VAR_2)\n", "if VAR_3:\n", "VAR_5, VAR_6 = c.fetchone()\n", "VAR_4 = \"{} WHERE city = '{}'\".format(VAR_4, VAR_3)\n", "if VAR_6 == 0:\n", "return Response(VAR_0)\n", "return Response({'status': 200, 'content': {'item': VAR_2 or\n 'Not specified', 'item_count': VAR_6, 'price_suggestion': VAR_5, 'city':\n VAR_3 or 'Not specified'}})\n" ]
[ "def get(self, request):...\n", "item = request.query_params.get('item')\n", "city = request.query_params.get('city')\n", "if not item and not city:\n", "return Response(NOT_FOUND_JSON_RESPONSE)\n", "sql = \"\"\"SELECT\n mode() WITHIN GROUP (ORDER BY list_price DESC) AS model_value,\n count(*)\n FROM\n \"itemPrices_itemsale\"\n \"\"\"\n", "if item and city:\n", "sql = \"{} WHERE city = '{}' and title = '{}'\".format(sql, city, item)\n", "if item:\n", "c.execute(sql)\n", "sql = \"{} WHERE title = '{}'\".format(sql, item)\n", "if city:\n", "price_mode, count = c.fetchone()\n", "sql = \"{} WHERE city = '{}'\".format(sql, city)\n", "if count == 0:\n", "return Response(NOT_FOUND_JSON_RESPONSE)\n", "return Response({'status': 200, 'content': {'item': item or 'Not specified',\n 'item_count': count, 'price_suggestion': price_mode, 'city': city or\n 'Not specified'}})\n" ]
[ 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_30(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = {'SEVERITY': {'LOW': 2, 'MEDIUM': 6}, 'CONFIDENCE': {'HIGH': 8}}\n", "self.check_example('pickle_deserialize.py', VAR_2)\n" ]
[ "def test_pickle(self):...\n", "\"\"\"docstring\"\"\"\n", "expect = {'SEVERITY': {'LOW': 2, 'MEDIUM': 6}, 'CONFIDENCE': {'HIGH': 8}}\n", "self.check_example('pickle_deserialize.py', expect)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'" ]
[ "def FUNC_4(self, VAR_7=None, VAR_8=None, VAR_4=None, VAR_9=None, VAR_10=...\n", "if not VAR_18 and not frappe.has_permission(self.doctype, 'read', VAR_19=user):\n", "frappe.flags.error_message = _('Insufficient Permission for {0}').format(frappe\n .bold(self.doctype))\n", "if isinstance(VAR_8, dict) or isinstance(VAR_8, list) and VAR_8 and isinstance(\n", "VAR_4, VAR_8 = VAR_8, VAR_4\n", "if VAR_8 and isinstance(VAR_4, list) and len(VAR_4) > 1 and isinstance(VAR_4\n", "if VAR_8:\n", "VAR_4, VAR_8 = VAR_8, VAR_4\n", "self.fields = VAR_8\n", "self.fields = ['`tab{0}`.`name`'.format(self.doctype)]\n", "if VAR_23:\n", "VAR_13 = VAR_23\n", "if VAR_24:\n", "VAR_14 = VAR_24\n", "if VAR_25:\n", "VAR_14 = VAR_25\n", "self.filters = VAR_4 or []\n", "self.or_filters = VAR_9 or []\n", "self.docstatus = VAR_10 or []\n", "self.group_by = VAR_11\n", "self.order_by = VAR_12\n", "self.limit_start = 0 if VAR_13 is False else cint(VAR_13)\n", "self.limit_page_length = cint(VAR_14) if VAR_14 else None\n", "self.with_childnames = VAR_16\n", "self.debug = VAR_17\n", "self.join = VAR_21\n", "self.distinct = VAR_22\n", "self.as_list = VAR_15\n", "self.ignore_ifnull = VAR_26\n", "self.flags.ignore_permissions = VAR_18\n", "self.user = VAR_19 or frappe.session.user\n", "self.update = VAR_29\n", "self.user_settings_fields = copy.deepcopy(self.fields)\n", "if VAR_31:\n", "self.user_settings = json.loads(VAR_31)\n", "if VAR_7:\n", "VAR_39 = self.run_custom_query(VAR_7)\n", "VAR_39 = self.build_and_run()\n", "if VAR_20 and not VAR_15 and self.doctype:\n", "self.add_comment_count(VAR_39)\n", "if VAR_27:\n", "self.save_user_settings_fields = VAR_28\n", "return VAR_39\n", "self.update_user_settings()\n" ]
[ "def execute(self, query=None, fields=None, filters=None, or_filters=None,...\n", "if not ignore_permissions and not frappe.has_permission(self.doctype,\n", "frappe.flags.error_message = _('Insufficient Permission for {0}').format(frappe\n .bold(self.doctype))\n", "if isinstance(fields, dict) or isinstance(fields, list\n", "filters, fields = fields, filters\n", "if fields and isinstance(filters, list) and len(filters) > 1 and isinstance(\n", "if fields:\n", "filters, fields = fields, filters\n", "self.fields = fields\n", "self.fields = ['`tab{0}`.`name`'.format(self.doctype)]\n", "if start:\n", "limit_start = start\n", "if page_length:\n", "limit_page_length = page_length\n", "if limit:\n", "limit_page_length = limit\n", "self.filters = filters or []\n", "self.or_filters = or_filters or []\n", "self.docstatus = docstatus or []\n", "self.group_by = group_by\n", "self.order_by = order_by\n", "self.limit_start = 0 if limit_start is False else cint(limit_start)\n", "self.limit_page_length = cint(limit_page_length) if limit_page_length else None\n", "self.with_childnames = with_childnames\n", "self.debug = debug\n", "self.join = join\n", "self.distinct = distinct\n", "self.as_list = as_list\n", "self.ignore_ifnull = ignore_ifnull\n", "self.flags.ignore_permissions = ignore_permissions\n", "self.user = user or frappe.session.user\n", "self.update = update\n", "self.user_settings_fields = copy.deepcopy(self.fields)\n", "if user_settings:\n", "self.user_settings = json.loads(user_settings)\n", "if query:\n", "result = self.run_custom_query(query)\n", "result = self.build_and_run()\n", "if with_comment_count and not as_list and self.doctype:\n", "self.add_comment_count(result)\n", "if save_user_settings:\n", "self.save_user_settings_fields = save_user_settings_fields\n", "return result\n", "self.update_user_settings()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Assign'", "Return'", "Expr'" ]
[ "def FUNC_23(self, VAR_16, VAR_22):...\n", "\"\"\"docstring\"\"\"\n", "self._eql_execute('volume', 'select', VAR_16['name'], 'size', '%sG' % VAR_22)\n", "VAR_0.error(_(\n 'Failed to extend_volume %(name)s from %(current_size)sGB to %(new_size)sGB'\n ), {'name': VAR_16['name'], 'current_size': VAR_16['size'], 'new_size':\n VAR_22})\n" ]
[ "def extend_volume(self, volume, new_size):...\n", "\"\"\"docstring\"\"\"\n", "self._eql_execute('volume', 'select', volume['name'], 'size', '%sG' % new_size)\n", "LOG.error(_(\n 'Failed to extend_volume %(name)s from %(current_size)sGB to %(new_size)sGB'\n ), {'name': volume['name'], 'current_size': volume['size'], 'new_size':\n new_size})\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Expr'" ]
[ "def FUNC_37(VAR_7):...\n", "VAR_7 = VAR_7 + '/log'\n", "VAR_14, VAR_45 = FUNC_5(VAR_7)\n", "VAR_91 = '2015-03-07'\n", "if 'created_at' not in VAR_14:\n", "return VAR_91\n", "VAR_92 = str(VAR_14).index('created_at')\n", "VAR_14 = VAR_14[VAR_92:]\n", "VAR_93 = 'created_at\":\"'\n", "VAR_94 = len(VAR_93)\n", "VAR_95 = len(VAR_91) + VAR_94\n", "VAR_23 = VAR_14[VAR_94:VAR_95]\n", "VAR_63 = VAR_23.split('-')[0]\n", "VAR_64 = VAR_23.split('-')[1]\n", "VAR_65 = VAR_23.split('-')[2]\n", "return VAR_23\n" ]
[ "def get_date(url):...\n", "url = url + '/log'\n", "bracket, status = hit_url(url)\n", "s2 = '2015-03-07'\n", "if 'created_at' not in bracket:\n", "return s2\n", "first_occurance = str(bracket).index('created_at')\n", "bracket = bracket[first_occurance:]\n", "s = 'created_at\":\"'\n", "i = len(s)\n", "i2 = len(s2) + i\n", "date = bracket[i:i2]\n", "y = date.split('-')[0]\n", "m = date.split('-')[1]\n", "d = date.split('-')[2]\n", "return date\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_22(VAR_30, VAR_3):...\n", "if not isinstance(VAR_30, _IOFile):\n", "return IOFile(VAR_30, VAR_16=self)\n", "return VAR_30.apply_wildcards(VAR_3, fill_missing=f in self.dynamic_input,\n fail_dynamic=self.dynamic_output)\n" ]
[ "def concretize_iofile(f, wildcards):...\n", "if not isinstance(f, _IOFile):\n", "return IOFile(f, rule=self)\n", "return f.apply_wildcards(wildcards, fill_missing=f in self.dynamic_input,\n fail_dynamic=self.dynamic_output)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "@handler('unregistered', VAR_0='*')...\n", "if isinstance(VAR_5, BaseController) and VAR_5.channel in self.paths:\n" ]
[ "@handler('unregistered', channel='*')...\n", "if isinstance(component, BaseController) and component.channel in self.paths:\n" ]
[ 0, 0 ]
[ "Condition", "Condition" ]
[ "@property...\n", "if self._snakefile is None:\n", "return os.path.abspath(os.path.join(self.workdir, 'Snakefile'))\n", "if not os.path.isabs(self._snakefile):\n", "return os.path.abspath(os.path.join(self.workflow.basedir, self._snakefile))\n", "return self._snakefile\n" ]
[ "@property...\n", "if self._snakefile is None:\n", "return os.path.abspath(os.path.join(self.workdir, 'Snakefile'))\n", "if not os.path.isabs(self._snakefile):\n", "return os.path.abspath(os.path.join(self.workflow.basedir, self._snakefile))\n", "return self._snakefile\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_3(VAR_7):...\n", "if VAR_7:\n", "VAR_25 = c_onboarding_status(use_secondary=True).find_one({onboarding.\n USER_ID: VAR_7}) or {}\n", "return 0\n", "if VAR_25:\n", "VAR_26 = [VAR_25.get(key, False) for key in FUNC_2()]\n", "return round(len(filter(lambda x: x, VAR_26)) / float(len(VAR_26)) * 100, 0)\n" ]
[ "def get_onboarding_percentage(user_id):...\n", "if user_id:\n", "status = c_onboarding_status(use_secondary=True).find_one({onboarding.\n USER_ID: user_id}) or {}\n", "return 0\n", "if status:\n", "steps = [status.get(key, False) for key in get_onboarding_steps()]\n", "return round(len(filter(lambda x: x, steps)) / float(len(steps)) * 100, 0)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Return'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_11(self):...\n", "return 'FixedString({})'.format(self.length)\n" ]
[ "def for_schema(self):...\n", "return 'FixedString({})'.format(self.length)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_22(self, *VAR_69, **VAR_70):...\n", "def FUNC_39(VAR_101):...\n", "VAR_101.input = VAR_69, VAR_70\n", "return VAR_101\n" ]
[ "def input(self, *paths, **kwpaths):...\n", "def decorate(ruleinfo):...\n", "ruleinfo.input = paths, kwpaths\n", "return ruleinfo\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_2(self):...\n", "self.assertTrue({'name': 'DocType', 'issingle': 0} in DatabaseQuery(\n 'DocType').execute(fields=['name', 'issingle'], limit_page_length=None))\n" ]
[ "def test_fields(self):...\n", "self.assertTrue({'name': 'DocType', 'issingle': 0} in DatabaseQuery(\n 'DocType').execute(fields=['name', 'issingle'], limit_page_length=None))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "VAR_46 = None\n", "if self.xsrf_token_header:\n", "VAR_46 = self.request.headers.get(self.xsrf_token_header)\n", "if not VAR_46 and self.xsrf_token_request_param:\n", "VAR_53 = self.request.get_all(self.xsrf_token_request_param)\n", "return VAR_46\n", "VAR_46 = VAR_53[0] if VAR_53 else None\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "token = None\n", "if self.xsrf_token_header:\n", "token = self.request.headers.get(self.xsrf_token_header)\n", "if not token and self.xsrf_token_request_param:\n", "param = self.request.get_all(self.xsrf_token_request_param)\n", "return token\n", "token = param[0] if param else None\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Return'", "Assign'" ]
[ "@property...\n", "VAR_4 = self.request.session.get('oidc_login_next', None)\n", "return VAR_4 or import_from_settings('LOGIN_REDIRECT_URL', '/')\n" ]
[ "@property...\n", "next_url = self.request.session.get('oidc_login_next', None)\n", "return next_url or import_from_settings('LOGIN_REDIRECT_URL', '/')\n" ]
[ 0, 0, 0 ]
[ "Condition", "Assign'", "Return'" ]
[ "def FUNC_10(self):...\n", "\"\"\"docstring\"\"\"\n", "import soscollector.hosts\n", "VAR_4 = soscollector.hosts\n", "VAR_19 = {}\n", "VAR_20 = self._load_modules(VAR_4, 'hosts')\n", "for VAR_24 in VAR_20:\n", "VAR_19[VAR_24[0]] = VAR_24[1]\n", "return VAR_19\n" ]
[ "def _load_supported_hosts(self):...\n", "\"\"\"docstring\"\"\"\n", "import soscollector.hosts\n", "package = soscollector.hosts\n", "supported_hosts = {}\n", "hosts = self._load_modules(package, 'hosts')\n", "for host in hosts:\n", "supported_hosts[host[0]] = host[1]\n", "return supported_hosts\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Import'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Return'" ]
[ "def FUNC_23(self):...\n", "VAR_0 = [SleepCheck(0.5) for i in range(3)]\n", "self.set_max_jobs(len(VAR_0))\n", "self.runall(VAR_0)\n", "self.assertEqual(len(VAR_0), self.runner.stats.num_cases())\n", "self.assertEqual(0, len(self.runner.stats.failures()))\n", "self.assertEqual(len(VAR_0), max(self.monitor.num_tasks))\n", "self.assertEqual(len(VAR_0), self.monitor.num_tasks[len(VAR_0)])\n", "self.read_timestamps(self.monitor.tasks)\n", "if self.begin_stamps[-1] > self.end_stamps[0]:\n", "self.skipTest('the system seems too much loaded.')\n" ]
[ "def test_concurrency_unlimited(self):...\n", "checks = [SleepCheck(0.5) for i in range(3)]\n", "self.set_max_jobs(len(checks))\n", "self.runall(checks)\n", "self.assertEqual(len(checks), self.runner.stats.num_cases())\n", "self.assertEqual(0, len(self.runner.stats.failures()))\n", "self.assertEqual(len(checks), max(self.monitor.num_tasks))\n", "self.assertEqual(len(checks), self.monitor.num_tasks[len(checks)])\n", "self.read_timestamps(self.monitor.tasks)\n", "if self.begin_stamps[-1] > self.end_stamps[0]:\n", "self.skipTest('the system seems too much loaded.')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_8(self, VAR_6):...\n", "if VAR_6 in self.clients:\n", "VAR_14 = self.clients[VAR_6]\n", "if VAR_14['status'] == self.RUNNING:\n", "VAR_0.info('Stopping LSP client for {}...'.format(VAR_6))\n", "VAR_14['status'] = self.STOPPED\n", "VAR_14['instance'].stop()\n" ]
[ "def close_client(self, language):...\n", "if language in self.clients:\n", "language_client = self.clients[language]\n", "if language_client['status'] == self.RUNNING:\n", "logger.info('Stopping LSP client for {}...'.format(language))\n", "language_client['status'] = self.STOPPED\n", "language_client['instance'].stop()\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_42(self, VAR_16):...\n", "VAR_25 = self.meta.get_field(VAR_16)\n", "VAR_46 = self.get(VAR_16)\n", "return self.cast(VAR_46, VAR_25)\n" ]
[ "def get_value(self, fieldname):...\n", "df = self.meta.get_field(fieldname)\n", "val = self.get(fieldname)\n", "return self.cast(val, df)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "@inlineCallbacks...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = self.nodes[0].overlay.metadata_store.ChannelMetadata.create_channel(\n 'test', 'bla')\n", "for _ in xrange(20):\n", "self.add_random_torrent(self.nodes[0].overlay.metadata_store.TorrentMetadata)\n", "VAR_6.commit_channel_torrent()\n", "self.nodes[0].overlay.send_random_to(Peer(self.nodes[1].my_peer.public_key,\n self.nodes[1].endpoint.wan_address))\n", "yield self.deliver_messages()\n", "self.assertEqual(len(self.nodes[1].overlay.metadata_store.ChannelMetadata.\n select()), 1)\n", "VAR_6 = self.nodes[1].overlay.metadata_store.ChannelMetadata.select()[:][0]\n", "self.assertLess(VAR_6.contents_len, 20)\n" ]
[ "@inlineCallbacks...\n", "\"\"\"docstring\"\"\"\n", "channel = self.nodes[0].overlay.metadata_store.ChannelMetadata.create_channel(\n 'test', 'bla')\n", "for _ in xrange(20):\n", "self.add_random_torrent(self.nodes[0].overlay.metadata_store.TorrentMetadata)\n", "channel.commit_channel_torrent()\n", "self.nodes[0].overlay.send_random_to(Peer(self.nodes[1].my_peer.public_key,\n self.nodes[1].endpoint.wan_address))\n", "yield self.deliver_messages()\n", "self.assertEqual(len(self.nodes[1].overlay.metadata_store.ChannelMetadata.\n select()), 1)\n", "channel = self.nodes[1].overlay.metadata_store.ChannelMetadata.select()[:][0]\n", "self.assertLess(channel.contents_len, 20)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "For", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "from django.apps import apps\n", "from clickgestion.transactions.forms import TransactionEditForm, TransactionPayForm\n", "from clickgestion.transactions.models import BaseConcept, Transaction\n", "from django.shortcuts import get_object_or_404, render, redirect, reverse\n", "from django.utils.translation import gettext, gettext_lazy\n", "from clickgestion.transactions.filters import ConceptFilter, TransactionFilter\n", "from clickgestion.core.utilities import invalid_permission_redirect\n", "from django.views.generic import ListView\n", "from django.contrib.auth.decorators import login_required\n", "from pure_pagination.mixins import PaginationMixin\n", "from django.http import HttpResponse, QueryDict\n", "from django.conf import settings\n", "from django.utils import timezone\n", "from django_xhtml2pdf.utils import generate_pdf\n", "@login_required()...\n", "VAR_5 = {}\n", "VAR_6, VAR_7 = FUNC_5(**kwargs)\n", "VAR_5['concept'] = VAR_6\n", "VAR_4 = VAR_6.transaction\n", "if VAR_4.closed:\n", "return redirect('message', message=gettext('Transaction Closed'))\n", "VAR_5['transaction'] = VAR_4\n", "VAR_5['header'] = gettext('Delete {}?'.format(VAR_6.concept_type))\n", "VAR_5['message'] = VAR_6.description_short\n", "VAR_5['next'] = VAR_0.META['HTTP_REFERER']\n", "if VAR_0.method == 'POST':\n", "VAR_24 = reverse('transaction_detail', VAR_2={'transaction_code': concept.\n transaction.code})\n", "return render(VAR_0, 'core/delete.html', VAR_5)\n", "VAR_6.delete()\n", "VAR_25 = VAR_0.POST.get('next', VAR_24)\n", "return redirect(VAR_25)\n" ]
[ "from django.apps import apps\n", "from clickgestion.transactions.forms import TransactionEditForm, TransactionPayForm\n", "from clickgestion.transactions.models import BaseConcept, Transaction\n", "from django.shortcuts import get_object_or_404, render, redirect, reverse\n", "from django.utils.translation import gettext, gettext_lazy\n", "from clickgestion.transactions.filters import ConceptFilter, TransactionFilter\n", "from clickgestion.core.utilities import invalid_permission_redirect\n", "from django.views.generic import ListView\n", "from django.contrib.auth.decorators import login_required\n", "from pure_pagination.mixins import PaginationMixin\n", "from django.http import HttpResponse, QueryDict\n", "from django.conf import settings\n", "from django.utils import timezone\n", "from django_xhtml2pdf.utils import generate_pdf\n", "@login_required()...\n", "extra_context = {}\n", "concept, concept_form = get_concept_and_form_from_kwargs(**kwargs)\n", "extra_context['concept'] = concept\n", "transaction = concept.transaction\n", "if transaction.closed:\n", "return redirect('message', message=gettext('Transaction Closed'))\n", "extra_context['transaction'] = transaction\n", "extra_context['header'] = gettext('Delete {}?'.format(concept.concept_type))\n", "extra_context['message'] = concept.description_short\n", "extra_context['next'] = request.META['HTTP_REFERER']\n", "if request.method == 'POST':\n", "default_next = reverse('transaction_detail', kwargs={'transaction_code':\n concept.transaction.code})\n", "return render(request, 'core/delete.html', extra_context)\n", "concept.delete()\n", "next_page = request.POST.get('next', default_next)\n", "return redirect(next_page)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Return'", "Expr'", "Assign'", "Return'" ]
[ "def __eq__(self, VAR_17):...\n", "return self.name == VAR_17.name\n" ]
[ "def __eq__(self, other):...\n", "return self.name == other.name\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@mock.patch('requests.post', FUNC_0)...\n", "\"\"\"docstring\"\"\"\n", "VAR_4 = '/api/apps'\n", "VAR_5 = self.client.post(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_6 = VAR_5.data['id']\n", "VAR_4 = '/api/apps/{app_id}/config'.format(**locals())\n", "VAR_7 = {'values': json.dumps({'NEW_URL1': 'http://localhost:8080/'})}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 201)\n", "self.assertIn('NEW_URL1', VAR_5.data['values'])\n", "VAR_4 = '/api/apps/{app_id}/releases'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(VAR_5.data['count'], 2)\n", "VAR_4 = '/api/apps/{app_id}/releases/v1'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "VAR_8 = VAR_5.data\n", "self.assertIn('config', VAR_5.data)\n", "self.assertIn('build', VAR_5.data)\n", "self.assertEquals(VAR_8['version'], 1)\n", "self.assertEquals(VAR_8['image'], 'deis/helloworld')\n", "VAR_4 = '/api/apps/{app_id}/releases/v2'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "VAR_9 = VAR_5.data\n", "self.assertNotEqual(VAR_8['uuid'], VAR_9['uuid'])\n", "self.assertNotEqual(VAR_8['config'], VAR_9['config'])\n", "self.assertEqual(VAR_8['build'], VAR_9['build'])\n", "self.assertEquals(VAR_9['version'], 2)\n", "VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_10 = json.dumps({'PATH': 'bin:/usr/local/bin:/usr/bin:/bin'})\n", "VAR_7 = {'image': 'autotest/example'}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 201)\n", "self.assertEqual(VAR_5.data['image'], VAR_7['image'])\n", "VAR_4 = '/api/apps/{app_id}/releases/v3'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "VAR_11 = VAR_5.data\n", "self.assertNotEqual(VAR_9['uuid'], VAR_11['uuid'])\n", "self.assertNotEqual(VAR_9['build'], VAR_11['build'])\n", "self.assertEquals(VAR_11['version'], 3)\n", "VAR_4 = '/api/apps/{app_id}/releases/v2'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "VAR_9 = VAR_5.data\n", "self.assertNotEqual(VAR_9['uuid'], VAR_11['uuid'])\n", "self.assertNotEqual(VAR_9['build'], VAR_11['build'])\n", "self.assertEquals(VAR_9['version'], 2)\n", "VAR_4 = '/api/apps/{app_id}/releases'.format(**locals())\n", "self.assertEqual(self.client.post(VAR_4).status_code, 405)\n", "self.assertEqual(self.client.put(VAR_4).status_code, 405)\n", "self.assertEqual(self.client.patch(VAR_4).status_code, 405)\n", "self.assertEqual(self.client.delete(VAR_4).status_code, 405)\n", "return VAR_11\n" ]
[ "@mock.patch('requests.post', mock_import_repository_task)...\n", "\"\"\"docstring\"\"\"\n", "url = '/api/apps'\n", "response = self.client.post(url)\n", "self.assertEqual(response.status_code, 201)\n", "app_id = response.data['id']\n", "url = '/api/apps/{app_id}/config'.format(**locals())\n", "body = {'values': json.dumps({'NEW_URL1': 'http://localhost:8080/'})}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "self.assertIn('NEW_URL1', response.data['values'])\n", "url = '/api/apps/{app_id}/releases'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(response.data['count'], 2)\n", "url = '/api/apps/{app_id}/releases/v1'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "release1 = response.data\n", "self.assertIn('config', response.data)\n", "self.assertIn('build', response.data)\n", "self.assertEquals(release1['version'], 1)\n", "self.assertEquals(release1['image'], 'deis/helloworld')\n", "url = '/api/apps/{app_id}/releases/v2'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "release2 = response.data\n", "self.assertNotEqual(release1['uuid'], release2['uuid'])\n", "self.assertNotEqual(release1['config'], release2['config'])\n", "self.assertEqual(release1['build'], release2['build'])\n", "self.assertEquals(release2['version'], 2)\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "build_config = json.dumps({'PATH': 'bin:/usr/local/bin:/usr/bin:/bin'})\n", "body = {'image': 'autotest/example'}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "self.assertEqual(response.data['image'], body['image'])\n", "url = '/api/apps/{app_id}/releases/v3'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "release3 = response.data\n", "self.assertNotEqual(release2['uuid'], release3['uuid'])\n", "self.assertNotEqual(release2['build'], release3['build'])\n", "self.assertEquals(release3['version'], 3)\n", "url = '/api/apps/{app_id}/releases/v2'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "release2 = response.data\n", "self.assertNotEqual(release2['uuid'], release3['uuid'])\n", "self.assertNotEqual(release2['build'], release3['build'])\n", "self.assertEquals(release2['version'], 2)\n", "url = '/api/apps/{app_id}/releases'.format(**locals())\n", "self.assertEqual(self.client.post(url).status_code, 405)\n", "self.assertEqual(self.client.put(url).status_code, 405)\n", "self.assertEqual(self.client.patch(url).status_code, 405)\n", "self.assertEqual(self.client.delete(url).status_code, 405)\n", "return release3\n" ]
[ 0, 0, 0, 5, 0, 0, 0, 0, 5, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 5, 5, 5, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_9(self):...\n", "VAR_14 = ['8.9.10.11', '9.10.11.12:4433']\n", "VAR_15 = self.new_mocked_cluster(VAR_14, FUNC_0, concurrent_connections=11)\n", "for ep_id, ep in VAR_15.endpoints.items():\n", "self.assertEqual(ep.pool.max_size, 11)\n" ]
[ "def test_conns_per_pool(self):...\n", "conf_managers = ['8.9.10.11', '9.10.11.12:4433']\n", "api = self.new_mocked_cluster(conf_managers, _validate_conn_up,\n concurrent_connections=11)\n", "for ep_id, ep in api.endpoints.items():\n", "self.assertEqual(ep.pool.max_size, 11)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Expr'" ]
[ "def FUNC_17(self):...\n", "\"\"\"docstring\"\"\"\n" ]
[ "def wait(self):...\n", "\"\"\"docstring\"\"\"\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Docstring" ]
[ "def __init__(self, VAR_2: dict, VAR_3: forms.Field, VAR_4=None):...\n", "VAR_13 = []\n", "self.scheme = VAR_2\n", "self.field = VAR_3\n", "for VAR_31, VAR_32, VAR_33 in self.scheme['fields']:\n", "VAR_30 = copy.copy(VAR_4) or {}\n", "super().__init__(VAR_13, VAR_4)\n", "VAR_30['data-fname'] = VAR_31\n", "VAR_13.append(self.widget(VAR_4=a))\n" ]
[ "def __init__(self, scheme: dict, field: forms.Field, attrs=None):...\n", "widgets = []\n", "self.scheme = scheme\n", "self.field = field\n", "for fname, label, size in self.scheme['fields']:\n", "a = copy.copy(attrs) or {}\n", "super().__init__(widgets, attrs)\n", "a['data-fname'] = fname\n", "widgets.append(self.widget(attrs=a))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_16(self, VAR_14):...\n", "" ]
[ "def is_datetime(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_13(self, VAR_20, VAR_21=True):...\n", "def FUNC_39(VAR_62):...\n", "return json.loads(VAR_62) if VAR_21 else VAR_62\n" ]
[ "def _render(self, request, as_json=True):...\n", "def get_str(_str):...\n", "return json.loads(_str) if as_json else _str\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Return'" ]
[ "import vim\n", "import requests\n", "import urlparse\n", "from retries import retries\n", "from requests_futures.sessions import FuturesSession\n", "from ycm.unsafe_thread_pool_executor import UnsafeThreadPoolExecutor\n", "from ycm import vimsupport\n", "from ycm.utils import ToUtf8Json\n", "from ycm.server.responses import ServerError, UnknownExtraConf\n", "VAR_0 = {'content-type': 'application/json'}\n", "VAR_1 = UnsafeThreadPoolExecutor(max_workers=30)\n", "VAR_2 = 30\n", "def __init__(self):...\n", "def FUNC_5(self):...\n", "def FUNC_6(self):...\n", "return True\n" ]
[ "import vim\n", "import requests\n", "import urlparse\n", "from retries import retries\n", "from requests_futures.sessions import FuturesSession\n", "from ycm.unsafe_thread_pool_executor import UnsafeThreadPoolExecutor\n", "from ycm import vimsupport\n", "from ycm.utils import ToUtf8Json\n", "from ycm.server.responses import ServerError, UnknownExtraConf\n", "_HEADERS = {'content-type': 'application/json'}\n", "_EXECUTOR = UnsafeThreadPoolExecutor(max_workers=30)\n", "_DEFAULT_TIMEOUT_SEC = 30\n", "def __init__(self):...\n", "def Start(self):...\n", "def Done(self):...\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "FunctionDef'", "FunctionDef'", "Return'" ]
[ "def FUNC_19(self, *VAR_18):...\n", "\"\"\"docstring\"\"\"\n", "self.order.append(list(VAR_18))\n" ]
[ "def add(self, *rulenames):...\n", "\"\"\"docstring\"\"\"\n", "self.order.append(list(rulenames))\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'" ]
[ "def FUNC_17(VAR_5):...\n", "VAR_11 = VAR_5['children']\n", "if VAR_11:\n", "return itertools.chain.from_iterable([FUNC_17(child) for child in VAR_11])\n", "return VAR_5['id'],\n" ]
[ "def get_descendant_ids(node):...\n", "children = node['children']\n", "if children:\n", "return itertools.chain.from_iterable([get_descendant_ids(child) for child in\n children])\n", "return node['id'],\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def __init__(self, *VAR_11, **VAR_12):...\n", "if 'health' not in VAR_12 and 'infohash' in VAR_12:\n", "VAR_12['health'] = VAR_0.TorrentState.get(VAR_2=kwargs['infohash']\n ) or VAR_0.TorrentState(VAR_2=kwargs['infohash'])\n", "if 'xxx' not in VAR_12:\n", "VAR_12['xxx'] = default_xxx_filter.isXXXTorrentMetadataDict(VAR_12)\n", "super(CLASS_0, self).__init__(*VAR_11, **kwargs)\n", "if 'tracker_info' in VAR_12:\n", "self.add_tracker(VAR_12['tracker_info'])\n" ]
[ "def __init__(self, *args, **kwargs):...\n", "if 'health' not in kwargs and 'infohash' in kwargs:\n", "kwargs['health'] = db.TorrentState.get(infohash=kwargs['infohash']\n ) or db.TorrentState(infohash=kwargs['infohash'])\n", "if 'xxx' not in kwargs:\n", "kwargs['xxx'] = default_xxx_filter.isXXXTorrentMetadataDict(kwargs)\n", "super(TorrentMetadata, self).__init__(*args, **kwargs)\n", "if 'tracker_info' in kwargs:\n", "self.add_tracker(kwargs['tracker_info'])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_4(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = jc.Contract()\n", "return st.OperationContract(self.agent.make_delete_app_operation(VAR_1=self\n .bindings, application=self.TEST_APP), VAR_6=contract)\n" ]
[ "def delete_app(self):...\n", "\"\"\"docstring\"\"\"\n", "contract = jc.Contract()\n", "return st.OperationContract(self.agent.make_delete_app_operation(bindings=\n self.bindings, application=self.TEST_APP), contract=contract)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_4(VAR_15, VAR_16):...\n", "VAR_33 = {}\n", "for in_file, VAR_45 in VAR_15:\n", "VAR_33[VAR_45] = FUNC_5(in_file, VAR_16)\n", "return VAR_33\n" ]
[ "def build_tree(templates, config):...\n", "res = {}\n", "for in_file, out_file in templates:\n", "res[out_file] = render_template(in_file, config)\n", "return res\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Assign'", "Return'" ]
[ "def FUNC_8(VAR_0, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "VAR_15 = pd.read_sql_table(VAR_0, db.engine, columns=[attr])\n", "print('FILL MEAN FAILED')\n", "VAR_21 = VAR_15[VAR_5].median()\n", "db.engine.execute('UPDATE \"{0}\" SET \"{1}\" = {2} WHERE \"{1}\" IS NULL'.format\n (VAR_0, VAR_5, VAR_21))\n" ]
[ "def fill_null_with_median(table_name, attr):...\n", "\"\"\"docstring\"\"\"\n", "dataframe = pd.read_sql_table(table_name, db.engine, columns=[attr])\n", "print('FILL MEAN FAILED')\n", "median = dataframe[attr].median()\n", "db.engine.execute('UPDATE \"{0}\" SET \"{1}\" = {2} WHERE \"{1}\" IS NULL'.format\n (table_name, attr, median))\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_10():...\n", "log_files.clean_log_folder(1)\n", "users.cache(100)\n", "db.connect()\n", "bot.start_bot()\n" ]
[ "def main():...\n", "log_files.clean_log_folder(1)\n", "users.cache(100)\n", "db.connect()\n", "bot.start_bot()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "@api.route('/songs/<name>')...\n", "return jsonify(VAR_0=name)\n" ]
[ "@api.route('/songs/<name>')...\n", "return jsonify(name=name)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_6(self, VAR_2):...\n", "VAR_15['XSRF-TOKEN'] = '{0:x}'.format(random.SystemRandom().getrandbits(160))\n", "VAR_2.set_cookie('XSRF-TOKEN', VAR_15['XSRF-TOKEN'])\n" ]
[ "def set_csrf_token(self, resp):...\n", "session['XSRF-TOKEN'] = '{0:x}'.format(random.SystemRandom().getrandbits(160))\n", "resp.set_cookie('XSRF-TOKEN', session['XSRF-TOKEN'])\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_16(self, VAR_27, VAR_6, VAR_28):...\n", "self.host_list.append(VAR_28)\n", "self.logger.debug('Saving component to tmp')\n", "VAR_48 = '%s/%s.yaml' % (VAR_2, VAR_6)\n", "FUNC_10(VAR_48)\n", "dump(VAR_27, outfile, default_flow_style=False)\n", "self.logger.debug('Copying component \"%s\" to remote host \"%s\"' % (VAR_6,\n VAR_28))\n", "VAR_12 = \"ssh %s 'mkdir -p %s' & scp %s %s:%s/%s.yaml\" % (VAR_28, VAR_1,\n VAR_48, VAR_28, VAR_1, VAR_6)\n", "self.logger.debug(VAR_12)\n", "FUNC_7(self.session, VAR_12)\n" ]
[ "def copy_component_to_remote(self, infile, comp, host):...\n", "self.host_list.append(host)\n", "self.logger.debug('Saving component to tmp')\n", "tmp_comp_path = '%s/%s.yaml' % (TMP_COMP_DIR, comp)\n", "ensure_dir(tmp_comp_path)\n", "dump(infile, outfile, default_flow_style=False)\n", "self.logger.debug('Copying component \"%s\" to remote host \"%s\"' % (comp, host))\n", "cmd = \"ssh %s 'mkdir -p %s' & scp %s %s:%s/%s.yaml\" % (host, TMP_SLAVE_DIR,\n tmp_comp_path, host, TMP_SLAVE_DIR, comp)\n", "self.logger.debug(cmd)\n", "send_main_session_command(self.session, cmd)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_3(self, VAR_8, VAR_9):...\n", "VAR_0.debug('Generating MySQL command for insertion/update for table c{:d}'\n .format(VAR_9))\n", "if VAR_8.id < 0:\n", "VAR_0.error('TypeError: id must be non-negative')\n", "if type(VAR_8.pubdate) != datetime:\n", "VAR_0.error('Skipping the Listing')\n", "VAR_0.error('TypeError: pubdate must be a datetime')\n", "VAR_12 = (\n 'INSERT INTO c{cat_id:d}({id:s}, {url:s}, {loc_id:s}, {title:s}, {pubdate:s}, {desc:s}'\n .format(VAR_9=cat_id, **self.FIELDS_DICT))\n", "return -1\n", "return -1\n", "VAR_13 = (\n \") VALUES ({id:d}, '{url:s}', {loc_id:d}, '{title:s}', '{pubdate:s}', '{desc:s}'\"\n .format(id=listing.id, url=listing.url, loc_id=listing.loc_id, title=\n listing.title, pubdate=listing.pubdate.strftime('%Y-%m-%d %H:%M:%S'),\n desc=listing.description))\n", "VAR_14 = [self.FIELDS_DICT['addr'], self.FIELDS_DICT['price'], self.\n FIELDS_DICT['bedrooms'], self.FIELDS_DICT['bathrooms'], self.\n FIELDS_DICT['pet_friendly'], self.FIELDS_DICT['furnished'], self.\n FIELDS_DICT['urgent'], self.FIELDS_DICT['size']]\n", "VAR_15 = [VAR_8.addr, VAR_8.price, VAR_8.bedrooms, VAR_8.bathrooms, VAR_8.\n pet_friendlly, VAR_8.furnished, VAR_8.urgent, VAR_8.size]\n", "VAR_16 = [lambda : \"'{:s}'\".format(VAR_8.addr), lambda : '{:f}'.format(\n VAR_8.price), lambda : '{:f}'.format(VAR_8.bedrooms), lambda : '{:f}'.\n format(VAR_8.bathrooms), lambda : '{:d}'.format(int(VAR_8.pet_friendlly\n )), lambda : '{:d}'.format(int(VAR_8.furnished)), lambda : '{:d}'.\n format(int(VAR_8.urgent)), lambda : '{:f}'.format(VAR_8.size)]\n", "for i in range(len(VAR_14)):\n", "if VAR_15[i] != -1:\n", "VAR_17 = VAR_12 + VAR_13 + ')'\n", "VAR_12 += ', ' + VAR_14[i]\n", "VAR_0.debug('MySQL command generation successful')\n", "VAR_13 += ', ' + VAR_16[i]()\n", "return VAR_17\n" ]
[ "def gen_sql_insert(self, listing, cat_id):...\n", "logger.debug('Generating MySQL command for insertion/update for table c{:d}'\n .format(cat_id))\n", "if listing.id < 0:\n", "logger.error('TypeError: id must be non-negative')\n", "if type(listing.pubdate) != datetime:\n", "logger.error('Skipping the Listing')\n", "logger.error('TypeError: pubdate must be a datetime')\n", "sql_cols = (\n 'INSERT INTO c{cat_id:d}({id:s}, {url:s}, {loc_id:s}, {title:s}, {pubdate:s}, {desc:s}'\n .format(cat_id=cat_id, **self.FIELDS_DICT))\n", "return -1\n", "return -1\n", "sql_vals = (\n \") VALUES ({id:d}, '{url:s}', {loc_id:d}, '{title:s}', '{pubdate:s}', '{desc:s}'\"\n .format(id=listing.id, url=listing.url, loc_id=listing.loc_id, title=\n listing.title, pubdate=listing.pubdate.strftime('%Y-%m-%d %H:%M:%S'),\n desc=listing.description))\n", "col_list = [self.FIELDS_DICT['addr'], self.FIELDS_DICT['price'], self.\n FIELDS_DICT['bedrooms'], self.FIELDS_DICT['bathrooms'], self.\n FIELDS_DICT['pet_friendly'], self.FIELDS_DICT['furnished'], self.\n FIELDS_DICT['urgent'], self.FIELDS_DICT['size']]\n", "val_list = [listing.addr, listing.price, listing.bedrooms, listing.\n bathrooms, listing.pet_friendlly, listing.furnished, listing.urgent,\n listing.size]\n", "sql_list = [lambda : \"'{:s}'\".format(listing.addr), lambda : '{:f}'.format(\n listing.price), lambda : '{:f}'.format(listing.bedrooms), lambda :\n '{:f}'.format(listing.bathrooms), lambda : '{:d}'.format(int(listing.\n pet_friendlly)), lambda : '{:d}'.format(int(listing.furnished)), lambda :\n '{:d}'.format(int(listing.urgent)), lambda : '{:f}'.format(listing.size)]\n", "for i in range(len(col_list)):\n", "if val_list[i] != -1:\n", "output = sql_cols + sql_vals + ')'\n", "sql_cols += ', ' + col_list[i]\n", "logger.debug('MySQL command generation successful')\n", "sql_vals += ', ' + sql_list[i]()\n", "return output\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 4, 4, 0, 0, 4, 0, 0, 4, 4 ]
[ "FunctionDef'", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Assign'", "Return'", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Condition", "Assign'", "AugAssign'", "Expr'", "AugAssign'", "Return'" ]
[ "@defer.inlineCallbacks...\n", "VAR_37 = os.path.join(self._leap_home, self._uuid)\n", "self.soledad = yield FUNC_0(VAR_0=soledad_test_folder, VAR_1=self._uuid)\n", "self.search_engine = SearchEngine(self.INDEX_KEY, user_home=soledad_test_folder\n )\n", "self.keymanager = mock()\n", "self.mail_sender = self._create_mail_sender()\n", "self.mail_store = SearchableMailStore(LeapMailStore(self.soledad), self.\n search_engine)\n", "self.attachment_store = LeapAttachmentStore(self.soledad)\n", "yield self._initialize_imap_account()\n", "self.draft_service = DraftService(self.mail_store)\n", "self.leap_session = mock()\n", "self.feedback_service = FeedbackService(self.leap_session)\n", "self.mail_service = self._create_mail_service(self.mail_sender, self.\n mail_store, self.search_engine, self.attachment_store)\n", "VAR_58 = yield self.mail_service.all_mails()\n", "if len(VAR_58) > 0:\n", "self.search_engine.index_mails(VAR_58)\n" ]
[ "@defer.inlineCallbacks...\n", "soledad_test_folder = os.path.join(self._leap_home, self._uuid)\n", "self.soledad = yield initialize_soledad(tempdir=soledad_test_folder, uuid=\n self._uuid)\n", "self.search_engine = SearchEngine(self.INDEX_KEY, user_home=soledad_test_folder\n )\n", "self.keymanager = mock()\n", "self.mail_sender = self._create_mail_sender()\n", "self.mail_store = SearchableMailStore(LeapMailStore(self.soledad), self.\n search_engine)\n", "self.attachment_store = LeapAttachmentStore(self.soledad)\n", "yield self._initialize_imap_account()\n", "self.draft_service = DraftService(self.mail_store)\n", "self.leap_session = mock()\n", "self.feedback_service = FeedbackService(self.leap_session)\n", "self.mail_service = self._create_mail_service(self.mail_sender, self.\n mail_store, self.search_engine, self.attachment_store)\n", "mails = yield self.mail_service.all_mails()\n", "if len(mails) > 0:\n", "self.search_engine.index_mails(mails)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'" ]
[ "def __init__(self, VAR_1=None):...\n", "super(CLASS_0, self).__init__(VAR_1)\n", "self.data_items = []\n", "self.item_load_batch = 50\n", "self.total_items = 0\n", "self.infohashes = {}\n" ]
[ "def __init__(self, parent=None):...\n", "super(RemoteTableModel, self).__init__(parent)\n", "self.data_items = []\n", "self.item_load_batch = 50\n", "self.total_items = 0\n", "self.infohashes = {}\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_2(self):...\n", "VAR_1 = '/api/apps'\n", "VAR_2 = self.client.post(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 201)\n", "VAR_3 = VAR_2.data['id']\n", "VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n", "self.assertEqual(VAR_2.status_code, 201)\n", "VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n", "VAR_2 = self.client.get(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 200)\n", "self.assertEqual(len(VAR_2.data['results']), 1)\n", "VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n", "VAR_4 = {'web': 0}\n", "VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n", "self.assertEqual(VAR_2.status_code, 204)\n", "chaos.CREATE_ERROR_RATE = 0.5\n", "VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n", "VAR_4 = {'web': 20}\n", "VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n", "self.assertEqual(VAR_2.status_code, 503)\n", "VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n", "VAR_2 = self.client.get(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 200)\n", "self.assertEqual(len(VAR_2.data['results']), 20)\n", "VAR_5 = set([c['state'] for c in VAR_2.data['results']])\n", "self.assertEqual(VAR_5, set(['error', 'created']))\n" ]
[ "def test_create_chaos(self):...\n", "url = '/api/apps'\n", "response = self.client.post(url)\n", "self.assertEqual(response.status_code, 201)\n", "app_id = response.data['id']\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "url = '/api/apps/{app_id}/containers'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 1)\n", "url = '/api/apps/{app_id}/scale'.format(**locals())\n", "body = {'web': 0}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 204)\n", "chaos.CREATE_ERROR_RATE = 0.5\n", "url = '/api/apps/{app_id}/scale'.format(**locals())\n", "body = {'web': 20}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 503)\n", "url = '/api/apps/{app_id}/containers'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 20)\n", "states = set([c['state'] for c in response.data['results']])\n", "self.assertEqual(states, set(['error', 'created']))\n" ]
[ 0, 0, 5, 0, 0, 0, 0, 5, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_5(self, VAR_8):...\n", "VAR_18 = random.randint(0, self.width)\n", "VAR_19 = random.randint(0, self.width)\n", "VAR_20 = set()\n", "while len(VAR_20) < self.keysPerNode:\n", "VAR_20.add(random.randint(0, self.keyPoolSize))\n", "self.G.add_node(VAR_8, VAR_18=x, VAR_19=y, VAR_20=keys)\n" ]
[ "def addNewNode(self, index):...\n", "x = random.randint(0, self.width)\n", "y = random.randint(0, self.width)\n", "keys = set()\n", "while len(keys) < self.keysPerNode:\n", "keys.add(random.randint(0, self.keyPoolSize))\n", "self.G.add_node(index, x=x, y=y, keys=keys)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_29():...\n", "self.assertIn('reauthn-for-termination', VAR_32)\n", "VAR_29 = VAR_32['reauthn-for-termination']\n", "VAR_30 = int(time.time())\n", "self.assertTrue(VAR_30 - VAR_29 < 5)\n" ]
[ "def _check():...\n", "self.assertIn('reauthn-for-termination', session)\n", "then = session['reauthn-for-termination']\n", "now = int(time.time())\n", "self.assertTrue(now - then < 5)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_39(VAR_101):...\n", "VAR_101.resources = VAR_76, VAR_52\n", "return VAR_101\n" ]
[ "def decorate(ruleinfo):...\n", "ruleinfo.resources = args, resources\n", "return ruleinfo\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_4(self, VAR_2, VAR_3):...\n", "" ]
[ "def _executeSQL(self, cur, sql):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "from __future__ import absolute_import, division, print_function\n", "__metaclass__ = type\n", "from ansible.inventory.group import Group\n", "from ansible.utils.vars import combine_vars, get_unique_id\n", "__all__ = ['Host']\n", "\"\"\" a single ansible host \"\"\"\n", "def __getstate__(self):...\n", "return self.serialize()\n" ]
[ "from __future__ import absolute_import, division, print_function\n", "__metaclass__ = type\n", "from ansible.inventory.group import Group\n", "from ansible.utils.vars import combine_vars, get_unique_id\n", "__all__ = ['Host']\n", "\"\"\" a single ansible host \"\"\"\n", "def __getstate__(self):...\n", "return self.serialize()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Assign'", "ImportFrom'", "ImportFrom'", "Assign'", "Expr'", "FunctionDef'", "Return'" ]
[ "def FUNC_19(self):...\n", "\"\"\"docstring\"\"\"\n", "if not (CLASS_1.smb_user and CLASS_1.smb_pass and CLASS_1.smb_ip and\n", "return False\n", "return SMBConnection(CLASS_1.smb_user, CLASS_1.smb_pass, 'odoo', 'nas')\n" ]
[ "def _get_smb_connection(self):...\n", "\"\"\"docstring\"\"\"\n", "if not (SmbConfig.smb_user and SmbConfig.smb_pass and SmbConfig.smb_ip and\n", "return False\n", "return SMBConnection(SmbConfig.smb_user, SmbConfig.smb_pass, 'odoo', 'nas')\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Return'" ]
[ "def FUNC_4(VAR_0):...\n", "VAR_0.clear_filters()\n", "for arg in request.args:\n", "VAR_8 = re.findall('_flt_(\\\\d)_(.*)', arg)\n", "if VAR_8:\n", "VAR_0.add_filter_index(VAR_8[0][1], int(VAR_8[0][0]), request.args.get(arg))\n" ]
[ "def get_filter_args(filters):...\n", "filters.clear_filters()\n", "for arg in request.args:\n", "re_match = re.findall('_flt_(\\\\d)_(.*)', arg)\n", "if re_match:\n", "filters.add_filter_index(re_match[0][1], int(re_match[0][0]), request.args.\n get(arg))\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "For", "Assign'", "Condition", "Expr'" ]
[ "def FUNC_22(VAR_14, VAR_15):...\n", "VAR_37 = VAR_14.state['sleep_streak']\n", "self.assertEqual(VAR_14.remote, self.server)\n", "if VAR_37 == 5:\n", "return False\n" ]
[ "def poll_server(botobj, _):...\n", "sleep_streak = botobj.state['sleep_streak']\n", "self.assertEqual(botobj.remote, self.server)\n", "if sleep_streak == 5:\n", "return False\n" ]
[ 0, 0, 5, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Condition", "Return'" ]
[ "from django.urls import path, include\n", "from . import views\n", "VAR_0 = [path('', views.index, name='home'), path('register/', views.\n register_form, name='register'), path('login/', views.login_form, name=\n 'login'), path('logout/', views.logout_view, name='logout'), path(\n 'confirm/', views.login_form, name='confirm_account'), path(\n 'createnewposition/', views.import_position, name='importposition'),\n path('positions/', views.positions, name='positions'), path('position/',\n views.position, name='position'), path('importapplications/', views.\n import_applications, name='importapplications')]\n" ]
[ "from django.urls import path, include\n", "from . import views\n", "urlpatterns = [path('', views.index, name='home'), path('register/', views.\n register_form, name='register'), path('login/', views.login_form, name=\n 'login'), path('logout/', views.logout_view, name='logout'), path(\n 'confirm/', views.login_form, name='confirm_account'), path(\n 'createnewposition/', views.import_position, name='importposition'),\n path('positions/', views.positions, name='positions'), path('position/',\n views.position, name='position'), path('importapplications/', views.\n import_applications, name='importapplications')]\n" ]
[ 0, 0, 6 ]
[ "ImportFrom'", "ImportFrom'", "Assign'" ]
[ "def FUNC_3(self, VAR_10, VAR_11):...\n", "\"\"\"docstring\"\"\"\n", "VAR_17 = {}\n", "if not int(VAR_11['simulated-human-interaction']):\n", "VAR_17['human'] = int(VAR_11['simulated-human-interaction'])\n", "return emit_options(VAR_17)\n" ]
[ "def translate_options(self, info, options):...\n", "\"\"\"docstring\"\"\"\n", "ret = {}\n", "if not int(options['simulated-human-interaction']):\n", "ret['human'] = int(options['simulated-human-interaction'])\n", "return emit_options(ret)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_0(VAR_0, VAR_1, VAR_2=('view_project',)):...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = models.Project.objects.get(pk=project_pk, deleting=False)\n", "return VAR_6\n", "for perm in VAR_2:\n", "if not VAR_0.user.has_perm(perm, VAR_6):\n" ]
[ "def get_and_check_project(request, project_pk, perms=('view_project',)):...\n", "\"\"\"docstring\"\"\"\n", "project = models.Project.objects.get(pk=project_pk, deleting=False)\n", "return project\n", "for perm in perms:\n", "if not request.user.has_perm(perm, project):\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'", "For", "Condition" ]
[ "def FUNC_24(self):...\n", "self.assertIsNone(handler.gae_cookie_authentication(webapp2.Request({})))\n" ]
[ "def test_non_applicable(self):...\n", "self.assertIsNone(handler.gae_cookie_authentication(webapp2.Request({})))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_9(self, VAR_8, VAR_9, VAR_10=None):...\n", "\"\"\"docstring\"\"\"\n", "return list(entry.path for entry in self.compile_classpath_entries(VAR_8,\n VAR_9, VAR_10))\n" ]
[ "def compile_classpath(self, classpath_product_key, target, extra_cp_entries...\n", "\"\"\"docstring\"\"\"\n", "return list(entry.path for entry in self.compile_classpath_entries(\n classpath_product_key, target, extra_cp_entries))\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "@utils.synchronized('3par', external=True)...\n", "" ]
[ "@utils.synchronized('3par', external=True)...\n", "" ]
[ 0, 0 ]
[ "Condition", "Condition" ]
[ "import asyncio\n", "import mistune\n", "from tortoise import fields\n", "from tortoise.query_utils import Q\n", "from arq import create_pool\n", "from config import REDIS_URL\n", "from .base import BaseModel\n", "from .mc import cache, clear_mc\n", "from .user import GithubUser\n", "from .consts import K_COMMENT, ONE_HOUR\n", "from .react import ReactMixin, ReactItem\n", "from .signals import comment_reacted\n", "from .utils import RedisSettings\n", "VAR_0 = mistune.Markdown()\n", "VAR_1 = 'comment:%s:comment_list'\n", "VAR_2 = 'comment:%s:n_comments'\n", "VAR_3 = 'react:comment_ids_liked_by:%s:%s'\n", "VAR_7 = fields.IntField()\n", "VAR_8 = fields.IntField()\n", "VAR_9 = fields.IntField(default=0)\n", "VAR_10 = K_COMMENT\n", "VAR_15 = 'comments'\n", "async def FUNC_1(self, VAR_11):...\n", "return await self.set_props_by_key('content', VAR_11)\n" ]
[ "import asyncio\n", "import mistune\n", "from tortoise import fields\n", "from tortoise.query_utils import Q\n", "from arq import create_pool\n", "from config import REDIS_URL\n", "from .base import BaseModel\n", "from .mc import cache, clear_mc\n", "from .user import GithubUser\n", "from .consts import K_COMMENT, ONE_HOUR\n", "from .react import ReactMixin, ReactItem\n", "from .signals import comment_reacted\n", "from .utils import RedisSettings\n", "markdown = mistune.Markdown()\n", "MC_KEY_COMMENT_LIST = 'comment:%s:comment_list'\n", "MC_KEY_N_COMMENTS = 'comment:%s:n_comments'\n", "MC_KEY_COMMNET_IDS_LIKED_BY_USER = 'react:comment_ids_liked_by:%s:%s'\n", "github_id = fields.IntField()\n", "post_id = fields.IntField()\n", "ref_id = fields.IntField(default=0)\n", "kind = K_COMMENT\n", "table = 'comments'\n", "async def set_content(self, content):...\n", "return await self.set_props_by_key('content', content)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "AsyncFunctionDef'", "Return'" ]
[ "def FUNC_3(self):...\n", "GenericRequest._prepare(self)\n", "VAR_15, VAR_16 = tempfile.mkstemp()\n", "self.files = list(zip(self.submission_format, self.filenames)) + [('input',\n VAR_16)]\n" ]
[ "def _prepare(self):...\n", "GenericRequest._prepare(self)\n", "_, temp_filename = tempfile.mkstemp()\n", "self.files = list(zip(self.submission_format, self.filenames)) + [('input',\n temp_filename)]\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'" ]
[ "from osv import osv\n", "from tools.translate import _\n", "VAR_0 = 'pos.close.statement'\n", "VAR_1 = 'Close Statements'\n", "def FUNC_0(self, VAR_2, VAR_3, VAR_4, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = self.pool.get('res.users').browse(VAR_2, VAR_3, VAR_3).company_id.id\n", "VAR_7 = []\n", "VAR_8 = self.pool.get('ir.model.data')\n", "VAR_9 = self.pool.get('account.bank.statement')\n", "VAR_10 = self.pool.get('account.journal')\n", "VAR_2.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % VAR_3)\n", "VAR_11 = map(lambda x1: x1[0], VAR_2.fetchall())\n", "VAR_2.execute('string' % ','.join(map(lambda x: \"'\" + str(x) + \"'\", VAR_11)))\n", "VAR_12 = map(lambda x1: x1[0], VAR_2.fetchall())\n", "for journal in VAR_10.browse(VAR_2, VAR_3, VAR_12):\n", "VAR_4 = VAR_9.search(VAR_2, VAR_3, [('state', '!=', 'confirm'), ('user_id',\n '=', VAR_3), ('journal_id', '=', journal.id)])\n", "VAR_13 = self.pool.get('ir.model.data')\n", "if not VAR_4:\n", "VAR_14 = VAR_13._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_tree')\n", "VAR_7.append(VAR_4[0])\n", "VAR_15 = VAR_13._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_form2')\n", "if not journal.check_dtls:\n", "if VAR_14:\n", "VAR_9.button_confirm_cash(VAR_2, VAR_3, VAR_4, VAR_5)\n", "VAR_14 = VAR_13.browse(VAR_2, VAR_3, VAR_14, VAR_5=context).res_id\n", "if VAR_15:\n", "VAR_15 = VAR_13.browse(VAR_2, VAR_3, VAR_15, VAR_5=context).res_id\n", "return {'domain': \"[('id','in',\" + str(VAR_7) + ')]', 'name':\n 'Close Statements', 'view_type': 'form', 'view_mode': 'tree,form',\n 'res_model': 'account.bank.statement', 'views': [(VAR_14, 'tree'), (\n VAR_15, 'form')], 'type': 'ir.actions.act_window'}\n" ]
[ "from osv import osv\n", "from tools.translate import _\n", "_name = 'pos.close.statement'\n", "_description = 'Close Statements'\n", "def close_statement(self, cr, uid, ids, context):...\n", "\"\"\"docstring\"\"\"\n", "company_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.id\n", "list_statement = []\n", "mod_obj = self.pool.get('ir.model.data')\n", "statement_obj = self.pool.get('account.bank.statement')\n", "journal_obj = self.pool.get('account.journal')\n", "cr.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % uid)\n", "j_ids = map(lambda x1: x1[0], cr.fetchall())\n", "cr.execute(\n \"\"\" select id from account_journal\n where auto_cash='True' and type='cash'\n and id in (%s)\"\"\"\n % ','.join(map(lambda x: \"'\" + str(x) + \"'\", j_ids)))\n", "journal_ids = map(lambda x1: x1[0], cr.fetchall())\n", "for journal in journal_obj.browse(cr, uid, journal_ids):\n", "ids = statement_obj.search(cr, uid, [('state', '!=', 'confirm'), ('user_id',\n '=', uid), ('journal_id', '=', journal.id)])\n", "data_obj = self.pool.get('ir.model.data')\n", "if not ids:\n", "id2 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_tree')\n", "list_statement.append(ids[0])\n", "id3 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_form2')\n", "if not journal.check_dtls:\n", "if id2:\n", "statement_obj.button_confirm_cash(cr, uid, ids, context)\n", "id2 = data_obj.browse(cr, uid, id2, context=context).res_id\n", "if id3:\n", "id3 = data_obj.browse(cr, uid, id3, context=context).res_id\n", "return {'domain': \"[('id','in',\" + str(list_statement) + ')]', 'name':\n 'Close Statements', 'view_type': 'form', 'view_mode': 'tree,form',\n 'res_model': 'account.bank.statement', 'views': [(id2, 'tree'), (id3,\n 'form')], 'type': 'ir.actions.act_window'}\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "For", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Condition", "Condition", "Expr'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_1(self):...\n", "return self.extracted_cpcat.data_document.get_absolute_url()\n" ]
[ "def get_datadocument_url(self):...\n", "return self.extracted_cpcat.data_document.get_absolute_url()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_3(self, VAR_1, VAR_2, VAR_5, VAR_4=None):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_4 is None:\n", "VAR_4 = {}\n", "VAR_30 = VAR_4.get('states', [])\n", "VAR_31 = VAR_4.get('what', ())\n", "if not VAR_5:\n", "VAR_5 = self.search(VAR_1, VAR_2, [])\n", "VAR_29 = {}.fromkeys(VAR_5, 0.0)\n", "if not VAR_5:\n", "return VAR_29\n", "if VAR_4.get('shop', False):\n", "VAR_1.execute('select warehouse_id from sale_shop where id=%s', (int(VAR_4[\n 'shop']),))\n", "if VAR_4.get('warehouse', False):\n", "VAR_41 = VAR_1.fetchone()\n", "VAR_1.execute('select lot_stock_id from stock_warehouse where id=%s', (int(\n VAR_4['warehouse']),))\n", "if VAR_4.get('location', False):\n", "if VAR_41:\n", "VAR_41 = VAR_1.fetchone()\n", "if type(VAR_4['location']) == type(1):\n", "VAR_42 = []\n", "VAR_4['warehouse'] = VAR_41[0]\n", "if VAR_41:\n", "VAR_42 = [VAR_4['location']]\n", "if type(VAR_4['location']) in (type(''), type(u'')):\n", "VAR_43 = self.pool.get('stock.warehouse').search(VAR_1, VAR_2, [], VAR_4=\n context)\n", "VAR_4['location'] = VAR_41[0]\n", "if VAR_4.get('compute_child', True):\n", "VAR_42 = self.pool.get('stock.location').search(VAR_1, VAR_2, [('name',\n 'ilike', VAR_4['location'])], VAR_4=context)\n", "VAR_42 = VAR_4['location']\n", "for w in self.pool.get('stock.warehouse').browse(VAR_1, VAR_2, VAR_43,\n", "VAR_44 = self.pool.get('stock.location').search(VAR_1, VAR_2, [(\n 'location_id', 'child_of', VAR_42)])\n", "VAR_42 = VAR_42\n", "VAR_42.append(w.lot_stock_id.id)\n", "VAR_42 = VAR_44 or VAR_42\n", "VAR_32 = {}\n", "VAR_33 = {}\n", "for VAR_45 in self.browse(VAR_1, VAR_2, VAR_5, VAR_4=context):\n", "VAR_33[VAR_45.id] = VAR_45.uom_id.id\n", "VAR_34 = []\n", "VAR_32[VAR_45.uom_id.id] = VAR_45.uom_id\n", "VAR_35 = []\n", "VAR_36 = VAR_4.get('from_date', False)\n", "VAR_37 = VAR_4.get('to_date', False)\n", "VAR_38 = False\n", "if VAR_36 and VAR_37:\n", "VAR_38 = \"date_planned>='%s' and date_planned<='%s'\" % (VAR_36, VAR_37)\n", "if VAR_36:\n", "if 'in' in VAR_31:\n", "VAR_38 = \"date_planned>='%s'\" % VAR_36\n", "if VAR_37:\n", "VAR_1.execute('string' + (VAR_38 and 'and ' + VAR_38 + ' ' or '') +\n 'group by product_id,product_uom', (tuple(VAR_42), tuple(VAR_42), tuple\n (VAR_5), tuple(VAR_30)))\n", "if 'out' in VAR_31:\n", "VAR_38 = \"date_planned<='%s'\" % VAR_37\n", "VAR_34 = VAR_1.fetchall()\n", "VAR_1.execute('string' + (VAR_38 and 'and ' + VAR_38 + ' ' or '') +\n 'group by product_id,product_uom', (tuple(VAR_42), tuple(VAR_42), tuple\n (VAR_5), tuple(VAR_30)))\n", "VAR_39 = self.pool.get('product.uom')\n", "VAR_35 = VAR_1.fetchall()\n", "VAR_40 = map(lambda x: x[2], VAR_34) + map(lambda x: x[2], VAR_35)\n", "if VAR_4.get('uom', False):\n", "VAR_40 += [VAR_4['uom']]\n", "VAR_40 = filter(lambda x: x not in VAR_32.keys(), VAR_40)\n", "if VAR_40:\n", "VAR_40 = VAR_39.browse(VAR_1, VAR_2, list(set(VAR_40)), VAR_4=context)\n", "for VAR_46 in VAR_40:\n", "VAR_32[VAR_46.id] = VAR_46\n", "for VAR_47, prod_id, prod_uom in VAR_34:\n", "VAR_47 = VAR_39._compute_qty_obj(VAR_1, VAR_2, VAR_32[prod_uom], VAR_47,\n VAR_32[VAR_4.get('uom', False) or VAR_33[prod_id]])\n", "for VAR_47, prod_id, prod_uom in VAR_35:\n", "VAR_29[prod_id] += VAR_47\n", "VAR_47 = VAR_39._compute_qty_obj(VAR_1, VAR_2, VAR_32[prod_uom], VAR_47,\n VAR_32[VAR_4.get('uom', False) or VAR_33[prod_id]])\n", "return VAR_29\n", "VAR_29[prod_id] -= VAR_47\n" ]
[ "def get_product_available(self, cr, uid, ids, context=None):...\n", "\"\"\"docstring\"\"\"\n", "if context is None:\n", "context = {}\n", "states = context.get('states', [])\n", "what = context.get('what', ())\n", "if not ids:\n", "ids = self.search(cr, uid, [])\n", "res = {}.fromkeys(ids, 0.0)\n", "if not ids:\n", "return res\n", "if context.get('shop', False):\n", "cr.execute('select warehouse_id from sale_shop where id=%s', (int(context[\n 'shop']),))\n", "if context.get('warehouse', False):\n", "res2 = cr.fetchone()\n", "cr.execute('select lot_stock_id from stock_warehouse where id=%s', (int(\n context['warehouse']),))\n", "if context.get('location', False):\n", "if res2:\n", "res2 = cr.fetchone()\n", "if type(context['location']) == type(1):\n", "location_ids = []\n", "context['warehouse'] = res2[0]\n", "if res2:\n", "location_ids = [context['location']]\n", "if type(context['location']) in (type(''), type(u'')):\n", "wids = self.pool.get('stock.warehouse').search(cr, uid, [], context=context)\n", "context['location'] = res2[0]\n", "if context.get('compute_child', True):\n", "location_ids = self.pool.get('stock.location').search(cr, uid, [('name',\n 'ilike', context['location'])], context=context)\n", "location_ids = context['location']\n", "for w in self.pool.get('stock.warehouse').browse(cr, uid, wids, context=context\n", "child_location_ids = self.pool.get('stock.location').search(cr, uid, [(\n 'location_id', 'child_of', location_ids)])\n", "location_ids = location_ids\n", "location_ids.append(w.lot_stock_id.id)\n", "location_ids = child_location_ids or location_ids\n", "uoms_o = {}\n", "product2uom = {}\n", "for product in self.browse(cr, uid, ids, context=context):\n", "product2uom[product.id] = product.uom_id.id\n", "results = []\n", "uoms_o[product.uom_id.id] = product.uom_id\n", "results2 = []\n", "from_date = context.get('from_date', False)\n", "to_date = context.get('to_date', False)\n", "date_str = False\n", "if from_date and to_date:\n", "date_str = \"date_planned>='%s' and date_planned<='%s'\" % (from_date, to_date)\n", "if from_date:\n", "if 'in' in what:\n", "date_str = \"date_planned>='%s'\" % from_date\n", "if to_date:\n", "cr.execute(\n 'select sum(product_qty), product_id, product_uom from stock_move where location_id NOT IN %sand location_dest_id IN %sand product_id IN %sand state IN %s'\n + (date_str and 'and ' + date_str + ' ' or '') +\n 'group by product_id,product_uom', (tuple(location_ids), tuple(\n location_ids), tuple(ids), tuple(states)))\n", "if 'out' in what:\n", "date_str = \"date_planned<='%s'\" % to_date\n", "results = cr.fetchall()\n", "cr.execute(\n 'select sum(product_qty), product_id, product_uom from stock_move where location_id IN %sand location_dest_id NOT IN %s and product_id IN %sand state in %s'\n + (date_str and 'and ' + date_str + ' ' or '') +\n 'group by product_id,product_uom', (tuple(location_ids), tuple(\n location_ids), tuple(ids), tuple(states)))\n", "uom_obj = self.pool.get('product.uom')\n", "results2 = cr.fetchall()\n", "uoms = map(lambda x: x[2], results) + map(lambda x: x[2], results2)\n", "if context.get('uom', False):\n", "uoms += [context['uom']]\n", "uoms = filter(lambda x: x not in uoms_o.keys(), uoms)\n", "if uoms:\n", "uoms = uom_obj.browse(cr, uid, list(set(uoms)), context=context)\n", "for o in uoms:\n", "uoms_o[o.id] = o\n", "for amount, prod_id, prod_uom in results:\n", "amount = uom_obj._compute_qty_obj(cr, uid, uoms_o[prod_uom], amount, uoms_o\n [context.get('uom', False) or product2uom[prod_id]])\n", "for amount, 
prod_id, prod_uom in results2:\n", "res[prod_id] += amount\n", "amount = uom_obj._compute_qty_obj(cr, uid, uoms_o[prod_uom], amount, uoms_o\n [context.get('uom', False) or product2uom[prod_id]])\n", "return res\n", "res[prod_id] -= amount\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 4, 0, 4, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Return'", "Condition", "Expr'", "Condition", "Assign'", "Expr'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Expr'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Condition", "AugAssign'", "Assign'", "Condition", "Assign'", "For", "Assign'", "For", "Assign'", "For", "AugAssign'", "Assign'", "Return'", "AugAssign'" ]
[ "def __init__(self, VAR_0=None, VAR_1=None, VAR_2=None, VAR_3=None, VAR_4=...\n", "self.object_id_seed = VAR_11\n", "self.redis_address = VAR_0\n", "self.num_cpus = VAR_1\n", "self.num_gpus = VAR_2\n", "self.resources = VAR_3\n", "self.object_store_memory = VAR_4\n", "self.redis_max_memory = VAR_5\n", "self.redis_port = VAR_6\n", "self.redis_shard_ports = VAR_7\n", "self.object_manager_port = VAR_8\n", "self.node_manager_port = VAR_9\n", "self.node_ip_address = VAR_10\n", "self.num_workers = VAR_12\n", "self.local_mode = VAR_13\n", "self.driver_mode = VAR_14\n", "self.redirect_worker_output = VAR_15\n", "self.redirect_output = VAR_16\n", "self.num_redis_shards = VAR_17\n", "self.redis_max_clients = VAR_18\n", "self.redis_password = VAR_19\n", "self.plasma_directory = VAR_20\n", "self.worker_path = VAR_21\n", "self.huge_pages = VAR_22\n", "self.include_webui = VAR_23\n", "self.plasma_store_socket_name = VAR_26\n", "self.raylet_socket_name = VAR_27\n", "self.temp_dir = VAR_28\n", "self.include_log_monitor = VAR_29\n", "self.autoscaling_config = VAR_30\n", "self.include_java = VAR_31\n", "self.java_worker_options = VAR_32\n", "self._internal_config = VAR_33\n", "self._check_usage()\n" ]
[ "def __init__(self, redis_address=None, num_cpus=None, num_gpus=None,...\n", "self.object_id_seed = object_id_seed\n", "self.redis_address = redis_address\n", "self.num_cpus = num_cpus\n", "self.num_gpus = num_gpus\n", "self.resources = resources\n", "self.object_store_memory = object_store_memory\n", "self.redis_max_memory = redis_max_memory\n", "self.redis_port = redis_port\n", "self.redis_shard_ports = redis_shard_ports\n", "self.object_manager_port = object_manager_port\n", "self.node_manager_port = node_manager_port\n", "self.node_ip_address = node_ip_address\n", "self.num_workers = num_workers\n", "self.local_mode = local_mode\n", "self.driver_mode = driver_mode\n", "self.redirect_worker_output = redirect_worker_output\n", "self.redirect_output = redirect_output\n", "self.num_redis_shards = num_redis_shards\n", "self.redis_max_clients = redis_max_clients\n", "self.redis_password = redis_password\n", "self.plasma_directory = plasma_directory\n", "self.worker_path = worker_path\n", "self.huge_pages = huge_pages\n", "self.include_webui = include_webui\n", "self.plasma_store_socket_name = plasma_store_socket_name\n", "self.raylet_socket_name = raylet_socket_name\n", "self.temp_dir = temp_dir\n", "self.include_log_monitor = include_log_monitor\n", "self.autoscaling_config = autoscaling_config\n", "self.include_java = include_java\n", "self.java_worker_options = java_worker_options\n", "self._internal_config = _internal_config\n", "self._check_usage()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "@staticmethod...\n", "return users.create_login_url(VAR_23)\n" ]
[ "@staticmethod...\n", "return users.create_login_url(dest_url)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_4(self):...\n", "VAR_2 = self.client.get('/datadocument/167497/')\n", "self.assertContains(VAR_2, '/link_product_form/167497/')\n", "VAR_6 = {'title': ['New Product'], 'upc': ['stub_1860'], 'document_type': [\n 1], 'return_url': ['/datadocument/167497/']}\n", "VAR_2 = self.client.post('/link_product_form/167497/', VAR_6=data)\n", "self.assertRedirects(VAR_2, '/datadocument/167497/')\n", "VAR_2 = self.client.get(VAR_2.url)\n", "self.assertContains(VAR_2, 'New Product')\n" ]
[ "def test_product_create_link(self):...\n", "response = self.client.get('/datadocument/167497/')\n", "self.assertContains(response, '/link_product_form/167497/')\n", "data = {'title': ['New Product'], 'upc': ['stub_1860'], 'document_type': [1\n ], 'return_url': ['/datadocument/167497/']}\n", "response = self.client.post('/link_product_form/167497/', data=data)\n", "self.assertRedirects(response, '/datadocument/167497/')\n", "response = self.client.get(response.url)\n", "self.assertContains(response, 'New Product')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def __init__(self, VAR_13, VAR_11, VAR_4, VAR_6):...\n", "self.field_name = VAR_13\n", "self.label = VAR_11\n", "self.value = VAR_4\n", "self.query_params = VAR_6\n" ]
[ "def __init__(self, field_name, label, value, query_params):...\n", "self.field_name = field_name\n", "self.label = label\n", "self.value = value\n", "self.query_params = query_params\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def __init__(self, VAR_7, VAR_20=True, *VAR_15, **VAR_16):...\n", "CLASS_0.__init__(self, VAR_7, *VAR_15, **kw)\n", "self.redirect = VAR_20\n" ]
[ "def __init__(self, param, redirect=True, *a, **kw):...\n", "Validator.__init__(self, param, *a, **kw)\n", "self.redirect = redirect\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'" ]
[ "def FUNC_12():...\n", "VAR_18 = requests.get(FUNC_2('healthy'))\n", "VAR_18.raise_for_status()\n", "return VAR_18.json()\n" ]
[ "def _ServerIsHealthy():...\n", "response = requests.get(_BuildUri('healthy'))\n", "response.raise_for_status()\n", "return response.json()\n" ]
[ 0, 7, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_6(VAR_10, VAR_11):...\n", "VAR_10 = VAR_10.replace('/b/', '/osu/').split('&', 1)[0]\n", "if VAR_11 == '':\n", "VAR_37 = 'curl ' + VAR_10 + ' | /home/pi/DiscordBots/Oppai/oppai/oppai -'\n", "VAR_37 = ('curl ' + VAR_10 + ' | /home/pi/DiscordBots/Oppai/oppai/oppai - ' +\n VAR_11)\n", "return FUNC_8(subprocess.Popen(VAR_37, stdout=subprocess.PIPE, stderr=\n subprocess.STDOUT, shell=True).stdout.read())\n" ]
[ "def return_simple_beatmap_info(url, oppaiParameters):...\n", "url = url.replace('/b/', '/osu/').split('&', 1)[0]\n", "if oppaiParameters == '':\n", "command = 'curl ' + url + ' | /home/pi/DiscordBots/Oppai/oppai/oppai -'\n", "command = ('curl ' + url + ' | /home/pi/DiscordBots/Oppai/oppai/oppai - ' +\n oppaiParameters)\n", "return get_infos(subprocess.Popen(command, stdout=subprocess.PIPE, stderr=\n subprocess.STDOUT, shell=True).stdout.read())\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_16(VAR_24):...\n", "VAR_42 = logging.getLogger('ray')\n", "if VAR_21:\n", "VAR_53 = logging.StreamHandler(stream=new_stream)\n", "VAR_53.setFormatter(VAR_21.formatter)\n", "VAR_21.close()\n", "VAR_21 = VAR_53\n", "VAR_42.addHandler(VAR_21)\n" ]
[ "def try_update_handler(new_stream):...\n", "logger = logging.getLogger('ray')\n", "if _default_handler:\n", "new_handler = logging.StreamHandler(stream=new_stream)\n", "new_handler.setFormatter(_default_handler.formatter)\n", "_default_handler.close()\n", "_default_handler = new_handler\n", "logger.addHandler(_default_handler)\n" ]
[ 0, 0, 6, 6, 6, 6, 6, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "import json\n", "import logging\n", "from typing import List\n", "from datetime import timedelta\n", "import tornado.web\n", "from tornado_sqlalchemy import as_future, SessionMixin\n", "from pinnwand import database\n", "from pinnwand import utility\n", "from pinnwand import path\n", "VAR_0 = logging.getLogger(__name__)\n", "async def FUNC_1(self) ->None:...\n", "self.render('about.html', pagetitle='about')\n", "\"\"\"The index page shows the new paste page with a list of all available\n lexers from Pygments.\"\"\"\n", "async def FUNC_1(self, VAR_1: str='') ->None:...\n", "\"\"\"docstring\"\"\"\n", "VAR_4 = utility.list_languages()\n", "if not VAR_1:\n", "VAR_1 = 'text'\n", "if VAR_1 not in VAR_4:\n", "VAR_0.debug('CreatePaste.get: non-existent logger requested')\n", "await self.render('new.html', VAR_1=lexer, VAR_4=lexers, pagetitle='new',\n message=None)\n", "self.set_status(404)\n", "async def FUNC_2(self) ->None:...\n", "self.render('404.html', pagetitle='404')\n", "VAR_1 = self.get_body_argument('lexer')\n", "return\n", "VAR_5 = self.get_body_argument('code')\n", "VAR_6 = self.get_body_argument('expiry')\n", "if VAR_1 not in utility.list_languages():\n", "VAR_0.info('Paste.post: a paste was submitted with an invalid lexer')\n", "if not VAR_5:\n", "return self.redirect(f'/+{VAR_1}')\n", "if VAR_6 not in utility.expiries:\n", "VAR_0.info('Paste.post: a paste was submitted with an invalid expiry')\n", "VAR_7 = database.Paste(VAR_5, VAR_1, utility.expiries[VAR_6])\n", "session.add(VAR_7)\n", "session.commit()\n", "self.set_cookie('removal', str(VAR_7.removal_id), path=\n f'/show/{paste.paste_id}')\n", "self.redirect(f'/show/{VAR_7.paste_id}')\n", "def FUNC_3(self) ->bool:...\n", "\"\"\"docstring\"\"\"\n", "return True\n" ]
[ "import json\n", "import logging\n", "from typing import List\n", "from datetime import timedelta\n", "import tornado.web\n", "from tornado_sqlalchemy import as_future, SessionMixin\n", "from pinnwand import database\n", "from pinnwand import utility\n", "from pinnwand import path\n", "log = logging.getLogger(__name__)\n", "async def get(self) ->None:...\n", "self.render('about.html', pagetitle='about')\n", "\"\"\"The index page shows the new paste page with a list of all available\n lexers from Pygments.\"\"\"\n", "async def get(self, lexer: str='') ->None:...\n", "\"\"\"docstring\"\"\"\n", "lexers = utility.list_languages()\n", "if not lexer:\n", "lexer = 'text'\n", "if lexer not in lexers:\n", "log.debug('CreatePaste.get: non-existent logger requested')\n", "await self.render('new.html', lexer=lexer, lexers=lexers, pagetitle='new',\n message=None)\n", "self.set_status(404)\n", "async def post(self) ->None:...\n", "self.render('404.html', pagetitle='404')\n", "lexer = self.get_body_argument('lexer')\n", "return\n", "raw = self.get_body_argument('code')\n", "expiry = self.get_body_argument('expiry')\n", "if lexer not in utility.list_languages():\n", "log.info('Paste.post: a paste was submitted with an invalid lexer')\n", "if not raw:\n", "return self.redirect(f'/+{lexer}')\n", "if expiry not in utility.expiries:\n", "log.info('Paste.post: a paste was submitted with an invalid expiry')\n", "paste = database.Paste(raw, lexer, utility.expiries[expiry])\n", "session.add(paste)\n", "session.commit()\n", "self.set_cookie('removal', str(paste.removal_id), path=\n f'/show/{paste.paste_id}')\n", "self.redirect(f'/show/{paste.paste_id}')\n", "def check_xsrf_cookie(self) ->bool:...\n", "\"\"\"docstring\"\"\"\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5 ]
[ "Import'", "Import'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "AsyncFunctionDef'", "Expr'", "Expr'", "AsyncFunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Expr'", "Expr'", "AsyncFunctionDef'", "Expr'", "Assign'", "Return'", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Return'", "Condition", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_13(self, *VAR_16, **VAR_10):...\n", "if not self.current_user or not self.current_user.admin:\n", "return VAR_7(self, *VAR_16, **kwargs)\n" ]
[ "def wrapper(self, *args, **kwargs):...\n", "if not self.current_user or not self.current_user.admin:\n", "return method(self, *args, **kwargs)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'" ]
[ "def FUNC_7(VAR_2, VAR_5='/tmp/source.html'):...\n", "out.write(VAR_2.browser.page_source.encode('utf8'))\n" ]
[ "def save_source(context, filename='/tmp/source.html'):...\n", "out.write(context.browser.page_source.encode('utf8'))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_2(VAR_1: 'projects.Project', VAR_3: str, VAR_4: SyntaxError) ->dict:...\n", "\"\"\"docstring\"\"\"\n", "VAR_9 = [dict(VAR_15=error.filename, VAR_16=None, line_number=error.lineno,\n line=error.text.rstrip())]\n", "VAR_10 = dict(type=error.__class__.__name__, message='{}'.format(error),\n VAR_9=stack)\n", "return dict(success=False, VAR_4=error, message=templating.render_template(\n 'user-code-error.txt', **render_data), html_message=templating.\n render_template('user-code-error.html', **render_data))\n" ]
[ "def render_syntax_error(project: 'projects.Project', code: str, error:...\n", "\"\"\"docstring\"\"\"\n", "stack = [dict(filename=error.filename, location=None, line_number=error.\n lineno, line=error.text.rstrip())]\n", "render_data = dict(type=error.__class__.__name__, message='{}'.format(error\n ), stack=stack)\n", "return dict(success=False, error=error, message=templating.render_template(\n 'user-code-error.txt', **render_data), html_message=templating.\n render_template('user-code-error.html', **render_data))\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_31(self, VAR_44):...\n", "\"\"\"docstring\"\"\"\n", "if self.counter['tag'] != VAR_44.pk:\n", "if VAR_44.pk == self.counter['tag']:\n", "self.counter = self.test_tags.__next__()\n", "return 0\n", "return self.counter[self.key]\n", "return 0\n" ]
[ "def calculate_tag_count(self, tag):...\n", "\"\"\"docstring\"\"\"\n", "if self.counter['tag'] != tag.pk:\n", "if tag.pk == self.counter['tag']:\n", "self.counter = self.test_tags.__next__()\n", "return 0\n", "return self.counter[self.key]\n", "return 0\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Condition", "Assign'", "Return'", "Return'", "Return'" ]
[ "def FUNC_0(self):...\n", "VAR_17 = 'string'\n", "return VAR_17\n" ]
[ "def _select(self):...\n", "select_str = \"\"\"\n select\n id,\n c.date_open as opening_date,\n c.date_closed as date_closed,\n c.state,\n c.user_id,\n c.team_id,\n c.partner_id,\n c.duration,\n c.company_id,\n c.priority,\n 1 as nbr_cases,\n c.create_date as create_date,\n extract(\n 'epoch' from (\n c.date_closed-c.create_date))/(3600*24) as delay_close,\n extract(\n 'epoch' from (\n c.date_open-c.create_date))/(3600*24) as delay_open\n \"\"\"\n", "return select_str\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_18(self):...\n", "VAR_13 = ['Line1 passed', 'Line1 failed']\n", "VAR_14 = ['Line1', 'Line2']\n", "VAR_15 = [' passed', None]\n", "self.assertEqual(self.driver._get_prefixed_value(VAR_13, VAR_14[0]), VAR_15[0])\n", "self.assertEqual(self.driver._get_prefixed_value(VAR_13, VAR_14[1]), VAR_15[1])\n" ]
[ "def test_get_prefixed_value(self):...\n", "lines = ['Line1 passed', 'Line1 failed']\n", "prefix = ['Line1', 'Line2']\n", "expected_output = [' passed', None]\n", "self.assertEqual(self.driver._get_prefixed_value(lines, prefix[0]),\n expected_output[0])\n", "self.assertEqual(self.driver._get_prefixed_value(lines, prefix[1]),\n expected_output[1])\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def __str__(self):...\n", "return ' '.join(map(str, self))\n" ]
[ "def __str__(self):...\n", "return ' '.join(map(str, self))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_21(self, VAR_2):...\n", "VAR_1 = frappe.get_meta(self.doctype)\n", "if self.order_by:\n", "VAR_2.order_by = self.order_by\n", "VAR_2.order_by = ''\n", "VAR_65 = len(self.fields) == 1 and (self.fields[0].lower().startswith(\n 'count(') or self.fields[0].lower().startswith('min(') or self.fields[0\n ].lower().startswith('max(')) and not self.group_by\n", "if not VAR_65:\n", "VAR_40 = VAR_41 = None\n", "if VAR_1.sort_field and ',' in VAR_1.sort_field:\n", "VAR_2.order_by = ', '.join(['`tab{0}`.`{1}` {2}'.format(self.doctype,\n VAR_34.split()[0].strip(), VAR_34.split()[1].strip()) for VAR_34 in\n VAR_1.sort_field.split(',')])\n", "VAR_40 = VAR_1.sort_field or 'modified'\n", "if VAR_1.is_submittable:\n", "VAR_41 = VAR_1.sort_field and VAR_1.sort_order or 'desc'\n", "VAR_2.order_by = '`tab{0}`.docstatus asc, {1}'.format(self.doctype, VAR_2.\n order_by)\n", "VAR_2.order_by = '`tab{0}`.`{1}` {2}'.format(self.doctype, VAR_40 or\n 'modified', VAR_41 or 'desc')\n" ]
[ "def set_order_by(self, args):...\n", "meta = frappe.get_meta(self.doctype)\n", "if self.order_by:\n", "args.order_by = self.order_by\n", "args.order_by = ''\n", "group_function_without_group_by = len(self.fields) == 1 and (self.fields[0]\n .lower().startswith('count(') or self.fields[0].lower().startswith(\n 'min(') or self.fields[0].lower().startswith('max(')) and not self.group_by\n", "if not group_function_without_group_by:\n", "sort_field = sort_order = None\n", "if meta.sort_field and ',' in meta.sort_field:\n", "args.order_by = ', '.join(['`tab{0}`.`{1}` {2}'.format(self.doctype, f.\n split()[0].strip(), f.split()[1].strip()) for f in meta.sort_field.\n split(',')])\n", "sort_field = meta.sort_field or 'modified'\n", "if meta.is_submittable:\n", "sort_order = meta.sort_field and meta.sort_order or 'desc'\n", "args.order_by = '`tab{0}`.docstatus asc, {1}'.format(self.doctype, args.\n order_by)\n", "args.order_by = '`tab{0}`.`{1}` {2}'.format(self.doctype, sort_field or\n 'modified', sort_order or 'desc')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'" ]
[ "@VAR_0.route('/<page_name>/history/record')...\n", "VAR_12 = request.args.get('id')\n", "VAR_3 = VAR_1.query('string' % VAR_12)\n", "VAR_13 = VAR_3.namedresult()[0]\n", "return render_template('page_record.html', VAR_2=page_name, VAR_13=page_record)\n" ]
[ "@app.route('/<page_name>/history/record')...\n", "content_id = request.args.get('id')\n", "query = db.query(\n \"select page_content.content, page_content.timestamp from page, page_content where page.id = page_content.page_id and page_content.id = '%s'\"\n % content_id)\n", "page_record = query.namedresult()[0]\n", "return render_template('page_record.html', page_name=page_name, page_record\n =page_record)\n" ]
[ 0, 0, 4, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def __enter__(self):...\n", "\"\"\"docstring\"\"\"\n", "self.start_time = time.time()\n", "return self\n" ]
[ "def __enter__(self):...\n", "\"\"\"docstring\"\"\"\n", "self.start_time = time.time()\n", "return self\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_17(VAR_16):...\n", "VAR_0.info('Login Error for %s' % VAR_5.args['username'][0])\n", "VAR_0.info('%s' % VAR_16)\n", "VAR_5.setResponseCode(UNAUTHORIZED)\n", "return self._render_template(VAR_5, 'Invalid credentials')\n" ]
[ "def render_error(error):...\n", "log.info('Login Error for %s' % request.args['username'][0])\n", "log.info('%s' % error)\n", "request.setResponseCode(UNAUTHORIZED)\n", "return self._render_template(request, 'Invalid credentials')\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_6(VAR_8, VAR_14, VAR_22, VAR_23, VAR_18):...\n", "\"\"\"docstring\"\"\"\n", "VAR_20 = FUNC_0()\n", "if VAR_23:\n", "if VAR_8.grace_period:\n", "VAR_37 = VAR_3 if VAR_18 else VAR_2\n", "return max(VAR_20 - VAR_23 - VAR_8.grace_period, 0.0)\n", "return 0.0\n", "if VAR_8.hard_timeout:\n", "VAR_37 = min(VAR_37, VAR_14 + VAR_8.hard_timeout - VAR_20)\n", "if VAR_8.io_timeout:\n", "VAR_37 = min(VAR_37, VAR_22 + VAR_8.io_timeout - VAR_20)\n", "VAR_37 = max(VAR_37, 0)\n", "logging.debug('calc_yield_wait() = %d', VAR_37)\n", "return VAR_37\n" ]
[ "def calc_yield_wait(task_details, start, last_io, timed_out, stdout):...\n", "\"\"\"docstring\"\"\"\n", "now = monotonic_time()\n", "if timed_out:\n", "if task_details.grace_period:\n", "out = MIN_PACKET_INTERNAL if stdout else MAX_PACKET_INTERVAL\n", "return max(now - timed_out - task_details.grace_period, 0.0)\n", "return 0.0\n", "if task_details.hard_timeout:\n", "out = min(out, start + task_details.hard_timeout - now)\n", "if task_details.io_timeout:\n", "out = min(out, last_io + task_details.io_timeout - now)\n", "out = max(out, 0)\n", "logging.debug('calc_yield_wait() = %d', out)\n", "return out\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Condition", "Assign'", "Return'", "Return'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "from __future__ import unicode_literals\n", "import frappe\n", "from frappe import _\n", "from frappe.website.website_generator import WebsiteGenerator\n", "from frappe.website.render import clear_cache\n", "from frappe.utils import today, cint, global_date_format, get_fullname, strip_html_tags, markdown\n", "from frappe.website.utils import find_first_image, get_comment_list\n", "VAR_8 = frappe._dict(VAR_7='published_on desc')\n", "def FUNC_5(self):...\n", "if not self.route:\n", "return frappe.db.get_value('Blog Category', self.blog_category, 'route'\n ) + '/' + self.scrub(self.title)\n", "def FUNC_6(self):...\n", "return self.title\n" ]
[ "from __future__ import unicode_literals\n", "import frappe\n", "from frappe import _\n", "from frappe.website.website_generator import WebsiteGenerator\n", "from frappe.website.render import clear_cache\n", "from frappe.utils import today, cint, global_date_format, get_fullname, strip_html_tags, markdown\n", "from frappe.website.utils import find_first_image, get_comment_list\n", "website = frappe._dict(order_by='published_on desc')\n", "def make_route(self):...\n", "if not self.route:\n", "return frappe.db.get_value('Blog Category', self.blog_category, 'route'\n ) + '/' + self.scrub(self.title)\n", "def get_feed(self):...\n", "return self.title\n" ]
[ 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "FunctionDef'", "Condition", "Return'", "FunctionDef'", "Return'" ]
[ "@app.route('/api/sequences/<id>', methods=['DELETE'])...\n", "VAR_19 = controller.sequences.lookup(VAR_4)\n", "VAR_20 = VAR_19.to_map()\n", "VAR_20.update({'status': 'deleted'})\n", "controller.sequences.remove(VAR_19)\n", "return VAR_20\n" ]
[ "@app.route('/api/sequences/<id>', methods=['DELETE'])...\n", "sequence = controller.sequences.lookup(id)\n", "sequence_json = sequence.to_map()\n", "sequence_json.update({'status': 'deleted'})\n", "controller.sequences.remove(sequence)\n", "return sequence_json\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return set(VAR_28 for VAR_28 in self.input if not VAR_28.exists and not \n VAR_28 in self.subworkflow_input)\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return set(f for f in self.input if not f.exists and not f in self.\n subworkflow_input)\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "def FUNC_12(self, VAR_9, VAR_10, VAR_11, VAR_16={}):...\n", "get_and_check_project(VAR_9, VAR_11)\n", "VAR_28 = self.queryset.annotate(**annotate).get(VAR_10=pk, VAR_1=project_pk)\n", "return VAR_28\n" ]
[ "def get_and_check_task(self, request, pk, project_pk, annotate={}):...\n", "get_and_check_project(request, project_pk)\n", "task = self.queryset.annotate(**annotate).get(pk=pk, project=project_pk)\n", "return task\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_10(self):...\n", "VAR_14 = self.new_mocked_client(client.RESTClient, url_prefix='api/v1/ports')\n", "VAR_14.create(VAR_24=jsonutils.dumps({'resource-name': 'port1'}))\n", "FUNC_1('post', VAR_14, 'https://1.2.3.4/api/v1/ports', VAR_8=jsonutils.\n dumps({'resource-name': 'port1'}))\n" ]
[ "def test_client_create(self):...\n", "api = self.new_mocked_client(client.RESTClient, url_prefix='api/v1/ports')\n", "api.create(body=jsonutils.dumps({'resource-name': 'port1'}))\n", "assert_call('post', api, 'https://1.2.3.4/api/v1/ports', data=jsonutils.\n dumps({'resource-name': 'port1'}))\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_6(self, VAR_23):...\n", "if VAR_23:\n", "return self.error()\n", "return Account._by_name(VAR_23)\n" ]
[ "def run(self, name):...\n", "if name:\n", "return self.error()\n", "return Account._by_name(name)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_0(VAR_3=None, VAR_4=None, VAR_5=True):...\n", "VAR_14, VAR_15 = vimsupport.CurrentLineAndColumn()\n", "VAR_16 = vimsupport.GetCurrentBufferFilepath()\n", "VAR_17 = {'filetypes': vimsupport.CurrentFiletypes(), 'line_num': VAR_14,\n 'column_num': VAR_15, 'start_column': VAR_3, 'line_value': vim.current.\n line, 'filepath': VAR_16}\n", "if VAR_5:\n", "VAR_17['file_data'] = vimsupport.GetUnsavedAndCurrentBufferData()\n", "if VAR_4:\n", "VAR_17['query'] = VAR_4\n", "return VAR_17\n" ]
[ "def BuildRequestData(start_column=None, query=None, include_buffer_data=True):...\n", "line, column = vimsupport.CurrentLineAndColumn()\n", "filepath = vimsupport.GetCurrentBufferFilepath()\n", "request_data = {'filetypes': vimsupport.CurrentFiletypes(), 'line_num':\n line, 'column_num': column, 'start_column': start_column, 'line_value':\n vim.current.line, 'filepath': filepath}\n", "if include_buffer_data:\n", "request_data['file_data'] = vimsupport.GetUnsavedAndCurrentBufferData()\n", "if query:\n", "request_data['query'] = query\n", "return request_data\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_13(VAR_18, VAR_19):...\n", "VAR_6 = {}\n", "for id_, VAR_21 in enumerate(VAR_18.description):\n", "VAR_6[VAR_21[0]] = VAR_19[id_]\n", "return VAR_6\n" ]
[ "def dict_factory(cursor, row):...\n", "dictionary = {}\n", "for id_, column in enumerate(cursor.description):\n", "dictionary[column[0]] = row[id_]\n", "return dictionary\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Assign'", "Return'" ]
[ "def FUNC_0(self, VAR_1, VAR_2, VAR_3, VAR_4=None):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_4 is None:\n", "VAR_4 = {}\n", "VAR_17 = self.pool.get('product.product').browse(VAR_1, VAR_2, VAR_3, VAR_4\n =context)\n", "VAR_18 = (VAR_17.property_stock_account_input and VAR_17.\n property_stock_account_input.id or False)\n", "if not VAR_18:\n", "VAR_18 = (VAR_17.categ_id.property_stock_account_input_categ and VAR_17.\n categ_id.property_stock_account_input_categ.id or False)\n", "VAR_19 = (VAR_17.property_stock_account_output and VAR_17.\n property_stock_account_output.id or False)\n", "if not VAR_19:\n", "VAR_19 = (VAR_17.categ_id.property_stock_account_output_categ and VAR_17.\n categ_id.property_stock_account_output_categ.id or False)\n", "VAR_20 = (VAR_17.categ_id.property_stock_journal and VAR_17.categ_id.\n property_stock_journal.id or False)\n", "VAR_21 = (VAR_17.categ_id.property_stock_variation and VAR_17.categ_id.\n property_stock_variation.id or False)\n", "return {'stock_account_input': VAR_18, 'stock_account_output': VAR_19,\n 'stock_journal': VAR_20, 'property_stock_variation': VAR_21}\n" ]
[ "def get_product_accounts(self, cr, uid, product_id, context=None):...\n", "\"\"\"docstring\"\"\"\n", "if context is None:\n", "context = {}\n", "product_obj = self.pool.get('product.product').browse(cr, uid, product_id,\n context=context)\n", "stock_input_acc = (product_obj.property_stock_account_input and product_obj\n .property_stock_account_input.id or False)\n", "if not stock_input_acc:\n", "stock_input_acc = (product_obj.categ_id.property_stock_account_input_categ and\n product_obj.categ_id.property_stock_account_input_categ.id or False)\n", "stock_output_acc = (product_obj.property_stock_account_output and\n product_obj.property_stock_account_output.id or False)\n", "if not stock_output_acc:\n", "stock_output_acc = (product_obj.categ_id.\n property_stock_account_output_categ and product_obj.categ_id.\n property_stock_account_output_categ.id or False)\n", "journal_id = (product_obj.categ_id.property_stock_journal and product_obj.\n categ_id.property_stock_journal.id or False)\n", "account_variation = (product_obj.categ_id.property_stock_variation and\n product_obj.categ_id.property_stock_variation.id or False)\n", "return {'stock_account_input': stock_input_acc, 'stock_account_output':\n stock_output_acc, 'stock_journal': journal_id,\n 'property_stock_variation': account_variation}\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_0(self, VAR_0):...\n", "return [name for name in creatable_models.keys() if name.startswith(VAR_0)]\n" ]
[ "def complete(self, token):...\n", "return [name for name in creatable_models.keys() if name.startswith(token)]\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@defer.inlineCallbacks...\n", "VAR_71 = {'encoding': [VAR_44]}\n", "if VAR_45:\n", "VAR_71['filename'] = [VAR_45]\n", "if VAR_46:\n", "VAR_71['content_type'] = [VAR_46]\n", "VAR_72, VAR_73 = self.get('/attachment/%s' % VAR_43, VAR_71, VAR_21=False)\n", "VAR_69 = yield VAR_72\n", "defer.returnValue((VAR_69, VAR_73))\n" ]
[ "@defer.inlineCallbacks...\n", "params = {'encoding': [encoding]}\n", "if filename:\n", "params['filename'] = [filename]\n", "if content_type:\n", "params['content_type'] = [content_type]\n", "deferred_result, req = self.get('/attachment/%s' % ident, params, as_json=False\n )\n", "res = yield deferred_result\n", "defer.returnValue((res, req))\n" ]
[ 5, 0, 0, 0, 0, 0, 5, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_12():...\n", "VAR_18 = requests.get(FUNC_2('healthy'))\n", "VAR_18.raise_for_status()\n", "return VAR_18.json()\n" ]
[ "def _ServerIsHealthy():...\n", "response = requests.get(_BuildUri('healthy'))\n", "response.raise_for_status()\n", "return response.json()\n" ]
[ 0, 7, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_11(self):...\n", "return 'Float{}'.format(self.size)\n" ]
[ "def for_schema(self):...\n", "return 'Float{}'.format(self.size)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_0(VAR_1, VAR_2, VAR_3='NGTREE'):...\n", "\"\"\"docstring\"\"\"\n", "VAR_11 = 'CSV', 'TREE', 'JSON', 'YAML', 'NGTREE'\n", "if VAR_3 in VAR_11:\n", "VAR_0.info('Query: Finding Full Path (%s --> %s) for %s', VAR_1, VAR_2,\n nglib.user)\n", "VAR_8, VAR_9 = VAR_1, VAR_2\n", "VAR_12, VAR_13 = None, None\n", "if re.search('^\\\\d+\\\\.\\\\d+\\\\.\\\\d+\\\\.\\\\d+$', VAR_8):\n", "VAR_12 = nglib.query.net.get_net(VAR_8, VAR_3='NGTREE')\n", "if re.search('^\\\\d+\\\\.\\\\d+\\\\.\\\\d+\\\\.\\\\d+$', VAR_9):\n", "if VAR_12:\n", "VAR_13 = nglib.query.net.get_net(VAR_9, VAR_3='NGTREE')\n", "VAR_14, VAR_15, VAR_16, VAR_17 = None, None, None, None\n", "VAR_8 = VAR_12['_child001']['Name']\n", "if VAR_13:\n", "if nglib.use_netdb:\n", "VAR_9 = VAR_13['_child001']['Name']\n", "VAR_14 = nglib.netdb.ip.get_netdb_ip(VAR_1)\n", "if VAR_14:\n", "VAR_15 = nglib.netdb.ip.get_netdb_ip(VAR_2)\n", "VAR_36 = VAR_12['_child001']['Router']\n", "if VAR_15:\n", "if 'StandbyRouter' in VAR_12['_child001']:\n", "VAR_36 = VAR_13['_child001']['Router']\n", "VAR_18 = True\n", "VAR_36 = VAR_36 + '|' + VAR_12['_child001']['StandbyRouter']\n", "VAR_16 = FUNC_2(VAR_14['Switch'], VAR_36, VAR_7=False)\n", "if 'StandbyRouter' in VAR_13['_child001']:\n", "if VAR_14 and VAR_15:\n", "VAR_36 = VAR_36 + '|' + VAR_13['_child001']['StandbyRouter']\n", "VAR_17 = FUNC_2(VAR_36, VAR_15['Switch'], VAR_7=False)\n", "if VAR_14['Switch'] == VAR_15['Switch'] and VAR_14['VLAN'] == VAR_15['VLAN']:\n", "VAR_19 = nglib.ngtree.get_ngtree('L2-L4', tree_type='PATHs')\n", "VAR_18 = False\n", "if VAR_12['_child001']['Name'] != VAR_13['_child001']['Name']:\n", "VAR_19['L3 Path'] = VAR_8 + ' -> ' + VAR_9\n", "if VAR_14 and VAR_15:\n", "VAR_19['Lx Path'] = VAR_1 + ' -> ' + VAR_2\n", "VAR_19['L2 Path'] = VAR_14['Switch'] + ' (' + VAR_14['SwitchPort'\n ] + ') -> ' + VAR_15['Switch'] + ' (' + VAR_15['SwitchPort'] + ')'\n", "VAR_12['_type'] = 'SRC'\n", "VAR_12['Name'] = VAR_1\n", "nglib.ngtree.add_child_ngtree(VAR_19, VAR_12)\n", "if not VAR_18 and '_child002' in VAR_13:\n", "nglib.ngtree.add_child_ngtree(VAR_12, VAR_13['_child002'])\n", "if VAR_18 and VAR_16:\n", "VAR_12['_type'] = 'L2PATH'\n", "nglib.ngtree.add_child_ngtree(VAR_19, VAR_16)\n", "VAR_20 = FUNC_1(VAR_1, VAR_2, VAR_3='NGTREE', VAR_4=True)\n", "VAR_12['Name'] = VAR_1 + ' -> ' + VAR_2\n", "if VAR_20 and 'PATH' in VAR_20['_type']:\n", "if VAR_20['_type'] == 'L4-PATH':\n", "if VAR_18 and VAR_17:\n", "VAR_19['L4 Path'] = VAR_20['Name']\n", "VAR_19['L4 Path'] = 'VRF:' + VAR_12['_child001']['VRF']\n", "nglib.ngtree.add_child_ngtree(VAR_19, VAR_17)\n", "if VAR_18:\n", "nglib.ngtree.add_child_ngtree(VAR_19, VAR_20)\n", "VAR_13['_type'] = 'DST'\n", "VAR_19 = nglib.query.exp_ngtree(VAR_19, VAR_3)\n", "VAR_13['Name'] = VAR_2\n", "return VAR_19\n", "nglib.ngtree.add_child_ngtree(VAR_19, VAR_13)\n" ]
[ "def get_full_path(src, dst, rtype='NGTREE'):...\n", "\"\"\"docstring\"\"\"\n", "rtypes = 'CSV', 'TREE', 'JSON', 'YAML', 'NGTREE'\n", "if rtype in rtypes:\n", "logger.info('Query: Finding Full Path (%s --> %s) for %s', src, dst, nglib.user\n )\n", "net1, net2 = src, dst\n", "n1tree, n2tree = None, None\n", "if re.search('^\\\\d+\\\\.\\\\d+\\\\.\\\\d+\\\\.\\\\d+$', net1):\n", "n1tree = nglib.query.net.get_net(net1, rtype='NGTREE')\n", "if re.search('^\\\\d+\\\\.\\\\d+\\\\.\\\\d+\\\\.\\\\d+$', net2):\n", "if n1tree:\n", "n2tree = nglib.query.net.get_net(net2, rtype='NGTREE')\n", "srctree, dsttree, srcswp, dstswp = None, None, None, None\n", "net1 = n1tree['_child001']['Name']\n", "if n2tree:\n", "if nglib.use_netdb:\n", "net2 = n2tree['_child001']['Name']\n", "srctree = nglib.netdb.ip.get_netdb_ip(src)\n", "if srctree:\n", "dsttree = nglib.netdb.ip.get_netdb_ip(dst)\n", "router = n1tree['_child001']['Router']\n", "if dsttree:\n", "if 'StandbyRouter' in n1tree['_child001']:\n", "router = n2tree['_child001']['Router']\n", "switching = True\n", "router = router + '|' + n1tree['_child001']['StandbyRouter']\n", "srcswp = get_switched_path(srctree['Switch'], router, verbose=False)\n", "if 'StandbyRouter' in n2tree['_child001']:\n", "if srctree and dsttree:\n", "router = router + '|' + n2tree['_child001']['StandbyRouter']\n", "dstswp = get_switched_path(router, dsttree['Switch'], verbose=False)\n", "if srctree['Switch'] == dsttree['Switch'] and srctree['VLAN'] == dsttree['VLAN'\n", "ngtree = nglib.ngtree.get_ngtree('L2-L4', tree_type='PATHs')\n", "switching = False\n", "if n1tree['_child001']['Name'] != n2tree['_child001']['Name']:\n", "ngtree['L3 Path'] = net1 + ' -> ' + net2\n", "if srctree and dsttree:\n", "ngtree['Lx Path'] = src + ' -> ' + dst\n", "ngtree['L2 Path'] = srctree['Switch'] + ' (' + srctree['SwitchPort'\n ] + ') -> ' + dsttree['Switch'] + ' (' + dsttree['SwitchPort'] + ')'\n", "n1tree['_type'] = 'SRC'\n", "n1tree['Name'] = src\n", "nglib.ngtree.add_child_ngtree(ngtree, n1tree)\n", "if not switching and '_child002' in n2tree:\n", "nglib.ngtree.add_child_ngtree(n1tree, n2tree['_child002'])\n", "if switching and srcswp:\n", "n1tree['_type'] = 'L2PATH'\n", "nglib.ngtree.add_child_ngtree(ngtree, srcswp)\n", "rtree = get_full_routed_path(src, dst, rtype='NGTREE', l2path=True)\n", "n1tree['Name'] = src + ' -> ' + dst\n", "if rtree and 'PATH' in rtree['_type']:\n", "if rtree['_type'] == 'L4-PATH':\n", "if switching and dstswp:\n", "ngtree['L4 Path'] = rtree['Name']\n", "ngtree['L4 Path'] = 'VRF:' + n1tree['_child001']['VRF']\n", "nglib.ngtree.add_child_ngtree(ngtree, dstswp)\n", "if switching:\n", "nglib.ngtree.add_child_ngtree(ngtree, rtree)\n", "n2tree['_type'] = 'DST'\n", "ngtree = nglib.query.exp_ngtree(ngtree, rtype)\n", "n2tree['Name'] = dst\n", "return ngtree\n", "nglib.ngtree.add_child_ngtree(ngtree, n2tree)\n" ]
[ 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Condition", "Expr'", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Expr'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Return'", "Expr'" ]
[ "def FUNC_22(self, VAR_14):...\n", "if self.is_relation_many_to_one(VAR_14):\n", "VAR_34 = self.get_relation_fk(VAR_14)\n", "return self.list_columns[VAR_14].nullable\n", "return False\n", "return VAR_34.nullable\n" ]
[ "def is_nullable(self, col_name):...\n", "if self.is_relation_many_to_one(col_name):\n", "col = self.get_relation_fk(col_name)\n", "return self.list_columns[col_name].nullable\n", "return False\n", "return col.nullable\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Return'", "Return'", "Return'" ]
[ "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string')\n" ]
[ "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n 'If you are receiving a \"PM init failed\" error when attempting to launch safehax and are not on 11.3, use [this version of safehax.](https://github.com/TiniVi/safehax/releases/tag/r19)'\n )\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Expr'" ]
[ "def FUNC_10(VAR_5, VAR_7):...\n", "\"\"\"docstring\"\"\"\n", "VAR_26 = True\n", "VAR_0 = VAR_5['repository']\n", "VAR_8 = {'Authorization': 'token ' + os.environ['GITHUB_TOKEN']}\n", "VAR_9 = os.environ['BOT_USERNAME'], os.environ['BOT_PASSWORD']\n", "VAR_10 = 'https://api.github.com/repos/{}/issues/{}/comments'\n", "VAR_10 = VAR_10.format(VAR_0, str(VAR_5['pr_number']))\n", "VAR_27 = requests.get(VAR_10, VAR_8=headers, VAR_9=auth).json()\n", "VAR_28 = ''\n", "for old_comment in reversed(VAR_27):\n", "if old_comment['user']['id'] == 24736507:\n", "\"\"\"string\"\"\"\n", "VAR_28 = old_comment['body']\n", "for old_comment in reversed(VAR_27):\n", "if '@pep8speaks' in old_comment['body']:\n", "return VAR_26\n", "if 'resume' in old_comment['body'].lower():\n", "if 'quiet' in old_comment['body'].lower():\n", "VAR_26 = False\n" ]
[ "def comment_permission_check(data, comment):...\n", "\"\"\"docstring\"\"\"\n", "PERMITTED_TO_COMMENT = True\n", "repository = data['repository']\n", "headers = {'Authorization': 'token ' + os.environ['GITHUB_TOKEN']}\n", "auth = os.environ['BOT_USERNAME'], os.environ['BOT_PASSWORD']\n", "url = 'https://api.github.com/repos/{}/issues/{}/comments'\n", "url = url.format(repository, str(data['pr_number']))\n", "comments = requests.get(url, headers=headers, auth=auth).json()\n", "last_comment = ''\n", "for old_comment in reversed(comments):\n", "if old_comment['user']['id'] == 24736507:\n", "\"\"\"\n # Disabling this because only a single comment is made per PR\n text1 = ''.join(BeautifulSoup(markdown(comment)).findAll(text=True))\n text2 = ''.join(BeautifulSoup(markdown(last_comment)).findAll(text=True))\n if text1 == text2.replace(\"submitting\", \"updating\"):\n PERMITTED_TO_COMMENT = False\n \"\"\"\n", "last_comment = old_comment['body']\n", "for old_comment in reversed(comments):\n", "if '@pep8speaks' in old_comment['body']:\n", "return PERMITTED_TO_COMMENT\n", "if 'resume' in old_comment['body'].lower():\n", "if 'quiet' in old_comment['body'].lower():\n", "PERMITTED_TO_COMMENT = False\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Condition", "Expr'", "Assign'", "For", "Condition", "Return'", "Condition", "Condition", "Assign'" ]