lines (sequence, lengths 1–444) | raw_lines (sequence, lengths 1–444) | label (sequence, lengths 1–444) | type (sequence, lengths 1–444) |
---|---|---|---|
[
"def FUNC_19(self, VAR_11):...\n",
"\"\"\"docstring\"\"\"\n",
"if len(self.iscsi_ips) == 1:\n",
"return self.iscsi_ips.keys()[0]\n",
"VAR_15 = self._get_active_nsp(VAR_11)\n",
"if VAR_15 is None:\n",
"VAR_15 = self._get_least_used_nsp(self._get_iscsi_nsps())\n",
"return self._get_ip_using_nsp(VAR_15)\n",
"if VAR_15 is None:\n",
"VAR_40 = _('Least busy iSCSI port not found, using first iSCSI port in list.')\n",
"VAR_1.warn(VAR_40)\n",
"return self.iscsi_ips.keys()[0]\n"
] | [
"def _get_iscsi_ip(self, hostname):...\n",
"\"\"\"docstring\"\"\"\n",
"if len(self.iscsi_ips) == 1:\n",
"return self.iscsi_ips.keys()[0]\n",
"nsp = self._get_active_nsp(hostname)\n",
"if nsp is None:\n",
"nsp = self._get_least_used_nsp(self._get_iscsi_nsps())\n",
"return self._get_ip_using_nsp(nsp)\n",
"if nsp is None:\n",
"msg = _('Least busy iSCSI port not found, using first iSCSI port in list.')\n",
"LOG.warn(msg)\n",
"return self.iscsi_ips.keys()[0]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_3(self):...\n",
"VAR_22 = self.builder.get_object('password_unlock_button')\n",
"VAR_22.connect('clicked', self.on_password_unlock_button_clicked)\n",
"VAR_23 = self.builder.get_object('keyfile_unlock_button')\n",
"VAR_23.connect('clicked', self.on_keyfile_unlock_button_clicked)\n",
"VAR_24 = self.builder.get_object('composite_unlock_button')\n",
"VAR_24.connect('clicked', self.on_composite_unlock_button_clicked)\n",
"VAR_25 = self.builder.get_object('keyfile_unlock_select_button')\n",
"VAR_25.connect('clicked', self.on_keyfile_unlock_select_button_clicked)\n",
"VAR_26 = self.builder.get_object('composite_unlock_select_button')\n",
"VAR_26.connect('clicked', self.on_composite_unlock_select_button_clicked)\n",
"VAR_27 = self.builder.get_object('password_unlock_entry')\n",
"VAR_27.connect('activate', self.on_password_unlock_button_clicked)\n",
"VAR_27.connect('icon-press', self.on_password_unlock_entry_secondary_clicked)\n"
] | [
"def connect_events(self):...\n",
"password_unlock_button = self.builder.get_object('password_unlock_button')\n",
"password_unlock_button.connect('clicked', self.\n on_password_unlock_button_clicked)\n",
"keyfile_unlock_button = self.builder.get_object('keyfile_unlock_button')\n",
"keyfile_unlock_button.connect('clicked', self.on_keyfile_unlock_button_clicked)\n",
"composite_unlock_button = self.builder.get_object('composite_unlock_button')\n",
"composite_unlock_button.connect('clicked', self.\n on_composite_unlock_button_clicked)\n",
"keyfile_unlock_select_button = self.builder.get_object(\n 'keyfile_unlock_select_button')\n",
"keyfile_unlock_select_button.connect('clicked', self.\n on_keyfile_unlock_select_button_clicked)\n",
"composite_unlock_select_button = self.builder.get_object(\n 'composite_unlock_select_button')\n",
"composite_unlock_select_button.connect('clicked', self.\n on_composite_unlock_select_button_clicked)\n",
"password_unlock_entry = self.builder.get_object('password_unlock_entry')\n",
"password_unlock_entry.connect('activate', self.\n on_password_unlock_button_clicked)\n",
"password_unlock_entry.connect('icon-press', self.\n on_password_unlock_entry_secondary_clicked)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_1(self):...\n",
"print(request.json)\n",
"VAR_5 = request.json['Name']\n",
"VAR_6 = request.json['Password']\n",
"VAR_3 = VAR_2.execute(\"INSERT INTO USERS(NAME, PASSWORD) VALUES ('\" + VAR_5 +\n \"', '\" + VAR_6 + \"')\")\n",
"VAR_2.commit()\n",
"return {'status': 'success'}\n"
] | [
"def post(self):...\n",
"print(request.json)\n",
"Name = request.json['Name']\n",
"Password = request.json['Password']\n",
"query = conn.execute(\"INSERT INTO USERS(NAME, PASSWORD) VALUES ('\" + Name +\n \"', '\" + Password + \"')\")\n",
"conn.commit()\n",
"return {'status': 'success'}\n"
] | [
0,
0,
0,
0,
4,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_18(self):...\n",
"if not self._IsServerAlive():\n",
"return\n",
"SendEventNotificationAsync('InsertLeave')\n"
] | [
"def OnInsertLeave(self):...\n",
"if not self._IsServerAlive():\n",
"return\n",
"SendEventNotificationAsync('InsertLeave')\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Expr'"
] |
[
"def FUNC_10(VAR_16):...\n",
"return isinstance(VAR_16, str) or not isinstance(VAR_16, Iterable)\n"
] | [
"def not_iterable(value):...\n",
"return isinstance(value, str) or not isinstance(value, Iterable)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_12(self, VAR_14):...\n",
"if not VAR_14:\n",
"return None\n",
"self.model.total_items = VAR_14['total']\n",
"if self.num_torrents_label:\n",
"self.num_torrents_label.setText('%d items' % VAR_14['total'])\n",
"if VAR_14['first'] >= self.model.rowCount():\n",
"self.model.add_items(VAR_14['torrents'])\n",
"return True\n"
] | [
"def on_torrents(self, response):...\n",
"if not response:\n",
"return None\n",
"self.model.total_items = response['total']\n",
"if self.num_torrents_label:\n",
"self.num_torrents_label.setText('%d items' % response['total'])\n",
"if response['first'] >= self.model.rowCount():\n",
"self.model.add_items(response['torrents'])\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Return'"
] |
[
"def FUNC_17(self):...\n",
"return self.name.replace(' ', '_')\n"
] | [
"def get_name_as_slug(self):...\n",
"return self.name.replace(' ', '_')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_1():...\n",
"import time\n",
"import os\n",
"VAR_1 = CLASS_2()\n",
"VAR_1.__dict__ = {'type': 's3', 'region': os.environ['MOTUZ_REGION'],\n 'access_key_id': os.environ['MOTUZ_ACCESS_KEY_ID'], 'secret_access_key':\n os.environ['MOTUZ_SECRET_ACCESS_KEY']}\n",
"VAR_11 = CLASS_0()\n",
"VAR_7 = 123\n",
"import random\n",
"VAR_11.copy(VAR_3=None, VAR_4='/tmp/motuz/mb_blob.bin', VAR_5=data, VAR_6=\n '/fh-ctr-mofuz-test/hello/world/{}'.format(random.randint(10, 10000)),\n VAR_7=job_id)\n",
"while not VAR_11.copy_finished(VAR_7):\n",
"print(VAR_11.copy_percent(VAR_7))\n",
"time.sleep(0.1)\n"
] | [
"def main():...\n",
"import time\n",
"import os\n",
"data = CloudConnection()\n",
"data.__dict__ = {'type': 's3', 'region': os.environ['MOTUZ_REGION'],\n 'access_key_id': os.environ['MOTUZ_ACCESS_KEY_ID'], 'secret_access_key':\n os.environ['MOTUZ_SECRET_ACCESS_KEY']}\n",
"connection = RcloneConnection()\n",
"job_id = 123\n",
"import random\n",
"connection.copy(src_data=None, src_path='/tmp/motuz/mb_blob.bin', dst_data=\n data, dst_path='/fh-ctr-mofuz-test/hello/world/{}'.format(random.\n randint(10, 10000)), job_id=job_id)\n",
"while not connection.copy_finished(job_id):\n",
"print(connection.copy_percent(job_id))\n",
"time.sleep(0.1)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Import'",
"Import'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Import'",
"Expr'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_9(self):...\n",
"self.test_strings = ('normal_string', 'string with spaces',\n 'string with quotes\"a', \"string with s-quotes'b\", 'bsn \\n A',\n 'unrecognized \\\\q escape')\n"
] | [
"def setUp(self):...\n",
"self.test_strings = ('normal_string', 'string with spaces',\n 'string with quotes\"a', \"string with s-quotes'b\", 'bsn \\n A',\n 'unrecognized \\\\q escape')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"\"\"\"\nUnit tests for the Deis api app.\n\nRun the tests with \"./manage.py test api\\\"\n\"\"\"\n",
"from __future__ import unicode_literals\n",
"import json\n",
"from django.conf import settings\n",
"from django.test import TransactionTestCase\n",
"from scheduler import chaos\n",
"\"\"\"Tests creation of containers on nodes\"\"\"\n",
"VAR_0 = ['tests.json']\n",
"def FUNC_0(self):...\n",
"self.assertTrue(self.client.login(username='autotest', password='password'))\n",
"chaos.CREATE_ERROR_RATE = 0\n",
"chaos.DESTROY_ERROR_RATE = 0\n",
"chaos.START_ERROR_RATE = 0\n",
"chaos.STOP_ERROR_RATE = 0\n",
"settings.SCHEDULER_MODULE = 'chaos'\n",
"settings.SSH_PRIVATE_KEY = '<some-ssh-private-key>'\n",
"def FUNC_1(self):...\n",
"settings.SCHEDULER_MODULE = 'mock'\n",
"settings.SSH_PRIVATE_KEY = ''\n",
"def FUNC_2(self):...\n",
"VAR_1 = '/api/apps'\n",
"VAR_2 = self.client.post(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_3 = VAR_2.data['id']\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 1)\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 0}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 204)\n",
"chaos.CREATE_ERROR_RATE = 0.5\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 20}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 503)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 20)\n",
"VAR_5 = set([c['state'] for c in VAR_2.data['results']])\n",
"self.assertEqual(VAR_5, set(['error', 'created']))\n",
"def FUNC_3(self):...\n",
"VAR_1 = '/api/apps'\n",
"VAR_2 = self.client.post(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_3 = VAR_2.data['id']\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 1)\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 0}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 204)\n",
"chaos.START_ERROR_RATE = 0.5\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 20}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 204)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 20)\n",
"VAR_5 = set([c['state'] for c in VAR_2.data['results']])\n",
"self.assertEqual(VAR_5, set(['crashed', 'up']))\n",
"def FUNC_4(self):...\n",
"VAR_1 = '/api/apps'\n",
"VAR_2 = self.client.post(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_3 = VAR_2.data['id']\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 1)\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 20}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 204)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 20)\n",
"chaos.DESTROY_ERROR_RATE = 0.5\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 0}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 503)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"VAR_5 = set([c['state'] for c in VAR_2.data['results']])\n",
"self.assertEqual(VAR_5, set(['error']))\n",
"VAR_6 = 20\n",
"for _ in range(100):\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"def FUNC_5(self):...\n",
"VAR_4 = {'web': 0}\n",
"VAR_1 = '/api/apps'\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"VAR_2 = self.client.post(VAR_1)\n",
"if VAR_2.status_code == 204:\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"self.assertEquals(VAR_2.status_code, 503)\n",
"VAR_3 = VAR_2.data['id']\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"VAR_6 = len(VAR_2.data['results'])\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 2)\n",
"VAR_1 = '/api/apps/{app_id}/releases'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 2)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 1)\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 20}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 204)\n",
"chaos.CREATE_ERROR_RATE = 0.5\n",
"chaos.START_ERROR_RATE = 0.5\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_4 = {'image': 'autotest/example', 'sha': 'b' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 503)\n",
"VAR_1 = '/api/apps/{app_id}/releases'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 2)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 20)\n",
"VAR_5 = set([c['state'] for c in VAR_2.data['results']])\n",
"self.assertEqual(VAR_5, set(['up']))\n",
"def FUNC_6(self):...\n",
"VAR_1 = '/api/apps'\n",
"VAR_2 = self.client.post(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_3 = VAR_2.data['id']\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_1 = '/api/apps/{app_id}/releases'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 2)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 1)\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 20}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 204)\n",
"chaos.CREATE_ERROR_RATE = 0.5\n",
"chaos.START_ERROR_RATE = 0.5\n",
"VAR_1 = '/api/apps/{app_id}/config'.format(**locals())\n",
"VAR_4 = {'values': json.dumps({'NEW_URL1': 'http://localhost:8080/'})}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 503)\n",
"VAR_1 = '/api/apps/{app_id}/releases'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 2)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 20)\n",
"VAR_5 = set([c['state'] for c in VAR_2.data['results']])\n",
"self.assertEqual(VAR_5, set(['up']))\n",
"def FUNC_7(self):...\n",
"VAR_1 = '/api/apps'\n",
"VAR_2 = self.client.post(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_3 = VAR_2.data['id']\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 2)\n",
"VAR_1 = '/api/apps/{app_id}/releases'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 2)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 1)\n",
"chaos.CREATE_ERROR_RATE = 1\n",
"VAR_1 = '/api/apps/{app_id}/run'.format(**locals())\n",
"VAR_4 = {'command': 'ls -al'}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 503)\n"
] | [
"\"\"\"\nUnit tests for the Deis api app.\n\nRun the tests with \"./manage.py test api\\\"\n\"\"\"\n",
"from __future__ import unicode_literals\n",
"import json\n",
"from django.conf import settings\n",
"from django.test import TransactionTestCase\n",
"from scheduler import chaos\n",
"\"\"\"Tests creation of containers on nodes\"\"\"\n",
"fixtures = ['tests.json']\n",
"def setUp(self):...\n",
"self.assertTrue(self.client.login(username='autotest', password='password'))\n",
"chaos.CREATE_ERROR_RATE = 0\n",
"chaos.DESTROY_ERROR_RATE = 0\n",
"chaos.START_ERROR_RATE = 0\n",
"chaos.STOP_ERROR_RATE = 0\n",
"settings.SCHEDULER_MODULE = 'chaos'\n",
"settings.SSH_PRIVATE_KEY = '<some-ssh-private-key>'\n",
"def tearDown(self):...\n",
"settings.SCHEDULER_MODULE = 'mock'\n",
"settings.SSH_PRIVATE_KEY = ''\n",
"def test_create_chaos(self):...\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 0}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 204)\n",
"chaos.CREATE_ERROR_RATE = 0.5\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 20}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 503)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 20)\n",
"states = set([c['state'] for c in response.data['results']])\n",
"self.assertEqual(states, set(['error', 'created']))\n",
"def test_start_chaos(self):...\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 0}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 204)\n",
"chaos.START_ERROR_RATE = 0.5\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 20}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 204)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 20)\n",
"states = set([c['state'] for c in response.data['results']])\n",
"self.assertEqual(states, set(['crashed', 'up']))\n",
"def test_destroy_chaos(self):...\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 20}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 204)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 20)\n",
"chaos.DESTROY_ERROR_RATE = 0.5\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 0}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 503)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"states = set([c['state'] for c in response.data['results']])\n",
"self.assertEqual(states, set(['error']))\n",
"containers = 20\n",
"for _ in range(100):\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"def test_build_chaos(self):...\n",
"body = {'web': 0}\n",
"url = '/api/apps'\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"response = self.client.post(url)\n",
"if response.status_code == 204:\n",
"self.assertEqual(response.status_code, 201)\n",
"self.assertEquals(response.status_code, 503)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"response = self.client.get(url)\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"self.assertEqual(response.status_code, 200)\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"containers = len(response.data['results'])\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 2)\n",
"url = '/api/apps/{app_id}/releases'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 2)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 20}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 204)\n",
"chaos.CREATE_ERROR_RATE = 0.5\n",
"chaos.START_ERROR_RATE = 0.5\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'b' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 503)\n",
"url = '/api/apps/{app_id}/releases'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 2)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 20)\n",
"states = set([c['state'] for c in response.data['results']])\n",
"self.assertEqual(states, set(['up']))\n",
"def test_config_chaos(self):...\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/releases'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 2)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 20}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 204)\n",
"chaos.CREATE_ERROR_RATE = 0.5\n",
"chaos.START_ERROR_RATE = 0.5\n",
"url = '/api/apps/{app_id}/config'.format(**locals())\n",
"body = {'values': json.dumps({'NEW_URL1': 'http://localhost:8080/'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 503)\n",
"url = '/api/apps/{app_id}/releases'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 2)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 20)\n",
"states = set([c['state'] for c in response.data['results']])\n",
"self.assertEqual(states, set(['up']))\n",
"def test_run_chaos(self):...\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 2)\n",
"url = '/api/apps/{app_id}/releases'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 2)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"chaos.CREATE_ERROR_RATE = 1\n",
"url = '/api/apps/{app_id}/run'.format(**locals())\n",
"body = {'command': 'ls -al'}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 503)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
5,
0,
0,
0,
0,
5,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Expr'",
"Assign'",
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_6(self, VAR_1=QModelIndex()):...\n",
"return len(self.data_items)\n"
] | [
"def rowCount(self, parent=QModelIndex()):...\n",
"return len(self.data_items)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_1(self, VAR_1):...\n",
"self.__init__()\n",
"self.name = VAR_1.get('name')\n",
"self.vars = VAR_1.get('vars', dict())\n",
"self.depth = VAR_1.get('depth', 0)\n",
"self.hosts = VAR_1.get('hosts', [])\n",
"self._hosts = None\n",
"VAR_7 = VAR_1.get('parent_groups', [])\n",
"for parent_data in VAR_7:\n",
"VAR_12 = CLASS_0()\n",
"VAR_12.deserialize(parent_data)\n",
"self.parent_groups.append(VAR_12)\n"
] | [
"def deserialize(self, data):...\n",
"self.__init__()\n",
"self.name = data.get('name')\n",
"self.vars = data.get('vars', dict())\n",
"self.depth = data.get('depth', 0)\n",
"self.hosts = data.get('hosts', [])\n",
"self._hosts = None\n",
"parent_groups = data.get('parent_groups', [])\n",
"for parent_data in parent_groups:\n",
"g = Group()\n",
"g.deserialize(parent_data)\n",
"self.parent_groups.append(g)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_4(VAR_0):...\n",
"VAR_0.clear_filters()\n",
"for arg in request.args:\n",
"VAR_8 = re.findall('_flt_(\\\\d)_(.*)', arg)\n",
"if VAR_8:\n",
"VAR_0.add_filter_index(VAR_8[0][1], int(VAR_8[0][0]), request.args.get(arg))\n"
] | [
"def get_filter_args(filters):...\n",
"filters.clear_filters()\n",
"for arg in request.args:\n",
"re_match = re.findall('_flt_(\\\\d)_(.*)', arg)\n",
"if re_match:\n",
"filters.add_filter_index(re_match[0][1], int(re_match[0][0]), request.args.\n get(arg))\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"For",
"Assign'",
"Condition",
"Expr'"
] |
[
"def FUNC_3(VAR_0, VAR_6):...\n",
"VAR_5.execute(f'DELETE from note WHERE note_id = {VAR_6}')\n"
] | [
"def delete_note(conn, note_id):...\n",
"cur.execute(f'DELETE from note WHERE note_id = {note_id}')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_32(self, VAR_81):...\n",
"def FUNC_39(VAR_101):...\n",
"VAR_101.shellcmd = VAR_81\n",
"return VAR_101\n"
] | [
"def shellcmd(self, cmd):...\n",
"def decorate(ruleinfo):...\n",
"ruleinfo.shellcmd = cmd\n",
"return ruleinfo\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_11(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = {'SEVERITY': {'LOW': 5}, 'CONFIDENCE': {'MEDIUM': 5}}\n",
"self.check_example('exec-as-root.py', VAR_2)\n"
] | [
"def test_exec_as_root(self):...\n",
"\"\"\"docstring\"\"\"\n",
"expect = {'SEVERITY': {'LOW': 5}, 'CONFIDENCE': {'MEDIUM': 5}}\n",
"self.check_example('exec-as-root.py', expect)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return sum(map(len, VAR_3.values()))\n"
] | [
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return sum(map(len, wildcards.values()))\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_6(self, VAR_25):...\n",
"VAR_77 = []\n",
"if VAR_25:\n",
"VAR_77 = [VAR_3 for VAR_3 in self.comma_sep.split(VAR_25) if VAR_3 ==\n _force_ascii(VAR_3)]\n",
"return VAR_77\n"
] | [
"def run(self, tag_field):...\n",
"tags = []\n",
"if tag_field:\n",
"tags = [x for x in self.comma_sep.split(tag_field) if x == _force_ascii(x)]\n",
"return tags\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_18(self, VAR_14):...\n",
"return self.user_ip == get_real_ip(VAR_14)\n"
] | [
"def is_mine(self, request):...\n",
"return self.user_ip == get_real_ip(request)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_5(VAR_4, VAR_6, VAR_7='get', VAR_5=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = None\n",
"if VAR_5 is not None:\n",
"VAR_11 = normalize_request(VAR_5)\n",
"VAR_12 = None\n",
"if VAR_6 is not None:\n",
"VAR_12 = normalize_response(VAR_6, VAR_11=request)\n",
"if VAR_12 is not None:\n",
"validate_response(VAR_12=response, VAR_7=request_method, VAR_4=schema)\n"
] | [
"def validate_api_response(schema, raw_response, request_method='get',...\n",
"\"\"\"docstring\"\"\"\n",
"request = None\n",
"if raw_request is not None:\n",
"request = normalize_request(raw_request)\n",
"response = None\n",
"if raw_response is not None:\n",
"response = normalize_response(raw_response, request=request)\n",
"if response is not None:\n",
"validate_response(response=response, request_method=request_method, schema=\n schema)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'"
] |
[
"@contextmanager...\n",
"\"\"\"docstring\"\"\"\n",
"yield workunit\n"
] | [
"@contextmanager...\n",
"\"\"\"docstring\"\"\"\n",
"yield workunit\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'"
] |
[
"def FUNC_59(self):...\n",
"VAR_13 = 'string'\n",
"VAR_14 = 'string' % (os.getcwd(), VAR_13)\n",
"self.b_mgr.populate_baseline(VAR_14)\n",
"self.run_example('flask_debug.py')\n",
"self.assertEqual(1, len(self.b_mgr.baseline))\n",
"self.assertEqual({}, self.b_mgr.get_issue_list())\n"
] | [
"def test_baseline_filter(self):...\n",
"issue_text = (\n 'A Flask app appears to be run with debug=True, which exposes the Werkzeug debugger and allows the execution of arbitrary code.'\n )\n",
"json = (\n \"\"\"{\n \"results\": [\n {\n \"code\": \"...\",\n \"filename\": \"%s/examples/flask_debug.py\",\n \"issue_confidence\": \"MEDIUM\",\n \"issue_severity\": \"HIGH\",\n \"issue_text\": \"%s\",\n \"line_number\": 10,\n \"line_range\": [\n 10\n ],\n \"test_name\": \"flask_debug_true\",\n \"test_id\": \"B201\"\n }\n ]\n }\n \"\"\"\n % (os.getcwd(), issue_text))\n",
"self.b_mgr.populate_baseline(json)\n",
"self.run_example('flask_debug.py')\n",
"self.assertEqual(1, len(self.b_mgr.baseline))\n",
"self.assertEqual({}, self.b_mgr.get_issue_list())\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_2(self):...\n",
"VAR_5 = [{'cloudProvider': 'gce', 'loadBalancerName': self.__lb_name,\n 'ipProtocol': 'TCP', 'portRange': '8080', 'provider': 'gce', 'stack':\n self.TEST_STACK, 'detail': 'frontend', 'credentials': self.bindings[\n 'GCE_CREDENTIALS'], 'region': self.TEST_REGION, 'listeners': [{\n 'protocol': 'TCP', 'portRange': '8080', 'healthCheck': False}], 'name':\n self.__lb_name, 'type': 'upsertLoadBalancer', 'availabilityZones': {\n self.TEST_REGION: []}, 'user': 'integration-tests'}]\n",
"VAR_6 = gcp.GceContractBuilder(self.gce_observer)\n",
"VAR_6.new_clause_builder('Load Balancer Created', retryable_for_secs=30\n ).list_resources('forwarding-rules').contains_path_value('name', self.\n __lb_name)\n",
"VAR_7 = self.agent.make_json_payload_from_kwargs(VAR_5=job, description=\n 'Server Group Test - create load balancer', application=self.TEST_APP)\n",
"return st.OperationContract(self.new_post_operation(title=\n 'create_load_balancer', data=payload, path=self.__path), contract=\n builder.build())\n"
] | [
"def create_load_balancer(self):...\n",
"job = [{'cloudProvider': 'gce', 'loadBalancerName': self.__lb_name,\n 'ipProtocol': 'TCP', 'portRange': '8080', 'provider': 'gce', 'stack':\n self.TEST_STACK, 'detail': 'frontend', 'credentials': self.bindings[\n 'GCE_CREDENTIALS'], 'region': self.TEST_REGION, 'listeners': [{\n 'protocol': 'TCP', 'portRange': '8080', 'healthCheck': False}], 'name':\n self.__lb_name, 'type': 'upsertLoadBalancer', 'availabilityZones': {\n self.TEST_REGION: []}, 'user': 'integration-tests'}]\n",
"builder = gcp.GceContractBuilder(self.gce_observer)\n",
"builder.new_clause_builder('Load Balancer Created', retryable_for_secs=30\n ).list_resources('forwarding-rules').contains_path_value('name', self.\n __lb_name)\n",
"payload = self.agent.make_json_payload_from_kwargs(job=job, description=\n 'Server Group Test - create load balancer', application=self.TEST_APP)\n",
"return st.OperationContract(self.new_post_operation(title=\n 'create_load_balancer', data=payload, path=self.__path), contract=\n builder.build())\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_12(self):...\n",
"self.run_test_case(self.scenario.resize_server_group())\n"
] | [
"def test_c_resize_server_group(self):...\n",
"self.run_test_case(self.scenario.resize_server_group())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"import cmd\n",
"import getpass\n",
"import paramiko\n",
"from shrub.scripts.validate import validate\n",
"VAR_0 = '[email protected]'\n",
"VAR_1 = 'swordfish'\n",
"VAR_4 = 'shrub> '\n",
"VAR_5 = \"\"\"Welcome to shrub!\nTo get started, try \"help\".\n\"\"\"\n",
"VAR_6 = 'Available commands:'\n",
"VAR_7 = '-'\n",
"VAR_8 = []\n",
"def FUNC_4(self):...\n",
"def FUNC_5(self, VAR_9):...\n",
"if not validate(VAR_9):\n",
"print('shrub: {}: command not found. Try \"help\".'.format(VAR_9.split(' ', 1\n )[0]))\n",
"VAR_18 = self.send_cmd(VAR_9, self.user_creds)\n",
"return\n",
"print(VAR_18)\n",
"return\n"
] | [
"import cmd\n",
"import getpass\n",
"import paramiko\n",
"from shrub.scripts.validate import validate\n",
"CONNECTION_STRING = '[email protected]'\n",
"SERVER_PASSWORD = 'swordfish'\n",
"prompt = 'shrub> '\n",
"intro = \"\"\"Welcome to shrub!\nTo get started, try \"help\".\n\"\"\"\n",
"doc_header = 'Available commands:'\n",
"ruler = '-'\n",
"user_creds = []\n",
"def emptyline(self):...\n",
"def default(self, line):...\n",
"if not validate(line):\n",
"print('shrub: {}: command not found. Try \"help\".'.format(line.split(' ', 1)[0])\n )\n",
"message = self.send_cmd(line, self.user_creds)\n",
"return\n",
"print(message)\n",
"return\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Expr'",
"Return'"
] |
[
"def FUNC_1(VAR_1, VAR_2):...\n",
"VAR_3 = psycopg2.connect(host=config['HOST'], port=config['PORT'], database\n =config['NAME'], user=config['USER'], password=config['PASSWORD'])\n",
"VAR_4 = VAR_3.cursor()\n",
"VAR_4.execute(\"insert into reply_map values('{}', '{}')\".format(VAR_1, VAR_2))\n",
"VAR_3.commit()\n"
] | [
"def insert(key, value):...\n",
"connection = psycopg2.connect(host=config['HOST'], port=config['PORT'],\n database=config['NAME'], user=config['USER'], password=config['PASSWORD'])\n",
"cur = connection.cursor()\n",
"cur.execute(\"insert into reply_map values('{}', '{}')\".format(key, value))\n",
"connection.commit()\n"
] | [
0,
0,
0,
4,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"import time\n",
"from urllib.parse import urlencode\n",
"from urllib import urlencode\n",
"from django.core.exceptions import SuspiciousOperation\n",
"from django.core.urlresolvers import reverse\n",
"from django.contrib import auth\n",
"from django.http import HttpResponseRedirect\n",
"from django.utils.crypto import get_random_string\n",
"from django.utils.module_loading import import_string\n",
"from django.views.generic import View\n",
"from mozilla_django_oidc.utils import absolutify, import_from_settings, is_authenticated\n",
"\"\"\"OIDC client authentication callback HTTP endpoint\"\"\"\n",
"VAR_0 = ['get']\n",
"@property...\n",
"return import_from_settings('LOGIN_REDIRECT_URL_FAILURE', '/')\n"
] | [
"import time\n",
"from urllib.parse import urlencode\n",
"from urllib import urlencode\n",
"from django.core.exceptions import SuspiciousOperation\n",
"from django.core.urlresolvers import reverse\n",
"from django.contrib import auth\n",
"from django.http import HttpResponseRedirect\n",
"from django.utils.crypto import get_random_string\n",
"from django.utils.module_loading import import_string\n",
"from django.views.generic import View\n",
"from mozilla_django_oidc.utils import absolutify, import_from_settings, is_authenticated\n",
"\"\"\"OIDC client authentication callback HTTP endpoint\"\"\"\n",
"http_method_names = ['get']\n",
"@property...\n",
"return import_from_settings('LOGIN_REDIRECT_URL_FAILURE', '/')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Expr'",
"Assign'",
"Condition",
"Return'"
] |
[
"@login_required...\n",
"VAR_8 = None\n",
"VAR_27 = {'title': VAR_19}\n",
"if VAR_3 is not None:\n",
"VAR_8 = get_object_or_404(VAR_5, **{group_slug_field: group_slug})\n",
"VAR_46 = VAR_47 = True\n",
"VAR_51 = get_ct(VAR_8)\n",
"if not VAR_47:\n",
"VAR_27.update({'content_type': VAR_51, 'object_id': VAR_8.id})\n",
"return HttpResponseForbidden()\n",
"VAR_28 = VAR_2.get(**article_args)\n",
"VAR_28 = None\n",
"if VAR_14.method == 'POST':\n",
"VAR_46 = FUNC_3(VAR_14.user, VAR_8, VAR_12, VAR_13)\n",
"VAR_52 = VAR_21(VAR_14.POST, instance=article)\n",
"if VAR_14.method == 'GET':\n",
"VAR_47 = FUNC_4(VAR_14.user, VAR_8, VAR_12)\n",
"VAR_52.cache_old_content()\n",
"VAR_60 = get_real_ip(VAR_14)\n",
"if not VAR_28:\n",
"if VAR_52.is_valid():\n",
"VAR_61 = {'user_ip': VAR_60}\n",
"VAR_48 = {'form': VAR_52, 'new_article': True}\n",
"VAR_48 = {'form': VAR_52, 'new_article': False, 'content_type': ContentType\n .objects.get_for_model(Article).pk, 'object_id': VAR_28.pk, 'images':\n VAR_28.all_images(), 'article': VAR_28}\n",
"if VAR_14.user.is_authenticated():\n",
"if VAR_3 is not None:\n",
"if VAR_3 is not None:\n",
"VAR_52.editor = VAR_14.user\n",
"if VAR_28 is None and VAR_3 is not None:\n",
"VAR_61.update({'content_type': VAR_51.id, 'object_id': VAR_8.id})\n",
"if VAR_28 is None:\n",
"VAR_48['group'] = VAR_8\n",
"if VAR_18 is not None:\n",
"VAR_52.group = VAR_8\n",
"VAR_59, VAR_53 = VAR_52.save()\n",
"VAR_61.update({'title': VAR_19, 'action': 'create'})\n",
"VAR_61['action'] = 'edit'\n",
"VAR_48.update(VAR_18)\n",
"return render_to_response('/'.join([VAR_17, VAR_16]), VAR_48,\n context_instance=RequestContext(request))\n",
"return redirect(VAR_59)\n",
"VAR_52 = VAR_21(VAR_61=initial)\n",
"VAR_52 = VAR_21(instance=article, VAR_61=initial)\n"
] | [
"@login_required...\n",
"group = None\n",
"article_args = {'title': title}\n",
"if group_slug is not None:\n",
"group = get_object_or_404(group_qs, **{group_slug_field: group_slug})\n",
"allow_read = allow_write = True\n",
"group_ct = get_ct(group)\n",
"if not allow_write:\n",
"article_args.update({'content_type': group_ct, 'object_id': group.id})\n",
"return HttpResponseForbidden()\n",
"article = article_qs.get(**article_args)\n",
"article = None\n",
"if request.method == 'POST':\n",
"allow_read = has_read_perm(request.user, group, is_member, is_private)\n",
"form = ArticleFormClass(request.POST, instance=article)\n",
"if request.method == 'GET':\n",
"allow_write = has_write_perm(request.user, group, is_member)\n",
"form.cache_old_content()\n",
"user_ip = get_real_ip(request)\n",
"if not article:\n",
"if form.is_valid():\n",
"initial = {'user_ip': user_ip}\n",
"template_params = {'form': form, 'new_article': True}\n",
"template_params = {'form': form, 'new_article': False, 'content_type':\n ContentType.objects.get_for_model(Article).pk, 'object_id': article.pk,\n 'images': article.all_images(), 'article': article}\n",
"if request.user.is_authenticated():\n",
"if group_slug is not None:\n",
"if group_slug is not None:\n",
"form.editor = request.user\n",
"if article is None and group_slug is not None:\n",
"initial.update({'content_type': group_ct.id, 'object_id': group.id})\n",
"if article is None:\n",
"template_params['group'] = group\n",
"if extra_context is not None:\n",
"form.group = group\n",
"new_article, changeset = form.save()\n",
"initial.update({'title': title, 'action': 'create'})\n",
"initial['action'] = 'edit'\n",
"template_params.update(extra_context)\n",
"return render_to_response('/'.join([template_dir, template_name]),\n template_params, context_instance=RequestContext(request))\n",
"return redirect(new_article)\n",
"form = ArticleFormClass(initial=initial)\n",
"form = ArticleFormClass(instance=article, initial=initial)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Return'",
"Return'",
"Assign'",
"Assign'"
] |
[
"def FUNC_3(VAR_1, VAR_2):...\n",
""
] | [
"def queryOne(cursor, reqString):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = discord.Embed(VAR_2='LayeredFs Guide', VAR_3=discord.Color(6750207))\n",
"VAR_7.set_author(name='ih8ih8sn0w', url='https://pastebin.com/sx8HYULr')\n",
"VAR_7.set_thumbnail(url='http://i.imgur.com/QEUfyrp.png')\n",
"VAR_7.url = 'https://pastebin.com/QdzBv4Te'\n",
"VAR_7.description = 'How to use Luma 8.0+ LayeredFs for ROM Hacking.'\n",
"await self.bot.say('', VAR_7=embed)\n"
] | [
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"embed = discord.Embed(title='LayeredFs Guide', color=discord.Color(6750207))\n",
"embed.set_author(name='ih8ih8sn0w', url='https://pastebin.com/sx8HYULr')\n",
"embed.set_thumbnail(url='http://i.imgur.com/QEUfyrp.png')\n",
"embed.url = 'https://pastebin.com/QdzBv4Te'\n",
"embed.description = 'How to use Luma 8.0+ LayeredFs for ROM Hacking.'\n",
"await self.bot.say('', embed=embed)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"from django.core.exceptions import ObjectDoesNotExist\n",
"from rest_framework import exceptions\n",
"from app import models\n",
"def FUNC_0(VAR_0, VAR_1, VAR_2=('view_project',)):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = models.Project.objects.get(pk=project_pk, deleting=False)\n",
"return VAR_6\n",
"for perm in VAR_2:\n",
"if not VAR_0.user.has_perm(perm, VAR_6):\n"
] | [
"from django.core.exceptions import ObjectDoesNotExist\n",
"from rest_framework import exceptions\n",
"from app import models\n",
"def get_and_check_project(request, project_pk, perms=('view_project',)):...\n",
"\"\"\"docstring\"\"\"\n",
"project = models.Project.objects.get(pk=project_pk, deleting=False)\n",
"return project\n",
"for perm in perms:\n",
"if not request.user.has_perm(perm, project):\n"
] | [
1,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Docstring",
"Assign'",
"Return'",
"For",
"Condition"
] |
[
"\"\"\"\nraut2webstr-pagemodel-tree import-path script\n\"\"\"\n",
"import argparse\n",
"import os\n",
"import sys\n",
"import re\n",
"VAR_0 = 'models', 'pages'\n",
"def FUNC_0(VAR_1):...\n",
"return VAR_1.endswith('.py') and VAR_1 != '__init__.py'\n"
] | [
"\"\"\"\nraut2webstr-pagemodel-tree import-path script\n\"\"\"\n",
"import argparse\n",
"import os\n",
"import sys\n",
"import re\n",
"RAUT_MODULES = 'models', 'pages'\n",
"def is_py_file(filename):...\n",
"return filename.endswith('.py') and filename != '__init__.py'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"Import'",
"Assign'",
"FunctionDef'",
"Return'"
] |
[
"def __init__(self, VAR_19=False, *VAR_20, **VAR_21):...\n",
"super(CLASS_3, self).__init__(*VAR_20, VAR_19=csrf_enabled, **kwargs)\n"
] | [
"def __init__(self, csrf_enabled=False, *args, **kwargs):...\n",
"super(ChangeAllModelsForm, self).__init__(*args, csrf_enabled=csrf_enabled,\n **kwargs)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_6(VAR_7):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
"def verify_path(path):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
0,
0
] | [
"FunctionDef'",
"Docstring"
] |
[
"def FUNC_2(self, VAR_1, VAR_3=None, VAR_4=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_4 is None:\n",
"if VAR_3 is None:\n",
"VAR_3 = VAR_3.copy()\n",
"for fobj in VAR_20.itervalues():\n",
"return VAR_15\n",
"VAR_15 = self.session.get(VAR_1)\n",
"VAR_3 = VAR_3.copy()\n",
"VAR_3['_xsrf'] = self.xsrf_token\n",
"fobj.close()\n",
"VAR_3['_xsrf'] = self.xsrf_token\n",
"VAR_20 = dict((k, io.open(v, 'rb')) for k, v in VAR_4)\n",
"VAR_15 = self.session.post(VAR_1, VAR_3)\n",
"VAR_15 = self.session.post(VAR_1, VAR_3, files=file_objs)\n"
] | [
"def do_request(self, url, data=None, file_names=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if file_names is None:\n",
"if data is None:\n",
"data = data.copy()\n",
"for fobj in file_objs.itervalues():\n",
"return response\n",
"response = self.session.get(url)\n",
"data = data.copy()\n",
"data['_xsrf'] = self.xsrf_token\n",
"fobj.close()\n",
"data['_xsrf'] = self.xsrf_token\n",
"file_objs = dict((k, io.open(v, 'rb')) for k, v in file_names)\n",
"response = self.session.post(url, data)\n",
"response = self.session.post(url, data, files=file_objs)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Condition",
"Assign'",
"For",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_23(self, VAR_12=False):...\n",
"if self.DiagnosticsForCurrentFileReady():\n",
"VAR_22 = self._latest_file_parse_request.Response()\n",
"return []\n",
"self._latest_file_parse_request = None\n",
"if VAR_12:\n",
"return vimsupport.ConvertDiagnosticsToQfList(VAR_22)\n",
"return VAR_22\n"
] | [
"def GetDiagnosticsFromStoredRequest(self, qflist_format=False):...\n",
"if self.DiagnosticsForCurrentFileReady():\n",
"diagnostics = self._latest_file_parse_request.Response()\n",
"return []\n",
"self._latest_file_parse_request = None\n",
"if qflist_format:\n",
"return vimsupport.ConvertDiagnosticsToQfList(diagnostics)\n",
"return diagnostics\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_4(VAR_5):...\n",
"if VAR_1:\n",
"print('attempting to get pickle data for ', VAR_5)\n",
"VAR_38 = os.getcwd()\n",
"VAR_39 = VAR_5.replace('/', '_')\n",
"VAR_40 = VAR_38 + '/pickle/' + str(VAR_39) + '.p'\n",
"VAR_2.info('attempting to load pickle data for {}'.format(VAR_40))\n",
"VAR_6 = pickle.load(p)\n",
"VAR_2.info('could not load pickle data for {}'.format(VAR_40))\n",
"return VAR_6\n",
"if VAR_1:\n",
"print('failed to get pickle data for ', VAR_5)\n",
"return None\n"
] | [
"def load_pickle_data(base_fname):...\n",
"if debug:\n",
"print('attempting to get pickle data for ', base_fname)\n",
"cwd = os.getcwd()\n",
"bracket_name = base_fname.replace('/', '_')\n",
"fname = cwd + '/pickle/' + str(bracket_name) + '.p'\n",
"LOG.info('attempting to load pickle data for {}'.format(fname))\n",
"data = pickle.load(p)\n",
"LOG.info('could not load pickle data for {}'.format(fname))\n",
"return data\n",
"if debug:\n",
"print('failed to get pickle data for ', base_fname)\n",
"return None\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Return'",
"Condition",
"Expr'",
"Return'"
] |
[
"def FUNC_9(self):...\n",
"self.test_strings = ('normal_string', 'string with spaces',\n 'string with quotes\"a', \"string with s-quotes'b\", 'bsn \\n A',\n 'unrecognized \\\\q escape')\n"
] | [
"def setUp(self):...\n",
"self.test_strings = ('normal_string', 'string with spaces',\n 'string with quotes\"a', \"string with s-quotes'b\", 'bsn \\n A',\n 'unrecognized \\\\q escape')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_24():...\n",
""
] | [
"def connect_timeout():...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_16(self, **VAR_17):...\n",
"\"\"\"docstring\"\"\"\n",
"self.set_defaults(**kwargs)\n",
"for VAR_42, VAR_3 in VAR_17.items():\n",
"self.overridable[VAR_42] = VAR_3\n"
] | [
"def set_params(self, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"self.set_defaults(**kwargs)\n",
"for k, v in kwargs.items():\n",
"self.overridable[k] = v\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"For",
"Assign'"
] |
[
"@property...\n",
"return self._log\n"
] | [
"@property...\n",
"return self._log\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_15 = ['hp3par_api_url', 'hp3par_username', 'hp3par_password', 'san_ip',\n 'san_login', 'san_password']\n",
"self.common.check_flags(self.configuration, VAR_15)\n"
] | [
"def _check_flags(self):...\n",
"\"\"\"docstring\"\"\"\n",
"required_flags = ['hp3par_api_url', 'hp3par_username', 'hp3par_password',\n 'san_ip', 'san_login', 'san_password']\n",
"self.common.check_flags(self.configuration, required_flags)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"def FUNC_3(VAR_6, VAR_7):...\n",
"if VAR_7.data is VAR_6[VAR_5].data:\n"
] | [
"def _different(form, field):...\n",
"if field.data is form[fieldname].data:\n"
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_6():...\n",
"VAR_35 = FUNC_3(VAR_18, VAR_19)\n",
"return CLASS_16, VAR_35\n"
] | [
"def three():...\n",
"HnPFormSet = make_formset(parent, child)\n",
"return ExtractedTextForm, HnPFormSet\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_8(self, VAR_11, VAR_10, VAR_12):...\n",
"VAR_0.start_engine(VAR_11=session, VAR_23=data, workflow_name=data['wf'])\n",
"VAR_0.current.headers = VAR_12\n",
"self.current = VAR_0.current\n",
"VAR_0.run()\n",
"return VAR_0.current.output\n"
] | [
"def _handle_workflow(self, session, data, headers):...\n",
"wf_engine.start_engine(session=session, input=data, workflow_name=data['wf'])\n",
"wf_engine.current.headers = headers\n",
"self.current = wf_engine.current\n",
"wf_engine.run()\n",
"return wf_engine.current.output\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"async def FUNC_9(VAR_6):...\n",
"VAR_10 = {'Authorization': f'Bearer {VAR_1}'}\n",
"VAR_8 = await VAR_6.get('/results?page=0&count=2', VAR_10=headers)\n",
"VAR_9 = await VAR_8.json()\n",
"assert VAR_8.status == 200\n",
"assert len(VAR_9) == 2\n",
"VAR_8 = await VAR_6.get('/results?page=1&count=1', VAR_10=headers)\n",
"VAR_9 = await VAR_8.json()\n",
"assert VAR_8.status == 200\n",
"assert len(VAR_9) == 1\n"
] | [
"async def test_positive_get_paging(test_cli):...\n",
"headers = {'Authorization': f'Bearer {access_token}'}\n",
"resp = await test_cli.get('/results?page=0&count=2', headers=headers)\n",
"resp_json = await resp.json()\n",
"assert resp.status == 200\n",
"assert len(resp_json) == 2\n",
"resp = await test_cli.get('/results?page=1&count=1', headers=headers)\n",
"resp_json = await resp.json()\n",
"assert resp.status == 200\n",
"assert len(resp_json) == 1\n"
] | [
0,
0,
4,
0,
0,
0,
4,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assert'",
"Assert'",
"Assign'",
"Assign'",
"Assert'",
"Assert'"
] |
[
"async def FUNC_8(VAR_12: int):...\n",
"\"\"\"docstring\"\"\"\n",
"await cursor.execute('SELECT uid, item_id FROM item where player_uid = %d' %\n VAR_12)\n",
"FUNC_10(ex)\n",
"return VAR_19, True\n",
"VAR_19 = cursor.fetchall()\n",
"return tuple(), False\n"
] | [
"async def get_item_list(player_uid: int):...\n",
"\"\"\"docstring\"\"\"\n",
"await cursor.execute('SELECT uid, item_id FROM item where player_uid = %d' %\n player_uid)\n",
"_error_report(ex)\n",
"return datas, True\n",
"datas = cursor.fetchall()\n",
"return tuple(), False\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Docstring",
"Expr'",
"Expr'",
"Return'",
"Assign'",
"Return'"
] |
[
"@VAR_0.route('/promote/<int:id>')...\n",
"VAR_7 = get_db()\n",
"VAR_15 = get_user(VAR_4)\n",
"VAR_16 = None\n",
"if VAR_15['restricted'] == 1:\n",
"VAR_16 = 'Cannot promote restricted user.'\n",
"if VAR_15['admin'] == 1:\n",
"if VAR_16 is None:\n",
"VAR_16 = 'User is already an admin.'\n",
"VAR_7.execute('UPDATE user SET admin = 1 WHERE id = ?', (VAR_4,))\n",
"flash(VAR_16)\n",
"VAR_7.commit()\n",
"return redirect(url_for('admin.user_view'))\n",
"return redirect(url_for('admin.user_view'))\n"
] | [
"@bp.route('/promote/<int:id>')...\n",
"db = get_db()\n",
"user = get_user(id)\n",
"error = None\n",
"if user['restricted'] == 1:\n",
"error = 'Cannot promote restricted user.'\n",
"if user['admin'] == 1:\n",
"if error is None:\n",
"error = 'User is already an admin.'\n",
"db.execute('UPDATE user SET admin = 1 WHERE id = ?', (id,))\n",
"flash(error)\n",
"db.commit()\n",
"return redirect(url_for('admin.user_view'))\n",
"return redirect(url_for('admin.user_view'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Return'",
"Return'"
] |
[
"def FUNC_13(self, VAR_13):...\n",
"if VAR_13:\n",
"self._update_backend_status()\n",
"return self.device_stats\n"
] | [
"def get_volume_stats(self, refresh):...\n",
"if refresh:\n",
"self._update_backend_status()\n",
"return self.device_stats\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Return'"
] |
[
"@pytest.mark.skip(reason='compound words not handled yet')...\n",
"FUNC_2(VAR_2)\n",
"FUNC_3(VAR_2, 'BEE KLEEN', VAR_7='1')\n",
"FUNC_5(VAR_4, VAR_5, VAR_11='BE-CLEAN', VAR_10=[{'name': 'BEE KLEEN'}])\n"
] | [
"@pytest.mark.skip(reason='compound words not handled yet')...\n",
"clean_database(solr)\n",
"seed_database_with(solr, 'BEE KLEEN', id='1')\n",
"verify_results(client, jwt, query='BE-CLEAN', expected=[{'name': 'BEE KLEEN'}])\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_25(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.assertEqual(0, FUNC_5(VAR_1='Christoph Waltz', VAR_2='Germany'))\n"
] | [
"def test_add_new_player(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.assertEqual(0, dummy_player(player_name='Christoph Waltz', country=\n 'Germany'))\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"def FUNC_7(self, VAR_4):...\n",
"self.wait_next()\n",
"self.log.total += 1\n",
"VAR_4.execute()\n",
"print('Unhandled exception while executing the request: %s' % exc, file=sys\n .stderr)\n",
"self.log.__dict__[VAR_4.outcome] += 1\n",
"return\n",
"self.log.total_time += VAR_4.duration\n",
"self.log.max_time = max(self.log.max_time, VAR_4.duration)\n",
"self.log.store_to_file(VAR_4)\n"
] | [
"def do_step(self, request):...\n",
"self.wait_next()\n",
"self.log.total += 1\n",
"request.execute()\n",
"print('Unhandled exception while executing the request: %s' % exc, file=sys\n .stderr)\n",
"self.log.__dict__[request.outcome] += 1\n",
"return\n",
"self.log.total_time += request.duration\n",
"self.log.max_time = max(self.log.max_time, request.duration)\n",
"self.log.store_to_file(request)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"AugAssign'",
"Expr'",
"Expr'",
"AugAssign'",
"Return'",
"AugAssign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_1():...\n",
"VAR_13 = argparse.ArgumentParser(description='Stress tester for CMS')\n",
"VAR_13.add_argument('-c', '--contest-id', action='store', type=int,\n required=True, help='ID of the contest to test against')\n",
"VAR_13.add_argument('-n', '--actor-num', action='store', type=int, help=\n 'the number of actors to spawn')\n",
"VAR_13.add_argument('-s', '--sort-actors', action='store_true', help=\n 'sort usernames alphabetically before slicing them')\n",
"VAR_13.add_argument('-u', '--base-url', action='store', type=utf8_decoder,\n help='base URL for placing HTTP requests')\n",
"VAR_13.add_argument('-S', '--submissions-path', action='store', type=\n utf8_decoder, help='base path for submission to send')\n",
"VAR_13.add_argument('-p', '--prepare-path', action='store', type=\n utf8_decoder, help='file to put contest info to')\n",
"VAR_13.add_argument('-r', '--read-from', action='store', type=utf8_decoder,\n help='file to read contest info from')\n",
"VAR_13.add_argument('-t', '--time-coeff', action='store', type=float,\n default=10.0, help='average wait between actions')\n",
"VAR_13.add_argument('-o', '--only-submit', action='store_true', help=\n 'whether the actor only submits solutions')\n",
"VAR_14 = VAR_13.parse_args()\n",
"if VAR_14.prepare_path is not None:\n",
"VAR_12, VAR_8 = FUNC_0(VAR_14.contest_id)\n",
"assert VAR_14.time_coeff > 0.0\n",
"VAR_27 = dict()\n",
"assert not (VAR_14.only_submit and VAR_14.submissions_path == '')\n",
"VAR_27['users'] = VAR_12\n",
"VAR_12 = []\n",
"VAR_27['tasks'] = VAR_8\n",
"VAR_8 = []\n",
"file_.write('%s' % VAR_27)\n",
"if VAR_14.read_from is None:\n",
"return\n",
"VAR_12, VAR_8 = FUNC_0(VAR_14.contest_id)\n",
"VAR_27 = ast.literal_eval(file_.read())\n",
"if VAR_14.actor_num is not None:\n",
"VAR_12 = VAR_27['users']\n",
"VAR_28 = VAR_12.items()\n",
"if VAR_14.base_url is not None:\n",
"VAR_8 = VAR_27['tasks']\n",
"if VAR_14.sort_actors:\n",
"VAR_10 = VAR_14.base_url\n",
"VAR_10 = 'http://%s:%d/' % (get_service_address(ServiceCoord(\n 'ContestWebServer', 0))[0], config.contest_listen_port[0])\n",
"VAR_28.sort()\n",
"random.shuffle(VAR_28)\n",
"VAR_7 = VAR_1\n",
"VAR_12 = dict(VAR_28[:VAR_14.actor_num])\n",
"VAR_7['time_coeff'] = VAR_14.time_coeff\n",
"VAR_15 = CLASS_3\n",
"if VAR_14.only_submit:\n",
"VAR_15 = CLASS_4\n",
"VAR_16 = [VAR_15(VAR_5, data['password'], VAR_7, VAR_8, VAR_9=RequestLog(\n log_dir=os.path.join('./test_logs', username)), VAR_10=base_url, VAR_11\n =args.submissions_path) for VAR_5, data in VAR_12.iteritems()]\n",
"for actor in VAR_16:\n",
"actor.start()\n",
"while True:\n",
"print('Taking down actors', file=sys.stderr)\n",
"VAR_17 = False\n",
"time.sleep(1)\n",
"for actor in VAR_16:\n",
"while not VAR_17:\n",
"actor.die = True\n",
"for actor in VAR_16:\n",
"print('Test finished', file=sys.stderr)\n",
"actor.join()\n",
"VAR_18 = CLASS_0()\n",
"for actor in VAR_16:\n",
"VAR_18.merge(actor.log)\n",
"VAR_18.print_stats()\n"
] | [
"def main():...\n",
"parser = argparse.ArgumentParser(description='Stress tester for CMS')\n",
"parser.add_argument('-c', '--contest-id', action='store', type=int,\n required=True, help='ID of the contest to test against')\n",
"parser.add_argument('-n', '--actor-num', action='store', type=int, help=\n 'the number of actors to spawn')\n",
"parser.add_argument('-s', '--sort-actors', action='store_true', help=\n 'sort usernames alphabetically before slicing them')\n",
"parser.add_argument('-u', '--base-url', action='store', type=utf8_decoder,\n help='base URL for placing HTTP requests')\n",
"parser.add_argument('-S', '--submissions-path', action='store', type=\n utf8_decoder, help='base path for submission to send')\n",
"parser.add_argument('-p', '--prepare-path', action='store', type=\n utf8_decoder, help='file to put contest info to')\n",
"parser.add_argument('-r', '--read-from', action='store', type=utf8_decoder,\n help='file to read contest info from')\n",
"parser.add_argument('-t', '--time-coeff', action='store', type=float,\n default=10.0, help='average wait between actions')\n",
"parser.add_argument('-o', '--only-submit', action='store_true', help=\n 'whether the actor only submits solutions')\n",
"args = parser.parse_args()\n",
"if args.prepare_path is not None:\n",
"users, tasks = harvest_contest_data(args.contest_id)\n",
"assert args.time_coeff > 0.0\n",
"contest_data = dict()\n",
"assert not (args.only_submit and args.submissions_path == '')\n",
"contest_data['users'] = users\n",
"users = []\n",
"contest_data['tasks'] = tasks\n",
"tasks = []\n",
"file_.write('%s' % contest_data)\n",
"if args.read_from is None:\n",
"return\n",
"users, tasks = harvest_contest_data(args.contest_id)\n",
"contest_data = ast.literal_eval(file_.read())\n",
"if args.actor_num is not None:\n",
"users = contest_data['users']\n",
"user_items = users.items()\n",
"if args.base_url is not None:\n",
"tasks = contest_data['tasks']\n",
"if args.sort_actors:\n",
"base_url = args.base_url\n",
"base_url = 'http://%s:%d/' % (get_service_address(ServiceCoord(\n 'ContestWebServer', 0))[0], config.contest_listen_port[0])\n",
"user_items.sort()\n",
"random.shuffle(user_items)\n",
"metrics = DEFAULT_METRICS\n",
"users = dict(user_items[:args.actor_num])\n",
"metrics['time_coeff'] = args.time_coeff\n",
"actor_class = RandomActor\n",
"if args.only_submit:\n",
"actor_class = SubmitActor\n",
"actors = [actor_class(username, data['password'], metrics, tasks, log=\n RequestLog(log_dir=os.path.join('./test_logs', username)), base_url=\n base_url, submissions_path=args.submissions_path) for username, data in\n users.iteritems()]\n",
"for actor in actors:\n",
"actor.start()\n",
"while True:\n",
"print('Taking down actors', file=sys.stderr)\n",
"finished = False\n",
"time.sleep(1)\n",
"for actor in actors:\n",
"while not finished:\n",
"actor.die = True\n",
"for actor in actors:\n",
"print('Test finished', file=sys.stderr)\n",
"actor.join()\n",
"great_log = RequestLog()\n",
"for actor in actors:\n",
"great_log.merge(actor.log)\n",
"great_log.print_stats()\n"
] | [
0,
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
5,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assert'",
"Assign'",
"Assert'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"For",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"For",
"Condition",
"Assign'",
"For",
"Expr'",
"Expr'",
"Assign'",
"For",
"Expr'",
"Expr'"
] |
[
"def FUNC_1(VAR_10, VAR_3, VAR_4, VAR_5, VAR_6):...\n",
"VAR_14 = [VAR_45 for VAR_45 in dir(VAR_3.__class__) if not VAR_45.\n startswith('__') and not callable(getattr(VAR_3.__class__, VAR_45))]\n",
"for VAR_45 in VAR_14:\n",
"if isinstance(getattr(VAR_3.__class__, VAR_45), FieldList):\n",
"VAR_15 = ''\n",
"for childobj in getattr(VAR_3, VAR_45):\n",
"if VAR_3.__class__.__name__ in VAR_5:\n",
"FUNC_1(VAR_10, childobj, VAR_3, VAR_5, VAR_6)\n",
"if len(VAR_5[VAR_3.__class__.__name__]) == 0:\n",
"VAR_16 = ('INSERT INTO ' + VAR_3.__class__.__name__ + \" VALUES ('\" + VAR_3.\n id + \"'\")\n",
"if len(VAR_5[VAR_3.__class__.__name__]) == 1:\n",
"for columnname, columntype in VAR_6[VAR_3.__class__.__name__]:\n",
"VAR_15, VAR_45 = VAR_5[VAR_3.__class__.__name__][0]\n",
"assert False\n",
"if columntype == 'int':\n",
"VAR_16 += ')'\n",
"VAR_41 = ''\n",
"if columntype == 'text':\n",
"VAR_10.execute(VAR_16)\n",
"VAR_16 += ','\n",
"VAR_41 = \"'\"\n",
"assert False\n",
"if VAR_15 != '' and VAR_15 + 'id' == columnname:\n",
"VAR_41 = ''\n",
"VAR_16 += VAR_41 + VAR_4.id + VAR_41\n",
"VAR_16 += VAR_41 + str(getattr(VAR_3, columnname)) + VAR_41\n"
] | [
"def SaveDocumentObject(database, documentobject, parentobject,...\n",
"variables = [a for a in dir(documentobject.__class__) if not a.startswith(\n '__') and not callable(getattr(documentobject.__class__, a))]\n",
"for a in variables:\n",
"if isinstance(getattr(documentobject.__class__, a), FieldList):\n",
"foreignkeyclassname = ''\n",
"for childobj in getattr(documentobject, a):\n",
"if documentobject.__class__.__name__ in foreignkeydict:\n",
"SaveDocumentObject(database, childobj, documentobject, foreignkeydict,\n columndict)\n",
"if len(foreignkeydict[documentobject.__class__.__name__]) == 0:\n",
"sql = ('INSERT INTO ' + documentobject.__class__.__name__ + \" VALUES ('\" +\n documentobject.id + \"'\")\n",
"if len(foreignkeydict[documentobject.__class__.__name__]) == 1:\n",
"for columnname, columntype in columndict[documentobject.__class__.__name__]:\n",
"foreignkeyclassname, a = foreignkeydict[documentobject.__class__.__name__][0]\n",
"assert False\n",
"if columntype == 'int':\n",
"sql += ')'\n",
"quote = ''\n",
"if columntype == 'text':\n",
"database.execute(sql)\n",
"sql += ','\n",
"quote = \"'\"\n",
"assert False\n",
"if foreignkeyclassname != '' and foreignkeyclassname + 'id' == columnname:\n",
"quote = ''\n",
"sql += quote + parentobject.id + quote\n",
"sql += quote + str(getattr(documentobject, columnname)) + quote\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Condition",
"Assign'",
"For",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Condition",
"For",
"Assign'",
"Assert'",
"Condition",
"AugAssign'",
"Assign'",
"Condition",
"Expr'",
"AugAssign'",
"Assign'",
"Assert'",
"Condition",
"Assign'",
"AugAssign'",
"AugAssign'"
] |
[
"def FUNC_16(self):...\n",
"VAR_17 = 3\n",
"VAR_18 = 2\n",
"fp.write('0\\n')\n",
"VAR_0 = [RetriesCheck(VAR_18, fp.name)]\n",
"self.runner._max_retries = VAR_17\n",
"self.runall(VAR_0)\n",
"self.assertEqual(1, self.runner.stats.num_cases())\n",
"self.assertEqual(1, len(self.runner.stats.failures(run=0)))\n",
"self.assertEqual(VAR_18, rt.runtime().current_run)\n",
"self.assertEqual(0, len(self.runner.stats.failures()))\n",
"os.remove(fp.name)\n"
] | [
"def test_pass_in_retries(self):...\n",
"max_retries = 3\n",
"run_to_pass = 2\n",
"fp.write('0\\n')\n",
"checks = [RetriesCheck(run_to_pass, fp.name)]\n",
"self.runner._max_retries = max_retries\n",
"self.runall(checks)\n",
"self.assertEqual(1, self.runner.stats.num_cases())\n",
"self.assertEqual(1, len(self.runner.stats.failures(run=0)))\n",
"self.assertEqual(run_to_pass, rt.runtime().current_run)\n",
"self.assertEqual(0, len(self.runner.stats.failures()))\n",
"os.remove(fp.name)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"import os\n",
"import time\n",
"import logging\n",
"from flask import jsonify\n",
"from flask import Flask, request\n",
"from flask import render_template\n",
"from flask import send_from_directory\n",
"import Importer\n",
"from DataSource.MySQLDataSource import MySQL\n",
"import Config\n",
"from Config import logger\n",
"VAR_0 = Flask(__name__)\n",
"VAR_1 = os.environ['LOGFILES_PATH'\n ] if 'LOGFILES_PATH' in os.environ else './logs/'\n",
"VAR_2 = os.environ['DATAFILES_PATH'\n ] if 'DATAFILES_PATH' in os.environ else './uploads/'\n",
"VAR_3 = os.environ['DB_NAME'] if 'DB_NAME' in os.environ else 'astronomy'\n",
"@VAR_0.route('/')...\n",
"return FUNC_3()\n"
] | [
"import os\n",
"import time\n",
"import logging\n",
"from flask import jsonify\n",
"from flask import Flask, request\n",
"from flask import render_template\n",
"from flask import send_from_directory\n",
"import Importer\n",
"from DataSource.MySQLDataSource import MySQL\n",
"import Config\n",
"from Config import logger\n",
"app = Flask(__name__)\n",
"LOG_DIR = os.environ['LOGFILES_PATH'\n ] if 'LOGFILES_PATH' in os.environ else './logs/'\n",
"UPLOADS_DIR = os.environ['DATAFILES_PATH'\n ] if 'DATAFILES_PATH' in os.environ else './uploads/'\n",
"DATABASE = os.environ['DB_NAME'] if 'DB_NAME' in os.environ else 'astronomy'\n",
"@app.route('/')...\n",
"return explore()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_33(self, VAR_24):...\n",
"return self.regex().match(VAR_24) or None\n"
] | [
"def match(self, target):...\n",
"return self.regex().match(target) or None\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"from odoo import tools\n",
"from odoo import api, fields, models\n",
"VAR_0 = [('draft', 'Draft'), ('open', 'Todo'), ('cancel', 'Cancelled'), (\n 'done', 'Held'), ('pending', 'Pending')]\n",
"VAR_1 = 'crm.phonecall.report'\n",
"VAR_2 = 'Phone calls by user'\n",
"VAR_3 = False\n",
"VAR_4 = fields.Many2one(comodel_name='res.users', string='User', readonly=True)\n",
"VAR_5 = fields.Many2one(comodel_name='crm.team', string='Team', readonly=True)\n",
"VAR_6 = fields.Selection(selection=[('0', 'Low'), ('1', 'Normal'), ('2',\n 'High')], string='Priority')\n",
"VAR_7 = fields.Integer(string='# of Cases', readonly=True)\n",
"VAR_8 = fields.Selection(VAR_0, string='Status', readonly=True)\n",
"VAR_9 = fields.Datetime(string='Create Date', readonly=True, index=True)\n",
"VAR_10 = fields.Float(string='Delay to close', digits=(16, 2), readonly=\n True, group_operator='avg', help='Number of Days to close the case')\n",
"VAR_11 = fields.Float(string='Duration', digits=(16, 2), readonly=True,\n group_operator='avg')\n",
"VAR_12 = fields.Float(string='Delay to open', digits=(16, 2), readonly=True,\n group_operator='avg', help='Number of Days to open the case')\n",
"VAR_13 = fields.Many2one(comodel_name='res.partner', string='Partner',\n readonly=True)\n",
"VAR_14 = fields.Many2one(comodel_name='res.company', string='Company',\n readonly=True)\n",
"VAR_15 = fields.Datetime(readonly=True, index=True)\n",
"VAR_16 = fields.Datetime(string='Close Date', readonly=True, index=True)\n",
"def FUNC_0(self):...\n",
"VAR_17 = 'string'\n",
"return VAR_17\n"
] | [
"from odoo import tools\n",
"from odoo import api, fields, models\n",
"AVAILABLE_STATES = [('draft', 'Draft'), ('open', 'Todo'), ('cancel',\n 'Cancelled'), ('done', 'Held'), ('pending', 'Pending')]\n",
"_name = 'crm.phonecall.report'\n",
"_description = 'Phone calls by user'\n",
"_auto = False\n",
"user_id = fields.Many2one(comodel_name='res.users', string='User', readonly\n =True)\n",
"team_id = fields.Many2one(comodel_name='crm.team', string='Team', readonly=True\n )\n",
"priority = fields.Selection(selection=[('0', 'Low'), ('1', 'Normal'), ('2',\n 'High')], string='Priority')\n",
"nbr_cases = fields.Integer(string='# of Cases', readonly=True)\n",
"state = fields.Selection(AVAILABLE_STATES, string='Status', readonly=True)\n",
"create_date = fields.Datetime(string='Create Date', readonly=True, index=True)\n",
"delay_close = fields.Float(string='Delay to close', digits=(16, 2),\n readonly=True, group_operator='avg', help=\n 'Number of Days to close the case')\n",
"duration = fields.Float(string='Duration', digits=(16, 2), readonly=True,\n group_operator='avg')\n",
"delay_open = fields.Float(string='Delay to open', digits=(16, 2), readonly=\n True, group_operator='avg', help='Number of Days to open the case')\n",
"partner_id = fields.Many2one(comodel_name='res.partner', string='Partner',\n readonly=True)\n",
"company_id = fields.Many2one(comodel_name='res.company', string='Company',\n readonly=True)\n",
"opening_date = fields.Datetime(readonly=True, index=True)\n",
"date_closed = fields.Datetime(string='Close Date', readonly=True, index=True)\n",
"def _select(self):...\n",
"select_str = \"\"\"\n select\n id,\n c.date_open as opening_date,\n c.date_closed as date_closed,\n c.state,\n c.user_id,\n c.team_id,\n c.partner_id,\n c.duration,\n c.company_id,\n c.priority,\n 1 as nbr_cases,\n c.create_date as create_date,\n extract(\n 'epoch' from (\n c.date_closed-c.create_date))/(3600*24) as delay_close,\n extract(\n 'epoch' from (\n c.date_open-c.create_date))/(3600*24) as delay_open\n \"\"\"\n",
"return select_str\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"super(CLASS_1, self).setUp()\n",
"VAR_12 = patch('pavelib.utils.test.suites.bokchoy_suite.sh')\n",
"self._mock_sh = VAR_12.start()\n",
"self.addCleanup(VAR_12.stop)\n"
] | [
"def setUp(self):...\n",
"super(TestPaverPa11yCrawlerCmd, self).setUp()\n",
"mock_sh = patch('pavelib.utils.test.suites.bokchoy_suite.sh')\n",
"self._mock_sh = mock_sh.start()\n",
"self.addCleanup(mock_sh.stop)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_20(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = {'SEVERITY': {'LOW': 2}, 'CONFIDENCE': {'HIGH': 2}}\n",
"self.check_example('imports.py', VAR_2)\n"
] | [
"def test_imports(self):...\n",
"\"\"\"docstring\"\"\"\n",
"expect = {'SEVERITY': {'LOW': 2}, 'CONFIDENCE': {'HIGH': 2}}\n",
"self.check_example('imports.py', expect)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"@then('the pods logs should not be empty')...\n",
"VAR_1 = 'string'\n",
"VAR_2 = VAR_0.check_output(VAR_1)\n",
"for pod_id in VAR_2.split('\\n'):\n",
"VAR_3 = (\n 'kubectl --kubeconfig=/etc/kubernetes/admin.conf logs {} --limit-bytes=1 -n kube-system'\n .format(pod_id))\n",
"VAR_4 = VAR_0.check_output(VAR_3)\n",
"if 'salt-master' not in pod_id:\n",
"assert len(VAR_4.strip()) > 0, 'Error cannot retrieve logs for {}'.format(\n pod_id)\n"
] | [
"@then('the pods logs should not be empty')...\n",
"cmd = (\n 'kubectl --kubeconfig=/etc/kubernetes/admin.conf get pods -n kube-system --no-headers -o custom-columns=\":metadata.name\"'\n )\n",
"pods_list = host.check_output(cmd)\n",
"for pod_id in pods_list.split('\\n'):\n",
"cmd_logs = (\n 'kubectl --kubeconfig=/etc/kubernetes/admin.conf logs {} --limit-bytes=1 -n kube-system'\n .format(pod_id))\n",
"res = host.check_output(cmd_logs)\n",
"if 'salt-master' not in pod_id:\n",
"assert len(res.strip()) > 0, 'Error cannot retrieve logs for {}'.format(pod_id)\n"
] | [
0,
2,
2,
0,
2,
2,
0,
2
] | [
"Condition",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assert'"
] |
[
"def FUNC_0(VAR_1):...\n",
"for key in VAR_6.__all__:\n",
"if not hasattr(VAR_1, key):\n",
"setattr(VAR_1, key, getattr(VAR_6, key))\n"
] | [
"def _include_filters(obj):...\n",
"for key in filters.__all__:\n",
"if not hasattr(obj, key):\n",
"setattr(obj, key, getattr(filters, key))\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Condition",
"Expr'"
] |
[
"def FUNC_27(VAR_3, VAR_13, VAR_14, VAR_15, VAR_16, VAR_17=True):...\n",
"if FUNC_26(VAR_3):\n",
"VAR_0, VAR_10 = FUNC_0()\n",
"VAR_32 = VAR_3\n",
"VAR_24 = ','.join(VAR_13)\n",
"VAR_33 = 'NONE'\n",
"VAR_34 = VAR_17\n",
"VAR_28 = VAR_32, VAR_24, VAR_15, VAR_34, VAR_14, VAR_16, VAR_33\n",
"VAR_18 = 'INSERT INTO {} VALUES (?,?,?,?,?,?,?)'.format(CFG('poll_table_name'))\n",
"VAR_10.execute(VAR_18, VAR_28)\n",
"VAR_29 = []\n",
"if VAR_15:\n",
"VAR_29 = FUNC_22(VAR_10, VAR_3)\n",
"FUNC_24(VAR_10, VAR_3)\n",
"for opt in VAR_13:\n",
"FUNC_19(VAR_10, VAR_3, opt)\n",
"FUNC_1(VAR_0)\n",
"return VAR_29\n"
] | [
"def createPoll(poll_name, options_arr, question, has_tokens, multi,...\n",
"if checkPollExists(poll_name):\n",
"conn, c = connectDB()\n",
"name = poll_name\n",
"options = ','.join(options_arr)\n",
"date = 'NONE'\n",
"show_results = openresults\n",
"params = name, options, has_tokens, show_results, question, multi, date\n",
"req = 'INSERT INTO {} VALUES (?,?,?,?,?,?,?)'.format(CFG('poll_table_name'))\n",
"c.execute(req, params)\n",
"tokens = []\n",
"if has_tokens:\n",
"tokens = genTokens(c, poll_name)\n",
"createAdminToken(c, poll_name)\n",
"for opt in options_arr:\n",
"insertOption(c, poll_name, opt)\n",
"closeDB(conn)\n",
"return tokens\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"For",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_0(self, VAR_0, VAR_1, VAR_2, VAR_3, VAR_4, VAR_5):...\n",
"self.db.execute('string' % (VAR_1, VAR_2, VAR_3, VAR_4, VAR_5))\n",
"VAR_8 = self.db.fetchone()\n",
"if VAR_8:\n",
"VAR_19 = ['scope', 'name']\n",
"VAR_20 = [VAR_1, VAR_0]\n",
"if VAR_3 > -1:\n",
"VAR_19.append('adapter')\n",
"if VAR_4 > -1:\n",
"VAR_20.append('%s' % VAR_3)\n",
"VAR_19.append('enclosure')\n",
"VAR_19.append('slot')\n",
"VAR_20.append('%s' % VAR_4)\n",
"VAR_20.append('%s' % VAR_5)\n"
] | [
"def checkIt(self, name, scope, tableid, adapter, enclosure, slot):...\n",
"self.db.execute(\n \"\"\"select scope, tableid, adapter, enclosure,\n\t\t\tslot from storage_controller where\n\t\t\tscope = '%s' and tableid = %s and adapter = %s and\n\t\t\tenclosure = %s and slot = %s\"\"\"\n % (scope, tableid, adapter, enclosure, slot))\n",
"row = self.db.fetchone()\n",
"if row:\n",
"label = ['scope', 'name']\n",
"value = [scope, name]\n",
"if adapter > -1:\n",
"label.append('adapter')\n",
"if enclosure > -1:\n",
"value.append('%s' % adapter)\n",
"label.append('enclosure')\n",
"label.append('slot')\n",
"value.append('%s' % enclosure)\n",
"value.append('%s' % slot)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_1():...\n",
"return db.GqlQuery('SELECT * FROM LoginInformation WHERE user_id = :1',\n users.get_current_user.user_id()).get()\n"
] | [
"def get_current_li():...\n",
"return db.GqlQuery('SELECT * FROM LoginInformation WHERE user_id = :1',\n users.get_current_user.user_id()).get()\n"
] | [
0,
5
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_8(self, VAR_13):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
"def p_expr(self, p):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
0,
0
] | [
"FunctionDef'",
"Docstring"
] |
[
"def FUNC_15(VAR_13):...\n",
"def FUNC_56(self):...\n",
"self._test_strtype('blob', VAR_13, VAR_23(VAR_13))\n",
"return VAR_31\n"
] | [
"def _maketest(value):...\n",
"def t(self):...\n",
"self._test_strtype('blob', value, len(value))\n",
"return t\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Expr'",
"Return'"
] |
[
"def FUNC_15(self, VAR_6, VAR_9):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_20 = None\n",
"VAR_10 = self.common._safe_hostname(VAR_9['host'])\n",
"VAR_25 = self.common.get_cpg(VAR_6, allowSnap=True)\n",
"VAR_12 = self.common.get_domain(VAR_25)\n",
"VAR_20 = self.common._get_3par_host(VAR_10)\n",
"VAR_13 = self.common.get_persona_type(VAR_6)\n",
"return VAR_20\n",
"if not VAR_20['FCPaths']:\n",
"VAR_10 = self._create_3par_fibrechan_host(VAR_10, VAR_9['wwpns'], VAR_12,\n VAR_13)\n",
"self._modify_3par_fibrechan_host(VAR_10, VAR_9['wwpns'])\n",
"VAR_20 = self.common._get_3par_host(VAR_10)\n",
"VAR_20 = self.common._get_3par_host(VAR_10)\n"
] | [
"def _create_host(self, volume, connector):...\n",
"\"\"\"docstring\"\"\"\n",
"host = None\n",
"hostname = self.common._safe_hostname(connector['host'])\n",
"cpg = self.common.get_cpg(volume, allowSnap=True)\n",
"domain = self.common.get_domain(cpg)\n",
"host = self.common._get_3par_host(hostname)\n",
"persona_id = self.common.get_persona_type(volume)\n",
"return host\n",
"if not host['FCPaths']:\n",
"hostname = self._create_3par_fibrechan_host(hostname, connector['wwpns'],\n domain, persona_id)\n",
"self._modify_3par_fibrechan_host(hostname, connector['wwpns'])\n",
"host = self.common._get_3par_host(hostname)\n",
"host = self.common._get_3par_host(hostname)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'"
] |
[
"\"\"\"string\"\"\"\n",
"import logging\n",
"from datetime import timedelta\n",
"import voluptuous as vol\n",
"from homeassistant.components.sensor import PLATFORM_SCHEMA\n",
"from homeassistant.const import TEMP_CELSIUS, DEVICE_CLASS_HUMIDITY, DEVICE_CLASS_TEMPERATURE, STATE_UNKNOWN\n",
"from homeassistant.helpers.entity import Entity\n",
"from homeassistant.util import Throttle\n",
"import homeassistant.helpers.config_validation as cv\n",
"VAR_0 = logging.getLogger(__name__)\n",
"VAR_1 = 'modules'\n",
"VAR_2 = 'station'\n",
"VAR_3 = ['netatmo']\n",
"VAR_4 = timedelta(seconds=600)\n",
"VAR_5 = {'temperature': ['Temperature', TEMP_CELSIUS, None,\n DEVICE_CLASS_TEMPERATURE], 'co2': ['CO2', 'ppm', 'mdi:cloud', None],\n 'pressure': ['Pressure', 'mbar', 'mdi:gauge', None], 'noise': ['Noise',\n 'dB', 'mdi:volume-high', None], 'humidity': ['Humidity', '%', None,\n DEVICE_CLASS_HUMIDITY], 'rain': ['Rain', 'mm', 'mdi:weather-rainy',\n None], 'sum_rain_1': ['sum_rain_1', 'mm', 'mdi:weather-rainy', None],\n 'sum_rain_24': ['sum_rain_24', 'mm', 'mdi:weather-rainy', None],\n 'battery_vp': ['Battery', '', 'mdi:battery', None], 'battery_lvl': [\n 'Battery_lvl', '', 'mdi:battery', None], 'min_temp': ['Min Temp.',\n TEMP_CELSIUS, 'mdi:thermometer', None], 'max_temp': ['Max Temp.',\n TEMP_CELSIUS, 'mdi:thermometer', None], 'windangle': ['Angle', '',\n 'mdi:compass', None], 'windangle_value': ['Angle Value', 'º',\n 'mdi:compass', None], 'windstrength': ['Strength', 'km/h',\n 'mdi:weather-windy', None], 'gustangle': ['Gust Angle', '',\n 'mdi:compass', None], 'gustangle_value': ['Gust Angle Value', 'º',\n 'mdi:compass', None], 'guststrength': ['Gust Strength', 'km/h',\n 'mdi:weather-windy', None], 'rf_status': ['Radio', '', 'mdi:signal',\n None], 'rf_status_lvl': ['Radio_lvl', '', 'mdi:signal', None],\n 'wifi_status': ['Wifi', '', 'mdi:wifi', None], 'wifi_status_lvl': [\n 'Wifi_lvl', 'dBm', 'mdi:wifi', None]}\n",
"VAR_6 = vol.Schema({vol.Required(cv.string): vol.All(cv.ensure_list, [vol.\n In(VAR_5)])})\n",
"VAR_7 = VAR_7.extend({vol.Optional(VAR_2): cv.string, vol.Optional(VAR_1):\n VAR_6})\n",
"def FUNC_0(VAR_8, VAR_9, VAR_10, VAR_11=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_12 = VAR_8.components.netatmo\n",
"VAR_13 = CLASS_1(VAR_12.NETATMO_AUTH, VAR_9.get(VAR_2, None))\n",
"VAR_14 = []\n",
"import pyatmo\n",
"if VAR_1 in VAR_9:\n",
"return None\n",
"VAR_10(VAR_14, True)\n",
"for VAR_16, monitored_conditions in VAR_9[VAR_1].items():\n",
"for VAR_16 in VAR_13.get_module_names():\n",
"\"\"\"Implementation of a Netatmo sensor.\"\"\"\n",
"if VAR_16 not in VAR_13.get_module_names():\n",
"for variable in VAR_13.station_data.monitoredConditions(VAR_16):\n",
"def __init__(self, VAR_15, VAR_16, VAR_17):...\n",
"VAR_0.error('Module name: \"%s\" not found', VAR_16)\n",
"for variable in monitored_conditions:\n",
"if variable in VAR_5.keys():\n",
"\"\"\"docstring\"\"\"\n",
"VAR_14.append(CLASS_0(VAR_13, VAR_16, variable))\n",
"VAR_14.append(CLASS_0(VAR_13, VAR_16, variable))\n",
"VAR_0.warning('Ignoring unknown var %s for mod %s', variable, VAR_16)\n",
"self._name = 'Netatmo {} {}'.format(VAR_16, VAR_5[VAR_17][0])\n",
"self.netatmo_data = VAR_15\n",
"self.module_name = VAR_16\n",
"self.type = VAR_17\n",
"self._state = None\n",
"self._device_class = VAR_5[self.type][3]\n",
"self._icon = VAR_5[self.type][2]\n",
"self._unit_of_measurement = VAR_5[self.type][1]\n",
"VAR_20 = self.netatmo_data.station_data.moduleByName(module=module_name)['_id']\n",
"self.module_id = VAR_20[1]\n",
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._name\n"
] | [
"\"\"\"\nSupport for the NetAtmo Weather Service.\n\nFor more details about this platform, please refer to the documentation at\nhttps://home-assistant.io/components/sensor.netatmo/\n\"\"\"\n",
"import logging\n",
"from datetime import timedelta\n",
"import voluptuous as vol\n",
"from homeassistant.components.sensor import PLATFORM_SCHEMA\n",
"from homeassistant.const import TEMP_CELSIUS, DEVICE_CLASS_HUMIDITY, DEVICE_CLASS_TEMPERATURE, STATE_UNKNOWN\n",
"from homeassistant.helpers.entity import Entity\n",
"from homeassistant.util import Throttle\n",
"import homeassistant.helpers.config_validation as cv\n",
"_LOGGER = logging.getLogger(__name__)\n",
"CONF_MODULES = 'modules'\n",
"CONF_STATION = 'station'\n",
"DEPENDENCIES = ['netatmo']\n",
"MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=600)\n",
"SENSOR_TYPES = {'temperature': ['Temperature', TEMP_CELSIUS, None,\n DEVICE_CLASS_TEMPERATURE], 'co2': ['CO2', 'ppm', 'mdi:cloud', None],\n 'pressure': ['Pressure', 'mbar', 'mdi:gauge', None], 'noise': ['Noise',\n 'dB', 'mdi:volume-high', None], 'humidity': ['Humidity', '%', None,\n DEVICE_CLASS_HUMIDITY], 'rain': ['Rain', 'mm', 'mdi:weather-rainy',\n None], 'sum_rain_1': ['sum_rain_1', 'mm', 'mdi:weather-rainy', None],\n 'sum_rain_24': ['sum_rain_24', 'mm', 'mdi:weather-rainy', None],\n 'battery_vp': ['Battery', '', 'mdi:battery', None], 'battery_lvl': [\n 'Battery_lvl', '', 'mdi:battery', None], 'min_temp': ['Min Temp.',\n TEMP_CELSIUS, 'mdi:thermometer', None], 'max_temp': ['Max Temp.',\n TEMP_CELSIUS, 'mdi:thermometer', None], 'windangle': ['Angle', '',\n 'mdi:compass', None], 'windangle_value': ['Angle Value', 'º',\n 'mdi:compass', None], 'windstrength': ['Strength', 'km/h',\n 'mdi:weather-windy', None], 'gustangle': ['Gust Angle', '',\n 'mdi:compass', None], 'gustangle_value': ['Gust Angle Value', 'º',\n 'mdi:compass', None], 'guststrength': ['Gust Strength', 'km/h',\n 'mdi:weather-windy', None], 'rf_status': ['Radio', '', 'mdi:signal',\n None], 'rf_status_lvl': ['Radio_lvl', '', 'mdi:signal', None],\n 'wifi_status': ['Wifi', '', 'mdi:wifi', None], 'wifi_status_lvl': [\n 'Wifi_lvl', 'dBm', 'mdi:wifi', None]}\n",
"MODULE_SCHEMA = vol.Schema({vol.Required(cv.string): vol.All(cv.ensure_list,\n [vol.In(SENSOR_TYPES)])})\n",
"PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Optional(CONF_STATION): cv.\n string, vol.Optional(CONF_MODULES): MODULE_SCHEMA})\n",
"def setup_platform(hass, config, add_devices, discovery_info=None):...\n",
"\"\"\"docstring\"\"\"\n",
"netatmo = hass.components.netatmo\n",
"data = NetAtmoData(netatmo.NETATMO_AUTH, config.get(CONF_STATION, None))\n",
"dev = []\n",
"import pyatmo\n",
"if CONF_MODULES in config:\n",
"return None\n",
"add_devices(dev, True)\n",
"for module_name, monitored_conditions in config[CONF_MODULES].items():\n",
"for module_name in data.get_module_names():\n",
"\"\"\"Implementation of a Netatmo sensor.\"\"\"\n",
"if module_name not in data.get_module_names():\n",
"for variable in data.station_data.monitoredConditions(module_name):\n",
"def __init__(self, netatmo_data, module_name, sensor_type):...\n",
"_LOGGER.error('Module name: \"%s\" not found', module_name)\n",
"for variable in monitored_conditions:\n",
"if variable in SENSOR_TYPES.keys():\n",
"\"\"\"docstring\"\"\"\n",
"dev.append(NetAtmoSensor(data, module_name, variable))\n",
"dev.append(NetAtmoSensor(data, module_name, variable))\n",
"_LOGGER.warning('Ignoring unknown var %s for mod %s', variable, module_name)\n",
"self._name = 'Netatmo {} {}'.format(module_name, SENSOR_TYPES[sensor_type][0])\n",
"self.netatmo_data = netatmo_data\n",
"self.module_name = module_name\n",
"self.type = sensor_type\n",
"self._state = None\n",
"self._device_class = SENSOR_TYPES[self.type][3]\n",
"self._icon = SENSOR_TYPES[self.type][2]\n",
"self._unit_of_measurement = SENSOR_TYPES[self.type][1]\n",
"module_id = self.netatmo_data.station_data.moduleByName(module=module_name)[\n '_id']\n",
"self.module_id = module_id[1]\n",
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._name\n"
] | [
0,
0,
7,
0,
0,
0,
0,
7,
0,
0,
0,
0,
0,
7,
7,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Import'",
"Condition",
"Return'",
"Expr'",
"For",
"For",
"Expr'",
"Condition",
"For",
"FunctionDef'",
"Expr'",
"For",
"Condition",
"Docstring",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Docstring",
"Return'"
] |
[
"@VAR_0.assignment_tag(takes_context=True)...\n",
"VAR_16 = FUNC_1(VAR_1)\n",
"return VAR_16.total()['max_group_size']\n"
] | [
"@register.assignment_tag(takes_context=True)...\n",
"points = _prepare_context(context)\n",
"return points.total()['max_group_size']\n"
] | [
0,
0,
0
] | [
"Condition",
"Assign'",
"Return'"
] |
[
"def __init__(self, VAR_7, VAR_67=(), *VAR_15, **VAR_16):...\n",
"CLASS_0.__init__(self, VAR_7, *VAR_15, **kw)\n",
"self.options = VAR_67\n"
] | [
"def __init__(self, param, options=(), *a, **kw):...\n",
"Validator.__init__(self, param, *a, **kw)\n",
"self.options = options\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'"
] |
[
"def FUNC_10(self):...\n",
"VAR_6 = ('\"normal_string\"', '\"string with spaces\"',\n '\"string with quotes\\\\\"a\"', '\"string with s-quotes\\'b\"', '\"bsn \\n A\"',\n '\"unrecognized \\\\q escape\"')\n",
"for string, result in zip(self.test_strings, VAR_6):\n",
"self.assertEqual(prepare_string_argument(string, 'sh'), result)\n"
] | [
"def test_prepare_string_argument_sh(self):...\n",
"expected_results = ('\"normal_string\"', '\"string with spaces\"',\n '\"string with quotes\\\\\"a\"', '\"string with s-quotes\\'b\"', '\"bsn \\n A\"',\n '\"unrecognized \\\\q escape\"')\n",
"for string, result in zip(self.test_strings, expected_results):\n",
"self.assertEqual(prepare_string_argument(string, 'sh'), result)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Expr'"
] |
[
"def FUNC_1(VAR_4):...\n",
"\"\"\"docstring\"\"\"\n",
"for name in find_modules('benwaonline.blueprints'):\n",
"VAR_10 = import_string(name)\n",
"return None\n",
"if hasattr(VAR_10, 'bp'):\n",
"VAR_4.register_blueprint(VAR_10.bp)\n"
] | [
"def register_blueprints(app):...\n",
"\"\"\"docstring\"\"\"\n",
"for name in find_modules('benwaonline.blueprints'):\n",
"mod = import_string(name)\n",
"return None\n",
"if hasattr(mod, 'bp'):\n",
"app.register_blueprint(mod.bp)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"For",
"Assign'",
"Return'",
"Condition",
"Expr'"
] |
[
"def FUNC_13(self):...\n",
"VAR_4 = 'hubba-bubba'\n",
"def FUNC_29():...\n",
"self.assertIn('reauthn-for-termination', VAR_32)\n",
"VAR_29 = VAR_32['reauthn-for-termination']\n",
"VAR_30 = int(time.time())\n",
"self.assertTrue(VAR_30 - VAR_29 < 5)\n",
"self.acs('/terminate', VAR_4, FUNC_29)\n"
] | [
"def test_terminate_assertion_consumer_service(self):...\n",
"eppn = 'hubba-bubba'\n",
"def _check():...\n",
"self.assertIn('reauthn-for-termination', session)\n",
"then = session['reauthn-for-termination']\n",
"now = int(time.time())\n",
"self.assertTrue(now - then < 5)\n",
"self.acs('/terminate', eppn, _check)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
"\"\"\"docstring\"\"\"\n",
"self.common.client_login()\n",
"VAR_24 = self.common.create_cloned_volume(VAR_7, VAR_8)\n",
"self.common.client_logout()\n",
"return {'metadata': VAR_24}\n"
] | [
"@utils.synchronized('3par', external=True)...\n",
"\"\"\"docstring\"\"\"\n",
"self.common.client_login()\n",
"new_vol = self.common.create_cloned_volume(volume, src_vref)\n",
"self.common.client_logout()\n",
"return {'metadata': new_vol}\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_15(self, VAR_17):...\n",
"if not path.exists(VAR_17):\n",
"VAR_50 = sqlite3.connect(VAR_17)\n",
"VAR_50.execute('PRAGMA page_size = 4096;')\n",
"VAR_50.execute('PRAGMA cache_size = 20000;')\n",
"VAR_50.execute('PRAGMA temp_store = MEMORY; ')\n",
"VAR_50.execute('PRAGMA synchronous = OFF; ')\n",
"VAR_50.commit()\n"
] | [
"def init_user_db(self, db_file):...\n",
"if not path.exists(db_file):\n",
"db = sqlite3.connect(db_file)\n",
"db.execute('PRAGMA page_size = 4096;')\n",
"db.execute('PRAGMA cache_size = 20000;')\n",
"db.execute('PRAGMA temp_store = MEMORY; ')\n",
"db.execute('PRAGMA synchronous = OFF; ')\n",
"db.commit()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_30(VAR_35):...\n",
"self.assertEqual(None, VAR_35)\n",
"return 0\n"
] | [
"def run_bot(error):...\n",
"self.assertEqual(None, error)\n",
"return 0\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Return'"
] |
[
"def FUNC_12(VAR_24):...\n",
"VAR_38 = argparse.ArgumentParser(description=\n 'Reads and merges JSON configuration files specified by colon separated environment variable OS_CONFIG_FILES, unless overridden by command line option --metadata. If no files are specified this way, falls back to legacy behavior of searching the fallback metadata path for a single config file.'\n )\n",
"VAR_38.add_argument('-t', '--templates', metavar='TEMPLATE_ROOT', help=\n \"\"\"path to template root directory (default:\n %(default)s)\"\"\"\n , VAR_12=TEMPLATES_DIR)\n",
"VAR_38.add_argument('-o', '--output', metavar='OUT_DIR', help=\n 'root directory for output (default:%(default)s)', VAR_12='/')\n",
"VAR_38.add_argument('-m', '--metadata', metavar='METADATA_FILE', nargs='*',\n help=\n 'Overrides environment variable OS_CONFIG_FILES. Specify multiple times, rather than separate files with \":\".'\n , VAR_12=[])\n",
"VAR_38.add_argument('--fallback-metadata', metavar='FALLBACK_METADATA',\n nargs='*', help=\n 'Files to search when OS_CONFIG_FILES is empty. (default: %(default)s)',\n VAR_12=['/var/cache/heat-cfntools/last_metadata',\n '/var/lib/heat-cfntools/cfn-init-data',\n '/var/lib/cloud/data/cfn-init-data'])\n",
"VAR_38.add_argument('-v', '--validate', help=\n 'validate only. do not write files', VAR_12=False, action='store_true')\n",
"VAR_38.add_argument('--print-templates', VAR_12=False, action='store_true',\n help='Print templates root and exit.')\n",
"VAR_38.add_argument('-s', '--subhash', help=\n 'use the sub-hash named by this key, instead of the full metadata hash')\n",
"VAR_38.add_argument('--key', metavar='KEY', VAR_12=None, help=\n 'print the specified key and exit. (may be used with --type and --key-default)'\n )\n",
"VAR_38.add_argument('--type', VAR_12='default', help=\n 'exit with error if the specified --key does not match type. Valid types are <int|default|netaddress|netdevice|dsn|swiftdevices|raw>'\n )\n",
"VAR_38.add_argument('--key-default', help=\n 'This option only affects running with --key. Print this if key is not found. This value is not subject to type restrictions. If --key is specified and no default is specified, program exits with an error on missing key.'\n )\n",
"VAR_38.add_argument('--version', action='version', version=version.\n version_info.version_string())\n",
"VAR_38.add_argument('--os-config-files', VAR_12=OS_CONFIG_FILES_PATH, help=\n 'Set path to os_config_files.json')\n",
"VAR_39 = VAR_38.parse_args(VAR_24[1:])\n",
"return VAR_39\n"
] | [
"def parse_opts(argv):...\n",
"parser = argparse.ArgumentParser(description=\n 'Reads and merges JSON configuration files specified by colon separated environment variable OS_CONFIG_FILES, unless overridden by command line option --metadata. If no files are specified this way, falls back to legacy behavior of searching the fallback metadata path for a single config file.'\n )\n",
"parser.add_argument('-t', '--templates', metavar='TEMPLATE_ROOT', help=\n \"\"\"path to template root directory (default:\n %(default)s)\"\"\"\n , default=TEMPLATES_DIR)\n",
"parser.add_argument('-o', '--output', metavar='OUT_DIR', help=\n 'root directory for output (default:%(default)s)', default='/')\n",
"parser.add_argument('-m', '--metadata', metavar='METADATA_FILE', nargs='*',\n help=\n 'Overrides environment variable OS_CONFIG_FILES. Specify multiple times, rather than separate files with \":\".'\n , default=[])\n",
"parser.add_argument('--fallback-metadata', metavar='FALLBACK_METADATA',\n nargs='*', help=\n 'Files to search when OS_CONFIG_FILES is empty. (default: %(default)s)',\n default=['/var/cache/heat-cfntools/last_metadata',\n '/var/lib/heat-cfntools/cfn-init-data',\n '/var/lib/cloud/data/cfn-init-data'])\n",
"parser.add_argument('-v', '--validate', help=\n 'validate only. do not write files', default=False, action='store_true')\n",
"parser.add_argument('--print-templates', default=False, action='store_true',\n help='Print templates root and exit.')\n",
"parser.add_argument('-s', '--subhash', help=\n 'use the sub-hash named by this key, instead of the full metadata hash')\n",
"parser.add_argument('--key', metavar='KEY', default=None, help=\n 'print the specified key and exit. (may be used with --type and --key-default)'\n )\n",
"parser.add_argument('--type', default='default', help=\n 'exit with error if the specified --key does not match type. Valid types are <int|default|netaddress|netdevice|dsn|swiftdevices|raw>'\n )\n",
"parser.add_argument('--key-default', help=\n 'This option only affects running with --key. Print this if key is not found. This value is not subject to type restrictions. If --key is specified and no default is specified, program exits with an error on missing key.'\n )\n",
"parser.add_argument('--version', action='version', version=version.\n version_info.version_string())\n",
"parser.add_argument('--os-config-files', default=OS_CONFIG_FILES_PATH, help\n ='Set path to os_config_files.json')\n",
"opts = parser.parse_args(argv[1:])\n",
"return opts\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_10(self, VAR_13, VAR_20, VAR_21, VAR_12):...\n",
"self.wz_sock.send_multipart(self.wz.make_router_rep_msg(VAR_13, VAR_20,\n VAR_21, VAR_12))\n"
] | [
"def send_rep(self, reqid, seqnum, status, data):...\n",
"self.wz_sock.send_multipart(self.wz.make_router_rep_msg(reqid, seqnum,\n status, data))\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_6(self):...\n",
"VAR_2 = CLASS_1.construct_testscript_command('test_program.py')\n",
"VAR_3, VAR_4 = run_shell_command(VAR_2)\n",
"VAR_5 = \"\"\"test_program Z\nnon-interactive mode.\nExiting...\n\"\"\"\n",
"self.assertEqual(VAR_3, VAR_5)\n",
"self.assertEqual(VAR_4, '')\n"
] | [
"def test_run_shell_command_without_stdin(self):...\n",
"command = RunShellCommandTest.construct_testscript_command('test_program.py')\n",
"stdout, stderr = run_shell_command(command)\n",
"expected = \"\"\"test_program Z\nnon-interactive mode.\nExiting...\n\"\"\"\n",
"self.assertEqual(stdout, expected)\n",
"self.assertEqual(stderr, '')\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_1(VAR_0, VAR_2, VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"return FUNC_0(VAR_0, VAR_0, VAR_2, VAR_3)\n"
] | [
"def has_unexpected_keys(expected_keys, actual_keys, name):...\n",
"\"\"\"docstring\"\"\"\n",
"return has_unexpected_subset_keys(expected_keys, expected_keys, actual_keys,\n name)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_2(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1 = self.client.get('/api/apps', HTTP_X_DEIS_VERSION='1234.5678')\n",
"self.assertEqual(VAR_1.status_code, 405)\n"
] | [
"def test_x_deis_version_header_bad(self):...\n",
"\"\"\"docstring\"\"\"\n",
"response = self.client.get('/api/apps', HTTP_X_DEIS_VERSION='1234.5678')\n",
"self.assertEqual(response.status_code, 405)\n"
] | [
0,
0,
5,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"def FUNC_2(VAR_2, VAR_5, VAR_6, VAR_7, VAR_8, VAR_9=0):...\n",
"VAR_2.cursor.execute('string', (VAR_5, VAR_6, VAR_7, VAR_8, VAR_9))\n",
"VAR_10 = VAR_2.cursor.lastrowid\n",
"VAR_2.database.commit()\n",
"return VAR_0(VAR_10, VAR_5, VAR_6, VAR_7, VAR_8, 0)\n"
] | [
"def create(db, sheet_id, student_id, timestamp, files_path, deleted=0):...\n",
"db.cursor.execute(\n \"\"\"INSERT INTO submission\n (sheet_id, student_id, time, files_path, deleted)\n VALUES (?, ?, ?, ?, ?)\"\"\"\n , (sheet_id, student_id, timestamp, files_path, deleted))\n",
"submission_id = db.cursor.lastrowid\n",
"db.database.commit()\n",
"return Submission(submission_id, sheet_id, student_id, timestamp, files_path, 0\n )\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_6(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = self.bindings['TEST_APP_COMPONENT_NAME']\n",
"VAR_1 = self.bindings\n",
"VAR_9 = self.agent.make_json_payload_from_kwargs(job=[{'type':\n 'deleteLoadBalancer', 'cloudProvider': 'gce', 'loadBalancerName':\n load_balancer_name, 'region': bindings['TEST_GCE_REGION'], 'regions': [\n bindings['TEST_GCE_REGION']], 'credentials': bindings['GCE_CREDENTIALS'\n ], 'user': '[anonymous]'}], description=\n 'Delete Load Balancer: {0} in {1}:{2}'.format(load_balancer_name,\n bindings['GCE_CREDENTIALS'], bindings['TEST_GCE_REGION']), application=\n self.TEST_APP)\n",
"VAR_10 = gcp.GceContractBuilder(self.gce_observer)\n",
"VAR_10.new_clause_builder('Health Check Removed', retryable_for_secs=30\n ).list_resources('http-health-checks').excludes_path_value('name', \n '%s-hc' % VAR_6)\n",
"VAR_10.new_clause_builder('TargetPool Removed').list_resources('target-pools'\n ).excludes_path_value('name', '%s-tp' % VAR_6)\n",
"VAR_10.new_clause_builder('Forwarding Rule Removed').list_resources(\n 'forwarding-rules').excludes_path_value('name', VAR_6)\n",
"return st.OperationContract(self.new_post_operation(title=\n 'delete_load_balancer', data=payload, path='tasks'), VAR_5=builder.build())\n"
] | [
"def delete_load_balancer(self):...\n",
"\"\"\"docstring\"\"\"\n",
"load_balancer_name = self.bindings['TEST_APP_COMPONENT_NAME']\n",
"bindings = self.bindings\n",
"payload = self.agent.make_json_payload_from_kwargs(job=[{'type':\n 'deleteLoadBalancer', 'cloudProvider': 'gce', 'loadBalancerName':\n load_balancer_name, 'region': bindings['TEST_GCE_REGION'], 'regions': [\n bindings['TEST_GCE_REGION']], 'credentials': bindings['GCE_CREDENTIALS'\n ], 'user': '[anonymous]'}], description=\n 'Delete Load Balancer: {0} in {1}:{2}'.format(load_balancer_name,\n bindings['GCE_CREDENTIALS'], bindings['TEST_GCE_REGION']), application=\n self.TEST_APP)\n",
"builder = gcp.GceContractBuilder(self.gce_observer)\n",
"builder.new_clause_builder('Health Check Removed', retryable_for_secs=30\n ).list_resources('http-health-checks').excludes_path_value('name', \n '%s-hc' % load_balancer_name)\n",
"builder.new_clause_builder('TargetPool Removed').list_resources('target-pools'\n ).excludes_path_value('name', '%s-tp' % load_balancer_name)\n",
"builder.new_clause_builder('Forwarding Rule Removed').list_resources(\n 'forwarding-rules').excludes_path_value('name', load_balancer_name)\n",
"return st.OperationContract(self.new_post_operation(title=\n 'delete_load_balancer', data=payload, path='tasks'), contract=builder.\n build())\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_14(self, VAR_14):...\n",
""
] | [
"def is_boolean(self, col_name):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_22(self):...\n",
"return self._email\n"
] | [
"def nickname(self):...\n",
"return self._email\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_9():...\n",
"VAR_40 = 'string'\n",
"VAR_50 = pd.read_sql(VAR_40, con=db.engine)\n",
"return VAR_50\n"
] | [
"def get_history():...\n",
"query = \"\"\"\n SELECT\n upload_log.id as upload_id,\n upload_log.jurisdiction_slug,\n upload_log.event_type_slug,\n upload_log.user_id,\n upload_log.given_filename,\n upload_log.upload_timestamp,\n upload_log.num_rows,\n upload_log.file_size,\n upload_log.file_hash,\n upload_log.s3_upload_path,\n match_log.id as match_id,\n match_log.match_start_timestamp,\n match_log.match_complete_timestamp,\n to_char(match_log.runtime, 'HH24:MI:SS') as runtime\n FROM match_log\n LEFT JOIN upload_log ON upload_log.id = match_log.upload_id\n ORDER BY match_complete_timestamp ASC\n \"\"\"\n",
"df = pd.read_sql(query, con=db.engine)\n",
"return df\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_39(VAR_101):...\n",
"VAR_101.message = VAR_73\n",
"return VAR_101\n"
] | [
"def decorate(ruleinfo):...\n",
"ruleinfo.message = message\n",
"return ruleinfo\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_0(self, **VAR_4):...\n",
"VAR_5 = super().get_context_data(**kwargs)\n",
"VAR_5['count'] = self.object_list.count()\n",
"VAR_5['alltag'] = Tag.objects.order_by('id')\n",
"VAR_6 = self.request.GET.getlist('tag')\n",
"VAR_5['tags'] = Tag.objects.filter(id__in=taglist)\n",
"VAR_7 = EventSearchForm(self.request.GET)\n",
"VAR_5['search_form'] = VAR_7\n",
"VAR_5['30_day_labels'] = self.thirty_day_labels()\n",
"VAR_5['30_day_data'] = self.thirty_day_data()\n",
"return VAR_5\n"
] | [
"def get_context_data(self, **kwargs):...\n",
"context = super().get_context_data(**kwargs)\n",
"context['count'] = self.object_list.count()\n",
"context['alltag'] = Tag.objects.order_by('id')\n",
"taglist = self.request.GET.getlist('tag')\n",
"context['tags'] = Tag.objects.filter(id__in=taglist)\n",
"search_form = EventSearchForm(self.request.GET)\n",
"context['search_form'] = search_form\n",
"context['30_day_labels'] = self.thirty_day_labels()\n",
"context['30_day_data'] = self.thirty_day_data()\n",
"return context\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def __str__(self):...\n",
"return ' '.join(self.cmd)\n"
] | [
"def __str__(self):...\n",
"return ' '.join(self.cmd)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_9(self):...\n",
"VAR_13 = list(traceback.format_exception(*sys.exc_info()))\n",
"VAR_14 = min(max([len(line) for line in VAR_13]), 185)\n",
"VAR_15 = VAR_13.pop()\n",
"print(VAR_15, end='')\n",
"VAR_13 = filter(lambda item: os.path.join('gui', 'handled_decorators.py')\n not in item, VAR_13)\n",
"VAR_16 = QMessageBox(icon=QMessageBox.Critical)\n",
"VAR_16.setWindowTitle('Error')\n",
"VAR_16.setText(f\"\"\"{VAR_15 + '_' * VAR_14}\n\n{''.join(VAR_13)}\"\"\")\n",
"VAR_16.exec()\n"
] | [
"def error_dialog(self):...\n",
"stack = list(traceback.format_exception(*sys.exc_info()))\n",
"desired_width = min(max([len(line) for line in stack]), 185)\n",
"description = stack.pop()\n",
"print(description, end='')\n",
"stack = filter(lambda item: os.path.join('gui', 'handled_decorators.py') not in\n item, stack)\n",
"dialog = QMessageBox(icon=QMessageBox.Critical)\n",
"dialog.setWindowTitle('Error')\n",
"dialog.setText(f\"\"\"{description + '_' * desired_width}\n\n{''.join(stack)}\"\"\")\n",
"dialog.exec()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_3(self, VAR_2):...\n",
"VAR_6 = os.path.join(self.namespace, VAR_2)\n",
"self.etcd.write(VAR_6, None, dir=True, prevExist=False)\n",
"FUNC_0('Error storing key %s: [%r]' % (VAR_2, repr(err)))\n"
] | [
"def span(self, key):...\n",
"path = os.path.join(self.namespace, key)\n",
"self.etcd.write(path, None, dir=True, prevExist=False)\n",
"log_error('Error storing key %s: [%r]' % (key, repr(err)))\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"@api.public...\n",
"self.response.write('OK')\n"
] | [
"@api.public...\n",
"self.response.write('OK')\n"
] | [
0,
0
] | [
"Condition",
"Expr'"
] |
[
"def __init__(self, VAR_7, **VAR_8):...\n",
"if 'user' not in VAR_8:\n",
"if 'password' not in VAR_8:\n",
"VAR_8['backend'] = 'IBMBackend'\n",
"super().__init__(VAR_7, **kwargs)\n"
] | [
"def __init__(self, wires, **kwargs):...\n",
"if 'user' not in kwargs:\n",
"if 'password' not in kwargs:\n",
"kwargs['backend'] = 'IBMBackend'\n",
"super().__init__(wires, **kwargs)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Condition",
"Assign'",
"Expr'"
] |
[
"def FUNC_32(self):...\n",
"VAR_44 = VAR_54.get_doc(self.doctype, self.name).as_dict()\n",
"for VAR_7 in self.as_dict():\n",
"VAR_25 = self.meta.get_field(VAR_7)\n",
"VAR_60 = VAR_44.get(VAR_7)\n",
"if VAR_25 and not VAR_25.allow_on_submit and (self.get(VAR_7) or VAR_60):\n",
"if VAR_25.fieldtype == 'Table':\n",
"VAR_72 = len(self.get(VAR_7))\n",
"VAR_72 = self.get_value(VAR_7)\n",
"VAR_60 = len(VAR_60)\n",
"if VAR_72 != VAR_60:\n",
"VAR_54.throw(_('Not allowed to change {0} after submission').format(VAR_25.\n label), VAR_54.UpdateAfterSubmitError)\n"
] | [
"def _validate_update_after_submit(self):...\n",
"db_values = frappe.get_doc(self.doctype, self.name).as_dict()\n",
"for key in self.as_dict():\n",
"df = self.meta.get_field(key)\n",
"db_value = db_values.get(key)\n",
"if df and not df.allow_on_submit and (self.get(key) or db_value):\n",
"if df.fieldtype == 'Table':\n",
"self_value = len(self.get(key))\n",
"self_value = self.get_value(key)\n",
"db_value = len(db_value)\n",
"if self_value != db_value:\n",
"frappe.throw(_('Not allowed to change {0} after submission').format(df.\n label), frappe.UpdateAfterSubmitError)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'"
] |
[
"def FUNC_4(VAR_16):...\n",
"return db.GqlQuery('SELECT * FROM LoginInformation WHERE user_id = :1',\n VAR_16.created_by_id).get()\n"
] | [
"def get_sender(this):...\n",
"return db.GqlQuery('SELECT * FROM LoginInformation WHERE user_id = :1',\n this.created_by_id).get()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_10(VAR_9, VAR_10, VAR_11=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_15 = [iter(VAR_9)] * VAR_10\n",
"return izip_longest(*VAR_15, VAR_11=fillvalue)\n"
] | [
"def grouper(iterable, n, fillvalue=None):...\n",
"\"\"\"docstring\"\"\"\n",
"args = [iter(iterable)] * n\n",
"return izip_longest(*args, fillvalue=fillvalue)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_41():...\n",
"self.listenTCP(VAR_22, VAR_23)\n",
"reactor.run()\n"
] | [
"def _start():...\n",
"self.listenTCP(port, host)\n",
"reactor.run()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"def FUNC_9(VAR_12):...\n",
"if os.path.isfile(VAR_12):\n",
"VAR_41 = f.read()\n",
"return None\n",
"return VAR_41\n"
] | [
"def read_file_contents(local_filepath):...\n",
"if os.path.isfile(local_filepath):\n",
"data = f.read()\n",
"return None\n",
"return data\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Return'",
"Return'"
] |
[
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self.config.getint(self.section, 'max_authn_failures_per_month')\n"
] | [
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self.config.getint(self.section, 'max_authn_failures_per_month')\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
]