lines:
  sequencelengths: [1, 444]
raw_lines:
  sequencelengths: [1, 444]
label:
  sequencelengths: [1, 444]
type:
  sequencelengths: [1, 444]
[ "\"\"\"string\"\"\"\n", "import sys\n", "from hp3parclient import exceptions as hpexceptions\n", "from cinder import exception\n", "from cinder.openstack.common import log as logging\n", "from cinder import utils\n", "import cinder.volume.driver\n", "from cinder.volume.drivers.san.hp import hp_3par_common as hpcommon\n", "from cinder.volume.drivers.san import san\n", "VAR_0 = 1.1\n", "VAR_1 = logging.getLogger(__name__)\n", "VAR_2 = 3260\n", "\"\"\"string\"\"\"\n", "def __init__(self, *VAR_3, **VAR_4):...\n", "super(CLASS_0, self).__init__(*VAR_3, **kwargs)\n", "self.common = None\n", "self.configuration.append_config_values(hpcommon.hp3par_opts)\n", "self.configuration.append_config_values(san.san_opts)\n", "def FUNC_0(self):...\n", "return hpcommon.HP3PARCommon(self.configuration)\n" ]
[ "\"\"\"\nVolume driver for HP 3PAR Storage array.\nThis driver requires 3.1.2 MU2 firmware on the 3PAR array.\n\nYou will need to install the python hp3parclient.\nsudo pip install hp3parclient\n\nSet the following in the cinder.conf file to enable the\n3PAR iSCSI Driver along with the required flags:\n\nvolume_driver=cinder.volume.drivers.san.hp.hp_3par_iscsi.HP3PARISCSIDriver\n\"\"\"\n", "import sys\n", "from hp3parclient import exceptions as hpexceptions\n", "from cinder import exception\n", "from cinder.openstack.common import log as logging\n", "from cinder import utils\n", "import cinder.volume.driver\n", "from cinder.volume.drivers.san.hp import hp_3par_common as hpcommon\n", "from cinder.volume.drivers.san import san\n", "VERSION = 1.1\n", "LOG = logging.getLogger(__name__)\n", "DEFAULT_ISCSI_PORT = 3260\n", "\"\"\"OpenStack iSCSI driver to enable 3PAR storage array.\n\n Version history:\n 1.0 - Initial driver\n 1.1 - QoS, extend volume, multiple iscsi ports, remove domain,\n session changes, faster clone, requires 3.1.2 MU2 firmware.\n\n \"\"\"\n", "def __init__(self, *args, **kwargs):...\n", "super(HP3PARISCSIDriver, self).__init__(*args, **kwargs)\n", "self.common = None\n", "self.configuration.append_config_values(hpcommon.hp3par_opts)\n", "self.configuration.append_config_values(san.san_opts)\n", "def _init_common(self):...\n", "return hpcommon.HP3PARCommon(self.configuration)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Return'" ]
[ "def FUNC_12(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = 'SELECT path FROM {0};'.format(VAR_2)\n", "VAR_4 = self._run_command(VAR_7)\n", "VAR_4 = [i[0] for i in VAR_4]\n", "VAR_4 = None\n", "return VAR_4\n" ]
[ "def getFileList(self):...\n", "\"\"\"docstring\"\"\"\n", "command = 'SELECT path FROM {0};'.format(TABLE_NAME)\n", "data = self._run_command(command)\n", "data = [i[0] for i in data]\n", "data = None\n", "return data\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_1(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "if not VAR_0.type in ['CO']:\n", "return False\n", "if VAR_0.extracted_docs() > 0:\n", "return CLASS_2()\n", "return False\n" ]
[ "def include_clean_comp_data_form(dg):...\n", "\"\"\"docstring\"\"\"\n", "if not dg.type in ['CO']:\n", "return False\n", "if dg.extracted_docs() > 0:\n", "return CleanCompDataForm()\n", "return False\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_9(VAR_0, VAR_12, VAR_3, VAR_4):...\n", "if VAR_12[0] < 0:\n", "VAR_31 = VAR_3[VAR_12[0]]\n", "VAR_31 = VAR_12[0]\n", "if VAR_12[1] < 0:\n", "VAR_32 = VAR_3[VAR_12[1]]\n", "VAR_32 = VAR_12[1]\n", "VAR_7 = SymbolicInput(VAR_4[0], VAR_0, VAR_31, VAR_32)\n", "VAR_3[VAR_4[0]] = VAR_7\n", "VAR_4[0] -= 1\n", "return VAR_7.getId()\n" ]
[ "def param2Simple(op, params, symbols, symId):...\n", "if params[0] < 0:\n", "p0 = symbols[params[0]]\n", "p0 = params[0]\n", "if params[1] < 0:\n", "p1 = symbols[params[1]]\n", "p1 = params[1]\n", "x = SymbolicInput(symId[0], op, p0, p1)\n", "symbols[symId[0]] = x\n", "symId[0] -= 1\n", "return x.getId()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "AugAssign'", "Return'" ]
[ "def FUNC_2():...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = {}\n", "for arg in request.args:\n", "VAR_8 = re.findall('psize_(.*)', arg)\n", "return VAR_6\n", "if VAR_8:\n", "VAR_6[VAR_8[0]] = int(request.args.get(arg))\n" ]
[ "def get_page_size_args():...\n", "\"\"\"docstring\"\"\"\n", "page_sizes = {}\n", "for arg in request.args:\n", "re_match = re.findall('psize_(.*)', arg)\n", "return page_sizes\n", "if re_match:\n", "page_sizes[re_match[0]] = int(request.args.get(arg))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Assign'", "Return'", "Condition", "Assign'" ]
[ "def __str__(self):...\n", "return str(self.nickname())\n" ]
[ "def __str__(self):...\n", "return str(self.nickname())\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "from __future__ import absolute_import\n", "from __future__ import division\n", "from __future__ import print_function\n", "import threading\n", "import traceback\n", "import redis\n", "import ray\n", "from ray import ray_constants\n", "from ray import cloudpickle as pickle\n", "from ray import profiling\n", "from ray import utils\n", "\"\"\"string\"\"\"\n", "def __init__(self, VAR_0, VAR_1):...\n", "self.worker = VAR_0\n", "self.mode = VAR_1\n", "self.redis_client = VAR_0.redis_client\n", "def FUNC_0(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_3 = threading.Thread(target=self._run, name='ray_import_thread')\n", "VAR_3.daemon = True\n", "VAR_3.start()\n", "def FUNC_1(self):...\n", "VAR_4 = self.redis_client.pubsub()\n", "VAR_4.subscribe('__keyspace@0__:Exports')\n", "VAR_5 = 0\n", "VAR_9 = self.redis_client.lrange('Exports', 0, -1)\n", "for VAR_2 in VAR_9:\n", "VAR_5 += 1\n", "for msg in VAR_4.listen():\n", "def FUNC_2(self, VAR_2):...\n", "self._process_key(VAR_2)\n", "if msg['type'] == 'subscribe':\n", "\"\"\"docstring\"\"\"\n", "assert msg['data'] == b'rpush'\n", "if self.mode != ray.WORKER_MODE:\n", "VAR_12 = self.redis_client.llen('Exports')\n", "if VAR_2.startswith(b'FunctionsToRun'):\n", "if VAR_2.startswith(b'RemoteFunction'):\n", "assert VAR_12 >= VAR_5\n", "self.fetch_and_execute_function_to_run(VAR_2)\n", "return\n", "self.worker.function_actor_manager.fetch_and_register_remote_function(VAR_2)\n", "if VAR_2.startswith(b'FunctionsToRun'):\n", "for i in range(VAR_5, VAR_12):\n", "def FUNC_3(self, VAR_2):...\n", "self.fetch_and_execute_function_to_run(VAR_2)\n", "if VAR_2.startswith(b'ActorClass'):\n", "VAR_5 += 1\n", "\"\"\"docstring\"\"\"\n", "self.worker.function_actor_manager.imported_actor_classes.add(VAR_2)\n", "VAR_2 = self.redis_client.lindex('Exports', i)\n", "VAR_6, VAR_7, VAR_8 = self.redis_client.hmget(VAR_2, ['driver_id',\n 'function', 'run_on_other_drivers'])\n", "self._process_key(VAR_2)\n", "if utils.decode(VAR_8\n", "return\n", "VAR_10 = pickle.loads(VAR_7)\n", "VAR_11 = traceback.format_exc()\n", "VAR_10({'worker': self.worker})\n", "utils.push_error_to_driver(self.worker, ray_constants.\n FUNCTION_TO_RUN_PUSH_ERROR, VAR_11, VAR_6=ray.DriverID(driver_id))\n" ]
[ "from __future__ import absolute_import\n", "from __future__ import division\n", "from __future__ import print_function\n", "import threading\n", "import traceback\n", "import redis\n", "import ray\n", "from ray import ray_constants\n", "from ray import cloudpickle as pickle\n", "from ray import profiling\n", "from ray import utils\n", "\"\"\"A thread used to import exports from the driver or other workers.\n\n Note:\n The driver also has an import thread, which is used only to\n import custom class definitions from calls to register_custom_serializer\n that happen under the hood on workers.\n\n Attributes:\n worker: the worker object in this process.\n mode: worker mode\n redis_client: the redis client used to query exports.\n \"\"\"\n", "def __init__(self, worker, mode):...\n", "self.worker = worker\n", "self.mode = mode\n", "self.redis_client = worker.redis_client\n", "def start(self):...\n", "\"\"\"docstring\"\"\"\n", "t = threading.Thread(target=self._run, name='ray_import_thread')\n", "t.daemon = True\n", "t.start()\n", "def _run(self):...\n", "import_pubsub_client = self.redis_client.pubsub()\n", "import_pubsub_client.subscribe('__keyspace@0__:Exports')\n", "num_imported = 0\n", "export_keys = self.redis_client.lrange('Exports', 0, -1)\n", "for key in export_keys:\n", "num_imported += 1\n", "for msg in import_pubsub_client.listen():\n", "def _process_key(self, key):...\n", "self._process_key(key)\n", "if msg['type'] == 'subscribe':\n", "\"\"\"docstring\"\"\"\n", "assert msg['data'] == b'rpush'\n", "if self.mode != ray.WORKER_MODE:\n", "num_imports = self.redis_client.llen('Exports')\n", "if key.startswith(b'FunctionsToRun'):\n", "if key.startswith(b'RemoteFunction'):\n", "assert num_imports >= num_imported\n", "self.fetch_and_execute_function_to_run(key)\n", "return\n", "self.worker.function_actor_manager.fetch_and_register_remote_function(key)\n", "if key.startswith(b'FunctionsToRun'):\n", "for i in range(num_imported, num_imports):\n", "def fetch_and_execute_function_to_run(self, key):...\n", "self.fetch_and_execute_function_to_run(key)\n", "if key.startswith(b'ActorClass'):\n", "num_imported += 1\n", "\"\"\"docstring\"\"\"\n", "self.worker.function_actor_manager.imported_actor_classes.add(key)\n", "key = self.redis_client.lindex('Exports', i)\n", "driver_id, serialized_function, run_on_other_drivers = self.redis_client.hmget(\n key, ['driver_id', 'function', 'run_on_other_drivers'])\n", "self._process_key(key)\n", "if utils.decode(run_on_other_drivers\n", "return\n", "function = pickle.loads(serialized_function)\n", "traceback_str = traceback.format_exc()\n", "function({'worker': self.worker})\n", "utils.push_error_to_driver(self.worker, ray_constants.\n FUNCTION_TO_RUN_PUSH_ERROR, traceback_str, driver_id=ray.DriverID(\n driver_id))\n" ]
[ 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 6, 6, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Assign'", "Assign'", "For", "AugAssign'", "For", "FunctionDef'", "Expr'", "Condition", "Docstring", "Assert'", "Condition", "Assign'", "Condition", "Condition", "Assert'", "Expr'", "Return'", "Expr'", "Condition", "For", "FunctionDef'", "Expr'", "Condition", "AugAssign'", "Docstring", "Expr'", "Assign'", "Assign'", "Expr'", "Condition", "Return'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_11(self):...\n", "\"\"\"docstring\"\"\"\n", "self.client.login(username='autotest2', password='password')\n", "VAR_4 = '/api/apps'\n", "VAR_5 = self.client.post(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_6 = VAR_5.data['id']\n", "VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_8 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 201)\n", "self.client.login(username='autotest', password='password')\n", "VAR_4 = '/api/apps/{app_id}/scale'.format(**locals())\n", "VAR_8 = {'web': 4, 'worker': 2}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 204)\n" ]
[ "def test_admin_can_manage_other_containers(self):...\n", "\"\"\"docstring\"\"\"\n", "self.client.login(username='autotest2', password='password')\n", "url = '/api/apps'\n", "response = self.client.post(url)\n", "self.assertEqual(response.status_code, 201)\n", "app_id = response.data['id']\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "self.client.login(username='autotest', password='password')\n", "url = '/api/apps/{app_id}/scale'.format(**locals())\n", "body = {'web': 4, 'worker': 2}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 204)\n" ]
[ 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_1(VAR_0):...\n", "import pdb\n", "pdb.set_trace()\n" ]
[ "def SetUpInputRedirect(cmd):...\n", "import pdb\n", "pdb.set_trace()\n" ]
[ 0, 6, 6 ]
[ "FunctionDef'", "Import'", "Expr'" ]
[ "def FUNC_2(self):...\n", "VAR_1 = 'INVALID'\n", "self.assertEqual(escape_path_argument('/home/usr/a-file', VAR_1),\n '/home/usr/a-file')\n", "self.assertEqual(escape_path_argument('/home/us r/a-file with spaces.bla',\n VAR_1), '/home/us r/a-file with spaces.bla')\n", "self.assertEqual(escape_path_argument('|home|us r|a*dir with spaces|x|',\n VAR_1), '|home|us r|a*dir with spaces|x|')\n", "self.assertEqual(escape_path_argument('system|a|b|c?d', VAR_1),\n 'system|a|b|c?d')\n" ]
[ "def test_escape_path_argument_unsupported(self):...\n", "_type = 'INVALID'\n", "self.assertEqual(escape_path_argument('/home/usr/a-file', _type),\n '/home/usr/a-file')\n", "self.assertEqual(escape_path_argument('/home/us r/a-file with spaces.bla',\n _type), '/home/us r/a-file with spaces.bla')\n", "self.assertEqual(escape_path_argument('|home|us r|a*dir with spaces|x|',\n _type), '|home|us r|a*dir with spaces|x|')\n", "self.assertEqual(escape_path_argument('system|a|b|c?d', _type),\n 'system|a|b|c?d')\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "import json\n", "import os\n", "import time\n", "import urllib\n", "import psycopg2\n", "from websocket import create_connection\n", "import config\n", "VAR_0 = create_connection(config.WEBSOCKET_URL)\n", "VAR_1 = psycopg2.connect(**config.POSTGRES)\n", "VAR_2 = VAR_1.cursor()\n", "VAR_3 = 'TRUNCATE holders'\n", "VAR_2.execute(VAR_3)\n", "VAR_3 = 'ALTER SEQUENCE holders_hid_seq RESTART WITH 1'\n", "VAR_2.execute(VAR_3)\n", "VAR_1.commit()\n", "VAR_0.send('{\"id\":1, \"method\":\"call\", \"params\":[0,\"get_account_count\",[]]}')\n", "VAR_4 = VAR_0.recv()\n", "VAR_5 = json.loads(VAR_4)\n", "VAR_6 = int(VAR_5['result'])\n", "for ac in range(0, VAR_6):\n", "VAR_0.send('{\"id\":1, \"method\":\"call\", \"params\":[0,\"get_objects\",[[\"1.2.' +\n str(ac) + '\"]]]}')\n", "VAR_1.close()\n", "VAR_4 = VAR_0.recv()\n", "VAR_5 = json.loads(VAR_4)\n", "VAR_9 = VAR_5['result'][0]['id']\n", "VAR_0.send(\n '{\"id\":1, \"method\":\"call\", \"params\":[0,\"get_account_balances\",[\"' +\n VAR_9 + '\", [\"1.3.0\"]]]}')\n", "VAR_10 = VAR_5['result'][0]['name']\n", "VAR_7 = VAR_0.recv()\n", "VAR_8 = json.loads(VAR_7)\n", "if VAR_8['result'][0]['amount'] == 0:\n", "VAR_11 = VAR_8['result'][0]['amount']\n", "VAR_0.send('{\"id\":1, \"method\":\"call\", \"params\":[0,\"get_objects\",[[\"' +\n VAR_5['result'][0]['statistics'] + '\"]]]}')\n", "VAR_4 = VAR_0.recv()\n", "VAR_12 = json.loads(VAR_4)\n", "VAR_14 = VAR_12['result'][0]['total_core_in_orders']\n", "VAR_14 = 0\n", "VAR_11 = int(VAR_11) + int(VAR_14)\n", "VAR_13 = VAR_5['result'][0]['options']['voting_account']\n", "VAR_3 = (\n \"INSERT INTO holders (account_id, account_name, amount, voting_as) VALUES('\"\n + VAR_9 + \"', '\" + VAR_10 + \"','\" + str(VAR_11) + \"', '\" + VAR_13 + \"')\")\n", "VAR_2.execute(VAR_3)\n", "VAR_1.commit()\n" ]
[ "import json\n", "import os\n", "import time\n", "import urllib\n", "import psycopg2\n", "from websocket import create_connection\n", "import config\n", "ws = create_connection(config.WEBSOCKET_URL)\n", "con = psycopg2.connect(**config.POSTGRES)\n", "cur = con.cursor()\n", "query = 'TRUNCATE holders'\n", "cur.execute(query)\n", "query = 'ALTER SEQUENCE holders_hid_seq RESTART WITH 1'\n", "cur.execute(query)\n", "con.commit()\n", "ws.send('{\"id\":1, \"method\":\"call\", \"params\":[0,\"get_account_count\",[]]}')\n", "result = ws.recv()\n", "j = json.loads(result)\n", "account_count = int(j['result'])\n", "for ac in range(0, account_count):\n", "ws.send('{\"id\":1, \"method\":\"call\", \"params\":[0,\"get_objects\",[[\"1.2.' + str\n (ac) + '\"]]]}')\n", "con.close()\n", "result = ws.recv()\n", "j = json.loads(result)\n", "account_id = j['result'][0]['id']\n", "ws.send('{\"id\":1, \"method\":\"call\", \"params\":[0,\"get_account_balances\",[\"' +\n account_id + '\", [\"1.3.0\"]]]}')\n", "account_name = j['result'][0]['name']\n", "result3 = ws.recv()\n", "jb = json.loads(result3)\n", "if jb['result'][0]['amount'] == 0:\n", "amount = jb['result'][0]['amount']\n", "ws.send('{\"id\":1, \"method\":\"call\", \"params\":[0,\"get_objects\",[[\"' + j[\n 'result'][0]['statistics'] + '\"]]]}')\n", "result = ws.recv()\n", "js = json.loads(result)\n", "total_core_in_orders = js['result'][0]['total_core_in_orders']\n", "total_core_in_orders = 0\n", "amount = int(amount) + int(total_core_in_orders)\n", "voting_account = j['result'][0]['options']['voting_account']\n", "query = (\n \"INSERT INTO holders (account_id, account_name, amount, voting_as) VALUES('\"\n + account_id + \"', '\" + account_name + \"','\" + str(amount) + \"', '\" +\n voting_account + \"')\")\n", "cur.execute(query)\n", "con.commit()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0 ]
[ "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "Import'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "For", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def __init__(self, *VAR_0, VAR_1=None, VAR_2=None):...\n", "\"\"\"docstring\"\"\"\n", "if len(VAR_0) == 2:\n", "VAR_10, VAR_41 = VAR_0\n", "if len(VAR_0) == 1:\n", "self.name = VAR_10\n", "VAR_17 = VAR_0[0]\n", "self.workflow = VAR_41\n", "self.name = VAR_17.name\n", "self.docstring = None\n", "self.workflow = VAR_17.workflow\n", "self.message = None\n", "self.docstring = VAR_17.docstring\n", "self._input = InputFiles()\n", "self.message = VAR_17.message\n", "self._output = OutputFiles()\n", "self._input = InputFiles(VAR_17._input)\n", "self._params = Params()\n", "self._output = OutputFiles(VAR_17._output)\n", "self.dependencies = dict()\n", "self._params = Params(VAR_17._params)\n", "self.dynamic_output = set()\n", "self.dependencies = dict(VAR_17.dependencies)\n", "self.dynamic_input = set()\n", "self.dynamic_output = set(VAR_17.dynamic_output)\n", "self.temp_output = set()\n", "self.dynamic_input = set(VAR_17.dynamic_input)\n", "self.protected_output = set()\n", "self.temp_output = set(VAR_17.temp_output)\n", "self.touch_output = set()\n", "self.protected_output = set(VAR_17.protected_output)\n", "self.subworkflow_input = dict()\n", "self.touch_output = set(VAR_17.touch_output)\n", "self.resources = dict(_cores=1, _nodes=1)\n", "self.subworkflow_input = dict(VAR_17.subworkflow_input)\n", "self.priority = 0\n", "self.resources = VAR_17.resources\n", "self.version = None\n", "self.priority = VAR_17.priority\n", "self._log = Log()\n", "self.version = VAR_17.version\n", "self._benchmark = None\n", "self._log = VAR_17._log\n", "self.wildcard_names = set()\n", "self._benchmark = VAR_17._benchmark\n", "self.lineno = VAR_1\n", "self.wildcard_names = set(VAR_17.wildcard_names)\n", "self.snakefile = VAR_2\n", "self.lineno = VAR_17.lineno\n", "self.run_func = None\n", "self.snakefile = VAR_17.snakefile\n", "self.shellcmd = None\n", "self.run_func = VAR_17.run_func\n", "self.norun = False\n", "self.shellcmd = VAR_17.shellcmd\n", "self.norun = VAR_17.norun\n" ]
[ "def __init__(self, *args, lineno=None, snakefile=None):...\n", "\"\"\"docstring\"\"\"\n", "if len(args) == 2:\n", "name, workflow = args\n", "if len(args) == 1:\n", "self.name = name\n", "other = args[0]\n", "self.workflow = workflow\n", "self.name = other.name\n", "self.docstring = None\n", "self.workflow = other.workflow\n", "self.message = None\n", "self.docstring = other.docstring\n", "self._input = InputFiles()\n", "self.message = other.message\n", "self._output = OutputFiles()\n", "self._input = InputFiles(other._input)\n", "self._params = Params()\n", "self._output = OutputFiles(other._output)\n", "self.dependencies = dict()\n", "self._params = Params(other._params)\n", "self.dynamic_output = set()\n", "self.dependencies = dict(other.dependencies)\n", "self.dynamic_input = set()\n", "self.dynamic_output = set(other.dynamic_output)\n", "self.temp_output = set()\n", "self.dynamic_input = set(other.dynamic_input)\n", "self.protected_output = set()\n", "self.temp_output = set(other.temp_output)\n", "self.touch_output = set()\n", "self.protected_output = set(other.protected_output)\n", "self.subworkflow_input = dict()\n", "self.touch_output = set(other.touch_output)\n", "self.resources = dict(_cores=1, _nodes=1)\n", "self.subworkflow_input = dict(other.subworkflow_input)\n", "self.priority = 0\n", "self.resources = other.resources\n", "self.version = None\n", "self.priority = other.priority\n", "self._log = Log()\n", "self.version = other.version\n", "self._benchmark = None\n", "self._log = other._log\n", "self.wildcard_names = set()\n", "self._benchmark = other._benchmark\n", "self.lineno = lineno\n", "self.wildcard_names = set(other.wildcard_names)\n", "self.snakefile = snakefile\n", "self.lineno = other.lineno\n", "self.run_func = None\n", "self.snakefile = other.snakefile\n", "self.shellcmd = None\n", "self.run_func = other.run_func\n", "self.norun = False\n", "self.shellcmd = other.shellcmd\n", "self.norun = other.norun\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Docstring", "Assign'", "Assign'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "async def FUNC_1(self, VAR_2: str) ->None:...\n", "VAR_7 = await as_future(session.query(database.Paste).filter(database.Paste\n .paste_id == VAR_2).first)\n", "if not VAR_7:\n", "self.set_status(404)\n", "VAR_8 = self.get_cookie('removal') == str(VAR_7.removal_id)\n", "self.render('404.html', pagetitle='404')\n", "self.render('show.html', VAR_7=paste, pagetitle='show', VAR_8=can_delete,\n linenos=False)\n", "return\n" ]
[ "async def get(self, paste_id: str) ->None:...\n", "paste = await as_future(session.query(database.Paste).filter(database.Paste\n .paste_id == paste_id).first)\n", "if not paste:\n", "self.set_status(404)\n", "can_delete = self.get_cookie('removal') == str(paste.removal_id)\n", "self.render('404.html', pagetitle='404')\n", "self.render('show.html', paste=paste, pagetitle='show', can_delete=\n can_delete, linenos=False)\n", "return\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "@VAR_0.route('/loadProtocoles', methods=['GET', 'POST'])...\n", "VAR_8 = getConnexion()\n", "VAR_9 = 'string'\n", "VAR_8.cur.execute(VAR_9)\n", "return Response(flask.json.dumps(VAR_8.cur.fetchone()[0]), mimetype=\n 'application/json')\n" ]
[ "@addObs.route('/loadProtocoles', methods=['GET', 'POST'])...\n", "db = getConnexion()\n", "sql = (\n 'SELECT array_to_json(array_agg(row_to_json(p))) FROM (SELECT * FROM synthese.bib_projet WHERE saisie_possible = TRUE) p'\n )\n", "db.cur.execute(sql)\n", "return Response(flask.json.dumps(db.cur.fetchone()[0]), mimetype=\n 'application/json')\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_2(VAR_0, VAR_1, VAR_2):...\n", "VAR_9 = VAR_0.get_input('client', default='')\n", "VAR_10 = VAR_0.get_input('tenant', default='')\n", "if len(VAR_9) and len(VAR_10):\n", "VAR_1['AZURE_CLIENT_ID'] = VAR_9\n", "VAR_1['AZURE_SUBSCRIPTION_ID'] = VAR_0.get_input('subscription', default='')\n", "VAR_1['AZURE_TENANT'] = VAR_10\n", "VAR_1['AZURE_AD_USER'] = VAR_0.get_input('username', default='')\n", "VAR_1['AZURE_SECRET'] = VAR_0.get_input('secret', default='')\n", "VAR_1['AZURE_PASSWORD'] = VAR_0.get_input('password', default='')\n", "VAR_1['AZURE_SUBSCRIPTION_ID'] = VAR_0.get_input('subscription', default='')\n", "if VAR_0.has_input('cloud_environment'):\n", "VAR_1['AZURE_CLOUD_ENVIRONMENT'] = VAR_0.get_input('cloud_environment')\n" ]
[ "def azure_rm(cred, env, private_data_dir):...\n", "client = cred.get_input('client', default='')\n", "tenant = cred.get_input('tenant', default='')\n", "if len(client) and len(tenant):\n", "env['AZURE_CLIENT_ID'] = client\n", "env['AZURE_SUBSCRIPTION_ID'] = cred.get_input('subscription', default='')\n", "env['AZURE_TENANT'] = tenant\n", "env['AZURE_AD_USER'] = cred.get_input('username', default='')\n", "env['AZURE_SECRET'] = cred.get_input('secret', default='')\n", "env['AZURE_PASSWORD'] = cred.get_input('password', default='')\n", "env['AZURE_SUBSCRIPTION_ID'] = cred.get_input('subscription', default='')\n", "if cred.has_input('cloud_environment'):\n", "env['AZURE_CLOUD_ENVIRONMENT'] = cred.get_input('cloud_environment')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'" ]
[ "def FUNC_4(self):...\n", "if self.password == None or self.username == None or self.password == ' ' or self.username == ' ':\n", "self.errorNotification('Please login to JIRA first')\n", "self.firefox.loadPage('https://devops.partech.com/jira/login.jsp', 'JIRA')\n", "self.firefox.login(self.username, self.password)\n", "self.firefox.createNewTicket()\n", "self.firefox.inputDataToCase(self.summary_field.get(), self.detailed_field.\n get('1.0', END))\n" ]
[ "def createCase(self):...\n", "if self.password == None or self.username == None or self.password == ' ' or self.username == ' ':\n", "self.errorNotification('Please login to JIRA first')\n", "self.firefox.loadPage('https://devops.partech.com/jira/login.jsp', 'JIRA')\n", "self.firefox.login(self.username, self.password)\n", "self.firefox.createNewTicket()\n", "self.firefox.inputDataToCase(self.summary_field.get(), self.detailed_field.\n get('1.0', END))\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_21(self):...\n", "if not self._IsServerAlive():\n", "return\n", "SendEventNotificationAsync('CurrentIdentifierFinished')\n" ]
[ "def OnCurrentIdentifierFinished(self):...\n", "if not self._IsServerAlive():\n", "return\n", "SendEventNotificationAsync('CurrentIdentifierFinished')\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Expr'" ]
[ "def FUNC_0(VAR_0):...\n", "return os.stat(VAR_0, follow_symlinks=os.stat not in os.\n supports_follow_symlinks)\n" ]
[ "def lstat(f):...\n", "return os.stat(f, follow_symlinks=os.stat not in os.supports_follow_symlinks)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_3(VAR_1=None, VAR_2=None):...\n", "\"\"\"docstring\"\"\"\n", "VAR_13 = os.path.join(FUNC_0(), 'tmp')\n", "if not os.path.exists(VAR_13):\n", "os.mkdir(VAR_13)\n", "VAR_2 = '' if not isinstance(VAR_2, str) else VAR_2\n", "VAR_1 = '' if not isinstance(VAR_1, str) else VAR_1\n", "VAR_13 = os.path.join(VAR_13, VAR_2)\n", "if not os.path.exists(VAR_13):\n", "os.mkdir(VAR_13)\n", "return os.path.join(VAR_13, VAR_1)\n" ]
[ "def path_to_tmp(file=None, folder=None):...\n", "\"\"\"docstring\"\"\"\n", "tmp_path = os.path.join(path_to_visbrain_data(), 'tmp')\n", "if not os.path.exists(tmp_path):\n", "os.mkdir(tmp_path)\n", "folder = '' if not isinstance(folder, str) else folder\n", "file = '' if not isinstance(file, str) else file\n", "tmp_path = os.path.join(tmp_path, folder)\n", "if not os.path.exists(tmp_path):\n", "os.mkdir(tmp_path)\n", "return os.path.join(tmp_path, file)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Return'" ]
[ "def FUNC_4(self, VAR_2, VAR_3, VAR_4, *VAR_5):...\n", "self.write_data({'type': 'watch_folder_corrupt_torrent', 'event': {'name':\n VAR_5[0]}})\n" ]
[ "def on_watch_folder_corrupt_torrent(self, subject, changetype, objectID, *args...\n", "self.write_data({'type': 'watch_folder_corrupt_torrent', 'event': {'name':\n args[0]}})\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_2(self, VAR_3, VAR_4='200 OK', VAR_5='text/html', VAR_6=[]):...\n", "VAR_10 = [u'HTTP/1.1 ' + VAR_4, u'Connection: Keep-Alive', \n u'Content-Type: ' + VAR_5, u'Content-Length: ' + str(len(VAR_3))]\n", "VAR_10.extend(VAR_6)\n", "VAR_11 = u'\\r\\n'.join(VAR_10).encode('utf-8')\n", "if self.ok:\n", "self.s.send(VAR_11 + b'\\r\\n\\r\\n' + VAR_3)\n", "return VAR_3\n" ]
[ "def reply(self, body, status='200 OK', mime='text/html', headers=[]):...\n", "response = [u'HTTP/1.1 ' + status, u'Connection: Keep-Alive', \n u'Content-Type: ' + mime, u'Content-Length: ' + str(len(body))]\n", "response.extend(headers)\n", "response_str = u'\\r\\n'.join(response).encode('utf-8')\n", "if self.ok:\n", "self.s.send(response_str + b'\\r\\n\\r\\n' + body)\n", "return body\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Condition", "Expr'", "Return'" ]
[ "def FUNC_7(self):...\n", "self.runall(self.checks, skip_environ_check=True)\n", "VAR_15 = self.runner.stats\n", "self.assertEqual(8, VAR_15.num_cases())\n", "self.assertEqual(4, len(VAR_15.failures()))\n", "self.assertEqual(2, self._num_failures_stage('setup'))\n", "self.assertEqual(1, self._num_failures_stage('sanity'))\n", "self.assertEqual(1, self._num_failures_stage('performance'))\n" ]
[ "def test_runall_skip_prgenv_check(self):...\n", "self.runall(self.checks, skip_environ_check=True)\n", "stats = self.runner.stats\n", "self.assertEqual(8, stats.num_cases())\n", "self.assertEqual(4, len(stats.failures()))\n", "self.assertEqual(2, self._num_failures_stage('setup'))\n", "self.assertEqual(1, self._num_failures_stage('sanity'))\n", "self.assertEqual(1, self._num_failures_stage('performance'))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "import stack.commands\n", "from stack.exception import CommandError, ParamRequired, ParamType, ParamValue, ParamError\n", "\"\"\"string\"\"\"\n", "def FUNC_0(self, VAR_0, VAR_1, VAR_2, VAR_3, VAR_4, VAR_5):...\n", "self.db.execute('string' % (VAR_1, VAR_2, VAR_3, VAR_4, VAR_5))\n", "VAR_8 = self.db.fetchone()\n", "if VAR_8:\n", "VAR_19 = ['scope', 'name']\n", "def FUNC_1(self, VAR_6, VAR_7):...\n", "VAR_20 = [VAR_1, VAR_0]\n", "VAR_1 = None\n", "if VAR_3 > -1:\n", "VAR_9 = []\n", "VAR_19.append('adapter')\n", "if VAR_4 > -1:\n", "VAR_10 = []\n", "VAR_20.append('%s' % VAR_3)\n", "VAR_19.append('enclosure')\n", "VAR_19.append('slot')\n", "VAR_11 = []\n", "VAR_20.append('%s' % VAR_4)\n", "VAR_20.append('%s' % VAR_5)\n", "if len(VAR_7) == 0:\n", "VAR_1 = 'global'\n", "if len(VAR_7) == 1:\n", "if not VAR_1:\n", "VAR_9 = self.getOSNames(VAR_7)\n", "VAR_9 = []\n", "VAR_10 = self.getApplianceNames(VAR_7)\n", "VAR_10 = []\n", "VAR_11 = self.getHostnames(VAR_7)\n", "VAR_11 = []\n", "if VAR_7[0] in VAR_9:\n", "if not VAR_1:\n", "VAR_1 = 'os'\n", "if VAR_7[0] in VAR_10:\n", "if VAR_1 == 'global':\n", "VAR_1 = 'appliance'\n", "if VAR_7[0] in VAR_11:\n", "VAR_0 = 'global'\n", "VAR_0 = VAR_7[0]\n", "VAR_1 = 'host'\n", "VAR_3, VAR_4, VAR_5, VAR_12, VAR_13, VAR_14, VAR_15, VAR_16 = self.fillParams([\n ('adapter', None), ('enclosure', None), ('slot', None), ('hotspare',\n None), ('raidlevel', None), ('arrayid', None, True), ('options', ''), (\n 'force', 'n')])\n", "if not VAR_12 and not VAR_5:\n", "if VAR_14 != 'global' and not VAR_13:\n", "if VAR_3:\n", "VAR_3 = -1\n", "VAR_3 = int(VAR_3)\n", "if VAR_3 < 0:\n", "if VAR_4:\n", "VAR_4 = -1\n", "VAR_4 = int(VAR_4)\n", "if VAR_4 < 0:\n", "VAR_17 = []\n", "if VAR_5:\n", "for VAR_21 in VAR_5.split(','):\n", "VAR_18 = []\n", "if VAR_21 == '*':\n", "if VAR_12:\n", "VAR_21 = -1\n", "VAR_21 = int(VAR_21)\n", "if VAR_21 < 0:\n", "for VAR_22 in VAR_12.split(','):\n", "if VAR_14 in ['global', '*']:\n", "VAR_17.append(VAR_21)\n", "if VAR_21 in VAR_17:\n", "VAR_22 = int(VAR_22)\n", "if VAR_22 < 0:\n", "if VAR_14 == 'global' and len(VAR_18) == 0:\n", "VAR_14 = int(VAR_14)\n", "if VAR_14 < 1:\n", "if VAR_22 in VAR_18:\n", "VAR_2 = None\n", "VAR_18.append(VAR_22)\n", "if VAR_1 == 'global':\n", "VAR_2 = -1\n", "if VAR_1 == 'appliance':\n", "VAR_16 = self.str2bool(VAR_16)\n", "self.db.execute(\"\"\"select id from appliances where\n\t\t\t\tname = '%s' \"\"\" % VAR_0)\n", "if VAR_1 == 'host':\n", "for VAR_5 in VAR_17:\n", "VAR_2, = self.db.fetchone()\n", "self.db.execute(\"\"\"select id from nodes where\n\t\t\t\tname = '%s' \"\"\" % VAR_0)\n", "if not VAR_16:\n", "for VAR_12 in VAR_18:\n", "VAR_2, = self.db.fetchone()\n", "self.checkIt(VAR_0, VAR_1, VAR_2, VAR_3, VAR_4, VAR_5)\n", "if not VAR_16:\n", "if VAR_14 == 'global':\n", "self.checkIt(VAR_0, VAR_1, VAR_2, VAR_3, VAR_4, VAR_12)\n", "VAR_14 = -1\n", "if VAR_14 == '*':\n", "for VAR_5 in VAR_17:\n", "VAR_14 = -2\n", "self.db.execute('string' % (VAR_1, VAR_2, VAR_3, VAR_4, VAR_5, VAR_13,\n VAR_14, VAR_15))\n", "for VAR_12 in VAR_18:\n", "VAR_13 = -1\n", "if VAR_14 == 'global':\n", "VAR_14 = -1\n", "self.db.execute('string' % (VAR_1, VAR_2, VAR_3, VAR_4, VAR_12, VAR_13,\n VAR_14, VAR_15))\n" ]
[ "import stack.commands\n", "from stack.exception import CommandError, ParamRequired, ParamType, ParamValue, ParamError\n", "\"\"\"\n\tAdd a storage controller configuration to the database.\n\n\t<arg type='string' name='scope'>\n\tZero or one argument. The argument is the scope: a valid os (e.g.,\n\t'redhat'), a valid appliance (e.g., 'backend') or a valid host\n\t(e.g., 'backend-0-0). No argument means the scope is 'global'.\n\t</arg>\n\n\t<param type='int' name='adapter' optional='1'>\n\tAdapter address.\n\t</param>\n\n\t<param type='int' name='enclosure' optional='1'>\n\tEnclosure address.\n\t</param>\n\n\t<param type='int' name='slot'>\n\tSlot address(es). This can be a comma-separated list meaning all disks\n\tin the specified slots will be associated with the same array\n\t</param>\n\n\t<param type='int' name='raidlevel'>\n\tRAID level. Raid 0, 1, 5, 6 and 10 are currently supported.\n\t</param>\n\n\t<param type='int' name='hotspare' optional='1'>\n\tSlot address(es) of the hotspares associated with this array id. This\n\tcan be a comma-separated list (like the 'slot' parameter). If the\n\t'arrayid' is 'global', then the specified slots are global hotspares.\n\t</param>\n\n\t<param type='string' name='arrayid'>\n\tThe 'arrayid' is used to determine which disks are grouped as part\n\tof the same array. For example, all the disks with arrayid of '1' will\n\tbe part of the same array. Arrayids must be integers starting at 1\n\tor greater. If the arrayid is 'global', then 'hotspare' must\n\thave at least one slot definition (this is how one specifies a global\n\thotspare).\n\tIn addition, the arrays will be created in arrayid order, that is,\n\tthe array with arrayid equal to 1 will be created first, arrayid\n\tequal to 2 will be created second, etc.\n\t</param>\n\n\t<example cmd='add storage controller backend-0-0 slot=1 raidlevel=0 arrayid=1'>\n\tThe disk in slot 1 on backend-0-0 should be a RAID 0 disk.\n\t</example>\n\n\t<example cmd='add storage controller backend-0-0 slot=2,3,4,5,6 raidlevel=6 hotspare=7,8 arrayid=2'>\n\tThe disks in slots 2-6 on backend-0-0 should be a RAID 6 with two\n\thotspares associated with the array in slots 7 and 8.\n\t</example>\n\t\"\"\"\n", "def checkIt(self, name, scope, tableid, adapter, enclosure, slot):...\n", "self.db.execute(\n \"\"\"select scope, tableid, adapter, enclosure,\n\t\t\tslot from storage_controller where\n\t\t\tscope = '%s' and tableid = %s and adapter = %s and\n\t\t\tenclosure = %s and slot = %s\"\"\"\n % (scope, tableid, adapter, enclosure, slot))\n", "row = self.db.fetchone()\n", "if row:\n", "label = ['scope', 'name']\n", "def run(self, params, args):...\n", "value = [scope, name]\n", "scope = None\n", "if adapter > -1:\n", "oses = []\n", "label.append('adapter')\n", "if enclosure > -1:\n", "appliances = []\n", "value.append('%s' % adapter)\n", "label.append('enclosure')\n", "label.append('slot')\n", "hosts = []\n", "value.append('%s' % enclosure)\n", "value.append('%s' % slot)\n", "if len(args) == 0:\n", "scope = 'global'\n", "if len(args) == 1:\n", "if not scope:\n", "oses = self.getOSNames(args)\n", "oses = []\n", "appliances = self.getApplianceNames(args)\n", "appliances = []\n", "hosts = self.getHostnames(args)\n", "hosts = []\n", "if args[0] in oses:\n", "if not scope:\n", "scope = 'os'\n", "if args[0] in appliances:\n", "if scope == 'global':\n", "scope = 'appliance'\n", "if args[0] in hosts:\n", "name = 'global'\n", "name = args[0]\n", "scope = 'host'\n", "adapter, enclosure, slot, hotspare, raidlevel, arrayid, options, 
force = (self\n .fillParams([('adapter', None), ('enclosure', None), ('slot', None), (\n 'hotspare', None), ('raidlevel', None), ('arrayid', None, True), (\n 'options', ''), ('force', 'n')]))\n", "if not hotspare and not slot:\n", "if arrayid != 'global' and not raidlevel:\n", "if adapter:\n", "adapter = -1\n", "adapter = int(adapter)\n", "if adapter < 0:\n", "if enclosure:\n", "enclosure = -1\n", "enclosure = int(enclosure)\n", "if enclosure < 0:\n", "slots = []\n", "if slot:\n", "for s in slot.split(','):\n", "hotspares = []\n", "if s == '*':\n", "if hotspare:\n", "s = -1\n", "s = int(s)\n", "if s < 0:\n", "for h in hotspare.split(','):\n", "if arrayid in ['global', '*']:\n", "slots.append(s)\n", "if s in slots:\n", "h = int(h)\n", "if h < 0:\n", "if arrayid == 'global' and len(hotspares) == 0:\n", "arrayid = int(arrayid)\n", "if arrayid < 1:\n", "if h in hotspares:\n", "tableid = None\n", "hotspares.append(h)\n", "if scope == 'global':\n", "tableid = -1\n", "if scope == 'appliance':\n", "force = self.str2bool(force)\n", "self.db.execute(\"\"\"select id from appliances where\n\t\t\t\tname = '%s' \"\"\" % name)\n", "if scope == 'host':\n", "for slot in slots:\n", "tableid, = self.db.fetchone()\n", "self.db.execute(\"\"\"select id from nodes where\n\t\t\t\tname = '%s' \"\"\" % name)\n", "if not force:\n", "for hotspare in hotspares:\n", "tableid, = self.db.fetchone()\n", "self.checkIt(name, scope, tableid, adapter, enclosure, slot)\n", "if not force:\n", "if arrayid == 'global':\n", "self.checkIt(name, scope, tableid, adapter, enclosure, hotspare)\n", "arrayid = -1\n", "if arrayid == '*':\n", "for slot in slots:\n", "arrayid = -2\n", "self.db.execute(\n \"\"\"insert into storage_controller\n\t\t\t\t(scope, tableid, adapter, enclosure, slot,\n\t\t\t\traidlevel, arrayid, options) values ('%s', %s, %s, %s,\n\t\t\t\t%s, %s, %s, '%s') \"\"\"\n % (scope, tableid, adapter, enclosure, slot, raidlevel, arrayid, options))\n", "for hotspare in hotspares:\n", "raidlevel = -1\n", "if arrayid == 'global':\n", "arrayid = -1\n", "self.db.execute(\n \"\"\"insert into storage_controller\n\t\t\t\t(scope, tableid, adapter, enclosure, slot,\n\t\t\t\traidlevel, arrayid, options) values ('%s', %s, %s, %s,\n\t\t\t\t%s, %s, %s, '%s') \"\"\"\n % (scope, tableid, adapter, enclosure, hotspare, raidlevel, arrayid,\n options))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0 ]
[ "Import'", "ImportFrom'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Condition", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "For", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "For", "Condition", "Expr'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "For", "Assign'", "Expr'", "Condition", "For", "Assign'", "Expr'", "Condition", "Condition", "Expr'", "Assign'", "Condition", "For", "Assign'", "Expr'", "For", "Assign'", "Condition", "Assign'", "Expr'" ]
[ "def FUNC_12(self, VAR_16):...\n", "\"\"\"docstring\"\"\"\n", "VAR_28 = ['volume', 'create', VAR_16['name'], '%sG' % VAR_16['size']]\n", "VAR_0.error(_('Failed to create volume %s'), VAR_16['name'])\n", "if self.configuration.eqlx_pool != 'default':\n", "VAR_28.append('pool')\n", "if self.configuration.san_thin_provision:\n", "VAR_28.append(self.configuration.eqlx_pool)\n", "VAR_28.append('thin-provision')\n", "VAR_25 = self._eql_execute(*VAR_28)\n", "return self._get_volume_data(VAR_25)\n" ]
[ "def create_volume(self, volume):...\n", "\"\"\"docstring\"\"\"\n", "cmd = ['volume', 'create', volume['name'], '%sG' % volume['size']]\n", "LOG.error(_('Failed to create volume %s'), volume['name'])\n", "if self.configuration.eqlx_pool != 'default':\n", "cmd.append('pool')\n", "if self.configuration.san_thin_provision:\n", "cmd.append(self.configuration.eqlx_pool)\n", "cmd.append('thin-provision')\n", "out = self._eql_execute(*cmd)\n", "return self._get_volume_data(out)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_5():...\n", "VAR_1.clear()\n", "VAR_3.clear()\n", "VAR_3 = VAR_0\n", "VAR_2.clear()\n" ]
[ "def session_clear():...\n", "dirs.clear()\n", "curr_path.clear()\n", "curr_path = root\n", "path.clear()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_0():...\n", "\"\"\"docstring\"\"\"\n", "if VAR_1:\n", "return\n", "VAR_3 = VAR_0\n", "while True:\n", "if os.path.isfile(os.path.join(VAR_3, 'google_appengine', 'VERSION')):\n", "VAR_14 = os.path.dirname(VAR_3)\n", "VAR_1 = os.path.realpath(os.path.join(VAR_3, 'google_appengine'))\n", "if VAR_14 == VAR_3:\n", "VAR_4 = os.path.realpath(os.path.join(VAR_1, 'lib'))\n", "VAR_3 = VAR_14\n", "sys.path.insert(0, os.path.realpath(os.path.join(VAR_4, 'yaml', 'lib')))\n" ]
[ "def _load_modules():...\n", "\"\"\"docstring\"\"\"\n", "if GAE_SDK:\n", "return\n", "root_dir = BASE_DIR\n", "while True:\n", "if os.path.isfile(os.path.join(root_dir, 'google_appengine', 'VERSION')):\n", "next_root = os.path.dirname(root_dir)\n", "GAE_SDK = os.path.realpath(os.path.join(root_dir, 'google_appengine'))\n", "if next_root == root_dir:\n", "gae_sdk_lib = os.path.realpath(os.path.join(GAE_SDK, 'lib'))\n", "root_dir = next_root\n", "sys.path.insert(0, os.path.realpath(os.path.join(gae_sdk_lib, 'yaml', 'lib')))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_3(VAR_1):...\n", "VAR_10 = [None, 0]\n", "async def FUNC_15(*VAR_8, **VAR_9):...\n", "if time.time() - VAR_10[1] > FUNC_3.timeout:\n", "VAR_10[0] = await VAR_1(*VAR_8, **kwargs)\n", "return VAR_10[0]\n", "VAR_10[1] = time.time()\n" ]
[ "def cache_page(func):...\n", "cache = [None, 0]\n", "async def ret(*args, **kwargs):...\n", "if time.time() - cache[1] > cache_page.timeout:\n", "cache[0] = await func(*args, **kwargs)\n", "return cache[0]\n", "cache[1] = time.time()\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "AsyncFunctionDef'", "Condition", "Assign'", "Return'", "Assign'" ]
[ "def FUNC_6(VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "return {'login': FUNC_7('/user_api/v1/account/login_session/', VAR_3.\n session), 'registration': FUNC_7('/user_api/v1/account/registration/',\n VAR_3.session), 'password_reset': FUNC_7(\n '/user_api/v1/account/password_reset/', VAR_3.session)}\n" ]
[ "def _get_form_descriptions(request):...\n", "\"\"\"docstring\"\"\"\n", "return {'login': _local_server_get('/user_api/v1/account/login_session/',\n request.session), 'registration': _local_server_get(\n '/user_api/v1/account/registration/', request.session),\n 'password_reset': _local_server_get(\n '/user_api/v1/account/password_reset/', request.session)}\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "@VAR_0.before_request...\n", "\"\"\"docstring\"\"\"\n", "g.session = Session(VAR_3)\n", "g.Base = VAR_2\n" ]
[ "@app.before_request...\n", "\"\"\"docstring\"\"\"\n", "g.session = Session(engine)\n", "g.Base = Base\n" ]
[ 0, 0, 0, 0 ]
[ "For", "Docstring", "Assign'", "Assign'" ]
[ "def FUNC_6(self, VAR_11):...\n", "if VAR_11 and is_banned_domain(VAR_11):\n", "VAR_101.errors.add(errors.BANNED_DOMAIN)\n" ]
[ "def run(self, url):...\n", "if url and is_banned_domain(url):\n", "c.errors.add(errors.BANNED_DOMAIN)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'" ]
[ "def FUNC_9(VAR_17):...\n", "if os.path.isfile(VAR_17):\n", "os.remove(VAR_17)\n" ]
[ "def clear_log(file_path):...\n", "if os.path.isfile(file_path):\n", "os.remove(file_path)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'" ]
[ "async def FUNC_8(self, VAR_5, VAR_11, VAR_9=0):...\n", "VAR_17 = await CLASS_0.create(VAR_7=user_id, VAR_8=self.id, VAR_9=ref_id)\n", "VAR_18 = await create_pool(RedisSettings.from_url(REDIS_URL))\n", "await asyncio.gather(VAR_17.set_content(VAR_11), VAR_18.enqueue_job(\n 'mention_users', self.id, VAR_11, VAR_5), return_exceptions=True)\n", "return VAR_17\n" ]
[ "async def add_comment(self, user_id, content, ref_id=0):...\n", "obj = await Comment.create(github_id=user_id, post_id=self.id, ref_id=ref_id)\n", "redis = await create_pool(RedisSettings.from_url(REDIS_URL))\n", "await asyncio.gather(obj.set_content(content), redis.enqueue_job(\n 'mention_users', self.id, content, user_id), return_exceptions=True)\n", "return obj\n" ]
[ 0, 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_2(self, VAR_6, VAR_7, VAR_8):...\n", "VAR_9 = (\n f\"UPDATE `artikelen` SET `{VAR_6}` = '{VAR_7}' WHERE `title` = '{VAR_8}'\")\n", "print(VAR_9)\n", "cursor.execute(VAR_9)\n", "return\n" ]
[ "def update_sql(self, column, location_nw, title):...\n", "sql_update = (\n f\"UPDATE `artikelen` SET `{column}` = '{location_nw}' WHERE `title` = '{title}'\"\n )\n", "print(sql_update)\n", "cursor.execute(sql_update)\n", "return\n" ]
[ 0, 4, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_1(VAR_0):...\n", "return 'Meetup : %s' % VAR_0.title\n" ]
[ "def meetup_article_title(meetup):...\n", "return 'Meetup : %s' % meetup.title\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_11(self, VAR_14):...\n", "" ]
[ "def is_integer(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_0(self):...\n", "if not GenericRequest.test_success(self):\n", "return False\n", "return self.get_submission_id() is not None\n" ]
[ "def test_success(self):...\n", "if not GenericRequest.test_success(self):\n", "return False\n", "return self.get_submission_id() is not None\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "\"\"\"string\"\"\"\n", "import os\n", "import re\n", "import tempfile\n", "import urllib2\n", "from invenio.legacy.bibclassify import config as bconfig\n", "if bconfig.STANDALONE:\n", "from urllib2 import urlopen\n", "from invenio.utils.url import make_invenio_opener\n", "VAR_0 = bconfig.get_logger('bibclassify.text_extractor')\n", "VAR_8 = make_invenio_opener('BibClassify').open\n", "VAR_1 = re.compile('[A-Za-z]{2,}')\n", "def FUNC_0(VAR_2):...\n", "\"\"\"docstring\"\"\"\n", "if not FUNC_4('pdftotext'):\n", "VAR_0.warning(\n 'GNU file was not found on the system. Switching to a weak file extension test.'\n )\n", "VAR_9 = os.popen('file ' + re.escape(VAR_2)).read()\n", "if VAR_2.lower().endswith('.pdf'):\n", "VAR_19 = VAR_9.split(':')[1]\n", "VAR_0.error('string')\n", "VAR_10 = VAR_19.find('PDF') > -1\n", "return True\n", "return False\n", "return VAR_10\n" ]
[ "\"\"\"\nBibClassify text extractor.\n\nThis module provides method to extract the fulltext from local or remote\ndocuments. Currently 2 formats of documents are supported: PDF and text\ndocuments.\n\n2 methods provide the functionality of the module: text_lines_from_local_file\nand text_lines_from_url.\n\nThis module also provides the utility 'is_pdf' that uses GNU file in order to\ndetermine if a local file is a PDF file.\n\nThis module is STANDALONE safe\n\"\"\"\n", "import os\n", "import re\n", "import tempfile\n", "import urllib2\n", "from invenio.legacy.bibclassify import config as bconfig\n", "if bconfig.STANDALONE:\n", "from urllib2 import urlopen\n", "from invenio.utils.url import make_invenio_opener\n", "log = bconfig.get_logger('bibclassify.text_extractor')\n", "urlopen = make_invenio_opener('BibClassify').open\n", "_ONE_WORD = re.compile('[A-Za-z]{2,}')\n", "def is_pdf(document):...\n", "\"\"\"docstring\"\"\"\n", "if not executable_exists('pdftotext'):\n", "log.warning(\n 'GNU file was not found on the system. Switching to a weak file extension test.'\n )\n", "file_output = os.popen('file ' + re.escape(document)).read()\n", "if document.lower().endswith('.pdf'):\n", "filetype = file_output.split(':')[1]\n", "log.error(\n \"Your version of the 'file' utility seems to be unsupported. Please report this to [email protected].\"\n )\n", "pdf = filetype.find('PDF') > -1\n", "return True\n", "return False\n", "return pdf\n" ]
[ 7, 0, 0, 7, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 7, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "Condition", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Docstring", "Condition", "Expr'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Return'", "Return'", "Return'" ]
[ "def __init__(self):...\n", "self.xsrf_token = None\n", "self.session = requests.Session()\n" ]
[ "def __init__(self):...\n", "self.xsrf_token = None\n", "self.session = requests.Session()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "def FUNC_32(self):...\n", "VAR_33 = VAR_5.search(self.file)\n", "if VAR_33:\n", "return self.file[:VAR_33.start()]\n", "return self.file\n" ]
[ "def constant_prefix(self):...\n", "first_wildcard = _wildcard_regex.search(self.file)\n", "if first_wildcard:\n", "return self.file[:first_wildcard.start()]\n", "return self.file\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_3(VAR_1, VAR_2, VAR_3='', VAR_4=None):...\n", "\"\"\"docstring\"\"\"\n", "return FUNC_4(VAR_1, VAR_2['access_token'], VAR_6='', VAR_3=token_type,\n VAR_4=extra_data)\n" ]
[ "def oauth2_token_setter(remote, resp, token_type='', extra_data=None):...\n", "\"\"\"docstring\"\"\"\n", "return token_setter(remote, resp['access_token'], secret='', token_type=\n token_type, extra_data=extra_data)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_1():...\n", "return frappe.db.sql('string', as_dict=1)\n" ]
[ "def get_children():...\n", "return frappe.db.sql(\n \"\"\"select route as name,\n\t\ttitle from `tabBlog Category`\n\t\twhere published = 1\n\t\tand exists (select name from `tabBlog Post`\n\t\t\twhere `tabBlog Post`.blog_category=`tabBlog Category`.name and published=1)\n\t\torder by title asc\"\"\"\n , as_dict=1)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_9(VAR_2):...\n", "assert VAR_2 == 'hello'\n" ]
[ "def assert_warn(line):...\n", "assert line == 'hello'\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assert'" ]
[ "def __iter__(self):...\n", "return self.nodes.__iter__()\n" ]
[ "def __iter__(self):...\n", "return self.nodes.__iter__()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __iter__(self: T) ->Iterator[VAR_0]:...\n", "self.is_subquery = True\n", "yield self\n" ]
[ "def __iter__(self: T) ->Iterator[T]:...\n", "self.is_subquery = True\n", "yield self\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_24(self, *VAR_71, **VAR_72):...\n", "def FUNC_39(VAR_101):...\n", "VAR_101.params = VAR_71, VAR_72\n", "return VAR_101\n" ]
[ "def params(self, *params, **kwparams):...\n", "def decorate(ruleinfo):...\n", "ruleinfo.params = params, kwparams\n", "return ruleinfo\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Assign'", "Return'" ]
[ "@Endpoint('GET', '/cephfs/filesystems')...\n", "return CephFS.list_filesystems()\n" ]
[ "@Endpoint('GET', '/cephfs/filesystems')...\n", "return CephFS.list_filesystems()\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_0(self):...\n", "if not LoginRequest.test_success(self):\n", "return False\n", "VAR_4 = re.compile('Failed to log in.')\n", "if VAR_4.search(self.res_data) is not None:\n", "return False\n", "VAR_5 = re.compile(self.username)\n", "if VAR_5.search(self.res_data) is None:\n", "return False\n", "return True\n" ]
[ "def test_success(self):...\n", "if not LoginRequest.test_success(self):\n", "return False\n", "fail_re = re.compile('Failed to log in.')\n", "if fail_re.search(self.res_data) is not None:\n", "return False\n", "username_re = re.compile(self.username)\n", "if username_re.search(self.res_data) is None:\n", "return False\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_11():...\n", "VAR_8 = logging.getLogger(__name__)\n", "VAR_8.setLevel(logging.DEBUG)\n", "VAR_36 = argparse.ArgumentParser()\n", "VAR_36.add_argument('--config', '-c', type=str, default='test.yaml', help=\n 'YAML config file. see sample-config.yaml. Default: test.yaml')\n", "VAR_37 = VAR_36.add_subparsers(dest='cmd')\n", "VAR_38 = VAR_37.add_parser('edit', help=\n 'Launches the editor to edit or create new systems and components')\n", "VAR_39 = VAR_37.add_parser('run', help=\n 'Launches the setup specified by the --config argument')\n", "VAR_40 = VAR_37.add_parser('validate', help=\n 'Validate the setup specified by the --config argument')\n", "VAR_41 = VAR_37.add_parser('slave', help=\n \"\"\"Run a component locally without controlling it. The control is taken care of the remote master invoking this command.\nIf run with the --kill flag, the passed component will be killed\"\"\"\n )\n", "VAR_40.add_argument('--visual', help='Generate and show a graph image',\n action='store_true')\n", "VAR_42 = VAR_41.add_mutually_exclusive_group(required=False)\n", "VAR_42.add_argument('-k', '--kill', help='switch to kill mode', action=\n 'store_true')\n", "VAR_42.add_argument('-c', '--check', help='Run a component check', action=\n 'store_true')\n", "VAR_43 = VAR_36.parse_args()\n", "VAR_8.debug(VAR_43)\n", "if VAR_43.cmd == 'edit':\n", "VAR_8.debug('Launching editor mode')\n", "if VAR_43.cmd == 'run':\n", "VAR_8.debug('Launching runner mode')\n", "if VAR_43.cmd == 'validate':\n", "VAR_61 = CLASS_1(VAR_43.config)\n", "VAR_8.debug('Launching validation mode')\n", "if VAR_43.cmd == 'slave':\n", "VAR_61.init()\n", "VAR_61 = CLASS_1(VAR_43.config)\n", "VAR_8.debug('Launching slave mode')\n", "FUNC_12(VAR_61)\n", "if VAR_43.visual:\n", "VAR_63 = CLASS_2(VAR_43.config, VAR_43.kill, VAR_43.check)\n", "VAR_61.set_dependencies(False)\n", "VAR_61.set_dependencies(True)\n", "if VAR_43.check:\n", "VAR_61.draw_graph()\n", "VAR_63.run_check()\n", "VAR_63.init()\n" ]
[ "def main():...\n", "logger = logging.getLogger(__name__)\n", "logger.setLevel(logging.DEBUG)\n", "parser = argparse.ArgumentParser()\n", "parser.add_argument('--config', '-c', type=str, default='test.yaml', help=\n 'YAML config file. see sample-config.yaml. Default: test.yaml')\n", "subparsers = parser.add_subparsers(dest='cmd')\n", "subparser_editor = subparsers.add_parser('edit', help=\n 'Launches the editor to edit or create new systems and components')\n", "subparser_run = subparsers.add_parser('run', help=\n 'Launches the setup specified by the --config argument')\n", "subparser_val = subparsers.add_parser('validate', help=\n 'Validate the setup specified by the --config argument')\n", "subparser_remote = subparsers.add_parser('slave', help=\n \"\"\"Run a component locally without controlling it. The control is taken care of the remote master invoking this command.\nIf run with the --kill flag, the passed component will be killed\"\"\"\n )\n", "subparser_val.add_argument('--visual', help=\n 'Generate and show a graph image', action='store_true')\n", "remote_mutex = subparser_remote.add_mutually_exclusive_group(required=False)\n", "remote_mutex.add_argument('-k', '--kill', help='switch to kill mode',\n action='store_true')\n", "remote_mutex.add_argument('-c', '--check', help='Run a component check',\n action='store_true')\n", "args = parser.parse_args()\n", "logger.debug(args)\n", "if args.cmd == 'edit':\n", "logger.debug('Launching editor mode')\n", "if args.cmd == 'run':\n", "logger.debug('Launching runner mode')\n", "if args.cmd == 'validate':\n", "cc = ControlCenter(args.config)\n", "logger.debug('Launching validation mode')\n", "if args.cmd == 'slave':\n", "cc.init()\n", "cc = ControlCenter(args.config)\n", "logger.debug('Launching slave mode')\n", "start_gui(cc)\n", "if args.visual:\n", "sl = SlaveLauncher(args.config, args.kill, args.check)\n", "cc.set_dependencies(False)\n", "cc.set_dependencies(True)\n", "if args.check:\n", "cc.draw_graph()\n", "sl.run_check()\n", "sl.init()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Assign'", "Expr'", "Condition", "Expr'", "Assign'", "Expr'", "Expr'", "Condition", "Assign'", "Expr'", "Expr'", "Condition", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_8(VAR_13, VAR_16):...\n", "VAR_35 = subprocess.Popen([VAR_13], stdin=subprocess.PIPE, VAR_36=\n subprocess.PIPE, VAR_37=subprocess.PIPE)\n", "VAR_36, VAR_37 = VAR_35.communicate(json.dumps(VAR_16))\n", "VAR_35.wait()\n", "if VAR_35.returncode != 0:\n", "return VAR_36\n" ]
[ "def render_executable(path, config):...\n", "p = subprocess.Popen([path], stdin=subprocess.PIPE, stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n", "stdout, stderr = p.communicate(json.dumps(config))\n", "p.wait()\n", "if p.returncode != 0:\n", "return stdout\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Condition", "Return'" ]
[ "def FUNC_0(VAR_1, VAR_2, VAR_3):...\n", "VAR_6 = users.get_current_user()\n", "if VAR_6:\n", "VAR_26 = db.GqlQuery('SELECT * FROM LoginInformation WHERE user_id = :1',\n VAR_6.user_id()).get()\n", "VAR_26 = None\n", "VAR_3['current_li'] = VAR_26\n", "VAR_3['user'] = VAR_6\n", "VAR_3['logout_url'] = users.create_logout_url('/')\n", "VAR_3['login_url'] = users.create_login_url('/users/verify_user/')\n", "VAR_3['is_admin'] = users.is_current_user_admin()\n", "if VAR_6:\n", "VAR_27 = db.GqlQuery('SELECT * FROM LoginInformation WHERE user_id = :1',\n VAR_6.user_id()).get()\n", "VAR_7 = VAR_0.get_template(VAR_2)\n", "VAR_3['unread_messages'] = db.GqlQuery(\n 'SELECT * FROM Message WHERE recipient_id = :1 AND read = :2', VAR_6.\n user_id(), False).count()\n", "VAR_1.response.out.write(VAR_7.render(VAR_3))\n", "if VAR_27 and not VAR_27.is_active:\n", "VAR_2 = '/users/inactive_notification.html'\n" ]
[ "def render_template(handler_object, file_name, template_values):...\n", "user = users.get_current_user()\n", "if user:\n", "current_li = db.GqlQuery('SELECT * FROM LoginInformation WHERE user_id = :1',\n user.user_id()).get()\n", "current_li = None\n", "template_values['current_li'] = current_li\n", "template_values['user'] = user\n", "template_values['logout_url'] = users.create_logout_url('/')\n", "template_values['login_url'] = users.create_login_url('/users/verify_user/')\n", "template_values['is_admin'] = users.is_current_user_admin()\n", "if user:\n", "li = db.GqlQuery('SELECT * FROM LoginInformation WHERE user_id = :1', user.\n user_id()).get()\n", "template = jinja_environment.get_template(file_name)\n", "template_values['unread_messages'] = db.GqlQuery(\n 'SELECT * FROM Message WHERE recipient_id = :1 AND read = :2', user.\n user_id(), False).count()\n", "handler_object.response.out.write(template.render(template_values))\n", "if li and not li.is_active:\n", "file_name = '/users/inactive_notification.html'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Condition", "Assign'" ]
[ "def FUNC_18(*VAR_20, **VAR_11):...\n", "\"\"\"docstring\"\"\"\n", "VAR_32 = VAR_20[0]\n", "if len(VAR_20) == 1:\n", "VAR_53 = product\n", "if len(VAR_20) == 2:\n", "if isinstance(VAR_32, str):\n", "VAR_53 = VAR_20[1]\n", "VAR_32 = [VAR_32]\n", "def FUNC_36(VAR_11):...\n", "for VAR_51, VAR_58 in VAR_11.items():\n", "if isinstance(VAR_58, str) or not isinstance(VAR_58, Iterable):\n", "return [VAR_10.format(**comb) for comb in map(dict, VAR_53(*FUNC_36(VAR_11)\n )) for VAR_10 in VAR_32]\n", "VAR_58 = [VAR_58]\n", "yield [(VAR_51, VAR_16) for VAR_16 in VAR_58]\n" ]
[ "def expand(*args, **wildcards):...\n", "\"\"\"docstring\"\"\"\n", "filepatterns = args[0]\n", "if len(args) == 1:\n", "combinator = product\n", "if len(args) == 2:\n", "if isinstance(filepatterns, str):\n", "combinator = args[1]\n", "filepatterns = [filepatterns]\n", "def flatten(wildcards):...\n", "for wildcard, values in wildcards.items():\n", "if isinstance(values, str) or not isinstance(values, Iterable):\n", "return [filepattern.format(**comb) for comb in map(dict, combinator(*\n flatten(wildcards))) for filepattern in filepatterns]\n", "values = [values]\n", "yield [(wildcard, value) for value in values]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "FunctionDef'", "For", "Condition", "Return'", "Assign'", "Expr'" ]
[ "@VAR_0.route('/level-1/info')...\n", "if int(current_user.progress) > 1:\n", "return render_template('info-pages/level-1.html')\n", "return redirect('/')\n" ]
[ "@app.route('/level-1/info')...\n", "if int(current_user.progress) > 1:\n", "return render_template('info-pages/level-1.html')\n", "return redirect('/')\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Return'" ]
[ "def FUNC_8(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_11 = VAR_1[:]\n", "VAR_11.extend(FUNC_7(self))\n", "VAR_13 = ()\n", "if self.status == 'valid' and self.signed_status != 'aborted':\n", "VAR_13 += 'geninv.estimation',\n", "if self.status == 'valid':\n", "VAR_13 += 'set_signed_status.estimation',\n", "if self.status == 'valid' and self.signed_status != 'signed' and not self.geninv:\n", "VAR_13 += 'set_date.estimation',\n", "if VAR_13:\n", "VAR_11.append((Allow, 'group:admin', VAR_13))\n", "if self.status != 'valid':\n", "VAR_11.append((Allow, 'group:manager', VAR_13))\n", "VAR_11.append((Allow, 'group:estimation_validation', ('valid.estimation',)))\n", "VAR_11.extend(FUNC_6(self))\n", "VAR_11.append((Deny, 'group:estimation_validation', ('wait.estimation',)))\n", "for user in self.company.employees:\n", "VAR_12 = ()\n", "return VAR_11\n", "if self.status == 'valid':\n", "VAR_12 += 'set_signed_status.estimation',\n", "if VAR_12:\n", "if not self.signed_status == 'aborted':\n", "VAR_11.append((Allow, user.login, VAR_12))\n", "VAR_12 += 'geninv.estimation',\n" ]
[ "def get_estimation_default_acl(self):...\n", "\"\"\"docstring\"\"\"\n", "acl = DEFAULT_PERM_NEW[:]\n", "acl.extend(_get_admin_status_acl(self))\n", "admin_perms = ()\n", "if self.status == 'valid' and self.signed_status != 'aborted':\n", "admin_perms += 'geninv.estimation',\n", "if self.status == 'valid':\n", "admin_perms += 'set_signed_status.estimation',\n", "if self.status == 'valid' and self.signed_status != 'signed' and not self.geninv:\n", "admin_perms += 'set_date.estimation',\n", "if admin_perms:\n", "acl.append((Allow, 'group:admin', admin_perms))\n", "if self.status != 'valid':\n", "acl.append((Allow, 'group:manager', admin_perms))\n", "acl.append((Allow, 'group:estimation_validation', ('valid.estimation',)))\n", "acl.extend(_get_user_status_acl(self))\n", "acl.append((Deny, 'group:estimation_validation', ('wait.estimation',)))\n", "for user in self.company.employees:\n", "perms = ()\n", "return acl\n", "if self.status == 'valid':\n", "perms += 'set_signed_status.estimation',\n", "if perms:\n", "if not self.signed_status == 'aborted':\n", "acl.append((Allow, user.login, perms))\n", "perms += 'geninv.estimation',\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Assign'", "Condition", "AugAssign'", "Condition", "AugAssign'", "Condition", "AugAssign'", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "For", "Assign'", "Return'", "Condition", "AugAssign'", "Condition", "Condition", "Expr'", "AugAssign'" ]
[ "@VAR_5.setter...\n", "self._benchmark = IOFile(VAR_5, VAR_16=self)\n" ]
[ "@benchmark.setter...\n", "self._benchmark = IOFile(benchmark, rule=self)\n" ]
[ 0, 0 ]
[ "Condition", "Assign'" ]
[ "def FUNC_18(self, VAR_14, *VAR_11, **VAR_12):...\n", "\"\"\"docstring\"\"\"\n", "if not VAR_14 == 'options':\n", "self.client.lineSend(self.csessid, [VAR_14, VAR_11, VAR_12])\n" ]
[ "def send_default(self, cmdname, *args, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "if not cmdname == 'options':\n", "self.client.lineSend(self.csessid, [cmdname, args, kwargs])\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Expr'" ]
[ "@integration_synonym_api...\n", "FUNC_2(VAR_2)\n", "FUNC_3(VAR_2, 'DOUBLE J AVIATION LTD.', VAR_7='1')\n", "FUNC_5(VAR_4, VAR_5, VAR_11='TABLE', VAR_10=[{'name': '----TABLE'}])\n" ]
[ "@integration_synonym_api...\n", "clean_database(solr)\n", "seed_database_with(solr, 'DOUBLE J AVIATION LTD.', id='1')\n", "verify_results(client, jwt, query='TABLE', expected=[{'name': '----TABLE'}])\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_5(self):...\n", "VAR_28 = self.state.client_table()\n", "self.local_scheduler_id_to_ip_map = {}\n", "for local_scheduler_info in VAR_28:\n", "VAR_36 = local_scheduler_info.get('DBClientID') or local_scheduler_info[\n 'ClientID']\n", "VAR_44 = (local_scheduler_info.get('AuxAddress') or local_scheduler_info[\n 'NodeManagerAddress']).split(':')[0]\n", "self.local_scheduler_id_to_ip_map[VAR_36] = VAR_44\n" ]
[ "def update_local_scheduler_map(self):...\n", "local_schedulers = self.state.client_table()\n", "self.local_scheduler_id_to_ip_map = {}\n", "for local_scheduler_info in local_schedulers:\n", "client_id = local_scheduler_info.get('DBClientID') or local_scheduler_info[\n 'ClientID']\n", "ip_address = (local_scheduler_info.get('AuxAddress') or\n local_scheduler_info['NodeManagerAddress']).split(':')[0]\n", "self.local_scheduler_id_to_ip_map[client_id] = ip_address\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_4(self, *VAR_4, **VAR_6):...\n", "\"\"\"docstring\"\"\"\n", "for VAR_9 in VAR_4:\n", "self._set_inoutput_item(VAR_9)\n", "for VAR_10, VAR_9 in VAR_6.items():\n", "self._set_inoutput_item(VAR_9, VAR_10=name)\n" ]
[ "def set_input(self, *input, **kwinput):...\n", "\"\"\"docstring\"\"\"\n", "for item in input:\n", "self._set_inoutput_item(item)\n", "for name, item in kwinput.items():\n", "self._set_inoutput_item(item, name=name)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "For", "Expr'", "For", "Expr'" ]
[ "def FUNC_21(self):...\n", "@eqlx.with_timeout...\n", "return 'no timeout'\n" ]
[ "def test_with_timeout(self):...\n", "@eqlx.with_timeout...\n", "return 'no timeout'\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'" ]
[ "\"\"\" A simple implementation of a directed acyclic graph \"\"\"\n", "def __init__(self):...\n", "self.nodes = []\n", "self.edges = []\n", "def __contains__(self, VAR_0):...\n", "for node in self.nodes:\n", "if node['node_object'] == VAR_0:\n", "return False\n", "return True\n" ]
[ "\"\"\" A simple implementation of a directed acyclic graph \"\"\"\n", "def __init__(self):...\n", "self.nodes = []\n", "self.edges = []\n", "def __contains__(self, obj):...\n", "for node in self.nodes:\n", "if node['node_object'] == obj:\n", "return False\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "For", "Condition", "Return'", "Return'" ]
[ "def FUNC_10(self):...\n", "VAR_7 = '200'\n", "self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n", "VAR_1 = {'name': self.volume_name, 'size': 100}\n", "self.driver._eql_execute('volume', 'select', VAR_1['name'], 'size', '%sG' %\n VAR_7)\n", "self.mox.ReplayAll()\n", "self.driver.extend_volume(VAR_1, VAR_7)\n" ]
[ "def test_extend_volume(self):...\n", "new_size = '200'\n", "self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n", "volume = {'name': self.volume_name, 'size': 100}\n", "self.driver._eql_execute('volume', 'select', volume['name'], 'size', '%sG' %\n new_size)\n", "self.mox.ReplayAll()\n", "self.driver.extend_volume(volume, new_size)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "return sum(map(len, VAR_3.values()))\n" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "return sum(map(len, wildcards.values()))\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "return gate.new_agent(VAR_1)\n" ]
[ "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "return gate.new_agent(bindings)\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "def FUNC_39(self):...\n", "\"\"\"docstring\"\"\"\n", "self.cursor = self.cnxn.cursor()\n", "self.cursor.execute('create table t1(a int, b char(3))')\n", "self.cnxn.commit()\n", "self.cursor.execute(\"insert into t1 values(1, 'abc')\")\n", "VAR_24 = self.cursor.execute('select * from t1').fetchone()\n", "self.assertEqual(self.cursor.description, VAR_24.cursor_description)\n" ]
[ "def test_row_description(self):...\n", "\"\"\"docstring\"\"\"\n", "self.cursor = self.cnxn.cursor()\n", "self.cursor.execute('create table t1(a int, b char(3))')\n", "self.cnxn.commit()\n", "self.cursor.execute(\"insert into t1 values(1, 'abc')\")\n", "row = self.cursor.execute('select * from t1').fetchone()\n", "self.assertEqual(self.cursor.description, row.cursor_description)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_15(self):...\n", "self.run_test_case(self.scenario.delete_load_balancer(VAR_5=False),\n max_retries=5)\n" ]
[ "def test_y_delete_load_balancer_pub(self):...\n", "self.run_test_case(self.scenario.delete_load_balancer(use_vpc=False),\n max_retries=5)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "@integration_synonym_api...\n", "FUNC_2(VAR_2)\n", "FUNC_3(VAR_2, 'HELENAH WU & CO. INC.', VAR_7='1')\n", "FUNC_3(VAR_2, 'A BETTER WAY HERBALS LTD.', VAR_7='2')\n", "FUNC_5(VAR_4, VAR_5, VAR_11='EH', VAR_10=[{'name': '----EH'}])\n" ]
[ "@integration_synonym_api...\n", "clean_database(solr)\n", "seed_database_with(solr, 'HELENAH WU & CO. INC.', id='1')\n", "seed_database_with(solr, 'A BETTER WAY HERBALS LTD.', id='2')\n", "verify_results(client, jwt, query='EH', expected=[{'name': '----EH'}])\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_9(self, VAR_11):...\n", "\"\"\"docstring\"\"\"\n", "VAR_29 = get_agent_module(VAR_11)\n", "if hasattr(VAR_29, 'add_cmdline_args'):\n", "if hasattr(VAR_29, 'dictionary_class'):\n", "VAR_29.add_cmdline_args(self)\n", "VAR_6 = FUNC_2(VAR_29.dictionary_class())\n", "self.set_defaults(dict_class=s)\n" ]
[ "def add_model_subargs(self, model):...\n", "\"\"\"docstring\"\"\"\n", "agent = get_agent_module(model)\n", "if hasattr(agent, 'add_cmdline_args'):\n", "if hasattr(agent, 'dictionary_class'):\n", "agent.add_cmdline_args(self)\n", "s = class2str(agent.dictionary_class())\n", "self.set_defaults(dict_class=s)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Condition", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_4(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_27 = 'application/json', 'application/json; charset=utf-8'\n", "if self.request.headers.get('Content-Type').lower() not in VAR_27:\n", "VAR_38 = \"Expecting JSON body with content type 'application/json'\"\n", "VAR_28 = json.loads(self.request.body)\n", "self.abort(400, 'Not a valid json dict body')\n", "return VAR_28\n", "self.abort(400, VAR_38)\n", "if not isinstance(VAR_28, dict):\n" ]
[ "def parse_body(self):...\n", "\"\"\"docstring\"\"\"\n", "expected = 'application/json', 'application/json; charset=utf-8'\n", "if self.request.headers.get('Content-Type').lower() not in expected:\n", "msg = \"Expecting JSON body with content type 'application/json'\"\n", "body = json.loads(self.request.body)\n", "self.abort(400, 'Not a valid json dict body')\n", "return body\n", "self.abort(400, msg)\n", "if not isinstance(body, dict):\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Return'", "Expr'", "Condition" ]
[ "def FUNC_3(self) ->str:...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = self.cleaned_data['repo']\n", "if not VAR_6:\n", "self.raise_repo_validation_error(VAR_6)\n", "VAR_11 = urlparse(self.cleaned_data['repo'])\n", "if VAR_11.scheme.lower() not in ('https', 'http', ''):\n", "self.raise_repo_validation_error(VAR_6)\n", "if VAR_11.netloc.lower() not in ('github.com', ''):\n", "self.raise_repo_validation_error(VAR_6)\n", "VAR_12 = re.match(\n '^((github\\\\.com/)|/)?([a-z\\\\d](?:[a-z\\\\d]|-(?=[a-z\\\\d])){0,38})/([\\\\w_-]+)/?$'\n , VAR_11.path, re.I)\n", "if not VAR_12:\n", "self.raise_repo_validation_error(VAR_6)\n", "VAR_12 = typing.cast(typing.Match, VAR_12)\n", "return '{}/{}'.format(VAR_12[3], VAR_12[4])\n" ]
[ "def clean_repo(self) ->str:...\n", "\"\"\"docstring\"\"\"\n", "repo = self.cleaned_data['repo']\n", "if not repo:\n", "self.raise_repo_validation_error(repo)\n", "github_url = urlparse(self.cleaned_data['repo'])\n", "if github_url.scheme.lower() not in ('https', 'http', ''):\n", "self.raise_repo_validation_error(repo)\n", "if github_url.netloc.lower() not in ('github.com', ''):\n", "self.raise_repo_validation_error(repo)\n", "repo_match = re.match(\n '^((github\\\\.com/)|/)?([a-z\\\\d](?:[a-z\\\\d]|-(?=[a-z\\\\d])){0,38})/([\\\\w_-]+)/?$'\n , github_url.path, re.I)\n", "if not repo_match:\n", "self.raise_repo_validation_error(repo)\n", "repo_match = typing.cast(typing.Match, repo_match)\n", "return '{}/{}'.format(repo_match[3], repo_match[4])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Expr'", "Assign'", "Condition", "Expr'", "Condition", "Expr'", "Assign'", "Condition", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_0(self, VAR_20):...\n", "\"\"\"docstring\"\"\"\n", "for VAR_21, directories, filenames in os.walk(VAR_20):\n", "for VAR_32 in filenames:\n", "VAR_72 = os.path.join(VAR_21, VAR_32)\n", "VAR_73 = '/' + os.path.relpath(VAR_72, VAR_20)\n", "VAR_24 = self.getfile(VAR_73, VAR_23=False)\n", "if VAR_24 and VAR_24[VAR_2] == VAR_13:\n", "self.update_realfile(VAR_24, VAR_72)\n" ]
[ "def init_honeyfs(self, honeyfs_path):...\n", "\"\"\"docstring\"\"\"\n", "for path, directories, filenames in os.walk(honeyfs_path):\n", "for filename in filenames:\n", "realfile_path = os.path.join(path, filename)\n", "virtual_path = '/' + os.path.relpath(realfile_path, honeyfs_path)\n", "f = self.getfile(virtual_path, follow_symlinks=False)\n", "if f and f[A_TYPE] == T_FILE:\n", "self.update_realfile(f, realfile_path)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "For", "For", "Assign'", "Assign'", "Assign'", "Condition", "Expr'" ]
[ "def FUNC_0(self, VAR_13):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_13.json is None:\n", "if not self.handle_view_exception(e):\n", "return True\n", "return True\n", "VAR_14 = Record(VAR_13.json, VAR_13=model)\n", "flash(_('Failed to delete record. %(error)s', error=str(e)), category='error')\n", "db.session.rollback()\n", "VAR_14.delete()\n", "return False\n", "db.session.commit()\n" ]
[ "def delete_model(self, model):...\n", "\"\"\"docstring\"\"\"\n", "if model.json is None:\n", "if not self.handle_view_exception(e):\n", "return True\n", "return True\n", "record = Record(model.json, model=model)\n", "flash(_('Failed to delete record. %(error)s', error=str(e)), category='error')\n", "db.session.rollback()\n", "record.delete()\n", "return False\n", "db.session.commit()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Condition", "Return'", "Return'", "Assign'", "Expr'", "Expr'", "Expr'", "Return'", "Expr'" ]
[ "def FUNC_53(self):...\n", "VAR_13 = self.cursor.execute('select ?', None).fetchone()[0]\n", "self.assertEqual(VAR_13, None)\n" ]
[ "def test_untyped_none(self):...\n", "value = self.cursor.execute('select ?', None).fetchone()[0]\n", "self.assertEqual(value, None)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_1(self):...\n", "return 'check user_test %s' % self.user_test_id\n" ]
[ "def describe(self):...\n", "return 'check user_test %s' % self.user_test_id\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_37(self, VAR_69):...\n", "if not_iterable(VAR_69):\n", "return flag(os.path.join(self.workdir, VAR_69), 'subworkflow', self)\n", "return [self.target(VAR_0) for VAR_0 in VAR_69]\n" ]
[ "def target(self, paths):...\n", "if not_iterable(paths):\n", "return flag(os.path.join(self.workdir, paths), 'subworkflow', self)\n", "return [self.target(path) for path in paths]\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_5(self, VAR_5):...\n", "self.connect()\n", "VAR_7 = \"DELETE FROM badwords WHERE word='%s'\" % VAR_5\n", "self.cursor.execute(VAR_7)\n", "self.close()\n" ]
[ "def delete(self, targetWord):...\n", "self.connect()\n", "sqlFormula = \"DELETE FROM badwords WHERE word='%s'\" % targetWord\n", "self.cursor.execute(sqlFormula)\n", "self.close()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "@data(*VAR_3)...\n", "VAR_12 = get_street_name(VAR_1)\n", "self.assertEqual(VAR_5, VAR_12)\n" ]
[ "@data(*address_test_data)...\n", "output_string = get_street_name(input_string)\n", "self.assertEqual(desired_string2, output_string)\n" ]
[ 0, 0, 0 ]
[ "Condition", "Assign'", "Expr'" ]
[ "def FUNC_10(self):...\n", "\"\"\"docstring\"\"\"\n", "if not self.reached_corner[0]:\n", "self.reached_corner[0] = self.navloc.takePathToDest(self.destination[0].x,\n self.destination[0].y)\n", "if self.navloc.takePathToDest(self.destination[1].x, self.destination[1].y):\n", "if self.reached_corner[0]:\n", "self.reached_corner[0] = False\n", "self.logArrival('office 1', self.destination[0].x, self.destination[0].y)\n", "self.logArrival('office 2', self.destination[1].x, self.destination[1].y)\n" ]
[ "def testPath(self):...\n", "\"\"\"docstring\"\"\"\n", "if not self.reached_corner[0]:\n", "self.reached_corner[0] = self.navloc.takePathToDest(self.destination[0].x,\n self.destination[0].y)\n", "if self.navloc.takePathToDest(self.destination[1].x, self.destination[1].y):\n", "if self.reached_corner[0]:\n", "self.reached_corner[0] = False\n", "self.logArrival('office 1', self.destination[0].x, self.destination[0].y)\n", "self.logArrival('office 2', self.destination[1].x, self.destination[1].y)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_6(VAR_6):...\n", "VAR_50 = {'rc': 1, 'response': VAR_6}\n", "return HttpResponse(json.dumps(VAR_50))\n" ]
[ "def say_no(error_msg):...\n", "ajax_response = {'rc': 1, 'response': error_msg}\n", "return HttpResponse(json.dumps(ajax_response))\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_1(self, VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "VAR_9 = None\n", "if VAR_3 in vars(self):\n", "VAR_9 = getattr(self, VAR_3)\n", "return VAR_9\n" ]
[ "def get_val(self, attr_name):...\n", "\"\"\"docstring\"\"\"\n", "value = None\n", "if attr_name in vars(self):\n", "value = getattr(self, attr_name)\n", "return value\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_7(self, VAR_7):...\n", "return self._job_error_text[VAR_7]\n" ]
[ "def copy_error_text(self, job_id):...\n", "return self._job_error_text[job_id]\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2(VAR_1, VAR_2, VAR_3='', VAR_4=None):...\n", "\"\"\"docstring\"\"\"\n", "return FUNC_4(VAR_1, VAR_2['oauth_token'], VAR_6=resp['oauth_token_secret'],\n VAR_4=extra_data, VAR_3=token_type)\n" ]
[ "def oauth1_token_setter(remote, resp, token_type='', extra_data=None):...\n", "\"\"\"docstring\"\"\"\n", "return token_setter(remote, resp['oauth_token'], secret=resp[\n 'oauth_token_secret'], extra_data=extra_data, token_type=token_type)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_5(self):...\n", "self.client.login(username=self.tester.username, password='password')\n", "VAR_1 = self.client.post(self.many_comments_url, {'comment': 'new comment',\n 'run': []})\n", "self.assertJSONEqual(str(VAR_1.content, encoding=settings.DEFAULT_CHARSET),\n {'rc': 1, 'response': 'No runs selected.'})\n", "VAR_1 = self.client.post(self.many_comments_url, {'comment': 'new comment'})\n", "self.assertJSONEqual(str(VAR_1.content, encoding=settings.DEFAULT_CHARSET),\n {'rc': 1, 'response': 'No runs selected.'})\n" ]
[ "def test_refuse_if_missing_no_case_run_pk(self):...\n", "self.client.login(username=self.tester.username, password='password')\n", "response = self.client.post(self.many_comments_url, {'comment':\n 'new comment', 'run': []})\n", "self.assertJSONEqual(str(response.content, encoding=settings.\n DEFAULT_CHARSET), {'rc': 1, 'response': 'No runs selected.'})\n", "response = self.client.post(self.many_comments_url, {'comment': 'new comment'})\n", "self.assertJSONEqual(str(response.content, encoding=settings.\n DEFAULT_CHARSET), {'rc': 1, 'response': 'No runs selected.'})\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_32(VAR_39, VAR_49=None):...\n", "VAR_39.returncode = VAR_1\n", "json.dump(VAR_22, VAR_32)\n", "return 0\n" ]
[ "def wait(self2, timeout=None):...\n", "self2.returncode = returncode\n", "json.dump(result, f)\n", "return 0\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_6(VAR_16, VAR_18, VAR_17):...\n", "\"\"\"docstring\"\"\"\n", "VAR_47 = {}\n", "VAR_46 = reader.KeywordToken\n", "for VAR_76, v in keyworder.get_author_keywords(VAR_16, VAR_18, VAR_17).items():\n", "VAR_47[VAR_46(VAR_76, type='author-kw')] = v\n", "return VAR_47\n" ]
[ "def extract_author_keywords(skw_db, ckw_db, fulltext):...\n", "\"\"\"docstring\"\"\"\n", "akw = {}\n", "K = reader.KeywordToken\n", "for k, v in keyworder.get_author_keywords(skw_db, ckw_db, fulltext).items():\n", "akw[K(k, type='author-kw')] = v\n", "return akw\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "For", "Assign'", "Return'" ]
[ "def FUNC_5(VAR_2, VAR_3):...\n", "VAR_2.browser.quit()\n", "FUNC_6(VAR_2)\n", "VAR_2.last_mail = None\n" ]
[ "def after_feature(context, feature):...\n", "context.browser.quit()\n", "cleanup_all_mails(context)\n", "context.last_mail = None\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Assign'" ]
[ "def FUNC_37(self, VAR_51):...\n", "VAR_69, VAR_73 = self.post('/mails/read', json.dumps({'idents': VAR_51}))\n", "return VAR_69\n" ]
[ "def mark_many_as_read(self, idents):...\n", "res, req = self.post('/mails/read', json.dumps({'idents': idents}))\n", "return res\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_15(VAR_11):...\n", "import datetime\n", "return datetime.datetime.strptime(VAR_11, VAR_0)\n" ]
[ "def string_to_date(date_string):...\n", "import datetime\n", "return datetime.datetime.strptime(date_string, DATE_FORMAT)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Import'", "Return'" ]
[ "def __init__(self, *VAR_0, VAR_1=None, VAR_2=None):...\n", "\"\"\"docstring\"\"\"\n", "if len(VAR_0) == 2:\n", "VAR_10, VAR_41 = VAR_0\n", "if len(VAR_0) == 1:\n", "self.name = VAR_10\n", "VAR_17 = VAR_0[0]\n", "self.workflow = VAR_41\n", "self.name = VAR_17.name\n", "self.docstring = None\n", "self.workflow = VAR_17.workflow\n", "self.message = None\n", "self.docstring = VAR_17.docstring\n", "self._input = InputFiles()\n", "self.message = VAR_17.message\n", "self._output = OutputFiles()\n", "self._input = InputFiles(VAR_17._input)\n", "self._params = Params()\n", "self._output = OutputFiles(VAR_17._output)\n", "self.dependencies = dict()\n", "self._params = Params(VAR_17._params)\n", "self.dynamic_output = set()\n", "self.dependencies = dict(VAR_17.dependencies)\n", "self.dynamic_input = set()\n", "self.dynamic_output = set(VAR_17.dynamic_output)\n", "self.temp_output = set()\n", "self.dynamic_input = set(VAR_17.dynamic_input)\n", "self.protected_output = set()\n", "self.temp_output = set(VAR_17.temp_output)\n", "self.touch_output = set()\n", "self.protected_output = set(VAR_17.protected_output)\n", "self.subworkflow_input = dict()\n", "self.touch_output = set(VAR_17.touch_output)\n", "self.resources = dict(_cores=1, _nodes=1)\n", "self.subworkflow_input = dict(VAR_17.subworkflow_input)\n", "self.priority = 0\n", "self.resources = VAR_17.resources\n", "self.version = None\n", "self.priority = VAR_17.priority\n", "self._log = Log()\n", "self.version = VAR_17.version\n", "self._benchmark = None\n", "self._log = VAR_17._log\n", "self.wildcard_names = set()\n", "self._benchmark = VAR_17._benchmark\n", "self.lineno = VAR_1\n", "self.wildcard_names = set(VAR_17.wildcard_names)\n", "self.snakefile = VAR_2\n", "self.lineno = VAR_17.lineno\n", "self.run_func = None\n", "self.snakefile = VAR_17.snakefile\n", "self.shellcmd = None\n", "self.run_func = VAR_17.run_func\n", "self.norun = False\n", "self.shellcmd = VAR_17.shellcmd\n", "self.norun = VAR_17.norun\n" ]
[ "def __init__(self, *args, lineno=None, snakefile=None):...\n", "\"\"\"docstring\"\"\"\n", "if len(args) == 2:\n", "name, workflow = args\n", "if len(args) == 1:\n", "self.name = name\n", "other = args[0]\n", "self.workflow = workflow\n", "self.name = other.name\n", "self.docstring = None\n", "self.workflow = other.workflow\n", "self.message = None\n", "self.docstring = other.docstring\n", "self._input = InputFiles()\n", "self.message = other.message\n", "self._output = OutputFiles()\n", "self._input = InputFiles(other._input)\n", "self._params = Params()\n", "self._output = OutputFiles(other._output)\n", "self.dependencies = dict()\n", "self._params = Params(other._params)\n", "self.dynamic_output = set()\n", "self.dependencies = dict(other.dependencies)\n", "self.dynamic_input = set()\n", "self.dynamic_output = set(other.dynamic_output)\n", "self.temp_output = set()\n", "self.dynamic_input = set(other.dynamic_input)\n", "self.protected_output = set()\n", "self.temp_output = set(other.temp_output)\n", "self.touch_output = set()\n", "self.protected_output = set(other.protected_output)\n", "self.subworkflow_input = dict()\n", "self.touch_output = set(other.touch_output)\n", "self.resources = dict(_cores=1, _nodes=1)\n", "self.subworkflow_input = dict(other.subworkflow_input)\n", "self.priority = 0\n", "self.resources = other.resources\n", "self.version = None\n", "self.priority = other.priority\n", "self._log = Log()\n", "self.version = other.version\n", "self._benchmark = None\n", "self._log = other._log\n", "self.wildcard_names = set()\n", "self._benchmark = other._benchmark\n", "self.lineno = lineno\n", "self.wildcard_names = set(other.wildcard_names)\n", "self.snakefile = snakefile\n", "self.lineno = other.lineno\n", "self.run_func = None\n", "self.snakefile = other.snakefile\n", "self.shellcmd = None\n", "self.run_func = other.run_func\n", "self.norun = False\n", "self.shellcmd = other.shellcmd\n", "self.norun = other.norun\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Docstring", "Assign'", "Assign'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_4(VAR_4, VAR_1, VAR_5, VAR_6=False, VAR_7=False, VAR_8=True, VAR_9...\n", "\"\"\"docstring\"\"\"\n", "VAR_20 = sys.stdout.isatty()\n", "VAR_0 = parse_settings(VAR_4, VAR_1)\n", "VAR_3 = FUNC_0(VAR_0)\n", "if VAR_2 and VAR_20:\n", "print('DEBUG enabled')\n", "VAR_26 = FUNC_1(VAR_0, VAR_1, VAR_2=debug)\n", "if VAR_20 and not VAR_9:\n", "VAR_26.close()\n", "VAR_18 = None\n", "print(\"\"\"\nStopping emonitor.\"\"\")\n", "if VAR_18 is not None:\n", "if VAR_6:\n", "VAR_18.close()\n", "if VAR_19 is not None:\n", "VAR_18 = FUNC_2(VAR_0, VAR_3)\n", "VAR_19 = None\n", "VAR_19.close()\n", "if VAR_7:\n", "VAR_19 = FUNC_3(VAR_0)\n", "if VAR_20:\n", "if not VAR_9:\n", "if VAR_8:\n", "print('Starting emonitor. Use Ctrl-C to stop. \\n')\n", "while True:\n", "print(','.join(VAR_3))\n", "if VAR_8:\n", "VAR_28 = tuple(VAR_26.read_data())\n", "VAR_30 = tuple(VAR_26.read_data())\n", "VAR_29 = all([(isinstance(v, str) and v.upper() == 'NULL') for v in VAR_28])\n", "if VAR_2:\n", "if not VAR_29:\n", "print(VAR_30)\n", "VAR_31 = len(str(VAR_30[0]))\n", "VAR_28 = (time.strftime('%Y-%m-%d %H:%M:%S'),) + VAR_28\n", "time.sleep(VAR_5)\n", "print(VAR_3[0].rjust(19) + ' \\t', '\\t '.join([col.rjust(VAR_31) for col in\n VAR_3[1:]]))\n", "if VAR_20:\n", "if not VAR_9:\n", "print(','.join(VAR_28))\n", "print('\\t '.join(VAR_28))\n", "if VAR_6:\n", "db_insert(VAR_18, TABLE, VAR_3, VAR_28, VAR_2=debug)\n", "if VAR_7:\n", "if not VAR_19.open:\n", "warnings.warn('SQL connection failed')\n", "VAR_19.connect()\n", "db_insert(VAR_19, VAR_0['sql_table'], VAR_3, VAR_28, VAR_2=debug)\n" ]
[ "def run(config, instrum, wait, output=False, sql=False, header=True, quiet=...\n", "\"\"\"docstring\"\"\"\n", "tty = sys.stdout.isatty()\n", "settings = parse_settings(config, instrum)\n", "columns = get_columns(settings)\n", "if debug and tty:\n", "print('DEBUG enabled')\n", "device = get_device(settings, instrum, debug=debug)\n", "if tty and not quiet:\n", "device.close()\n", "db = None\n", "print(\"\"\"\nStopping emonitor.\"\"\")\n", "if db is not None:\n", "if output:\n", "db.close()\n", "if sql_conn is not None:\n", "db = get_sqlite(settings, columns)\n", "sql_conn = None\n", "sql_conn.close()\n", "if sql:\n", "sql_conn = get_sql(settings)\n", "if tty:\n", "if not quiet:\n", "if header:\n", "print('Starting emonitor. Use Ctrl-C to stop. \\n')\n", "while True:\n", "print(','.join(columns))\n", "if header:\n", "values = tuple(device.read_data())\n", "test = tuple(device.read_data())\n", "is_null = all([(isinstance(v, str) and v.upper() == 'NULL') for v in values])\n", "if debug:\n", "if not is_null:\n", "print(test)\n", "str_width = len(str(test[0]))\n", "values = (time.strftime('%Y-%m-%d %H:%M:%S'),) + values\n", "time.sleep(wait)\n", "print(columns[0].rjust(19) + ' \\t', '\\t '.join([col.rjust(str_width) for\n col in columns[1:]]))\n", "if tty:\n", "if not quiet:\n", "print(','.join(values))\n", "print('\\t '.join(values))\n", "if output:\n", "db_insert(db, TABLE, columns, values, debug=debug)\n", "if sql:\n", "if not sql_conn.open:\n", "warnings.warn('SQL connection failed')\n", "sql_conn.connect()\n", "db_insert(sql_conn, settings['sql_table'], columns, values, debug=debug)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0, 0, 0, 4 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Condition", "Condition", "Expr'", "Condition", "Assign'", "Assign'", "Expr'", "Condition", "Assign'", "Condition", "Condition", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Condition", "Condition", "Expr'", "Expr'", "Condition", "Expr'", "Condition", "Condition", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_1(self):...\n", "def FUNC_34(self):...\n" ]
[ "def test_bad(self):...\n", "def get(self):...\n" ]
[ 0, 0 ]
[ "FunctionDef'", "FunctionDef'" ]
[ "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "return VAR_18._check_executable_command(VAR_0=cls.executable, VAR_19=cls.\n prerequisite_command, VAR_20=cls.prerequisite_fail_msg)\n" ]
[ "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "return cls._check_executable_command(executable=cls.executable, command=cls\n .prerequisite_command, fail_msg=cls.prerequisite_fail_msg)\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "@VAR_2.route('/data/setasummary/')...\n", "VAR_28 = FUNC_0()\n", "VAR_32 = VAR_28.cursor()\n", "VAR_41 = 'select distinct date from seta'\n", "VAR_32.execute(VAR_41)\n", "VAR_51 = {}\n", "VAR_6 = []\n", "for VAR_71 in VAR_32.fetchall():\n", "VAR_6.append(VAR_71[0])\n", "for VAR_71 in VAR_6:\n", "VAR_41 = \"select count(id) from seta where date='%s'\" % VAR_71\n", "return {'dates': VAR_51}\n", "VAR_32.execute(VAR_41)\n", "VAR_64 = VAR_32.fetchall()\n", "VAR_51['%s' % VAR_71] = '%s' % VAR_64[0]\n" ]
[ "@app.route('/data/setasummary/')...\n", "db = create_db_connnection()\n", "cursor = db.cursor()\n", "query = 'select distinct date from seta'\n", "cursor.execute(query)\n", "retVal = {}\n", "dates = []\n", "for d in cursor.fetchall():\n", "dates.append(d[0])\n", "for d in dates:\n", "query = \"select count(id) from seta where date='%s'\" % d\n", "return {'dates': retVal}\n", "cursor.execute(query)\n", "results = cursor.fetchall()\n", "retVal['%s' % d] = '%s' % results[0]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "For", "Expr'", "For", "Assign'", "Return'", "Expr'", "Assign'", "Assign'" ]
[ "@staticmethod...\n", "return VAR_2 + chr(random.randint(194, 239))\n" ]
[ "@staticmethod...\n", "return s + chr(random.randint(194, 239))\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_8(self, *VAR_32, **VAR_33):...\n", "VAR_43 = 3\n", "while True:\n", "return self.execute(*VAR_32, **kwargs)\n", "VAR_0.warning('Write to ClickHouse failed: %s (%d retries)', str(e), VAR_43)\n", "if VAR_43 <= 0:\n", "VAR_43 -= 1\n", "if self.metrics:\n", "self.metrics.increment('clickhouse.network-error')\n", "time.sleep(1)\n", "VAR_0.warning('Write to ClickHouse failed: %s (retrying)', str(e))\n", "if e.code == errors.ErrorCodes.TOO_MANY_SIMULTANEOUS_QUERIES:\n", "if self.metrics:\n", "self.metrics.increment('clickhouse.too-many-queries')\n", "time.sleep(1)\n" ]
[ "def execute_robust(self, *args, **kwargs):...\n", "retries = 3\n", "while True:\n", "return self.execute(*args, **kwargs)\n", "logger.warning('Write to ClickHouse failed: %s (%d retries)', str(e), retries)\n", "if retries <= 0:\n", "retries -= 1\n", "if self.metrics:\n", "self.metrics.increment('clickhouse.network-error')\n", "time.sleep(1)\n", "logger.warning('Write to ClickHouse failed: %s (retrying)', str(e))\n", "if e.code == errors.ErrorCodes.TOO_MANY_SIMULTANEOUS_QUERIES:\n", "if self.metrics:\n", "self.metrics.increment('clickhouse.too-many-queries')\n", "time.sleep(1)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Expr'", "Condition", "AugAssign'", "Condition", "Expr'", "Expr'", "Expr'", "Condition", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_11(VAR_2):...\n", "VAR_2.cursor.execute('string')\n", "VAR_22 = VAR_2.cursor.fetchall()\n", "VAR_25 = set()\n", "VAR_26 = []\n", "for VAR_27 in VAR_22:\n", "VAR_16, VAR_5, VAR_6, VAR_35, VAR_8 = VAR_27\n", "return VAR_26\n", "if (VAR_5, VAR_6) in VAR_25:\n", "VAR_25.add((VAR_5, VAR_6))\n", "VAR_26.append(VAR_0(*VAR_27))\n" ]
[ "def get_all_newest(db):...\n", "db.cursor.execute(\n \"\"\"SELECT id, sheet_id, student_id, time, files_path FROM\n submission ORDER BY time DESC\"\"\"\n )\n", "rows = db.cursor.fetchall()\n", "registered = set()\n", "submissions = []\n", "for row in rows:\n", "id, sheet_id, student_id, time, files_path = row\n", "return submissions\n", "if (sheet_id, student_id) in registered:\n", "registered.add((sheet_id, student_id))\n", "submissions.append(Submission(*row))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Return'", "Condition", "Expr'", "Expr'" ]
[ "def __init__(self, VAR_0):...\n", "self.bot = VAR_0\n", "print('Addon \"{}\" loaded'.format(self.__class__.__name__))\n" ]
[ "def __init__(self, bot):...\n", "self.bot = bot\n", "print('Addon \"{}\" loaded'.format(self.__class__.__name__))\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_4(self, VAR_14):...\n", "\"\"\"docstring\"\"\"\n", "VAR_28 = VAR_14.groupdict()\n", "if isinstance(self.severity_map, dict) and 'severity' in VAR_28 and VAR_28[\n", "VAR_28['severity'] = self.severity_map[VAR_28['severity']]\n", "return VAR_28\n" ]
[ "def _get_groupdict(self, match):...\n", "\"\"\"docstring\"\"\"\n", "groups = match.groupdict()\n", "if isinstance(self.severity_map, dict) and 'severity' in groups and groups[\n", "groups['severity'] = self.severity_map[groups['severity']]\n", "return groups\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def __init__(self, VAR_6, VAR_7=None):...\n", "super().__init__(VAR_6, VAR_7)\n", "self.type = 'int'\n" ]
[ "def __init__(self, name, display_name=None):...\n", "super().__init__(name, display_name)\n", "self.type = 'int'\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'" ]
[ "def FUNC_14(VAR_20):...\n", "VAR_40 = FUNC_8('task/%d' % VAR_20)\n", "VAR_41 = VAR_40.text\n", "VAR_42 = re.search('id=\"title_dataset_([0-9]+).* \\\\(Live\\\\)</', VAR_41)\n", "if VAR_42 is None:\n", "VAR_47 = int(VAR_42.groups()[0])\n", "return VAR_47\n" ]
[ "def get_task_active_dataset_id(task_id):...\n", "resp = admin_req('task/%d' % task_id)\n", "page = resp.text\n", "match = re.search('id=\"title_dataset_([0-9]+).* \\\\(Live\\\\)</', page)\n", "if match is None:\n", "dataset_id = int(match.groups()[0])\n", "return dataset_id\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_4(VAR_15, VAR_16):...\n", "VAR_33 = {}\n", "for in_file, VAR_45 in VAR_15:\n", "VAR_33[VAR_45] = FUNC_5(in_file, VAR_16)\n", "return VAR_33\n" ]
[ "def build_tree(templates, config):...\n", "res = {}\n", "for in_file, out_file in templates:\n", "res[out_file] = render_template(in_file, config)\n", "return res\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Assign'", "Return'" ]