lines: sequence, lengths 1 to 444
raw_lines: sequence, lengths 1 to 444
label: sequence, lengths 1 to 444
type: sequence, lengths 1 to 444
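Each record below stores four parallel sequences: lines (the source with identifiers obfuscated to placeholders such as VAR_0 and FUNC_1), raw_lines (the original source), label (one integer per line, 0 for unmarked lines), and type (the AST node kind of each line). A minimal sketch of reading such records, assuming a JSON-lines file with the field names from the schema above; the file name and storage layout are assumptions, not part of this dump:

```python
import json

# Minimal sketch, assuming one JSON object per line with the four
# parallel fields from the schema above; "records.jsonl" is a
# hypothetical file name.
with open("records.jsonl") as fh:
    for raw in fh:
        record = json.loads(raw)
        rows = zip(record["lines"], record["raw_lines"],
                   record["label"], record["type"])
        for obfuscated, original, label, node_type in rows:
            # Non-zero labels mark flagged lines (e.g. the label 4 on a
            # string-concatenated SQL statement in a record below).
            if label != 0:
                print(node_type, label, original.rstrip())
```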
[ "def FUNC_1(self):...\n", "self.assertTrue(self.client.login(VAR_8='autotest', VAR_9='password'))\n" ]
[ "def setUp(self):...\n", "self.assertTrue(self.client.login(username='autotest', password='password'))\n" ]
[ 0, 5 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_9(self, VAR_21, VAR_27, VAR_28, VAR_29, VAR_30, VAR_31=None):...\n", "\"\"\"docstring\"\"\"\n", "if self.newcount > 10000:\n", "return False\n", "if VAR_31 is None:\n", "VAR_31 = time.time()\n", "VAR_58 = self.get_path(os.path.dirname(VAR_21))\n", "VAR_59 = os.path.basename(VAR_21)\n", "if VAR_59 in [x[VAR_1] for x in VAR_58]:\n", "VAR_58.remove([x for x in VAR_58 if x[VAR_1] == VAR_59][0])\n", "VAR_58.append([VAR_59, VAR_13, VAR_27, VAR_28, VAR_29, VAR_30, VAR_31, [],\n None, None])\n", "self.newcount += 1\n", "return True\n" ]
[ "def mkfile(self, path, uid, gid, size, mode, ctime=None):...\n", "\"\"\"docstring\"\"\"\n", "if self.newcount > 10000:\n", "return False\n", "if ctime is None:\n", "ctime = time.time()\n", "dir = self.get_path(os.path.dirname(path))\n", "outfile = os.path.basename(path)\n", "if outfile in [x[A_NAME] for x in dir]:\n", "dir.remove([x for x in dir if x[A_NAME] == outfile][0])\n", "dir.append([outfile, T_FILE, uid, gid, size, mode, ctime, [], None, None])\n", "self.newcount += 1\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Condition", "Assign'", "Assign'", "Assign'", "For", "Expr'", "Expr'", "AugAssign'", "Return'" ]
[ "@api.model...\n", "\"\"\"docstring\"\"\"\n", "VAR_27 = VAR_32.get('email')\n", "if VAR_27:\n", "VAR_32['email'] = VAR_27.strip()\n", "VAR_52 = self.search(['|', '&', ('email', '=', VAR_32.get('email')), (\n 'email', '!=', False), '&', '&', ('firstname', 'ilike', VAR_32.get(\n 'firstname')), ('lastname', 'ilike', VAR_32.get('lastname')), ('zip',\n '=', VAR_32.get('zip'))])\n", "VAR_53 = [(4, itm.id) for itm in VAR_52]\n", "VAR_32.update({'partner_duplicate_ids': VAR_53})\n", "VAR_32['ref'] = self.env['ir.sequence'].get('partner.ref')\n", "VAR_54 = super(CLASS_0, self.with_context(mail_create_nosubscribe=True)\n ).create(VAR_32)\n", "VAR_54.compute_geopoint()\n", "if VAR_54.contact_type == 'attached' and not VAR_32.get('active'):\n", "VAR_54.active = False\n", "return VAR_54\n" ]
[ "@api.model...\n", "\"\"\"docstring\"\"\"\n", "email = vals.get('email')\n", "if email:\n", "vals['email'] = email.strip()\n", "duplicate = self.search(['|', '&', ('email', '=', vals.get('email')), (\n 'email', '!=', False), '&', '&', ('firstname', 'ilike', vals.get(\n 'firstname')), ('lastname', 'ilike', vals.get('lastname')), ('zip', '=',\n vals.get('zip'))])\n", "duplicate_ids = [(4, itm.id) for itm in duplicate]\n", "vals.update({'partner_duplicate_ids': duplicate_ids})\n", "vals['ref'] = self.env['ir.sequence'].get('partner.ref')\n", "partner = super(ResPartner, self.with_context(mail_create_nosubscribe=True)\n ).create(vals)\n", "partner.compute_geopoint()\n", "if partner.contact_type == 'attached' and not vals.get('active'):\n", "partner.active = False\n", "return partner\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_21(VAR_16):...\n", "return (VAR_16.input, VAR_16.dynamic_input) if VAR_4 else (VAR_16.output,\n VAR_16.dynamic_output)\n" ]
[ "def get_io(rule):...\n", "return (rule.input, rule.dynamic_input) if input else (rule.output, rule.\n dynamic_output)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_8():...\n", "VAR_9 = FUNC_0()\n", "VAR_10 = VAR_9.cursor()\n", "VAR_10.execute(' SELECT name, karma FROM people ORDER BY karma DESC LIMIT 5 ')\n", "VAR_2.error('Execution failed with error: {}'.format(e))\n", "VAR_18 = VAR_10.fetchall()\n", "VAR_2.debug('fetched top karma values')\n", "VAR_9.close()\n", "return VAR_18\n" ]
[ "def karma_top():...\n", "db = db_connect()\n", "cursor = db.cursor()\n", "cursor.execute(' SELECT name, karma FROM people ORDER BY karma DESC LIMIT 5 ')\n", "logger.error('Execution failed with error: {}'.format(e))\n", "leaders = cursor.fetchall()\n", "logger.debug('fetched top karma values')\n", "db.close()\n", "return leaders\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "@rest_utils.ajax()...\n", "\"\"\"docstring\"\"\"\n", "VAR_10 = api.nova.server_group_list(VAR_1)\n", "return {'items': [u.to_dict() for u in VAR_10]}\n" ]
[ "@rest_utils.ajax()...\n", "\"\"\"docstring\"\"\"\n", "result = api.nova.server_group_list(request)\n", "return {'items': [u.to_dict() for u in result]}\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_27(VAR_13):...\n", "self.assertEqual('/request', VAR_13.path)\n", "VAR_12.append('not_applicable')\n", "return None\n" ]
[ "def not_applicable(request):...\n", "self.assertEqual('/request', request.path)\n", "calls.append('not_applicable')\n", "return None\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_4(self):...\n", "self.ensure_one()\n", "VAR_5 = self._get_state_domain()\n", "VAR_27 = False\n", "if self.automaton:\n", "VAR_33 = self.env['crapo.transition'].search([('automaton', '=', self.\n automaton.id), ('from_state', '=', self.state.id)])\n", "VAR_5.append(('sequence', '>', self.state.sequence))\n", "VAR_34 = VAR_33.mapped(lambda x: x.to_state.id)\n", "VAR_27 = self.env['crapo.state'].search(VAR_5, limit=1)\n", "if VAR_34:\n", "return VAR_27\n", "VAR_5.append(('id', 'in', VAR_34))\n", "VAR_27 = self.env['crapo.state'].search(VAR_5)\n" ]
[ "def _next_states(self):...\n", "self.ensure_one()\n", "domain = self._get_state_domain()\n", "next_states = False\n", "if self.automaton:\n", "eligible_transitions = self.env['crapo.transition'].search([('automaton',\n '=', self.automaton.id), ('from_state', '=', self.state.id)])\n", "domain.append(('sequence', '>', self.state.sequence))\n", "target_ids = eligible_transitions.mapped(lambda x: x.to_state.id)\n", "next_states = self.env['crapo.state'].search(domain, limit=1)\n", "if target_ids:\n", "return next_states\n", "domain.append(('id', 'in', target_ids))\n", "next_states = self.env['crapo.state'].search(domain)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "Return'", "Expr'", "Assign'" ]
[ "@app.route('/metric/api/v1.0/metric/get')...\n", "\"\"\"docstring\"\"\"\n", "VAR_0 = getMetric(request.args.get('fromtime', None), request.args.get(\n 'totime', None), request.args.get('origin', None), request.args.get(\n 'key', None), request.args.get('count', None), (request.args.get(\n 'order', 'Time'), bool(request.args.get('desc', True))))\n", "return jsonify({'results': VAR_0, 'resultcount': len(VAR_0)})\n" ]
[ "@app.route('/metric/api/v1.0/metric/get')...\n", "\"\"\"docstring\"\"\"\n", "res = getMetric(request.args.get('fromtime', None), request.args.get(\n 'totime', None), request.args.get('origin', None), request.args.get(\n 'key', None), request.args.get('count', None), (request.args.get(\n 'order', 'Time'), bool(request.args.get('desc', True))))\n", "return jsonify({'results': res, 'resultcount': len(res)})\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Return'" ]
[ "\"\"\"\nPython scripte to list items from MySQL\n\"\"\"\n", "import MySQLdb\n", "from sys import argv\n", "if __name__ == '__main__':\n", "VAR_0 = MySQLdb.connect(host='localhost', port=3306, user=argv[1], passwd=\n argv[2], VAR_0=argv[3], charset='utf8')\n", "VAR_1 = VAR_0.cursor()\n", "VAR_1.execute('SELECT * FROM states WHERE name LIKE %s ORDER BY id ASC',\n (argv[4],))\n", "for rows in VAR_1.fetchall():\n", "print(rows)\n", "VAR_1.close()\n", "VAR_0.close()\n" ]
[ "\"\"\"\nPython scripte to list items from MySQL\n\"\"\"\n", "import MySQLdb\n", "from sys import argv\n", "if __name__ == '__main__':\n", "db = MySQLdb.connect(host='localhost', port=3306, user=argv[1], passwd=argv\n [2], db=argv[3], charset='utf8')\n", "c = db.cursor()\n", "c.execute('SELECT * FROM states WHERE name LIKE %s ORDER BY id ASC', (\n argv[4],))\n", "for rows in c.fetchall():\n", "print(rows)\n", "c.close()\n", "db.close()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "ImportFrom'", "Condition", "Assign'", "Assign'", "Expr'", "For", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_0(VAR_0):...\n", "print(VAR_0, file=sys.stderr)\n" ]
[ "def log_error(error):...\n", "print(error, file=sys.stderr)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def __repr__(self):...\n", "return self.get_name()\n" ]
[ "def __repr__(self):...\n", "return self.get_name()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __sinit__(self):...\n", "self.ctx = zmq.Context()\n", "super().__sinit__()\n" ]
[ "def __sinit__(self):...\n", "self.ctx = zmq.Context()\n", "super().__sinit__()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "@utils.synchronized('3par', external=True)...\n", "self.common.client_login()\n", "self.common.delete_snapshot(VAR_9)\n", "self.common.client_logout()\n" ]
[ "@utils.synchronized('3par', external=True)...\n", "self.common.client_login()\n", "self.common.delete_snapshot(snapshot)\n", "self.common.client_logout()\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_7(self, VAR_21):...\n", "VAR_22 = \"SELECT * FROM log_query WHERE log_hash = '%s' LIMIT 1\" % VAR_21\n", "self.cur.execute(VAR_22)\n", "self.conn.commit()\n", "VAR_25 = self.cur.fetchone()\n", "return VAR_25\n" ]
[ "def get_query_by_loghash(self, loghash):...\n", "sql = \"SELECT * FROM log_query WHERE log_hash = '%s' LIMIT 1\" % loghash\n", "self.cur.execute(sql)\n", "self.conn.commit()\n", "query = self.cur.fetchone()\n", "return query\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Assign'", "Return'" ]
[ "@eqlx.with_timeout...\n", "time.sleep(1)\n" ]
[ "@eqlx.with_timeout...\n", "time.sleep(1)\n" ]
[ 0, 0 ]
[ "Condition", "Expr'" ]
[ "def FUNC_6(self, VAR_8=None):...\n", "if not self.title:\n", "self.title = VAR_8\n", "VAR_3 = (\"UPDATE jdk_entries SET title = '\" + self.title + \"'\" +\n \"WHERE jdk_entries.id = '\" + self.entry_id + \"';\")\n", "FUNC_0(VAR_3)\n", "self.update_date_modified()\n", "return None\n" ]
[ "def update_title(self, title=None):...\n", "if not self.title:\n", "self.title = title\n", "sql = (\"UPDATE jdk_entries SET title = '\" + self.title + \"'\" +\n \"WHERE jdk_entries.id = '\" + self.entry_id + \"';\")\n", "db_execute(sql)\n", "self.update_date_modified()\n", "return None\n" ]
[ 0, 0, 0, 4, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "@command...\n", "\"\"\"docstring\"\"\"\n", "prnt('Bye', VIOLET)\n", "exit(0)\n" ]
[ "@command...\n", "\"\"\"docstring\"\"\"\n", "prnt('Bye', VIOLET)\n", "exit(0)\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Expr'", "Expr'" ]
[ "import os\n", "from kubernetes import client, config\n", "import pytest\n", "from pytest_bdd import scenario, then, parsers\n", "import yaml\n", "from tests import utils\n", "@pytest.fixture...\n", "config.load_kube_config(config_file=kubeconfig)\n", "VAR_4 = client.CoreV1Api()\n", "VAR_5 = os.path.join(os.path.realpath(os.path.dirname(__file__)), 'files',\n 'busybox.yaml')\n", "VAR_6 = yaml.safe_load(pod_fd)\n", "VAR_4.create_namespaced_pod(body=pod_manifest_content, namespace='default')\n", "def FUNC_3():...\n", "VAR_7 = VAR_4.read_namespaced_pod(name='busybox', namespace='default')\n", "assert VAR_7.status.phase == 'Running', \"Wrong status for 'busybox' Pod - found {status}\".format(\n status=pod_info.status.phase)\n", "utils.retry(FUNC_3, times=10)\n", "yield 'busybox'\n", "VAR_4.delete_namespaced_pod(name='busybox', namespace='default', body=\n client.V1DeleteOptions())\n", "@scenario('../features/dns_resolution.feature', 'check DNS')...\n", "@then(parsers.parse(\"the hostname '{hostname}' should be resolved\"))...\n", "VAR_8 = (\n 'kubectl --kubeconfig=/etc/kubernetes/admin.conf exec -ti {0} nslookup {1}'\n .format(pod_name, VAR_3))\n", "VAR_9 = VAR_1.run(VAR_8)\n", "assert VAR_9.rc == 0, 'Cannot resolve {}'.format(VAR_3)\n" ]
[ "import os\n", "from kubernetes import client, config\n", "import pytest\n", "from pytest_bdd import scenario, then, parsers\n", "import yaml\n", "from tests import utils\n", "@pytest.fixture...\n", "config.load_kube_config(config_file=kubeconfig)\n", "k8s_client = client.CoreV1Api()\n", "pod_manifest = os.path.join(os.path.realpath(os.path.dirname(__file__)),\n 'files', 'busybox.yaml')\n", "pod_manifest_content = yaml.safe_load(pod_fd)\n", "k8s_client.create_namespaced_pod(body=pod_manifest_content, namespace='default'\n )\n", "def _check_status():...\n", "pod_info = k8s_client.read_namespaced_pod(name='busybox', namespace='default')\n", "assert pod_info.status.phase == 'Running', \"Wrong status for 'busybox' Pod - found {status}\".format(\n status=pod_info.status.phase)\n", "utils.retry(_check_status, times=10)\n", "yield 'busybox'\n", "k8s_client.delete_namespaced_pod(name='busybox', namespace='default', body=\n client.V1DeleteOptions())\n", "@scenario('../features/dns_resolution.feature', 'check DNS')...\n", "@then(parsers.parse(\"the hostname '{hostname}' should be resolved\"))...\n", "cmd_nslookup = (\n 'kubectl --kubeconfig=/etc/kubernetes/admin.conf exec -ti {0} nslookup {1}'\n .format(pod_name, hostname))\n", "res = host.run(cmd_nslookup)\n", "assert res.rc == 0, 'Cannot resolve {}'.format(hostname)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2 ]
[ "Import'", "ImportFrom'", "Import'", "ImportFrom'", "Import'", "ImportFrom'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assert'", "Expr'", "Expr'", "Expr'", "Condition", "Condition", "Assign'", "Assign'", "Assert'" ]
[ "def FUNC_11(self):...\n", "for string in self.test_strings:\n", "self.assertEqual(prepare_string_argument(string, 'WeIrD_O/S'), string)\n" ]
[ "def test_prepare_string_argument_unsupported(self):...\n", "for string in self.test_strings:\n", "self.assertEqual(prepare_string_argument(string, 'WeIrD_O/S'), string)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "For", "Expr'" ]
[ "async def FUNC_2(VAR_6):...\n", "VAR_7 = {'username': VAR_0, 'password': 'testing123G'}\n", "VAR_8 = await VAR_6.post('/users', VAR_7=json.dumps(data))\n", "assert VAR_8.status == 201\n" ]
[ "async def test_positive_register_(test_cli):...\n", "data = {'username': username, 'password': 'testing123G'}\n", "resp = await test_cli.post('/users', data=json.dumps(data))\n", "assert resp.status == 201\n" ]
[ 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Assign'", "Assign'", "Assert'" ]
[ "from datetime import date, datetime\n", "import simplejson as json\n", "import time\n", "from base import BaseTest\n", "from snuba.util import all_referenced_columns, column_expr, complex_column_expr, conditions_expr, escape_literal, tuplify, Timer\n", "def FUNC_0(self):...\n", "VAR_0 = {'granularity': 86400}\n", "assert column_expr('tags[foo]', VAR_0.copy()\n ) == \"(tags.value[indexOf(tags.key, 'foo')] AS `tags[foo]`)\"\n", "assert column_expr('tags[server_name]', VAR_0.copy()\n ) == '(server_name AS `tags[server_name]`)'\n", "assert column_expr('tags[app.device]', VAR_0.copy()\n ) == '(app_device AS `tags[app.device]`)'\n", "assert column_expr('tags_key', VAR_0.copy()\n ) == '(arrayJoin(tags.key) AS tags_key)'\n", "VAR_1 = {'groupby': ['tags_key', 'tags_value']}\n", "assert column_expr('tags_key', VAR_1\n ) == '(((arrayJoin(arrayMap((x,y) -> [x,y], tags.key, tags.value)) AS all_tags))[1] AS tags_key)'\n", "assert column_expr('time', VAR_0.copy()) == '(toDate(timestamp) AS time)'\n", "assert column_expr('col', VAR_0.copy(), aggregate='sum') == '(sum(col) AS col)'\n", "assert column_expr(None, VAR_0.copy(), alias='sum', aggregate='sum') == 'sum'\n", "assert column_expr('col', VAR_0.copy(), alias='summation', aggregate='sum'\n ) == '(sum(col) AS summation)'\n", "assert column_expr('', VAR_0.copy(), alias='count', aggregate='count()'\n ) == '(count() AS count)'\n", "assert column_expr('', VAR_0.copy(), alias='aggregate', aggregate='count()'\n ) == '(count() AS aggregate)'\n", "assert column_expr('sentry:release', VAR_0.copy()) == '`sentry:release`'\n", "assert column_expr('-timestamp', VAR_0.copy()) == '-timestamp'\n", "assert column_expr('-sentry:release', VAR_0.copy()) == '-`sentry:release`'\n", "assert column_expr(\"'hello world'\", VAR_0.copy()) == \"'hello world'\"\n", "assert column_expr(tuplify(['concat', ['a', \"':'\", 'b']]), VAR_0.copy()\n ) == \"concat(a, ':', b)\"\n", "VAR_2 = VAR_0.copy()\n", "assert column_expr('issue', VAR_2) == '(group_id AS issue)'\n", "def FUNC_1(self):...\n", "VAR_0 = {'groupby': ['tags_key', 'tags_value']}\n", "assert column_expr('tags_key', VAR_0\n ) == '(((arrayJoin(arrayMap((x,y) -> [x,y], tags.key, tags.value)) AS all_tags))[1] AS tags_key)'\n", "assert column_expr('tags_key', VAR_0) == 'tags_key'\n", "assert column_expr('tags_value', VAR_0) == '((all_tags)[2] AS tags_value)'\n", "def FUNC_2(self):...\n", "assert escape_literal(\"'\") == \"'\\\\''\"\n", "assert escape_literal(date(2001, 1, 1)) == \"toDate('2001-01-01')\"\n", "assert escape_literal(datetime(2001, 1, 1, 1, 1, 1)\n ) == \"toDateTime('2001-01-01T01:01:01')\"\n", "assert escape_literal([1, 'a', date(2001, 1, 1)]\n ) == \"(1, 'a', toDate('2001-01-01'))\"\n", "def FUNC_3(self):...\n", "VAR_3 = [['a', '=', 1]]\n", "assert conditions_expr(VAR_3, {}) == 'a = 1'\n", "VAR_3 = [[['a', '=', 1]]]\n", "assert conditions_expr(VAR_3, {}) == 'a = 1'\n", "VAR_3 = [['a', '=', 1], ['b', '=', 2]]\n", "assert conditions_expr(VAR_3, {}) == 'a = 1 AND b = 2'\n", "VAR_3 = [[['a', '=', 1], ['b', '=', 2]]]\n", "assert conditions_expr(VAR_3, {}) == '(a = 1 OR b = 2)'\n", "VAR_3 = [[['a', '=', 1], ['b', '=', 2]], ['c', '=', 3]]\n", "assert conditions_expr(VAR_3, {}) == '(a = 1 OR b = 2) AND c = 3'\n", "VAR_3 = [[['a', '=', 1], ['b', '=', 2]], [['c', '=', 3], ['d', '=', 4]]]\n", "assert conditions_expr(VAR_3, {}) == '(a = 1 OR b = 2) AND (c = 3 OR d = 4)'\n", "VAR_3 = [[['a', '=', 1], []]]\n", "assert conditions_expr(VAR_3, {}) == 'a = 1'\n", "VAR_3 = [[['tags[foo]', '=', 1], ['b', '=', 2]]]\n", "VAR_4 = 
column_expr('tags[foo]', {})\n", "assert conditions_expr(VAR_3, {}) == '({} = 1 OR b = 2)'.format(VAR_4)\n", "VAR_5 = {}\n", "VAR_3 = [[['tags[foo]', '=', 1], ['b', '=', 2]]]\n", "column_expr('tags[foo]', VAR_5)\n", "assert conditions_expr(VAR_3, VAR_5) == '(`tags[foo]` = 1 OR b = 2)'\n", "VAR_3 = [['primary_hash', 'LIKE', '%foo%']]\n", "assert conditions_expr(VAR_3, {}) == \"primary_hash LIKE '%foo%'\"\n", "VAR_3 = tuplify([[['notEmpty', ['arrayElement', ['exception_stacks.type', 1\n ]]], '=', 1]])\n", "assert conditions_expr(VAR_3, {}\n ) == 'notEmpty(arrayElement(exception_stacks.type, 1)) = 1'\n", "VAR_3 = tuplify([[['notEmpty', ['tags[sentry:user]']], '=', 1]])\n", "assert conditions_expr(VAR_3, {}\n ) == 'notEmpty((`sentry:user` AS `tags[sentry:user]`)) = 1'\n", "VAR_3 = tuplify([[['notEmpty', ['tags_key']], '=', 1]])\n", "assert conditions_expr(VAR_3, {}\n ) == 'notEmpty((arrayJoin(tags.key) AS tags_key)) = 1'\n", "VAR_3 = tuplify([[[['notEmpty', ['tags[sentry:environment]']], '=', 'dev'],\n [['notEmpty', ['tags[sentry:environment]']], '=', 'prod']], [[[\n 'notEmpty', ['tags[sentry:user]']], '=', 'joe'], [['notEmpty', [\n 'tags[sentry:user]']], '=', 'bob']]])\n", "assert conditions_expr(VAR_3, {}) == 'string'\n", "VAR_3 = [['exception_frames.filename', 'LIKE', '%foo%']]\n", "assert conditions_expr(VAR_3, {}\n ) == \"arrayExists(x -> assumeNotNull(x LIKE '%foo%'), exception_frames.filename)\"\n", "VAR_3 = [['exception_frames.filename', 'NOT LIKE', '%foo%']]\n", "assert conditions_expr(VAR_3, {}\n ) == \"arrayAll(x -> assumeNotNull(x NOT LIKE '%foo%'), exception_frames.filename)\"\n", "def FUNC_4(self):...\n", "VAR_0 = {'aggregations': [['topK(3)', 'logger', 'dupe_alias'], ['uniq',\n 'environment', 'dupe_alias']]}\n", "VAR_6 = [column_expr(col, VAR_0, alias, agg) for agg, col, alias in VAR_0[\n 'aggregations']]\n", "assert VAR_6 == ['(topK(3)(logger) AS dupe_alias)', 'dupe_alias']\n", "def FUNC_5(self):...\n", "VAR_0 = {}\n", "assert complex_column_expr(tuplify(['count', []]), VAR_0.copy()) == 'count()'\n", "assert complex_column_expr(tuplify(['notEmpty', ['foo']]), VAR_0.copy()\n ) == 'notEmpty(foo)'\n", "assert complex_column_expr(tuplify(['notEmpty', ['arrayElement', ['foo', 1]\n ]]), VAR_0.copy()) == 'notEmpty(arrayElement(foo, 1))'\n", "assert complex_column_expr(tuplify(['foo', ['bar', ['qux'], 'baz']]), VAR_0\n .copy()) == 'foo(bar(qux), baz)'\n", "assert complex_column_expr(tuplify(['foo', [], 'a']), VAR_0.copy()\n ) == '(foo() AS a)'\n", "assert complex_column_expr(tuplify(['foo', ['b', 'c'], 'd']), VAR_0.copy()\n ) == '(foo(b, c) AS d)'\n", "assert complex_column_expr(tuplify(['foo', ['b', 'c', ['d']]]), VAR_0.copy()\n ) == 'foo(b, c(d))'\n", "assert complex_column_expr(tuplify(['topK', [3], ['project_id']]), VAR_0.copy()\n ) == 'topK(3)(project_id)'\n", "assert complex_column_expr(tuplify(['topK', [3], ['project_id'], 'baz']),\n VAR_0.copy()) == '(topK(3)(project_id) AS baz)'\n", "assert complex_column_expr(tuplify(['emptyIfNull', ['project_id']]), VAR_0.\n copy()) == \"ifNull(project_id, '')\"\n", "assert complex_column_expr(tuplify(['emptyIfNull', ['project_id'], 'foo']),\n VAR_0.copy()) == \"(ifNull(project_id, '') AS foo)\"\n", "assert complex_column_expr(tuplify(['positionCaseInsensitive', ['message',\n \"'lol 'single' quotes'\"]]), VAR_0.copy()\n ) == \"positionCaseInsensitive(message, 'lol \\\\'single\\\\' quotes')\"\n", "def FUNC_6(self):...\n", "VAR_0 = {'conditions': [['a', '=', '1'], ['b', '=', '1']]}\n", "assert all_referenced_columns(VAR_0) == set(['a', 
'b'])\n", "VAR_0 = {'conditions': [['a', '=', '1'], [['b', '=', '1'], ['c', '=', '1']]]}\n", "assert all_referenced_columns(VAR_0) == set(['a', 'b', 'c'])\n", "VAR_0 = {'conditions': [['a', '=', '1'], [['b', '=', '1'], [['foo', ['c']],\n '=', '1']]]}\n", "assert all_referenced_columns(VAR_0) == set(['a', 'b', 'c'])\n", "VAR_0 = {'conditions': [['a', '=', '1'], [['b', '=', '1'], [['foo', ['c', [\n 'bar', ['d']]]], '=', '1']]]}\n", "assert all_referenced_columns(VAR_0) == set(['a', 'b', 'c', 'd'])\n", "VAR_0 = {'arrayjoin': 'tags_key', 'groupby': ['time', 'issue'], 'orderby':\n '-time', 'selected_columns': ['issue', 'time', ['foo', ['c', ['bar', [\n 'd']]]]], 'aggregations': [['uniq', 'tags_value', 'values_seen']]}\n", "assert all_referenced_columns(VAR_0) == set(['tags_key', 'tags_value',\n 'time', 'issue', 'c', 'd'])\n", "def FUNC_7(self):...\n", "VAR_7 = Timer()\n", "time.sleep(0.001)\n", "VAR_7.mark('thing1')\n", "time.sleep(0.001)\n", "VAR_7.mark('thing2')\n", "VAR_8 = VAR_7.finish()\n", "time.sleep(0.001)\n", "VAR_7.mark('thing1')\n", "time.sleep(0.001)\n", "VAR_7.mark('thing2')\n", "VAR_9 = VAR_7.finish()\n", "assert VAR_8['marks_ms'].keys() == VAR_9['marks_ms'].keys()\n", "assert VAR_8['marks_ms']['thing1'] < VAR_9['marks_ms']['thing1']\n", "assert VAR_8['marks_ms']['thing2'] < VAR_9['marks_ms']['thing2']\n" ]
[ "from datetime import date, datetime\n", "import simplejson as json\n", "import time\n", "from base import BaseTest\n", "from snuba.util import all_referenced_columns, column_expr, complex_column_expr, conditions_expr, escape_literal, tuplify, Timer\n", "def test_column_expr(self):...\n", "body = {'granularity': 86400}\n", "assert column_expr('tags[foo]', body.copy()\n ) == \"(tags.value[indexOf(tags.key, 'foo')] AS `tags[foo]`)\"\n", "assert column_expr('tags[server_name]', body.copy()\n ) == '(server_name AS `tags[server_name]`)'\n", "assert column_expr('tags[app.device]', body.copy()\n ) == '(app_device AS `tags[app.device]`)'\n", "assert column_expr('tags_key', body.copy()\n ) == '(arrayJoin(tags.key) AS tags_key)'\n", "tag_group_body = {'groupby': ['tags_key', 'tags_value']}\n", "assert column_expr('tags_key', tag_group_body\n ) == '(((arrayJoin(arrayMap((x,y) -> [x,y], tags.key, tags.value)) AS all_tags))[1] AS tags_key)'\n", "assert column_expr('time', body.copy()) == '(toDate(timestamp) AS time)'\n", "assert column_expr('col', body.copy(), aggregate='sum') == '(sum(col) AS col)'\n", "assert column_expr(None, body.copy(), alias='sum', aggregate='sum') == 'sum'\n", "assert column_expr('col', body.copy(), alias='summation', aggregate='sum'\n ) == '(sum(col) AS summation)'\n", "assert column_expr('', body.copy(), alias='count', aggregate='count()'\n ) == '(count() AS count)'\n", "assert column_expr('', body.copy(), alias='aggregate', aggregate='count()'\n ) == '(count() AS aggregate)'\n", "assert column_expr('sentry:release', body.copy()) == '`sentry:release`'\n", "assert column_expr('-timestamp', body.copy()) == '-timestamp'\n", "assert column_expr('-sentry:release', body.copy()) == '-`sentry:release`'\n", "assert column_expr(\"'hello world'\", body.copy()) == \"'hello world'\"\n", "assert column_expr(tuplify(['concat', ['a', \"':'\", 'b']]), body.copy()\n ) == \"concat(a, ':', b)\"\n", "group_id_body = body.copy()\n", "assert column_expr('issue', group_id_body) == '(group_id AS issue)'\n", "def test_alias_in_alias(self):...\n", "body = {'groupby': ['tags_key', 'tags_value']}\n", "assert column_expr('tags_key', body\n ) == '(((arrayJoin(arrayMap((x,y) -> [x,y], tags.key, tags.value)) AS all_tags))[1] AS tags_key)'\n", "assert column_expr('tags_key', body) == 'tags_key'\n", "assert column_expr('tags_value', body) == '((all_tags)[2] AS tags_value)'\n", "def test_escape(self):...\n", "assert escape_literal(\"'\") == \"'\\\\''\"\n", "assert escape_literal(date(2001, 1, 1)) == \"toDate('2001-01-01')\"\n", "assert escape_literal(datetime(2001, 1, 1, 1, 1, 1)\n ) == \"toDateTime('2001-01-01T01:01:01')\"\n", "assert escape_literal([1, 'a', date(2001, 1, 1)]\n ) == \"(1, 'a', toDate('2001-01-01'))\"\n", "def test_conditions_expr(self):...\n", "conditions = [['a', '=', 1]]\n", "assert conditions_expr(conditions, {}) == 'a = 1'\n", "conditions = [[['a', '=', 1]]]\n", "assert conditions_expr(conditions, {}) == 'a = 1'\n", "conditions = [['a', '=', 1], ['b', '=', 2]]\n", "assert conditions_expr(conditions, {}) == 'a = 1 AND b = 2'\n", "conditions = [[['a', '=', 1], ['b', '=', 2]]]\n", "assert conditions_expr(conditions, {}) == '(a = 1 OR b = 2)'\n", "conditions = [[['a', '=', 1], ['b', '=', 2]], ['c', '=', 3]]\n", "assert conditions_expr(conditions, {}) == '(a = 1 OR b = 2) AND c = 3'\n", "conditions = [[['a', '=', 1], ['b', '=', 2]], [['c', '=', 3], ['d', '=', 4]]]\n", "assert conditions_expr(conditions, {}\n ) == '(a = 1 OR b = 2) AND (c = 3 OR d = 4)'\n", "conditions = [[['a', '=', 1], 
[]]]\n", "assert conditions_expr(conditions, {}) == 'a = 1'\n", "conditions = [[['tags[foo]', '=', 1], ['b', '=', 2]]]\n", "expanded = column_expr('tags[foo]', {})\n", "assert conditions_expr(conditions, {}) == '({} = 1 OR b = 2)'.format(expanded)\n", "reuse_body = {}\n", "conditions = [[['tags[foo]', '=', 1], ['b', '=', 2]]]\n", "column_expr('tags[foo]', reuse_body)\n", "assert conditions_expr(conditions, reuse_body) == '(`tags[foo]` = 1 OR b = 2)'\n", "conditions = [['primary_hash', 'LIKE', '%foo%']]\n", "assert conditions_expr(conditions, {}) == \"primary_hash LIKE '%foo%'\"\n", "conditions = tuplify([[['notEmpty', ['arrayElement', [\n 'exception_stacks.type', 1]]], '=', 1]])\n", "assert conditions_expr(conditions, {}\n ) == 'notEmpty(arrayElement(exception_stacks.type, 1)) = 1'\n", "conditions = tuplify([[['notEmpty', ['tags[sentry:user]']], '=', 1]])\n", "assert conditions_expr(conditions, {}\n ) == 'notEmpty((`sentry:user` AS `tags[sentry:user]`)) = 1'\n", "conditions = tuplify([[['notEmpty', ['tags_key']], '=', 1]])\n", "assert conditions_expr(conditions, {}\n ) == 'notEmpty((arrayJoin(tags.key) AS tags_key)) = 1'\n", "conditions = tuplify([[[['notEmpty', ['tags[sentry:environment]']], '=',\n 'dev'], [['notEmpty', ['tags[sentry:environment]']], '=', 'prod']], [[[\n 'notEmpty', ['tags[sentry:user]']], '=', 'joe'], [['notEmpty', [\n 'tags[sentry:user]']], '=', 'bob']]])\n", "assert conditions_expr(conditions, {}\n ) == \"(notEmpty((tags.value[indexOf(tags.key, 'sentry:environment')] AS `tags[sentry:environment]`)) = 'dev' OR notEmpty(`tags[sentry:environment]`) = 'prod') AND (notEmpty((`sentry:user` AS `tags[sentry:user]`)) = 'joe' OR notEmpty(`tags[sentry:user]`) = 'bob')\"\n", "conditions = [['exception_frames.filename', 'LIKE', '%foo%']]\n", "assert conditions_expr(conditions, {}\n ) == \"arrayExists(x -> assumeNotNull(x LIKE '%foo%'), exception_frames.filename)\"\n", "conditions = [['exception_frames.filename', 'NOT LIKE', '%foo%']]\n", "assert conditions_expr(conditions, {}\n ) == \"arrayAll(x -> assumeNotNull(x NOT LIKE '%foo%'), exception_frames.filename)\"\n", "def test_duplicate_expression_alias(self):...\n", "body = {'aggregations': [['topK(3)', 'logger', 'dupe_alias'], ['uniq',\n 'environment', 'dupe_alias']]}\n", "exprs = [column_expr(col, body, alias, agg) for agg, col, alias in body[\n 'aggregations']]\n", "assert exprs == ['(topK(3)(logger) AS dupe_alias)', 'dupe_alias']\n", "def test_complex_conditions_expr(self):...\n", "body = {}\n", "assert complex_column_expr(tuplify(['count', []]), body.copy()) == 'count()'\n", "assert complex_column_expr(tuplify(['notEmpty', ['foo']]), body.copy()\n ) == 'notEmpty(foo)'\n", "assert complex_column_expr(tuplify(['notEmpty', ['arrayElement', ['foo', 1]\n ]]), body.copy()) == 'notEmpty(arrayElement(foo, 1))'\n", "assert complex_column_expr(tuplify(['foo', ['bar', ['qux'], 'baz']]), body.\n copy()) == 'foo(bar(qux), baz)'\n", "assert complex_column_expr(tuplify(['foo', [], 'a']), body.copy()\n ) == '(foo() AS a)'\n", "assert complex_column_expr(tuplify(['foo', ['b', 'c'], 'd']), body.copy()\n ) == '(foo(b, c) AS d)'\n", "assert complex_column_expr(tuplify(['foo', ['b', 'c', ['d']]]), body.copy()\n ) == 'foo(b, c(d))'\n", "assert complex_column_expr(tuplify(['topK', [3], ['project_id']]), body.copy()\n ) == 'topK(3)(project_id)'\n", "assert complex_column_expr(tuplify(['topK', [3], ['project_id'], 'baz']),\n body.copy()) == '(topK(3)(project_id) AS baz)'\n", "assert complex_column_expr(tuplify(['emptyIfNull', ['project_id']]), body.\n 
copy()) == \"ifNull(project_id, '')\"\n", "assert complex_column_expr(tuplify(['emptyIfNull', ['project_id'], 'foo']),\n body.copy()) == \"(ifNull(project_id, '') AS foo)\"\n", "assert complex_column_expr(tuplify(['positionCaseInsensitive', ['message',\n \"'lol 'single' quotes'\"]]), body.copy()\n ) == \"positionCaseInsensitive(message, 'lol \\\\'single\\\\' quotes')\"\n", "def test_referenced_columns(self):...\n", "body = {'conditions': [['a', '=', '1'], ['b', '=', '1']]}\n", "assert all_referenced_columns(body) == set(['a', 'b'])\n", "body = {'conditions': [['a', '=', '1'], [['b', '=', '1'], ['c', '=', '1']]]}\n", "assert all_referenced_columns(body) == set(['a', 'b', 'c'])\n", "body = {'conditions': [['a', '=', '1'], [['b', '=', '1'], [['foo', ['c']],\n '=', '1']]]}\n", "assert all_referenced_columns(body) == set(['a', 'b', 'c'])\n", "body = {'conditions': [['a', '=', '1'], [['b', '=', '1'], [['foo', ['c', [\n 'bar', ['d']]]], '=', '1']]]}\n", "assert all_referenced_columns(body) == set(['a', 'b', 'c', 'd'])\n", "body = {'arrayjoin': 'tags_key', 'groupby': ['time', 'issue'], 'orderby':\n '-time', 'selected_columns': ['issue', 'time', ['foo', ['c', ['bar', [\n 'd']]]]], 'aggregations': [['uniq', 'tags_value', 'values_seen']]}\n", "assert all_referenced_columns(body) == set(['tags_key', 'tags_value',\n 'time', 'issue', 'c', 'd'])\n", "def test_timer(self):...\n", "t = Timer()\n", "time.sleep(0.001)\n", "t.mark('thing1')\n", "time.sleep(0.001)\n", "t.mark('thing2')\n", "snapshot = t.finish()\n", "time.sleep(0.001)\n", "t.mark('thing1')\n", "time.sleep(0.001)\n", "t.mark('thing2')\n", "snapshot_2 = t.finish()\n", "assert snapshot['marks_ms'].keys() == snapshot_2['marks_ms'].keys()\n", "assert snapshot['marks_ms']['thing1'] < snapshot_2['marks_ms']['thing1']\n", "assert snapshot['marks_ms']['thing2'] < snapshot_2['marks_ms']['thing2']\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Assign'", "Assert'", "Assert'", "Assert'", "Assert'", "Assign'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assign'", "Assert'", "FunctionDef'", "Assign'", "Assert'", "Assert'", "Assert'", "FunctionDef'", "Assert'", "Assert'", "Assert'", "Assert'", "FunctionDef'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assign'", "Assert'", "Assign'", "Assign'", "Expr'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "FunctionDef'", "Assign'", "Assign'", "Assert'", "FunctionDef'", "Assign'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'", "FunctionDef'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Assert'", "Assert'", "Assert'" ]
[ "def FUNC_0(VAR_0=None):...\n", "VAR_1 = frappe.db.sql_list('select name from `tabLeave Type` order by name asc'\n )\n", "VAR_3 = FUNC_1(VAR_1)\n", "VAR_4 = FUNC_2(VAR_0, VAR_1)\n", "return VAR_3, VAR_4\n" ]
[ "def execute(filters=None):...\n", "leave_types = frappe.db.sql_list(\n 'select name from `tabLeave Type` order by name asc')\n", "columns = get_columns(leave_types)\n", "data = get_data(filters, leave_types)\n", "return columns, data\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_1(self):...\n", "return 'submit user test %s for task %s (ID %d) %s' % (repr(self.filenames),\n self.task[1], self.task[0], self.url)\n" ]
[ "def describe(self):...\n", "return 'submit user test %s for task %s (ID %d) %s' % (repr(self.filenames),\n self.task[1], self.task[0], self.url)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2(self):...\n", "if self.resources is not None:\n", "assert 'CPU' not in self.resources, \"'CPU' should not be included in the resource dictionary. Use num_cpus instead.\"\n", "if self.num_workers is not None:\n", "assert 'GPU' not in self.resources, \"'GPU' should not be included in the resource dictionary. Use num_gpus instead.\"\n", "if self.include_java is None and self.java_worker_options is not None:\n" ]
[ "def _check_usage(self):...\n", "if self.resources is not None:\n", "assert 'CPU' not in self.resources, \"'CPU' should not be included in the resource dictionary. Use num_cpus instead.\"\n", "if self.num_workers is not None:\n", "assert 'GPU' not in self.resources, \"'GPU' should not be included in the resource dictionary. Use num_gpus instead.\"\n", "if self.include_java is None and self.java_worker_options is not None:\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assert'", "Condition", "Assert'", "Condition" ]
[ "def FUNC_1(VAR_0, VAR_1, VAR_3, VAR_4, VAR_5, VAR_6, **VAR_2):...\n", "if VAR_3 not in ('post_add', 'pre_remove'):\n", "return\n", "if VAR_4:\n", "if VAR_5 == Submission:\n", "FUNC_0(Submission, VAR_1)\n", "VAR_36 = set()\n", "for submission_pk in VAR_6:\n", "VAR_40 = Submission.objects.get(pk=submission_pk)\n", "VAR_8 = VAR_40.exercise.course_instance\n", "if VAR_8.pk not in VAR_36:\n", "CLASS_0.invalidate(VAR_8, VAR_1.user)\n", "VAR_36.add(VAR_8.pk)\n" ]
[ "def invalidate_content_m2m(sender, instance, action, reverse, model, pk_set,...\n", "if action not in ('post_add', 'pre_remove'):\n", "return\n", "if reverse:\n", "if model == Submission:\n", "invalidate_content(Submission, instance)\n", "seen_courses = set()\n", "for submission_pk in pk_set:\n", "submission = Submission.objects.get(pk=submission_pk)\n", "course_instance = submission.exercise.course_instance\n", "if course_instance.pk not in seen_courses:\n", "CachedPoints.invalidate(course_instance, instance.user)\n", "seen_courses.add(course_instance.pk)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Condition", "Condition", "Expr'", "Assign'", "For", "Assign'", "Assign'", "Condition", "Expr'", "Expr'" ]
[ "def __init__(self, VAR_18, VAR_35=10000, **VAR_16):...\n", "CLASS_17.__init__(self, VAR_18, VAR_35=length, VAR_36=errors.NO_MSG_BODY, **kw)\n" ]
[ "def __init__(self, item, length=10000, **kw):...\n", "VLength.__init__(self, item, length=length, empty_error=errors.NO_MSG_BODY,\n **kw)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def __str__(self):...\n", "return 'Could not delete ad.\\n' + super().__str__()\n" ]
[ "def __str__(self):...\n", "return 'Could not delete ad.\\n' + super().__str__()\n" ]
[ 0, 5 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_11(VAR_22, VAR_23):...\n", "if not VAR_23:\n", "return VAR_22\n", "for k in VAR_23.split('.'):\n", "if k in VAR_22 and isinstance(VAR_22[k], dict):\n", "return VAR_22\n", "VAR_22 = VAR_22[k]\n" ]
[ "def strip_hash(h, keys):...\n", "if not keys:\n", "return h\n", "for k in keys.split('.'):\n", "if k in h and isinstance(h[k], dict):\n", "return h\n", "h = h[k]\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "For", "Condition", "Return'", "Assign'" ]
[ "def __init__(self):...\n", "" ]
[ "def __init__(self):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_11(self):...\n", "if self.accept(CLASS_1.WORD):\n", "VAR_22 = self.last.lexeme\n", "return None\n", "VAR_21 = []\n", "while self.accept(CLASS_1.WORD):\n", "VAR_21.append(self.last.lexeme)\n", "VAR_43 = CLASS_8(VAR_22, VAR_21)\n", "VAR_33 = self.redirections()\n", "if VAR_33:\n", "VAR_43 = CLASS_9(VAR_43, VAR_33)\n", "if self.accept(CLASS_1.PIPE):\n", "return CLASS_7(VAR_43, self.command())\n", "return VAR_43\n" ]
[ "def command(self):...\n", "if self.accept(TokenType.WORD):\n", "command = self.last.lexeme\n", "return None\n", "args = []\n", "while self.accept(TokenType.WORD):\n", "args.append(self.last.lexeme)\n", "node = CommandNode(command, args)\n", "redirs = self.redirections()\n", "if redirs:\n", "node = RedirectionsNode(node, redirs)\n", "if self.accept(TokenType.PIPE):\n", "return PipeNode(node, self.command())\n", "return node\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Return'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_30(VAR_13):...\n", "self.fail('authenticate should not be called')\n" ]
[ "def skipped(request):...\n", "self.fail('authenticate should not be called')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def __init__(self, VAR_3, VAR_4, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "super(CLASS_2, self).__init__()\n", "self.relationship_direction = VAR_5\n", "self._data = VAR_3\n", "self._accessor = VAR_4\n", "self._relationships = {}\n" ]
[ "def __init__(self, data, accessor, relationship_direction):...\n", "\"\"\"docstring\"\"\"\n", "super(DataTraversalProxy, self).__init__()\n", "self.relationship_direction = relationship_direction\n", "self._data = data\n", "self._accessor = accessor\n", "self._relationships = {}\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "@wraps(VAR_7)...\n", "" ]
[ "@wraps(f)...\n", "" ]
[ 0, 0 ]
[ "Condition", "Condition" ]
[ "def __init__(self, VAR_42, VAR_43):...\n", "\"\"\"docstring\"\"\"\n", "self.key = VAR_42\n", "self.test_tags = iter(VAR_43)\n", "self.counter = {'tag': 0}\n" ]
[ "def __init__(self, key, test_tags):...\n", "\"\"\"docstring\"\"\"\n", "self.key = key\n", "self.test_tags = iter(test_tags)\n", "self.counter = {'tag': 0}\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_13():...\n", "from .log import init_logging\n", "init_logging()\n", "VAR_23 = argparse.ArgumentParser()\n", "VAR_23.add_argument('--admin', nargs='*', help=\n 'List of usernames set to be admin')\n", "VAR_23.add_argument('--user-requirements-txt-url', help=\n 'URL to a requirements.txt file that should be installed in the user enviornment'\n )\n", "VAR_23.add_argument('--plugin', nargs='*', help='Plugin pip-specs to install')\n", "VAR_24 = VAR_23.parse_args()\n", "VAR_18 = FUNC_10(VAR_24.plugin)\n", "FUNC_12(VAR_18)\n", "FUNC_7(VAR_24.admin)\n", "FUNC_5()\n", "FUNC_6(VAR_24.user_requirements_txt_url)\n", "VAR_1.info('Setting up JupyterHub...')\n", "FUNC_0()\n", "FUNC_4(HUB_ENV_PREFIX)\n", "FUNC_3()\n", "FUNC_2(HUB_ENV_PREFIX)\n", "FUNC_8()\n", "FUNC_9(HUB_ENV_PREFIX)\n", "FUNC_11(VAR_18, VAR_24.plugin)\n", "VAR_1.info('Done!')\n" ]
[ "def main():...\n", "from .log import init_logging\n", "init_logging()\n", "argparser = argparse.ArgumentParser()\n", "argparser.add_argument('--admin', nargs='*', help=\n 'List of usernames set to be admin')\n", "argparser.add_argument('--user-requirements-txt-url', help=\n 'URL to a requirements.txt file that should be installed in the user enviornment'\n )\n", "argparser.add_argument('--plugin', nargs='*', help=\n 'Plugin pip-specs to install')\n", "args = argparser.parse_args()\n", "pm = setup_plugins(args.plugin)\n", "ensure_config_yaml(pm)\n", "ensure_admins(args.admin)\n", "ensure_usergroups()\n", "ensure_user_environment(args.user_requirements_txt_url)\n", "logger.info('Setting up JupyterHub...')\n", "ensure_node()\n", "ensure_jupyterhub_package(HUB_ENV_PREFIX)\n", "ensure_jupyterlab_extensions()\n", "ensure_jupyterhub_service(HUB_ENV_PREFIX)\n", "ensure_jupyterhub_running()\n", "ensure_symlinks(HUB_ENV_PREFIX)\n", "run_plugin_actions(pm, args.plugin)\n", "logger.info('Done!')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "ImportFrom'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "@Json...\n", "if VAR_2._chk_error(errors.NO_TITLE):\n", "VAR_2._chk_error(errors.TITLE_TOO_LONG)\n", "VAR_2._chk_errors((errors.NO_LOCATION, errors.NO_DESCRIPTION, errors.\n INVALID_DATE, errors.NO_DATE))\n", "VAR_2._focus('title')\n", "if VAR_2.error:\n", "return\n", "VAR_0.title = VAR_4\n", "VAR_0.description = VAR_8\n", "VAR_0.location = VAR_9\n", "VAR_0.latitude = VAR_10\n", "VAR_0.longitude = VAR_11\n", "VAR_0.timestamp = VAR_12 / 1000\n", "VAR_0.tzoffset = VAR_13\n", "g.rendercache.invalidate_key_group(Meetup.group_cache_key())\n", "VAR_0._commit()\n", "VAR_18 = Link._byID(VAR_0.assoc_link)\n", "VAR_18._load()\n", "VAR_19 = VAR_18.url\n", "VAR_18.title = FUNC_1(VAR_0)\n", "VAR_18.article = FUNC_0(VAR_0)\n", "VAR_18._commit()\n", "VAR_18.update_url_cache(VAR_19)\n", "VAR_2._redirect(url_for(action='show', id=meetup._id36))\n" ]
[ "@Json...\n", "if res._chk_error(errors.NO_TITLE):\n", "res._chk_error(errors.TITLE_TOO_LONG)\n", "res._chk_errors((errors.NO_LOCATION, errors.NO_DESCRIPTION, errors.\n INVALID_DATE, errors.NO_DATE))\n", "res._focus('title')\n", "if res.error:\n", "return\n", "meetup.title = title\n", "meetup.description = description\n", "meetup.location = location\n", "meetup.latitude = latitude\n", "meetup.longitude = longitude\n", "meetup.timestamp = timestamp / 1000\n", "meetup.tzoffset = tzoffset\n", "g.rendercache.invalidate_key_group(Meetup.group_cache_key())\n", "meetup._commit()\n", "article = Link._byID(meetup.assoc_link)\n", "article._load()\n", "article_old_url = article.url\n", "article.title = meetup_article_title(meetup)\n", "article.article = meetup_article_text(meetup)\n", "article._commit()\n", "article.update_url_cache(article_old_url)\n", "res._redirect(url_for(action='show', id=meetup._id36))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Expr'", "Expr'", "Expr'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_6(self):...\n", "for VAR_6 in self.actors.keys():\n", "self.actors[VAR_6].moveTick()\n" ]
[ "def moveTick(self):...\n", "for ID in self.actors.keys():\n", "self.actors[ID].moveTick()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "For", "Expr'" ]
[ "def FUNC_29(self):...\n", "VAR_25 = 3000000000\n", "self.cursor.execute('create table t1(d bigint)')\n", "self.cursor.execute('insert into t1 values (?)', VAR_25)\n", "VAR_20 = self.cursor.execute('select d from t1').fetchone()[0]\n", "self.assertEqual(VAR_20, VAR_25)\n" ]
[ "def test_bigint(self):...\n", "input = 3000000000\n", "self.cursor.execute('create table t1(d bigint)')\n", "self.cursor.execute('insert into t1 values (?)', input)\n", "result = self.cursor.execute('select d from t1').fetchone()[0]\n", "self.assertEqual(result, input)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_15(self, VAR_3=None):...\n", "\"\"\"docstring\"\"\"\n", "def FUNC_22(VAR_30, VAR_3):...\n", "if not isinstance(VAR_30, _IOFile):\n", "return IOFile(VAR_30, VAR_16=self)\n", "return VAR_30.apply_wildcards(VAR_3, fill_missing=f in self.dynamic_input,\n fail_dynamic=self.dynamic_output)\n" ]
[ "def expand_wildcards(self, wildcards=None):...\n", "\"\"\"docstring\"\"\"\n", "def concretize_iofile(f, wildcards):...\n", "if not isinstance(f, _IOFile):\n", "return IOFile(f, rule=self)\n", "return f.apply_wildcards(wildcards, fill_missing=f in self.dynamic_input,\n fail_dynamic=self.dynamic_output)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_2(VAR_1, VAR_2, VAR_3='', VAR_4=None):...\n", "\"\"\"docstring\"\"\"\n", "return FUNC_4(VAR_1, VAR_2['oauth_token'], VAR_6=resp['oauth_token_secret'],\n VAR_4=extra_data, VAR_3=token_type)\n" ]
[ "def oauth1_token_setter(remote, resp, token_type='', extra_data=None):...\n", "\"\"\"docstring\"\"\"\n", "return token_setter(remote, resp['oauth_token'], secret=resp[\n 'oauth_token_secret'], extra_data=extra_data, token_type=token_type)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_2(self, VAR_4, *VAR_5, **VAR_6):...\n", "return self.to_python(VAR_4)\n" ]
[ "def from_db_value(self, value, *args, **kwargs):...\n", "return self.to_python(value)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_0(VAR_0, VAR_1, VAR_2):...\n", "\"\"\"docstring\"\"\"\n", "VAR_15 = False\n", "if VAR_2 == 'prev':\n", "VAR_0 = VAR_0.reverse()\n", "for m in VAR_0:\n", "if VAR_15:\n", "if VAR_15:\n", "return m\n", "if VAR_1 == m:\n", "return VAR_0[0]\n", "return False\n", "VAR_15 = True\n" ]
[ "def get_next_or_prev(models, item, direction):...\n", "\"\"\"docstring\"\"\"\n", "getit = False\n", "if direction == 'prev':\n", "models = models.reverse()\n", "for m in models:\n", "if getit:\n", "if getit:\n", "return m\n", "if item == m:\n", "return models[0]\n", "return False\n", "getit = True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "For", "Condition", "Condition", "Return'", "Condition", "Return'", "Return'", "Assign'" ]
[ "def FUNC_4(self, VAR_5='test', VAR_6=None):...\n", "\"\"\"docstring\"\"\"\n", "from invenio.modules.oauthclient.client import oauth\n", "oauth.remote_apps[VAR_5].handle_oauth2_response = MagicMock(return_value=\n data or {'access_token': 'test_access_token', 'scope': '', 'token_type':\n 'bearer'})\n" ]
[ "def mock_response(self, app='test', data=None):...\n", "\"\"\"docstring\"\"\"\n", "from invenio.modules.oauthclient.client import oauth\n", "oauth.remote_apps[app].handle_oauth2_response = MagicMock(return_value=data or\n {'access_token': 'test_access_token', 'scope': '', 'token_type': 'bearer'})\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "ImportFrom'", "Assign'" ]
[ "\"\"\"\n\"\"\"\n", "import pytz\n", "import sqlite3\n", "from datetime import datetime, timedelta\n", "def __init__(self, VAR_0):...\n", "self.config = VAR_0\n", "self.co2_mult = self.config.get_co2_avoidance_factor()\n", "self.db = sqlite3.connect(self.config.get_database_path(),\n check_same_thread=False)\n", "self.c = self.db.cursor()\n", "self.local_timezone = self.get_local_timezone()\n", "def FUNC_0(self, VAR_1):...\n", "VAR_5 = dict()\n", "VAR_5['today'] = self.get_today()\n", "VAR_5['requested'] = self.get_requested(VAR_1)\n", "return VAR_5\n" ]
[ "\"\"\"\n\"\"\"\n", "import pytz\n", "import sqlite3\n", "from datetime import datetime, timedelta\n", "def __init__(self, config):...\n", "self.config = config\n", "self.co2_mult = self.config.get_co2_avoidance_factor()\n", "self.db = sqlite3.connect(self.config.get_database_path(),\n check_same_thread=False)\n", "self.c = self.db.cursor()\n", "self.local_timezone = self.get_local_timezone()\n", "def get(self, date):...\n", "data = dict()\n", "data['today'] = self.get_today()\n", "data['requested'] = self.get_requested(date)\n", "return data\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "ImportFrom'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_4(self, VAR_4):...\n", "if self.log_dir is None:\n", "return\n", "VAR_19 = '%s_%s.log' % (VAR_4.start_time, VAR_4.__class__.__name__)\n", "VAR_20 = os.path.join(self.log_dir, VAR_19)\n", "VAR_21 = os.path.join(self.log_dir, VAR_4.__class__.__name__)\n", "VAR_4.store_to_file(fd)\n", "os.remove(VAR_21)\n", "os.symlink(VAR_19, VAR_21)\n" ]
[ "def store_to_file(self, request):...\n", "if self.log_dir is None:\n", "return\n", "filename = '%s_%s.log' % (request.start_time, request.__class__.__name__)\n", "filepath = os.path.join(self.log_dir, filename)\n", "linkpath = os.path.join(self.log_dir, request.__class__.__name__)\n", "request.store_to_file(fd)\n", "os.remove(linkpath)\n", "os.symlink(filename, linkpath)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "super(CLASS_0, VAR_0).initArgumentParser(VAR_2, VAR_3=defaults)\n", "VAR_3 = VAR_3 or {}\n", "VAR_2.add_argument('--test_component_detail', default='fe', help=\n 'Refinement for component name to create.')\n" ]
[ "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "super(AwsSmokeTestScenario, cls).initArgumentParser(parser, defaults=defaults)\n", "defaults = defaults or {}\n", "parser.add_argument('--test_component_detail', default='fe', help=\n 'Refinement for component name to create.')\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_13():...\n", "VAR_16 = FUNC_7('models.json')\n", "VAR_35 = [FUNC_12(VAR_34['model_system_name']) for VAR_34 in VAR_16]\n", "return [item for inputs in VAR_35 for item in inputs]\n" ]
[ "def get_inputs_choices():...\n", "models = load_json('models.json')\n", "inputs_by_models = [get_inputs_choices_by_model(model['model_system_name']) for\n model in models]\n", "return [item for inputs in inputs_by_models for item in inputs]\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "import os\n", "import sys\n", "import unittest\n", "from coalib.misc.Shell import escape_path_argument, prepare_string_argument, run_interactive_shell_command, run_shell_command\n", "def FUNC_0(self):...\n", "VAR_1 = 'sh'\n", "self.assertEqual(escape_path_argument('/home/usr/a-file', VAR_1),\n '/home/usr/a-file')\n", "self.assertEqual(escape_path_argument('/home/usr/a-dir/', VAR_1),\n '/home/usr/a-dir/')\n", "self.assertEqual(escape_path_argument('/home/us r/a-file with spaces.bla',\n VAR_1), '/home/us\\\\ r/a-file\\\\ with\\\\ spaces.bla')\n", "self.assertEqual(escape_path_argument('/home/us r/a-dir with spaces/x/',\n VAR_1), '/home/us\\\\ r/a-dir\\\\ with\\\\ spaces/x/')\n", "self.assertEqual(escape_path_argument(\n 'relative something/with cherries and/pickles.delicious', VAR_1),\n 'relative\\\\ something/with\\\\ cherries\\\\ and/pickles.delicious')\n", "def FUNC_1(self):...\n", "VAR_1 = 'cmd'\n", "self.assertEqual(escape_path_argument('C:\\\\Windows\\\\has-a-weird-shell.txt',\n VAR_1), '\"C:\\\\Windows\\\\has-a-weird-shell.txt\"')\n", "self.assertEqual(escape_path_argument('C:\\\\Windows\\\\lolrofl\\\\dirs\\\\', VAR_1\n ), '\"C:\\\\Windows\\\\lolrofl\\\\dirs\\\\\"')\n", "self.assertEqual(escape_path_argument('X:\\\\Users\\\\Maito Gai\\\\fi le.exe',\n VAR_1), '\"X:\\\\Users\\\\Maito Gai\\\\fi le.exe\"')\n", "self.assertEqual(escape_path_argument('X:\\\\Users\\\\Mai to Gai\\\\director y\\\\',\n VAR_1), '\"X:\\\\Users\\\\Mai to Gai\\\\director y\\\\\"')\n", "self.assertEqual(escape_path_argument(\n 'X:\\\\Users\\\\Maito Gai\\\\\"seven-gates\".y', VAR_1),\n '\"X:\\\\Users\\\\Maito Gai\\\\^\"seven-gates^\".y\"')\n", "self.assertEqual(escape_path_argument('System32\\\\my-custom relative tool\\\\',\n VAR_1), '\"System32\\\\my-custom relative tool\\\\\"')\n", "self.assertEqual(escape_path_argument('System32\\\\illegal\" name \"\".curd',\n VAR_1), '\"System32\\\\illegal^\" name ^\"^\".curd\"')\n", "def FUNC_2(self):...\n", "VAR_1 = 'INVALID'\n", "self.assertEqual(escape_path_argument('/home/usr/a-file', VAR_1),\n '/home/usr/a-file')\n", "self.assertEqual(escape_path_argument('/home/us r/a-file with spaces.bla',\n VAR_1), '/home/us r/a-file with spaces.bla')\n", "self.assertEqual(escape_path_argument('|home|us r|a*dir with spaces|x|',\n VAR_1), '|home|us r|a*dir with spaces|x|')\n", "self.assertEqual(escape_path_argument('system|a|b|c?d', VAR_1),\n 'system|a|b|c?d')\n", "@staticmethod...\n", "return ' '.join(escape_path_argument(s) for s in (sys.executable, os.path.\n join(os.path.dirname(os.path.realpath(__file__)),\n 'run_shell_command_testfiles', VAR_0)))\n" ]
[ "import os\n", "import sys\n", "import unittest\n", "from coalib.misc.Shell import escape_path_argument, prepare_string_argument, run_interactive_shell_command, run_shell_command\n", "def test_escape_path_argument_sh(self):...\n", "_type = 'sh'\n", "self.assertEqual(escape_path_argument('/home/usr/a-file', _type),\n '/home/usr/a-file')\n", "self.assertEqual(escape_path_argument('/home/usr/a-dir/', _type),\n '/home/usr/a-dir/')\n", "self.assertEqual(escape_path_argument('/home/us r/a-file with spaces.bla',\n _type), '/home/us\\\\ r/a-file\\\\ with\\\\ spaces.bla')\n", "self.assertEqual(escape_path_argument('/home/us r/a-dir with spaces/x/',\n _type), '/home/us\\\\ r/a-dir\\\\ with\\\\ spaces/x/')\n", "self.assertEqual(escape_path_argument(\n 'relative something/with cherries and/pickles.delicious', _type),\n 'relative\\\\ something/with\\\\ cherries\\\\ and/pickles.delicious')\n", "def test_escape_path_argument_cmd(self):...\n", "_type = 'cmd'\n", "self.assertEqual(escape_path_argument('C:\\\\Windows\\\\has-a-weird-shell.txt',\n _type), '\"C:\\\\Windows\\\\has-a-weird-shell.txt\"')\n", "self.assertEqual(escape_path_argument('C:\\\\Windows\\\\lolrofl\\\\dirs\\\\', _type\n ), '\"C:\\\\Windows\\\\lolrofl\\\\dirs\\\\\"')\n", "self.assertEqual(escape_path_argument('X:\\\\Users\\\\Maito Gai\\\\fi le.exe',\n _type), '\"X:\\\\Users\\\\Maito Gai\\\\fi le.exe\"')\n", "self.assertEqual(escape_path_argument('X:\\\\Users\\\\Mai to Gai\\\\director y\\\\',\n _type), '\"X:\\\\Users\\\\Mai to Gai\\\\director y\\\\\"')\n", "self.assertEqual(escape_path_argument(\n 'X:\\\\Users\\\\Maito Gai\\\\\"seven-gates\".y', _type),\n '\"X:\\\\Users\\\\Maito Gai\\\\^\"seven-gates^\".y\"')\n", "self.assertEqual(escape_path_argument('System32\\\\my-custom relative tool\\\\',\n _type), '\"System32\\\\my-custom relative tool\\\\\"')\n", "self.assertEqual(escape_path_argument('System32\\\\illegal\" name \"\".curd',\n _type), '\"System32\\\\illegal^\" name ^\"^\".curd\"')\n", "def test_escape_path_argument_unsupported(self):...\n", "_type = 'INVALID'\n", "self.assertEqual(escape_path_argument('/home/usr/a-file', _type),\n '/home/usr/a-file')\n", "self.assertEqual(escape_path_argument('/home/us r/a-file with spaces.bla',\n _type), '/home/us r/a-file with spaces.bla')\n", "self.assertEqual(escape_path_argument('|home|us r|a*dir with spaces|x|',\n _type), '|home|us r|a*dir with spaces|x|')\n", "self.assertEqual(escape_path_argument('system|a|b|c?d', _type),\n 'system|a|b|c?d')\n", "@staticmethod...\n", "return ' '.join(escape_path_argument(s) for s in (sys.executable, os.path.\n join(os.path.dirname(os.path.realpath(__file__)),\n 'run_shell_command_testfiles', scriptname)))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2 ]
[ "Import'", "Import'", "Import'", "ImportFrom'", "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Condition", "Return'" ]
[ "def FUNC_3(self):...\n", "return self.best_submission.grade if self.best_submission and not self.unofficial else 0\n" ]
[ "def get_points(self):...\n", "return self.best_submission.grade if self.best_submission and not self.unofficial else 0\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_3(VAR_11, VAR_12, VAR_13, VAR_14, VAR_15, VAR_10):...\n", "\"\"\"docstring\"\"\"\n", "VAR_33 = None\n", "def FUNC_10(VAR_32, VAR_34):...\n", "logging.info('Got signal %s', VAR_32)\n", "VAR_7 = os.path.dirname(VAR_15)\n", "if not os.path.isdir(VAR_7):\n", "if not VAR_33:\n", "if not os.path.isdir(VAR_7):\n", "VAR_8 = CLASS_0(json.load(f))\n", "VAR_33 = {u'exit_code': None, u'hard_timeout': False, u'io_timeout': False,\n u'must_signal_internal_failure': u'task_runner received signal %s' % e.\n signal, u'version': VAR_4}\n", "os.mkdir(VAR_7)\n", "json.dump(VAR_33, f)\n", "VAR_33 = FUNC_8(VAR_12, VAR_8, VAR_7, VAR_13, VAR_14, VAR_10)\n" ]
[ "def load_and_run(in_file, swarming_server, cost_usd_hour, start, out_file,...\n", "\"\"\"docstring\"\"\"\n", "task_result = None\n", "def handler(sig, _):...\n", "logging.info('Got signal %s', sig)\n", "work_dir = os.path.dirname(out_file)\n", "if not os.path.isdir(work_dir):\n", "if not task_result:\n", "if not os.path.isdir(work_dir):\n", "task_details = TaskDetails(json.load(f))\n", "task_result = {u'exit_code': None, u'hard_timeout': False, u'io_timeout': \n False, u'must_signal_internal_failure': \n u'task_runner received signal %s' % e.signal, u'version': OUT_VERSION}\n", "os.mkdir(work_dir)\n", "json.dump(task_result, f)\n", "task_result = run_command(swarming_server, task_details, work_dir,\n cost_usd_hour, start, min_free_space)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "FunctionDef'", "Expr'", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'" ]
[ "def __contains__(self, VAR_41):...\n", "return VAR_41 in self._lookup\n" ]
[ "def __contains__(self, key):...\n", "return key in self._lookup\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_14(VAR_7):...\n", "def FUNC_13(self, *VAR_16, **VAR_10):...\n", "if not self.is_group_user(VAR_8):\n", "return VAR_7(self, *VAR_16, **kwargs)\n" ]
[ "def decorator(method):...\n", "def wrapper(self, *args, **kwargs):...\n", "if not self.is_group_user(group):\n", "return method(self, *args, **kwargs)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Condition", "Return'" ]
[ "def FUNC_39(VAR_101):...\n", "VAR_101.params = VAR_71, VAR_72\n", "return VAR_101\n" ]
[ "def decorate(ruleinfo):...\n", "ruleinfo.params = params, kwparams\n", "return ruleinfo\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_7(self, VAR_8, VAR_9):...\n", "VAR_0.info('Checking new event existence: ' + VAR_8.name + ', date: ' + str\n (VAR_9))\n", "VAR_18 = VAR_8.event_type_id.id\n", "VAR_19 = self.env['event.event'].search([('event_type_id', '=', VAR_18), (\n 'date_end', '=', str(VAR_9))])\n", "if VAR_19:\n", "return False\n", "return True\n" ]
[ "def _event_does_not_exist(self, old_repeating_event, new_end_date):...\n", "_logger.info('Checking new event existence: ' + old_repeating_event.name +\n ', date: ' + str(new_end_date))\n", "old_event_cat = old_repeating_event.event_type_id.id\n", "existing_event = self.env['event.event'].search([('event_type_id', '=',\n old_event_cat), ('date_end', '=', str(new_end_date))])\n", "if existing_event:\n", "return False\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def __init__(self, VAR_6):...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = logging.getLogger(__name__)\n", "VAR_7.setLevel(logging.DEBUG)\n", "VAR_7.debug('Initialized thread')\n", "super(CLASS_8, self).__init__()\n", "self.job_queue = VAR_6\n", "self.subscribed_queues = []\n", "self.end = False\n" ]
[ "def __init__(self, queue):...\n", "\"\"\"docstring\"\"\"\n", "logger = logging.getLogger(__name__)\n", "logger.setLevel(logging.DEBUG)\n", "logger.debug('Initialized thread')\n", "super(MonitoringThread, self).__init__()\n", "self.job_queue = queue\n", "self.subscribed_queues = []\n", "self.end = False\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'" ]
[ "def __repr__(self):...\n", "return f'<Roamer: {self._r_path_.description()} => {self._r_item_!r}>'\n" ]
[ "def __repr__(self):...\n", "return f'<Roamer: {self._r_path_.description()} => {self._r_item_!r}>'\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2(VAR_15, VAR_2, VAR_3='text', VAR_4=bconfig....\n", "\"\"\"docstring\"\"\"\n", "VAR_43 = reader.get_cache(VAR_2)\n", "if not VAR_43:\n", "reader.set_cache(VAR_2, reader.get_regular_expressions(VAR_2, rebuild=\n rebuild_cache, VAR_7=no_cache))\n", "VAR_44 = VAR_43[0]\n", "VAR_43 = reader.get_cache(VAR_2)\n", "VAR_45 = VAR_43[1]\n", "VAR_15 = normalizer.cut_references(VAR_15)\n", "VAR_17 = normalizer.normalize_fulltext('\\n'.join(VAR_15))\n", "if VAR_6 == 'partial':\n", "VAR_17 = FUNC_26(VAR_17)\n", "VAR_22 = None\n", "if VAR_8:\n", "VAR_22 = FUNC_6(VAR_44, VAR_45, VAR_17)\n", "VAR_23 = {}\n", "if VAR_11:\n", "VAR_23 = FUNC_5(VAR_17)\n", "VAR_20 = FUNC_3(VAR_44, VAR_17)\n", "VAR_21 = FUNC_4(VAR_45, VAR_17, VAR_20)\n", "if VAR_10:\n", "VAR_20 = FUNC_22(FUNC_20(VAR_20))\n", "VAR_20 = FUNC_22(VAR_20)\n", "VAR_21 = FUNC_20(VAR_21)\n", "return FUNC_7(VAR_20, VAR_21, VAR_2, VAR_22, VAR_23, VAR_3, VAR_4, VAR_5,\n VAR_10)\n" ]
[ "def get_keywords_from_text(text_lines, taxonomy_name, output_mode='text',...\n", "\"\"\"docstring\"\"\"\n", "cache = reader.get_cache(taxonomy_name)\n", "if not cache:\n", "reader.set_cache(taxonomy_name, reader.get_regular_expressions(\n taxonomy_name, rebuild=rebuild_cache, no_cache=no_cache))\n", "_skw = cache[0]\n", "cache = reader.get_cache(taxonomy_name)\n", "_ckw = cache[1]\n", "text_lines = normalizer.cut_references(text_lines)\n", "fulltext = normalizer.normalize_fulltext('\\n'.join(text_lines))\n", "if match_mode == 'partial':\n", "fulltext = _get_partial_text(fulltext)\n", "author_keywords = None\n", "if with_author_keywords:\n", "author_keywords = extract_author_keywords(_skw, _ckw, fulltext)\n", "acronyms = {}\n", "if extract_acronyms:\n", "acronyms = extract_abbreviations(fulltext)\n", "single_keywords = extract_single_keywords(_skw, fulltext)\n", "composite_keywords = extract_composite_keywords(_ckw, fulltext, single_keywords\n )\n", "if only_core_tags:\n", "single_keywords = clean_before_output(_filter_core_keywors(single_keywords))\n", "single_keywords = clean_before_output(single_keywords)\n", "composite_keywords = _filter_core_keywors(composite_keywords)\n", "return get_keywords_output(single_keywords, composite_keywords,\n taxonomy_name, author_keywords, acronyms, output_mode, output_limit,\n spires, only_core_tags)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_0(self):...\n", "VAR_8 = lambda x: dict(request_token_params={'scope': ''}, base_url=\n 'https://foo.bar/', request_token_url=None, access_token_url=\n 'https://foo.bar/oauth/access_token', authorize_url=\n 'https://foo.bar/oauth/authorize', consumer_key=x, consumer_secret=\n 'testsecret')\n", "self.app.config['OAUTHCLIENT_REMOTE_APPS'] = dict(test=dict(\n authorized_handler=self.handler, params=params('testid'), title=\n 'MyLinkedTestAccount'), test_invalid=dict(authorized_handler=self.\n handler_invalid, params=params('test_invalidid'), title='Test Invalid'),\n full=dict(params=params('fullid'), title='Full'))\n", "self.handled_resp = None\n", "self.handled_remote = None\n", "self.handled_args = None\n", "self.handled_kwargs = None\n", "from invenio.modules.oauthclient.models import RemoteToken, RemoteAccount\n", "RemoteToken.query.delete()\n", "RemoteAccount.query.delete()\n", "db.session.commit()\n" ]
[ "def setUp(self):...\n", "params = lambda x: dict(request_token_params={'scope': ''}, base_url=\n 'https://foo.bar/', request_token_url=None, access_token_url=\n 'https://foo.bar/oauth/access_token', authorize_url=\n 'https://foo.bar/oauth/authorize', consumer_key=x, consumer_secret=\n 'testsecret')\n", "self.app.config['OAUTHCLIENT_REMOTE_APPS'] = dict(test=dict(\n authorized_handler=self.handler, params=params('testid'), title=\n 'MyLinkedTestAccount'), test_invalid=dict(authorized_handler=self.\n handler_invalid, params=params('test_invalidid'), title='Test Invalid'),\n full=dict(params=params('fullid'), title='Full'))\n", "self.handled_resp = None\n", "self.handled_remote = None\n", "self.handled_args = None\n", "self.handled_kwargs = None\n", "from invenio.modules.oauthclient.models import RemoteToken, RemoteAccount\n", "RemoteToken.query.delete()\n", "RemoteAccount.query.delete()\n", "db.session.commit()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_12(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_51 = frappe.db.get_table_columns(self.doctype)\n", "VAR_52 = []\n", "for fld in self.fields:\n", "for VAR_34 in optional_fields:\n", "for fld in VAR_52:\n", "if VAR_34 in fld and not VAR_34 in VAR_51:\n", "VAR_52 = []\n", "VAR_52.append(fld)\n", "for VAR_68 in self.filters:\n", "if isinstance(VAR_68, string_types):\n", "for VAR_68 in VAR_52:\n", "VAR_68 = [VAR_68]\n", "for element in VAR_68:\n", "if isinstance(self.filters, dict):\n", "if element in optional_fields and element not in VAR_51:\n", "self.filters.remove(VAR_68)\n", "VAR_52.append(VAR_68)\n" ]
[ "def set_optional_columns(self):...\n", "\"\"\"docstring\"\"\"\n", "columns = frappe.db.get_table_columns(self.doctype)\n", "to_remove = []\n", "for fld in self.fields:\n", "for f in optional_fields:\n", "for fld in to_remove:\n", "if f in fld and not f in columns:\n", "to_remove = []\n", "to_remove.append(fld)\n", "for each in self.filters:\n", "if isinstance(each, string_types):\n", "for each in to_remove:\n", "each = [each]\n", "for element in each:\n", "if isinstance(self.filters, dict):\n", "if element in optional_fields and element not in columns:\n", "self.filters.remove(each)\n", "to_remove.append(each)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "For", "For", "For", "Condition", "Assign'", "Expr'", "For", "Condition", "For", "Assign'", "For", "Condition", "Condition", "Expr'", "Expr'" ]
[ "from __future__ import absolute_import\n", "from __future__ import print_function\n", "from __future__ import unicode_literals\n", "import codecs\n", "import datetime\n", "import io\n", "import os\n", "import sys\n", "import time\n", "import traceback\n", "import urllib\n", "import requests\n", "VAR_0 = False\n", "def __init__(self):...\n", "self.xsrf_token = None\n", "self.session = requests.Session()\n", "def FUNC_0(self, VAR_1):...\n", "VAR_15 = self.session.get(VAR_1)\n", "for cookie in VAR_15.cookies:\n", "if cookie.name == '_xsrf':\n", "def FUNC_1(self, VAR_2):...\n", "self.xsrf_token = cookie.value\n", "self.read_xsrf_token(VAR_2.base_url)\n", "VAR_2.execute()\n", "def FUNC_2(self, VAR_1, VAR_3=None, VAR_4=None):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_4 is None:\n", "if VAR_3 is None:\n", "VAR_3 = VAR_3.copy()\n", "for fobj in VAR_20.itervalues():\n", "return VAR_15\n", "VAR_15 = self.session.get(VAR_1)\n", "VAR_3 = VAR_3.copy()\n", "VAR_3['_xsrf'] = self.xsrf_token\n", "fobj.close()\n", "VAR_3['_xsrf'] = self.xsrf_token\n", "VAR_20 = dict((k, io.open(v, 'rb')) for k, v in VAR_4)\n", "VAR_15 = self.session.post(VAR_1, VAR_3)\n", "VAR_15 = self.session.post(VAR_1, VAR_3, files=file_objs)\n" ]
[ "from __future__ import absolute_import\n", "from __future__ import print_function\n", "from __future__ import unicode_literals\n", "import codecs\n", "import datetime\n", "import io\n", "import os\n", "import sys\n", "import time\n", "import traceback\n", "import urllib\n", "import requests\n", "debug = False\n", "def __init__(self):...\n", "self.xsrf_token = None\n", "self.session = requests.Session()\n", "def read_xsrf_token(self, url):...\n", "response = self.session.get(url)\n", "for cookie in response.cookies:\n", "if cookie.name == '_xsrf':\n", "def login(self, login_request):...\n", "self.xsrf_token = cookie.value\n", "self.read_xsrf_token(login_request.base_url)\n", "login_request.execute()\n", "def do_request(self, url, data=None, file_names=None):...\n", "\"\"\"docstring\"\"\"\n", "if file_names is None:\n", "if data is None:\n", "data = data.copy()\n", "for fobj in file_objs.itervalues():\n", "return response\n", "response = self.session.get(url)\n", "data = data.copy()\n", "data['_xsrf'] = self.xsrf_token\n", "fobj.close()\n", "data['_xsrf'] = self.xsrf_token\n", "file_objs = dict((k, io.open(v, 'rb')) for k, v in file_names)\n", "response = self.session.post(url, data)\n", "response = self.session.post(url, data, files=file_objs)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "For", "Condition", "FunctionDef'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Docstring", "Condition", "Condition", "Assign'", "For", "Return'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_1(self, VAR_1, VAR_2):...\n", "for VAR_3 in VAR_2:\n", "VAR_28 = VAR_3['source']['type']\n", "if VAR_28 == 'inverter':\n", "self.add_inverter_data(VAR_1, VAR_3)\n", "if VAR_28 == 'consumption':\n", "self.add_consumption_data_row(VAR_1, VAR_3['energy'], VAR_3['power'])\n" ]
[ "def add_data(self, ts, data_points):...\n", "for data in data_points:\n", "data_type = data['source']['type']\n", "if data_type == 'inverter':\n", "self.add_inverter_data(ts, data)\n", "if data_type == 'consumption':\n", "self.add_consumption_data_row(ts, data['energy'], data['power'])\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Assign'", "Condition", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_6(self):...\n", "self.response.headers['Content-Type'] = 'text/plain; charset=utf-8'\n", "self.response.out.write('Server up')\n" ]
[ "def get(self):...\n", "self.response.headers['Content-Type'] = 'text/plain; charset=utf-8'\n", "self.response.out.write('Server up')\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def __init__(self, VAR_2, VAR_3, VAR_4, VAR_5, VAR_6):...\n", "VAR_0.info('Initializing an instance of MySQLConnectionPool')\n", "VAR_0.debug(\n 'Type checking for host address, username, password, database name and pool size'\n )\n", "if type(VAR_2) != str:\n", "if type(VAR_3) != str:\n", "if type(VAR_4) != str:\n", "if type(VAR_5) != str:\n", "VAR_0.debug('All type checks passed')\n", "VAR_0.info('Initializing class variables')\n", "self._hostaddr = VAR_2\n", "self._usr = VAR_3\n", "self._pwd = VAR_4\n", "self._dbname = VAR_5\n", "VAR_0.info('Initializing MySQL connection pool')\n", "self._pool = Queue(VAR_6)\n", "for i in range(VAR_6):\n", "self._pool.put(MySQLdb.connect(VAR_2, VAR_3, VAR_4, VAR_5), block=False)\n", "VAR_0.info('Initialized MySQL connection pool')\n" ]
[ "def __init__(self, hostaddr, usr, pwd, dbname, size):...\n", "logger.info('Initializing an instance of MySQLConnectionPool')\n", "logger.debug(\n 'Type checking for host address, username, password, database name and pool size'\n )\n", "if type(hostaddr) != str:\n", "if type(usr) != str:\n", "if type(pwd) != str:\n", "if type(dbname) != str:\n", "logger.debug('All type checks passed')\n", "logger.info('Initializing class variables')\n", "self._hostaddr = hostaddr\n", "self._usr = usr\n", "self._pwd = pwd\n", "self._dbname = dbname\n", "logger.info('Initializing MySQL connection pool')\n", "self._pool = Queue(size)\n", "for i in range(size):\n", "self._pool.put(MySQLdb.connect(hostaddr, usr, pwd, dbname), block=False)\n", "logger.info('Initialized MySQL connection pool')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Condition", "Condition", "Condition", "Condition", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "For", "Expr'", "Expr'" ]
[ "def FUNC_21(self, VAR_3):...\n", "if self.allowed_email_suffix:\n", "return VAR_3.endswith(self.allowed_email_suffix)\n", "return True\n" ]
[ "def passes_email_suffix(self, email):...\n", "if self.allowed_email_suffix:\n", "return email.endswith(self.allowed_email_suffix)\n", "return True\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "@property...\n", "self.check_broken_symlink()\n", "return os.path.getsize(self.file)\n" ]
[ "@property...\n", "self.check_broken_symlink()\n", "return os.path.getsize(self.file)\n" ]
[ 0, 0, 0 ]
[ "Condition", "Expr'", "Return'" ]
[ "@FUNC_0...\n", "VAR_27 = VAR_10.get_transport()\n", "VAR_7 = VAR_27.open_session()\n", "VAR_7.invoke_shell()\n", "VAR_0.debug(_('Reading CLI MOTD'))\n", "self._get_output(VAR_7)\n", "VAR_28 = 'stty columns 255'\n", "VAR_0.debug(_(\"Setting CLI terminal width: '%s'\"), VAR_28)\n", "VAR_7.send(VAR_28 + '\\r')\n", "VAR_25 = self._get_output(VAR_7)\n", "VAR_0.debug(_(\"Sending CLI command: '%s'\"), VAR_11)\n", "VAR_7.send(VAR_11 + '\\r')\n", "VAR_25 = self._get_output(VAR_7)\n", "VAR_7.close()\n", "if any(line.startswith(('% Error', 'Error:')) for line in VAR_25):\n", "VAR_37 = _('Error executing EQL command')\n", "return VAR_25\n", "VAR_38 = '\\n'.join(VAR_25)\n", "VAR_0.error(VAR_38)\n" ]
[ "@with_timeout...\n", "transport = ssh.get_transport()\n", "chan = transport.open_session()\n", "chan.invoke_shell()\n", "LOG.debug(_('Reading CLI MOTD'))\n", "self._get_output(chan)\n", "cmd = 'stty columns 255'\n", "LOG.debug(_(\"Setting CLI terminal width: '%s'\"), cmd)\n", "chan.send(cmd + '\\r')\n", "out = self._get_output(chan)\n", "LOG.debug(_(\"Sending CLI command: '%s'\"), command)\n", "chan.send(command + '\\r')\n", "out = self._get_output(chan)\n", "chan.close()\n", "if any(line.startswith(('% Error', 'Error:')) for line in out):\n", "desc = _('Error executing EQL command')\n", "return out\n", "cmdout = '\\n'.join(out)\n", "LOG.error(cmdout)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "For", "Assign'", "Return'", "Assign'", "Expr'" ]
[ "def FUNC_11(self, VAR_9, VAR_14):...\n", "from opennode.oms.endpoint.httprest.auth import IHttpRestAuthenticationUtility\n", "VAR_19 = getUtility(IHttpRestAuthenticationUtility)\n", "VAR_31 = VAR_19.get_principal(VAR_14)\n", "VAR_31 = 'oms.anonymous'\n", "if VAR_31 != 'oms.anonymous':\n", "VAR_19.renew_token(VAR_9, VAR_14)\n", "if VAR_9.method == 'OPTIONS':\n", "VAR_31 = 'oms.rest_options'\n", "return new_interaction(VAR_31)\n" ]
[ "def get_interaction(self, request, token):...\n", "from opennode.oms.endpoint.httprest.auth import IHttpRestAuthenticationUtility\n", "authentication_utility = getUtility(IHttpRestAuthenticationUtility)\n", "principal = authentication_utility.get_principal(token)\n", "principal = 'oms.anonymous'\n", "if principal != 'oms.anonymous':\n", "authentication_utility.renew_token(request, token)\n", "if request.method == 'OPTIONS':\n", "principal = 'oms.rest_options'\n", "return new_interaction(principal)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Assign'", "Return'" ]
[ "from sqlalchemy import Column, ForeignKey, Integer, String\n", "from sqlalchemy.ext.declarative import declarative_base\n", "from sqlalchemy.orm import relationship\n", "from sqlalchemy import create_engine\n", "VAR_0 = declarative_base()\n", "__tablename__ = 'user'\n", "VAR_2 = Column(Integer, primary_key=True)\n", "VAR_3 = Column(String(250), nullable=False)\n", "VAR_4 = Column(String(250), nullable=False)\n", "VAR_5 = Column(String(250))\n", "__tablename__ = 'Grudget'\n", "VAR_2 = Column(Integer, primary_key=True)\n", "VAR_3 = Column(String(250), nullable=False)\n", "VAR_6 = Column(Integer, ForeignKey('user.id'))\n", "VAR_7 = relationship(CLASS_0)\n", "@property...\n", "\"\"\"docstring\"\"\"\n", "return {'name': self.name, 'id': self.id}\n" ]
[ "from sqlalchemy import Column, ForeignKey, Integer, String\n", "from sqlalchemy.ext.declarative import declarative_base\n", "from sqlalchemy.orm import relationship\n", "from sqlalchemy import create_engine\n", "Base = declarative_base()\n", "__tablename__ = 'user'\n", "id = Column(Integer, primary_key=True)\n", "name = Column(String(250), nullable=False)\n", "email = Column(String(250), nullable=False)\n", "picture = Column(String(250))\n", "__tablename__ = 'Grudget'\n", "id = Column(Integer, primary_key=True)\n", "name = Column(String(250), nullable=False)\n", "user_id = Column(Integer, ForeignKey('user.id'))\n", "user = relationship(User)\n", "@property...\n", "\"\"\"docstring\"\"\"\n", "return {'name': self.name, 'id': self.id}\n" ]
[ 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Docstring", "Return'" ]
[ "def __init__(self):...\n", "self.content = VAR_2\n" ]
[ "def __init__(self):...\n", "self.content = content\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "@utils.synchronized('3par', external=True)...\n", "" ]
[ "@utils.synchronized('3par', external=True)...\n", "" ]
[ 0, 0 ]
[ "Condition", "Condition" ]
[ "def FUNC_11(self, VAR_14):...\n", "" ]
[ "def is_integer(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]