lines (sequence, lengths 1-444) | raw_lines (sequence, lengths 1-444) | label (sequence, lengths 1-444) | type (sequence, lengths 1-444)
---|---|---|---|
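Each row below pairs a snippet of identifier-obfuscated Python (`lines`, with names rewritten to `VAR_n`/`FUNC_n`/`CLASS_n`) with its original source (`raw_lines`), plus a per-line integer `label` and a per-line AST node kind `type`; all four columns are parallel lists with one entry per source line. As a minimal sketch of consuming rows in this shape, assuming the data is stored as JSON-lines records with these four keys (the file name `data.jsonl` is hypothetical):

```python
import json

# Hypothetical file name and storage format: this assumes one JSON object per
# line, each carrying the four parallel per-line columns shown in the table.
with open("data.jsonl") as f:
    for record in map(json.loads, f):
        for obf, raw, label, node_type in zip(
            record["lines"], record["raw_lines"],
            record["label"], record["type"],
        ):
            # Non-zero labels appear to flag lines of interest in this dataset.
            if label != 0:
                print(f"{node_type:12} label={label} {raw.rstrip()}")
```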
[
"def FUNC_36(self, VAR_29):...\n",
"VAR_6.assertEqual('Too bad', VAR_29.message)\n",
"VAR_12.append('authentication_error')\n",
"super(CLASS_7, self).authentication_error(VAR_29)\n"
] | [
"def authentication_error(self, err):...\n",
"test.assertEqual('Too bad', err.message)\n",
"calls.append('authentication_error')\n",
"super(Handler, self).authentication_error(err)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"import json\n",
"import os\n",
"import time\n",
"import urllib\n",
"import psycopg2\n",
"from websocket import create_connection\n",
"import config\n",
"VAR_0 = create_connection(config.WEBSOCKET_URL)\n",
"VAR_1 = psycopg2.connect(**config.POSTGRES)\n",
"VAR_2 = VAR_1.cursor()\n",
"VAR_3 = 'TRUNCATE holders'\n",
"VAR_2.execute(VAR_3)\n",
"VAR_3 = 'ALTER SEQUENCE holders_hid_seq RESTART WITH 1'\n",
"VAR_2.execute(VAR_3)\n",
"VAR_1.commit()\n",
"VAR_0.send('{\"id\":1, \"method\":\"call\", \"params\":[0,\"get_account_count\",[]]}')\n",
"VAR_4 = VAR_0.recv()\n",
"VAR_5 = json.loads(VAR_4)\n",
"VAR_6 = int(VAR_5['result'])\n",
"for ac in range(0, VAR_6):\n",
"VAR_0.send('{\"id\":1, \"method\":\"call\", \"params\":[0,\"get_objects\",[[\"1.2.' +\n str(ac) + '\"]]]}')\n",
"VAR_1.close()\n",
"VAR_4 = VAR_0.recv()\n",
"VAR_5 = json.loads(VAR_4)\n",
"VAR_9 = VAR_5['result'][0]['id']\n",
"VAR_0.send(\n '{\"id\":1, \"method\":\"call\", \"params\":[0,\"get_account_balances\",[\"' +\n VAR_9 + '\", [\"1.3.0\"]]]}')\n",
"VAR_10 = VAR_5['result'][0]['name']\n",
"VAR_7 = VAR_0.recv()\n",
"VAR_8 = json.loads(VAR_7)\n",
"if VAR_8['result'][0]['amount'] == 0:\n",
"VAR_11 = VAR_8['result'][0]['amount']\n",
"VAR_0.send('{\"id\":1, \"method\":\"call\", \"params\":[0,\"get_objects\",[[\"' +\n VAR_5['result'][0]['statistics'] + '\"]]]}')\n",
"VAR_4 = VAR_0.recv()\n",
"VAR_12 = json.loads(VAR_4)\n",
"VAR_14 = VAR_12['result'][0]['total_core_in_orders']\n",
"VAR_14 = 0\n",
"VAR_11 = int(VAR_11) + int(VAR_14)\n",
"VAR_13 = VAR_5['result'][0]['options']['voting_account']\n",
"VAR_3 = (\n \"INSERT INTO holders (account_id, account_name, amount, voting_as) VALUES('\"\n + VAR_9 + \"', '\" + VAR_10 + \"','\" + str(VAR_11) + \"', '\" + VAR_13 + \"')\")\n",
"VAR_2.execute(VAR_3)\n",
"VAR_1.commit()\n"
] | [
"import json\n",
"import os\n",
"import time\n",
"import urllib\n",
"import psycopg2\n",
"from websocket import create_connection\n",
"import config\n",
"ws = create_connection(config.WEBSOCKET_URL)\n",
"con = psycopg2.connect(**config.POSTGRES)\n",
"cur = con.cursor()\n",
"query = 'TRUNCATE holders'\n",
"cur.execute(query)\n",
"query = 'ALTER SEQUENCE holders_hid_seq RESTART WITH 1'\n",
"cur.execute(query)\n",
"con.commit()\n",
"ws.send('{\"id\":1, \"method\":\"call\", \"params\":[0,\"get_account_count\",[]]}')\n",
"result = ws.recv()\n",
"j = json.loads(result)\n",
"account_count = int(j['result'])\n",
"for ac in range(0, account_count):\n",
"ws.send('{\"id\":1, \"method\":\"call\", \"params\":[0,\"get_objects\",[[\"1.2.' + str\n (ac) + '\"]]]}')\n",
"con.close()\n",
"result = ws.recv()\n",
"j = json.loads(result)\n",
"account_id = j['result'][0]['id']\n",
"ws.send('{\"id\":1, \"method\":\"call\", \"params\":[0,\"get_account_balances\",[\"' +\n account_id + '\", [\"1.3.0\"]]]}')\n",
"account_name = j['result'][0]['name']\n",
"result3 = ws.recv()\n",
"jb = json.loads(result3)\n",
"if jb['result'][0]['amount'] == 0:\n",
"amount = jb['result'][0]['amount']\n",
"ws.send('{\"id\":1, \"method\":\"call\", \"params\":[0,\"get_objects\",[[\"' + j[\n 'result'][0]['statistics'] + '\"]]]}')\n",
"result = ws.recv()\n",
"js = json.loads(result)\n",
"total_core_in_orders = js['result'][0]['total_core_in_orders']\n",
"total_core_in_orders = 0\n",
"amount = int(amount) + int(total_core_in_orders)\n",
"voting_account = j['result'][0]['options']['voting_account']\n",
"query = (\n \"INSERT INTO holders (account_id, account_name, amount, voting_as) VALUES('\"\n + account_id + \"', '\" + account_name + \"','\" + str(amount) + \"', '\" +\n voting_account + \"')\")\n",
"cur.execute(query)\n",
"con.commit()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"Import'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_14(self):...\n",
"VAR_35 = {}\n",
"VAR_36 = self.configuration.safe_get('volume_backend_name')\n",
"VAR_35['volume_backend_name'] = VAR_36 or self.__class__.__name__\n",
"VAR_35['driver_version'] = '1.0'\n",
"VAR_35['reserved_percentage'] = 0\n",
"VAR_35['storage_protocol'] = 'iSCSI'\n",
"VAR_35['vendor_name'] = 'Hewlett-Packard'\n",
"VAR_18 = self._cliq_run_xml('getClusterInfo', {})\n",
"VAR_37 = VAR_18.find('response/cluster')\n",
"VAR_38 = VAR_37.attrib.get('spaceTotal')\n",
"VAR_39 = VAR_37.attrib.get('unprovisionedSpace')\n",
"VAR_40 = 1073741824\n",
"VAR_35['total_capacity_gb'] = int(VAR_38) / VAR_40\n",
"VAR_35['free_capacity_gb'] = int(VAR_39) / VAR_40\n",
"self.device_stats = VAR_35\n"
] | [
"def _update_backend_status(self):...\n",
"data = {}\n",
"backend_name = self.configuration.safe_get('volume_backend_name')\n",
"data['volume_backend_name'] = backend_name or self.__class__.__name__\n",
"data['driver_version'] = '1.0'\n",
"data['reserved_percentage'] = 0\n",
"data['storage_protocol'] = 'iSCSI'\n",
"data['vendor_name'] = 'Hewlett-Packard'\n",
"result_xml = self._cliq_run_xml('getClusterInfo', {})\n",
"cluster_node = result_xml.find('response/cluster')\n",
"total_capacity = cluster_node.attrib.get('spaceTotal')\n",
"free_capacity = cluster_node.attrib.get('unprovisionedSpace')\n",
"GB = 1073741824\n",
"data['total_capacity_gb'] = int(total_capacity) / GB\n",
"data['free_capacity_gb'] = int(free_capacity) / GB\n",
"self.device_stats = data\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_13(self, VAR_14):...\n",
"return [CLASS_2(None, VAR_14, self)]\n"
] | [
"def flatten(self, name):...\n",
"return [FlattenedColumn(None, name, self)]\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_2(VAR_4):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_10 = VAR_4.lower().strip()\n",
"FUNC_3(VAR_10)\n"
] | [
"def process(input):...\n",
"\"\"\"docstring\"\"\"\n",
"_input = input.lower().strip()\n",
"check_sub_command(_input)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"def __repr__(self):...\n",
"return '<%s: %s>' % (self.__class__.__name__, self)\n"
] | [
"def __repr__(self):...\n",
"return '<%s: %s>' % (self.__class__.__name__, self)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@auth.require(acl.is_ereporter2_editor)...\n",
"VAR_19 = self.request.get('to_delete')\n",
"if VAR_19:\n",
"ndb.Key(models.ErrorReportingMonitoring, VAR_19).delete()\n",
"VAR_33 = self.request.get('mute_type')\n",
"self.get()\n",
"VAR_17 = None\n",
"if VAR_33 in ('exception_type', 'signature'):\n",
"VAR_17 = self.request.get(VAR_33)\n",
"if not VAR_17:\n",
"self.abort(400)\n",
"VAR_34 = self.request.get('silenced')\n",
"VAR_35 = self.request.get('silenced_until')\n",
"if VAR_35 == 'T':\n",
"VAR_35 = ''\n",
"VAR_36 = self.request.get('threshold')\n",
"VAR_37 = models.ErrorReportingMonitoring.error_to_key(VAR_17)\n",
"if not VAR_34 and not VAR_35 and not VAR_36:\n",
"VAR_37.delete()\n",
"VAR_39 = models.ErrorReportingMonitoring(VAR_37=key, VAR_17=error)\n",
"if VAR_34:\n",
"VAR_39.silenced = True\n",
"if VAR_35:\n",
"VAR_39.silenced_until = datetime.datetime.strptime(VAR_35, '%Y-%m-%dT%H:%M')\n",
"if VAR_36:\n",
"VAR_39.threshold = int(VAR_36)\n",
"VAR_39.put()\n"
] | [
"@auth.require(acl.is_ereporter2_editor)...\n",
"to_delete = self.request.get('to_delete')\n",
"if to_delete:\n",
"ndb.Key(models.ErrorReportingMonitoring, to_delete).delete()\n",
"mute_type = self.request.get('mute_type')\n",
"self.get()\n",
"error = None\n",
"if mute_type in ('exception_type', 'signature'):\n",
"error = self.request.get(mute_type)\n",
"if not error:\n",
"self.abort(400)\n",
"silenced = self.request.get('silenced')\n",
"silenced_until = self.request.get('silenced_until')\n",
"if silenced_until == 'T':\n",
"silenced_until = ''\n",
"threshold = self.request.get('threshold')\n",
"key = models.ErrorReportingMonitoring.error_to_key(error)\n",
"if not silenced and not silenced_until and not threshold:\n",
"key.delete()\n",
"item = models.ErrorReportingMonitoring(key=key, error=error)\n",
"if silenced:\n",
"item.silenced = True\n",
"if silenced_until:\n",
"item.silenced_until = datetime.datetime.strptime(silenced_until,\n '%Y-%m-%dT%H:%M')\n",
"if threshold:\n",
"item.threshold = int(threshold)\n",
"item.put()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Expr'"
] |
[
"def FUNC_3(self):...\n",
"VAR_0.persistConfig(self.get_argument('AudioCodec'), self.get_argument(\n 'AudioRate'), self.get_argument('VideoCodec'), self.get_argument(\n 'VideoRate'), self.get_argument('VideoSize'), self.get_argument(\n 'StreamEncryption'), self.get_argument('GenEncryptionKey'))\n",
"self.render('../config.html', cfg=cfgDAO.loadConfig())\n"
] | [
"def post(self):...\n",
"cfgDAO.persistConfig(self.get_argument('AudioCodec'), self.get_argument(\n 'AudioRate'), self.get_argument('VideoCodec'), self.get_argument(\n 'VideoRate'), self.get_argument('VideoSize'), self.get_argument(\n 'StreamEncryption'), self.get_argument('GenEncryptionKey'))\n",
"self.render('../config.html', cfg=cfgDAO.loadConfig())\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"@api_view(['GET'])...\n",
"VAR_13 = VAR_0.query_params.get('measure', None)\n",
"VAR_12, VAR_2 = FUNC_1(VAR_0)\n",
"VAR_14 = ImportLog.objects.latest_in_category('prescribing').current_at\n",
"VAR_15 = (VAR_14 - relativedelta(months=2)).strftime('%Y-%m-01')\n",
"VAR_16 = Measure.objects.get(pk=measure)\n",
"if VAR_16.numerator_is_list_of_bnf_codes:\n",
"if VAR_2 in ['stp_id', 'regional_team_id']:\n",
"VAR_31 = []\n",
"VAR_34 = 'string'\n",
"if VAR_2 == 'pr.ccg_id':\n",
"VAR_17 = Response(VAR_31)\n",
"VAR_27 = re.match('SUM\\\\((items|quantity|actual_cost)\\\\) AS numerator',\n VAR_16.numerator_columns)\n",
"VAR_34 = \"\"\"\n INNER JOIN frontend_practice pr\n ON p.practice_id = pr.code\n \"\"\"\n",
"VAR_34 = ''\n",
"VAR_18 = '%s-%s-breakdown.csv' % (VAR_13, VAR_12)\n",
"if VAR_27:\n",
"if VAR_0.accepted_renderer.format == 'csv':\n",
"VAR_35 = {'items': 'total_items', 'actual_cost': 'cost', 'quantity': 'quantity'\n }[VAR_27.groups()[0]]\n",
"VAR_35 = 'total_items'\n",
"VAR_17['content-disposition'] = 'attachment; filename=%s' % VAR_18\n",
"return VAR_17\n",
"VAR_28 = VAR_12 and VAR_2\n",
"VAR_29 = {'numerator_bnf_codes': VAR_16.numerator_bnf_codes,\n 'three_months_ago': VAR_15}\n",
"if VAR_28:\n",
"VAR_36 = '{org_type} = %(org_id)s AND '.format(VAR_2=org_type)\n",
"VAR_36 = ''\n",
"VAR_37 = '{org_type}, '.format(VAR_2=org_type)\n",
"VAR_37 = ''\n",
"VAR_29['org_id'] = VAR_12\n",
"VAR_30 = 'string'.format(VAR_36=org_condition, VAR_37=org_group, VAR_2=\n org_type, VAR_15=three_months_ago, VAR_34=extra_join, VAR_35=order_col)\n",
"VAR_31 = utils.execute_query(VAR_30, VAR_29)\n"
] | [
"@api_view(['GET'])...\n",
"measure = request.query_params.get('measure', None)\n",
"org_id, org_type = _get_org_id_and_type_from_request(request)\n",
"this_month = ImportLog.objects.latest_in_category('prescribing').current_at\n",
"three_months_ago = (this_month - relativedelta(months=2)).strftime('%Y-%m-01')\n",
"m = Measure.objects.get(pk=measure)\n",
"if m.numerator_is_list_of_bnf_codes:\n",
"if org_type in ['stp_id', 'regional_team_id']:\n",
"data = []\n",
"extra_join = \"\"\"\n INNER JOIN frontend_practice pr\n ON p.practice_id = pr.code\n INNER JOIN frontend_pct\n ON frontend_pct.code = pr.ccg_id\n \"\"\"\n",
"if org_type == 'pr.ccg_id':\n",
"response = Response(data)\n",
"match = re.match('SUM\\\\((items|quantity|actual_cost)\\\\) AS numerator', m.\n numerator_columns)\n",
"extra_join = \"\"\"\n INNER JOIN frontend_practice pr\n ON p.practice_id = pr.code\n \"\"\"\n",
"extra_join = ''\n",
"filename = '%s-%s-breakdown.csv' % (measure, org_id)\n",
"if match:\n",
"if request.accepted_renderer.format == 'csv':\n",
"order_col = {'items': 'total_items', 'actual_cost': 'cost', 'quantity':\n 'quantity'}[match.groups()[0]]\n",
"order_col = 'total_items'\n",
"response['content-disposition'] = 'attachment; filename=%s' % filename\n",
"return response\n",
"focus_on_org = org_id and org_type\n",
"params = {'numerator_bnf_codes': m.numerator_bnf_codes, 'three_months_ago':\n three_months_ago}\n",
"if focus_on_org:\n",
"org_condition = '{org_type} = %(org_id)s AND '.format(org_type=org_type)\n",
"org_condition = ''\n",
"org_group = '{org_type}, '.format(org_type=org_type)\n",
"org_group = ''\n",
"params['org_id'] = org_id\n",
"query = (\n \"\"\"\n SELECT\n presentation_code AS bnf_code,\n pn.name AS presentation_name,\n SUM(total_items) AS total_items,\n SUM(actual_cost) AS cost,\n SUM(quantity) AS quantity\n FROM\n frontend_prescription p\n INNER JOIN\n frontend_presentation pn\n ON p.presentation_code = pn.bnf_code\n {extra_join}\n WHERE\n {org_condition}\n processing_date >= %(three_months_ago)s\n AND\n pn.bnf_code = ANY(%(numerator_bnf_codes)s)\n GROUP BY\n {org_group}\n presentation_code, pn.name\n ORDER BY {order_col} DESC\n LIMIT 50\n \"\"\"\n .format(org_condition=org_condition, org_group=org_group, org_type=\n org_type, three_months_ago=three_months_ago, extra_join=extra_join,\n order_col=order_col))\n",
"data = utils.execute_query(query, params)\n"
] | [
0,
0,
4,
0,
0,
0,
0,
4,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
4,
0,
4,
0,
0,
4,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"For",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def __init__(self, VAR_18, VAR_35=10000, VAR_36=errors.BAD_COMMENT, VAR_37=...\n",
"CLASS_0.__init__(self, VAR_18, **kw)\n",
"self.length = VAR_35\n",
"self.len_error = VAR_37\n",
"self.emp_error = VAR_36\n"
] | [
"def __init__(self, item, length=10000, empty_error=errors.BAD_COMMENT,...\n",
"Validator.__init__(self, item, **kw)\n",
"self.length = length\n",
"self.len_error = length_error\n",
"self.emp_error = empty_error\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_5(self, VAR_11='', VAR_6=None):...\n",
"VAR_5 = self.session.query(self.obj)\n",
"VAR_5 = self._get_base_query(VAR_5=query, VAR_6=filters)\n",
"VAR_25 = VAR_5.all()\n",
"VAR_26 = GroupByDateMonth(VAR_11, 'Group by Month')\n",
"return VAR_26.apply(VAR_25)\n"
] | [
"def query_month_group(self, group_by='', filters=None):...\n",
"query = self.session.query(self.obj)\n",
"query = self._get_base_query(query=query, filters=filters)\n",
"query_result = query.all()\n",
"group = GroupByDateMonth(group_by, 'Group by Month')\n",
"return group.apply(query_result)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_12(self, VAR_10, VAR_12, **VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5 = {}\n",
"VAR_5['volumeName'] = VAR_10['name']\n",
"VAR_5['serverName'] = VAR_12['host']\n",
"self._cliq_run_xml('unassignVolumeToServer', VAR_5)\n"
] | [
"def terminate_connection(self, volume, connector, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"cliq_args = {}\n",
"cliq_args['volumeName'] = volume['name']\n",
"cliq_args['serverName'] = connector['host']\n",
"self._cliq_run_xml('unassignVolumeToServer', cliq_args)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_16(self, VAR_15):...\n",
"\"\"\"docstring\"\"\"\n",
"for o in self.products:\n",
"if o.match(VAR_15):\n",
"return False\n",
"return True\n"
] | [
"def is_producer(self, requested_output):...\n",
"\"\"\"docstring\"\"\"\n",
"for o in self.products:\n",
"if o.match(requested_output):\n",
"return False\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"For",
"Condition",
"Return'",
"Return'"
] |
[
"import logging\n",
"import re\n",
"from grokcore.component import Adapter, implements, baseclass\n",
"from zope.interface import Interface\n",
"from opennode.oms.model.model.symlink import follow_symlinks\n",
"__all__ = ['traverse_path', 'traverse1']\n",
"VAR_0 = logging.getLogger(__name__)\n",
"\"\"\"Adapters providing object traversal should implement this interface.\"\"\"\n",
"def FUNC_3(VAR_4):...\n",
"\"\"\"docstring\"\"\"\n",
"\"\"\"Base class for all object traversers.\"\"\"\n",
"implements(CLASS_0)\n",
"baseclass()\n",
"def FUNC_0(VAR_1, VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"if not VAR_2 or VAR_2 == '/':\n",
"return [VAR_1], []\n",
"VAR_2 = re.sub('\\\\/+', '/', VAR_2)\n",
"if VAR_2.endswith('/'):\n",
"VAR_2 = VAR_2[:-1]\n",
"if VAR_2.startswith('/'):\n",
"VAR_2 = VAR_2[1:]\n",
"VAR_2 = VAR_2.split('/')\n",
"VAR_5 = [VAR_1]\n",
"while VAR_2:\n",
"VAR_4 = VAR_2[0]\n",
"return VAR_5[1:], VAR_2\n",
"VAR_10 = CLASS_0(VAR_5[-1])\n",
"VAR_9 = follow_symlinks(VAR_10.traverse(VAR_4))\n",
"if not VAR_9:\n",
"VAR_5.append(VAR_9)\n",
"VAR_2 = VAR_2[1:]\n"
] | [
"import logging\n",
"import re\n",
"from grokcore.component import Adapter, implements, baseclass\n",
"from zope.interface import Interface\n",
"from opennode.oms.model.model.symlink import follow_symlinks\n",
"__all__ = ['traverse_path', 'traverse1']\n",
"log = logging.getLogger(__name__)\n",
"\"\"\"Adapters providing object traversal should implement this interface.\"\"\"\n",
"def traverse(name):...\n",
"\"\"\"docstring\"\"\"\n",
"\"\"\"Base class for all object traversers.\"\"\"\n",
"implements(ITraverser)\n",
"baseclass()\n",
"def traverse_path(obj, path):...\n",
"\"\"\"docstring\"\"\"\n",
"if not path or path == '/':\n",
"return [obj], []\n",
"path = re.sub('\\\\/+', '/', path)\n",
"if path.endswith('/'):\n",
"path = path[:-1]\n",
"if path.startswith('/'):\n",
"path = path[1:]\n",
"path = path.split('/')\n",
"ret = [obj]\n",
"while path:\n",
"name = path[0]\n",
"return ret[1:], path\n",
"traverser = ITraverser(ret[-1])\n",
"next_obj = follow_symlinks(traverser.traverse(name))\n",
"if not next_obj:\n",
"ret.append(next_obj)\n",
"path = path[1:]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
1,
1,
1,
1,
1,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'"
] |
[
"def FUNC_4(self, VAR_9):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5 = {}\n",
"VAR_5['volumeName'] = VAR_9\n",
"VAR_18 = self._cliq_run_xml('getVolumeInfo', VAR_5)\n",
"VAR_23 = {}\n",
"VAR_24 = VAR_18.find('response/volume')\n",
"for VAR_43, v in VAR_24.attrib.items():\n",
"VAR_23['volume.' + VAR_43] = v\n",
"VAR_25 = VAR_24.find('status')\n",
"if VAR_25 is not None:\n",
"for VAR_43, v in VAR_25.attrib.items():\n",
"VAR_26 = VAR_24.find('permission')\n",
"VAR_23['status.' + VAR_43] = v\n",
"if VAR_26 is not None:\n",
"for VAR_43, v in VAR_25.attrib.items():\n",
"VAR_0.debug(_('Volume info: %(volume_name)s => %(volume_attributes)s') % {\n 'volume_name': VAR_9, 'volume_attributes': VAR_23})\n",
"VAR_23['permission.' + VAR_43] = v\n",
"return VAR_23\n"
] | [
"def _cliq_get_volume_info(self, volume_name):...\n",
"\"\"\"docstring\"\"\"\n",
"cliq_args = {}\n",
"cliq_args['volumeName'] = volume_name\n",
"result_xml = self._cliq_run_xml('getVolumeInfo', cliq_args)\n",
"volume_attributes = {}\n",
"volume_node = result_xml.find('response/volume')\n",
"for k, v in volume_node.attrib.items():\n",
"volume_attributes['volume.' + k] = v\n",
"status_node = volume_node.find('status')\n",
"if status_node is not None:\n",
"for k, v in status_node.attrib.items():\n",
"permission_node = volume_node.find('permission')\n",
"volume_attributes['status.' + k] = v\n",
"if permission_node is not None:\n",
"for k, v in status_node.attrib.items():\n",
"LOG.debug(_('Volume info: %(volume_name)s => %(volume_attributes)s') % {\n 'volume_name': volume_name, 'volume_attributes': volume_attributes})\n",
"volume_attributes['permission.' + k] = v\n",
"return volume_attributes\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"For",
"Assign'",
"Assign'",
"Condition",
"For",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_16(VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1.value = beamr.interpreters.Text(VAR_1.value)\n",
"return VAR_1\n"
] | [
"def t_TEXT(t):...\n",
"\"\"\"docstring\"\"\"\n",
"t.value = beamr.interpreters.Text(t.value)\n",
"return t\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_21(self):...\n",
"\"\"\"docstring\"\"\"\n",
"tournament.registerPlayer(VAR_1='J!mes Dean')\n"
] | [
"def test_name_contains_symbols(self):...\n",
"\"\"\"docstring\"\"\"\n",
"tournament.registerPlayer(player_name='J!mes Dean')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"def FUNC_11(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.response.headers['Strict-Transport-Security'\n ] = 'max-age=2592000; includeSubdomains'\n",
"self.response.headers['X-Content-Type-Options'] = 'nosniff'\n",
"self.response.headers['X-Frame-Options'] = 'deny'\n"
] | [
"def _add_security_response_headers(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.response.headers['Strict-Transport-Security'\n ] = 'max-age=2592000; includeSubdomains'\n",
"self.response.headers['X-Content-Type-Options'] = 'nosniff'\n",
"self.response.headers['X-Frame-Options'] = 'deny'\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_14(self, VAR_23):...\n",
"VAR_23.insert(0, b'')\n",
"self.wz_sock.send_multipart(VAR_23)\n"
] | [
"def send_to_router(self, msg):...\n",
"msg.insert(0, b'')\n",
"self.wz_sock.send_multipart(msg)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"def FUNC_16(self, VAR_27: int):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_35 = License.get_by_id(VAR_27)\n",
"if VAR_35 in self.accepted_licenses:\n",
"return True\n",
"return False\n"
] | [
"def has_user_accepted_licence(self, license_id: int):...\n",
"\"\"\"docstring\"\"\"\n",
"image_license = License.get_by_id(license_id)\n",
"if image_license in self.accepted_licenses:\n",
"return True\n",
"return False\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_18(self):...\n",
"self._test_strtype('blob', None, 100)\n"
] | [
"def test_null_blob(self):...\n",
"self._test_strtype('blob', None, 100)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_5(VAR_3, VAR_10, VAR_2):...\n",
"if '%(key)s' in VAR_3:\n",
"VAR_3 = VAR_3.replace('%(key)s', VAR_10)\n",
"if '%s' in VAR_3:\n",
"VAR_3 = VAR_3.replace('%s', (VAR_2 or '') + '%')\n",
"return VAR_3\n"
] | [
"def scrub_custom_query(query, key, txt):...\n",
"if '%(key)s' in query:\n",
"query = query.replace('%(key)s', key)\n",
"if '%s' in query:\n",
"query = query.replace('%s', (txt or '') + '%')\n",
"return query\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_4():...\n",
"if VAR_9 in VAR_1:\n",
"VAR_1.remove(VAR_9)\n",
"print('Directory does not exist.')\n",
"if VAR_9 in VAR_2:\n",
"VAR_2.remove(VAR_9)\n"
] | [
"def rm():...\n",
"if dir in dirs:\n",
"dirs.remove(dir)\n",
"print('Directory does not exist.')\n",
"if dir in path:\n",
"path.remove(dir)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'"
] |
[
"\"\"\"\nUnit tests for the Deis api app.\n\nRun the tests with \"./manage.py test api\\\"\n\"\"\"\n",
"from __future__ import unicode_literals\n",
"import json\n",
"from django.conf import settings\n",
"from django.test import TransactionTestCase\n",
"from scheduler import chaos\n",
"\"\"\"Tests creation of containers on nodes\"\"\"\n",
"VAR_0 = ['tests.json']\n",
"def FUNC_0(self):...\n",
"self.assertTrue(self.client.login(username='autotest', password='password'))\n",
"chaos.CREATE_ERROR_RATE = 0\n",
"chaos.DESTROY_ERROR_RATE = 0\n",
"chaos.START_ERROR_RATE = 0\n",
"chaos.STOP_ERROR_RATE = 0\n",
"settings.SCHEDULER_MODULE = 'chaos'\n",
"settings.SSH_PRIVATE_KEY = '<some-ssh-private-key>'\n",
"def FUNC_1(self):...\n",
"settings.SCHEDULER_MODULE = 'mock'\n",
"settings.SSH_PRIVATE_KEY = ''\n",
"def FUNC_2(self):...\n",
"VAR_1 = '/api/apps'\n",
"VAR_2 = self.client.post(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_3 = VAR_2.data['id']\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 1)\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 0}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 204)\n",
"chaos.CREATE_ERROR_RATE = 0.5\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 20}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 503)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 20)\n",
"VAR_5 = set([c['state'] for c in VAR_2.data['results']])\n",
"self.assertEqual(VAR_5, set(['error', 'created']))\n",
"def FUNC_3(self):...\n",
"VAR_1 = '/api/apps'\n",
"VAR_2 = self.client.post(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_3 = VAR_2.data['id']\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 1)\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 0}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 204)\n",
"chaos.START_ERROR_RATE = 0.5\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 20}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 204)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 20)\n",
"VAR_5 = set([c['state'] for c in VAR_2.data['results']])\n",
"self.assertEqual(VAR_5, set(['crashed', 'up']))\n",
"def FUNC_4(self):...\n",
"VAR_1 = '/api/apps'\n",
"VAR_2 = self.client.post(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_3 = VAR_2.data['id']\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 1)\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 20}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 204)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 20)\n",
"chaos.DESTROY_ERROR_RATE = 0.5\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 0}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 503)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"VAR_5 = set([c['state'] for c in VAR_2.data['results']])\n",
"self.assertEqual(VAR_5, set(['error']))\n",
"VAR_6 = 20\n",
"for _ in range(100):\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"def FUNC_5(self):...\n",
"VAR_4 = {'web': 0}\n",
"VAR_1 = '/api/apps'\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"VAR_2 = self.client.post(VAR_1)\n",
"if VAR_2.status_code == 204:\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"self.assertEquals(VAR_2.status_code, 503)\n",
"VAR_3 = VAR_2.data['id']\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"VAR_6 = len(VAR_2.data['results'])\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 2)\n",
"VAR_1 = '/api/apps/{app_id}/releases'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 2)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 1)\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 20}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 204)\n",
"chaos.CREATE_ERROR_RATE = 0.5\n",
"chaos.START_ERROR_RATE = 0.5\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_4 = {'image': 'autotest/example', 'sha': 'b' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 503)\n",
"VAR_1 = '/api/apps/{app_id}/releases'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 2)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 20)\n",
"VAR_5 = set([c['state'] for c in VAR_2.data['results']])\n",
"self.assertEqual(VAR_5, set(['up']))\n",
"def FUNC_6(self):...\n",
"VAR_1 = '/api/apps'\n",
"VAR_2 = self.client.post(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_3 = VAR_2.data['id']\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_1 = '/api/apps/{app_id}/releases'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 2)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 1)\n",
"VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_4 = {'web': 20}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 204)\n",
"chaos.CREATE_ERROR_RATE = 0.5\n",
"chaos.START_ERROR_RATE = 0.5\n",
"VAR_1 = '/api/apps/{app_id}/config'.format(**locals())\n",
"VAR_4 = {'values': json.dumps({'NEW_URL1': 'http://localhost:8080/'})}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 503)\n",
"VAR_1 = '/api/apps/{app_id}/releases'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 2)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 20)\n",
"VAR_5 = set([c['state'] for c in VAR_2.data['results']])\n",
"self.assertEqual(VAR_5, set(['up']))\n",
"def FUNC_7(self):...\n",
"VAR_1 = '/api/apps'\n",
"VAR_2 = self.client.post(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_3 = VAR_2.data['id']\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 201)\n",
"VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 2)\n",
"VAR_1 = '/api/apps/{app_id}/releases'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 2)\n",
"VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n",
"VAR_2 = self.client.get(VAR_1)\n",
"self.assertEqual(VAR_2.status_code, 200)\n",
"self.assertEqual(len(VAR_2.data['results']), 1)\n",
"chaos.CREATE_ERROR_RATE = 1\n",
"VAR_1 = '/api/apps/{app_id}/run'.format(**locals())\n",
"VAR_4 = {'command': 'ls -al'}\n",
"VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_2.status_code, 503)\n"
] | [
"\"\"\"\nUnit tests for the Deis api app.\n\nRun the tests with \"./manage.py test api\\\"\n\"\"\"\n",
"from __future__ import unicode_literals\n",
"import json\n",
"from django.conf import settings\n",
"from django.test import TransactionTestCase\n",
"from scheduler import chaos\n",
"\"\"\"Tests creation of containers on nodes\"\"\"\n",
"fixtures = ['tests.json']\n",
"def setUp(self):...\n",
"self.assertTrue(self.client.login(username='autotest', password='password'))\n",
"chaos.CREATE_ERROR_RATE = 0\n",
"chaos.DESTROY_ERROR_RATE = 0\n",
"chaos.START_ERROR_RATE = 0\n",
"chaos.STOP_ERROR_RATE = 0\n",
"settings.SCHEDULER_MODULE = 'chaos'\n",
"settings.SSH_PRIVATE_KEY = '<some-ssh-private-key>'\n",
"def tearDown(self):...\n",
"settings.SCHEDULER_MODULE = 'mock'\n",
"settings.SSH_PRIVATE_KEY = ''\n",
"def test_create_chaos(self):...\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 0}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 204)\n",
"chaos.CREATE_ERROR_RATE = 0.5\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 20}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 503)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 20)\n",
"states = set([c['state'] for c in response.data['results']])\n",
"self.assertEqual(states, set(['error', 'created']))\n",
"def test_start_chaos(self):...\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 0}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 204)\n",
"chaos.START_ERROR_RATE = 0.5\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 20}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 204)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 20)\n",
"states = set([c['state'] for c in response.data['results']])\n",
"self.assertEqual(states, set(['crashed', 'up']))\n",
"def test_destroy_chaos(self):...\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 20}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 204)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 20)\n",
"chaos.DESTROY_ERROR_RATE = 0.5\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 0}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 503)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"states = set([c['state'] for c in response.data['results']])\n",
"self.assertEqual(states, set(['error']))\n",
"containers = 20\n",
"for _ in range(100):\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"def test_build_chaos(self):...\n",
"body = {'web': 0}\n",
"url = '/api/apps'\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"response = self.client.post(url)\n",
"if response.status_code == 204:\n",
"self.assertEqual(response.status_code, 201)\n",
"self.assertEquals(response.status_code, 503)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"response = self.client.get(url)\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"self.assertEqual(response.status_code, 200)\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"containers = len(response.data['results'])\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 2)\n",
"url = '/api/apps/{app_id}/releases'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 2)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 20}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 204)\n",
"chaos.CREATE_ERROR_RATE = 0.5\n",
"chaos.START_ERROR_RATE = 0.5\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'b' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 503)\n",
"url = '/api/apps/{app_id}/releases'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 2)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 20)\n",
"states = set([c['state'] for c in response.data['results']])\n",
"self.assertEqual(states, set(['up']))\n",
"def test_config_chaos(self):...\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/releases'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 2)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 20}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 204)\n",
"chaos.CREATE_ERROR_RATE = 0.5\n",
"chaos.START_ERROR_RATE = 0.5\n",
"url = '/api/apps/{app_id}/config'.format(**locals())\n",
"body = {'values': json.dumps({'NEW_URL1': 'http://localhost:8080/'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 503)\n",
"url = '/api/apps/{app_id}/releases'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 2)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 20)\n",
"states = set([c['state'] for c in response.data['results']])\n",
"self.assertEqual(states, set(['up']))\n",
"def test_run_chaos(self):...\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 2)\n",
"url = '/api/apps/{app_id}/releases'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 2)\n",
"url = '/api/apps/{app_id}/containers'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"chaos.CREATE_ERROR_RATE = 1\n",
"url = '/api/apps/{app_id}/run'.format(**locals())\n",
"body = {'command': 'ls -al'}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 503)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
5,
0,
0,
0,
0,
5,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Expr'",
"Assign'",
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_30(self, VAR_16):...\n",
""
] | [
"def delete_all(self, items):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_8(self):...\n",
"VAR_7 = f'/datagroup/{DataGroup.objects.first().id}/'\n",
"VAR_2 = self.client.get(VAR_7).content.decode('utf8')\n",
"VAR_8 = '<span class=\"oi oi-trash\"></span>'\n",
"self.assertIn(VAR_8, VAR_2, 'Trash button should be present if not matched.')\n",
"self.objects.doc.matched = True\n",
"self.objects.doc.save()\n",
"VAR_2 = self.client.get(VAR_7).content.decode('utf8')\n",
"VAR_8 = '<span class=\"oi oi-circle-check\" style=\"color:green;\"></span>'\n",
"self.assertIn(VAR_8, VAR_2, 'Check should be present if matched.')\n"
] | [
"def test_delete_doc_button(self):...\n",
"url = f'/datagroup/{DataGroup.objects.first().id}/'\n",
"response = self.client.get(url).content.decode('utf8')\n",
"span = '<span class=\"oi oi-trash\"></span>'\n",
"self.assertIn(span, response, 'Trash button should be present if not matched.')\n",
"self.objects.doc.matched = True\n",
"self.objects.doc.save()\n",
"response = self.client.get(url).content.decode('utf8')\n",
"span = '<span class=\"oi oi-circle-check\" style=\"color:green;\"></span>'\n",
"self.assertIn(span, response, 'Check should be present if matched.')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_0(self, VAR_1, VAR_2, VAR_3, VAR_4=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_4 is None:\n",
"VAR_4 = {}\n",
"VAR_17 = self.pool.get('product.product').browse(VAR_1, VAR_2, VAR_3, VAR_4\n =context)\n",
"VAR_18 = (VAR_17.property_stock_account_input and VAR_17.\n property_stock_account_input.id or False)\n",
"if not VAR_18:\n",
"VAR_18 = (VAR_17.categ_id.property_stock_account_input_categ and VAR_17.\n categ_id.property_stock_account_input_categ.id or False)\n",
"VAR_19 = (VAR_17.property_stock_account_output and VAR_17.\n property_stock_account_output.id or False)\n",
"if not VAR_19:\n",
"VAR_19 = (VAR_17.categ_id.property_stock_account_output_categ and VAR_17.\n categ_id.property_stock_account_output_categ.id or False)\n",
"VAR_20 = (VAR_17.categ_id.property_stock_journal and VAR_17.categ_id.\n property_stock_journal.id or False)\n",
"VAR_21 = (VAR_17.categ_id.property_stock_variation and VAR_17.categ_id.\n property_stock_variation.id or False)\n",
"return {'stock_account_input': VAR_18, 'stock_account_output': VAR_19,\n 'stock_journal': VAR_20, 'property_stock_variation': VAR_21}\n"
] | [
"def get_product_accounts(self, cr, uid, product_id, context=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if context is None:\n",
"context = {}\n",
"product_obj = self.pool.get('product.product').browse(cr, uid, product_id,\n context=context)\n",
"stock_input_acc = (product_obj.property_stock_account_input and product_obj\n .property_stock_account_input.id or False)\n",
"if not stock_input_acc:\n",
"stock_input_acc = (product_obj.categ_id.property_stock_account_input_categ and\n product_obj.categ_id.property_stock_account_input_categ.id or False)\n",
"stock_output_acc = (product_obj.property_stock_account_output and\n product_obj.property_stock_account_output.id or False)\n",
"if not stock_output_acc:\n",
"stock_output_acc = (product_obj.categ_id.\n property_stock_account_output_categ and product_obj.categ_id.\n property_stock_account_output_categ.id or False)\n",
"journal_id = (product_obj.categ_id.property_stock_journal and product_obj.\n categ_id.property_stock_journal.id or False)\n",
"account_variation = (product_obj.categ_id.property_stock_variation and\n product_obj.categ_id.property_stock_variation.id or False)\n",
"return {'stock_account_input': stock_input_acc, 'stock_account_output':\n stock_output_acc, 'stock_journal': journal_id,\n 'property_stock_variation': account_variation}\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@login_required...\n",
""
] | [
"@login_required...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"def FUNC_4(self, VAR_4):...\n",
"VAR_6 = 'SELECT COUNT(rno) FROM rides WHERE rno = {rno}'.format(VAR_4=rno)\n",
"self.cursor.execute(VAR_6)\n",
"VAR_9 = self.cursor.fetchone()\n",
"if int(VAR_9[0]) > 0:\n",
"return True\n",
"return False\n"
] | [
"def verify_rno(self, rno):...\n",
"query = 'SELECT COUNT(rno) FROM rides WHERE rno = {rno}'.format(rno=rno)\n",
"self.cursor.execute(query)\n",
"result = self.cursor.fetchone()\n",
"if int(result[0]) > 0:\n",
"return True\n",
"return False\n"
] | [
0,
4,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_3(self, VAR_4, VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_8 = self.add_outstanding_query(VAR_3)\n",
"VAR_10 = self.dump_session_cookie(VAR_8)\n",
"VAR_11 = auth_response(VAR_8, VAR_4)\n",
"VAR_24 = self.app.dispatch_request()\n",
"VAR_10 = VAR_24.headers['Set-Cookie']\n",
"return VAR_10\n"
] | [
"def login(self, eppn, came_from):...\n",
"\"\"\"docstring\"\"\"\n",
"session_id = self.add_outstanding_query(came_from)\n",
"cookie = self.dump_session_cookie(session_id)\n",
"saml_response = auth_response(session_id, eppn)\n",
"response1 = self.app.dispatch_request()\n",
"cookie = response1.headers['Set-Cookie']\n",
"return cookie\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"import datetime\n",
"import os\n",
"import os.path\n",
"import urlparse\n",
"import socket\n",
"from time import localtime, strftime, time\n",
"from requests.exceptions import RequestException, ConnectionError, Timeout\n",
"import requests\n",
"import yaml\n",
"from monitoring_config_generator.exceptions import MonitoringConfigGeneratorException, HostUnreachableException\n",
"from monitoring_config_generator.yaml_tools.merger import merge_yaml_files\n",
"def FUNC_0(VAR_0):...\n",
"return VAR_0.scheme in ['', 'file']\n"
] | [
"import datetime\n",
"import os\n",
"import os.path\n",
"import urlparse\n",
"import socket\n",
"from time import localtime, strftime, time\n",
"from requests.exceptions import RequestException, ConnectionError, Timeout\n",
"import requests\n",
"import yaml\n",
"from monitoring_config_generator.exceptions import MonitoringConfigGeneratorException, HostUnreachableException\n",
"from monitoring_config_generator.yaml_tools.merger import merge_yaml_files\n",
"def is_file(parsed_uri):...\n",
"return parsed_uri.scheme in ['', 'file']\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Return'"
] |
[
"def __getattr__(self, VAR_8):...\n",
"if self._r_item_ is VAR_0:\n",
"if not self._r_via_alternate_lookup_:\n",
"VAR_28 = CLASS_3(self)\n",
"self._r_path_.log_getattr(VAR_8, self)\n",
"return self\n",
"if self._r_is_multi_item_:\n",
"VAR_33 = []\n",
"VAR_28._r_item_ = getattr(VAR_28._r_item_, VAR_8)\n",
"VAR_28._r_item_ = VAR_0\n",
"if VAR_28._r_item_ is VAR_0 and not self._r_via_alternate_lookup_:\n",
"for i in self._r_item_:\n",
"if not self._r_via_alternate_lookup_:\n",
"self._r_via_alternate_lookup_ = True\n",
"VAR_28._r_path_.log_getattr(VAR_8, VAR_28)\n",
"if VAR_28._r_item_ is VAR_0 and VAR_28._r_raise_:\n",
"VAR_36 = None\n",
"VAR_28._r_item_ = tuple(VAR_33)\n",
"VAR_28._r_path_.log_getattr(VAR_8, VAR_28)\n",
"VAR_28 = self[VAR_8]\n",
"self._r_via_alternate_lookup_ = False\n",
"return VAR_28\n",
"VAR_36 = getattr(i, VAR_8)\n",
"if isinstance(VAR_36, (tuple, list, range)):\n",
"VAR_36 = i[VAR_8]\n",
"VAR_33 += VAR_36\n",
"if VAR_36 is not None:\n",
"VAR_33.append(VAR_36)\n"
] | [
"def __getattr__(self, attr_name):...\n",
"if self._r_item_ is MISSING:\n",
"if not self._r_via_alternate_lookup_:\n",
"copy = Roamer(self)\n",
"self._r_path_.log_getattr(attr_name, self)\n",
"return self\n",
"if self._r_is_multi_item_:\n",
"multi_items = []\n",
"copy._r_item_ = getattr(copy._r_item_, attr_name)\n",
"copy._r_item_ = MISSING\n",
"if copy._r_item_ is MISSING and not self._r_via_alternate_lookup_:\n",
"for i in self._r_item_:\n",
"if not self._r_via_alternate_lookup_:\n",
"self._r_via_alternate_lookup_ = True\n",
"copy._r_path_.log_getattr(attr_name, copy)\n",
"if copy._r_item_ is MISSING and copy._r_raise_:\n",
"lookup = None\n",
"copy._r_item_ = tuple(multi_items)\n",
"copy._r_path_.log_getattr(attr_name, copy)\n",
"copy = self[attr_name]\n",
"self._r_via_alternate_lookup_ = False\n",
"return copy\n",
"lookup = getattr(i, attr_name)\n",
"if isinstance(lookup, (tuple, list, range)):\n",
"lookup = i[attr_name]\n",
"multi_items += lookup\n",
"if lookup is not None:\n",
"multi_items.append(lookup)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"For",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Assign'",
"AugAssign'",
"Condition",
"Expr'"
] |
[
"@Endpoint('GET', '/clusters')...\n",
"return Ganesha.get_ganesha_clusters()\n"
] | [
"@Endpoint('GET', '/clusters')...\n",
"return Ganesha.get_ganesha_clusters()\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"@VAR_5.setter...\n",
"self._benchmark = IOFile(VAR_5, VAR_16=self)\n"
] | [
"@benchmark.setter...\n",
"self._benchmark = IOFile(benchmark, rule=self)\n"
] | [
0,
0
] | [
"Condition",
"Assign'"
] |
[
"def FUNC_12(self, VAR_16):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_28 = ['volume', 'create', VAR_16['name'], '%sG' % VAR_16['size']]\n",
"VAR_0.error(_('Failed to create volume %s'), VAR_16['name'])\n",
"if self.configuration.eqlx_pool != 'default':\n",
"VAR_28.append('pool')\n",
"if self.configuration.san_thin_provision:\n",
"VAR_28.append(self.configuration.eqlx_pool)\n",
"VAR_28.append('thin-provision')\n",
"VAR_25 = self._eql_execute(*VAR_28)\n",
"return self._get_volume_data(VAR_25)\n"
] | [
"def create_volume(self, volume):...\n",
"\"\"\"docstring\"\"\"\n",
"cmd = ['volume', 'create', volume['name'], '%sG' % volume['size']]\n",
"LOG.error(_('Failed to create volume %s'), volume['name'])\n",
"if self.configuration.eqlx_pool != 'default':\n",
"cmd.append('pool')\n",
"if self.configuration.san_thin_provision:\n",
"cmd.append(self.configuration.eqlx_pool)\n",
"cmd.append('thin-provision')\n",
"out = self._eql_execute(*cmd)\n",
"return self._get_volume_data(out)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Return'"
] |
[
"@integration_synonym_api...\n",
"FUNC_2(VAR_2)\n",
"FUNC_3(VAR_2, 'KRYSTAL', VAR_7='1')\n",
"FUNC_5(VAR_4, VAR_5, VAR_11='CHRISTAL', VAR_10=[{'name': '----CHRISTAL'}, {\n 'name': 'KRYSTAL'}])\n"
] | [
"@integration_synonym_api...\n",
"clean_database(solr)\n",
"seed_database_with(solr, 'KRYSTAL', id='1')\n",
"verify_results(client, jwt, query='CHRISTAL', expected=[{'name':\n '----CHRISTAL'}, {'name': 'KRYSTAL'}])\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_17(self, VAR_3, VAR_4, VAR_12=True, VAR_11=None):...\n",
"\"\"\"docstring\"\"\"\n",
"return self.__make_proxy('make_target_proxy', VAR_3, (VAR_4,), dict(VAR_12=\n manage_back_references, VAR_11=options))\n"
] | [
"def make_target_proxy(self, data, accessor, manage_back_references=True,...\n",
"\"\"\"docstring\"\"\"\n",
"return self.__make_proxy('make_target_proxy', data, (accessor,), dict(\n manage_back_references=manage_back_references, options=options))\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_13(self, VAR_12, VAR_13, VAR_14=None):...\n",
"\"\"\"docstring\"\"\"\n",
"assert VAR_14 is None or (VAR_13 is None or VAR_14 >= VAR_23(VAR_13))\n",
"if VAR_14:\n",
"VAR_37 = 'create table t1(s %s(%s))' % (VAR_12, VAR_14)\n",
"VAR_37 = 'create table t1(s %s)' % VAR_12\n",
"self.cursor.execute(VAR_37)\n",
"self.cursor.execute('insert into t1 values(?)', VAR_13)\n",
"VAR_4 = self.cursor.execute('select * from t1').fetchone()[0]\n",
"self.assertEqual(type(VAR_4), type(VAR_13))\n",
"if VAR_13 is not None:\n",
"self.assertEqual(VAR_23(VAR_4), VAR_23(VAR_13))\n",
"self.assertEqual(VAR_4, VAR_13)\n"
] | [
"def _test_strliketype(self, sqltype, value, colsize=None):...\n",
"\"\"\"docstring\"\"\"\n",
"assert colsize is None or (value is None or colsize >= len(value))\n",
"if colsize:\n",
"sql = 'create table t1(s %s(%s))' % (sqltype, colsize)\n",
"sql = 'create table t1(s %s)' % sqltype\n",
"self.cursor.execute(sql)\n",
"self.cursor.execute('insert into t1 values(?)', value)\n",
"v = self.cursor.execute('select * from t1').fetchone()[0]\n",
"self.assertEqual(type(v), type(value))\n",
"if value is not None:\n",
"self.assertEqual(len(v), len(value))\n",
"self.assertEqual(v, value)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assert'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def __init__(self, VAR_7=None, VAR_8=None, VAR_9=True, VAR_10=True, VAR_11=True...\n",
"if VAR_7:\n",
"self.param = VAR_7\n",
"self.param = self.default_param\n",
"self.default = VAR_8\n",
"self.post, self.get, self.url = VAR_9, VAR_10, VAR_11\n"
] | [
"def __init__(self, param=None, default=None, post=True, get=True, url=True):...\n",
"if param:\n",
"self.param = param\n",
"self.param = self.default_param\n",
"self.default = default\n",
"self.post, self.get, self.url = post, get, url\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_7(self):...\n",
"self.container.destroy()\n"
] | [
"def destroy_container(self):...\n",
"self.container.destroy()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"@rest_utils.ajax()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = VAR_1.GET.get('detailed') == 'true'\n",
"VAR_10 = api.nova.availability_zone_list(VAR_1, VAR_11)\n",
"return {'items': [u.to_dict() for u in VAR_10]}\n"
] | [
"@rest_utils.ajax()...\n",
"\"\"\"docstring\"\"\"\n",
"detailed = request.GET.get('detailed') == 'true'\n",
"result = api.nova.availability_zone_list(request, detailed)\n",
"return {'items': [u.to_dict() for u in result]}\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_26(VAR_20=None):...\n",
"return json.loads(VAR_7.get('/request', VAR_20=headers).body)\n"
] | [
"def call(headers=None):...\n",
"return json.loads(app.get('/request', headers=headers).body)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_23(self, VAR_21, VAR_27, VAR_28):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_57 = self.getfile(VAR_21)\n",
"if VAR_57 == False:\n",
"if VAR_27 != -1:\n",
"VAR_57[VAR_3] = VAR_27\n",
"if VAR_28 != -1:\n",
"VAR_57[VAR_4] = VAR_28\n"
] | [
"def chown(self, path, uid, gid):...\n",
"\"\"\"docstring\"\"\"\n",
"p = self.getfile(path)\n",
"if p == False:\n",
"if uid != -1:\n",
"p[A_UID] = uid\n",
"if gid != -1:\n",
"p[A_GID] = gid\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'"
] |
[
"\"\"\"string\"\"\"\n",
"import re\n",
"from lib.core.data import conf\n",
"from lib.core.settings import MATCH_RATIO\n",
"def FUNC_0(VAR_0, VAR_1=None, VAR_2=False):...\n",
"VAR_3 = None\n",
"if conf.eString and conf.eString in VAR_0:\n",
"VAR_4 = VAR_0.index(conf.eString)\n",
"if conf.eRegexp:\n",
"VAR_5 = len(conf.eString)\n",
"VAR_3 = re.findall(conf.eRegexp, VAR_0, re.I | re.M)\n",
"if conf.string:\n",
"VAR_6 = VAR_0[:VAR_4]\n",
"if VAR_3:\n",
"if conf.string in VAR_0:\n",
"if conf.regexp:\n",
"VAR_6 += VAR_0[VAR_4 + VAR_5:]\n",
"for regExpResult in VAR_3:\n",
"return True\n",
"return False\n",
"if re.search(conf.regexp, VAR_0, re.I | re.M):\n",
"conf.seqMatcher.set_seq2(VAR_0)\n",
"VAR_0 = VAR_6\n",
"VAR_4 = VAR_0.index(regExpResult)\n",
"return True\n",
"return False\n",
"if VAR_2:\n",
"VAR_5 = len(regExpResult)\n",
"return round(conf.seqMatcher.ratio(), 5)\n",
"if round(conf.seqMatcher.ratio(), 5) >= MATCH_RATIO:\n",
"VAR_7 = VAR_0[:VAR_4]\n",
"return True\n",
"return False\n",
"VAR_7 += VAR_0[VAR_4 + VAR_5:]\n",
"VAR_0 = VAR_7\n"
] | [
"\"\"\"\n$Id$\n\nThis file is part of the sqlmap project, http://sqlmap.sourceforge.net.\n\nCopyright (c) 2006-2008 Bernardo Damele A. G. <[email protected]>\n and Daniele Bellucci <[email protected]>\n\nsqlmap is free software; you can redistribute it and/or modify it under\nthe terms of the GNU General Public License as published by the Free\nSoftware Foundation version 2 of the License.\n\nsqlmap is distributed in the hope that it will be useful, but WITHOUT ANY\nWARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS\nFOR A PARTICULAR PURPOSE. See the GNU General Public License for more\ndetails.\n\nYou should have received a copy of the GNU General Public License along\nwith sqlmap; if not, write to the Free Software Foundation, Inc., 51\nFranklin St, Fifth Floor, Boston, MA 02110-1301 USA\n\"\"\"\n",
"import re\n",
"from lib.core.data import conf\n",
"from lib.core.settings import MATCH_RATIO\n",
"def comparison(page, headers=None, getSeqMatcher=False):...\n",
"regExpResults = None\n",
"if conf.eString and conf.eString in page:\n",
"index = page.index(conf.eString)\n",
"if conf.eRegexp:\n",
"length = len(conf.eString)\n",
"regExpResults = re.findall(conf.eRegexp, page, re.I | re.M)\n",
"if conf.string:\n",
"pageWithoutString = page[:index]\n",
"if regExpResults:\n",
"if conf.string in page:\n",
"if conf.regexp:\n",
"pageWithoutString += page[index + length:]\n",
"for regExpResult in regExpResults:\n",
"return True\n",
"return False\n",
"if re.search(conf.regexp, page, re.I | re.M):\n",
"conf.seqMatcher.set_seq2(page)\n",
"page = pageWithoutString\n",
"index = page.index(regExpResult)\n",
"return True\n",
"return False\n",
"if getSeqMatcher:\n",
"length = len(regExpResult)\n",
"return round(conf.seqMatcher.ratio(), 5)\n",
"if round(conf.seqMatcher.ratio(), 5) >= MATCH_RATIO:\n",
"pageWithoutRegExp = page[:index]\n",
"return True\n",
"return False\n",
"pageWithoutRegExp += page[index + length:]\n",
"page = pageWithoutRegExp\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
2,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Condition",
"AugAssign'",
"For",
"Return'",
"Return'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Return'",
"Return'",
"Condition",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Return'",
"Return'",
"AugAssign'",
"Assign'"
] |
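
The sqlmap comparison() record above decides whether two responses are "the same page" by feeding the candidate into conf.seqMatcher and thresholding the score against MATCH_RATIO; in sqlmap of this era conf.seqMatcher appears to be a difflib.SequenceMatcher primed with a reference page. A self-contained illustration of that mechanism (the threshold value is assumed; the real one lives in lib.core.settings):

    import difflib

    MATCH_RATIO = 0.9  # assumed threshold for illustration

    matcher = difflib.SequenceMatcher(None, 'reference response body')
    matcher.set_seq2('candidate response body')   # mirrors conf.seqMatcher.set_seq2(page)
    same_page = round(matcher.ratio(), 5) >= MATCH_RATIO
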
[
"def FUNC_4(self):...\n",
"return {'email': 'unauthenticated user', 'first_name': 'unauthenticated',\n 'last_name': 'user'}\n"
] | [
"def current_user(self):...\n",
"return {'email': 'unauthenticated user', 'first_name': 'unauthenticated',\n 'last_name': 'user'}\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_13(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = 2\n",
"VAR_10 = 'string'.format(repo_dir=REPO_DIR, shard_str='/shard_' + self.\n shard if self.shard else '', procs=process_count)\n",
"VAR_7 = BokChoyTestSuite('', num_processes=process_count)\n",
"self.assertEqual(BokChoyTestSuite.verbosity_processes_string(VAR_7), VAR_10)\n"
] | [
"def test_verbosity_settings_2_processes(self):...\n",
"\"\"\"docstring\"\"\"\n",
"process_count = 2\n",
"expected_verbosity_string = (\n '--with-xunitmp --xunitmp-file={repo_dir}/reports/bok_choy{shard_str}/xunit.xml --processes={procs} --no-color --process-timeout=1200'\n .format(repo_dir=REPO_DIR, shard_str='/shard_' + self.shard if self.\n shard else '', procs=process_count))\n",
"suite = BokChoyTestSuite('', num_processes=process_count)\n",
"self.assertEqual(BokChoyTestSuite.verbosity_processes_string(suite),\n expected_verbosity_string)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"import psycopg2\n",
"def FUNC_0(VAR_0, VAR_1, VAR_2):...\n",
"VAR_3 = psycopg2.connect(database=dbname, user=uname, password=psw, host=\n '127.0.0.1', port='5432')\n",
"return VAR_3\n"
] | [
"import psycopg2\n",
"def connectDB(dbname, uname, psw):...\n",
"conn = psycopg2.connect(database=dbname, user=uname, password=psw, host=\n '127.0.0.1', port='5432')\n",
"return conn\n"
] | [
0,
0,
0,
0
] | [
"Import'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_7():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3 = FUNC_0()\n",
"VAR_4 = VAR_3.cursor()\n",
"VAR_5 = 'players'\n",
"VAR_4.execute('string' % (VAR_5,))\n",
"VAR_7 = VAR_4.fetchall()\n",
"VAR_8 = list()\n",
"if VAR_7 % 2 != 0:\n",
"return None\n",
"for i in range(0, len(VAR_7), 2):\n",
"VAR_9 = VAR_7[i:i + 2]\n",
"return VAR_8\n",
"VAR_10 = VAR_9[0]\n",
"VAR_11 = VAR_9[1]\n",
"VAR_8.append((VAR_10[0], VAR_10[1], VAR_11[0], VAR_11[1]))\n"
] | [
"def swissPairings():...\n",
"\"\"\"docstring\"\"\"\n",
"conn = connect()\n",
"c = conn.cursor()\n",
"table = 'players'\n",
"c.execute(\n \"\"\"SELECT playerID, \n playerName FROM %s ORDER BY wins DESC;\"\"\"\n % (table,))\n",
"result = c.fetchall()\n",
"pairings = list()\n",
"if result % 2 != 0:\n",
"return None\n",
"for i in range(0, len(result), 2):\n",
"tmpList = result[i:i + 2]\n",
"return pairings\n",
"temp1 = tmpList[0]\n",
"temp2 = tmpList[1]\n",
"pairings.append((temp1[0], temp1[1], temp2[0], temp2[1]))\n"
] | [
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"For",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Expr'"
] |
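
Two things stand out in the swissPairings record above: result % 2 != 0 applies the modulo to the fetched list rather than len(result), and the table name is spliced into the query with '%s' string formatting (the labeled line). Bound parameters cannot carry identifiers, so a hedged rewrite with psycopg2 (the same driver the connectDB record above uses) composes the identifier explicitly:

    from psycopg2 import sql

    def fetch_standings(conn, table='players'):
        c = conn.cursor()
        # sql.Identifier quotes the table name safely; literal values, if
        # any, would still go through execute()'s parameter tuple.
        c.execute(sql.SQL('SELECT playerID, playerName FROM {} ORDER BY wins DESC')
                  .format(sql.Identifier(table)))
        return c.fetchall()
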
[
"def FUNC_9(self, VAR_12):...\n",
"print('Test type: %s' % self.__class__.__name__, file=fd)\n",
"print('Execution start time: %s' % datetime.datetime.fromtimestamp(self.\n start_time).strftime('%d/%m/%Y %H:%M:%S.%f'), file=fd)\n",
"print('Execution stop time: %s' % datetime.datetime.fromtimestamp(self.\n stop_time).strftime('%d/%m/%Y %H:%M:%S.%f'), file=fd)\n",
"print('Duration: %f seconds' % self.duration, file=fd)\n",
"print('Outcome: %s' % self.outcome, file=fd)\n",
"VAR_12.write(self.specific_info())\n",
"if self.exception_data is not None:\n",
"print('', file=fd)\n",
"print('EXCEPTION CASTED', file=fd)\n",
"VAR_12.write(unicode(self.exception_data))\n"
] | [
"def store_to_file(self, fd):...\n",
"print('Test type: %s' % self.__class__.__name__, file=fd)\n",
"print('Execution start time: %s' % datetime.datetime.fromtimestamp(self.\n start_time).strftime('%d/%m/%Y %H:%M:%S.%f'), file=fd)\n",
"print('Execution stop time: %s' % datetime.datetime.fromtimestamp(self.\n stop_time).strftime('%d/%m/%Y %H:%M:%S.%f'), file=fd)\n",
"print('Duration: %f seconds' % self.duration, file=fd)\n",
"print('Outcome: %s' % self.outcome, file=fd)\n",
"fd.write(self.specific_info())\n",
"if self.exception_data is not None:\n",
"print('', file=fd)\n",
"print('EXCEPTION CASTED', file=fd)\n",
"fd.write(unicode(self.exception_data))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@VAR_1.route('/callback/<provider>')...\n",
"VAR_15 = OAuthSignIn.get_provider(VAR_7)\n",
"VAR_16, VAR_17, VAR_12 = VAR_15.callback()\n",
"if VAR_16 is None:\n",
"flash('Authentication failed.')\n",
"VAR_13 = query_social_user(VAR_16)\n",
"return redirect(url_for('login'))\n",
"VAR_18['social'] = VAR_16\n",
"if VAR_13 is None:\n",
"insert_social_user(VAR_16)\n",
"return redirect('/')\n"
] | [
"@app.route('/callback/<provider>')...\n",
"oauth = OAuthSignIn.get_provider(provider)\n",
"social, username, email = oauth.callback()\n",
"if social is None:\n",
"flash('Authentication failed.')\n",
"user = query_social_user(social)\n",
"return redirect(url_for('login'))\n",
"session['social'] = social\n",
"if user is None:\n",
"insert_social_user(social)\n",
"return redirect('/')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Expr'",
"Return'"
] |
[
"def __getitem__(self, VAR_44):...\n",
""
] | [
"def __getitem__(self, key):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_17(VAR_22, VAR_5=False):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_59 = {}\n",
"if VAR_22:\n",
"for VAR_96, VAR_63 in VAR_22.items():\n",
"return VAR_59\n",
"VAR_32 = VAR_63[0]\n",
"VAR_33 = VAR_63[1]\n",
"VAR_92 = []\n",
"for VAR_94, spans in VAR_33.items():\n",
"VAR_92.append(VAR_94.output(VAR_5))\n",
"for VAR_93, spans in VAR_32.items():\n",
"VAR_92.append(VAR_93.output(VAR_5))\n",
"if VAR_92:\n",
"VAR_59[VAR_96] = VAR_92\n",
"VAR_59[VAR_96] = 0\n"
] | [
"def _get_author_keywords(author_keywords, spires=False):...\n",
"\"\"\"docstring\"\"\"\n",
"out = {}\n",
"if author_keywords:\n",
"for keyword, matches in author_keywords.items():\n",
"return out\n",
"skw_matches = matches[0]\n",
"ckw_matches = matches[1]\n",
"matches_str = []\n",
"for ckw, spans in ckw_matches.items():\n",
"matches_str.append(ckw.output(spires))\n",
"for skw, spans in skw_matches.items():\n",
"matches_str.append(skw.output(spires))\n",
"if matches_str:\n",
"out[keyword] = matches_str\n",
"out[keyword] = 0\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"For",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Expr'",
"For",
"Expr'",
"Condition",
"Assign'",
"Assign'"
] |
[
"def __init__(self, VAR_7=None, VAR_8=None, VAR_9=True, VAR_10=True, VAR_11=True...\n",
"if VAR_7:\n",
"self.param = VAR_7\n",
"self.param = self.default_param\n",
"self.default = VAR_8\n",
"self.post, self.get, self.url = VAR_9, VAR_10, VAR_11\n"
] | [
"def __init__(self, param=None, default=None, post=True, get=True, url=True):...\n",
"if param:\n",
"self.param = param\n",
"self.param = self.default_param\n",
"self.default = default\n",
"self.post, self.get, self.url = post, get, url\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_11(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if len(self.tables) > 1:\n",
"for VAR_74, VAR_34 in enumerate(self.fields):\n",
"if '.' not in VAR_34:\n",
"self.fields[VAR_74] = '{0}.{1}'.format(self.tables[0], VAR_34)\n"
] | [
"def set_field_tables(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if len(self.tables) > 1:\n",
"for i, f in enumerate(self.fields):\n",
"if '.' not in f:\n",
"self.fields[i] = '{0}.{1}'.format(self.tables[0], f)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"For",
"Condition",
"Assign'"
] |
[
"import os\n",
"import os.path\n",
"import json\n",
"import sys\n",
"import yaml\n",
"def FUNC_0(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = '.yml'\n",
"VAR_3 = os.path.basename(VAR_0)\n",
"if os.path.isdir(VAR_0):\n",
"VAR_4 = {}\n",
"if os.path.isfile(VAR_0):\n",
"for entry in os.listdir(VAR_0):\n",
"if os.path.abspath(VAR_0) == os.path.abspath(sys.argv[0]):\n",
"def FUNC_1(VAR_1):...\n",
"VAR_5 = os.path.join(VAR_0, entry)\n",
"return VAR_3, VAR_4\n",
"return None, None\n",
"if VAR_0.endswith(VAR_2):\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6, VAR_7 = FUNC_0(VAR_5)\n",
"VAR_3 = VAR_3[:-len(VAR_2)]\n",
"return None, None\n",
"if os.path.exists(VAR_1):\n",
"if not VAR_6:\n",
"return VAR_3, yaml.load(open(VAR_0))\n",
"return VAR_3, None\n",
"return FUNC_0(VAR_1)[1]\n",
"return {}\n",
"VAR_4[VAR_6] = VAR_7\n"
] | [
"import os\n",
"import os.path\n",
"import json\n",
"import sys\n",
"import yaml\n",
"def _load_yml_filedir(path):...\n",
"\"\"\"docstring\"\"\"\n",
"YML_FILE_SUFFIX = '.yml'\n",
"bpath = os.path.basename(path)\n",
"if os.path.isdir(path):\n",
"result = {}\n",
"if os.path.isfile(path):\n",
"for entry in os.listdir(path):\n",
"if os.path.abspath(path) == os.path.abspath(sys.argv[0]):\n",
"def load_yml_filedir(root_dir):...\n",
"epath = os.path.join(path, entry)\n",
"return bpath, result\n",
"return None, None\n",
"if path.endswith(YML_FILE_SUFFIX):\n",
"\"\"\"docstring\"\"\"\n",
"key, value = _load_yml_filedir(epath)\n",
"bpath = bpath[:-len(YML_FILE_SUFFIX)]\n",
"return None, None\n",
"if os.path.exists(root_dir):\n",
"if not key:\n",
"return bpath, yaml.load(open(path))\n",
"return bpath, None\n",
"return _load_yml_filedir(root_dir)[1]\n",
"return {}\n",
"result[key] = value\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
1,
1,
0
] | [
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"For",
"Condition",
"FunctionDef'",
"Assign'",
"Return'",
"Return'",
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Return'",
"Condition",
"Condition",
"Return'",
"Return'",
"Return'",
"Return'",
"Assign'"
] |
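
The nonzero labels in this record fall on the load_yml_filedir wrapper; whichever change they track, the underlying file branch calls yaml.load(open(path)) with no Loader argument, and on untrusted .yml input that constructor can instantiate arbitrary Python objects. A minimal sketch of the safe variant, assuming plain-data YAML is all that is needed:

    import yaml

    def load_yml_file(path):
        with open(path) as fh:
            # safe_load restricts the document to standard YAML tags, unlike
            # bare yaml.load, and the with-block also closes the handle the
            # original leaks.
            return yaml.safe_load(fh)
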
[
"def __init__(self, *VAR_3, **VAR_4):...\n",
"VAR_11 = VAR_4.pop('execute', self.san_execute)\n",
"super(CLASS_0, self).__init__(*VAR_3, VAR_11=execute, **kwargs)\n",
"self.configuration.append_config_values(VAR_1)\n",
"self.run_local = self.configuration.san_is_local\n",
"self.sshpool = None\n"
] | [
"def __init__(self, *args, **kwargs):...\n",
"execute = kwargs.pop('execute', self.san_execute)\n",
"super(SanDriver, self).__init__(*args, execute=execute, **kwargs)\n",
"self.configuration.append_config_values(san_opts)\n",
"self.run_local = self.configuration.san_is_local\n",
"self.sshpool = None\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'"
] |
[
"def FUNC_1(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return FUNC_0(self.request, self.request.user)\n"
] | [
"def get_redirect_url(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return find_nextlocation(self.request, self.request.user)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"VAR_1 = list(self.uut.process_output(['1.0|0: Info message\\n',\n '2.2|1: Normal message\\n', \"\"\"3.4|2: Major message\n\"\"\"], 'a/file.py', [\n 'original_file_lines_placeholder']))\n",
"self.assertEqual(len(VAR_1), 3)\n",
"self.assertEqual(VAR_1[0].origin, 'Lint')\n",
"self.assertEqual(VAR_1[0].affected_code[0], SourceRange.from_values(\n 'a/file.py', 1, 0))\n",
"self.assertEqual(VAR_1[0].severity, RESULT_SEVERITY.INFO)\n",
"self.assertEqual(VAR_1[0].message, 'Info message')\n",
"self.assertEqual(VAR_1[1].affected_code[0], SourceRange.from_values(\n 'a/file.py', 2, 2))\n",
"self.assertEqual(VAR_1[1].severity, RESULT_SEVERITY.NORMAL)\n",
"self.assertEqual(VAR_1[1].message, 'Normal message')\n",
"self.assertEqual(VAR_1[2].affected_code[0], SourceRange.from_values(\n 'a/file.py', 3, 4))\n",
"self.assertEqual(VAR_1[2].severity, RESULT_SEVERITY.MAJOR)\n",
"self.assertEqual(VAR_1[2].message, 'Major message')\n"
] | [
"def test_invalid_output(self):...\n",
"out = list(self.uut.process_output(['1.0|0: Info message\\n',\n '2.2|1: Normal message\\n', \"\"\"3.4|2: Major message\n\"\"\"], 'a/file.py', [\n 'original_file_lines_placeholder']))\n",
"self.assertEqual(len(out), 3)\n",
"self.assertEqual(out[0].origin, 'Lint')\n",
"self.assertEqual(out[0].affected_code[0], SourceRange.from_values(\n 'a/file.py', 1, 0))\n",
"self.assertEqual(out[0].severity, RESULT_SEVERITY.INFO)\n",
"self.assertEqual(out[0].message, 'Info message')\n",
"self.assertEqual(out[1].affected_code[0], SourceRange.from_values(\n 'a/file.py', 2, 2))\n",
"self.assertEqual(out[1].severity, RESULT_SEVERITY.NORMAL)\n",
"self.assertEqual(out[1].message, 'Normal message')\n",
"self.assertEqual(out[2].affected_code[0], SourceRange.from_values(\n 'a/file.py', 3, 4))\n",
"self.assertEqual(out[2].severity, RESULT_SEVERITY.MAJOR)\n",
"self.assertEqual(out[2].message, 'Major message')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_2():...\n",
"VAR_16 = InputMail()\n",
"VAR_16.fdoc = CLASS_0({})\n",
"VAR_16._chash = '123'\n",
"VAR_16.as_dict = lambda : None\n",
"return VAR_16\n"
] | [
"def input_mail():...\n",
"mail = InputMail()\n",
"mail.fdoc = TestDoc({})\n",
"mail._chash = '123'\n",
"mail.as_dict = lambda : None\n",
"return mail\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"from django.db import models, migrations\n",
"from django.conf import settings\n",
"VAR_0 = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]\n",
"VAR_1 = [migrations.CreateModel(name='StudentGroup', fields=[('id', models.\n AutoField(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)), ('name', models.CharField(unique=True, max_length=\n 32)), ('description', models.CharField(max_length=256)), (\n 'member_limit', models.PositiveIntegerField()), ('is_public', models.\n BooleanField(default=False)), ('invitation_key', models.CharField(\n max_length=10, blank=True))], options={'ordering': ['name']}, bases=(\n models.Model,)), migrations.CreateModel(name='UserProfile', fields=[(\n 'id', models.AutoField(verbose_name='ID', serialize=False, auto_created\n =True, primary_key=True)), ('lang', models.CharField(default=b'en_US',\n max_length=5)), ('student_id', models.CharField(max_length=25, null=\n True, blank=True)), ('user', models.OneToOneField(to=settings.\n AUTH_USER_MODEL))], options={'ordering': ['id']}, bases=(models.Model,)\n ), migrations.AddField(model_name='studentgroup', name='members', field\n =models.ManyToManyField(related_name='groups', to=\n 'userprofile.UserProfile'), preserve_default=True)]\n"
] | [
"from django.db import models, migrations\n",
"from django.conf import settings\n",
"dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]\n",
"operations = [migrations.CreateModel(name='StudentGroup', fields=[('id',\n models.AutoField(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)), ('name', models.CharField(unique=True, max_length=\n 32)), ('description', models.CharField(max_length=256)), (\n 'member_limit', models.PositiveIntegerField()), ('is_public', models.\n BooleanField(default=False)), ('invitation_key', models.CharField(\n max_length=10, blank=True))], options={'ordering': ['name']}, bases=(\n models.Model,)), migrations.CreateModel(name='UserProfile', fields=[(\n 'id', models.AutoField(verbose_name='ID', serialize=False, auto_created\n =True, primary_key=True)), ('lang', models.CharField(default=b'en_US',\n max_length=5)), ('student_id', models.CharField(max_length=25, null=\n True, blank=True)), ('user', models.OneToOneField(to=settings.\n AUTH_USER_MODEL))], options={'ordering': ['id']}, bases=(models.Model,)\n ), migrations.AddField(model_name='studentgroup', name='members', field\n =models.ManyToManyField(related_name='groups', to=\n 'userprofile.UserProfile'), preserve_default=True)]\n"
] | [
0,
0,
0,
4
] | [
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'"
] |
[
"def FUNC_1(VAR_2, VAR_3):...\n",
"VAR_16 = {}\n",
"for VAR_42, VAR_45 in VAR_2.items():\n",
"if VAR_42 not in VAR_3 and VAR_45:\n",
"return VAR_16\n",
"VAR_16[VAR_94(VAR_42)] = VAR_45\n"
] | [
"def strip_parameters(request_dict, skip_parameters):...\n",
"parameters = {}\n",
"for key, value in request_dict.items():\n",
"if key not in skip_parameters and value:\n",
"return parameters\n",
"parameters[str(key)] = value\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Condition",
"Return'",
"Assign'"
] |
[
"def FUNC_0(self):...\n",
"VAR_8 = []\n",
"for VAR_5 in self.groups:\n",
"VAR_8.append(VAR_5.serialize())\n",
"return dict(VAR_2=self.name, vars=self.vars.copy(), address=self.address,\n uuid=self._uuid, VAR_8=groups, implicit=self.implicit)\n"
] | [
"def serialize(self):...\n",
"groups = []\n",
"for group in self.groups:\n",
"groups.append(group.serialize())\n",
"return dict(name=self.name, vars=self.vars.copy(), address=self.address,\n uuid=self._uuid, groups=groups, implicit=self.implicit)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Expr'",
"Return'"
] |
[
"@VAR_2.patch('core.common.utils.c_onboarding_status')...\n",
"self.assertEqual(get_onboarding_setting(VAR_3), VAR_4)\n"
] | [
"@mock.patch('core.common.utils.c_onboarding_status')...\n",
"self.assertEqual(get_onboarding_setting(setting_name), value)\n"
] | [
0,
0
] | [
"Condition",
"Expr'"
] |
[
"def FUNC_4(self):...\n",
"VAR_1 = 'http://my.url.com'\n",
"VAR_2 = 'True'\n",
"url_helper.urllib2.urlopen(mox.StrContains(VAR_1), timeout=mox.IgnoreArg()\n ).AndReturn(StringIO.StringIO(VAR_2))\n",
"self._mox.ReplayAll()\n",
"self.assertEqual(url_helper.UrlOpen(VAR_1, method='GET'), VAR_2)\n",
"self._mox.VerifyAll()\n"
] | [
"def testUrlOpenGETSuccess(self):...\n",
"url = 'http://my.url.com'\n",
"response = 'True'\n",
"url_helper.urllib2.urlopen(mox.StrContains(url), timeout=mox.IgnoreArg()\n ).AndReturn(StringIO.StringIO(response))\n",
"self._mox.ReplayAll()\n",
"self.assertEqual(url_helper.UrlOpen(url, method='GET'), response)\n",
"self._mox.VerifyAll()\n"
] | [
0,
0,
0,
5,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_18(self, VAR_10, VAR_1=None, VAR_65=None):...\n",
"VAR_100 = CLASS_2(self, VAR_10, VAR_1, VAR_65)\n",
"self._subworkflows[VAR_10] = VAR_100\n",
"self.globals[VAR_10] = VAR_100.target\n"
] | [
"def subworkflow(self, name, snakefile=None, workdir=None):...\n",
"sw = Subworkflow(self, name, snakefile, workdir)\n",
"self._subworkflows[name] = sw\n",
"self.globals[name] = sw.target\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"\"\"\"\nUnit tests for the Deis api app.\n\nRun the tests with \"./manage.py test api\\\"\n\"\"\"\n",
"from __future__ import unicode_literals\n",
"import json\n",
"import mock\n",
"import requests\n",
"from django.contrib.auth.models import User\n",
"from django.test import TransactionTestCase\n",
"from django_fsm import TransitionNotAllowed\n",
"from api.models import Container, App\n",
"def FUNC_0(*VAR_0, **VAR_1):...\n",
"VAR_2 = requests.Response()\n",
"VAR_2.status_code = 200\n",
"VAR_2._content_consumed = True\n",
"return VAR_2\n"
] | [
"\"\"\"\nUnit tests for the Deis api app.\n\nRun the tests with \"./manage.py test api\\\"\n\"\"\"\n",
"from __future__ import unicode_literals\n",
"import json\n",
"import mock\n",
"import requests\n",
"from django.contrib.auth.models import User\n",
"from django.test import TransactionTestCase\n",
"from django_fsm import TransitionNotAllowed\n",
"from api.models import Container, App\n",
"def mock_import_repository_task(*args, **kwargs):...\n",
"resp = requests.Response()\n",
"resp.status_code = 200\n",
"resp._content_consumed = True\n",
"return resp\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_13(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"VAR_9 = '10.1.2.3'\n",
"for feature in ('confirmation', 'paging', 'events', 'formatoutput'):\n",
"self.driver._eql_execute('cli-settings', feature, 'off')\n",
"self.driver._eql_execute('grpparams', 'show').AndReturn([\n 'Group-Ipaddress: %s' % VAR_9])\n",
"self.mox.ReplayAll()\n",
"self.driver.do_setup(self._context)\n",
"self.assertEqual(VAR_9, self.driver._group_ip)\n"
] | [
"def test_do_setup(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"fake_group_ip = '10.1.2.3'\n",
"for feature in ('confirmation', 'paging', 'events', 'formatoutput'):\n",
"self.driver._eql_execute('cli-settings', feature, 'off')\n",
"self.driver._eql_execute('grpparams', 'show').AndReturn([\n 'Group-Ipaddress: %s' % fake_group_ip])\n",
"self.mox.ReplayAll()\n",
"self.driver.do_setup(self._context)\n",
"self.assertEqual(fake_group_ip, self.driver._group_ip)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_1(VAR_8, VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_8 = os.path.abspath(VAR_8)\n",
"VAR_12 = FUNC_0(VAR_2, VAR_7='GET')\n",
"if VAR_12 is None:\n",
"return False\n",
"f.write(VAR_12)\n",
"logging.error(\"\"\"Failed to write to %s\n%s\"\"\", VAR_8, e)\n",
"return True\n",
"return False\n"
] | [
"def DownloadFile(local_file, url):...\n",
"\"\"\"docstring\"\"\"\n",
"local_file = os.path.abspath(local_file)\n",
"url_data = UrlOpen(url, method='GET')\n",
"if url_data is None:\n",
"return False\n",
"f.write(url_data)\n",
"logging.error(\"\"\"Failed to write to %s\n%s\"\"\", local_file, e)\n",
"return True\n",
"return False\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Expr'",
"Expr'",
"Return'",
"Return'"
] |
[
"def __getitem__(self, VAR_44):...\n",
""
] | [
"def __getitem__(self, key):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def __init__(self, *VAR_4, **VAR_5):...\n",
"super(CLASS_0, self).__init__(*VAR_4, **kwargs)\n",
"self.configuration.append_config_values(VAR_1)\n",
"self._group_ip = None\n",
"self.sshpool = None\n"
] | [
"def __init__(self, *args, **kwargs):...\n",
"super(DellEQLSanISCSIDriver, self).__init__(*args, **kwargs)\n",
"self.configuration.append_config_values(eqlx_opts)\n",
"self._group_ip = None\n",
"self.sshpool = None\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Assign'",
"Assign'"
] |
[
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return gate.new_agent(VAR_1)\n"
] | [
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return gate.new_agent(bindings)\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_18(self, VAR_10, VAR_1=None, VAR_65=None):...\n",
"VAR_100 = CLASS_2(self, VAR_10, VAR_1, VAR_65)\n",
"self._subworkflows[VAR_10] = VAR_100\n",
"self.globals[VAR_10] = VAR_100.target\n"
] | [
"def subworkflow(self, name, snakefile=None, workdir=None):...\n",
"sw = Subworkflow(self, name, snakefile, workdir)\n",
"self._subworkflows[name] = sw\n",
"self.globals[name] = sw.target\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"from __future__ import absolute_import\n",
"from __future__ import print_function\n",
"from __future__ import unicode_literals\n",
"import logging\n",
"import os\n",
"import random\n",
"import re\n",
"import tempfile\n",
"from cms.grading.languagemanager import filename_to_language\n",
"from cmscommon.crypto import decrypt_number\n",
"from cmstestsuite.web import GenericRequest, LoginRequest\n",
"VAR_0 = logging.getLogger(__name__)\n",
"def FUNC_0(self):...\n",
"if not LoginRequest.test_success(self):\n",
"return False\n",
"if self.redirected_to != self.base_url:\n",
"return False\n",
"return True\n"
] | [
"from __future__ import absolute_import\n",
"from __future__ import print_function\n",
"from __future__ import unicode_literals\n",
"import logging\n",
"import os\n",
"import random\n",
"import re\n",
"import tempfile\n",
"from cms.grading.languagemanager import filename_to_language\n",
"from cmscommon.crypto import decrypt_number\n",
"from cmstestsuite.web import GenericRequest, LoginRequest\n",
"logger = logging.getLogger(__name__)\n",
"def test_success(self):...\n",
"if not LoginRequest.test_success(self):\n",
"return False\n",
"if self.redirected_to != self.base_url:\n",
"return False\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_14(self, VAR_10=None, VAR_14=None, VAR_16=True):...\n",
"\"\"\"docstring\"\"\"\n",
"self.add_extra_args(VAR_10)\n",
"self.args = super().parse_args(VAR_10=args)\n",
"self.opt = vars(self.args)\n",
"self.opt['parlai_home'] = self.parlai_home\n",
"if 'batchsize' in self.opt and self.opt['batchsize'] <= 1:\n",
"self.opt.pop('batch_sort', None)\n",
"if self.opt.get('download_path'):\n",
"self.opt.pop('context_length', None)\n",
"VAR_20.environ['PARLAI_DOWNPATH'] = self.opt['download_path']\n",
"if self.opt.get('datapath'):\n",
"VAR_20.environ['PARLAI_DATAPATH'] = self.opt['datapath']\n",
"if self.opt.get('model_file') is not None:\n",
"self.opt['model_file'] = FUNC_3(self.opt.get('datapath'), self.opt[\n 'model_file'])\n",
"if self.opt.get('dict_file') is not None:\n",
"self.opt['dict_file'] = FUNC_3(self.opt.get('datapath'), self.opt['dict_file'])\n",
"VAR_32 = {}\n",
"VAR_33 = []\n",
"VAR_34 = []\n",
"for group in self._action_groups:\n",
"for a in group._group_actions:\n",
"for VAR_43 in range(len(self.cli_args)):\n",
"if hasattr(a, 'option_strings'):\n",
"if self.cli_args[VAR_43] in VAR_32:\n",
"self.opt['override'] = self.overridable\n",
"for VAR_44 in a.option_strings:\n",
"if self.cli_args[VAR_43] in VAR_33:\n",
"if VAR_16:\n",
"VAR_32[VAR_44] = a.dest\n",
"self.overridable[VAR_32[self.cli_args[VAR_43]]] = True\n",
"if self.cli_args[VAR_43] in VAR_34:\n",
"self.print_args()\n",
"return self.opt\n",
"if '_StoreTrueAction' in VAR_38(type(a)):\n",
"self.overridable[VAR_32[self.cli_args[VAR_43]]] = False\n",
"if VAR_43 < len(self.cli_args) - 1 and self.cli_args[VAR_43 + 1][0] != '-':\n",
"VAR_33.append(VAR_44)\n",
"if '_StoreFalseAction' in VAR_38(type(a)):\n",
"self.overridable[VAR_32[self.cli_args[VAR_43]]] = self.cli_args[VAR_43 + 1]\n",
"VAR_34.append(VAR_44)\n"
] | [
"def parse_args(self, args=None, namespace=None, print_args=True):...\n",
"\"\"\"docstring\"\"\"\n",
"self.add_extra_args(args)\n",
"self.args = super().parse_args(args=args)\n",
"self.opt = vars(self.args)\n",
"self.opt['parlai_home'] = self.parlai_home\n",
"if 'batchsize' in self.opt and self.opt['batchsize'] <= 1:\n",
"self.opt.pop('batch_sort', None)\n",
"if self.opt.get('download_path'):\n",
"self.opt.pop('context_length', None)\n",
"os.environ['PARLAI_DOWNPATH'] = self.opt['download_path']\n",
"if self.opt.get('datapath'):\n",
"os.environ['PARLAI_DATAPATH'] = self.opt['datapath']\n",
"if self.opt.get('model_file') is not None:\n",
"self.opt['model_file'] = modelzoo_path(self.opt.get('datapath'), self.opt[\n 'model_file'])\n",
"if self.opt.get('dict_file') is not None:\n",
"self.opt['dict_file'] = modelzoo_path(self.opt.get('datapath'), self.opt[\n 'dict_file'])\n",
"option_strings_dict = {}\n",
"store_true = []\n",
"store_false = []\n",
"for group in self._action_groups:\n",
"for a in group._group_actions:\n",
"for i in range(len(self.cli_args)):\n",
"if hasattr(a, 'option_strings'):\n",
"if self.cli_args[i] in option_strings_dict:\n",
"self.opt['override'] = self.overridable\n",
"for option in a.option_strings:\n",
"if self.cli_args[i] in store_true:\n",
"if print_args:\n",
"option_strings_dict[option] = a.dest\n",
"self.overridable[option_strings_dict[self.cli_args[i]]] = True\n",
"if self.cli_args[i] in store_false:\n",
"self.print_args()\n",
"return self.opt\n",
"if '_StoreTrueAction' in str(type(a)):\n",
"self.overridable[option_strings_dict[self.cli_args[i]]] = False\n",
"if i < len(self.cli_args) - 1 and self.cli_args[i + 1][0] != '-':\n",
"store_true.append(option)\n",
"if '_StoreFalseAction' in str(type(a)):\n",
"self.overridable[option_strings_dict[self.cli_args[i]]] = self.cli_args[i + 1]\n",
"store_false.append(option)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"For",
"For",
"Condition",
"Condition",
"Assign'",
"For",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Return'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Expr'"
] |
[
"def FUNC_15(self):...\n",
"self.run_test_case(self.scenario.register_load_balancer_instances())\n"
] | [
"def test_c_register_load_balancer_instances(self):...\n",
"self.run_test_case(self.scenario.register_load_balancer_instances())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_20(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return True\n"
] | [
"def check_authorization(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return True\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_6(self, VAR_19):...\n",
"\"\"\"docstring\"\"\"\n",
"return '' if VAR_19 is None else VAR_19\n"
] | [
"def run(self, value):...\n",
"\"\"\"docstring\"\"\"\n",
"return '' if value is None else value\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_5(self):...\n",
"if not self['ssh_user'] == 'root':\n",
"self['need_sudo'] = True\n"
] | [
"def check_user_privs(self):...\n",
"if not self['ssh_user'] == 'root':\n",
"self['need_sudo'] = True\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'"
] |
[
"def FUNC_2(self, VAR_13, VAR_11, VAR_12):...\n",
"\"\"\"docstring\"\"\"\n",
"for diff in self.__yield_diffs(VAR_12, VAR_13):\n",
"yield Result(self, self.diff_message, affected_code=(diff.range(filename),),\n diffs={filename: diff}, severity=self.diff_severity)\n"
] | [
"def _process_corrected(self, output, filename, file):...\n",
"\"\"\"docstring\"\"\"\n",
"for diff in self.__yield_diffs(file, output):\n",
"yield Result(self, self.diff_message, affected_code=(diff.range(filename),),\n diffs={filename: diff}, severity=self.diff_severity)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"For",
"Expr'"
] |
[
"def FUNC_22(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.cursor.execute('create table t1(id integer, s varchar(20))')\n",
"self.cursor.execute('insert into t1 values (?,?)', 1, 'test')\n",
"self.cursor.execute('select * from t1')\n",
"self.cnxn.close()\n",
"self.sql = 'select * from t1'\n",
"self.assertRaises(pyodbc.ProgrammingError, self._exec)\n"
] | [
"def test_close_cnxn(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.cursor.execute('create table t1(id integer, s varchar(20))')\n",
"self.cursor.execute('insert into t1 values (?,?)', 1, 'test')\n",
"self.cursor.execute('select * from t1')\n",
"self.cnxn.close()\n",
"self.sql = 'select * from t1'\n",
"self.assertRaises(pyodbc.ProgrammingError, self._exec)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"@VAR_0.route('/crawling')...\n",
"VAR_10['crawl_start_time'] = time.time()\n",
"VAR_11 = VAR_10.get('url', None)\n",
"VAR_12 = shlex.split('timeout %d wget -r -A pdf %s' % (VAR_4, VAR_11))\n",
"VAR_13 = subprocess.Popen(VAR_12, cwd=WGET_DATA_PATH)\n",
"VAR_10['crawl_process_id'] = VAR_13.pid\n",
"return render_template('crawling.html', max_crawling_duration=\n MAX_CRAWLING_DURATION)\n"
] | [
"@app.route('/crawling')...\n",
"session['crawl_start_time'] = time.time()\n",
"url = session.get('url', None)\n",
"command = shlex.split('timeout %d wget -r -A pdf %s' % (\n MAX_CRAWLING_DURATION, url))\n",
"process = subprocess.Popen(command, cwd=WGET_DATA_PATH)\n",
"session['crawl_process_id'] = process.pid\n",
"return render_template('crawling.html', max_crawling_duration=\n MAX_CRAWLING_DURATION)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
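
The crawling view above formats a session-supplied URL into a command string and then shlex.split()s it; even without a shell, a URL containing spaces or a leading '-' splits into extra tokens that wget parses as options or additional targets. Building the argv list directly avoids the re-tokenization; a sketch (the trailing '--' assumes GNU-style end-of-options handling in wget):

    import subprocess

    def start_crawl(url, max_duration, cwd):
        # Each element is exactly one argv entry, so the URL stays a single
        # argument no matter what characters it contains.
        cmd = ['timeout', str(max_duration), 'wget', '-r', '-A', 'pdf', '--', url]
        return subprocess.Popen(cmd, cwd=cwd)
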
[
"def FUNC_2(VAR_1, VAR_2):...\n",
"for VAR_13 in VAR_2.strip('/').split('/'):\n",
"VAR_1 = VAR_1[VAR_13]\n",
"FUNC_1(VAR_1)\n"
] | [
"def scan_git_subtree(tree, path):...\n",
"for p in path.strip('/').split('/'):\n",
"tree = tree[p]\n",
"scan_git_tree(tree)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Assign'",
"Expr'"
] |
[
"def FUNC_18(self, VAR_25, VAR_27):...\n",
"VAR_20 = request_mock(VAR_25=path, method='PUT', VAR_27=body, VAR_28={\n 'Content-Type': ['application/json']})\n",
"return self._render(VAR_20)\n"
] | [
"def put(self, path, body):...\n",
"request = request_mock(path=path, method='PUT', body=body, headers={\n 'Content-Type': ['application/json']})\n",
"return self._render(request)\n"
] | [
0,
5,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_12(self, VAR_9, VAR_10, VAR_11, VAR_16={}):...\n",
"get_and_check_project(VAR_9, VAR_11)\n",
"VAR_28 = self.queryset.annotate(**annotate).get(VAR_10=pk, VAR_1=project_pk)\n",
"return VAR_28\n"
] | [
"def get_and_check_task(self, request, pk, project_pk, annotate={}):...\n",
"get_and_check_project(request, project_pk)\n",
"task = self.queryset.annotate(**annotate).get(pk=pk, project=project_pk)\n",
"return task\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_2(VAR_4):...\n",
"VAR_13 = [(\"facebook: '\" + VAR_9[u'facebook'] + \"' and gmail: '\" + VAR_9[\n u'gmail'] + \"'\") for VAR_9 in VAR_1.emails.find()]\n",
"return HttpResponse('\\n\\n'.join(VAR_13))\n"
] | [
"def getAllUsers(request):...\n",
"all_emails = [(\"facebook: '\" + email[u'facebook'] + \"' and gmail: '\" +\n email[u'gmail'] + \"'\") for email in db.emails.find()]\n",
"return HttpResponse('\\n\\n'.join(all_emails))\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_39(VAR_101):...\n",
"if VAR_101.input:\n",
"VAR_13.set_input(*VAR_101.input[0], **ruleinfo.input[1])\n",
"if VAR_101.output:\n",
"VAR_13.set_output(*VAR_101.output[0], **ruleinfo.output[1])\n",
"if VAR_101.params:\n",
"VAR_13.set_params(*VAR_101.params[0], **ruleinfo.params[1])\n",
"if VAR_101.threads:\n",
"if not isinstance(VAR_101.threads, int):\n",
"if VAR_101.resources:\n",
"VAR_13.resources['_cores'] = VAR_101.threads\n",
"VAR_76, VAR_52 = VAR_101.resources\n",
"if VAR_101.priority:\n",
"if VAR_76:\n",
"if not isinstance(VAR_101.priority, int) and not isinstance(VAR_101.\n",
"if VAR_101.version:\n",
"if not all(map(lambda r: isinstance(r, int), VAR_52.values())):\n",
"VAR_13.priority = VAR_101.priority\n",
"VAR_13.version = VAR_101.version\n",
"if VAR_101.log:\n",
"VAR_13.resources.update(VAR_52)\n",
"VAR_13.set_log(*VAR_101.log[0], **ruleinfo.log[1])\n",
"if VAR_101.message:\n",
"VAR_13.message = VAR_101.message\n",
"if VAR_101.benchmark:\n",
"VAR_13.benchmark = VAR_101.benchmark\n",
"VAR_13.norun = VAR_101.norun\n",
"VAR_13.docstring = VAR_101.docstring\n",
"VAR_13.run_func = VAR_101.func\n",
"VAR_13.shellcmd = VAR_101.shellcmd\n",
"VAR_101.func.__name__ = '__{}'.format(VAR_10)\n",
"self.globals[VAR_101.func.__name__] = VAR_101.func\n",
"setattr(VAR_86, VAR_10, VAR_13)\n",
"return VAR_101.func\n"
] | [
"def decorate(ruleinfo):...\n",
"if ruleinfo.input:\n",
"rule.set_input(*ruleinfo.input[0], **ruleinfo.input[1])\n",
"if ruleinfo.output:\n",
"rule.set_output(*ruleinfo.output[0], **ruleinfo.output[1])\n",
"if ruleinfo.params:\n",
"rule.set_params(*ruleinfo.params[0], **ruleinfo.params[1])\n",
"if ruleinfo.threads:\n",
"if not isinstance(ruleinfo.threads, int):\n",
"if ruleinfo.resources:\n",
"rule.resources['_cores'] = ruleinfo.threads\n",
"args, resources = ruleinfo.resources\n",
"if ruleinfo.priority:\n",
"if args:\n",
"if not isinstance(ruleinfo.priority, int) and not isinstance(ruleinfo.\n",
"if ruleinfo.version:\n",
"if not all(map(lambda r: isinstance(r, int), resources.values())):\n",
"rule.priority = ruleinfo.priority\n",
"rule.version = ruleinfo.version\n",
"if ruleinfo.log:\n",
"rule.resources.update(resources)\n",
"rule.set_log(*ruleinfo.log[0], **ruleinfo.log[1])\n",
"if ruleinfo.message:\n",
"rule.message = ruleinfo.message\n",
"if ruleinfo.benchmark:\n",
"rule.benchmark = ruleinfo.benchmark\n",
"rule.norun = ruleinfo.norun\n",
"rule.docstring = ruleinfo.docstring\n",
"rule.run_func = ruleinfo.func\n",
"rule.shellcmd = ruleinfo.shellcmd\n",
"ruleinfo.func.__name__ = '__{}'.format(name)\n",
"self.globals[ruleinfo.func.__name__] = ruleinfo.func\n",
"setattr(rules, name, rule)\n",
"return ruleinfo.func\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_4(self, VAR_4):...\n",
"VAR_6 = 'SELECT COUNT(rno) FROM rides WHERE rno = {rno}'.format(VAR_4=rno)\n",
"self.cursor.execute(VAR_6)\n",
"VAR_9 = self.cursor.fetchone()\n",
"if int(VAR_9[0]) > 0:\n",
"return True\n",
"return False\n"
] | [
"def verify_rno(self, rno):...\n",
"query = 'SELECT COUNT(rno) FROM rides WHERE rno = {rno}'.format(rno=rno)\n",
"self.cursor.execute(query)\n",
"result = self.cursor.fetchone()\n",
"if int(result[0]) > 0:\n",
"return True\n",
"return False\n"
] | [
0,
4,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_9(self):...\n",
"\"\"\"docstring\"\"\"\n",
"assert self._plasma_store_socket_name is None\n",
"self._plasma_store_socket_name = (self._ray_params.plasma_store_socket_name or\n get_object_store_socket_name())\n",
"self.prepare_socket_file(self._plasma_store_socket_name)\n",
"VAR_14, VAR_15 = new_plasma_store_log_file(self._ray_params.redirect_output)\n",
"VAR_16 = ray.services.start_plasma_store(self._node_ip_address, self.\n _redis_address, VAR_14=stdout_file, VAR_15=stderr_file,\n object_store_memory=self._ray_params.object_store_memory,\n plasma_directory=self._ray_params.plasma_directory, huge_pages=self.\n _ray_params.huge_pages, plasma_store_socket_name=self.\n _plasma_store_socket_name, redis_password=self._ray_params.redis_password)\n",
"assert VAR_13.PROCESS_TYPE_PLASMA_STORE not in self.all_processes\n",
"self.all_processes[VAR_13.PROCESS_TYPE_PLASMA_STORE] = [VAR_16]\n"
] | [
"def start_plasma_store(self):...\n",
"\"\"\"docstring\"\"\"\n",
"assert self._plasma_store_socket_name is None\n",
"self._plasma_store_socket_name = (self._ray_params.plasma_store_socket_name or\n get_object_store_socket_name())\n",
"self.prepare_socket_file(self._plasma_store_socket_name)\n",
"stdout_file, stderr_file = new_plasma_store_log_file(self._ray_params.\n redirect_output)\n",
"process_info = ray.services.start_plasma_store(self._node_ip_address, self.\n _redis_address, stdout_file=stdout_file, stderr_file=stderr_file,\n object_store_memory=self._ray_params.object_store_memory,\n plasma_directory=self._ray_params.plasma_directory, huge_pages=self.\n _ray_params.huge_pages, plasma_store_socket_name=self.\n _plasma_store_socket_name, redis_password=self._ray_params.redis_password)\n",
"assert ray_constants.PROCESS_TYPE_PLASMA_STORE not in self.all_processes\n",
"self.all_processes[ray_constants.PROCESS_TYPE_PLASMA_STORE] = [process_info]\n"
] | [
0,
0,
0,
0,
0,
0,
6,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assert'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assert'",
"Assign'"
] |
[
"def FUNC_5(VAR_8, VAR_9):...\n",
"VAR_11, VAR_10 = FUNC_1()\n",
"VAR_12 = FUNC_0()\n",
"VAR_11.execute(\"INSERT INTO usage (date,user,command) VALUES ('\" + VAR_12 +\n \"','\" + str(VAR_8) + \"','\" + VAR_9 + \"')\")\n",
"VAR_10.commit()\n",
"VAR_10.close()\n"
] | [
"def insertUsage(user, command):...\n",
"c, conn = getConnection()\n",
"date = now()\n",
"c.execute(\"INSERT INTO usage (date,user,command) VALUES ('\" + date + \"','\" +\n str(user) + \"','\" + command + \"')\")\n",
"conn.commit()\n",
"conn.close()\n"
] | [
0,
0,
0,
4,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
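
insertUsage above assembles its INSERT by quoting and concatenating date, user, and command — the same injectable shape flagged in the verify_rno record earlier, and the labeled line here. A parameterized sketch against sqlite3's qmark paramstyle (the record's getConnection helper does not reveal the real driver):

    from datetime import datetime

    def insert_usage(conn, user, command):
        # Placeholders keep any quotes inside user/command inert.
        conn.execute('INSERT INTO usage (date, user, command) VALUES (?, ?, ?)',
                     (datetime.now().isoformat(), str(user), command))
        conn.commit()
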
[
"@staticmethod...\n",
"VAR_8 = dbaseConn()\n",
"VAR_11 = 'SELECT `uid`, `email` FROM `User` WHERE `uid`=%s'\n",
"cursor.execute(VAR_11, (VAR_0,))\n",
"VAR_10 = cursor.fetchone()\n",
"if VAR_10 is None:\n",
"VAR_8.db_conn_close()\n",
"VAR_9 = ('UPDATE `User` SET `email` = ' + \"'\" + VAR_6 + \"'\" +\n ' WHERE `uid` = ' + \"'\" + VAR_0 + \"'\")\n",
"return False\n",
"cursor.execute(VAR_9)\n",
"VAR_8.db_conn_close()\n",
"VAR_8.db_conn_close()\n",
"VAR_8.db.commit()\n",
"return False\n",
"return True\n"
] | [
"@staticmethod...\n",
"dbcon = dbaseConn()\n",
"validate_sql = 'SELECT `uid`, `email` FROM `User` WHERE `uid`=%s'\n",
"cursor.execute(validate_sql, (uid,))\n",
"result = cursor.fetchone()\n",
"if result is None:\n",
"dbcon.db_conn_close()\n",
"sql = ('UPDATE `User` SET `email` = ' + \"'\" + new_email + \"'\" +\n ' WHERE `uid` = ' + \"'\" + uid + \"'\")\n",
"return False\n",
"cursor.execute(sql)\n",
"dbcon.db_conn_close()\n",
"dbcon.db_conn_close()\n",
"dbcon.db.commit()\n",
"return False\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
4,
0,
4,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Return'",
"Return'"
] |
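
The record above is instructive because it mixes both styles: the SELECT binds uid through a %s placeholder, while the UPDATE a few lines later quotes and concatenates new_email and uid by hand (the two labeled lines). The fix is just symmetry; assuming the same pyformat-style driver the backtick quoting suggests (MySQLdb/PyMySQL):

    def update_email(cursor, uid, new_email):
        # Same binding mechanism the record's own SELECT already uses.
        cursor.execute('UPDATE `User` SET `email` = %s WHERE `uid` = %s',
                       (new_email, uid))
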
[
"def FUNC_4(VAR_18, VAR_17, VAR_19):...\n",
"\"\"\"docstring\"\"\"\n",
"return keyworder.get_composite_keywords(VAR_18, VAR_17, VAR_19) or {}\n"
] | [
"def extract_composite_keywords(ckw_db, fulltext, skw_spans):...\n",
"\"\"\"docstring\"\"\"\n",
"return keyworder.get_composite_keywords(ckw_db, fulltext, skw_spans) or {}\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def __iter__(self) ->'Iterator[IRow]':...\n",
"...\n"
] | [
"def __iter__(self) ->'Iterator[IRow]':...\n",
"...\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_18(self):...\n",
"if self.child_branch and self.child_leaf:\n",
"if self.child_branch == self.parent:\n",
"if self.parent and self.child_branch in self.parent.ascendants_of_branch:\n",
"if self.child_leaf and self.link_type == LinkTypes.REFERENCE.name:\n"
] | [
"def clean(self):...\n",
"if self.child_branch and self.child_leaf:\n",
"if self.child_branch == self.parent:\n",
"if self.parent and self.child_branch in self.parent.ascendants_of_branch:\n",
"if self.child_leaf and self.link_type == LinkTypes.REFERENCE.name:\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Condition",
"Condition",
"Condition"
] |
[
"def FUNC_6(self, VAR_11):...\n",
"VAR_33 = CLASS_43.run(self, VAR_11)\n",
"if VAR_33 is None:\n",
"return ''\n",
"return VAR_33\n"
] | [
"def run(self, url):...\n",
"val = VSanitizedUrl.run(self, url)\n",
"if val is None:\n",
"return ''\n",
"return val\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"\"\"\"string\"\"\"\n",
"import os\n",
"import re\n",
"import tempfile\n",
"import urllib2\n",
"from invenio.legacy.bibclassify import config as bconfig\n",
"if bconfig.STANDALONE:\n",
"from urllib2 import urlopen\n",
"from invenio.utils.url import make_invenio_opener\n",
"VAR_0 = bconfig.get_logger('bibclassify.text_extractor')\n",
"VAR_8 = make_invenio_opener('BibClassify').open\n",
"VAR_1 = re.compile('[A-Za-z]{2,}')\n",
"def FUNC_0(VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"if not FUNC_4('pdftotext'):\n",
"VAR_0.warning(\n 'GNU file was not found on the system. Switching to a weak file extension test.'\n )\n",
"VAR_9 = os.popen('file ' + re.escape(VAR_2)).read()\n",
"if VAR_2.lower().endswith('.pdf'):\n",
"VAR_19 = VAR_9.split(':')[1]\n",
"VAR_0.error('string')\n",
"VAR_10 = VAR_19.find('PDF') > -1\n",
"return True\n",
"return False\n",
"return VAR_10\n"
] | [
"\"\"\"\nBibClassify text extractor.\n\nThis module provides method to extract the fulltext from local or remote\ndocuments. Currently 2 formats of documents are supported: PDF and text\ndocuments.\n\n2 methods provide the functionality of the module: text_lines_from_local_file\nand text_lines_from_url.\n\nThis module also provides the utility 'is_pdf' that uses GNU file in order to\ndetermine if a local file is a PDF file.\n\nThis module is STANDALONE safe\n\"\"\"\n",
"import os\n",
"import re\n",
"import tempfile\n",
"import urllib2\n",
"from invenio.legacy.bibclassify import config as bconfig\n",
"if bconfig.STANDALONE:\n",
"from urllib2 import urlopen\n",
"from invenio.utils.url import make_invenio_opener\n",
"log = bconfig.get_logger('bibclassify.text_extractor')\n",
"urlopen = make_invenio_opener('BibClassify').open\n",
"_ONE_WORD = re.compile('[A-Za-z]{2,}')\n",
"def is_pdf(document):...\n",
"\"\"\"docstring\"\"\"\n",
"if not executable_exists('pdftotext'):\n",
"log.warning(\n 'GNU file was not found on the system. Switching to a weak file extension test.'\n )\n",
"file_output = os.popen('file ' + re.escape(document)).read()\n",
"if document.lower().endswith('.pdf'):\n",
"filetype = file_output.split(':')[1]\n",
"log.error(\n \"Your version of the 'file' utility seems to be unsupported. Please report this to [email protected].\"\n )\n",
"pdf = filetype.find('PDF') > -1\n",
"return True\n",
"return False\n",
"return pdf\n"
] | [
7,
0,
0,
7,
7,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
7,
7,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"Condition",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Return'",
"Return'",
"Return'"
] |
[
"@patch('invenio.ext.session.interface.SessionInterface.save_session')...\n",
"from invenio.modules.oauthclient.models import RemoteToken\n",
"from invenio.modules.oauthclient.handlers import token_getter\n",
"from invenio.modules.oauthclient.client import oauth\n",
"VAR_9 = MagicMock()\n",
"VAR_9.get_id = MagicMock(return_value=1)\n",
"VAR_9.is_authenticated = MagicMock(return_value=True)\n",
"VAR_10 = c.get(url_for('oauthclient.login', remote_app='full'))\n",
"assert VAR_10.status_code == 302\n",
"assert VAR_10.location.startswith(oauth.remote_apps['full'].authorize_url)\n",
"self.mock_response(VAR_5='full')\n",
"c.get(url_for('oauthclient.authorized', remote_app='full', code='test'))\n",
"assert session['oauth_token_full'] == ('test_access_token', '')\n",
"VAR_11 = RemoteToken.get(1, 'fullid')\n",
"assert VAR_11.remote_account.client_id == 'fullid'\n",
"assert VAR_11.access_token == 'test_access_token'\n",
"assert RemoteToken.query.count() == 1\n",
"self.mock_response(VAR_5='full', VAR_6={'access_token': 'new_access_token',\n 'scope': '', 'token_type': 'bearer'})\n",
"c.get(url_for('oauthclient.authorized', remote_app='full', code='test'))\n",
"VAR_11 = RemoteToken.get(1, 'fullid')\n",
"assert VAR_11.access_token == 'new_access_token'\n",
"assert RemoteToken.query.count() == 1\n",
"VAR_12 = token_getter(oauth.remote_apps['full'])\n",
"assert VAR_12 == ('new_access_token', '')\n",
"VAR_10 = c.get(url_for('oauthclient.disconnect', remote_app='full'))\n",
"assert VAR_10.status_code == 302\n",
"assert VAR_10.location.endswith(url_for('oauthclient_settings.index'))\n",
"VAR_11 = RemoteToken.get(1, 'fullid')\n",
"assert VAR_11 is None\n"
] | [
"@patch('invenio.ext.session.interface.SessionInterface.save_session')...\n",
"from invenio.modules.oauthclient.models import RemoteToken\n",
"from invenio.modules.oauthclient.handlers import token_getter\n",
"from invenio.modules.oauthclient.client import oauth\n",
"user = MagicMock()\n",
"user.get_id = MagicMock(return_value=1)\n",
"user.is_authenticated = MagicMock(return_value=True)\n",
"res = c.get(url_for('oauthclient.login', remote_app='full'))\n",
"assert res.status_code == 302\n",
"assert res.location.startswith(oauth.remote_apps['full'].authorize_url)\n",
"self.mock_response(app='full')\n",
"c.get(url_for('oauthclient.authorized', remote_app='full', code='test'))\n",
"assert session['oauth_token_full'] == ('test_access_token', '')\n",
"t = RemoteToken.get(1, 'fullid')\n",
"assert t.remote_account.client_id == 'fullid'\n",
"assert t.access_token == 'test_access_token'\n",
"assert RemoteToken.query.count() == 1\n",
"self.mock_response(app='full', data={'access_token': 'new_access_token',\n 'scope': '', 'token_type': 'bearer'})\n",
"c.get(url_for('oauthclient.authorized', remote_app='full', code='test'))\n",
"t = RemoteToken.get(1, 'fullid')\n",
"assert t.access_token == 'new_access_token'\n",
"assert RemoteToken.query.count() == 1\n",
"val = token_getter(oauth.remote_apps['full'])\n",
"assert val == ('new_access_token', '')\n",
"res = c.get(url_for('oauthclient.disconnect', remote_app='full'))\n",
"assert res.status_code == 302\n",
"assert res.location.endswith(url_for('oauthclient_settings.index'))\n",
"t = RemoteToken.get(1, 'fullid')\n",
"assert t is None\n"
] | [
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assert'",
"Assert'",
"Expr'",
"Expr'",
"Assert'",
"Assign'",
"Assert'",
"Assert'",
"Assert'",
"Expr'",
"Expr'",
"Assign'",
"Assert'",
"Assert'",
"Assign'",
"Assert'",
"Assign'",
"Assert'",
"Assert'",
"Assign'",
"Assert'"
] |
[
"@classmethod...\n",
"return super(CLASS_0, VAR_3).subsystem_dependencies() + (NativeToolchain.\n scoped(VAR_3), PythonSetup)\n"
] | [
"@classmethod...\n",
"return super(PythonNativeCode, cls).subsystem_dependencies() + (NativeToolchain\n .scoped(cls), PythonSetup)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_30(self, VAR_21, VAR_23=True):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_21 == '/':\n",
"VAR_57 = {VAR_2: VAR_12, VAR_3: 0, VAR_4: 0, VAR_5: 4096, VAR_6: 16877,\n VAR_7: time.time()}\n",
"VAR_57 = self.getfile(VAR_21, VAR_23=follow_symlinks)\n",
"if VAR_57 == False:\n",
"return CLASS_3(VAR_57[VAR_6], 0, 0, 1, VAR_57[VAR_3], VAR_57[VAR_4], VAR_57\n [VAR_5], VAR_57[VAR_7], VAR_57[VAR_7], VAR_57[VAR_7])\n"
] | [
"def stat(self, path, follow_symlinks=True):...\n",
"\"\"\"docstring\"\"\"\n",
"if path == '/':\n",
"p = {A_TYPE: T_DIR, A_UID: 0, A_GID: 0, A_SIZE: 4096, A_MODE: 16877,\n A_CTIME: time.time()}\n",
"p = self.getfile(path, follow_symlinks=follow_symlinks)\n",
"if p == False:\n",
"return _statobj(p[A_MODE], 0, 0, 1, p[A_UID], p[A_GID], p[A_SIZE], p[\n A_CTIME], p[A_CTIME], p[A_CTIME])\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_16(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_23 = ''\n",
"VAR_24 = 16\n",
"VAR_25 = platform.architecture()[0]\n",
"if VAR_25 == '64bit':\n",
"VAR_29 = 40\n",
"VAR_29 = 32\n",
"VAR_9 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)\n",
"VAR_26 = array.array('B', b'\\x00' * (VAR_24 * VAR_29))\n",
"VAR_27 = struct.pack('iL', VAR_24 * VAR_29, VAR_26.buffer_info()[0])\n",
"VAR_17 = fcntl.ioctl(VAR_9.fileno(), 35090, VAR_27)\n",
"VAR_28 = struct.unpack('iL', VAR_17)[0]\n",
"if VAR_28 == VAR_24 * VAR_29:\n",
"logger.warn('SIOCGIFCONF returned more than {0} up network interfaces.', VAR_24\n )\n",
"VAR_9 = VAR_26.tostring()\n",
"for i in range(0, VAR_29 * VAR_24, VAR_29):\n",
"VAR_23 = self._format_single_interface_name(VAR_9, i)\n",
"return VAR_23.decode('latin-1'), socket.inet_ntoa(VAR_9[i + 20:i + 24])\n",
"if b'lo' in VAR_23:\n"
] | [
"def get_first_if(self):...\n",
"\"\"\"docstring\"\"\"\n",
"iface = ''\n",
"expected = 16\n",
"python_arc = platform.architecture()[0]\n",
"if python_arc == '64bit':\n",
"struct_size = 40\n",
"struct_size = 32\n",
"sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)\n",
"buff = array.array('B', b'\\x00' * (expected * struct_size))\n",
"param = struct.pack('iL', expected * struct_size, buff.buffer_info()[0])\n",
"ret = fcntl.ioctl(sock.fileno(), 35090, param)\n",
"retsize = struct.unpack('iL', ret)[0]\n",
"if retsize == expected * struct_size:\n",
"logger.warn('SIOCGIFCONF returned more than {0} up network interfaces.',\n expected)\n",
"sock = buff.tostring()\n",
"for i in range(0, struct_size * expected, struct_size):\n",
"iface = self._format_single_interface_name(sock, i)\n",
"return iface.decode('latin-1'), socket.inet_ntoa(sock[i + 20:i + 24])\n",
"if b'lo' in iface:\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"For",
"Assign'",
"Return'",
"Condition"
] |
[
"def FUNC_8(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_14, VAR_15 = new_webui_log_file()\n",
"self._webui_url, VAR_16 = ray.services.start_ui(self._redis_address, VAR_14\n =stdout_file, VAR_15=stderr_file)\n",
"assert VAR_13.PROCESS_TYPE_WEB_UI not in self.all_processes\n",
"if VAR_16 is not None:\n",
"self.all_processes[VAR_13.PROCESS_TYPE_WEB_UI] = [VAR_16]\n"
] | [
"def start_ui(self):...\n",
"\"\"\"docstring\"\"\"\n",
"stdout_file, stderr_file = new_webui_log_file()\n",
"self._webui_url, process_info = ray.services.start_ui(self._redis_address,\n stdout_file=stdout_file, stderr_file=stderr_file)\n",
"assert ray_constants.PROCESS_TYPE_WEB_UI not in self.all_processes\n",
"if process_info is not None:\n",
"self.all_processes[ray_constants.PROCESS_TYPE_WEB_UI] = [process_info]\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assert'",
"Condition",
"Assign'"
]