lines (sequence, lengths 1 to 444) | raw_lines (sequence, lengths 1 to 444) | label (sequence, lengths 1 to 444) | type (sequence, lengths 1 to 444) |
---|---|---|---|
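Each row below is one example consisting of four parallel, per-line sequences of equal length: `lines` (identifier-obfuscated code, with names replaced by `VAR_n`, `FUNC_n`, and `CLASS_n` placeholders), `raw_lines` (the original source), `label` (one integer per line; 0 appears to mean unflagged, and the meaning of the non-zero codes is not given here), and `type` (the AST node kind of each line, e.g. `Assign'` or `Condition`). A minimal sketch of walking one row, assuming the four columns have already been parsed into Python lists; the function name and that parsing step are illustrative, not part of the dataset:

```python
def show_row(lines, raw_lines, labels, types):
    """Pair each obfuscated line with its raw source, per-line label, and node type."""
    for obf, raw, label, node_type in zip(lines, raw_lines, labels, types):
        flag = '*' if label != 0 else ' '  # assumption: non-zero labels mark lines of interest
        print(f"{flag} {node_type:<13} {label}  {raw.rstrip()}")
        print(f"  {'':<13}    {obf.rstrip()}")
```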
[
"def FUNC_2(VAR_1):...\n",
"VAR_3 = []\n",
"for VAR_4 in VAR_1:\n",
"VAR_9 = mock.MagicMock(VAR_8='AndroidDevice', VAR_4=serial, h_port=None)\n",
"return VAR_3\n",
"VAR_3.append(VAR_9)\n"
] | [
"def get_instances(serials):...\n",
"ads = []\n",
"for serial in serials:\n",
"ad = mock.MagicMock(name='AndroidDevice', serial=serial, h_port=None)\n",
"return ads\n",
"ads.append(ad)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Return'",
"Expr'"
] |
[
"def FUNC_43(self, VAR_9, VAR_25):...\n",
"return cast_fieldtype(VAR_25.fieldtype, VAR_9)\n"
] | [
"def cast(self, value, df):...\n",
"return cast_fieldtype(df.fieldtype, value)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"import sqlite3\n",
"from collections import OrderedDict as odict\n",
"import db_handler\n",
"import record_fetcher\n",
"def FUNC_0():...\n",
"VAR_6 = odict({'id': 1, 'url':\n 'https://www.mozilla.org/en-US/firefox/central/', 'title': None,\n 'rev_host': 'gro.allizom.www.', 'visit_count': 0, 'hidden': 0, 'typed':\n 0, 'favicon_id': None, 'frecency': 76, 'last_visit_date': None, 'guid':\n 'NNqZA_f2KHI1', 'foreign_count': 1, 'url_hash': 47356370932282,\n 'description': None, 'preview_image_url': None})\n",
"return VAR_6\n"
] | [
"import sqlite3\n",
"from collections import OrderedDict as odict\n",
"import db_handler\n",
"import record_fetcher\n",
"def create_test_data():...\n",
"test_record = odict({'id': 1, 'url':\n 'https://www.mozilla.org/en-US/firefox/central/', 'title': None,\n 'rev_host': 'gro.allizom.www.', 'visit_count': 0, 'hidden': 0, 'typed':\n 0, 'favicon_id': None, 'frecency': 76, 'last_visit_date': None, 'guid':\n 'NNqZA_f2KHI1', 'foreign_count': 1, 'url_hash': 47356370932282,\n 'description': None, 'preview_image_url': None})\n",
"return test_record\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"ImportFrom'",
"Import'",
"Import'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_3(self):...\n",
"VAR_12 = int(time()) - VAR_5\n",
"VAR_9 = 'DELETE FROM {} WHERE timestamp >= {}'.format(VAR_1, str(VAR_12))\n",
"self.connection.execute(VAR_9)\n",
"self.connection.commit()\n"
] | [
"def delete_old(self):...\n",
"last = int(time()) - one_year\n",
"stmt = 'DELETE FROM {} WHERE timestamp >= {}'.format(tb_name, str(last))\n",
"self.connection.execute(stmt)\n",
"self.connection.commit()\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_15(self, **VAR_23):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_25, VAR_23 = self._prepare_env(VAR_23)\n",
"return subprocess.Popen(self.cmd, VAR_25=env, **kwargs)\n"
] | [
"def run(self, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"env, kwargs = self._prepare_env(kwargs)\n",
"return subprocess.Popen(self.cmd, env=env, **kwargs)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"VAR_6 = [self.project_id, self.user_id, self.money, self.time]\n",
"VAR_6 = [repr(x) for x in VAR_6]\n",
"VAR_7 = ['project_id', 'user_id', 'money', 'timestamp']\n",
"return dict(zip(VAR_7, VAR_6))\n"
] | [
"def to_database_query(self):...\n",
"data = [self.project_id, self.user_id, self.money, self.time]\n",
"data = [repr(x) for x in data]\n",
"labels = ['project_id', 'user_id', 'money', 'timestamp']\n",
"return dict(zip(labels, data))\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_10(self):...\n",
"from invenio.modules.oauthclient.models import RemoteAccount\n",
"RemoteAccount.create(1, 'testid', None)\n",
"self.assert401(self.client.get(url_for('oauthclient_settings.index'),\n follow_redirects=True))\n",
"self.login('admin', '')\n",
"VAR_10 = self.client.get(url_for('oauthclient_settings.index'))\n",
"self.assert200(VAR_10)\n",
"assert 'MyLinkedTestAccount' in VAR_10.data\n",
"assert url_for('oauthclient.disconnect', remote_app='test') in VAR_10.data\n",
"assert url_for('oauthclient.login', remote_app='full') in VAR_10.data\n",
"assert url_for('oauthclient.login', remote_app='test_invalid') in VAR_10.data\n"
] | [
"def test_settings_view(self):...\n",
"from invenio.modules.oauthclient.models import RemoteAccount\n",
"RemoteAccount.create(1, 'testid', None)\n",
"self.assert401(self.client.get(url_for('oauthclient_settings.index'),\n follow_redirects=True))\n",
"self.login('admin', '')\n",
"res = self.client.get(url_for('oauthclient_settings.index'))\n",
"self.assert200(res)\n",
"assert 'MyLinkedTestAccount' in res.data\n",
"assert url_for('oauthclient.disconnect', remote_app='test') in res.data\n",
"assert url_for('oauthclient.login', remote_app='full') in res.data\n",
"assert url_for('oauthclient.login', remote_app='test_invalid') in res.data\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"ImportFrom'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assert'",
"Assert'",
"Assert'",
"Assert'"
] |
[
"def FUNC_0(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_0.suspected_flake_build_number is None:\n",
"return {}\n",
"VAR_2 = VAR_0.GetDataPointOfSuspectedBuild()\n",
"assert VAR_2\n",
"return {'confidence': VAR_0.confidence_in_suspected_build, 'build_number':\n VAR_0.suspected_flake_build_number, 'commit_position': VAR_2.\n commit_position, 'git_hash': VAR_2.git_hash,\n 'lower_bound_commit_position': VAR_2.previous_build_commit_position,\n 'lower_bound_git_hash': VAR_2.previous_build_git_hash, 'triage_result':\n VAR_0.triage_history[-1].triage_result if VAR_0.triage_history else\n triage_status.UNTRIAGED}\n"
] | [
"def _GetSuspectedFlakeInfo(analysis):...\n",
"\"\"\"docstring\"\"\"\n",
"if analysis.suspected_flake_build_number is None:\n",
"return {}\n",
"data_point = analysis.GetDataPointOfSuspectedBuild()\n",
"assert data_point\n",
"return {'confidence': analysis.confidence_in_suspected_build,\n 'build_number': analysis.suspected_flake_build_number,\n 'commit_position': data_point.commit_position, 'git_hash': data_point.\n git_hash, 'lower_bound_commit_position': data_point.\n previous_build_commit_position, 'lower_bound_git_hash': data_point.\n previous_build_git_hash, 'triage_result': analysis.triage_history[-1].\n triage_result if analysis.triage_history else triage_status.UNTRIAGED}\n"
] | [
0,
0,
5,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Assert'",
"Return'"
] |
[
"from __future__ import print_function\n",
"import time\n",
"import hashlib\n",
"import mimetypes\n",
"import jinja2\n",
"from .__init__ import *\n",
"from .util import *\n",
"if not PY2:\n",
"VAR_0 = str\n",
"\"\"\"\n Spawned by HttpConn to process one http transaction\n \"\"\"\n",
"def __init__(self, VAR_1):...\n",
"self.conn = VAR_1\n",
"self.s = VAR_1.s\n",
"self.addr = VAR_1.addr\n",
"self.args = VAR_1.args\n",
"self.auth = VAR_1.auth\n",
"self.sr = VAR_1.sr\n",
"self.bufsz = 1024 * 32\n",
"self.ok = True\n",
"self.log_func = VAR_1.log_func\n",
"self.log_src = VAR_1.log_src\n",
"def FUNC_0(self, VAR_2):...\n",
"self.log_func(self.log_src, VAR_2)\n",
"def FUNC_1(self):...\n",
"VAR_26 = read_header(self.sr)\n",
"return False\n",
"self.headers = {}\n",
"VAR_27, self.req, VAR_28 = VAR_26[0].split(' ')\n",
"self.log('bad headers:\\n' + '\\n'.join(VAR_26))\n",
"for header_line in VAR_26[1:]:\n",
"return False\n",
"VAR_29, VAR_30 = header_line.split(':', 1)\n",
"self.uname = '*'\n",
"self.headers[VAR_29.lower()] = VAR_30.strip()\n",
"if 'cookie' in self.headers:\n",
"VAR_31 = self.headers['cookie'].split(';')\n",
"if self.uname:\n",
"for VAR_29, VAR_30 in [x.split('=', 1) for x in VAR_31]:\n",
"self.rvol = self.auth.vfs.user_tree(self.uname, VAR_13=True)\n",
"if VAR_27 == 'GET':\n",
"self.loud_reply(str(ex))\n",
"return self.ok\n",
"if VAR_29 != 'cppwd':\n",
"self.wvol = self.auth.vfs.user_tree(self.uname, VAR_14=True)\n",
"self.handle_get()\n",
"if VAR_27 == 'POST':\n",
"return False\n",
"VAR_30 = unescape_cookie(VAR_30)\n",
"self.log(self.rvol)\n",
"self.handle_post()\n",
"self.loud_reply(u'invalid HTTP mode \"{0}\"'.format(VAR_27))\n",
"if VAR_30 == 'x':\n",
"self.log(self.wvol)\n",
"if not VAR_30 in self.auth.iuser:\n",
"VAR_2 = u'bad_cpwd \"{}\"'.format(VAR_30)\n",
"self.uname = self.auth.iuser[VAR_30]\n",
"VAR_41 = u'Set-Cookie: cppwd=x; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT'\n",
"self.loud_reply(VAR_2, VAR_6=[nuke])\n",
"return True\n"
] | [
"from __future__ import print_function\n",
"import time\n",
"import hashlib\n",
"import mimetypes\n",
"import jinja2\n",
"from .__init__ import *\n",
"from .util import *\n",
"if not PY2:\n",
"unicode = str\n",
"\"\"\"\n Spawned by HttpConn to process one http transaction\n \"\"\"\n",
"def __init__(self, conn):...\n",
"self.conn = conn\n",
"self.s = conn.s\n",
"self.addr = conn.addr\n",
"self.args = conn.args\n",
"self.auth = conn.auth\n",
"self.sr = conn.sr\n",
"self.bufsz = 1024 * 32\n",
"self.ok = True\n",
"self.log_func = conn.log_func\n",
"self.log_src = conn.log_src\n",
"def log(self, msg):...\n",
"self.log_func(self.log_src, msg)\n",
"def run(self):...\n",
"headerlines = read_header(self.sr)\n",
"return False\n",
"self.headers = {}\n",
"mode, self.req, _ = headerlines[0].split(' ')\n",
"self.log('bad headers:\\n' + '\\n'.join(headerlines))\n",
"for header_line in headerlines[1:]:\n",
"return False\n",
"k, v = header_line.split(':', 1)\n",
"self.uname = '*'\n",
"self.headers[k.lower()] = v.strip()\n",
"if 'cookie' in self.headers:\n",
"cookies = self.headers['cookie'].split(';')\n",
"if self.uname:\n",
"for k, v in [x.split('=', 1) for x in cookies]:\n",
"self.rvol = self.auth.vfs.user_tree(self.uname, readable=True)\n",
"if mode == 'GET':\n",
"self.loud_reply(str(ex))\n",
"return self.ok\n",
"if k != 'cppwd':\n",
"self.wvol = self.auth.vfs.user_tree(self.uname, writable=True)\n",
"self.handle_get()\n",
"if mode == 'POST':\n",
"return False\n",
"v = unescape_cookie(v)\n",
"self.log(self.rvol)\n",
"self.handle_post()\n",
"self.loud_reply(u'invalid HTTP mode \"{0}\"'.format(mode))\n",
"if v == 'x':\n",
"self.log(self.wvol)\n",
"if not v in self.auth.iuser:\n",
"msg = u'bad_cpwd \"{}\"'.format(v)\n",
"self.uname = self.auth.iuser[v]\n",
"nuke = u'Set-Cookie: cppwd=x; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT'\n",
"self.loud_reply(msg, headers=[nuke])\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
1,
1,
0,
1,
1,
1
] | [
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"Condition",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"For",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"For",
"Assign'",
"Condition",
"Expr'",
"Return'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Return'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_7(self, VAR_11=VAR_2):...\n",
"self.fs_db = VAR_11\n",
"for VAR_18 in self.fs_db.execute(\n",
"VAR_0, VAR_23, VAR_24, VAR_36, VAR_37, VAR_38 = VAR_18\n",
"for VAR_0 in self.fs_uuid_idx:\n",
"VAR_45 = set()\n",
"VAR_18 = self.fs_uuid_idx[VAR_0]\n",
"def FUNC_24(VAR_40):...\n",
"for fil_idx in VAR_38:\n",
"if not VAR_18.is_dir:\n",
"for VAR_18 in VAR_40.sub_items:\n",
"VAR_59 = fil_idx[0]\n",
"VAR_44 = set()\n",
"for VAR_75 in VAR_18.sub_files:\n",
"if VAR_18.parent:\n",
"return\n",
"VAR_71 = fil_idx[1]\n",
"for fol_idx in VAR_37:\n",
"VAR_18.sub_items.add(VAR_75)\n",
"for n_sub_uuid in VAR_18.sub_folders:\n",
"VAR_18.parent = VAR_40\n",
"VAR_72 = fil_idx[2]\n",
"VAR_44.add(fol_idx)\n",
"VAR_64 = self.fsNode(True, VAR_23, VAR_24, VAR_0, VAR_36, VAR_44, VAR_45,\n VAR_29=self)\n",
"VAR_18.sub_names_idx[VAR_75.file_name] = VAR_75\n",
"VAR_75 = self.fs_uuid_idx[n_sub_uuid]\n",
"FUNC_24(VAR_18)\n",
"VAR_78 = float(fil_idx[3])\n",
"VAR_78 = FUNC_0()\n",
"VAR_73 = fil_idx[4]\n",
"self.fs_uuid_idx[VAR_0] = VAR_64\n",
"VAR_18.sub_items.add(VAR_75)\n",
"VAR_74 = self.fsNode(False, VAR_71, VAR_72, VAR_59, VAR_78, VAR_39=s_f_uuid,\n VAR_29=self)\n",
"VAR_18.sub_names_idx[VAR_75.file_name] = VAR_75\n",
"VAR_45.add(VAR_74)\n",
"self.fs_uuid_idx[VAR_59] = VAR_74\n"
] | [
"def load_sqlfs(self, db=Database):...\n",
"self.fs_db = db\n",
"for item in self.fs_db.execute(\n",
"uuid_, file_name, owner, upload_time, sub_folders, sub_files = item\n",
"for uuid_ in self.fs_uuid_idx:\n",
"n_sub_files = set()\n",
"item = self.fs_uuid_idx[uuid_]\n",
"def iterate_fsnode(node):...\n",
"for fil_idx in sub_files:\n",
"if not item.is_dir:\n",
"for item in node.sub_items:\n",
"s_uuid = fil_idx[0]\n",
"n_sub_folders = set()\n",
"for n_sub in item.sub_files:\n",
"if item.parent:\n",
"return\n",
"s_file_name = fil_idx[1]\n",
"for fol_idx in sub_folders:\n",
"item.sub_items.add(n_sub)\n",
"for n_sub_uuid in item.sub_folders:\n",
"item.parent = node\n",
"s_owner = fil_idx[2]\n",
"n_sub_folders.add(fol_idx)\n",
"fold_elem = self.fsNode(True, file_name, owner, uuid_, upload_time,\n n_sub_folders, n_sub_files, master=self)\n",
"item.sub_names_idx[n_sub.file_name] = n_sub\n",
"n_sub = self.fs_uuid_idx[n_sub_uuid]\n",
"iterate_fsnode(item)\n",
"s_upload_time = float(fil_idx[3])\n",
"s_upload_time = get_current_time()\n",
"s_f_uuid = fil_idx[4]\n",
"self.fs_uuid_idx[uuid_] = fold_elem\n",
"item.sub_items.add(n_sub)\n",
"s_file = self.fsNode(False, s_file_name, s_owner, s_uuid, s_upload_time,\n f_uuid=s_f_uuid, master=self)\n",
"item.sub_names_idx[n_sub.file_name] = n_sub\n",
"n_sub_files.add(s_file)\n",
"self.fs_uuid_idx[s_uuid] = s_file\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"For",
"Assign'",
"Assign'",
"FunctionDef'",
"For",
"Condition",
"For",
"Assign'",
"Assign'",
"For",
"Condition",
"Return'",
"Assign'",
"For",
"Expr'",
"For",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'"
] |
[
"def FUNC_26(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = self._saml_auth()\n",
"logging.debug('Processing SAML response')\n",
"VAR_41 = VAR_15['saml_authn_request_id']\n",
"logging.warning('No saml_authn_request_id in session')\n",
"VAR_11.process_response(VAR_41=request_id)\n",
"VAR_2 = jsonify(VAR_32=['invalid_response'], message='SAML request failed',\n reason=\n 'No AuthNRequest ID from SP found to match with InResponseTo of response')\n",
"if VAR_11.get_errors():\n",
"VAR_2.status_code = 401\n",
"return self._render_saml_errors_json(VAR_11)\n",
"VAR_15.pop('saml_authn_request_id', None)\n",
"return VAR_2\n",
"if not VAR_11.is_authenticated():\n",
"logging.warning('auth.is_authenticated() => False')\n",
"VAR_28 = VAR_11.get_nameid()\n",
"VAR_2 = jsonify(error='Not Authenticated')\n",
"logging.info('SAML user authenticated: {!r}'.format(VAR_28))\n",
"VAR_2.status_code = 401\n",
"VAR_29 = VAR_11.get_attributes()\n",
"return VAR_2\n",
"logging.info('SAML attributes: {!r}'.format(VAR_29))\n",
"for VAR_48, val in VAR_29.iteritems():\n",
"if isinstance(val, list) and len(val) == 1:\n",
"VAR_15['saml_data'] = {'attrs': VAR_29, 'nameid': VAR_28, 'session_index':\n VAR_11.get_session_index()}\n",
"VAR_29[VAR_48] = val[0]\n",
"VAR_1 = {}\n",
"VAR_1['email'] = VAR_29.get('email', VAR_28)\n",
"for VAR_48, val in VAR_29.iteritems():\n",
"if not getattr(VAR_48, 'lower', None):\n",
"self.set_expiration()\n",
"logging.error('Bad list attr {!r}'.format({VAR_48: val}))\n",
"if VAR_48.lower() in ['firstname', 'first_name']:\n",
"self.set_current_user(**kwargs)\n",
"VAR_1['first_name'] = val\n",
"if VAR_48.lower() in ['lastname', 'last_name']:\n",
"VAR_30 = flask.url_for('index')\n",
"VAR_1['last_name'] = val\n",
"VAR_31 = request.form.get('RelayState', VAR_30)\n",
"if VAR_31.endswith('/saml/consume') or VAR_31.endswith('/login'):\n",
"VAR_31 = VAR_30\n",
"logging.debug('Redirecting to {0}'.format(VAR_31))\n",
"VAR_2 = flask.redirect(VAR_31)\n",
"self.set_csrf_token(VAR_2)\n",
"return VAR_2\n"
] | [
"def consume_saml_assertion(self):...\n",
"\"\"\"docstring\"\"\"\n",
"auth = self._saml_auth()\n",
"logging.debug('Processing SAML response')\n",
"request_id = session['saml_authn_request_id']\n",
"logging.warning('No saml_authn_request_id in session')\n",
"auth.process_response(request_id=request_id)\n",
"resp = jsonify(errors=['invalid_response'], message='SAML request failed',\n reason=\n 'No AuthNRequest ID from SP found to match with InResponseTo of response')\n",
"if auth.get_errors():\n",
"resp.status_code = 401\n",
"return self._render_saml_errors_json(auth)\n",
"session.pop('saml_authn_request_id', None)\n",
"return resp\n",
"if not auth.is_authenticated():\n",
"logging.warning('auth.is_authenticated() => False')\n",
"nameid = auth.get_nameid()\n",
"resp = jsonify(error='Not Authenticated')\n",
"logging.info('SAML user authenticated: {!r}'.format(nameid))\n",
"resp.status_code = 401\n",
"attributes = auth.get_attributes()\n",
"return resp\n",
"logging.info('SAML attributes: {!r}'.format(attributes))\n",
"for key, val in attributes.iteritems():\n",
"if isinstance(val, list) and len(val) == 1:\n",
"session['saml_data'] = {'attrs': attributes, 'nameid': nameid,\n 'session_index': auth.get_session_index()}\n",
"attributes[key] = val[0]\n",
"kwargs = {}\n",
"kwargs['email'] = attributes.get('email', nameid)\n",
"for key, val in attributes.iteritems():\n",
"if not getattr(key, 'lower', None):\n",
"self.set_expiration()\n",
"logging.error('Bad list attr {!r}'.format({key: val}))\n",
"if key.lower() in ['firstname', 'first_name']:\n",
"self.set_current_user(**kwargs)\n",
"kwargs['first_name'] = val\n",
"if key.lower() in ['lastname', 'last_name']:\n",
"default_redirect = flask.url_for('index')\n",
"kwargs['last_name'] = val\n",
"redirect_url = request.form.get('RelayState', default_redirect)\n",
"if redirect_url.endswith('/saml/consume') or redirect_url.endswith('/login'):\n",
"redirect_url = default_redirect\n",
"logging.debug('Redirecting to {0}'.format(redirect_url))\n",
"resp = flask.redirect(redirect_url)\n",
"self.set_csrf_token(resp)\n",
"return resp\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Expr'",
"Return'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Return'",
"Expr'",
"For",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return CLASS_0.select(lambda g: g.metadata_type == REGULAR_TORRENT and g.\n status != LEGACY_ENTRY).random(VAR_19)\n"
] | [
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return TorrentMetadata.select(lambda g: g.metadata_type == REGULAR_TORRENT and\n g.status != LEGACY_ENTRY).random(limit)\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"@require_http_methods(['POST'])...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_17 = BadRequestRateLimiter()\n",
"if VAR_17.is_rate_limit_exceeded(VAR_3):\n",
"VAR_0.warning('Password reset rate limit exceeded')\n",
"VAR_12 = VAR_3.user\n",
"return HttpResponseForbidden()\n",
"VAR_18 = VAR_12.email if VAR_12.is_authenticated() else VAR_3.POST.get('email')\n",
"if VAR_18:\n",
"return HttpResponseBadRequest(_('No email address provided.'))\n",
"request_password_change(VAR_18, VAR_3.is_secure())\n",
"VAR_0.info('Invalid password reset attempt')\n",
"return HttpResponse(status=200)\n",
"VAR_12 = VAR_12 if VAR_12.is_authenticated() else VAR_2.objects.get(VAR_18=\n email)\n",
"VAR_17.tick_bad_request_counter(VAR_3)\n",
"destroy_oauth_tokens(VAR_12)\n"
] | [
"@require_http_methods(['POST'])...\n",
"\"\"\"docstring\"\"\"\n",
"limiter = BadRequestRateLimiter()\n",
"if limiter.is_rate_limit_exceeded(request):\n",
"AUDIT_LOG.warning('Password reset rate limit exceeded')\n",
"user = request.user\n",
"return HttpResponseForbidden()\n",
"email = user.email if user.is_authenticated() else request.POST.get('email')\n",
"if email:\n",
"return HttpResponseBadRequest(_('No email address provided.'))\n",
"request_password_change(email, request.is_secure())\n",
"AUDIT_LOG.info('Invalid password reset attempt')\n",
"return HttpResponse(status=200)\n",
"user = user if user.is_authenticated() else User.objects.get(email=email)\n",
"limiter.tick_bad_request_counter(request)\n",
"destroy_oauth_tokens(user)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Return'",
"Expr'",
"Expr'",
"Return'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_7(VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = bleach.clean(VAR_2)\n",
"FUNC_1('insert into Player(name) values(%s)', (VAR_2,))\n"
] | [
"def registerPlayer(name):...\n",
"\"\"\"docstring\"\"\"\n",
"name = bleach.clean(name)\n",
"execute('insert into Player(name) values(%s)', (name,))\n"
] | [
0,
0,
4,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return VAR_18._check_executable_command(VAR_0=cls.executable, VAR_19=cls.\n prerequisite_command, VAR_20=cls.prerequisite_fail_msg)\n"
] | [
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return cls._check_executable_command(executable=cls.executable, command=cls\n .prerequisite_command, fail_msg=cls.prerequisite_fail_msg)\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_1(VAR_1):...\n",
"for VAR_5 in VAR_1.traverse():\n",
"if VAR_5.path.startswith('LICENSES'):\n",
"if VAR_5.path.find('license-rules.rst') >= 0:\n",
"if not os.path.isfile(VAR_5.path):\n",
"VAR_22.parse_lines(VAR_15, VAR_19.maxlines, VAR_5.path)\n"
] | [
"def scan_git_tree(tree):...\n",
"for el in tree.traverse():\n",
"if el.path.startswith('LICENSES'):\n",
"if el.path.find('license-rules.rst') >= 0:\n",
"if not os.path.isfile(el.path):\n",
"parser.parse_lines(fd, args.maxlines, el.path)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Condition",
"Condition",
"Condition",
"Expr'"
] |
[
"def __init__(self, VAR_1, VAR_5, VAR_6, VAR_4=None):...\n",
"GenericRequest.__init__(self, VAR_1, VAR_4)\n",
"self.url = '%stasks/%s/statements/%s' % (self.base_url, VAR_5, VAR_6)\n",
"self.task_id = VAR_5\n"
] | [
"def __init__(self, browser, task_id, language_code, base_url=None):...\n",
"GenericRequest.__init__(self, browser, base_url)\n",
"self.url = '%stasks/%s/statements/%s' % (self.base_url, task_id, language_code)\n",
"self.task_id = task_id\n"
] | [
0,
0,
5,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'"
] |
[
"def FUNC_29(self, VAR_77):...\n",
"def FUNC_39(VAR_101):...\n",
"VAR_101.priority = VAR_77\n",
"return VAR_101\n"
] | [
"def priority(self, priority):...\n",
"def decorate(ruleinfo):...\n",
"ruleinfo.priority = priority\n",
"return ruleinfo\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"from flask import Flask, render_template, url_for, flash, redirect, request\n",
"from flask_sqlalchemy import SQLAlchemy\n",
"VAR_0 = Flask(__name__, static_folder='static', static_url_path='')\n",
"VAR_0.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///site.sqlite3'\n",
"VAR_0.config['SECRET_KEY'] = 'random string'\n",
"VAR_1 = SQLAlchemy(VAR_0)\n",
"VAR_2 = VAR_1.Column(VAR_1.Integer, primary_key=True)\n",
"VAR_3 = VAR_1.Column(VAR_1.String(50))\n",
"VAR_4 = VAR_1.Column(VAR_1.String(20))\n",
"def __init__(self, VAR_3, VAR_4):...\n",
"self.email = VAR_3\n",
"self.password = VAR_4\n",
"@VAR_0.route('/')...\n",
"return render_template('home.html')\n"
] | [
"from flask import Flask, render_template, url_for, flash, redirect, request\n",
"from flask_sqlalchemy import SQLAlchemy\n",
"app = Flask(__name__, static_folder='static', static_url_path='')\n",
"app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///site.sqlite3'\n",
"app.config['SECRET_KEY'] = 'random string'\n",
"db = SQLAlchemy(app)\n",
"id = db.Column(db.Integer, primary_key=True)\n",
"email = db.Column(db.String(50))\n",
"password = db.Column(db.String(20))\n",
"def __init__(self, email, password):...\n",
"self.email = email\n",
"self.password = password\n",
"@app.route('/')...\n",
"return render_template('home.html')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
4,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Return'"
] |
[
"def __init__(self, *VAR_2, **VAR_3):...\n",
"super(CLASS_1, self).__init__(*VAR_2, **kwargs)\n",
"self.OIDC_OP_AUTH_ENDPOINT = import_from_settings(\n 'OIDC_OP_AUTHORIZATION_ENDPOINT')\n",
"self.OIDC_RP_CLIENT_ID = import_from_settings('OIDC_RP_CLIENT_ID')\n"
] | [
"def __init__(self, *args, **kwargs):...\n",
"super(OIDCAuthenticationRequestView, self).__init__(*args, **kwargs)\n",
"self.OIDC_OP_AUTH_ENDPOINT = import_from_settings(\n 'OIDC_OP_AUTHORIZATION_ENDPOINT')\n",
"self.OIDC_RP_CLIENT_ID = import_from_settings('OIDC_RP_CLIENT_ID')\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'"
] |
[
"def FUNC_2(self):...\n",
"VAR_1 = self.client.get(f'/qa/extractionscript/')\n",
"self.assertIn(f\"/qa/extractionscript/15/'> Begin QA\".encode(), VAR_1.content)\n",
"VAR_2 = 15\n",
"VAR_1 = self.client.get(f'/qa/extractionscript/{VAR_2}/')\n",
"VAR_3 = ExtractedText.objects.filter(extraction_script=pk).first()\n",
"self.assertIn(f'/qa/extractedtext/{VAR_3.pk}/'.encode(), VAR_1.content)\n",
"VAR_4 = QAGroup.objects.filter(extraction_script_id=pk).count()\n",
"self.assertTrue(VAR_4 == 1)\n",
"self.assertTrue(Script.objects.get(VAR_2=15).qa_begun)\n",
"VAR_5 = QAGroup.objects.get(extraction_script_id=pk).pk\n",
"VAR_3 = ExtractedText.objects.filter(extraction_script=pk).first()\n",
"self.assertTrue(VAR_3.qa_group_id == VAR_5)\n",
"VAR_1 = self.client.get(f'/qa/extractionscript/')\n",
"self.assertIn(f\"'/qa/extractionscript/15/'> Continue QA\".encode(), VAR_1.\n content)\n"
] | [
"def test_new_qa_group_urls(self):...\n",
"response = self.client.get(f'/qa/extractionscript/')\n",
"self.assertIn(f\"/qa/extractionscript/15/'> Begin QA\".encode(), response.content\n )\n",
"pk = 15\n",
"response = self.client.get(f'/qa/extractionscript/{pk}/')\n",
"et = ExtractedText.objects.filter(extraction_script=pk).first()\n",
"self.assertIn(f'/qa/extractedtext/{et.pk}/'.encode(), response.content)\n",
"group_count = QAGroup.objects.filter(extraction_script_id=pk).count()\n",
"self.assertTrue(group_count == 1)\n",
"self.assertTrue(Script.objects.get(pk=15).qa_begun)\n",
"group_pk = QAGroup.objects.get(extraction_script_id=pk).pk\n",
"et = ExtractedText.objects.filter(extraction_script=pk).first()\n",
"self.assertTrue(et.qa_group_id == group_pk)\n",
"response = self.client.get(f'/qa/extractionscript/')\n",
"self.assertIn(f\"'/qa/extractionscript/15/'> Continue QA\".encode(), response\n .content)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_28(self, VAR_21):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_65 = [x[VAR_1] for x in self.get_path(VAR_21)]\n",
"return VAR_65\n"
] | [
"def listdir(self, path):...\n",
"\"\"\"docstring\"\"\"\n",
"names = [x[A_NAME] for x in self.get_path(path)]\n",
"return names\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_7(self, VAR_46):...\n",
"VAR_70 = []\n",
"for join_table, parent_table in VAR_46:\n",
"VAR_70.append(\n 'LEFT JOIN {join_tbl} ON {join_tbl}.{join_fld} = {parent_tbl}.{parent_fld}'\n .format(VAR_87=join_table, VAR_86=parent_table, join_fld=self.\n path_mapping[join_table][parent_table][self.JOIN_COLUMN], parent_fld=\n self.path_mapping[join_table][parent_table][self.PARENT_COLUMN]))\n",
"return ' '.join(VAR_70)\n"
] | [
"def generate_left_join(self, join_path):...\n",
"join_phrases = []\n",
"for join_table, parent_table in join_path:\n",
"join_phrases.append(\n 'LEFT JOIN {join_tbl} ON {join_tbl}.{join_fld} = {parent_tbl}.{parent_fld}'\n .format(join_tbl=join_table, parent_tbl=parent_table, join_fld=self.\n path_mapping[join_table][parent_table][self.JOIN_COLUMN], parent_fld=\n self.path_mapping[join_table][parent_table][self.PARENT_COLUMN]))\n",
"return ' '.join(join_phrases)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Expr'",
"Return'"
] |
[
"def FUNC_14(self):...\n",
"self.run_test_case(self.scenario.create_instances())\n"
] | [
"def test_b_create_instances(self):...\n",
"self.run_test_case(self.scenario.create_instances())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_5(self):...\n",
"VAR_14 = self.objects.p\n",
"VAR_15 = self.objects.puc\n",
"VAR_16 = self.objects.pt\n",
"PUCToTag.objects.create(content_object=puc, VAR_16=tag)\n",
"ProductToPUC.objects.create(product=p, VAR_15=puc)\n",
"VAR_17 = self.live_server_url + f'/product/{VAR_14.pk}/'\n",
"self.browser.get(VAR_17)\n",
"VAR_18 = self.browser.find_element_by_id('tag_submit')\n",
"self.assertFalse(VAR_18.is_enabled(), 'Button should be disabled')\n",
"VAR_16 = self.browser.find_element_by_class_name('taggit-tag')\n",
"VAR_16.click()\n",
"self.assertTrue(VAR_18.is_enabled(), 'Button should be enabled')\n"
] | [
"def test_product(self):...\n",
"p = self.objects.p\n",
"puc = self.objects.puc\n",
"tag = self.objects.pt\n",
"PUCToTag.objects.create(content_object=puc, tag=tag)\n",
"ProductToPUC.objects.create(product=p, puc=puc)\n",
"url = self.live_server_url + f'/product/{p.pk}/'\n",
"self.browser.get(url)\n",
"submit = self.browser.find_element_by_id('tag_submit')\n",
"self.assertFalse(submit.is_enabled(), 'Button should be disabled')\n",
"tag = self.browser.find_element_by_class_name('taggit-tag')\n",
"tag.click()\n",
"self.assertTrue(submit.is_enabled(), 'Button should be enabled')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_1(self):...\n",
""
] | [
"def _set_remote_data(self):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_5(self, VAR_12):...\n",
"VAR_14 = 'string'.format(VAR_12)\n",
"self.cur.execute(VAR_14)\n",
"return self.cur.fetchall()\n"
] | [
"def get_event(self, event_id):...\n",
"sql = (\n \"\"\"SELECT title, description, start_time, time_zone, (\n SELECT GROUP_CONCAT(DISTINCT username)\n FROM user_event\n WHERE event_id = {0}\n AND user_event.attending = 1)\n AS accepted, (\n SELECT GROUP_CONCAT(DISTINCT username)\n FROM user_event\n WHERE event_id = {0}\n AND user_event.attending = 0)\n AS declined\n FROM events\n WHERE event_id = {0};\n \"\"\"\n .format(event_id))\n",
"self.cur.execute(sql)\n",
"return self.cur.fetchall()\n"
] | [
0,
4,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_9(VAR_4, VAR_5, VAR_6):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_18 = VAR_5['author']\n",
"VAR_22 = ''\n",
"if VAR_4.json['action'] == 'opened':\n",
"if VAR_6['message']['opened']['header'] == '':\n",
"if VAR_4.json['action'] in ['synchronize', 'reopened']:\n",
"VAR_22 = 'Hello @' + VAR_18 + '! Thanks for submitting the PR.\\n\\n'\n",
"VAR_22 = VAR_6['message']['opened']['header'] + '\\n\\n'\n",
"if VAR_6['message']['updated']['header'] == '':\n",
"VAR_23 = False\n",
"VAR_22 = 'Hello @' + VAR_18 + '! Thanks for updating the PR.\\n\\n'\n",
"VAR_22 = VAR_6['message']['updated']['header'] + '\\n\\n'\n",
"VAR_24 = []\n",
"for VAR_47, issues in VAR_5['results'].items():\n",
"if len(issues) == 0:\n",
"if VAR_6['only_mention_files_with_errors'] and not VAR_23:\n",
"if not VAR_6['only_mention_files_with_errors']:\n",
"VAR_23 = True\n",
"VAR_24.append(\n 'Cheers ! There are no PEP8 issues in this Pull Request. :beers: ')\n",
"VAR_24 = ''.join(VAR_24)\n",
"VAR_24.append(' - There are no PEP8 issues in the file [`{0}`]({1}) !'.\n format(VAR_47, VAR_5[VAR_47 + '_link']))\n",
"VAR_24.append('\\n\\n')\n",
"VAR_24.append(' - In the file [`{0}`]({1}), following are the PEP8 issues :\\n'\n .format(VAR_47, VAR_5[VAR_47 + '_link']))\n",
"VAR_25 = []\n",
"if len(VAR_5['extra_results'][VAR_47]) > 0:\n",
"for issue in issues:\n",
"if VAR_4.json['action'] == 'opened':\n",
"VAR_24.append(' - Complete extra results for this file :\\n\\n')\n",
"VAR_62 = issue.replace(VAR_47 + ':', 'Line ')\n",
"VAR_25.append(VAR_6['message']['opened']['footer'])\n",
"if VAR_4.json['action'] in ['synchronize', 'reopened']:\n",
"VAR_24.append('> ' + ''.join(VAR_5['extra_results'][VAR_47]))\n",
"VAR_63 = VAR_62.split(' ')\n",
"VAR_25 = ''.join(VAR_25)\n",
"VAR_25.append(VAR_6['message']['updated']['footer'])\n",
"VAR_24.append('---\\n\\n')\n",
"VAR_64 = VAR_63[2]\n",
"return VAR_22, VAR_24, VAR_25, VAR_23\n",
"VAR_65 = 'https://duckduckgo.com/?q=pep8%20{0}'.format(VAR_64)\n",
"VAR_63[2] = '[{0}]({1})'.format(VAR_64, VAR_65)\n",
"VAR_66, VAR_67 = VAR_63[1][:-1].split(':')\n",
"VAR_68 = VAR_5[VAR_47 + '_link'] + '#L' + VAR_66\n",
"VAR_63[1] = '[{0}:{1}]({2}):'.format(VAR_66, VAR_67, VAR_68)\n",
"VAR_62 = ' '.join(VAR_63)\n",
"VAR_62 = VAR_62.replace('Line [', '[Line ')\n",
"VAR_24.append('\\n> {0}'.format(VAR_62))\n"
] | [
"def prepare_comment(request, data, config):...\n",
"\"\"\"docstring\"\"\"\n",
"author = data['author']\n",
"comment_header = ''\n",
"if request.json['action'] == 'opened':\n",
"if config['message']['opened']['header'] == '':\n",
"if request.json['action'] in ['synchronize', 'reopened']:\n",
"comment_header = 'Hello @' + author + '! Thanks for submitting the PR.\\n\\n'\n",
"comment_header = config['message']['opened']['header'] + '\\n\\n'\n",
"if config['message']['updated']['header'] == '':\n",
"ERROR = False\n",
"comment_header = 'Hello @' + author + '! Thanks for updating the PR.\\n\\n'\n",
"comment_header = config['message']['updated']['header'] + '\\n\\n'\n",
"comment_body = []\n",
"for file, issues in data['results'].items():\n",
"if len(issues) == 0:\n",
"if config['only_mention_files_with_errors'] and not ERROR:\n",
"if not config['only_mention_files_with_errors']:\n",
"ERROR = True\n",
"comment_body.append(\n 'Cheers ! There are no PEP8 issues in this Pull Request. :beers: ')\n",
"comment_body = ''.join(comment_body)\n",
"comment_body.append(' - There are no PEP8 issues in the file [`{0}`]({1}) !'\n .format(file, data[file + '_link']))\n",
"comment_body.append('\\n\\n')\n",
"comment_body.append(\n ' - In the file [`{0}`]({1}), following are the PEP8 issues :\\n'.format\n (file, data[file + '_link']))\n",
"comment_footer = []\n",
"if len(data['extra_results'][file]) > 0:\n",
"for issue in issues:\n",
"if request.json['action'] == 'opened':\n",
"comment_body.append(' - Complete extra results for this file :\\n\\n')\n",
"error_string = issue.replace(file + ':', 'Line ')\n",
"comment_footer.append(config['message']['opened']['footer'])\n",
"if request.json['action'] in ['synchronize', 'reopened']:\n",
"comment_body.append('> ' + ''.join(data['extra_results'][file]))\n",
"error_string_list = error_string.split(' ')\n",
"comment_footer = ''.join(comment_footer)\n",
"comment_footer.append(config['message']['updated']['footer'])\n",
"comment_body.append('---\\n\\n')\n",
"code = error_string_list[2]\n",
"return comment_header, comment_body, comment_footer, ERROR\n",
"code_url = 'https://duckduckgo.com/?q=pep8%20{0}'.format(code)\n",
"error_string_list[2] = '[{0}]({1})'.format(code, code_url)\n",
"line, col = error_string_list[1][:-1].split(':')\n",
"line_url = data[file + '_link'] + '#L' + line\n",
"error_string_list[1] = '[{0}:{1}]({2}):'.format(line, col, line_url)\n",
"error_string = ' '.join(error_string_list)\n",
"error_string = error_string.replace('Line [', '[Line ')\n",
"comment_body.append('\\n> {0}'.format(error_string))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"For",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_9(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.do_step(HomepageRequest(self.browser, self.username, loggedin=False,\n VAR_10=self.base_url))\n",
"self.do_step(LoginRequest(self.browser, self.username, self.password,\n VAR_10=self.base_url))\n",
"self.do_step(HomepageRequest(self.browser, self.username, loggedin=True,\n VAR_10=self.base_url))\n"
] | [
"def login(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.do_step(HomepageRequest(self.browser, self.username, loggedin=False,\n base_url=self.base_url))\n",
"self.do_step(LoginRequest(self.browser, self.username, self.password,\n base_url=self.base_url))\n",
"self.do_step(HomepageRequest(self.browser, self.username, loggedin=True,\n base_url=self.base_url))\n"
] | [
0,
0,
5,
5,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_15(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.opt:\n",
"self.parse_args(VAR_16=False)\n",
"VAR_35 = {}\n",
"for VAR_39, VAR_0 in self.opt.items():\n",
"VAR_35[VAR_38(VAR_39)] = VAR_38(VAR_0)\n",
"for group in self._action_groups:\n",
"VAR_40 = {a.dest: getattr(self.args, a.dest, None) for a in group.\n _group_actions}\n",
"VAR_14 = argparse.Namespace(**group_dict)\n",
"VAR_41 = 0\n",
"for VAR_39 in VAR_14.__dict__:\n",
"if VAR_39 in VAR_35:\n",
"if VAR_41 == 0:\n",
"print('[ ' + group.title + ': ] ')\n",
"VAR_41 += 1\n",
"print('[ ' + VAR_39 + ': ' + VAR_35[VAR_39] + ' ]')\n"
] | [
"def print_args(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.opt:\n",
"self.parse_args(print_args=False)\n",
"values = {}\n",
"for key, value in self.opt.items():\n",
"values[str(key)] = str(value)\n",
"for group in self._action_groups:\n",
"group_dict = {a.dest: getattr(self.args, a.dest, None) for a in group.\n _group_actions}\n",
"namespace = argparse.Namespace(**group_dict)\n",
"count = 0\n",
"for key in namespace.__dict__:\n",
"if key in values:\n",
"if count == 0:\n",
"print('[ ' + group.title + ': ] ')\n",
"count += 1\n",
"print('[ ' + key + ': ' + values[key] + ' ]')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'",
"Assign'",
"For",
"Assign'",
"For",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Condition",
"Expr'",
"AugAssign'",
"Expr'"
] |
[
"def FUNC_10(VAR_13):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_43 = []\n",
"if isinstance(VAR_13, six.string_types):\n",
"VAR_43.append(VAR_13.lstrip('-'))\n",
"if isinstance(VAR_13, (list, tuple)) and len(VAR_13) >= 2 and isinstance(VAR_13\n",
"return VAR_43\n",
"for func_arg in VAR_13[1]:\n",
"VAR_43.extend(FUNC_10(func_arg))\n"
] | [
"def columns_in_expr(expr):...\n",
"\"\"\"docstring\"\"\"\n",
"cols = []\n",
"if isinstance(expr, six.string_types):\n",
"cols.append(expr.lstrip('-'))\n",
"if isinstance(expr, (list, tuple)) and len(expr) >= 2 and isinstance(expr[1\n",
"return cols\n",
"for func_arg in expr[1]:\n",
"cols.extend(columns_in_expr(func_arg))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Return'",
"For",
"Expr'"
] |
[
"def FUNC_41(self):...\n",
"for VAR_40 in self._names:\n",
"yield VAR_40, getattr(self, VAR_40)\n"
] | [
"def items(self):...\n",
"for name in self._names:\n",
"yield name, getattr(self, name)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"For",
"Expr'"
] |
[
"def FUNC_0(*VAR_6):...\n",
"logging.error(*VAR_6)\n",
"sys.exit(1)\n"
] | [
"def fatal(*args):...\n",
"logging.error(*args)\n",
"sys.exit(1)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"@mock.patch.object(users, 'is_current_user_admin', return_value=True)...\n",
"VAR_2 = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')\n",
"VAR_2.put()\n",
"self.mock_current_user(user_email='[email protected]')\n",
"VAR_3 = self.test_app.get('/waterfall/analyze_regression_range', params={\n 'lower_bound_commit_position': 1, 'upper_bound_commit_position': 2,\n 'iterations_to_rerun': 100, 'key': analysis.key.urlsafe()})\n",
"self.assertEqual(200, VAR_3.status_int)\n"
] | [
"@mock.patch.object(users, 'is_current_user_admin', return_value=True)...\n",
"analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')\n",
"analysis.put()\n",
"self.mock_current_user(user_email='[email protected]')\n",
"response = self.test_app.get('/waterfall/analyze_regression_range', params=\n {'lower_bound_commit_position': 1, 'upper_bound_commit_position': 2,\n 'iterations_to_rerun': 100, 'key': analysis.key.urlsafe()})\n",
"self.assertEqual(200, response.status_int)\n"
] | [
5,
0,
0,
5,
5,
0
] | [
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_0(self):...\n",
"self.loader = RegressionCheckLoader([\n 'unittests/resources/checks_unlisted/dependencies/normal.py'])\n",
"rt.runtime().resources.prefix = tempfile.mkdtemp(dir='unittests')\n"
] | [
"def setUp(self):...\n",
"self.loader = RegressionCheckLoader([\n 'unittests/resources/checks_unlisted/dependencies/normal.py'])\n",
"rt.runtime().resources.prefix = tempfile.mkdtemp(dir='unittests')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"from pytest_bdd import scenario, then\n",
"@scenario('../features/log_accessible.feature', 'get logs')...\n",
"@then('the pods logs should not be empty')...\n",
"VAR_1 = 'string'\n",
"VAR_2 = VAR_0.check_output(VAR_1)\n",
"for pod_id in VAR_2.split('\\n'):\n",
"VAR_3 = (\n 'kubectl --kubeconfig=/etc/kubernetes/admin.conf logs {} --limit-bytes=1 -n kube-system'\n .format(pod_id))\n",
"VAR_4 = VAR_0.check_output(VAR_3)\n",
"if 'salt-master' not in pod_id:\n",
"assert len(VAR_4.strip()) > 0, 'Error cannot retrieve logs for {}'.format(\n pod_id)\n"
] | [
"from pytest_bdd import scenario, then\n",
"@scenario('../features/log_accessible.feature', 'get logs')...\n",
"@then('the pods logs should not be empty')...\n",
"cmd = (\n 'kubectl --kubeconfig=/etc/kubernetes/admin.conf get pods -n kube-system --no-headers -o custom-columns=\":metadata.name\"'\n )\n",
"pods_list = host.check_output(cmd)\n",
"for pod_id in pods_list.split('\\n'):\n",
"cmd_logs = (\n 'kubectl --kubeconfig=/etc/kubernetes/admin.conf logs {} --limit-bytes=1 -n kube-system'\n .format(pod_id))\n",
"res = host.check_output(cmd_logs)\n",
"if 'salt-master' not in pod_id:\n",
"assert len(res.strip()) > 0, 'Error cannot retrieve logs for {}'.format(pod_id)\n"
] | [
0,
0,
0,
2,
2,
0,
2,
2,
0,
2
] | [
"ImportFrom'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assert'"
] |
[
"def FUNC_3(self, VAR_8):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_19 = self._cliq_get_cluster_info(VAR_8)\n",
"VAR_20 = []\n",
"for vip in VAR_19.findall('response/cluster/vip'):\n",
"VAR_20.append(vip.attrib.get('ipAddress'))\n",
"if len(VAR_20) == 1:\n",
"return VAR_20[0]\n",
"VAR_21 = etree.tostring(VAR_19)\n",
"VAR_22 = _(\n 'Unexpected number of virtual ips for cluster %(cluster_name)s. Result=%(_xml)s'\n ) % {'cluster_name': VAR_8, '_xml': VAR_21}\n"
] | [
"def _cliq_get_cluster_vip(self, cluster_name):...\n",
"\"\"\"docstring\"\"\"\n",
"cluster_xml = self._cliq_get_cluster_info(cluster_name)\n",
"vips = []\n",
"for vip in cluster_xml.findall('response/cluster/vip'):\n",
"vips.append(vip.attrib.get('ipAddress'))\n",
"if len(vips) == 1:\n",
"return vips[0]\n",
"_xml = etree.tostring(cluster_xml)\n",
"msg = _(\n 'Unexpected number of virtual ips for cluster %(cluster_name)s. Result=%(_xml)s'\n ) % {'cluster_name': cluster_name, '_xml': _xml}\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"For",
"Expr'",
"Condition",
"Return'",
"Assign'",
"Assign'"
] |
[
"def FUNC_11(self):...\n",
"return ', '.join(column.for_schema() for column in self.columns)\n"
] | [
"def for_schema(self):...\n",
"return ', '.join(column.for_schema() for column in self.columns)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_8(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.assertTrue(self.client.login(username='autotest2', password='password'))\n",
"VAR_4 = 'autotest'\n",
"VAR_2 = '/api/apps'\n",
"VAR_5 = {'id': VAR_4}\n",
"VAR_3 = self.client.post(VAR_2, json.dumps(VAR_5), content_type=\n 'application/json')\n",
"self.assertTrue(self.client.login(username='autotest', password='password'))\n",
"VAR_2 = '/api/apps/{}'.format(VAR_4)\n",
"VAR_3 = self.client.get(VAR_2)\n",
"self.assertEqual(VAR_3.status_code, 200)\n",
"VAR_2 = '/api/apps/{app_id}/logs'.format(**locals())\n",
"VAR_3 = self.client.get(VAR_2)\n",
"self.assertEqual(VAR_3.status_code, 200)\n",
"self.assertIn('autotest2 created initial release', VAR_3.data)\n",
"VAR_2 = '/api/apps/{app_id}/run'.format(**locals())\n",
"VAR_5 = {'command': 'ls -al'}\n",
"VAR_3 = self.client.post(VAR_2, json.dumps(VAR_5), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_3.status_code, 200)\n",
"self.assertEqual(VAR_3.data[0], 0)\n",
"VAR_2 = '/api/apps/{}'.format(VAR_4)\n",
"VAR_3 = self.client.delete(VAR_2)\n",
"self.assertEqual(VAR_3.status_code, 204)\n"
] | [
"def test_admin_can_manage_other_apps(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.assertTrue(self.client.login(username='autotest2', password='password'))\n",
"app_id = 'autotest'\n",
"url = '/api/apps'\n",
"body = {'id': app_id}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertTrue(self.client.login(username='autotest', password='password'))\n",
"url = '/api/apps/{}'.format(app_id)\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"url = '/api/apps/{app_id}/logs'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertIn('autotest2 created initial release', response.data)\n",
"url = '/api/apps/{app_id}/run'.format(**locals())\n",
"body = {'command': 'ls -al'}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(response.data[0], 0)\n",
"url = '/api/apps/{}'.format(app_id)\n",
"response = self.client.delete(url)\n",
"self.assertEqual(response.status_code, 204)\n"
] | [
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"@handler('registered', VAR_0='*')...\n",
"if isinstance(VAR_5, BaseController) and VAR_5.channel not in self.paths:\n",
"self.paths[VAR_5.channel] = VAR_5\n"
] | [
"@handler('registered', channel='*')...\n",
"if isinstance(component, BaseController\n",
"self.paths[component.channel] = component\n"
] | [
0,
0,
0
] | [
"Condition",
"Condition",
"Assign'"
] |
[
"def FUNC_1(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return bool(self.wildcard_names)\n"
] | [
"def has_wildcards(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return bool(self.wildcard_names)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_10(VAR_17):...\n",
"VAR_35 = os.path.dirname(VAR_17)\n",
"if not os.path.exists(VAR_35):\n",
"os.makedirs(VAR_35)\n"
] | [
"def ensure_dir(file_path):...\n",
"directory = os.path.dirname(file_path)\n",
"if not os.path.exists(directory):\n",
"os.makedirs(directory)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Expr'"
] |
[
"def FUNC_6(VAR_7):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_21 = c_onboarding_status().find_one({onboarding.USER_ID: VAR_7}, {'_id':\n 0, 'user_id': 0}) or {}\n",
"VAR_22 = {}\n",
"for VAR_8 in FUNC_2():\n",
"VAR_22[VAR_8] = {'done': VAR_21.get(VAR_8, False)}\n",
"return VAR_22\n"
] | [
"def get_onboarding_status_with_settings(user_id):...\n",
"\"\"\"docstring\"\"\"\n",
"onboarding_status = c_onboarding_status().find_one({onboarding.USER_ID:\n user_id}, {'_id': 0, 'user_id': 0}) or {}\n",
"data = {}\n",
"for step in get_onboarding_steps():\n",
"data[step] = {'done': onboarding_status.get(step, False)}\n",
"return data\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"For",
"Assign'",
"Return'"
] |
[
"def FUNC_21(self, VAR_15):...\n",
"\"\"\"docstring\"\"\"\n",
"for key, value in self.iscsi_ips.items():\n",
"if value['nsp'] == VAR_15:\n",
"return key\n"
] | [
"def _get_ip_using_nsp(self, nsp):...\n",
"\"\"\"docstring\"\"\"\n",
"for key, value in self.iscsi_ips.items():\n",
"if value['nsp'] == nsp:\n",
"return key\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"For",
"Condition",
"Return'"
] |
[
"def FUNC_5(self, VAR_10, VAR_9):...\n",
"\"\"\"docstring\"\"\"\n",
"return self\n"
] | [
"def getChild(self, name, request):...\n",
"\"\"\"docstring\"\"\"\n",
"return self\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_3(VAR_1):...\n",
"return Dependencies(VAR_1)\n"
] | [
"def setup_dependency_injection(config):...\n",
"return Dependencies(config)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_6(self, VAR_23):...\n",
"if VAR_23:\n",
"return self.error()\n",
"return Account._by_name(VAR_23)\n"
] | [
"def run(self, name):...\n",
"if name:\n",
"return self.error()\n",
"return Account._by_name(name)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_22(self, VAR_21, VAR_40):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_57 = self.getfile(VAR_21)\n",
"if VAR_57 == False:\n",
"VAR_57[VAR_6] = FUNC_30.S_IFMT(VAR_57[VAR_6]) | VAR_40\n"
] | [
"def chmod(self, path, perm):...\n",
"\"\"\"docstring\"\"\"\n",
"p = self.getfile(path)\n",
"if p == False:\n",
"p[A_MODE] = stat.S_IFMT(p[A_MODE]) | perm\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'"
] |
[
"def FUNC_29(VAR_10, VAR_11='suggestall'):...\n",
"VAR_32 = [{'text': 'FAST results', 'children': [{'id': '', 'text':\n 'Error retrieving FAST results.'}]}]\n",
"VAR_29 = FUNC_27(VAR_10, VAR_11)\n",
"VAR_45 = requests.get(VAR_29, timeout=2)\n",
"VAR_1.error('fast lookup timed out')\n",
"VAR_27 = FUNC_28(VAR_45.json()['response']['docs'], VAR_11)\n",
"VAR_1.error('fast data exception: %s' % e)\n",
"return VAR_32\n",
"if VAR_27:\n",
"VAR_1.error('fast response: %s - %s' % (VAR_45.status_code, VAR_45.text))\n",
"return [{'text': 'FAST results', 'children': VAR_27}]\n",
"return []\n",
"return VAR_32\n"
] | [
"def _get_fast_results(term, index='suggestall'):...\n",
"error_response = [{'text': 'FAST results', 'children': [{'id': '', 'text':\n 'Error retrieving FAST results.'}]}]\n",
"url = _build_fast_url(term, index)\n",
"r = requests.get(url, timeout=2)\n",
"logger.error('fast lookup timed out')\n",
"select2_results = _fast_results_to_select2_list(r.json()['response']['docs'\n ], index)\n",
"logger.error('fast data exception: %s' % e)\n",
"return error_response\n",
"if select2_results:\n",
"logger.error('fast response: %s - %s' % (r.status_code, r.text))\n",
"return [{'text': 'FAST results', 'children': select2_results}]\n",
"return []\n",
"return error_response\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Return'",
"Condition",
"Expr'",
"Return'",
"Return'",
"Return'"
] |
[
"def FUNC_25(self):...\n",
"VAR_13 = webapp2.Request({'HTTP_X_APPENGINE_INBOUND_APPID': 'some-app'})\n",
"self.assertEqual(model.Identity(model.IDENTITY_SERVICE, 'some-app'),\n handler.service_to_service_authentication(VAR_13))\n"
] | [
"def test_applicable(self):...\n",
"request = webapp2.Request({'HTTP_X_APPENGINE_INBOUND_APPID': 'some-app'})\n",
"self.assertEqual(model.Identity(model.IDENTITY_SERVICE, 'some-app'),\n handler.service_to_service_authentication(request))\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def __new__(VAR_22, VAR_3):...\n",
"VAR_48 = str.__new__(VAR_22, VAR_3)\n",
"VAR_48._is_function = type(VAR_3).__name__ == 'function'\n",
"VAR_48._file = VAR_3\n",
"VAR_48.rule = None\n",
"VAR_48._regex = None\n",
"return VAR_48\n"
] | [
"def __new__(cls, file):...\n",
"obj = str.__new__(cls, file)\n",
"obj._is_function = type(file).__name__ == 'function'\n",
"obj._file = file\n",
"obj.rule = None\n",
"obj._regex = None\n",
"return obj\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_1(VAR_1):...\n",
"VAR_1.execute('string')\n",
"VAR_8 = VAR_1.fetchall()\n",
"return VAR_8\n"
] | [
"def retrieve_videos(db):...\n",
"db.execute(\n 'SELECT id, playlist_id, title, thumbnail, position from video ORDER BY playlist_id ASC, position ASC;'\n )\n",
"rows = db.fetchall()\n",
"return rows\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_4(self):...\n",
"VAR_5 = [{'targetSize': 2, 'capacity': {'min': 2, 'max': 2, 'desired': 2},\n 'replicaPoolName': self.__server_group_name, 'numReplicas': 2, 'region':\n self.TEST_REGION, 'zone': self.TEST_ZONE, 'asgName': self.\n __server_group_name, 'type': 'resizeServerGroup', 'regions': [self.\n TEST_REGION], 'zones': [self.TEST_ZONE], 'credentials': self.bindings[\n 'GCE_CREDENTIALS'], 'cloudProvider': 'gce', 'user': 'integration-tests'}]\n",
"VAR_6 = gcp.GceContractBuilder(self.gce_observer)\n",
"VAR_6.new_clause_builder('Server Group Resized', retryable_for_secs=90\n ).inspect_resource('instance-groups', self.__server_group_name, [\n '--zone', self.TEST_ZONE]).contains_path_eq('size', 2)\n",
"VAR_7 = self.agent.make_json_payload_from_kwargs(VAR_5=job, description=\n 'Server Group Test - resize to 2 instances', application=self.TEST_APP)\n",
"return st.OperationContract(self.new_post_operation(title=\n 'resize_instances', data=payload, path=self.__path), contract=builder.\n build())\n"
] | [
"def resize_server_group(self):...\n",
"job = [{'targetSize': 2, 'capacity': {'min': 2, 'max': 2, 'desired': 2},\n 'replicaPoolName': self.__server_group_name, 'numReplicas': 2, 'region':\n self.TEST_REGION, 'zone': self.TEST_ZONE, 'asgName': self.\n __server_group_name, 'type': 'resizeServerGroup', 'regions': [self.\n TEST_REGION], 'zones': [self.TEST_ZONE], 'credentials': self.bindings[\n 'GCE_CREDENTIALS'], 'cloudProvider': 'gce', 'user': 'integration-tests'}]\n",
"builder = gcp.GceContractBuilder(self.gce_observer)\n",
"builder.new_clause_builder('Server Group Resized', retryable_for_secs=90\n ).inspect_resource('instance-groups', self.__server_group_name, [\n '--zone', self.TEST_ZONE]).contains_path_eq('size', 2)\n",
"payload = self.agent.make_json_payload_from_kwargs(job=job, description=\n 'Server Group Test - resize to 2 instances', application=self.TEST_APP)\n",
"return st.OperationContract(self.new_post_operation(title=\n 'resize_instances', data=payload, path=self.__path), contract=builder.\n build())\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Return'"
] |
[
"def __init__(self, VAR_34, VAR_35, VAR_36):...\n",
"\"\"\"docstring\"\"\"\n",
"self.base_table = ''\n",
"self.field_mapping = self._parse_field_mapping(VAR_34)\n",
"self.path_mapping = self._parse_multi_path_mapping(VAR_35)\n",
"self.custom_methods = self._parse_custom_methods(VAR_36)\n",
"self.WHERE_CONDITION_MAPPING = {self.WHERE_CONDITION:\n '_generate_where_phrase', self.AND_CONDITION: '_parse_and', self.\n OR_CONDITION: '_parse_or', self.NOT_CONDITION: '_parse_not', self.\n EXISTS_CONDITION: '_parse_exists', self.CUSTOM_METHOD_CONDITION:\n '_parse_custom_method_condition'}\n"
] | [
"def __init__(self, field_mapping, paths, custom_methods):...\n",
"\"\"\"docstring\"\"\"\n",
"self.base_table = ''\n",
"self.field_mapping = self._parse_field_mapping(field_mapping)\n",
"self.path_mapping = self._parse_multi_path_mapping(paths)\n",
"self.custom_methods = self._parse_custom_methods(custom_methods)\n",
"self.WHERE_CONDITION_MAPPING = {self.WHERE_CONDITION:\n '_generate_where_phrase', self.AND_CONDITION: '_parse_and', self.\n OR_CONDITION: '_parse_or', self.NOT_CONDITION: '_parse_not', self.\n EXISTS_CONDITION: '_parse_exists', self.CUSTOM_METHOD_CONDITION:\n '_parse_custom_method_condition'}\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_13(self, VAR_14):...\n",
""
] | [
"def is_float(self, col_name):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_5(self):...\n",
"sievelib.commands.add_commands(CLASS_0)\n",
"sievelib.commands.get_command_instance('mytest')\n",
"self.assertRaises(sievelib.commands.UnknownCommand, sievelib.commands.\n get_command_instance, 'unknowncommand')\n",
"self.compilation_ok(\n \"\"\"\n mytest :testtag 10 [\"[email protected]\"];\n \"\"\")\n"
] | [
"def test_add_command(self):...\n",
"sievelib.commands.add_commands(MytestCommand)\n",
"sievelib.commands.get_command_instance('mytest')\n",
"self.assertRaises(sievelib.commands.UnknownCommand, sievelib.commands.\n get_command_instance, 'unknowncommand')\n",
"self.compilation_ok(\n \"\"\"\n mytest :testtag 10 [\"[email protected]\"];\n \"\"\")\n"
] | [
0,
0,
0,
2,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_2(VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_17 = {VAR_24: VAR_25 for line in infFileLines for VAR_24, VAR_25 in (\n line.strip().split('='),)}\n",
"VAR_16 = [open(picture, 'rb').read() for picture in VAR_17['imageCsv'].\n split(',')]\n",
"return [VAR_17, VAR_16]\n"
] | [
"def get_inf_details(inf_file):...\n",
"\"\"\"docstring\"\"\"\n",
"data = {key: val for line in infFileLines for key, val in (line.strip().\n split('='),)}\n",
"files = [open(picture, 'rb').read() for picture in data['imageCsv'].split(',')]\n",
"return [data, files]\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Return'"
] |
[
"@staticmethod...\n",
"return openid.create_logout_url(VAR_3, VAR_23)\n"
] | [
"@staticmethod...\n",
"return openid.create_logout_url(request, dest_url)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_40(self, VAR_16):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = getattr(self, 'parent_doc', self)\n",
"if hasattr(VAR_11, 'format_data_map'):\n",
"return VAR_16 in VAR_11.format_data_map\n",
"return True\n"
] | [
"def in_format_data(self, fieldname):...\n",
"\"\"\"docstring\"\"\"\n",
"doc = getattr(self, 'parent_doc', self)\n",
"if hasattr(doc, 'format_data_map'):\n",
"return fieldname in doc.format_data_map\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Return'",
"Return'"
] |
[
"def FUNC_6(self, VAR_9):...\n",
"return self._tool_classpath('scalac', VAR_9)\n"
] | [
"def compiler_classpath(self, products):...\n",
"return self._tool_classpath('scalac', products)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_10(self):...\n",
"from invenio.modules.oauthclient.models import RemoteAccount\n",
"RemoteAccount.create(1, 'testid', None)\n",
"self.assert401(self.client.get(url_for('oauthclient_settings.index'),\n follow_redirects=True))\n",
"self.login('admin', '')\n",
"VAR_10 = self.client.get(url_for('oauthclient_settings.index'))\n",
"self.assert200(VAR_10)\n",
"assert 'MyLinkedTestAccount' in VAR_10.data\n",
"assert url_for('oauthclient.disconnect', remote_app='test') in VAR_10.data\n",
"assert url_for('oauthclient.login', remote_app='full') in VAR_10.data\n",
"assert url_for('oauthclient.login', remote_app='test_invalid') in VAR_10.data\n"
] | [
"def test_settings_view(self):...\n",
"from invenio.modules.oauthclient.models import RemoteAccount\n",
"RemoteAccount.create(1, 'testid', None)\n",
"self.assert401(self.client.get(url_for('oauthclient_settings.index'),\n follow_redirects=True))\n",
"self.login('admin', '')\n",
"res = self.client.get(url_for('oauthclient_settings.index'))\n",
"self.assert200(res)\n",
"assert 'MyLinkedTestAccount' in res.data\n",
"assert url_for('oauthclient.disconnect', remote_app='test') in res.data\n",
"assert url_for('oauthclient.login', remote_app='full') in res.data\n",
"assert url_for('oauthclient.login', remote_app='test_invalid') in res.data\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"ImportFrom'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assert'",
"Assert'",
"Assert'",
"Assert'"
] |
[
"def FUNC_13(self, *VAR_16, **VAR_10):...\n",
"if not self.current_user or not self.current_user.admin:\n",
"return VAR_7(self, *VAR_16, **kwargs)\n"
] | [
"def wrapper(self, *args, **kwargs):...\n",
"if not self.current_user or not self.current_user.admin:\n",
"return method(self, *args, **kwargs)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'"
] |
[
"def FUNC_2(VAR_4):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_14 = 2.55 + 1\n",
"VAR_15 = float(len(VAR_4)) / VAR_14\n",
"VAR_16 = [word for word in re.split('\\\\W', VAR_4) if word.isalpha()]\n",
"VAR_17 = len(VAR_16)\n",
"return VAR_17 > VAR_15\n"
] | [
"def _is_english_text(text):...\n",
"\"\"\"docstring\"\"\"\n",
"avg_word_length = 2.55 + 1\n",
"expected_word_number = float(len(text)) / avg_word_length\n",
"words = [word for word in re.split('\\\\W', text) if word.isalpha()]\n",
"word_number = len(words)\n",
"return word_number > expected_word_number\n"
] | [
0,
0,
7,
7,
7,
7,
7
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_9(self):...\n",
"for resource in set(resource for VAR_13 in self.rules for resource in\n",
"if resource not in '_cores _nodes'.split():\n",
"logger.info(resource)\n"
] | [
"def list_resources(self):...\n",
"for resource in set(resource for rule in self.rules for resource in rule.\n",
"if resource not in '_cores _nodes'.split():\n",
"logger.info(resource)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Condition",
"Expr'"
] |
[
"def __init__(self, *VAR_17):...\n",
"if len(VAR_17) == 0:\n",
"self._java_dist_dirs = VAR_17\n"
] | [
"def __init__(self, *java_dist_dirs):...\n",
"if len(java_dist_dirs) == 0:\n",
"self._java_dist_dirs = java_dist_dirs\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'"
] |
[
"def FUNC_2(self, VAR_1):...\n",
"self.__checkCompilation(VAR_1, True)\n"
] | [
"def compilation_ok(self, script):...\n",
"self.__checkCompilation(script, True)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_9(self):...\n",
"VAR_13 = self.cnxn.getinfo(pyodbc.SQL_ACCESSIBLE_TABLES)\n",
"self.assertTrue(isinstance(VAR_13, bool))\n"
] | [
"def test_getinfo_bool(self):...\n",
"value = self.cnxn.getinfo(pyodbc.SQL_ACCESSIBLE_TABLES)\n",
"self.assertTrue(isinstance(value, bool))\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_0(VAR_2, VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5 = 1 / pd.concat([(1 / VAR_2[target_type]) for target_type in VAR_3],\n axis=1).sum(axis=1)\n",
"VAR_5.name = '+'.join(VAR_3)\n",
"return pd.concat([VAR_2, VAR_5], axis=1)\n"
] | [
"def combine_odds(odds, target_types):...\n",
"\"\"\"docstring\"\"\"\n",
"combined_odds = 1 / pd.concat([(1 / odds[target_type]) for target_type in\n target_types], axis=1).sum(axis=1)\n",
"combined_odds.name = '+'.join(target_types)\n",
"return pd.concat([odds, combined_odds], axis=1)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_14(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return self._get_users_api().get_current_user(self.request)\n"
] | [
"def get_current_user(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return self._get_users_api().get_current_user(self.request)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_40(VAR_87):...\n",
"return filterfalse(self.is_rule, VAR_87)\n"
] | [
"def files(items):...\n",
"return filterfalse(self.is_rule, items)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_6():...\n",
"VAR_10 = kube_utils.get_pods(VAR_0, VAR_4, VAR_2='kube-system',\n status_phase='Running')\n",
"assert len(VAR_10) >= VAR_5\n"
] | [
"def _check_pods_count():...\n",
"pods = kube_utils.get_pods(host, label, namespace='kube-system',\n status_phase='Running')\n",
"assert len(pods) >= min_pods_count\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assert'"
] |
[
"@property...\n",
"VAR_4 = self.request.session.get('oidc_login_next', None)\n",
"return VAR_4 or import_from_settings('LOGIN_REDIRECT_URL', '/')\n"
] | [
"@property...\n",
"next_url = self.request.session.get('oidc_login_next', None)\n",
"return next_url or import_from_settings('LOGIN_REDIRECT_URL', '/')\n"
] | [
0,
0,
0
] | [
"Condition",
"Assign'",
"Return'"
] |
[
"def __init__(self, VAR_14, VAR_6=None, VAR_15=None, VAR_16=None, VAR_17=...\n",
"self.object_list = VAR_14\n",
"self.model = VAR_14.model\n",
"self.query_params = VAR_6 or {}\n",
"self.list_display = VAR_15 or []\n",
"self.list_filter = VAR_16 or []\n",
"self.list_search = VAR_17 or []\n",
"self.search_query_value = self.query_params.get(VAR_18, '')\n",
"self.search_query_param = VAR_18\n",
"self.ordering_query_value = self.query_params.get(VAR_8, '')\n",
"self.ordering_query_param = VAR_8\n",
"self.columns = self.get_columns()\n",
"self.filters = [CLASS_7(self.model, VAR_10, self.query_params, self.\n object_list) for i, VAR_10 in enumerate(self.list_filter, start=1)\n ] if self.list_filter else []\n"
] | [
"def __init__(self, object_list, query_params=None, list_display=None,...\n",
"self.object_list = object_list\n",
"self.model = object_list.model\n",
"self.query_params = query_params or {}\n",
"self.list_display = list_display or []\n",
"self.list_filter = list_filter or []\n",
"self.list_search = list_search or []\n",
"self.search_query_value = self.query_params.get(search_query_param, '')\n",
"self.search_query_param = search_query_param\n",
"self.ordering_query_value = self.query_params.get(ordering_query_param, '')\n",
"self.ordering_query_param = ordering_query_param\n",
"self.columns = self.get_columns()\n",
"self.filters = [SmartFilter(self.model, field, self.query_params, self.\n object_list) for i, field in enumerate(self.list_filter, start=1)\n ] if self.list_filter else []\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_19(self, VAR_16, VAR_20, VAR_21=False, **VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"self._eql_execute('volume', 'select', VAR_16['name'], 'access', 'delete', '1')\n",
"VAR_0.error(_('Failed to terminate connection to volume %s'), VAR_16['name'])\n"
] | [
"def terminate_connection(self, volume, connector, force=False, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"self._eql_execute('volume', 'select', volume['name'], 'access', 'delete', '1')\n",
"LOG.error(_('Failed to terminate connection to volume %s'), volume['name'])\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'"
] |
[
"def FUNC_13(self, VAR_16):...\n",
"return True\n"
] | [
"def does_intersect_rule(self, rulectx):...\n",
"return True\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_36(VAR_11):...\n",
"for VAR_51, VAR_58 in VAR_11.items():\n",
"if isinstance(VAR_58, str) or not isinstance(VAR_58, Iterable):\n",
"VAR_58 = [VAR_58]\n",
"yield [(VAR_51, VAR_16) for VAR_16 in VAR_58]\n"
] | [
"def flatten(wildcards):...\n",
"for wildcard, values in wildcards.items():\n",
"if isinstance(values, str) or not isinstance(values, Iterable):\n",
"values = [values]\n",
"yield [(wildcard, value) for value in values]\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Condition",
"Assign'",
"Expr'"
] |
[
"import MySQLdb\n",
"import subprocess\n",
"import os.path\n",
"import sys\n",
"VAR_0 = sys.argv[1]\n",
"VAR_1 = subprocess.check_output(['find', '/var/spool/asterisk/monitor/',\n '-type', 'f', '-name', '*.wav']).decode()\n",
"for wav_file in VAR_1.splitlines():\n",
"VAR_3, VAR_4 = os.path.splitext(wav_file)\n",
"VAR_6 = MySQLdb.connect(host='localhost', db='asteriskcdrdb')\n",
"VAR_8 = True\n",
"VAR_7.execute('SELECT uniqueid,recordingfile FROM cdr')\n",
"VAR_5 = '{}.'.format(VAR_3) + VAR_0\n",
"VAR_7 = VAR_6.cursor()\n",
"VAR_2 = VAR_7.fetchall()\n",
"subprocess.check_output(['ffmpeg', '-i', wav_file, VAR_5, '-y'])\n",
"for unique_id, record_file in VAR_2:\n",
"os.remove(wav_file)\n",
"VAR_3, VAR_4 = os.path.splitext(record_file)\n",
"if VAR_4 == '.wav':\n",
"print(VAR_4)\n",
"VAR_7.execute(\"UPDATE cdr SET recordingfile='{}.\".format(VAR_3) + VAR_0 +\n \"'\" + \" WHERE uniqueid='{}'\".format(unique_id))\n",
"VAR_6.commit()\n"
] | [
"import MySQLdb\n",
"import subprocess\n",
"import os.path\n",
"import sys\n",
"file_format = sys.argv[1]\n",
"record_files = subprocess.check_output(['find',\n '/var/spool/asterisk/monitor/', '-type', 'f', '-name', '*.wav']).decode()\n",
"for wav_file in record_files.splitlines():\n",
"name, ext = os.path.splitext(wav_file)\n",
"conn = MySQLdb.connect(host='localhost', db='asteriskcdrdb')\n",
"error = True\n",
"cursor.execute('SELECT uniqueid,recordingfile FROM cdr')\n",
"prefer_format_file = '{}.'.format(name) + file_format\n",
"cursor = conn.cursor()\n",
"result = cursor.fetchall()\n",
"subprocess.check_output(['ffmpeg', '-i', wav_file, prefer_format_file, '-y'])\n",
"for unique_id, record_file in result:\n",
"os.remove(wav_file)\n",
"name, ext = os.path.splitext(record_file)\n",
"if ext == '.wav':\n",
"print(ext)\n",
"cursor.execute(\"UPDATE cdr SET recordingfile='{}.\".format(name) +\n file_format + \"'\" + \" WHERE uniqueid='{}'\".format(unique_id))\n",
"conn.commit()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0
] | [
"Import'",
"Import'",
"Import'",
"Import'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"For",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def __bool__(self):...\n",
"return False\n"
] | [
"def __bool__(self):...\n",
"return False\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_37(self, VAR_40):...\n",
"\"\"\"docstring\"\"\"\n",
"self.set_name(VAR_40, len(self) - 1)\n"
] | [
"def add_name(self, name):...\n",
"\"\"\"docstring\"\"\"\n",
"self.set_name(name, len(self) - 1)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"@VAR_0.route('/peti/a/<form_id>/')...\n",
"if VAR_7 == '':\n",
"return 404\n",
"VAR_9 = ''\n",
"print(VAR_7)\n",
"VAR_2.execute('select * from PETITION_DATA_TB where form_id = {}'.format(VAR_7)\n )\n",
"return 404\n",
"VAR_12 = VAR_11[0][1]\n",
"VAR_11 = VAR_2.fetchall()\n",
"VAR_13 = VAR_11[0][2]\n",
"VAR_14 = VAR_11[0][4]\n",
"VAR_15 = VAR_11[0][5]\n",
"VAR_9 += open('templates/peti_viewer.html').read()\n",
"VAR_9 = VAR_9.replace(' form_display_name ', VAR_12)\n",
"VAR_9 = VAR_9.replace(' form_publish_date ', VAR_13)\n",
"VAR_9 = VAR_9.replace(' form_author ', VAR_14)\n",
"VAR_9 = VAR_9.replace(' form_body_content ', VAR_15)\n",
"return render_template('index.html', OFORM_APPNAME=LocalSettings.\n OFORM_APPNAME, OFORM_CONTENT=BODY_CONTENT)\n"
] | [
"@app.route('/peti/a/<form_id>/')...\n",
"if form_id == '':\n",
"return 404\n",
"BODY_CONTENT = ''\n",
"print(form_id)\n",
"curs.execute('select * from PETITION_DATA_TB where form_id = {}'.format(\n form_id))\n",
"return 404\n",
"form_display_name = result[0][1]\n",
"result = curs.fetchall()\n",
"form_publish_date = result[0][2]\n",
"form_author = result[0][4]\n",
"form_body_content = result[0][5]\n",
"BODY_CONTENT += open('templates/peti_viewer.html').read()\n",
"BODY_CONTENT = BODY_CONTENT.replace(' form_display_name ', form_display_name)\n",
"BODY_CONTENT = BODY_CONTENT.replace(' form_publish_date ', form_publish_date)\n",
"BODY_CONTENT = BODY_CONTENT.replace(' form_author ', form_author)\n",
"BODY_CONTENT = BODY_CONTENT.replace(' form_body_content ', form_body_content)\n",
"return render_template('index.html', OFORM_APPNAME=LocalSettings.\n OFORM_APPNAME, OFORM_CONTENT=BODY_CONTENT)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"For",
"Condition",
"Return'",
"Assign'",
"Expr'",
"Expr'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"AugAssign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"import sqlobject\n",
"import vdm.sqlobject.base as vdmbase\n",
"import vdm.base as vdmbase\n",
"VAR_16 = 'name'\n",
"from vdm.sqlobject.base import State\n",
"from vdm.base import State\n",
"VAR_0 = sqlobject.UnicodeCol(alternateID=True)\n",
"VAR_1 = sqlobject.MultipleJoin('Package')\n",
"VAR_2 = sqlobject.ForeignKey('Package', cascade=True)\n",
"VAR_3 = sqlobject.UnicodeCol(default=None)\n",
"VAR_4 = sqlobject.UnicodeCol(default=None)\n",
"VAR_5 = sqlobject.UnicodeCol(default=None)\n",
"VAR_6 = sqlobject.ForeignKey('License', default=None)\n",
"VAR_7 = sqlobject.UnicodeCol(default=None)\n",
"VAR_2 = sqlobject.ForeignKey('Tag', cascade=True)\n",
"VAR_2 = sqlobject.ForeignKey('PackageTag', cascade=True)\n",
"VAR_8 = CLASS_1\n",
"VAR_9 = vdmbase.get_attribute_names(VAR_8)\n",
"VAR_0 = sqlobject.UnicodeCol(alternateID=True)\n",
"VAR_10 = [('tags', 'ckan.models.package', 'Tag', 'PackageTag')]\n",
"def FUNC_0(self, VAR_11):...\n",
"VAR_14 = self.revision.model.tags.get(VAR_11)\n",
"VAR_14 = self.transaction.model.tags.create(VAR_0=tagname)\n",
"self.tags.create(VAR_14=tag)\n",
"VAR_8 = CLASS_2\n",
"VAR_0 = sqlobject.UnicodeCol(alternateID=True)\n",
"VAR_9 = vdmbase.get_attribute_names(VAR_8)\n",
"VAR_10 = [('packages', 'ckan.models.package', 'Package', 'PackageTag')]\n",
"@classmethod...\n",
"VAR_17 = str(VAR_12)\n",
"VAR_18 = \"UPPER(tag.name) LIKE UPPER('%%%s%%')\" % VAR_17\n",
"return self.select(VAR_18)\n"
] | [
"import sqlobject\n",
"import vdm.sqlobject.base as vdmbase\n",
"import vdm.base as vdmbase\n",
"_defaultOrder = 'name'\n",
"from vdm.sqlobject.base import State\n",
"from vdm.base import State\n",
"name = sqlobject.UnicodeCol(alternateID=True)\n",
"packages = sqlobject.MultipleJoin('Package')\n",
"base = sqlobject.ForeignKey('Package', cascade=True)\n",
"title = sqlobject.UnicodeCol(default=None)\n",
"url = sqlobject.UnicodeCol(default=None)\n",
"download_url = sqlobject.UnicodeCol(default=None)\n",
"license = sqlobject.ForeignKey('License', default=None)\n",
"notes = sqlobject.UnicodeCol(default=None)\n",
"base = sqlobject.ForeignKey('Tag', cascade=True)\n",
"base = sqlobject.ForeignKey('PackageTag', cascade=True)\n",
"sqlobj_version_class = PackageRevision\n",
"versioned_attributes = vdmbase.get_attribute_names(sqlobj_version_class)\n",
"name = sqlobject.UnicodeCol(alternateID=True)\n",
"m2m = [('tags', 'ckan.models.package', 'Tag', 'PackageTag')]\n",
"def add_tag_by_name(self, tagname):...\n",
"tag = self.revision.model.tags.get(tagname)\n",
"tag = self.transaction.model.tags.create(name=tagname)\n",
"self.tags.create(tag=tag)\n",
"sqlobj_version_class = TagRevision\n",
"name = sqlobject.UnicodeCol(alternateID=True)\n",
"versioned_attributes = vdmbase.get_attribute_names(sqlobj_version_class)\n",
"m2m = [('packages', 'ckan.models.package', 'Package', 'PackageTag')]\n",
"@classmethod...\n",
"text_query_str = str(text_query)\n",
"sql_query = \"UPPER(tag.name) LIKE UPPER('%%%s%%')\" % text_query_str\n",
"return self.select(sql_query)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
4,
4
] | [
"Import'",
"Import'",
"Import'",
"Assign'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_5(self, VAR_8, VAR_4):...\n",
"VAR_14 = self.__cache_map[VAR_8]\n",
"VAR_14.remove(VAR_4)\n"
] | [
"def remove(self, entity_class, entity):...\n",
"cache = self.__cache_map[entity_class]\n",
"cache.remove(entity)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_32(*VAR_26):...\n",
""
] | [
"def mocked_check(*_args):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"@abstractproperty...\n",
"\"\"\"docstring\"\"\"\n"
] | [
"@abstractproperty...\n",
"\"\"\"docstring\"\"\"\n"
] | [
0,
0
] | [
"Condition",
"Docstring"
] |
[
"def FUNC_6(self):...\n",
"return self.title\n"
] | [
"def get_feed(self):...\n",
"return self.title\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_7(self, VAR_10):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self._rules:\n",
"if not VAR_10 in self._rules:\n",
"return self._rules[VAR_10]\n"
] | [
"def get_rule(self, name):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self._rules:\n",
"if not name in self._rules:\n",
"return self._rules[name]\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Condition",
"Return'"
] |
[
"def FUNC_19(self, VAR_11):...\n",
"\"\"\"docstring\"\"\"\n",
"if len(self.iscsi_ips) == 1:\n",
"return self.iscsi_ips.keys()[0]\n",
"VAR_15 = self._get_active_nsp(VAR_11)\n",
"if VAR_15 is None:\n",
"VAR_15 = self._get_least_used_nsp(self._get_iscsi_nsps())\n",
"return self._get_ip_using_nsp(VAR_15)\n",
"if VAR_15 is None:\n",
"VAR_40 = _('Least busy iSCSI port not found, using first iSCSI port in list.')\n",
"VAR_1.warn(VAR_40)\n",
"return self.iscsi_ips.keys()[0]\n"
] | [
"def _get_iscsi_ip(self, hostname):...\n",
"\"\"\"docstring\"\"\"\n",
"if len(self.iscsi_ips) == 1:\n",
"return self.iscsi_ips.keys()[0]\n",
"nsp = self._get_active_nsp(hostname)\n",
"if nsp is None:\n",
"nsp = self._get_least_used_nsp(self._get_iscsi_nsps())\n",
"return self._get_ip_using_nsp(nsp)\n",
"if nsp is None:\n",
"msg = _('Least busy iSCSI port not found, using first iSCSI port in list.')\n",
"LOG.warn(msg)\n",
"return self.iscsi_ips.keys()[0]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_31(self, *VAR_79, **VAR_80):...\n",
"def FUNC_39(VAR_101):...\n",
"VAR_101.log = VAR_79, VAR_80\n",
"return VAR_101\n"
] | [
"def log(self, *logs, **kwlogs):...\n",
"def decorate(ruleinfo):...\n",
"ruleinfo.log = logs, kwlogs\n",
"return ruleinfo\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"@login_required...\n",
"VAR_27 = {'title': VAR_19}\n",
"VAR_8 = None\n",
"if VAR_3 is not None:\n",
"VAR_8 = get_object_or_404(VAR_5, **{group_slug_field: group_slug})\n",
"VAR_46 = True\n",
"VAR_27.update({'content_type': get_ct(VAR_8), 'object_id': VAR_8.id})\n",
"if not VAR_46:\n",
"VAR_46 = FUNC_3(VAR_14.user, VAR_8, VAR_12, VAR_13)\n",
"return HttpResponseForbidden()\n",
"VAR_28 = get_object_or_404(VAR_2, **article_args)\n",
"if not VAR_41.is_observing(VAR_28, VAR_14.user):\n",
"VAR_41.observe(VAR_28, VAR_14.user, 'wiki_observed_article_changed')\n",
"return redirect(VAR_28)\n"
] | [
"@login_required...\n",
"article_args = {'title': title}\n",
"group = None\n",
"if group_slug is not None:\n",
"group = get_object_or_404(group_qs, **{group_slug_field: group_slug})\n",
"allow_read = True\n",
"article_args.update({'content_type': get_ct(group), 'object_id': group.id})\n",
"if not allow_read:\n",
"allow_read = has_read_perm(request.user, group, is_member, is_private)\n",
"return HttpResponseForbidden()\n",
"article = get_object_or_404(article_qs, **article_args)\n",
"if not notification.is_observing(article, request.user):\n",
"notification.observe(article, request.user, 'wiki_observed_article_changed')\n",
"return redirect(article)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Expr'",
"Return'"
] |
[
"@FUNC_1.command(aliases=['sta'])...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = asyncio.get_event_loop()\n",
"VAR_7 = await VAR_6.run_in_executor(None, self.station_search, VAR_2)\n",
"await VAR_1.send(VAR_7)\n"
] | [
"@eddb.command(aliases=['sta'])...\n",
"\"\"\"docstring\"\"\"\n",
"loop = asyncio.get_event_loop()\n",
"result = await loop.run_in_executor(None, self.station_search, inp)\n",
"await ctx.send(result)\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def __next__(self):...\n",
""
] | [
"def __next__(self):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_3(self, VAR_8):...\n",
"self.cursor.execute(\n \"SELECT upc, name, weight, description FROM product WHERE description ILIKE '%%%s%%';\"\n , (VAR_8,))\n",
"return self.cursor.fetchall()\n"
] | [
"def find_products_by_desc(self, product_desc):...\n",
"self.cursor.execute(\n \"SELECT upc, name, weight, description FROM product WHERE description ILIKE '%%%s%%';\"\n , (product_desc,))\n",
"return self.cursor.fetchall()\n"
] | [
0,
4,
0
] | [
"FunctionDef'",
"Expr'",
"Return'"
] |
[
"from collections import defaultdict\n",
"import functools\n",
"import json\n",
"import logging\n",
"import re\n",
"import subprocess\n",
"import threading\n",
"import time\n",
"def __init__(self):...\n",
"self._job_status = defaultdict(functools.partial(defaultdict, str))\n",
"self._job_text = defaultdict(str)\n",
"self._job_error_text = defaultdict(str)\n",
"self._job_percent = defaultdict(int)\n",
"self._job_exitstatus = {}\n",
"self._stop_events = {}\n",
"self._latest_job_id = 0\n",
"def FUNC_2(self, VAR_1):...\n",
"VAR_12 = self._formatCredentials(VAR_1, VAR_8='current')\n",
"VAR_9 = '{} rclone lsjson current:'.format(VAR_12)\n",
"VAR_28 = self._execute(VAR_9)\n",
"VAR_36 = e.returncode\n",
"def FUNC_3(self, VAR_1, VAR_2):...\n",
"return {'result': True, 'message': 'Success'}\n",
"return {'result': False, 'message': 'Exit status {}'.format(VAR_36)}\n",
"VAR_12 = self._formatCredentials(VAR_1, VAR_8='current')\n",
"VAR_9 = '{credentials} rclone lsjson current:{path}'.format(VAR_12=\n credentials, VAR_2=path)\n",
"VAR_28 = self._execute(VAR_9)\n",
"def FUNC_4(self, VAR_1, VAR_2):...\n",
"VAR_28 = json.loads(VAR_28)\n",
"VAR_12 = self._formatCredentials(VAR_1, VAR_8='current')\n",
"return VAR_28\n",
"VAR_9 = '{credentials} rclone touch current:{path}/.keep'.format(VAR_12=\n credentials, VAR_2=path)\n",
"VAR_28 = self._execute(VAR_9)\n",
"def FUNC_5(self, VAR_3, VAR_4, VAR_5, VAR_6, VAR_7=None):...\n",
"return {'message': 'Success'}\n",
"VAR_12 = ''\n",
"if VAR_3 is None:\n",
"VAR_29 = VAR_4\n",
"VAR_12 += self._formatCredentials(VAR_3, VAR_8='src')\n",
"if VAR_5 is None:\n",
"VAR_29 = 'src:{}'.format(VAR_4)\n",
"VAR_30 = VAR_6\n",
"VAR_12 += self._formatCredentials(VAR_5, VAR_8='dst')\n",
"VAR_9 = '{credentials} rclone copy {src} {dst} --progress --stats 2s '.format(\n VAR_12=credentials, VAR_29=src, VAR_30=dst)\n",
"VAR_30 = 'dst:{}'.format(VAR_6)\n",
"logging.info(FUNC_0(VAR_9))\n",
"if VAR_7 is None:\n",
"VAR_7 = self._get_next_job_id()\n",
"if self._job_id_exists(VAR_7):\n",
"self._stop_events[VAR_7] = threading.Event()\n",
"self._execute_interactive(VAR_9, VAR_7)\n",
"return VAR_7\n"
] | [
"from collections import defaultdict\n",
"import functools\n",
"import json\n",
"import logging\n",
"import re\n",
"import subprocess\n",
"import threading\n",
"import time\n",
"def __init__(self):...\n",
"self._job_status = defaultdict(functools.partial(defaultdict, str))\n",
"self._job_text = defaultdict(str)\n",
"self._job_error_text = defaultdict(str)\n",
"self._job_percent = defaultdict(int)\n",
"self._job_exitstatus = {}\n",
"self._stop_events = {}\n",
"self._latest_job_id = 0\n",
"def verify(self, data):...\n",
"credentials = self._formatCredentials(data, name='current')\n",
"command = '{} rclone lsjson current:'.format(credentials)\n",
"result = self._execute(command)\n",
"returncode = e.returncode\n",
"def ls(self, data, path):...\n",
"return {'result': True, 'message': 'Success'}\n",
"return {'result': False, 'message': 'Exit status {}'.format(returncode)}\n",
"credentials = self._formatCredentials(data, name='current')\n",
"command = '{credentials} rclone lsjson current:{path}'.format(credentials=\n credentials, path=path)\n",
"result = self._execute(command)\n",
"def mkdir(self, data, path):...\n",
"result = json.loads(result)\n",
"credentials = self._formatCredentials(data, name='current')\n",
"return result\n",
"command = '{credentials} rclone touch current:{path}/.keep'.format(credentials\n =credentials, path=path)\n",
"result = self._execute(command)\n",
"def copy(self, src_data, src_path, dst_data, dst_path, job_id=None):...\n",
"return {'message': 'Success'}\n",
"credentials = ''\n",
"if src_data is None:\n",
"src = src_path\n",
"credentials += self._formatCredentials(src_data, name='src')\n",
"if dst_data is None:\n",
"src = 'src:{}'.format(src_path)\n",
"dst = dst_path\n",
"credentials += self._formatCredentials(dst_data, name='dst')\n",
"command = ('{credentials} rclone copy {src} {dst} --progress --stats 2s '.\n format(credentials=credentials, src=src, dst=dst))\n",
"dst = 'dst:{}'.format(dst_path)\n",
"logging.info(sanitize(command))\n",
"if job_id is None:\n",
"job_id = self._get_next_job_id()\n",
"if self._job_id_exists(job_id):\n",
"self._stop_events[job_id] = threading.Event()\n",
"self._execute_interactive(command, job_id)\n",
"return job_id\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
2,
0,
0,
0,
0,
0,
2,
0,
0,
0,
0,
0,
2,
0,
0,
0,
2,
0,
0,
2,
0,
0,
0,
2,
2,
0,
2,
0,
0,
0,
0,
2,
0
] | [
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Return'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"FunctionDef'",
"Return'",
"Assign'",
"Condition",
"Assign'",
"AugAssign'",
"Condition",
"Assign'",
"Assign'",
"AugAssign'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_11(self):...\n",
"return self.datadocument_set.filter(matched=True).count()\n"
] | [
"def matched_docs(self):...\n",
"return self.datadocument_set.filter(matched=True).count()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@VAR_0.route('/crawling/end')...\n",
"VAR_14 = VAR_10.get('crawl_process_id', None)\n",
"os.kill(VAR_14, signal.SIGTERM)\n",
"VAR_10['crawl_process_id'] = -1\n",
"VAR_15 = VAR_10.get('crawl_start_time', None)\n",
"VAR_10['crawl_total_time'] = time.time() - VAR_15\n",
"flash('You successfully interrupted the crawler', 'success')\n",
"return render_template('end_crawling.html')\n"
] | [
"@app.route('/crawling/end')...\n",
"p_id = session.get('crawl_process_id', None)\n",
"os.kill(p_id, signal.SIGTERM)\n",
"session['crawl_process_id'] = -1\n",
"crawl_start_time = session.get('crawl_start_time', None)\n",
"session['crawl_total_time'] = time.time() - crawl_start_time\n",
"flash('You successfully interrupted the crawler', 'success')\n",
"return render_template('end_crawling.html')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_10(self):...\n",
"frappe.set_user('[email protected]')\n",
"self.assertRaises(frappe.PermissionError, get_filters_cond, 'DocType', dict\n (istable=1), [])\n",
"self.assertTrue(get_filters_cond('DocType', dict(istable=1), [],\n ignore_permissions=True))\n",
"frappe.set_user('Administrator')\n"
] | [
"def test_ignore_permissions_for_get_filters_cond(self):...\n",
"frappe.set_user('[email protected]')\n",
"self.assertRaises(frappe.PermissionError, get_filters_cond, 'DocType', dict\n (istable=1), [])\n",
"self.assertTrue(get_filters_cond('DocType', dict(istable=1), [],\n ignore_permissions=True))\n",
"frappe.set_user('Administrator')\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_5(VAR_3):...\n",
"VAR_0, VAR_10 = FUNC_0()\n",
"VAR_19 = FUNC_13(VAR_10, VAR_3)\n",
"VAR_0.close()\n",
"return VAR_19\n"
] | [
"def tokenNeededExternal(poll_name):...\n",
"conn, c = connectDB()\n",
"tmp = checkTokenNeeded(c, poll_name)\n",
"conn.close()\n",
"return tmp\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_20(VAR_28, VAR_29, VAR_30, VAR_31=None):...\n",
"\"\"\"docstring\"\"\"\n",
"from datadog import DogStatsd\n",
"VAR_60 = VAR_30.split('.', 2)\n",
"assert len(VAR_60) >= 2 and VAR_60[0\n ] == 'snuba', 'prefix must be like `snuba.<category>`'\n",
"return DogStatsd(VAR_28=host, VAR_29=port, namespace=prefix, constant_tags=tags\n )\n"
] | [
"def create_metrics(host, port, prefix, tags=None):...\n",
"\"\"\"docstring\"\"\"\n",
"from datadog import DogStatsd\n",
"bits = prefix.split('.', 2)\n",
"assert len(bits) >= 2 and bits[0\n ] == 'snuba', 'prefix must be like `snuba.<category>`'\n",
"return DogStatsd(host=host, port=port, namespace=prefix, constant_tags=tags)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"ImportFrom'",
"Assign'",
"Assert'",
"Return'"
] |
[
"def FUNC_1(VAR_3):...\n",
"VAR_3 = VAR_3.lower()\n",
"if VAR_3 == 'guiness':\n",
"VAR_3 = 'guinness'\n",
"return VAR_3\n"
] | [
"def fixTypingErrors(name):...\n",
"name = name.lower()\n",
"if name == 'guiness':\n",
"name = 'guinness'\n",
"return name\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"@rt.switch_runtime(fixtures.TEST_SITE_CONFIG, 'sys0')...\n",
"VAR_29 = CLASS_3.Node\n",
"VAR_30 = CLASS_3.has_edge\n",
"VAR_31 = CLASS_3.num_deps\n",
"VAR_32 = CLASS_3.find_check\n",
"VAR_25 = CLASS_3.find_case\n",
"VAR_0 = self.loader.load_all()\n",
"VAR_14 = executors.generate_testcases(VAR_0)\n",
"VAR_33 = VAR_32('Test1_exact', VAR_0)\n",
"VAR_33.getdep('Test0', 'e0')\n",
"VAR_34 = dependency.build_deps(VAR_14)\n",
"dependency.validate_deps(VAR_34)\n",
"assert VAR_31(VAR_34, 'Test1_fully') == 8\n",
"for p in ['sys0:p0', 'sys0:p1']:\n",
"for e0 in ['e0', 'e1']:\n",
"assert VAR_31(VAR_34, 'Test1_by_env') == 4\n",
"for e1 in ['e0', 'e1']:\n",
"assert VAR_31(VAR_34, 'Test1_default') == 4\n",
"assert VAR_30(VAR_34, VAR_29('Test1_fully', p, e0), VAR_29('Test0', p, e1))\n",
"for p in ['sys0:p0', 'sys0:p1']:\n",
"for e in ['e0', 'e1']:\n",
"assert VAR_31(VAR_34, 'Test1_exact') == 6\n",
"assert VAR_30(VAR_34, VAR_29('Test1_by_env', p, e), VAR_29('Test0', p, e))\n",
"for p in ['sys0:p0', 'sys0:p1']:\n",
"assert VAR_30(VAR_34, VAR_29('Test1_default', p, e), VAR_29('Test0', p, e))\n",
"assert VAR_30(VAR_34, VAR_29('Test1_exact', p, 'e0'), VAR_29('Test0', p, 'e0'))\n",
"VAR_35 = VAR_25('Test1_exact', 'e0', VAR_14).check\n",
"assert VAR_30(VAR_34, VAR_29('Test1_exact', p, 'e0'), VAR_29('Test0', p, 'e1'))\n",
"VAR_36 = VAR_25('Test1_exact', 'e1', VAR_14).check\n",
"assert VAR_30(VAR_34, VAR_29('Test1_exact', p, 'e1'), VAR_29('Test0', p, 'e1'))\n",
"assert VAR_35.getdep('Test0', 'e0').name == 'Test0'\n",
"assert VAR_35.getdep('Test0', 'e1').name == 'Test0'\n",
"assert VAR_36.getdep('Test0', 'e1').name == 'Test0'\n",
"VAR_35.getdep('TestX', 'e0')\n",
"VAR_35.getdep('Test0', 'eX')\n",
"VAR_36.getdep('Test0', 'e0')\n"
] | [
"@rt.switch_runtime(fixtures.TEST_SITE_CONFIG, 'sys0')...\n",
"Node = TestDependencies.Node\n",
"has_edge = TestDependencies.has_edge\n",
"num_deps = TestDependencies.num_deps\n",
"find_check = TestDependencies.find_check\n",
"find_case = TestDependencies.find_case\n",
"checks = self.loader.load_all()\n",
"cases = executors.generate_testcases(checks)\n",
"t = find_check('Test1_exact', checks)\n",
"t.getdep('Test0', 'e0')\n",
"deps = dependency.build_deps(cases)\n",
"dependency.validate_deps(deps)\n",
"assert num_deps(deps, 'Test1_fully') == 8\n",
"for p in ['sys0:p0', 'sys0:p1']:\n",
"for e0 in ['e0', 'e1']:\n",
"assert num_deps(deps, 'Test1_by_env') == 4\n",
"for e1 in ['e0', 'e1']:\n",
"assert num_deps(deps, 'Test1_default') == 4\n",
"assert has_edge(deps, Node('Test1_fully', p, e0), Node('Test0', p, e1))\n",
"for p in ['sys0:p0', 'sys0:p1']:\n",
"for e in ['e0', 'e1']:\n",
"assert num_deps(deps, 'Test1_exact') == 6\n",
"assert has_edge(deps, Node('Test1_by_env', p, e), Node('Test0', p, e))\n",
"for p in ['sys0:p0', 'sys0:p1']:\n",
"assert has_edge(deps, Node('Test1_default', p, e), Node('Test0', p, e))\n",
"assert has_edge(deps, Node('Test1_exact', p, 'e0'), Node('Test0', p, 'e0'))\n",
"check_e0 = find_case('Test1_exact', 'e0', cases).check\n",
"assert has_edge(deps, Node('Test1_exact', p, 'e0'), Node('Test0', p, 'e1'))\n",
"check_e1 = find_case('Test1_exact', 'e1', cases).check\n",
"assert has_edge(deps, Node('Test1_exact', p, 'e1'), Node('Test0', p, 'e1'))\n",
"assert check_e0.getdep('Test0', 'e0').name == 'Test0'\n",
"assert check_e0.getdep('Test0', 'e1').name == 'Test0'\n",
"assert check_e1.getdep('Test0', 'e1').name == 'Test0'\n",
"check_e0.getdep('TestX', 'e0')\n",
"check_e0.getdep('Test0', 'eX')\n",
"check_e1.getdep('Test0', 'e0')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assert'",
"For",
"For",
"Assert'",
"For",
"Assert'",
"Assert'",
"For",
"For",
"Assert'",
"Assert'",
"For",
"Assert'",
"Assert'",
"Assign'",
"Assert'",
"Assign'",
"Assert'",
"Assert'",
"Assert'",
"Assert'",
"Expr'",
"Expr'",
"Expr'"
] |