lines        sequencelengths: 1, 444
raw_lines    sequencelengths: 1, 444
label        sequencelengths: 1, 444
type         sequencelengths: 1, 444
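Each record consists of four parallel per-line sequences: judging from the samples below, one array holds identifier-normalized code lines (FUNC_*/VAR_* names), a second holds the same lines with their original identifiers, a third a per-line integer label, and a fourth the AST node kind of each line. A minimal sketch of iterating such a record follows, assuming the records are stored as JSON objects with these four keys; the JSON Lines file name is hypothetical.

import json

# Minimal sketch: read records assumed to be JSON objects with the four
# parallel per-line sequences listed above ("lines", "raw_lines", "label",
# "type"). The file name "records.jsonl" is an assumption for illustration.
with open("records.jsonl", encoding="utf-8") as fh:
    for raw in fh:
        record = json.loads(raw)
        # The four sequences are aligned: one entry per source-code line.
        for norm_line, raw_line, label, node_type in zip(
            record["lines"], record["raw_lines"], record["label"], record["type"]
        ):
            print(f"{node_type:<14} label={label} {raw_line.rstrip()}")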
[ "def FUNC_2(VAR_3):...\n", "VAR_8 = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) +\n '\\\\users\\\\' + VAR_3 + '.db')\n", "VAR_9 = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + '\\\\cf.db')\n", "VAR_10 = VAR_8.cursor()\n", "VAR_11 = VAR_9.cursor()\n", "VAR_18 = list()\n", "VAR_19 = list()\n", "VAR_20 = list()\n", "VAR_21 = 0\n", "for i in VAR_0:\n", "VAR_11.execute('SELECT * FROM ' + str(i))\n", "VAR_8.close()\n", "VAR_14 = VAR_11.fetchone()\n", "VAR_9.close()\n", "VAR_27 = 0\n", "if VAR_21 == 0:\n", "while VAR_14 != None:\n", "return True\n", "for i in range(len(VAR_18)):\n", "VAR_10.execute(\"SELECT * FROM result WHERE problem = '\" + str(VAR_14[0]) +\n \"' AND diff = '\" + str(VAR_14[1]) + \"' AND verdict = 'OK'\")\n", "VAR_18.append(CLASS_0(VAR_27, i))\n", "if VAR_18[i].first / VAR_21 != 0:\n", "VAR_22, VAR_23 = plt.subplots()\n", "VAR_31 = VAR_10.fetchone()\n", "VAR_21 += VAR_27\n", "VAR_19.append(VAR_18[i].first / VAR_21)\n", "VAR_23.pie(VAR_19, autopct='%1.1f%%', shadow=True, startangle=90)\n", "if VAR_31 != None:\n", "VAR_20.append(VAR_18[i].second)\n", "VAR_23.axis('equal')\n", "VAR_27 += 1\n", "VAR_14 = VAR_11.fetchone()\n", "VAR_23.legend(VAR_20)\n", "VAR_24 = os.path.join(os.path.abspath(os.path.dirname(__file__)) +\n '\\\\users\\\\', VAR_3 + '.png')\n", "if os.path.exists(VAR_24):\n", "os.remove(VAR_24)\n", "plt.savefig(os.path.abspath(os.path.dirname(__file__)) + '\\\\users\\\\' +\n VAR_3 + '.png')\n", "plt.close()\n", "return False\n" ]
[ "def create_stats_picture(username):...\n", "conn = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) +\n '\\\\users\\\\' + username + '.db')\n", "conn2 = sqlite3.connect(os.path.abspath(os.path.dirname(__file__)) + '\\\\cf.db')\n", "cursor = conn.cursor()\n", "cursor2 = conn2.cursor()\n", "a = list()\n", "b = list()\n", "leg = list()\n", "sum = 0\n", "for i in available_tags:\n", "cursor2.execute('SELECT * FROM ' + str(i))\n", "conn.close()\n", "x = cursor2.fetchone()\n", "conn2.close()\n", "count = 0\n", "if sum == 0:\n", "while x != None:\n", "return True\n", "for i in range(len(a)):\n", "cursor.execute(\"SELECT * FROM result WHERE problem = '\" + str(x[0]) +\n \"' AND diff = '\" + str(x[1]) + \"' AND verdict = 'OK'\")\n", "a.append(Pair(count, i))\n", "if a[i].first / sum != 0:\n", "fig1, ax1 = plt.subplots()\n", "y = cursor.fetchone()\n", "sum += count\n", "b.append(a[i].first / sum)\n", "ax1.pie(b, autopct='%1.1f%%', shadow=True, startangle=90)\n", "if y != None:\n", "leg.append(a[i].second)\n", "ax1.axis('equal')\n", "count += 1\n", "x = cursor2.fetchone()\n", "ax1.legend(leg)\n", "path = os.path.join(os.path.abspath(os.path.dirname(__file__)) +\n '\\\\users\\\\', username + '.png')\n", "if os.path.exists(path):\n", "os.remove(path)\n", "plt.savefig(os.path.abspath(os.path.dirname(__file__)) + '\\\\users\\\\' +\n username + '.png')\n", "plt.close()\n", "return False\n" ]
[ 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 0, 0, 4, 4, 4, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Condition", "Condition", "Return'", "For", "Expr'", "Expr'", "Condition", "Assign'", "Assign'", "AugAssign'", "Expr'", "Expr'", "Condition", "Expr'", "Expr'", "AugAssign'", "Assign'", "Expr'", "Assign'", "Condition", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_5(self):...\n", "self.log('')\n", "self.log('POST ' + self.req)\n", "if self.headers['expect'].lower() == '100-continue':\n", "self.parser = MultipartParser(self.log, self.sr, self.headers)\n", "self.s.send(b'HTTP/1.1 100 Continue\\r\\n\\r\\n')\n", "self.parser.parse()\n", "VAR_15 = self.parser.require('act', 64)\n", "if VAR_15 == u'bput':\n", "self.handle_plain_upload()\n", "if VAR_15 == u'login':\n", "return\n", "self.handle_login()\n", "return\n" ]
[ "def handle_post(self):...\n", "self.log('')\n", "self.log('POST ' + self.req)\n", "if self.headers['expect'].lower() == '100-continue':\n", "self.parser = MultipartParser(self.log, self.sr, self.headers)\n", "self.s.send(b'HTTP/1.1 100 Continue\\r\\n\\r\\n')\n", "self.parser.parse()\n", "act = self.parser.require('act', 64)\n", "if act == u'bput':\n", "self.handle_plain_upload()\n", "if act == u'login':\n", "return\n", "self.handle_login()\n", "return\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Condition", "Assign'", "Expr'", "Expr'", "Assign'", "Condition", "Expr'", "Condition", "Return'", "Expr'", "Return'" ]
[ "def FUNC_33(self):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_54.flags.in_install:\n", "return\n", "for VAR_16, VAR_9 in self.get_valid_dict().items():\n", "if not VAR_9 or not isinstance(VAR_9, string_types):\n", "VAR_9 = VAR_54.as_unicode(VAR_9)\n", "if u'<' not in VAR_9 and u'>' not in VAR_9:\n", "if '<!-- markdown -->' in VAR_9 and not ('<script' in VAR_9 or \n", "VAR_25 = self.meta.get_field(VAR_16)\n", "VAR_61 = VAR_9\n", "if VAR_25 and VAR_25.get('fieldtype') in ('Data', 'Code', 'Small Text'\n", "VAR_61 = sanitize_email(VAR_9)\n", "if VAR_25 and (VAR_25.get('ignore_xss_filter') or VAR_25.get('fieldtype') ==\n", "self.set(VAR_16, VAR_61)\n", "VAR_61 = sanitize_html(VAR_9, linkify=df.fieldtype == 'Text Editor')\n" ]
[ "def _sanitize_content(self):...\n", "\"\"\"docstring\"\"\"\n", "if frappe.flags.in_install:\n", "return\n", "for fieldname, value in self.get_valid_dict().items():\n", "if not value or not isinstance(value, string_types):\n", "value = frappe.as_unicode(value)\n", "if u'<' not in value and u'>' not in value:\n", "if '<!-- markdown -->' in value and not ('<script' in value or \n", "df = self.meta.get_field(fieldname)\n", "sanitized_value = value\n", "if df and df.get('fieldtype') in ('Data', 'Code', 'Small Text') and df.get(\n", "sanitized_value = sanitize_email(value)\n", "if df and (df.get('ignore_xss_filter') or df.get('fieldtype') == 'Code' and\n", "self.set(fieldname, sanitized_value)\n", "sanitized_value = sanitize_html(value, linkify=df.fieldtype == 'Text Editor')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "For", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Assign'" ]
[ "def FUNC_8(self):...\n", "\"\"\"docstring\"\"\"\n", "for VAR_1 in DataDocument.objects.all():\n", "VAR_22 = ExtractedText.objects.get_subclass(data_document=doc)\n", "if VAR_1.data_group.group_type.code == 'CP':\n", "self.assertEqual(type(VAR_22), ExtractedCPCat)\n", "if VAR_1.data_group.group_type.code == 'HH':\n", "self.assertEqual(type(VAR_22), ExtractedHHDoc)\n", "self.assertEqual(type(VAR_22), ExtractedText)\n" ]
[ "def test_extractedsubclasses(self):...\n", "\"\"\"docstring\"\"\"\n", "for doc in DataDocument.objects.all():\n", "extsub = ExtractedText.objects.get_subclass(data_document=doc)\n", "if doc.data_group.group_type.code == 'CP':\n", "self.assertEqual(type(extsub), ExtractedCPCat)\n", "if doc.data_group.group_type.code == 'HH':\n", "self.assertEqual(type(extsub), ExtractedHHDoc)\n", "self.assertEqual(type(extsub), ExtractedText)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "For", "Assign'", "Condition", "Expr'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_34(self):...\n", "self.cursor.execute('create table t1(i int)')\n", "self.cursor.execute('insert into t1 values (1)')\n", "VAR_4 = self.cursor.execute('select * from t1')\n", "self.assertEqual(VAR_4, self.cursor)\n" ]
[ "def test_retcursor_select(self):...\n", "self.cursor.execute('create table t1(i int)')\n", "self.cursor.execute('insert into t1 values (1)')\n", "v = self.cursor.execute('select * from t1')\n", "self.assertEqual(v, self.cursor)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_29(self, VAR_5):...\n", "def FUNC_31():...\n", "VAR_24 = vim.eval('tagfiles()')\n", "VAR_25 = VAR_0.getcwd()\n", "return [VAR_0.path.join(VAR_25, x) for x in VAR_24]\n" ]
[ "def _AddTagsFilesIfNeeded(self, extra_data):...\n", "def GetTagFiles():...\n", "tag_files = vim.eval('tagfiles()')\n", "current_working_directory = os.getcwd()\n", "return [os.path.join(current_working_directory, x) for x in tag_files]\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_6(self, VAR_38):...\n", "if not VAR_38:\n", "VAR_101.errors.add(errors.NO_TITLE)\n", "if len(VAR_38) > 100:\n", "VAR_101.errors.add(errors.TITLE_TOO_LONG)\n", "return VAR_38\n" ]
[ "def run(self, title):...\n", "if not title:\n", "c.errors.add(errors.NO_TITLE)\n", "if len(title) > 100:\n", "c.errors.add(errors.TITLE_TOO_LONG)\n", "return title\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Condition", "Expr'", "Return'" ]
[ "import os\n", "import vim\n", "import tempfile\n", "import json\n", "import signal\n", "from subprocess import PIPE\n", "from ycm import vimsupport\n", "from ycm import utils\n", "from ycm.diagnostic_interface import DiagnosticInterface\n", "from ycm.completers.all.omni_completer import OmniCompleter\n", "from ycm.completers.general import syntax_parse\n", "from ycm.completers.completer_utils import FiletypeCompleterExistsForFiletype\n", "from ycm.client.ycmd_keepalive import YcmdKeepalive\n", "from ycm.client.base_request import BaseRequest, BuildRequestData\n", "from ycm.client.command_request import SendCommandRequest\n", "from ycm.client.completion_request import CompletionRequest\n", "from ycm.client.omni_completion_request import OmniCompletionRequest\n", "from ycm.client.event_notification import SendEventNotificationAsync, EventNotification\n", "from ycm.server.responses import ServerError\n", "from UltiSnips import UltiSnips_Manager\n", "VAR_6 = False\n", "VAR_0.environ['no_proxy'] = '127.0.0.1,localhost'\n", "VAR_6 = True\n", "signal.signal(signal.SIGINT, signal.SIG_IGN)\n", "VAR_1 = 30\n", "VAR_2 = ('The ycmd server SHUT DOWN (restart with :YcmRestartServer). ' +\n \"\"\"Stderr (last {0} lines):\n\n\"\"\".format(VAR_1))\n", "VAR_3 = (\n 'The ycmd server SHUT DOWN (restart with :YcmRestartServer). check console output for logs!'\n )\n", "VAR_4 = 10800\n", "def __init__(self, VAR_7):...\n", "self._user_options = VAR_7\n", "self._user_notified_about_crash = False\n", "self._diag_interface = DiagnosticInterface(VAR_7)\n", "self._omnicomp = OmniCompleter(VAR_7)\n", "self._latest_completion_request = None\n", "self._latest_file_parse_request = None\n", "self._server_stdout = None\n", "self._server_stderr = None\n", "self._server_popen = None\n", "self._filetypes_with_keywords_loaded = set()\n", "self._temp_options_filename = None\n", "self._ycmd_keepalive = YcmdKeepalive()\n", "self._SetupServer()\n", "self._ycmd_keepalive.Start()\n", "def FUNC_2(self):...\n", "VAR_14 = utils.GetUnusedLocalhostPort()\n", "self._temp_options_filename = options_file.name\n", "json.dump(dict(self._user_options), options_file)\n", "options_file.flush()\n", "VAR_21 = [utils.PathToPythonInterpreter(), FUNC_0(), '--port={0}'.format(\n VAR_14), '--options_file={0}'.format(options_file.name), '--log={0}'.\n format(self._user_options['server_log_level']),\n '--idle_suicide_seconds={0}'.format(VAR_4)]\n", "if not self._user_options['server_use_vim_stdout']:\n", "VAR_26 = VAR_0.path.join(utils.PathToTempDir(), 'server_{port}_{std}.log')\n", "self._server_popen = utils.SafePopen(VAR_21, stdout=PIPE, stderr=PIPE)\n", "self._server_stdout = VAR_26.format(port=server_port, std='stdout')\n", "BaseRequest.server_location = 'http://localhost:' + str(VAR_14)\n", "self._server_stderr = VAR_26.format(port=server_port, std='stderr')\n", "self._NotifyUserIfServerCrashed()\n", "VAR_21.append('--stdout={0}'.format(self._server_stdout))\n", "def FUNC_3(self):...\n", "VAR_21.append('--stderr={0}'.format(self._server_stderr))\n", "VAR_15 = self._server_popen.poll()\n", "if self._user_options['server_keep_logfiles']:\n", "return VAR_15 is None\n", "VAR_21.append('--keep_logfiles')\n" ]
[ "import os\n", "import vim\n", "import tempfile\n", "import json\n", "import signal\n", "from subprocess import PIPE\n", "from ycm import vimsupport\n", "from ycm import utils\n", "from ycm.diagnostic_interface import DiagnosticInterface\n", "from ycm.completers.all.omni_completer import OmniCompleter\n", "from ycm.completers.general import syntax_parse\n", "from ycm.completers.completer_utils import FiletypeCompleterExistsForFiletype\n", "from ycm.client.ycmd_keepalive import YcmdKeepalive\n", "from ycm.client.base_request import BaseRequest, BuildRequestData\n", "from ycm.client.command_request import SendCommandRequest\n", "from ycm.client.completion_request import CompletionRequest\n", "from ycm.client.omni_completion_request import OmniCompletionRequest\n", "from ycm.client.event_notification import SendEventNotificationAsync, EventNotification\n", "from ycm.server.responses import ServerError\n", "from UltiSnips import UltiSnips_Manager\n", "USE_ULTISNIPS_DATA = False\n", "os.environ['no_proxy'] = '127.0.0.1,localhost'\n", "USE_ULTISNIPS_DATA = True\n", "signal.signal(signal.SIGINT, signal.SIG_IGN)\n", "NUM_YCMD_STDERR_LINES_ON_CRASH = 30\n", "SERVER_CRASH_MESSAGE_STDERR_FILE = (\n 'The ycmd server SHUT DOWN (restart with :YcmRestartServer). ' +\n \"\"\"Stderr (last {0} lines):\n\n\"\"\".format(NUM_YCMD_STDERR_LINES_ON_CRASH))\n", "SERVER_CRASH_MESSAGE_SAME_STDERR = (\n 'The ycmd server SHUT DOWN (restart with :YcmRestartServer). check console output for logs!'\n )\n", "SERVER_IDLE_SUICIDE_SECONDS = 10800\n", "def __init__(self, user_options):...\n", "self._user_options = user_options\n", "self._user_notified_about_crash = False\n", "self._diag_interface = DiagnosticInterface(user_options)\n", "self._omnicomp = OmniCompleter(user_options)\n", "self._latest_completion_request = None\n", "self._latest_file_parse_request = None\n", "self._server_stdout = None\n", "self._server_stderr = None\n", "self._server_popen = None\n", "self._filetypes_with_keywords_loaded = set()\n", "self._temp_options_filename = None\n", "self._ycmd_keepalive = YcmdKeepalive()\n", "self._SetupServer()\n", "self._ycmd_keepalive.Start()\n", "def _SetupServer(self):...\n", "server_port = utils.GetUnusedLocalhostPort()\n", "self._temp_options_filename = options_file.name\n", "json.dump(dict(self._user_options), options_file)\n", "options_file.flush()\n", "args = [utils.PathToPythonInterpreter(), _PathToServerScript(),\n '--port={0}'.format(server_port), '--options_file={0}'.format(\n options_file.name), '--log={0}'.format(self._user_options[\n 'server_log_level']), '--idle_suicide_seconds={0}'.format(\n SERVER_IDLE_SUICIDE_SECONDS)]\n", "if not self._user_options['server_use_vim_stdout']:\n", "filename_format = os.path.join(utils.PathToTempDir(), 'server_{port}_{std}.log'\n )\n", "self._server_popen = utils.SafePopen(args, stdout=PIPE, stderr=PIPE)\n", "self._server_stdout = filename_format.format(port=server_port, std='stdout')\n", "BaseRequest.server_location = 'http://localhost:' + str(server_port)\n", "self._server_stderr = filename_format.format(port=server_port, std='stderr')\n", "self._NotifyUserIfServerCrashed()\n", "args.append('--stdout={0}'.format(self._server_stdout))\n", "def _IsServerAlive(self):...\n", "args.append('--stderr={0}'.format(self._server_stderr))\n", "returncode = self._server_popen.poll()\n", "if self._user_options['server_keep_logfiles']:\n", "return returncode is None\n", "args.append('--keep_logfiles')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 7, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Condition", "Return'", "Expr'" ]
[ "def FUNC_0(self):...\n", "\"\"\"docstring\"\"\"\n", "db.session.add(self)\n", "db.session.commit()\n" ]
[ "def create(self):...\n", "\"\"\"docstring\"\"\"\n", "db.session.add(self)\n", "db.session.commit()\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Expr'" ]
[ "def FUNC_9(VAR_8, VAR_9):...\n", "VAR_30 = {}\n", "for VAR_36 in ('point', 'line', 'polygon'):\n", "VAR_35 = VAR_9 * VAR_18[VAR_36]\n", "return VAR_30\n", "VAR_30[VAR_36] = bounds_buffer(VAR_8, VAR_35)\n" ]
[ "def bounds_pad(bounds, meters_per_pixel_dim):...\n", "buffered_by_type = {}\n", "for geometry_type in ('point', 'line', 'polygon'):\n", "offset = meters_per_pixel_dim * buf_by_type[geometry_type]\n", "return buffered_by_type\n", "buffered_by_type[geometry_type] = bounds_buffer(bounds, offset)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Assign'", "Return'", "Assign'" ]
[ "def FUNC_14(self, VAR_9, VAR_10=None):...\n", "if isinstance(VAR_9, str) or callable(VAR_9):\n", "self.log.append(IOFile(VAR_9, VAR_16=self) if isinstance(VAR_9, str) else VAR_9\n )\n", "VAR_48 = len(self.log)\n", "if VAR_10:\n", "for VAR_51 in VAR_9:\n", "self.log.add_name(VAR_10)\n", "self._set_log_item(VAR_51)\n", "if VAR_10:\n", "self.log.set_name(VAR_10, VAR_48, end=len(self.log))\n" ]
[ "def _set_log_item(self, item, name=None):...\n", "if isinstance(item, str) or callable(item):\n", "self.log.append(IOFile(item, rule=self) if isinstance(item, str) else item)\n", "start = len(self.log)\n", "if name:\n", "for i in item:\n", "self.log.add_name(name)\n", "self._set_log_item(i)\n", "if name:\n", "self.log.set_name(name, start, end=len(self.log))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Assign'", "Condition", "For", "Expr'", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_0(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_8 = len(self.log_files)\n", "VAR_9 = self.redis_client.lrange('LOG_FILENAMES:{}'.format(self.\n node_ip_address), VAR_8, -1)\n", "for VAR_10 in VAR_9:\n", "VAR_0.info('Beginning to track file {}'.format(VAR_10))\n", "assert VAR_10 not in self.log_files\n", "self.log_files[VAR_10] = []\n" ]
[ "def update_log_filenames(self):...\n", "\"\"\"docstring\"\"\"\n", "num_current_log_files = len(self.log_files)\n", "new_log_filenames = self.redis_client.lrange('LOG_FILENAMES:{}'.format(self\n .node_ip_address), num_current_log_files, -1)\n", "for log_filename in new_log_filenames:\n", "logger.info('Beginning to track file {}'.format(log_filename))\n", "assert log_filename not in self.log_files\n", "self.log_files[log_filename] = []\n" ]
[ 0, 0, 6, 6, 6, 0, 6, 6 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "For", "Expr'", "Assert'", "Assign'" ]
[ "def FUNC_2():...\n", "" ]
[ "def config_path():...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_7(self, VAR_3):...\n", "" ]
[ "def t_error(self, tok):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_19(VAR_12):...\n", "VAR_58 = 0\n", "for line in VAR_12:\n", "VAR_58 = VAR_58 + 1\n", "VAR_77 = 'line3' if VAR_58 % 2 == 0 else 'line'\n", "print('<div class=\"' + VAR_77 + '\">' + FUNC_23(line) + '</div>')\n" ]
[ "def show_log(stdout):...\n", "i = 0\n", "for line in stdout:\n", "i = i + 1\n", "line_class = 'line3' if i % 2 == 0 else 'line'\n", "print('<div class=\"' + line_class + '\">' + escape_html(line) + '</div>')\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_12(self, VAR_9, VAR_10, VAR_11, VAR_16={}):...\n", "get_and_check_project(VAR_9, VAR_11)\n", "VAR_28 = self.queryset.annotate(**annotate).get(VAR_10=pk, VAR_1=project_pk)\n", "return VAR_28\n" ]
[ "def get_and_check_task(self, request, pk, project_pk, annotate={}):...\n", "get_and_check_project(request, project_pk)\n", "task = self.queryset.annotate(**annotate).get(pk=pk, project=project_pk)\n", "return task\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Return'" ]
[ "@VAR_0.route('/statistics')...\n", "VAR_28 = VAR_1.connection.cursor()\n", "VAR_28.execute(\n 'SELECT cid FROM Crawls WHERE crawl_date = (SELECT max(crawl_date) FROM Crawls)'\n )\n", "VAR_29 = VAR_28.fetchone()\n", "VAR_28.close()\n", "if VAR_29:\n", "VAR_49 = VAR_29['cid']\n", "flash(\n 'There are no statistics to display, please start a new query and wait for it to complete.'\n , 'danger')\n", "return redirect(url_for('cid_statistics', VAR_7=cid_last_crawl))\n", "return redirect(url_for('index'))\n" ]
[ "@app.route('/statistics')...\n", "cur = mysql.connection.cursor()\n", "cur.execute(\n 'SELECT cid FROM Crawls WHERE crawl_date = (SELECT max(crawl_date) FROM Crawls)'\n )\n", "result = cur.fetchone()\n", "cur.close()\n", "if result:\n", "cid_last_crawl = result['cid']\n", "flash(\n 'There are no statistics to display, please start a new query and wait for it to complete.'\n , 'danger')\n", "return redirect(url_for('cid_statistics', cid=cid_last_crawl))\n", "return redirect(url_for('index'))\n" ]
[ 0, 0, 4, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Expr'", "Assign'", "Expr'", "Condition", "Assign'", "Expr'", "Return'", "Return'" ]
[ "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = discord.Embed(VAR_2='NTR Streaming Guide', VAR_3=discord.Color.blue())\n", "VAR_7.url = 'string'\n", "VAR_7.description = 'How to use NTR CFW with Nitro Stream to Wirelessly Stream'\n", "VAR_7.add_field(name='4 common fixes', value=\n \"\"\"• Are you connected to the Internet?\n• Is your antivirus program blocking the program?\n• Make sure you are not putting the port (:####) into the IP box of Nitro Stream.\n• Make sure you are on the latest preview for NTR 3.6.\"\"\"\n )\n", "await self.bot.say('', VAR_7=embed)\n" ]
[ "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "embed = discord.Embed(title='NTR Streaming Guide', color=discord.Color.blue())\n", "embed.url = (\n 'https://gbatemp.net/threads/tutorial-3ds-screen-recording-without-a-capture-card-ntr-cfw-method.423445/'\n )\n", "embed.description = 'How to use NTR CFW with Nitro Stream to Wirelessly Stream'\n", "embed.add_field(name='4 common fixes', value=\n \"\"\"• Are you connected to the Internet?\n• Is your antivirus program blocking the program?\n• Make sure you are not putting the port (:####) into the IP box of Nitro Stream.\n• Make sure you are on the latest preview for NTR 3.6.\"\"\"\n )\n", "await self.bot.say('', embed=embed)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_23(self):...\n", "VAR_4 = 'hubba-bubba'\n", "VAR_22 = 'csrf token'\n", "VAR_32['_csrft_'] = VAR_22\n", "VAR_32['user_eppn'] = VAR_4\n", "VAR_32['eduPersonPrincipalName'] = VAR_4\n", "VAR_31 = self.app.dispatch_request()\n", "self.assertEqual(VAR_31.status, '200 OK')\n", "self.assertIn(self.app.config['SAML2_LOGOUT_REDIRECT_URL'], json.loads(\n VAR_31.data)['payload']['location'])\n" ]
[ "def test_logout_nologgedin(self):...\n", "eppn = 'hubba-bubba'\n", "csrft = 'csrf token'\n", "session['_csrft_'] = csrft\n", "session['user_eppn'] = eppn\n", "session['eduPersonPrincipalName'] = eppn\n", "response = self.app.dispatch_request()\n", "self.assertEqual(response.status, '200 OK')\n", "self.assertIn(self.app.config['SAML2_LOGOUT_REDIRECT_URL'], json.loads(\n response.data)['payload']['location'])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_0(self, VAR_0, VAR_1, VAR_2, VAR_3):...\n", "if not hasattr(VAR_0, 'kbsite'):\n", "return None\n", "VAR_7 = VAR_0.kbsite\n", "if VAR_7.is_active:\n", "return None\n", "if self._path_allowed(VAR_0.path):\n", "return None\n", "if VAR_0.user.is_staff or VAR_0.user.is_superuser:\n", "return None\n", "return HttpResponse('Site temporarily unavailable', status=503)\n" ]
[ "def process_view(self, request, view_func, view_args, view_kwargs):...\n", "if not hasattr(request, 'kbsite'):\n", "return None\n", "kbsite = request.kbsite\n", "if kbsite.is_active:\n", "return None\n", "if self._path_allowed(request.path):\n", "return None\n", "if request.user.is_staff or request.user.is_superuser:\n", "return None\n", "return HttpResponse('Site temporarily unavailable', status=503)\n" ]
[ 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_6(VAR_8=' '):...\n", "\"\"\"docstring\"\"\"\n", "print('--list: ' + str(VAR_6.list))\n", "if VAR_6.list == 'a' or VAR_6.list == 'all' or VAR_6.list == 'g' or VAR_6.list == 'groups':\n", "VAR_13 = set()\n", "if VAR_6.list == 'a' or VAR_6.list == 'all' or VAR_6.list == 'd' or VAR_6.list == 'descriptions':\n", "for s in VAR_5.tweaks:\n", "VAR_14 = set()\n", "VAR_13.add(s['group'])\n", "print('The groups are:')\n", "for d in VAR_5.tweaks:\n", "for t in sorted(VAR_13):\n", "VAR_14.add(d['group'] + ' | ' + d['description'])\n", "print('group | description:')\n", "print(VAR_8 + t)\n", "for t in sorted(VAR_14):\n", "print(VAR_8 + t)\n" ]
[ "def run_list_mode(indent=' '):...\n", "\"\"\"docstring\"\"\"\n", "print('--list: ' + str(args.list))\n", "if args.list == 'a' or args.list == 'all' or args.list == 'g' or args.list == 'groups':\n", "grp = set()\n", "if args.list == 'a' or args.list == 'all' or args.list == 'd' or args.list == 'descriptions':\n", "for s in tweaks.tweaks:\n", "descriptions = set()\n", "grp.add(s['group'])\n", "print('The groups are:')\n", "for d in tweaks.tweaks:\n", "for t in sorted(grp):\n", "descriptions.add(d['group'] + ' | ' + d['description'])\n", "print('group | description:')\n", "print(indent + t)\n", "for t in sorted(descriptions):\n", "print(indent + t)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Condition", "Assign'", "Condition", "For", "Assign'", "Expr'", "Expr'", "For", "For", "Expr'", "Expr'", "Expr'", "For", "Expr'" ]
[ "@property...\n", "return self.exists and not os.access(self.file, os.W_OK)\n" ]
[ "@property...\n", "return self.exists and not os.access(self.file, os.W_OK)\n" ]
[ 0, 7 ]
[ "Condition", "Return'" ]
[ "def FUNC_24(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_17 = []\n", "for VAR_7, VAR_12 in self.all_processes.items():\n", "for VAR_16 in VAR_12:\n", "return VAR_17\n", "if VAR_16.process.poll() is not None:\n", "VAR_17.append((VAR_7, VAR_16.process))\n" ]
[ "def dead_processes(self):...\n", "\"\"\"docstring\"\"\"\n", "result = []\n", "for process_type, process_infos in self.all_processes.items():\n", "for process_info in process_infos:\n", "return result\n", "if process_info.process.poll() is not None:\n", "result.append((process_type, process_info.process))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "For", "Return'", "Condition", "Expr'" ]
[ "def FUNC_9(self, *VAR_15, **VAR_75):...\n", "" ]
[ "def newfn(self, *a, **env):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_19(self, VAR_14):...\n", "" ]
[ "def is_relation_many_to_many(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_14(self, VAR_10, VAR_11):...\n", "VAR_24 = self.common._cli_run('createhost -add %s %s' % (VAR_10, ' '.join(\n VAR_11)), None)\n" ]
[ "def _modify_3par_fibrechan_host(self, hostname, wwn):...\n", "out = self.common._cli_run('createhost -add %s %s' % (hostname, ' '.join(\n wwn)), None)\n" ]
[ 0, 2 ]
[ "FunctionDef'", "Assign'" ]
[ "from tkinter import *\n", "import datetime\n", "from isoweek import Week\n", "from tkinter import ttk\n", "import sqlite3\n", "from PIL import Image, ImageTk\n", "VAR_0 = 'Trebuchet MS', 24\n", "VAR_1 = 'Trebuchet MS', 14\n", "def __init__(self, VAR_2, VAR_3):...\n", "Frame.__init__(self, VAR_2, bg='#f8f8f8')\n", "VAR_4 = Frame(self, bg='#e7e7e7')\n", "VAR_4.pack(fill='both')\n", "VAR_5 = Image.open('home.jpg')\n", "VAR_6 = ImageTk.PhotoImage(VAR_5)\n", "from landingpage import LandingPage\n", "VAR_7 = Button(VAR_4, image=render, borderwidth=0, highlightthickness=0,\n highlightbackground='#e7e7e7', command=lambda : controller.show_frame(\n LandingPage))\n", "VAR_7.image = VAR_6\n", "VAR_7.pack(side=LEFT)\n", "VAR_8 = Label(VAR_4, text='Meal Planner', font=LARGE_FONT, bg='#e7e7e7', fg\n ='#272822')\n", "VAR_8.pack(side=LEFT, padx=289)\n", "VAR_9 = Button(VAR_4, text='Grocery List', highlightbackground='#e7e7e7',\n command=lambda : view_grocery_list())\n", "VAR_9.pack(side=LEFT)\n", "VAR_10 = Frame(self, bg='#f8f8f8')\n", "VAR_11 = datetime.datetime.now()\n", "VAR_12 = datetime.date(VAR_11.year, VAR_11.month, VAR_11.day)\n", "VAR_13 = VAR_12.isocalendar()[1]\n", "VAR_14 = Week(VAR_11.year, VAR_13)\n", "VAR_15 = Frame(self, bg='#f8f8f8')\n", "VAR_15.rowconfigure(0, weight=1)\n", "VAR_15.columnconfigure(0, weight=1)\n", "VAR_15.rowconfigure(1, weight=3)\n", "VAR_15.columnconfigure(1, weight=3)\n", "VAR_15.pack()\n", "VAR_16 = ['Breakfast', 'Lunch', 'Dinner']\n", "for VAR_44 in range(len(VAR_16)):\n", "Label(VAR_15, text=columnLabels[i], font=('Trebuchet MS', 16), bg='#f8f8f8'\n ).grid(VAR_32=0, VAR_33=i + 2, pady=10, padx=85, sticky='nsew')\n", "VAR_17 = 'Monday ' + str(VAR_14.monday())\n", "VAR_18 = 'Tuesday ' + str(VAR_14.tuesday())\n", "VAR_19 = 'Wednesday ' + str(VAR_14.wednesday())\n", "VAR_20 = 'Thursday ' + str(VAR_14.thursday())\n", "VAR_21 = 'Friday ' + str(VAR_14.friday())\n", "VAR_22 = 'Saturday ' + str(VAR_14.saturday())\n", "VAR_23 = 'Sunday ' + str(VAR_14.sunday())\n", "VAR_24 = [VAR_17, VAR_18, VAR_19, VAR_20, VAR_21, VAR_22, VAR_23]\n", "for VAR_44 in range(len(VAR_24)):\n", "Label(VAR_15, font=('Trebuchet MS', 12), bg='#f8f8f8', text=labels[i]).grid(\n VAR_32=i + 1, VAR_33=0, padx=5, pady=15, sticky='w')\n", "VAR_25 = {}\n", "VAR_35 = ttk.Separator(VAR_15, orient='vertical')\n", "VAR_26 = []\n", "VAR_35.grid(VAR_32=i + 1, VAR_33=1, padx=5, sticky='nsew')\n", "for rows in range(len(VAR_24)):\n", "for columns in range(len(VAR_16)):\n", "def FUNC_0(VAR_27, VAR_28):...\n", "VAR_47 = Button(VAR_15, text='Add meal to day', highlightbackground=\n '#f8f8f8', command=lambda x=rows + 1, y=columns + 2: add_meal(x, y))\n", "VAR_15.pack_forget()\n", "VAR_47.grid(VAR_32=rows + 1, VAR_33=columns + 2)\n", "VAR_10.forget()\n", "VAR_47.position = rows + 1, columns + 2\n", "VAR_36 = Frame(self, bg='#f8f8f8')\n", "VAR_25[VAR_47] = VAR_47.position\n", "VAR_36.rowconfigure(0, weight=1)\n", "VAR_26.append(VAR_47)\n", "VAR_36.columnconfigure(0, weight=1)\n", "VAR_36.rowconfigure(1, weight=3)\n", "VAR_36.columnconfigure(1, weight=3)\n", "VAR_36.pack()\n", "VAR_37 = []\n", "VAR_38 = []\n", "VAR_39 = 'meal_planner.db'\n", "VAR_48 = conn.cursor()\n", "VAR_49 = VAR_48.execute('SELECT * FROM recipe')\n", "for result in [VAR_49]:\n", "for VAR_32 in result.fetchall():\n", "for VAR_44 in range(len(VAR_37)):\n", "VAR_51 = VAR_32[0]\n", "Button(VAR_36, text=recipeNames[i], highlightbackground='#f8f8f8', command=\n lambda x=recipeNames[i], y=ingredientList[i]: add_recipe(x, y,\n add_meal_frame, rowLocation, 
columnLocation)).grid(VAR_32=i, VAR_33=0)\n", "def FUNC_1(VAR_29, VAR_30, VAR_31, VAR_32, VAR_33):...\n", "VAR_30 = VAR_32[4]\n", "VAR_31.pack_forget()\n", "VAR_37.append(VAR_51)\n", "VAR_10.forget()\n", "VAR_38.append(VAR_30)\n", "VAR_40 = VAR_32, VAR_33\n", "for key, value in VAR_25.items():\n", "if value == VAR_40:\n", "FUNC_5(VAR_29, VAR_32, VAR_33)\n", "key.destroy()\n", "FUNC_4(VAR_30)\n", "VAR_41 = Label(VAR_15, text=recipe, bg='#f8f8f8')\n", "VAR_41.grid(VAR_32=row, VAR_33=column)\n", "VAR_41.bind('<Button-1>', lambda event: FUNC_2(VAR_29))\n", "VAR_15.pack()\n", "def FUNC_2(VAR_34):...\n", "VAR_15.pack_forget()\n", "VAR_10.pack(expand=True, fill='both')\n", "VAR_9.pack_forget()\n", "VAR_39 = 'meal_planner.db'\n", "print(VAR_34)\n", "VAR_48 = conn.cursor()\n", "VAR_49 = VAR_48.execute('SELECT * FROM recipe WHERE name = ' + '\"' + VAR_34 +\n '\"')\n", "for result in [VAR_49]:\n", "for VAR_32 in result.fetchall():\n", "Label(VAR_10, text=string, font=MEDIUM_FONT, bg='#f8f8f8', fg='#000000').pack(\n side=TOP)\n", "VAR_51 = VAR_32[0]\n", "Label(VAR_10, text=secondString, font=MEDIUM_FONT, bg='#f8f8f8', fg='#000000'\n ).pack(side=TOP)\n", "VAR_52 = VAR_32[1]\n", "Label(VAR_10, text=thirdString, font=MEDIUM_FONT, bg='#f8f8f8', fg='#000000'\n ).pack(side=TOP)\n", "VAR_53 = VAR_32[2]\n", "VAR_42 = Button(VAR_4, text='Return to Menu', highlightbackground='#e7e7e7',\n command=lambda : [viewRecipeFrame.pack_forget(), menu.pack(),\n returnButton.pack_forget(), label.configure(text='Meal Planer'),\n groceryButton.pack(side=RIGHT)])\n", "VAR_30 = VAR_32[4]\n", "VAR_42.pack(side=RIGHT)\n", "VAR_54 = VAR_32[5]\n", "def FUNC_3():...\n", "VAR_55 = \"\"\"Name: {} \n Cook time: {} \n Number of Servings: {} \n \"\"\".format(\n VAR_51, VAR_52, VAR_53)\n", "print('grocery== list')\n", "VAR_56 = 'Ingredients: {}'.format(VAR_30)\n", "VAR_43 = Frame(self)\n", "VAR_57 = 'Directions: {}'.format(VAR_54)\n", "VAR_43.rowconfigure(0, weight=1)\n", "VAR_43.columnconfigure(0, weight=1)\n", "VAR_43.rowconfigure(1, weight=3)\n", "VAR_43.columnconfigure(1, weight=3)\n", "VAR_43.pack()\n", "VAR_15.pack_forget()\n", "VAR_9.pack_forget()\n", "VAR_8.configure(text='Grocery List')\n", "VAR_44 = 0\n", "VAR_39 = 'meal_planner.db'\n", "VAR_45 = []\n", "VAR_48 = conn.cursor()\n", "VAR_50 = 'ingredients_' + str(VAR_13)\n", "VAR_49 = VAR_48.execute('SELECT * FROM ' + VAR_50)\n", "for result in [VAR_49]:\n", "for VAR_32 in result.fetchall():\n", "VAR_46 = 0\n", "print(VAR_32)\n", "for item in VAR_45:\n", "for ingredient in VAR_32:\n", "print(item)\n", "VAR_42 = Button(VAR_4, text='Return to Menu', highlightbackground='#e7e7e7',\n command=lambda : [groceryListFrame.pack_forget(), menu.pack(),\n returnButton.pack_forget(), label.configure(text='Meal Planer'),\n groceryButton.pack(side=RIGHT)])\n", "print(ingredient)\n", "VAR_44 = VAR_44 + 1\n", "VAR_42.pack(side=RIGHT)\n", "VAR_45.append(str(ingredient).split())\n", "Label(VAR_43, text=ingredient, font=MEDIUM_FONT, justify=LEFT).grid(VAR_32=\n i, VAR_33=0, sticky='w')\n", "def FUNC_4(VAR_30):...\n", "VAR_39 = 'meal_planner.db'\n", "VAR_50 = 'ingredients_' + str(VAR_13)\n", "conn.execute('CREATE TABLE IF NOT EXISTS ' + VAR_50 + ' (ingredients text)')\n", "conn.execute('INSERT INTO ' + VAR_50 + ' VALUES (?);', (VAR_30,))\n", "def FUNC_5(VAR_34, VAR_32, VAR_33):...\n", "print('save weeks')\n", "VAR_39 = 'meal_planner.db'\n", "VAR_50 = 'recipes_' + str(VAR_13)\n", "conn.execute('CREATE TABLE IF NOT EXISTS ' + VAR_50 +\n ' (recipe text, row int, column int)')\n", "conn.execute('INSERT 
INTO ' + VAR_50 + ' VALUES (?, ?, ?);', (VAR_34,\n VAR_32, VAR_33))\n" ]
[ "from tkinter import *\n", "import datetime\n", "from isoweek import Week\n", "from tkinter import ttk\n", "import sqlite3\n", "from PIL import Image, ImageTk\n", "LARGE_FONT = 'Trebuchet MS', 24\n", "MEDIUM_FONT = 'Trebuchet MS', 14\n", "def __init__(self, parent, controller):...\n", "Frame.__init__(self, parent, bg='#f8f8f8')\n", "menuFrame = Frame(self, bg='#e7e7e7')\n", "menuFrame.pack(fill='both')\n", "load = Image.open('home.jpg')\n", "render = ImageTk.PhotoImage(load)\n", "from landingpage import LandingPage\n", "img = Button(menuFrame, image=render, borderwidth=0, highlightthickness=0,\n highlightbackground='#e7e7e7', command=lambda : controller.show_frame(\n LandingPage))\n", "img.image = render\n", "img.pack(side=LEFT)\n", "label = Label(menuFrame, text='Meal Planner', font=LARGE_FONT, bg='#e7e7e7',\n fg='#272822')\n", "label.pack(side=LEFT, padx=289)\n", "groceryButton = Button(menuFrame, text='Grocery List', highlightbackground=\n '#e7e7e7', command=lambda : view_grocery_list())\n", "groceryButton.pack(side=LEFT)\n", "viewRecipeFrame = Frame(self, bg='#f8f8f8')\n", "now = datetime.datetime.now()\n", "dt = datetime.date(now.year, now.month, now.day)\n", "weekNumber = dt.isocalendar()[1]\n", "w = Week(now.year, weekNumber)\n", "menu = Frame(self, bg='#f8f8f8')\n", "menu.rowconfigure(0, weight=1)\n", "menu.columnconfigure(0, weight=1)\n", "menu.rowconfigure(1, weight=3)\n", "menu.columnconfigure(1, weight=3)\n", "menu.pack()\n", "columnLabels = ['Breakfast', 'Lunch', 'Dinner']\n", "for i in range(len(columnLabels)):\n", "Label(menu, text=columnLabels[i], font=('Trebuchet MS', 16), bg='#f8f8f8'\n ).grid(row=0, column=i + 2, pady=10, padx=85, sticky='nsew')\n", "mondayText = 'Monday ' + str(w.monday())\n", "tuesdayText = 'Tuesday ' + str(w.tuesday())\n", "wednesdayText = 'Wednesday ' + str(w.wednesday())\n", "thursdayText = 'Thursday ' + str(w.thursday())\n", "fridayText = 'Friday ' + str(w.friday())\n", "saturdayText = 'Saturday ' + str(w.saturday())\n", "sundayText = 'Sunday ' + str(w.sunday())\n", "labels = [mondayText, tuesdayText, wednesdayText, thursdayText, fridayText,\n saturdayText, sundayText]\n", "for i in range(len(labels)):\n", "Label(menu, font=('Trebuchet MS', 12), bg='#f8f8f8', text=labels[i]).grid(row\n =i + 1, column=0, padx=5, pady=15, sticky='w')\n", "buttonDict = {}\n", "sep = ttk.Separator(menu, orient='vertical')\n", "listOfButtons = []\n", "sep.grid(row=i + 1, column=1, padx=5, sticky='nsew')\n", "for rows in range(len(labels)):\n", "for columns in range(len(columnLabels)):\n", "def add_meal(rowLocation, columnLocation):...\n", "buttons = Button(menu, text='Add meal to day', highlightbackground=\n '#f8f8f8', command=lambda x=rows + 1, y=columns + 2: add_meal(x, y))\n", "menu.pack_forget()\n", "buttons.grid(row=rows + 1, column=columns + 2)\n", "viewRecipeFrame.forget()\n", "buttons.position = rows + 1, columns + 2\n", "add_meal_frame = Frame(self, bg='#f8f8f8')\n", "buttonDict[buttons] = buttons.position\n", "add_meal_frame.rowconfigure(0, weight=1)\n", "listOfButtons.append(buttons)\n", "add_meal_frame.columnconfigure(0, weight=1)\n", "add_meal_frame.rowconfigure(1, weight=3)\n", "add_meal_frame.columnconfigure(1, weight=3)\n", "add_meal_frame.pack()\n", "recipeNames = []\n", "ingredientList = []\n", "database_file = 'meal_planner.db'\n", "cursor = conn.cursor()\n", "selection = cursor.execute('SELECT * FROM recipe')\n", "for result in [selection]:\n", "for row in result.fetchall():\n", "for i in range(len(recipeNames)):\n", "name = row[0]\n", 
"Button(add_meal_frame, text=recipeNames[i], highlightbackground='#f8f8f8',\n command=lambda x=recipeNames[i], y=ingredientList[i]: add_recipe(x, y,\n add_meal_frame, rowLocation, columnLocation)).grid(row=i, column=0)\n", "def add_recipe(recipe, ingredients, view, row, column):...\n", "ingredients = row[4]\n", "view.pack_forget()\n", "recipeNames.append(name)\n", "viewRecipeFrame.forget()\n", "ingredientList.append(ingredients)\n", "searchIndex = row, column\n", "for key, value in buttonDict.items():\n", "if value == searchIndex:\n", "save_weeks_recipes(recipe, row, column)\n", "key.destroy()\n", "save_ingredients(ingredients)\n", "recipeLabel = Label(menu, text=recipe, bg='#f8f8f8')\n", "recipeLabel.grid(row=row, column=column)\n", "recipeLabel.bind('<Button-1>', lambda event: callback(recipe))\n", "menu.pack()\n", "def callback(recipeName):...\n", "menu.pack_forget()\n", "viewRecipeFrame.pack(expand=True, fill='both')\n", "groceryButton.pack_forget()\n", "database_file = 'meal_planner.db'\n", "print(recipeName)\n", "cursor = conn.cursor()\n", "selection = cursor.execute('SELECT * FROM recipe WHERE name = ' + '\"' +\n recipeName + '\"')\n", "for result in [selection]:\n", "for row in result.fetchall():\n", "Label(viewRecipeFrame, text=string, font=MEDIUM_FONT, bg='#f8f8f8', fg=\n '#000000').pack(side=TOP)\n", "name = row[0]\n", "Label(viewRecipeFrame, text=secondString, font=MEDIUM_FONT, bg='#f8f8f8',\n fg='#000000').pack(side=TOP)\n", "time = row[1]\n", "Label(viewRecipeFrame, text=thirdString, font=MEDIUM_FONT, bg='#f8f8f8', fg\n ='#000000').pack(side=TOP)\n", "servings = row[2]\n", "returnButton = Button(menuFrame, text='Return to Menu', highlightbackground\n ='#e7e7e7', command=lambda : [viewRecipeFrame.pack_forget(), menu.pack(\n ), returnButton.pack_forget(), label.configure(text='Meal Planer'),\n groceryButton.pack(side=RIGHT)])\n", "ingredients = row[4]\n", "returnButton.pack(side=RIGHT)\n", "directions = row[5]\n", "def view_grocery_list():...\n", "string = \"\"\"Name: {} \n Cook time: {} \n Number of Servings: {} \n \"\"\".format(name\n , time, servings)\n", "print('grocery== list')\n", "secondString = 'Ingredients: {}'.format(ingredients)\n", "groceryListFrame = Frame(self)\n", "thirdString = 'Directions: {}'.format(directions)\n", "groceryListFrame.rowconfigure(0, weight=1)\n", "groceryListFrame.columnconfigure(0, weight=1)\n", "groceryListFrame.rowconfigure(1, weight=3)\n", "groceryListFrame.columnconfigure(1, weight=3)\n", "groceryListFrame.pack()\n", "menu.pack_forget()\n", "groceryButton.pack_forget()\n", "label.configure(text='Grocery List')\n", "i = 0\n", "database_file = 'meal_planner.db'\n", "item_array = []\n", "cursor = conn.cursor()\n", "tableName = 'ingredients_' + str(weekNumber)\n", "selection = cursor.execute('SELECT * FROM ' + tableName)\n", "for result in [selection]:\n", "for row in result.fetchall():\n", "j = 0\n", "print(row)\n", "for item in item_array:\n", "for ingredient in row:\n", "print(item)\n", "returnButton = Button(menuFrame, text='Return to Menu', highlightbackground\n ='#e7e7e7', command=lambda : [groceryListFrame.pack_forget(), menu.pack\n (), returnButton.pack_forget(), label.configure(text='Meal Planer'),\n groceryButton.pack(side=RIGHT)])\n", "print(ingredient)\n", "i = i + 1\n", "returnButton.pack(side=RIGHT)\n", "item_array.append(str(ingredient).split())\n", "Label(groceryListFrame, text=ingredient, font=MEDIUM_FONT, justify=LEFT).grid(\n row=i, column=0, sticky='w')\n", "def save_ingredients(ingredients):...\n", "database_file = 
'meal_planner.db'\n", "tableName = 'ingredients_' + str(weekNumber)\n", "conn.execute('CREATE TABLE IF NOT EXISTS ' + tableName + ' (ingredients text)')\n", "conn.execute('INSERT INTO ' + tableName + ' VALUES (?);', (ingredients,))\n", "def save_weeks_recipes(recipeName, row, column):...\n", "print('save weeks')\n", "database_file = 'meal_planner.db'\n", "tableName = 'recipes_' + str(weekNumber)\n", "conn.execute('CREATE TABLE IF NOT EXISTS ' + tableName +\n ' (recipe text, row int, column int)')\n", "conn.execute('INSERT INTO ' + tableName + ' VALUES (?, ?, ?);', (recipeName,\n row, column))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "For", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "For", "For", "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "For", "For", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "For", "Condition", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "For", "For", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "FunctionDef'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "For", "Assign'", "Expr'", "For", "For", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_5(self):...\n", "VAR_13 = ''\n", "VAR_13 += self.debugger.dut.serial.port + ' '\n", "VAR_13 += str(self.result_data['id']) + ': ' + self.result_data[\n 'outcome_category'] + ' - ' + self.result_data['outcome']\n", "if self.result_data['data_diff'] is not None and self.result_data['data_diff'\n", "VAR_13 += ' {0:.2f}%'.format(max(self.result_data['data_diff'] * 100, 99.99))\n", "print(colored(VAR_13, 'blue'))\n", "db.cursor.execute('SELECT COUNT(*) FROM log_injection WHERE result_id=?', (\n self.result_data['id'],))\n", "if db.cursor.fetchone()[0] > 1:\n", "db.cursor.execute(\n 'DELETE FROM log_injection WHERE result_id=? AND injection_number=0', (\n self.result_data['id'],))\n", "db.update_dict('result', self.result_data)\n" ]
[ "def log_result(self):...\n", "out = ''\n", "out += self.debugger.dut.serial.port + ' '\n", "out += str(self.result_data['id']) + ': ' + self.result_data['outcome_category'\n ] + ' - ' + self.result_data['outcome']\n", "if self.result_data['data_diff'] is not None and self.result_data['data_diff'\n", "out += ' {0:.2f}%'.format(max(self.result_data['data_diff'] * 100, 99.99))\n", "print(colored(out, 'blue'))\n", "db.cursor.execute('SELECT COUNT(*) FROM log_injection WHERE result_id=?', (\n self.result_data['id'],))\n", "if db.cursor.fetchone()[0] > 1:\n", "db.cursor.execute(\n 'DELETE FROM log_injection WHERE result_id=? AND injection_number=0', (\n self.result_data['id'],))\n", "db.update_dict('result', self.result_data)\n" ]
[ 0, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "AugAssign'", "AugAssign'", "Condition", "AugAssign'", "Expr'", "Expr'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_42(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = {'SEVERITY': {'MEDIUM': 1}, 'CONFIDENCE': {'HIGH': 1}}\n", "self.check_example('yaml_load.py', VAR_2)\n" ]
[ "def test_yaml(self):...\n", "\"\"\"docstring\"\"\"\n", "expect = {'SEVERITY': {'MEDIUM': 1}, 'CONFIDENCE': {'HIGH': 1}}\n", "self.check_example('yaml_load.py', expect)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'" ]
[ "def FUNC_0(VAR_4):...\n", "VAR_3.execute(\"SELECT * FROM players WHERE Name = '%s' COLLATE NOCASE\" % VAR_4)\n", "VAR_14 = dict(VAR_3.fetchone())\n", "return VAR_14\n" ]
[ "def getPlayer(player):...\n", "db.execute(\"SELECT * FROM players WHERE Name = '%s' COLLATE NOCASE\" % player)\n", "playerstats = dict(db.fetchone())\n", "return playerstats\n" ]
[ 0, 4, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_14(self):...\n", "if not self.config:\n", "self.logger.error(' Config not loaded yet!')\n", "if not self.session:\n", "self.logger.error(' Init aborted. No session was found!')\n", "self.logger.debug(self.config)\n", "VAR_9 = FUNC_6(self.session, self.window_name)\n", "if VAR_9:\n", "self.logger.debug(\"window '%s' found running\" % self.window_name)\n", "if not self.kill_mode:\n", "if self.kill_mode:\n", "self.logger.info(\"creating window '%s'\" % self.window_name)\n", "self.logger.info(\n \"There is no component running by the name '%s'. Exiting kill mode\" %\n self.window_name)\n", "self.logger.info('Shutting down window...')\n", "VAR_9 = self.session.new_window(self.window_name)\n", "FUNC_4(VAR_9)\n", "FUNC_5(VAR_9, self.config['cmd'][0]['start'], self.log_file, self.window_name)\n", "self.logger.info('... done!')\n" ]
[ "def init(self):...\n", "if not self.config:\n", "self.logger.error(' Config not loaded yet!')\n", "if not self.session:\n", "self.logger.error(' Init aborted. No session was found!')\n", "self.logger.debug(self.config)\n", "window = find_window(self.session, self.window_name)\n", "if window:\n", "self.logger.debug(\"window '%s' found running\" % self.window_name)\n", "if not self.kill_mode:\n", "if self.kill_mode:\n", "self.logger.info(\"creating window '%s'\" % self.window_name)\n", "self.logger.info(\n \"There is no component running by the name '%s'. Exiting kill mode\" %\n self.window_name)\n", "self.logger.info('Shutting down window...')\n", "window = self.session.new_window(self.window_name)\n", "kill_window(window)\n", "start_window(window, self.config['cmd'][0]['start'], self.log_file, self.\n window_name)\n", "self.logger.info('... done!')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Assign'", "Condition", "Expr'", "Condition", "Condition", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "from osv import fields, osv\n", "from tools.translate import _\n", "VAR_0 = 'product.product'\n", "def FUNC_0(self, VAR_1, VAR_2, VAR_3, VAR_4=None):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_4 is None:\n", "VAR_4 = {}\n", "VAR_17 = self.pool.get('product.product').browse(VAR_1, VAR_2, VAR_3, VAR_4\n =context)\n", "VAR_18 = (VAR_17.property_stock_account_input and VAR_17.\n property_stock_account_input.id or False)\n", "if not VAR_18:\n", "VAR_18 = (VAR_17.categ_id.property_stock_account_input_categ and VAR_17.\n categ_id.property_stock_account_input_categ.id or False)\n", "VAR_19 = (VAR_17.property_stock_account_output and VAR_17.\n property_stock_account_output.id or False)\n", "if not VAR_19:\n", "VAR_19 = (VAR_17.categ_id.property_stock_account_output_categ and VAR_17.\n categ_id.property_stock_account_output_categ.id or False)\n", "VAR_20 = (VAR_17.categ_id.property_stock_journal and VAR_17.categ_id.\n property_stock_journal.id or False)\n", "VAR_21 = (VAR_17.categ_id.property_stock_variation and VAR_17.categ_id.\n property_stock_variation.id or False)\n", "return {'stock_account_input': VAR_18, 'stock_account_output': VAR_19,\n 'stock_journal': VAR_20, 'property_stock_variation': VAR_21}\n" ]
[ "from osv import fields, osv\n", "from tools.translate import _\n", "_inherit = 'product.product'\n", "def get_product_accounts(self, cr, uid, product_id, context=None):...\n", "\"\"\"docstring\"\"\"\n", "if context is None:\n", "context = {}\n", "product_obj = self.pool.get('product.product').browse(cr, uid, product_id,\n context=context)\n", "stock_input_acc = (product_obj.property_stock_account_input and product_obj\n .property_stock_account_input.id or False)\n", "if not stock_input_acc:\n", "stock_input_acc = (product_obj.categ_id.property_stock_account_input_categ and\n product_obj.categ_id.property_stock_account_input_categ.id or False)\n", "stock_output_acc = (product_obj.property_stock_account_output and\n product_obj.property_stock_account_output.id or False)\n", "if not stock_output_acc:\n", "stock_output_acc = (product_obj.categ_id.\n property_stock_account_output_categ and product_obj.categ_id.\n property_stock_account_output_categ.id or False)\n", "journal_id = (product_obj.categ_id.property_stock_journal and product_obj.\n categ_id.property_stock_journal.id or False)\n", "account_variation = (product_obj.categ_id.property_stock_variation and\n product_obj.categ_id.property_stock_variation.id or False)\n", "return {'stock_account_input': stock_input_acc, 'stock_account_output':\n stock_output_acc, 'stock_journal': journal_id,\n 'property_stock_variation': account_variation}\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "Assign'", "FunctionDef'", "Docstring", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_16(self, VAR_13):...\n", "VAR_35 = self.accept(VAR_13)\n", "if VAR_35:\n", "return VAR_35\n" ]
[ "def expect(self, ttype):...\n", "result = self.accept(ttype)\n", "if result:\n", "return result\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'" ]
[ "def FUNC_10(self):...\n", "self.authn('/terminate', VAR_6=True)\n" ]
[ "def test_terminate_authn(self):...\n", "self.authn('/terminate', force_authn=True)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_5(self, VAR_2, VAR_3='app.html', **VAR_4):...\n", "if self.is_api:\n", "self.write(VAR_2)\n", "self.render(VAR_3, VAR_2=page_json, **kw)\n" ]
[ "def page_render(self, page_json, template='app.html', **kw):...\n", "if self.is_api:\n", "self.write(page_json)\n", "self.render(template, page_json=page_json, **kw)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_0(*VAR_0, **VAR_1):...\n", "VAR_2 = requests.Response()\n", "VAR_2.status_code = 200\n", "VAR_2._content_consumed = True\n", "return VAR_2\n" ]
[ "def mock_import_repository_task(*args, **kwargs):...\n", "resp = requests.Response()\n", "resp.status_code = 200\n", "resp._content_consumed = True\n", "return resp\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "from __future__ import unicode_literals\n", "import frappe, json\n", "from frappe import _\n", "from frappe.utils import cint, formatdate\n", "@frappe.whitelist(allow_guest=True)...\n", "from frappe.www.contact import send_message as website_send_message\n", "VAR_4 = VAR_5 = None\n", "website_send_message(VAR_0, VAR_1, VAR_2)\n", "VAR_5 = frappe.db.sql('string'.format(email_id=sender))\n", "if not VAR_5:\n", "VAR_4 = frappe.db.get_value('Lead', dict(email_id=sender))\n", "VAR_6 = frappe.get_doc(dict(doctype='Opportunity', enquiry_from='Customer' if\n customer else 'Lead', VAR_3='Open', title=subject, contact_email=sender,\n to_discuss=message))\n", "if not VAR_4:\n", "if VAR_5:\n", "VAR_8 = frappe.get_doc(dict(doctype='Lead', email_id=sender, lead_name=\n sender.split('@')[0].title())).insert(ignore_permissions=True)\n", "VAR_6.customer = VAR_5[0][0]\n", "if VAR_4:\n", "VAR_6.insert(ignore_permissions=True)\n", "VAR_6.lead = VAR_4\n", "VAR_6.lead = VAR_8.name\n", "VAR_7 = frappe.get_doc({'doctype': 'Communication', 'subject': VAR_0,\n 'content': VAR_1, 'sender': VAR_2, 'sent_or_received': 'Received',\n 'reference_doctype': 'Opportunity', 'reference_name': VAR_6.name})\n", "VAR_7.insert(ignore_permissions=True)\n", "return 'okay'\n" ]
[ "from __future__ import unicode_literals\n", "import frappe, json\n", "from frappe import _\n", "from frappe.utils import cint, formatdate\n", "@frappe.whitelist(allow_guest=True)...\n", "from frappe.www.contact import send_message as website_send_message\n", "lead = customer = None\n", "website_send_message(subject, message, sender)\n", "customer = frappe.db.sql(\n \"\"\"select distinct dl.link_name from `tabDynamic Link` dl\n\t\tleft join `tabContact` c on dl.parent=c.name where dl.link_doctype='Customer'\n\t\tand c.email_id='{email_id}'\"\"\"\n .format(email_id=sender))\n", "if not customer:\n", "lead = frappe.db.get_value('Lead', dict(email_id=sender))\n", "opportunity = frappe.get_doc(dict(doctype='Opportunity', enquiry_from=\n 'Customer' if customer else 'Lead', status='Open', title=subject,\n contact_email=sender, to_discuss=message))\n", "if not lead:\n", "if customer:\n", "new_lead = frappe.get_doc(dict(doctype='Lead', email_id=sender, lead_name=\n sender.split('@')[0].title())).insert(ignore_permissions=True)\n", "opportunity.customer = customer[0][0]\n", "if lead:\n", "opportunity.insert(ignore_permissions=True)\n", "opportunity.lead = lead\n", "opportunity.lead = new_lead.name\n", "comm = frappe.get_doc({'doctype': 'Communication', 'subject': subject,\n 'content': message, 'sender': sender, 'sent_or_received': 'Received',\n 'reference_doctype': 'Opportunity', 'reference_name': opportunity.name})\n", "comm.insert(ignore_permissions=True)\n", "return 'okay'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "Condition", "ImportFrom'", "Assign'", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "from .api import get_query_manager\n", "__all__ = 'get_query_manager',\n", "\"\"\"string\"\"\"\n" ]
[ "from .api import get_query_manager\n", "__all__ = 'get_query_manager',\n", "\"\"\"\nopportunity_qm = models.Opportunity.get_query_manager()\n\nopportunities = opportunity_qm.run(\n {\n 'id': o.Id,\n 'accounts': [\n {'id': a.Id}\n for a in o.Accounts\n ]\n }\n for o in opportunity_qm\n)\n\"\"\"\n" ]
[ 4, 4, 4 ]
[ "ImportFrom'", "Assign'", "Expr'" ]
[ "def FUNC_3(VAR_2):...\n", "if VAR_2 % 10 == 1 and VAR_2 % 100 != 11:\n", "return 'st'\n", "if VAR_2 % 10 == 2 and VAR_2 % 100 != 12:\n", "return 'nd'\n", "if VAR_2 % 10 == 3 and VAR_2 % 100 != 13:\n", "return 'rd'\n", "return 'th'\n" ]
[ "def getPostFix(index):...\n", "if index % 10 == 1 and index % 100 != 11:\n", "return 'st'\n", "if index % 10 == 2 and index % 100 != 12:\n", "return 'nd'\n", "if index % 10 == 3 and index % 100 != 13:\n", "return 'rd'\n", "return 'th'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "def __virtual__():...\n", "\"\"\"docstring\"\"\"\n", "if salt.utils.is_windows():\n", "return False\n", "return 'disk'\n" ]
[ "def __virtual__():...\n", "\"\"\"docstring\"\"\"\n", "if salt.utils.is_windows():\n", "return False\n", "return 'disk'\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Return'" ]
[ "def FUNC_26(VAR_5, VAR_8):...\n", "api.reset_local_state()\n", "VAR_11[0] = VAR_5\n", "VAR_15 = VAR_7.get('/request', extra_environ={'REMOTE_ADDR': ip},\n expect_errors=True)\n", "return VAR_15.status_int\n" ]
[ "def call(ident, ip):...\n", "api.reset_local_state()\n", "mocked_ident[0] = ident\n", "response = app.get('/request', extra_environ={'REMOTE_ADDR': ip},\n expect_errors=True)\n", "return response.status_int\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Return'" ]
[ "def __setstate__(self, VAR_0):...\n", "return self.deserialize(VAR_0)\n" ]
[ "def __setstate__(self, data):...\n", "return self.deserialize(data)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_5(self):...\n", "VAR_17 = []\n", "VAR_16 = datetime.now(timezone(timedelta(hours=+9), 'JST'))\n", "VAR_16 = VAR_16.replace(hour=0, minute=0, second=0, microsecond=0)\n", "for day in range(30)[::-1]:\n", "VAR_20 = VAR_16 - timedelta(days=day)\n", "return VAR_17\n", "VAR_21 = VAR_20.strftime('%Y-%m-%d')\n", "VAR_17.append(VAR_21)\n" ]
[ "def thirty_day_labels(self):...\n", "labels = []\n", "today = datetime.now(timezone(timedelta(hours=+9), 'JST'))\n", "today = today.replace(hour=0, minute=0, second=0, microsecond=0)\n", "for day in range(30)[::-1]:\n", "date = today - timedelta(days=day)\n", "return labels\n", "label = date.strftime('%Y-%m-%d')\n", "labels.append(label)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Return'", "Assign'", "Expr'" ]
[ "@VAR_12.setter...\n", "self._parse('parts', VAR_9)\n" ]
[ "@parts.setter...\n", "self._parse('parts', value)\n" ]
[ 0, 0 ]
[ "Condition", "Expr'" ]
[ "def FUNC_44(self):...\n", "from frappe.utils.file_manager import extract_images_from_doc\n", "if self.doctype != 'DocType':\n", "for VAR_25 in self.meta.get('fields', {'fieldtype': ('=', 'Text Editor')}):\n", "extract_images_from_doc(self, VAR_25.fieldname)\n" ]
[ "def _extract_images_from_text_editor(self):...\n", "from frappe.utils.file_manager import extract_images_from_doc\n", "if self.doctype != 'DocType':\n", "for df in self.meta.get('fields', {'fieldtype': ('=', 'Text Editor')}):\n", "extract_images_from_doc(self, df.fieldname)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "ImportFrom'", "Condition", "For", "Expr'" ]
[ "from django.test import TestCase\n", "from dashboard.tests.loader import load_model_objects\n", "from dashboard.models import QAGroup, ExtractedText\n", "def FUNC_0(self):...\n", "self.objects = load_model_objects()\n", "self.client.login(username='Karyn', password='specialP@55word')\n", "def FUNC_1(self):...\n", "VAR_0 = self.objects.extext.pk\n", "self.assertIsNone(self.objects.extext.qa_group)\n", "self.assertEqual(len(QAGroup.objects.all()), 0)\n", "VAR_0 = self.objects.extext.extraction_script.pk\n", "VAR_1 = self.client.get(f'/qa/extractionscript/{VAR_0}/')\n", "self.assertEqual(VAR_1.status_code, 200)\n", "VAR_2 = QAGroup.objects.get(extraction_script=self.objects.extext.\n extraction_script)\n", "VAR_3 = ExtractedText.objects.get(VAR_2=qa_group)\n", "self.assertIsNotNone(VAR_3.qa_group)\n", "VAR_1 = self.client.get(f'/qa/extractedtext/{VAR_3.pk}/')\n", "def FUNC_2(self):...\n", "self.client.get(f'/qa/extractionscript/{self.objects.exscript.pk}/')\n", "VAR_0 = self.objects.extext.pk\n", "VAR_1 = self.client.post(f'/qa/extractedtext/{VAR_0}/', {'approve': [47]})\n", "self.assertEqual(VAR_1.url, '/qa/extractionscript/',\n 'User should be redirected to QA homepage after last extext is approved.')\n" ]
[ "from django.test import TestCase\n", "from dashboard.tests.loader import load_model_objects\n", "from dashboard.models import QAGroup, ExtractedText\n", "def setUp(self):...\n", "self.objects = load_model_objects()\n", "self.client.login(username='Karyn', password='specialP@55word')\n", "def test_qa_group_creation(self):...\n", "pk = self.objects.extext.pk\n", "self.assertIsNone(self.objects.extext.qa_group)\n", "self.assertEqual(len(QAGroup.objects.all()), 0)\n", "pk = self.objects.extext.extraction_script.pk\n", "response = self.client.get(f'/qa/extractionscript/{pk}/')\n", "self.assertEqual(response.status_code, 200)\n", "qa_group = QAGroup.objects.get(extraction_script=self.objects.extext.\n extraction_script)\n", "ext = ExtractedText.objects.get(qa_group=qa_group)\n", "self.assertIsNotNone(ext.qa_group)\n", "response = self.client.get(f'/qa/extractedtext/{ext.pk}/')\n", "def test_qa_approval_redirect(self):...\n", "self.client.get(f'/qa/extractionscript/{self.objects.exscript.pk}/')\n", "pk = self.objects.extext.pk\n", "response = self.client.post(f'/qa/extractedtext/{pk}/', {'approve': [47]})\n", "self.assertEqual(response.url, '/qa/extractionscript/',\n 'User should be redirected to QA homepage after last extext is approved.')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "FunctionDef'", "Expr'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_16(self):...\n", "VAR_31 = False\n", "if self.name in ['backup', 'remove']:\n", "VAR_31 = self.contains('ynh_abort_if_errors') or self.contains('set -eu')\n", "VAR_31 = self.contains('ynh_abort_if_errors')\n", "if self.name == 'remove':\n", "if VAR_31:\n", "if not VAR_31:\n", "FUNC_4('string')\n", "FUNC_4('string')\n" ]
[ "def check_set_usage(self):...\n", "present = False\n", "if self.name in ['backup', 'remove']:\n", "present = self.contains('ynh_abort_if_errors') or self.contains('set -eu')\n", "present = self.contains('ynh_abort_if_errors')\n", "if self.name == 'remove':\n", "if present:\n", "if not present:\n", "print_error(\n '[YEP-2.4] set -eu or ynh_abort_if_errors is present. If there is a crash, it could put yunohost system in a broken state. For details, look at https://github.com/YunoHost/issues/issues/419'\n )\n", "print_error(\n '[YEP-2.4] ynh_abort_if_errors is missing. For details, look at https://github.com/YunoHost/issues/issues/419'\n )\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_9(VAR_26, VAR_27, VAR_28=bconfig.CFG_MAIN_FIELD, VAR_29=bconfig....\n", "\"\"\"docstring\"\"\"\n", "VAR_53 = 'string'\n", "VAR_52 = []\n", "VAR_54, VAR_55, VAR_56 = FUNC_29(VAR_28)\n", "for VAR_36 in (VAR_26['Single keywords'], VAR_26['Core keywords']):\n", "for VAR_62 in VAR_36:\n", "for VAR_42, VAR_36 in ((VAR_29, VAR_26['Author keywords']), (VAR_30, VAR_26\n", "VAR_52.append(VAR_53 % (VAR_54, VAR_55, VAR_56, encode_for_xml(VAR_31),\n encode_for_xml(VAR_62), VAR_36[VAR_62], encode_for_xml(VAR_27[VAR_62])))\n", "if VAR_36 and len(VAR_36) and VAR_42:\n", "return ''.join(VAR_52)\n", "VAR_54, VAR_55, VAR_56 = FUNC_29(VAR_42)\n", "for VAR_62, info in VAR_36.items():\n", "VAR_52.append(VAR_53 % (VAR_54, VAR_55, VAR_56, encode_for_xml(VAR_31),\n encode_for_xml(VAR_62), '', encode_for_xml(VAR_27[VAR_62])))\n" ]
[ "def _output_marc(output_complete, categories, kw_field=bconfig....\n", "\"\"\"docstring\"\"\"\n", "kw_template = \"\"\"<datafield tag=\"%s\" ind1=\"%s\" ind2=\"%s\">\n <subfield code=\"2\">%s</subfield>\n <subfield code=\"a\">%s</subfield>\n <subfield code=\"n\">%s</subfield>\n <subfield code=\"9\">%s</subfield>\n</datafield>\n\"\"\"\n", "output = []\n", "tag, ind1, ind2 = _parse_marc_code(kw_field)\n", "for keywords in (output_complete['Single keywords'], output_complete[\n", "for kw in keywords:\n", "for field, keywords in ((auth_field, output_complete['Author keywords']), (\n", "output.append(kw_template % (tag, ind1, ind2, encode_for_xml(provenience),\n encode_for_xml(kw), keywords[kw], encode_for_xml(categories[kw])))\n", "if keywords and len(keywords) and field:\n", "return ''.join(output)\n", "tag, ind1, ind2 = _parse_marc_code(field)\n", "for kw, info in keywords.items():\n", "output.append(kw_template % (tag, ind1, ind2, encode_for_xml(provenience),\n encode_for_xml(kw), '', encode_for_xml(categories[kw])))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "For", "For", "For", "Expr'", "Condition", "Return'", "Assign'", "For", "Expr'" ]
[ "def FUNC_21(VAR_16):...\n", "return (VAR_16.input, VAR_16.dynamic_input) if VAR_4 else (VAR_16.output,\n VAR_16.dynamic_output)\n" ]
[ "def get_io(rule):...\n", "return (rule.input, rule.dynamic_input) if input else (rule.output, rule.\n dynamic_output)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_0(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "VAR_1 = os.path.basename(VAR_0)\n", "if os.path.exists(VAR_1):\n", "return\n", "VAR_15 = requests.get(VAR_0, stream=True)\n", "VAR_15.raise_for_status()\n", "for chunk in VAR_15.iter_content(4096):\n", "fd.write(chunk)\n" ]
[ "def fetch_file(url):...\n", "\"\"\"docstring\"\"\"\n", "filename = os.path.basename(url)\n", "if os.path.exists(filename):\n", "return\n", "r = requests.get(url, stream=True)\n", "r.raise_for_status()\n", "for chunk in r.iter_content(4096):\n", "fd.write(chunk)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Return'", "Assign'", "Expr'", "For", "Expr'" ]
[ "def FUNC_5(self):...\n", "VAR_3 = 'SELECT MAX(id) FROM jdk_entries;'\n", "self.entry_id = str(FUNC_0(VAR_3, True)[0][0])\n", "return None\n" ]
[ "def set_entry_id(self):...\n", "sql = 'SELECT MAX(id) FROM jdk_entries;'\n", "self.entry_id = str(db_execute(sql, True)[0][0])\n", "return None\n" ]
[ 0, 0, 4, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_9():...\n", "populate_test_database()\n", "FUNC_1('first playlist')\n", "FUNC_1('second playlist')\n", "FUNC_0(1, 'f title', 'f url', 1)\n", "FUNC_0(1, 's title', 's url', 2)\n", "FUNC_0(1, 't title', 't url', 3)\n", "FUNC_0(2, 'f title', 'f url', 1)\n", "FUNC_0(2, 'fh title', 'fh url', 2)\n", "VAR_11 = VAR_3.get('/videos')\n", "assert VAR_11.json['status'] == 'OK'\n", "assert VAR_11.json['data'] == [dict(id=1, VAR_4=1, VAR_5='f title', VAR_6=\n 'f url', VAR_7=1), dict(id=2, VAR_4=1, VAR_5='s title', VAR_6='s url',\n VAR_7=2), dict(id=3, VAR_4=1, VAR_5='t title', VAR_6='t url', VAR_7=3),\n dict(id=4, VAR_4=2, VAR_5='f title', VAR_6='f url', VAR_7=1), dict(id=5,\n VAR_4=2, VAR_5='fh title', VAR_6='fh url', VAR_7=2)]\n" ]
[ "def test_should_return_all_the_videos():...\n", "populate_test_database()\n", "create_playlist('first playlist')\n", "create_playlist('second playlist')\n", "create_video(1, 'f title', 'f url', 1)\n", "create_video(1, 's title', 's url', 2)\n", "create_video(1, 't title', 't url', 3)\n", "create_video(2, 'f title', 'f url', 1)\n", "create_video(2, 'fh title', 'fh url', 2)\n", "response = test_app.get('/videos')\n", "assert response.json['status'] == 'OK'\n", "assert response.json['data'] == [dict(id=1, playlist_id=1, title='f title',\n thumbnail='f url', position=1), dict(id=2, playlist_id=1, title=\n 's title', thumbnail='s url', position=2), dict(id=3, playlist_id=1,\n title='t title', thumbnail='t url', position=3), dict(id=4, playlist_id\n =2, title='f title', thumbnail='f url', position=1), dict(id=5,\n playlist_id=2, title='fh title', thumbnail='fh url', position=2)]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Assert'", "Assert'" ]
[ "def FUNC_4(self, VAR_8):...\n", "VAR_4 = self.size or ''\n", "return '%s[%s]' % (self.base_field.db_type(VAR_8), VAR_4)\n" ]
[ "def db_type(self, connection):...\n", "size = self.size or ''\n", "return '%s[%s]' % (self.base_field.db_type(connection), size)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_5(self, VAR_1=None, VAR_8=None, **VAR_7):...\n", "if self.closed:\n", "if VAR_8 is None:\n", "VAR_8 = self._adapter\n", "if self._adapter is not None:\n", "if VAR_8 is None:\n", "if self._session is not None:\n", "if VAR_1 is None:\n", "VAR_1 = VAR_8.address\n", "self._attach(VAR_1, **kwargs)\n", "return self._session\n" ]
[ "def attach_socket(self, addr=None, adapter=None, **kwargs):...\n", "if self.closed:\n", "if adapter is None:\n", "adapter = self._adapter\n", "if self._adapter is not None:\n", "if adapter is None:\n", "if self._session is not None:\n", "if addr is None:\n", "addr = adapter.address\n", "self._attach(addr, **kwargs)\n", "return self._session\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Condition", "Assign'", "Condition", "Condition", "Condition", "Condition", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_2(self, VAR_4):...\n", "return SQLConnector.insert_into(self, VAR_4.to_database_query())\n" ]
[ "def insert_into(self, transaction):...\n", "return SQLConnector.insert_into(self, transaction.to_database_query())\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_11(VAR_27):...\n", "VAR_36 = datetime.utcfromtimestamp(VAR_27.created_utc)\n", "VAR_14 = str(VAR_27.author)\n", "VAR_13.execute(\n \"SELECT address, private_key FROM accounts WHERE username='%s'\" % VAR_14)\n", "VAR_34 = VAR_13.fetchall()\n", "if len(VAR_34) > 0:\n", "open_or_receive(VAR_34[0][0], VAR_34[0][1])\n", "FUNC_2(VAR_14=username, VAR_15='receive', VAR_25=message_time.strftime(\n '%Y-%m-%d %H:%M:%S'), VAR_18='message')\n", "VAR_56 = check_balance(VAR_34[0][0])\n", "VAR_39 = 'string'\n", "FUNC_2(VAR_14=username, VAR_15='receive', VAR_25=message_time.strftime(\n '%Y-%m-%d %H:%M:%S'), VAR_17=result[0][0], VAR_18='message')\n", "VAR_27.reply(VAR_39)\n", "VAR_39 = 'string' % (VAR_56[0] / 10 ** 30, VAR_56[1] / 10 ** 30)\n", "VAR_27.reply(VAR_39)\n" ]
[ "def handle_receive(message):...\n", "message_time = datetime.utcfromtimestamp(message.created_utc)\n", "username = str(message.author)\n", "mycursor.execute(\n \"SELECT address, private_key FROM accounts WHERE username='%s'\" % username)\n", "result = mycursor.fetchall()\n", "if len(result) > 0:\n", "open_or_receive(result[0][0], result[0][1])\n", "add_history_record(username=username, action='receive', reddit_time=\n message_time.strftime('%Y-%m-%d %H:%M:%S'), comment_or_message='message')\n", "balance = check_balance(result[0][0])\n", "response = (\n \"You do not currently have an account open. To create one, respond with the text 'create' in the message body.\"\n )\n", "add_history_record(username=username, action='receive', reddit_time=\n message_time.strftime('%Y-%m-%d %H:%M:%S'), address=result[0][0],\n comment_or_message='message')\n", "message.reply(response)\n", "response = (\n \"You currently have %s Nano available, and %s Nano unpocketed. To pocket any, create a new message containing the word 'receive' in the body\"\n % (balance[0] / 10 ** 30, balance[1] / 10 ** 30))\n", "message.reply(response)\n" ]
[ 0, 4, 0, 0, 0, 0, 4, 0, 4, 4, 4, 0, 4, 4 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_39(VAR_101):...\n", "VAR_101.log = VAR_79, VAR_80\n", "return VAR_101\n" ]
[ "def decorate(ruleinfo):...\n", "ruleinfo.log = logs, kwlogs\n", "return ruleinfo\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_0():...\n", "return [webapp2.Route('/restricted/ereporter2/errors', CLASS_2), webapp2.\n Route('/restricted/ereporter2/errors/<error_id:\\\\d+>', CLASS_3),\n webapp2.Route('/restricted/ereporter2/report', CLASS_0), webapp2.Route(\n '/restricted/ereporter2/request/<request_id:[0-9a-fA-F]+>', CLASS_1),\n webapp2.Route('/restricted/ereporter2/silence', CLASS_4), webapp2.Route\n ('/ereporter2/api/v1/on_error', CLASS_7)]\n" ]
[ "def get_frontend_routes():...\n", "return [webapp2.Route('/restricted/ereporter2/errors',\n RestrictedEreporter2ErrorsList), webapp2.Route(\n '/restricted/ereporter2/errors/<error_id:\\\\d+>',\n RestrictedEreporter2Error), webapp2.Route(\n '/restricted/ereporter2/report', RestrictedEreporter2Report), webapp2.\n Route('/restricted/ereporter2/request/<request_id:[0-9a-fA-F]+>',\n RestrictedEreporter2Request), webapp2.Route(\n '/restricted/ereporter2/silence', RestrictedEreporter2Silence), webapp2\n .Route('/ereporter2/api/v1/on_error', OnErrorHandler)]\n" ]
[ 0, 5 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_12(VAR_35):...\n", "return VAR_35.format(host=url.host, date=datetime.utcnow().isoformat(),\n VAR_30=seqnum)\n" ]
[ "def formatOutput(p):...\n", "return p.format(host=url.host, date=datetime.utcnow().isoformat(), seqnum=\n seqnum)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_9(self, VAR_12):...\n", "print('Test type: %s' % self.__class__.__name__, file=fd)\n", "print('Execution start time: %s' % datetime.datetime.fromtimestamp(self.\n start_time).strftime('%d/%m/%Y %H:%M:%S.%f'), file=fd)\n", "print('Execution stop time: %s' % datetime.datetime.fromtimestamp(self.\n stop_time).strftime('%d/%m/%Y %H:%M:%S.%f'), file=fd)\n", "print('Duration: %f seconds' % self.duration, file=fd)\n", "print('Outcome: %s' % self.outcome, file=fd)\n", "VAR_12.write(self.specific_info())\n", "if self.exception_data is not None:\n", "print('', file=fd)\n", "print('EXCEPTION CASTED', file=fd)\n", "VAR_12.write(unicode(self.exception_data))\n" ]
[ "def store_to_file(self, fd):...\n", "print('Test type: %s' % self.__class__.__name__, file=fd)\n", "print('Execution start time: %s' % datetime.datetime.fromtimestamp(self.\n start_time).strftime('%d/%m/%Y %H:%M:%S.%f'), file=fd)\n", "print('Execution stop time: %s' % datetime.datetime.fromtimestamp(self.\n stop_time).strftime('%d/%m/%Y %H:%M:%S.%f'), file=fd)\n", "print('Duration: %f seconds' % self.duration, file=fd)\n", "print('Outcome: %s' % self.outcome, file=fd)\n", "fd.write(self.specific_info())\n", "if self.exception_data is not None:\n", "print('', file=fd)\n", "print('EXCEPTION CASTED', file=fd)\n", "fd.write(unicode(self.exception_data))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Condition", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_26():...\n", "frappe.throw(_('Use of sub-query or function is restricted'), frappe.DataError)\n" ]
[ "def _raise_exception():...\n", "frappe.throw(_('Use of sub-query or function is restricted'), frappe.DataError)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_12(self):...\n", "return self.vars.copy()\n" ]
[ "def get_vars(self):...\n", "return self.vars.copy()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "from django.http import HttpResponse, HttpResponseRedirect\n", "from django.shortcuts import get_object_or_404, render, redirect\n", "from django.views.generic import ListView, DetailView\n", "from django.views.generic.edit import CreateView, UpdateView\n", "from pure_pagination.mixins import PaginationMixin\n", "from django.db.models import Q, Count\n", "from .models import Hunt\n", "from apps.threat.models import Event\n", "from .forms import HuntForm\n", "import csv\n", "from io import StringIO, BytesIO\n", "from codecs import BOM_UTF8\n", "from pytz import timezone\n", "from django.http import JsonResponse\n", "from urllib.parse import urlparse\n", "from http.client import HTTPConnection\n", "VAR_2 = Hunt\n", "VAR_3 = 'threat_hunter/index.html'\n", "VAR_4 = 'hunts'\n", "VAR_5 = 30\n", "def FUNC_4(self, **VAR_6):...\n", "VAR_14 = super().get_context_data(**kwargs)\n", "return VAR_14\n" ]
[ "from django.http import HttpResponse, HttpResponseRedirect\n", "from django.shortcuts import get_object_or_404, render, redirect\n", "from django.views.generic import ListView, DetailView\n", "from django.views.generic.edit import CreateView, UpdateView\n", "from pure_pagination.mixins import PaginationMixin\n", "from django.db.models import Q, Count\n", "from .models import Hunt\n", "from apps.threat.models import Event\n", "from .forms import HuntForm\n", "import csv\n", "from io import StringIO, BytesIO\n", "from codecs import BOM_UTF8\n", "from pytz import timezone\n", "from django.http import JsonResponse\n", "from urllib.parse import urlparse\n", "from http.client import HTTPConnection\n", "model = Hunt\n", "template_name = 'threat_hunter/index.html'\n", "context_object_name = 'hunts'\n", "paginate_by = 30\n", "def get_context_data(self, **kwargs):...\n", "context = super().get_context_data(**kwargs)\n", "return context\n" ]
[ 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_1(VAR_11):...\n", "VAR_5.pack_forget()\n", "VAR_12 = 'meal_planner.db'\n", "VAR_6.pack(fill='both')\n", "VAR_8 = Image.open('home.jpg')\n", "VAR_9 = ImageTk.PhotoImage(VAR_8)\n", "VAR_10 = Button(VAR_6, image=render, borderwidth=0, highlightthickness=0,\n highlightbackground='#e7e7e7', command=lambda : [frame.pack(expand=True,\n fill='both'), menuFrame.pack_forget(), viewDetailsFrame.pack_forget()])\n", "VAR_10.image = VAR_9\n", "VAR_10.pack(side=LEFT)\n", "VAR_13 = Label(VAR_6, text='View Recipe', font=LARGE_FONT, bg='#e7e7e7', fg\n ='#272822')\n", "VAR_13.pack(side=LEFT, padx=300)\n", "VAR_14 = Frame(self, bg='#f8f8f8')\n", "VAR_14.pack(expand=True, fill='both')\n", "VAR_20 = conn.cursor()\n", "VAR_21 = VAR_20.execute('SELECT * FROM recipe WHERE name = ' + '\"' + VAR_11 +\n '\"')\n", "for result in [VAR_21]:\n", "for row in result.fetchall():\n", "VAR_22 = (\n \"\"\"Name: {} \n Cook time: {} \n Number of Servings: {} \n Ingredients: {} \n Directions: {}\"\"\"\n .format(VAR_24, VAR_25, VAR_26, VAR_28, VAR_29))\n", "VAR_24 = row[0]\n", "Label(VAR_14, text=string, font=MEDIUM_FONT, bg='#f8f8f8', fg='#000000').pack(\n side=LEFT)\n", "VAR_25 = row[1]\n", "conn.close()\n", "VAR_26 = row[2]\n", "Button(VAR_6, text='Delete', highlightbackground='#e7e7e7', command=lambda :\n delete_recipe(name)).pack(side=RIGHT)\n", "VAR_27 = row[3]\n", "VAR_28 = row[4]\n", "VAR_29 = row[5]\n" ]
[ "def callback(recipeName):...\n", "viewRecipeFrame.pack_forget()\n", "database_file = 'meal_planner.db'\n", "menuFrame.pack(fill='both')\n", "load = Image.open('home.jpg')\n", "render = ImageTk.PhotoImage(load)\n", "img = Button(menuFrame, image=render, borderwidth=0, highlightthickness=0,\n highlightbackground='#e7e7e7', command=lambda : [frame.pack(expand=True,\n fill='both'), menuFrame.pack_forget(), viewDetailsFrame.pack_forget()])\n", "img.image = render\n", "img.pack(side=LEFT)\n", "label = Label(menuFrame, text='View Recipe', font=LARGE_FONT, bg='#e7e7e7',\n fg='#272822')\n", "label.pack(side=LEFT, padx=300)\n", "viewDetailsFrame = Frame(self, bg='#f8f8f8')\n", "viewDetailsFrame.pack(expand=True, fill='both')\n", "cursor = conn.cursor()\n", "selection = cursor.execute('SELECT * FROM recipe WHERE name = ' + '\"' +\n recipeName + '\"')\n", "for result in [selection]:\n", "for row in result.fetchall():\n", "string = (\n \"\"\"Name: {} \n Cook time: {} \n Number of Servings: {} \n Ingredients: {} \n Directions: {}\"\"\"\n .format(name, time, servings, ingredients, directions))\n", "name = row[0]\n", "Label(viewDetailsFrame, text=string, font=MEDIUM_FONT, bg='#f8f8f8', fg=\n '#000000').pack(side=LEFT)\n", "time = row[1]\n", "conn.close()\n", "servings = row[2]\n", "Button(menuFrame, text='Delete', highlightbackground='#e7e7e7', command=lambda\n : delete_recipe(name)).pack(side=RIGHT)\n", "favorite = row[3]\n", "ingredients = row[4]\n", "directions = row[5]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "For", "For", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'" ]
[ "@property...\n", "if hasattr(self, 'curated_chemical') and self.curated_chemical is not None:\n", "return self.curated_chemical.sid\n", "return None\n" ]
[ "@property...\n", "if hasattr(self, 'curated_chemical') and self.curated_chemical is not None:\n", "return self.curated_chemical.sid\n", "return None\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Return'" ]
[ "def FUNC_3(self, VAR_0, VAR_1):...\n", "" ]
[ "def del_req_handler(self, interface, method):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_0():...\n", "VAR_13 = VAR_0.path.dirname(VAR_0.path.abspath(__file__))\n", "return VAR_0.path.join(VAR_13, 'server/ycmd.py')\n" ]
[ "def _PathToServerScript():...\n", "dir_of_current_script = os.path.dirname(os.path.abspath(__file__))\n", "return os.path.join(dir_of_current_script, 'server/ycmd.py')\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def __init__(self, VAR_18, *VAR_15, **VAR_16):...\n", "CLASS_3.__init__(self, VAR_18, errors.BAD_USERNAME, *VAR_15, **kw)\n" ]
[ "def __init__(self, item, *a, **kw):...\n", "VRequired.__init__(self, item, errors.BAD_USERNAME, *a, **kw)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def __repr__(self):...\n", "return 'Node(%r, %r, %r)' % (self.cname, self.pname, self.ename)\n" ]
[ "def __repr__(self):...\n", "return 'Node(%r, %r, %r)' % (self.cname, self.pname, self.ename)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_61():...\n", "self.cursor.execute('create table t1 (word varchar (100))')\n", "VAR_39 = set(['a'])\n", "self.cursor.execute('insert into t1 (word) VALUES (?)', [VAR_39])\n" ]
[ "def f():...\n", "self.cursor.execute('create table t1 (word varchar (100))')\n", "words = set(['a'])\n", "self.cursor.execute('insert into t1 (word) VALUES (?)', [words])\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_0(self, VAR_2, VAR_3, VAR_4, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = self.pool.get('res.users').browse(VAR_2, VAR_3, VAR_3).company_id.id\n", "VAR_7 = []\n", "VAR_8 = self.pool.get('ir.model.data')\n", "VAR_9 = self.pool.get('account.bank.statement')\n", "VAR_10 = self.pool.get('account.journal')\n", "VAR_2.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % VAR_3)\n", "VAR_11 = map(lambda x1: x1[0], VAR_2.fetchall())\n", "VAR_2.execute('string' % ','.join(map(lambda x: \"'\" + str(x) + \"'\", VAR_11)))\n", "VAR_12 = map(lambda x1: x1[0], VAR_2.fetchall())\n", "for journal in VAR_10.browse(VAR_2, VAR_3, VAR_12):\n", "VAR_4 = VAR_9.search(VAR_2, VAR_3, [('state', '!=', 'confirm'), ('user_id',\n '=', VAR_3), ('journal_id', '=', journal.id)])\n", "VAR_13 = self.pool.get('ir.model.data')\n", "if not VAR_4:\n", "VAR_14 = VAR_13._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_tree')\n", "VAR_7.append(VAR_4[0])\n", "VAR_15 = VAR_13._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_form2')\n", "if not journal.check_dtls:\n", "if VAR_14:\n", "VAR_9.button_confirm_cash(VAR_2, VAR_3, VAR_4, VAR_5)\n", "VAR_14 = VAR_13.browse(VAR_2, VAR_3, VAR_14, VAR_5=context).res_id\n", "if VAR_15:\n", "VAR_15 = VAR_13.browse(VAR_2, VAR_3, VAR_15, VAR_5=context).res_id\n", "return {'domain': \"[('id','in',\" + str(VAR_7) + ')]', 'name':\n 'Close Statements', 'view_type': 'form', 'view_mode': 'tree,form',\n 'res_model': 'account.bank.statement', 'views': [(VAR_14, 'tree'), (\n VAR_15, 'form')], 'type': 'ir.actions.act_window'}\n" ]
[ "def close_statement(self, cr, uid, ids, context):...\n", "\"\"\"docstring\"\"\"\n", "company_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.id\n", "list_statement = []\n", "mod_obj = self.pool.get('ir.model.data')\n", "statement_obj = self.pool.get('account.bank.statement')\n", "journal_obj = self.pool.get('account.journal')\n", "cr.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % uid)\n", "j_ids = map(lambda x1: x1[0], cr.fetchall())\n", "cr.execute(\n \"\"\" select id from account_journal\n where auto_cash='True' and type='cash'\n and id in (%s)\"\"\"\n % ','.join(map(lambda x: \"'\" + str(x) + \"'\", j_ids)))\n", "journal_ids = map(lambda x1: x1[0], cr.fetchall())\n", "for journal in journal_obj.browse(cr, uid, journal_ids):\n", "ids = statement_obj.search(cr, uid, [('state', '!=', 'confirm'), ('user_id',\n '=', uid), ('journal_id', '=', journal.id)])\n", "data_obj = self.pool.get('ir.model.data')\n", "if not ids:\n", "id2 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_tree')\n", "list_statement.append(ids[0])\n", "id3 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_form2')\n", "if not journal.check_dtls:\n", "if id2:\n", "statement_obj.button_confirm_cash(cr, uid, ids, context)\n", "id2 = data_obj.browse(cr, uid, id2, context=context).res_id\n", "if id3:\n", "id3 = data_obj.browse(cr, uid, id3, context=context).res_id\n", "return {'domain': \"[('id','in',\" + str(list_statement) + ')]', 'name':\n 'Close Statements', 'view_type': 'form', 'view_mode': 'tree,form',\n 'res_model': 'account.bank.statement', 'views': [(id2, 'tree'), (id3,\n 'form')], 'type': 'ir.actions.act_window'}\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "For", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Condition", "Condition", "Expr'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_2(self):...\n", "return {option.name: self.config(option.name) for option in VAR_4}\n" ]
[ "def config_dict(self):...\n", "return {option.name: self.config(option.name) for option in CONFIG_OPTIONS}\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __init__(self, VAR_11, *VAR_12, **VAR_9):...\n", "super().__init__(*VAR_12, **kwargs)\n", "self.key_name = VAR_11\n" ]
[ "def __init__(self, key_name, *args, **kwargs):...\n", "super().__init__(*args, **kwargs)\n", "self.key_name = key_name\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'" ]
[ "import re\n", "from flask import request\n", "\"\"\"\n Stack data structure will not insert\n equal sequential data\n \"\"\"\n", "def __init__(self, VAR_1=None, VAR_2=5):...\n", "self.size = VAR_2\n", "self.data = VAR_1 or []\n", "def FUNC_5(self, VAR_3):...\n", "if self.data:\n", "if VAR_3 != self.data[len(self.data) - 1]:\n", "self.data.append(VAR_3)\n", "self.data.append(VAR_3)\n", "if len(self.data) > self.size:\n", "self.data.pop(0)\n", "def FUNC_6(self):...\n", "if len(self.data) == 0:\n", "return None\n", "return self.data.pop(len(self.data) - 1)\n" ]
[ "import re\n", "from flask import request\n", "\"\"\"\n Stack data structure will not insert\n equal sequential data\n \"\"\"\n", "def __init__(self, list=None, size=5):...\n", "self.size = size\n", "self.data = list or []\n", "def push(self, item):...\n", "if self.data:\n", "if item != self.data[len(self.data) - 1]:\n", "self.data.append(item)\n", "self.data.append(item)\n", "if len(self.data) > self.size:\n", "self.data.pop(0)\n", "def pop(self):...\n", "if len(self.data) == 0:\n", "return None\n", "return self.data.pop(len(self.data) - 1)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "ImportFrom'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Condition", "Condition", "Expr'", "Expr'", "Condition", "Expr'", "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_22(VAR_2, VAR_18, VAR_19):...\n", "if VAR_18.provider.id == '8.9.10.11':\n" ]
[ "def _validate(cluster_api, endpoint, conn):...\n", "if endpoint.provider.id == '8.9.10.11':\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def __init__(self, VAR_2=None):...\n", "self.total = 0\n", "self.success = 0\n", "self.failure = 0\n", "self.error = 0\n", "self.undecided = 0\n", "self.total_time = 0.0\n", "self.max_time = 0.0\n", "self.log_dir = VAR_2\n", "if self.log_dir is not None:\n", "os.makedirs(self.log_dir)\n" ]
[ "def __init__(self, log_dir=None):...\n", "self.total = 0\n", "self.success = 0\n", "self.failure = 0\n", "self.error = 0\n", "self.undecided = 0\n", "self.total_time = 0.0\n", "self.max_time = 0.0\n", "self.log_dir = log_dir\n", "if self.log_dir is not None:\n", "os.makedirs(self.log_dir)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'" ]
[ "import stack.commands\n", "from stack.exception import ArgError, ParamValue\n", "\"\"\"string\"\"\"\n", "def FUNC_0(self, VAR_0, VAR_1):...\n", "VAR_2 = None\n", "VAR_3 = []\n", "VAR_4 = []\n", "VAR_5 = []\n", "if len(VAR_1) == 0:\n", "VAR_2 = 'global'\n", "if len(VAR_1) == 1:\n", "if not VAR_2:\n", "VAR_3 = self.getOSNames(VAR_1)\n", "VAR_3 = []\n", "VAR_4 = self.getApplianceNames()\n", "VAR_4 = []\n", "VAR_5 = self.getHostnames()\n", "VAR_5 = []\n", "if VAR_1[0] in VAR_3:\n", "if not VAR_2:\n", "VAR_2 = 'os'\n", "if VAR_1[0] in VAR_4:\n", "VAR_6 = None\n", "VAR_2 = 'appliance'\n", "if VAR_1[0] in VAR_5:\n", "if VAR_2 == 'global':\n", "VAR_2 = 'host'\n", "VAR_6 = 'string'\n", "if VAR_2 == 'os':\n", "if not VAR_6:\n", "return\n", "if VAR_2 == 'appliance':\n", "return\n", "VAR_7 = None\n", "VAR_6 = 'string' % VAR_1[0]\n", "if VAR_2 == 'host':\n", "if VAR_2 == 'global':\n", "VAR_6 = 'string' % VAR_1[0]\n", "VAR_7 = 'global'\n", "if VAR_2 in ['appliance', 'host']:\n", "self.beginOutput()\n", "VAR_7 = VAR_1[0]\n", "self.db.execute(VAR_6)\n", "VAR_8 = 0\n", "for row in self.db.fetchall():\n", "VAR_9, VAR_10, VAR_11, VAR_12, VAR_13, VAR_14 = row\n", "self.endOutput(header=['scope', 'enclosure', 'adapter', 'slot', 'raidlevel',\n 'arrayid', 'options'], trimOwner=False)\n", "if VAR_8 > 0:\n", "VAR_7 = None\n", "if VAR_9 == -1:\n", "VAR_9 = None\n", "if VAR_10 == -1:\n", "VAR_10 = None\n", "if VAR_11 == -1:\n", "VAR_11 = '*'\n", "if VAR_12 == '-1':\n", "VAR_12 = 'hotspare'\n", "if VAR_13 == -1:\n", "VAR_13 = 'global'\n", "if VAR_13 == -2:\n", "VAR_14 = VAR_14.strip('\"')\n", "VAR_13 = '*'\n", "self.addOutput(VAR_7, [VAR_10, VAR_9, VAR_11, VAR_12, VAR_13, VAR_14])\n", "VAR_8 += 1\n" ]
[ "import stack.commands\n", "from stack.exception import ArgError, ParamValue\n", "\"\"\"\n\tList the storage controller configuration for one of the following:\n\tglobal, os, appliance or host.\n\n\t<arg optional='1' type='string' name='host'>\n\tThis argument can be nothing, a valid 'os' (e.g., 'redhat'), a valid\n\tappliance (e.g., 'backend') or a host.\n\tIf nothing is supplied, then the global storage controller\n\tconfiguration will be output.\n\t</arg>\n\n\t<example cmd='list storage controller backend-0-0'>\n\tList host-specific storage controller configuration for backend-0-0.\n\t</example>\n\n\t<example cmd='list storage controller backend'>\n\tList appliance-specific storage controller configuration for all\n\tbackend appliances.\n\t</example>\n\n\t<example cmd='list storage controller'>\n\tList global storage controller configuration for all hosts.\n\t</example>\n\n\t\"\"\"\n", "def run(self, params, args):...\n", "scope = None\n", "oses = []\n", "appliances = []\n", "hosts = []\n", "if len(args) == 0:\n", "scope = 'global'\n", "if len(args) == 1:\n", "if not scope:\n", "oses = self.getOSNames(args)\n", "oses = []\n", "appliances = self.getApplianceNames()\n", "appliances = []\n", "hosts = self.getHostnames()\n", "hosts = []\n", "if args[0] in oses:\n", "if not scope:\n", "scope = 'os'\n", "if args[0] in appliances:\n", "query = None\n", "scope = 'appliance'\n", "if args[0] in hosts:\n", "if scope == 'global':\n", "scope = 'host'\n", "query = \"\"\"select adapter, enclosure, slot, raidlevel,\n\t\t\t\tarrayid, options from storage_controller \n\t\t\t\twhere scope = 'global'\n\t\t\t\torder by enclosure, adapter, slot\"\"\"\n", "if scope == 'os':\n", "if not query:\n", "return\n", "if scope == 'appliance':\n", "return\n", "name = None\n", "query = (\n \"\"\"select adapter, enclosure, slot,\n\t\t\t\traidlevel, arrayid, options\n\t\t\t\tfrom storage_controller where\n\t\t\t\tscope = \"appliance\" and tableid = (select\n\t\t\t\tid from appliances\n\t\t\t\twhere name = '%s')\n\t\t\t\torder by enclosure, adapter, slot\"\"\"\n % args[0])\n", "if scope == 'host':\n", "if scope == 'global':\n", "query = (\n \"\"\"select adapter, enclosure, slot,\n\t\t\t\traidlevel, arrayid, options\n\t\t\t\tfrom storage_controller where\n\t\t\t\tscope = \"host\" and tableid = (select\n\t\t\t\tid from nodes where name = '%s')\n\t\t\t\torder by enclosure, adapter, slot\"\"\"\n % args[0])\n", "name = 'global'\n", "if scope in ['appliance', 'host']:\n", "self.beginOutput()\n", "name = args[0]\n", "self.db.execute(query)\n", "i = 0\n", "for row in self.db.fetchall():\n", "adapter, enclosure, slot, raidlevel, arrayid, options = row\n", "self.endOutput(header=['scope', 'enclosure', 'adapter', 'slot', 'raidlevel',\n 'arrayid', 'options'], trimOwner=False)\n", "if i > 0:\n", "name = None\n", "if adapter == -1:\n", "adapter = None\n", "if enclosure == -1:\n", "enclosure = None\n", "if slot == -1:\n", "slot = '*'\n", "if raidlevel == '-1':\n", "raidlevel = 'hotspare'\n", "if arrayid == -1:\n", "arrayid = 'global'\n", "if arrayid == -2:\n", "options = options.strip('\"')\n", "arrayid = '*'\n", "self.addOutput(name, [enclosure, adapter, slot, raidlevel, arrayid, options])\n", "i += 1\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "ImportFrom'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Return'", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Assign'", "For", "Assign'", "Expr'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "AugAssign'" ]
[ "def FUNC_57(self):...\n", "VAR_13 = self.cursor.execute('select ?', None).fetchone()[0]\n", "self.assertEqual(VAR_13, None)\n" ]
[ "def test_untyped_none(self):...\n", "value = self.cursor.execute('select ?', None).fetchone()[0]\n", "self.assertEqual(value, None)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_15(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_21 = [VAR_28 for VAR_28 in self.expanded_output if VAR_28.exists]\n", "if VAR_21:\n", "logger.info(\n \"\"\"Removing output files of failed job {} since they might be corrupted:\n{}\"\"\"\n .format(self, ', '.join(VAR_21)))\n", "for VAR_28 in VAR_21:\n", "VAR_28.remove()\n" ]
[ "def cleanup(self):...\n", "\"\"\"docstring\"\"\"\n", "to_remove = [f for f in self.expanded_output if f.exists]\n", "if to_remove:\n", "logger.info(\n \"\"\"Removing output files of failed job {} since they might be corrupted:\n{}\"\"\"\n .format(self, ', '.join(to_remove)))\n", "for f in to_remove:\n", "f.remove()\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Expr'", "For", "Expr'" ]
[ "def FUNC_0(VAR_2):...\n", "VAR_8 = psycopg2.connect('dbname=fluffy_bot user=censored password=Laumau11p')\n", "VAR_9 = VAR_8.cursor()\n", "VAR_9.execute(VAR_2)\n", "VAR_10 = VAR_9.fetchall()\n", "VAR_9.close()\n", "VAR_8.close()\n", "return VAR_10\n" ]
[ "def fetch(query):...\n", "conn = psycopg2.connect('dbname=fluffy_bot user=censored password=Laumau11p')\n", "cur = conn.cursor()\n", "cur.execute(query)\n", "result = cur.fetchall()\n", "cur.close()\n", "conn.close()\n", "return result\n" ]
[ 0, 4, 0, 4, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_3():...\n", "print(*VAR_3, sep='/')\n" ]
[ "def pwd():...\n", "print(*curr_path, sep='/')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "@property...\n", "VAR_4 = self.request.session.get('oidc_login_next', None)\n", "return VAR_4 or import_from_settings('LOGIN_REDIRECT_URL', '/')\n" ]
[ "@property...\n", "next_url = self.request.session.get('oidc_login_next', None)\n", "return next_url or import_from_settings('LOGIN_REDIRECT_URL', '/')\n" ]
[ 0, 0, 0 ]
[ "Condition", "Assign'", "Return'" ]
[ "def FUNC_7(self, VAR_11, VAR_10=[]):...\n", "\"\"\"docstring\"\"\"\n", "VAR_14 = self.session.get(\n 'https://www.kijiji.ca/p-admarkt-post-ad.html?categoryId=773')\n", "VAR_22 = \"initialXsrfToken: '\\\\S+'\"\n", "VAR_23 = re.findall(VAR_22, VAR_14.text)[0].strip(\"initialXsrfToken: '\").strip(\n \"'\")\n", "VAR_24 = self.upload_image(VAR_23, VAR_10)\n", "VAR_11['images'] = ','.join(VAR_24)\n", "VAR_11['ca.kijiji.xsrf.token'] = FUNC_0(VAR_14.text, 'ca.kijiji.xsrf.token')\n", "VAR_11['postAdForm.fraudToken'] = FUNC_0(VAR_14.text, 'postAdForm.fraudToken')\n", "VAR_11['postAdForm.description'] = VAR_11['postAdForm.description'].replace(\n '\\\\n', '\\n')\n", "VAR_25 = 'https://www.kijiji.ca/p-submit-ad.html'\n", "VAR_14 = self.session.post(VAR_25, VAR_11=data)\n", "if not len(VAR_11.get('postAdForm.title', '')) >= 10:\n", "if int(VAR_14.status_code) != 200 or 'Delete Ad?' not in VAR_14.text:\n", "if 'There was an issue posting your ad, please contact Customer Service.' in VAR_14.text:\n", "VAR_26 = VAR_14.headers['Set-Cookie']\n", "VAR_7 = re.search('\\\\d+', VAR_26).group()\n", "return VAR_7\n" ]
[ "def post_ad_using_data(self, data, image_files=[]):...\n", "\"\"\"docstring\"\"\"\n", "resp = self.session.get(\n 'https://www.kijiji.ca/p-admarkt-post-ad.html?categoryId=773')\n", "token_regex = \"initialXsrfToken: '\\\\S+'\"\n", "image_upload_token = re.findall(token_regex, resp.text)[0].strip(\n \"initialXsrfToken: '\").strip(\"'\")\n", "imageList = self.upload_image(image_upload_token, image_files)\n", "data['images'] = ','.join(imageList)\n", "data['ca.kijiji.xsrf.token'] = get_token(resp.text, 'ca.kijiji.xsrf.token')\n", "data['postAdForm.fraudToken'] = get_token(resp.text, 'postAdForm.fraudToken')\n", "data['postAdForm.description'] = data['postAdForm.description'].replace('\\\\n',\n '\\n')\n", "new_ad_url = 'https://www.kijiji.ca/p-submit-ad.html'\n", "resp = self.session.post(new_ad_url, data=data)\n", "if not len(data.get('postAdForm.title', '')) >= 10:\n", "if int(resp.status_code) != 200 or 'Delete Ad?' not in resp.text:\n", "if 'There was an issue posting your ad, please contact Customer Service.' in resp.text:\n", "new_cookie_with_ad_id = resp.headers['Set-Cookie']\n", "ad_id = re.search('\\\\d+', new_cookie_with_ad_id).group()\n", "return ad_id\n" ]
[ 0, 0, 5, 5, 5, 5, 5, 0, 0, 0, 0, 0, 5, 5, 0, 5, 5, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_1(self):...\n", "return 'submit user test %s for task %s (ID %d) %s' % (repr(self.filenames),\n self.task[1], self.task[0], self.url)\n" ]
[ "def describe(self):...\n", "return 'submit user test %s for task %s (ID %d) %s' % (repr(self.filenames),\n self.task[1], self.task[0], self.url)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self.config.getint(self.section, 'num_threads')\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self.config.getint(self.section, 'num_threads')\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "def FUNC_4(VAR_0, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "db.engine.execute('ALTER TABLE \"{0}\" DROP COLUMN IF EXISTS \"{1}\"'.format(\n VAR_0, VAR_5))\n", "print('FAILED TO DROP ATTRIBUTE {0} FROM {1}'.format(VAR_5, VAR_0))\n" ]
[ "def drop_attribute(table_name, attr):...\n", "\"\"\"docstring\"\"\"\n", "db.engine.execute('ALTER TABLE \"{0}\" DROP COLUMN IF EXISTS \"{1}\"'.format(\n table_name, attr))\n", "print('FAILED TO DROP ATTRIBUTE {0} FROM {1}'.format(attr, table_name))\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Expr'" ]
[ "def FUNC_36(self, VAR_14):...\n", "return self.__class__(self.get_related_model(VAR_14), self.session)\n" ]
[ "def get_related_interface(self, col_name):...\n", "return self.__class__(self.get_related_model(col_name), self.session)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_8(self, VAR_5):...\n", "self.ids += 1\n", "self.actors[self.ids] = VAR_5\n", "self.actors[self.ids].bound(self.max_x, self.max_y)\n", "return self.ids\n" ]
[ "def addActor(self, actor):...\n", "self.ids += 1\n", "self.actors[self.ids] = actor\n", "self.actors[self.ids].bound(self.max_x, self.max_y)\n", "return self.ids\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "AugAssign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_5(self):...\n", "VAR_22 = self.add_argument_group('Mechanical Turk')\n", "VAR_23 = VAR_20.path.join(self.parlai_home, 'logs', 'mturk')\n", "VAR_22.add_argument('--mturk-log-path', default=default_log_path, help=\n 'path to MTurk logs, defaults to {parlai_dir}/logs/mturk')\n", "VAR_22.add_argument('-t', '--task', help=\n 'MTurk task, e.g. \"qa_data_collection\" or \"model_evaluator\"')\n", "VAR_22.add_argument('-nc', '--num-conversations', default=1, type=int, help\n ='number of conversations you want to create for this task')\n", "VAR_22.add_argument('--unique', dest='unique_worker', default=False, action\n ='store_true', help='enforce that no worker can work on your task twice')\n", "VAR_22.add_argument('--unique-qual-name', dest='unique_qual_name', default=\n None, type=str, help=\n 'qualification name to use for uniqueness between HITs')\n", "VAR_22.add_argument('-r', '--reward', default=0.05, type=float, help=\n 'reward for each worker for finishing the conversation, in US dollars')\n", "VAR_22.add_argument('--sandbox', dest='is_sandbox', action='store_true',\n help='submit the HITs to MTurk sandbox site')\n", "VAR_22.add_argument('--live', dest='is_sandbox', action='store_false', help\n ='submit the HITs to MTurk live site')\n", "VAR_22.add_argument('--debug', dest='is_debug', action='store_true', help=\n 'print and log all server interactions and messages')\n", "VAR_22.add_argument('--verbose', dest='verbose', action='store_true', help=\n 'print all messages sent to and from Turkers')\n", "VAR_22.add_argument('--hard-block', dest='hard_block', action='store_true',\n default=False, help=\n 'Hard block disconnecting Turkers from all of your HITs')\n", "VAR_22.add_argument('--log-level', dest='log_level', type=int, default=20,\n help=\n 'importance level for what to put into the logs. the lower the level the more that gets logged. values are 0-50'\n )\n", "VAR_22.add_argument('--block-qualification', dest='block_qualification',\n default='', help=\n 'Qualification to use for soft blocking users. By default turkers are never blocked, though setting this will allow you to filter out turkers that have disconnected too many times on previous HITs where this qualification was set.'\n )\n", "VAR_22.add_argument('--count-complete', dest='count_complete', default=\n False, action='store_true', help=\n 'continue until the requested number of conversations are completed rather than attempted'\n )\n", "VAR_22.add_argument('--allowed-conversations', dest='allowed_conversations',\n default=0, type=int, help=\n 'number of concurrent conversations that one mturk worker is able to be involved in, 0 is unlimited'\n )\n", "VAR_22.add_argument('--max-connections', dest='max_connections', default=30,\n type=int, help=\n 'number of HITs that can be launched at the same time, 0 is unlimited.')\n", "VAR_22.add_argument('--min-messages', dest='min_messages', default=0, type=\n int, help=\n 'number of messages required to be sent by MTurk agent when considering whether to approve a HIT in the event of a partner disconnect. I.e. if the number of messages exceeds this number, the turker can submit the HIT.'\n )\n", "VAR_22.add_argument('--local', dest='local', default=False, action=\n 'store_true', help=\n 'Run the server locally on this server rather than setting up a heroku server.'\n )\n", "VAR_22.set_defaults(is_sandbox=True)\n", "VAR_22.set_defaults(is_debug=False)\n", "VAR_22.set_defaults(verbose=False)\n" ]
[ "def add_mturk_args(self):...\n", "mturk = self.add_argument_group('Mechanical Turk')\n", "default_log_path = os.path.join(self.parlai_home, 'logs', 'mturk')\n", "mturk.add_argument('--mturk-log-path', default=default_log_path, help=\n 'path to MTurk logs, defaults to {parlai_dir}/logs/mturk')\n", "mturk.add_argument('-t', '--task', help=\n 'MTurk task, e.g. \"qa_data_collection\" or \"model_evaluator\"')\n", "mturk.add_argument('-nc', '--num-conversations', default=1, type=int, help=\n 'number of conversations you want to create for this task')\n", "mturk.add_argument('--unique', dest='unique_worker', default=False, action=\n 'store_true', help='enforce that no worker can work on your task twice')\n", "mturk.add_argument('--unique-qual-name', dest='unique_qual_name', default=\n None, type=str, help=\n 'qualification name to use for uniqueness between HITs')\n", "mturk.add_argument('-r', '--reward', default=0.05, type=float, help=\n 'reward for each worker for finishing the conversation, in US dollars')\n", "mturk.add_argument('--sandbox', dest='is_sandbox', action='store_true',\n help='submit the HITs to MTurk sandbox site')\n", "mturk.add_argument('--live', dest='is_sandbox', action='store_false', help=\n 'submit the HITs to MTurk live site')\n", "mturk.add_argument('--debug', dest='is_debug', action='store_true', help=\n 'print and log all server interactions and messages')\n", "mturk.add_argument('--verbose', dest='verbose', action='store_true', help=\n 'print all messages sent to and from Turkers')\n", "mturk.add_argument('--hard-block', dest='hard_block', action='store_true',\n default=False, help=\n 'Hard block disconnecting Turkers from all of your HITs')\n", "mturk.add_argument('--log-level', dest='log_level', type=int, default=20,\n help=\n 'importance level for what to put into the logs. the lower the level the more that gets logged. values are 0-50'\n )\n", "mturk.add_argument('--block-qualification', dest='block_qualification',\n default='', help=\n 'Qualification to use for soft blocking users. By default turkers are never blocked, though setting this will allow you to filter out turkers that have disconnected too many times on previous HITs where this qualification was set.'\n )\n", "mturk.add_argument('--count-complete', dest='count_complete', default=False,\n action='store_true', help=\n 'continue until the requested number of conversations are completed rather than attempted'\n )\n", "mturk.add_argument('--allowed-conversations', dest='allowed_conversations',\n default=0, type=int, help=\n 'number of concurrent conversations that one mturk worker is able to be involved in, 0 is unlimited'\n )\n", "mturk.add_argument('--max-connections', dest='max_connections', default=30,\n type=int, help=\n 'number of HITs that can be launched at the same time, 0 is unlimited.')\n", "mturk.add_argument('--min-messages', dest='min_messages', default=0, type=\n int, help=\n 'number of messages required to be sent by MTurk agent when considering whether to approve a HIT in the event of a partner disconnect. I.e. if the number of messages exceeds this number, the turker can submit the HIT.'\n )\n", "mturk.add_argument('--local', dest='local', default=False, action=\n 'store_true', help=\n 'Run the server locally on this server rather than setting up a heroku server.'\n )\n", "mturk.set_defaults(is_sandbox=True)\n", "mturk.set_defaults(is_debug=False)\n", "mturk.set_defaults(verbose=False)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_2(self):...\n", "VAR_13 = []\n", "VAR_14 = datetime.now(timezone(timedelta(hours=+9), 'JST'))\n", "VAR_14 = VAR_14.replace(hour=0, minute=0, second=0, microsecond=0)\n", "for day in range(30)[::-1]:\n", "VAR_23 = VAR_14 - timedelta(days=day)\n", "return VAR_13\n", "VAR_24 = VAR_14 - timedelta(days=day - 1)\n", "VAR_22 = self.object_list.filter(publish_timestamp__gte=from_date,\n publish_timestamp__lte=to_date).count()\n", "VAR_13.append(VAR_22)\n" ]
[ "def thirty_day_data(self):...\n", "data = []\n", "today = datetime.now(timezone(timedelta(hours=+9), 'JST'))\n", "today = today.replace(hour=0, minute=0, second=0, microsecond=0)\n", "for day in range(30)[::-1]:\n", "from_date = today - timedelta(days=day)\n", "return data\n", "to_date = today - timedelta(days=day - 1)\n", "count = self.object_list.filter(publish_timestamp__gte=from_date,\n publish_timestamp__lte=to_date).count()\n", "data.append(count)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Return'", "Assign'", "Assign'", "Expr'" ]
[ "@VAR_1.route('/run')...\n", "return render_template('run.html')\n" ]
[ "@app.route('/run')...\n", "return render_template('run.html')\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_2(VAR_7):...\n", "return urlparse.urljoin(CLASS_0.server_location, VAR_7)\n" ]
[ "def _BuildUri(handler):...\n", "return urlparse.urljoin(BaseRequest.server_location, handler)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@detail_route(methods=['get'])...\n", "\"\"\"docstring\"\"\"\n", "get_and_check_project(VAR_9, VAR_11)\n", "VAR_28 = self.queryset.get(VAR_10=pk, VAR_1=project_pk)\n", "VAR_23 = max(0, int(VAR_9.query_params.get('line', 0)))\n", "VAR_24 = VAR_28.console_output or ''\n", "return Response('\\n'.join(VAR_24.split('\\n')[VAR_23:]))\n" ]
[ "@detail_route(methods=['get'])...\n", "\"\"\"docstring\"\"\"\n", "get_and_check_project(request, project_pk)\n", "task = self.queryset.get(pk=pk, project=project_pk)\n", "line_num = max(0, int(request.query_params.get('line', 0)))\n", "output = task.console_output or ''\n", "return Response('\\n'.join(output.split('\\n')[line_num:]))\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Expr'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_12(self, *VAR_9, **VAR_10):...\n", "self.render('app.html', VAR_2={})\n" ]
[ "def get(self, *a, **kwargs):...\n", "self.render('app.html', page_json={})\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_13(self):...\n", "self.conditions = []\n", "self.grouped_or_conditions = []\n", "self.build_filter_conditions(self.filters, self.conditions)\n", "self.build_filter_conditions(self.or_filters, self.grouped_or_conditions)\n", "if not self.flags.ignore_permissions:\n", "VAR_59 = self.build_match_conditions()\n", "if VAR_59:\n", "self.conditions.append('(' + VAR_59 + ')')\n" ]
[ "def build_conditions(self):...\n", "self.conditions = []\n", "self.grouped_or_conditions = []\n", "self.build_filter_conditions(self.filters, self.conditions)\n", "self.build_filter_conditions(self.or_filters, self.grouped_or_conditions)\n", "if not self.flags.ignore_permissions:\n", "match_conditions = self.build_match_conditions()\n", "if match_conditions:\n", "self.conditions.append('(' + match_conditions + ')')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Condition", "Assign'", "Condition", "Expr'" ]
[ "@mock.patch('requests.post', FUNC_0)...\n", "\"\"\"docstring\"\"\"\n", "VAR_4 = '/api/apps'\n", "VAR_5 = self.client.post(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_6 = VAR_5.data['id']\n", "VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(VAR_5.data['count'], 1)\n", "VAR_7 = {'image': 'autotest/example'}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_8 = VAR_5.data['uuid']\n", "VAR_9 = VAR_5.data\n", "self.assertEqual(VAR_5.data['image'], VAR_7['image'])\n", "VAR_4 = '/api/apps/{app_id}/builds/{build_id}'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "VAR_10 = VAR_5.data\n", "self.assertEqual(VAR_9, VAR_10)\n", "VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_7 = {'image': 'autotest/example'}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 201)\n", "self.assertIn('x-deis-release', VAR_5._headers)\n", "VAR_11 = VAR_5.data\n", "self.assertEqual(VAR_5.data['image'], VAR_7['image'])\n", "self.assertNotEqual(VAR_10['uuid'], VAR_11['uuid'])\n", "self.assertEqual(self.client.put(VAR_4).status_code, 405)\n", "self.assertEqual(self.client.patch(VAR_4).status_code, 405)\n", "self.assertEqual(self.client.delete(VAR_4).status_code, 405)\n" ]
[ "@mock.patch('requests.post', mock_import_repository_task)...\n", "\"\"\"docstring\"\"\"\n", "url = '/api/apps'\n", "response = self.client.post(url)\n", "self.assertEqual(response.status_code, 201)\n", "app_id = response.data['id']\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(response.data['count'], 1)\n", "body = {'image': 'autotest/example'}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "build_id = response.data['uuid']\n", "build1 = response.data\n", "self.assertEqual(response.data['image'], body['image'])\n", "url = '/api/apps/{app_id}/builds/{build_id}'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "build2 = response.data\n", "self.assertEqual(build1, build2)\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "body = {'image': 'autotest/example'}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "self.assertIn('x-deis-release', response._headers)\n", "build3 = response.data\n", "self.assertEqual(response.data['image'], body['image'])\n", "self.assertNotEqual(build2['uuid'], build3['uuid'])\n", "self.assertEqual(self.client.put(url).status_code, 405)\n", "self.assertEqual(self.client.patch(url).status_code, 405)\n", "self.assertEqual(self.client.delete(url).status_code, 405)\n" ]
[ 0, 0, 0, 5, 0, 0, 0, 5, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 5, 5 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "__all__ = []\n", "__version__ = '2019.7.7'\n", "VAR_0 = 'common.apps.CommonConfig'\n" ]
[ "__all__ = []\n", "__version__ = '2019.7.7'\n", "default_app_config = 'common.apps.CommonConfig'\n" ]
[ 0, 4, 0 ]
[ "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_15(self, VAR_6):...\n", "" ]
[ "def set_priority(self, priority):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_1(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = shlex.split('{0} --install'.format(self.binary))\n", "VAR_7 = spawn(VAR_6, stdout=PIPE, stderr=PIPE, env=self.env)\n", "VAR_10, VAR_9 = VAR_7.communicate()\n", "if VAR_9 is not None and len(VAR_9) > 0:\n", "if sys.version_info >= (3,):\n", "VAR_9 = VAR_9.decode('utf8')\n" ]
[ "def _install_linters(self):...\n", "\"\"\"docstring\"\"\"\n", "args = shlex.split('{0} --install'.format(self.binary))\n", "gometalinter = spawn(args, stdout=PIPE, stderr=PIPE, env=self.env)\n", "_, err = gometalinter.communicate()\n", "if err is not None and len(err) > 0:\n", "if sys.version_info >= (3,):\n", "err = err.decode('utf8')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'" ]
[ "def FUNC_0(self, VAR_2, VAR_3, VAR_4, VAR_5=False):...\n", "VAR_8 = \"SELECT * FROM analyzed WHERE base_url = '\" + str(VAR_2) + \"';\"\n", "VAR_9 = self.db.exec(VAR_8)\n", "if len(VAR_9) > 0:\n", "VAR_0.info('tried to analyze {}, but has already been done.'.format(VAR_2))\n", "if 'smash.gg' in VAR_2:\n", "return\n", "VAR_23 = get_results.process(VAR_2, VAR_3, self.db, VAR_4)\n", "VAR_24, VAR_25 = bracket_utils.hit_url(VAR_2)\n", "if VAR_23:\n", "if VAR_25 == 200 and bracket_utils.is_valid(VAR_24):\n", "self.insert_placing_data(VAR_2, VAR_5)\n", "VAR_0.exc('Analyzing smashgg tournament {} was not successful'.format(VAR_2))\n", "get_results.process(VAR_2, VAR_3, self.db, VAR_4)\n", "self.insert_placing_data(VAR_2, VAR_5)\n" ]
[ "def process(self, bracket, scene, display_name, new_bracket=False):...\n", "sql = \"SELECT * FROM analyzed WHERE base_url = '\" + str(bracket) + \"';\"\n", "result = self.db.exec(sql)\n", "if len(result) > 0:\n", "LOG.info('tried to analyze {}, but has already been done.'.format(bracket))\n", "if 'smash.gg' in bracket:\n", "return\n", "success = get_results.process(bracket, scene, self.db, display_name)\n", "html, status = bracket_utils.hit_url(bracket)\n", "if success:\n", "if status == 200 and bracket_utils.is_valid(html):\n", "self.insert_placing_data(bracket, new_bracket)\n", "LOG.exc('Analyzing smashgg tournament {} was not successful'.format(bracket))\n", "get_results.process(bracket, scene, self.db, display_name)\n", "self.insert_placing_data(bracket, new_bracket)\n" ]
[ 0, 4, 4, 0, 0, 0, 0, 4, 0, 0, 0, 4, 0, 4, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Condition", "Expr'", "Expr'", "Expr'", "Expr'" ]