lines: sequencelengths 1 to 444
raw_lines: sequencelengths 1 to 444
label: sequencelengths 1 to 444
type: sequencelengths 1 to 444
[ "def FUNC_22(self):...\n", "self.cursor.execute('create table t1(s varchar(20))')\n", "self.cursor.execute('insert into t1 values(?)', '1')\n", "VAR_24 = self.cursor.execute('select * from t1').fetchone()\n", "self.assertEqual(VAR_24[0], '1')\n", "self.assertEqual(VAR_24[-1], '1')\n" ]
[ "def test_negative_row_index(self):...\n", "self.cursor.execute('create table t1(s varchar(20))')\n", "self.cursor.execute('insert into t1 values(?)', '1')\n", "row = self.cursor.execute('select * from t1').fetchone()\n", "self.assertEqual(row[0], '1')\n", "self.assertEqual(row[-1], '1')\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def __init__(self, *VAR_13, **VAR_14):...\n", "super(CLASS_8, self).__init__(*VAR_13, **kwargs)\n" ]
[ "def __init__(self, *args, **kwargs):...\n", "super(CustomConfigForm, self).__init__(*args, **kwargs)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_0(VAR_0, VAR_1):...\n", "VAR_10 = get_object_or_404(JustURL, VAR_1=pk)\n", "return redirect(VAR_10.input_url)\n" ]
[ "def link_redirect(request, pk):...\n", "instance = get_object_or_404(JustURL, pk=pk)\n", "return redirect(instance.input_url)\n" ]
[ 0, 6, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "from python_version_check import check_version\n", "check_version((3, 4, 3))\n", "VAR_0 = 1, 0, 10\n", "import logging\n", "from random import choice\n", "from time import time\n", "import requests, json\n", "from threading import Thread\n", "from queue import Queue\n", "from traceback_printer import full_traceback\n", "from telegramHigh import TelegramHigh\n", "from textual_data import *\n", "from userparams import UserParams\n", "from language_support import LanguageSupport\n", "import utils\n", "from file_db import FileDB\n", "from button_handler import getMainMenu\n", "from settings_reader import SettingsReader\n", "VAR_1 = SettingsReader()\n", "VAR_2 = VAR_1.settings_reader(0)\n", "VAR_3 = bool(VAR_1.settings_reader(2) == 'DB')\n", "VAR_4 = 60\n", "VAR_5 = 86400\n", "VAR_6 = VAR_1.settings_reader(1)\n", "VAR_7 = {'lang': 'EN', 'subscribed': 0, 'period': VAR_6, 'last_update_time': 0}\n", "\"\"\"The bot class\"\"\"\n", "VAR_8 = None\n", "VAR_9 = {}\n", "def __init__(self, VAR_10):...\n", "super(CLASS_0, self).__init__()\n", "self.bot = TelegramHigh(VAR_10)\n", "self.userparams = UserParams('users', initial=INITIAL_SUBSCRIBER_PARAMS)\n", "self.file_db = FileDB('files')\n", "self.updateFileListThread()\n", "self.files = []\n", "self.bot.start(processingFunction=self.processUpdate, periodicFunction=self\n .periodicRoutine)\n", "def FUNC_1(self, VAR_11):...\n", "VAR_17 = self.bot\n", "VAR_18 = VAR_11.message\n", "VAR_19 = VAR_18.text\n", "VAR_20 = VAR_18.message_id\n", "VAR_15 = VAR_18.chat_id\n", "VAR_21 = self.userparams\n", "VAR_21.initializeUser(VAR_15=chat_id, VAR_28=INITIAL_SUBSCRIBER_PARAMS)\n", "VAR_22 = LanguageSupport(VAR_21.getEntry(VAR_15=chat_id, param='lang'))\n", "VAR_23 = VAR_22.languageSupport\n", "VAR_24 = VAR_22.allVariants\n", "VAR_16 = VAR_23(getMainMenu(VAR_21.getEntry(VAR_15=chat_id, param=\n 'subscribed')))\n", "if VAR_19 == '/start':\n", "VAR_17.sendMessage(VAR_15=chat_id, VAR_19=lS(START_MESSAGE), key_markup=MMKM)\n", "if VAR_19 == '/help' or VAR_19 == HELP_BUTTON:\n", "def FUNC_2(self):...\n", "VAR_17.sendMessage(VAR_15=chat_id, VAR_19=lS(HELP_MESSAGE).format(str(\n MIN_PICTURE_SEND_PERIOD), str(MAX_PICTURE_SEND_PERIOD)), key_markup=\n MMKM, markdown=True)\n", "if VAR_19 == '/about' or VAR_19 == ABOUT_BUTTON:\n", "\"\"\"docstring\"\"\"\n", "VAR_17.sendMessage(VAR_15=chat_id, VAR_19=lS(ABOUT_MESSAGE).format('.'.join\n ([str(i) for i in VERSION_NUMBER])), key_markup=MMKM, markdown=True)\n", "if VAR_19 == '/otherbots' or VAR_19 == VAR_23(OTHER_BOTS_BUTTON):\n", "if not hasattr(self, 'update_filelist_thread_queue'):\n", "VAR_17.sendMessage(VAR_15=chat_id, VAR_19=lS(OTHER_BOTS_MESSAGE),\n key_markup=MMKM, markdown=True)\n", "if VAR_19 == '/period' or VAR_19 == VAR_23(SHOW_PERIOD_BUTTON):\n", "self.update_filelist_thread_queue = Queue()\n", "while not self.update_filelist_thread_queue.empty():\n", "VAR_40 = self.userparams.getEntry(VAR_15, 'period')\n", "if VAR_19 == '/subscribe' or VAR_19 == SUBSCRIBE_BUTTON:\n", "VAR_34 = self.update_filelist_thread_queue.get()\n", "self.updateFileListThread()\n", "VAR_17.sendMessage(VAR_15=chat_id, VAR_19=\n 'An image is sent to you every {0} seconds.'.format(period), key_markup\n =MMKM)\n", "VAR_40 = self.userparams.getEntry(VAR_15, 'period')\n", "if VAR_19 == '/unsubscribe' or VAR_19 == UNSUBSCRIBE_BUTTON:\n", "self.last_filelist_update_time = VAR_34[0]\n", "for user in self.userparams.getAllEntries(fields=['subscribed', 'period',\n", "if self.userparams.getEntry(VAR_15, 'subscribed') == 0:\n", "if 
self.userparams.getEntry(VAR_15, 'subscribed') == 1:\n", "if VAR_19 == '/gimmepic' or VAR_19 == GIMMEPIC_BUTTON:\n", "if user[0] == 1:\n", "def FUNC_3(self):...\n", "self.userparams.setEntry(VAR_15, 'subscribed', 1)\n", "VAR_17.sendMessage(VAR_15=chat_id, VAR_19=\n \"\"\"You have already subscribed!\nTo cancel subscription enter /unsubscribe.\nTo change the period of picture sending type a number.\nYour current period is {0} seconds.\"\"\"\n .format(period), key_markup=MMKM)\n", "self.userparams.setEntry(VAR_15, 'subscribed', 0)\n", "VAR_17.sendMessage(VAR_15=chat_id, VAR_19=\n \"You haven't subscribed yet! To subscribe type /subscribe\", key_markup=MMKM\n )\n", "self.startRandomPicThread(VAR_15, VAR_16)\n", "VAR_41 = int(VAR_19)\n", "VAR_17.sendMessage(VAR_15=chat_id, VAR_19='Unknown command!', key_markup=MMKM)\n", "VAR_37 = time()\n", "\"\"\"docstring\"\"\"\n", "self.userparams.setEntry(VAR_15, 'last_update_time', time())\n", "VAR_16 = getMainMenu(subscribed=False)\n", "if self.userparams.getEntry(VAR_15, 'subscribed') == 0:\n", "if VAR_37 - user[2] > user[1]:\n", "if not hasattr(self, 'last_filelist_update_time') or time(\n", "VAR_16 = getMainMenu(subscribed=True)\n", "VAR_17.sendMessage(VAR_15=chat_id, VAR_19=\n 'You have unsubscribed. To subscribe again type /subscribe', key_markup\n =MMKM)\n", "VAR_17.sendMessage(VAR_15=chat_id, VAR_19=\n \"You're not subscribed yet! /subscribe first!\", key_markup=MMKM)\n", "if VAR_41 < VAR_4:\n", "self.startRandomPicThread(user[3], VAR_16=getMainMenu(True))\n", "if not (hasattr(self, 'filelist_updater_thread') and self.\n", "def FUNC_4(self, VAR_12, VAR_13):...\n", "VAR_17.sendMessage(VAR_15=chat_id, VAR_19=\n \"\"\"You're subscribed now! \nAn image will be sent to you every {0} seconds. \nTo cancel subscription enter /unsubscribe. 
\nTo change the period of picture sending type a number.\"\"\"\n .format(period), key_markup=MMKM)\n", "self.userparams.setEntry(VAR_15, 'period', VAR_4)\n", "if VAR_41 > VAR_5:\n", "self.userparams.setEntry(user[3], 'last_update_time', VAR_37)\n", "self.filelist_updater_thread = Thread(target=self.updateFileList)\n", "print('updater already running!')\n", "\"\"\"docstring\"\"\"\n", "VAR_17.sendMessage(VAR_15=chat_id, VAR_19=\n \"\"\"The minimum possible period is {0}.\nSetting period to {0}.\"\"\".format\n (str(MIN_PICTURE_SEND_PERIOD)), key_markup=MMKM)\n", "self.userparams.setEntry(VAR_15, 'period', VAR_5)\n", "self.userparams.setEntry(VAR_15, 'period', VAR_41)\n", "self.filelist_updater_thread.start()\n", "VAR_25 = self.file_db\n", "self.userparams.setEntry(VAR_15, 'last_update_time', int(time()))\n", "VAR_17.sendMessage(VAR_15=chat_id, VAR_19=\n \"\"\"The maximum possible period is {0}.\nSetting period to {0}.\"\"\".format\n (str(MAX_PICTURE_SEND_PERIOD)), key_markup=MMKM)\n", "VAR_17.sendMessage(VAR_15=chat_id, VAR_19='Setting period to ' + str(\n new_period) + '.', key_markup=MMKM)\n", "if path.splitext(VAR_12)[1].replace('.', '').lower() != 'txt':\n", "if not VAR_25.fileExists(VAR_12):\n", "if path.basename(VAR_12) == METADATA_FILENAME:\n", "VAR_25.addFile(VAR_12, VAR_13=mod_time)\n", "if VAR_13 > VAR_25.getModTime(VAR_12):\n", "def FUNC_12():...\n", "def FUNC_5(self, VAR_14):...\n", "VAR_25.invalidateCached(VAR_12)\n", "VAR_39 = ''\n", "\"\"\"docstring\"\"\"\n", "VAR_25.updateModTime(VAR_12, VAR_13)\n", "if not VAR_3:\n", "logging.error('Could not read metafile!', full_traceback())\n", "return VAR_39\n", "VAR_25 = self.file_db\n", "VAR_39 = f.read()\n", "VAR_39 = self.getDropboxFile(VAR_12).decode()\n", "VAR_26 = VAR_25.getFileList()\n", "for f in VAR_26:\n", "if not f in VAR_14:\n", "def FUNC_6(self):...\n", "VAR_25.deleteFile(f)\n", "\"\"\"docstring\"\"\"\n", "if not VAR_3:\n", "VAR_14 = utils.FolderSearch.getFilepathsInclSubfolders(PIC_FOLDER,\n allowed_extensions=['txt', 'png', 'jpg', 'jpeg'])\n", "VAR_35 = utils.DropboxFolderSearch.getFilepathsInclSubfoldersDropboxPublic(\n DROPBOX_FOLDER_LINK, DROPBOX_APP_KEY, DROPBOX_SECRET_KEY,\n unixify_mod_time=True)\n", "VAR_35 = list(zip(VAR_14, [utils.FileUtils.getModificationTimeUnix(f) for f in\n VAR_14]))\n", "VAR_14 = [i[0] for i in VAR_35]\n", "for i in VAR_35:\n", "self.fileToDB(i[0], i[1])\n", "self.checkFilesForDeletion(VAR_14)\n", "VAR_27 = time()\n", "self.update_filelist_thread_queue.put((VAR_27,))\n", "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "VAR_28 = None\n", "VAR_29 = requests.post('https://api.dropbox.com/1/metadata/link', VAR_28=\n dict(link=DROPBOX_FOLDER_LINK, client_id=DROPBOX_APP_KEY, client_secret\n =DROPBOX_SECRET_KEY, path=filepath), timeout=5)\n", "if VAR_29.ok:\n", "VAR_29 = json.loads(VAR_29.content.decode())['link'].split('?')[0] + '?dl=1'\n", "VAR_28 = None\n", "VAR_29 = requests.get(VAR_29, timeout=5)\n", "VAR_28 = None\n", "return VAR_28\n", "if VAR_29.ok:\n", "VAR_28 = VAR_29.content\n" ]
[ "from python_version_check import check_version\n", "check_version((3, 4, 3))\n", "VERSION_NUMBER = 1, 0, 10\n", "import logging\n", "from random import choice\n", "from time import time\n", "import requests, json\n", "from threading import Thread\n", "from queue import Queue\n", "from traceback_printer import full_traceback\n", "from telegramHigh import TelegramHigh\n", "from textual_data import *\n", "from userparams import UserParams\n", "from language_support import LanguageSupport\n", "import utils\n", "from file_db import FileDB\n", "from button_handler import getMainMenu\n", "from settings_reader import SettingsReader\n", "sr = SettingsReader()\n", "FILE_UPDATE_PERIOD = sr.settings_reader(0)\n", "FROM_DROPBOX = bool(sr.settings_reader(2) == 'DB')\n", "MIN_PICTURE_SEND_PERIOD = 60\n", "MAX_PICTURE_SEND_PERIOD = 86400\n", "PICTURE_SEND_PERIOD = sr.settings_reader(1)\n", "INITIAL_SUBSCRIBER_PARAMS = {'lang': 'EN', 'subscribed': 0, 'period':\n PICTURE_SEND_PERIOD, 'last_update_time': 0}\n", "\"\"\"The bot class\"\"\"\n", "LAST_UPDATE_ID = None\n", "pic_sender_threads = {}\n", "def __init__(self, token):...\n", "super(MainPicSender, self).__init__()\n", "self.bot = TelegramHigh(token)\n", "self.userparams = UserParams('users', initial=INITIAL_SUBSCRIBER_PARAMS)\n", "self.file_db = FileDB('files')\n", "self.updateFileListThread()\n", "self.files = []\n", "self.bot.start(processingFunction=self.processUpdate, periodicFunction=self\n .periodicRoutine)\n", "def processUpdate(self, u):...\n", "bot = self.bot\n", "Message = u.message\n", "message = Message.text\n", "message_id = Message.message_id\n", "chat_id = Message.chat_id\n", "subs = self.userparams\n", "subs.initializeUser(chat_id=chat_id, data=INITIAL_SUBSCRIBER_PARAMS)\n", "LS = LanguageSupport(subs.getEntry(chat_id=chat_id, param='lang'))\n", "lS = LS.languageSupport\n", "allv = LS.allVariants\n", "MMKM = lS(getMainMenu(subs.getEntry(chat_id=chat_id, param='subscribed')))\n", "if message == '/start':\n", "bot.sendMessage(chat_id=chat_id, message=lS(START_MESSAGE), key_markup=MMKM)\n", "if message == '/help' or message == HELP_BUTTON:\n", "def periodicRoutine(self):...\n", "bot.sendMessage(chat_id=chat_id, message=lS(HELP_MESSAGE).format(str(\n MIN_PICTURE_SEND_PERIOD), str(MAX_PICTURE_SEND_PERIOD)), key_markup=\n MMKM, markdown=True)\n", "if message == '/about' or message == ABOUT_BUTTON:\n", "\"\"\"docstring\"\"\"\n", "bot.sendMessage(chat_id=chat_id, message=lS(ABOUT_MESSAGE).format('.'.join(\n [str(i) for i in VERSION_NUMBER])), key_markup=MMKM, markdown=True)\n", "if message == '/otherbots' or message == lS(OTHER_BOTS_BUTTON):\n", "if not hasattr(self, 'update_filelist_thread_queue'):\n", "bot.sendMessage(chat_id=chat_id, message=lS(OTHER_BOTS_MESSAGE), key_markup\n =MMKM, markdown=True)\n", "if message == '/period' or message == lS(SHOW_PERIOD_BUTTON):\n", "self.update_filelist_thread_queue = Queue()\n", "while not self.update_filelist_thread_queue.empty():\n", "period = self.userparams.getEntry(chat_id, 'period')\n", "if message == '/subscribe' or message == SUBSCRIBE_BUTTON:\n", "q = self.update_filelist_thread_queue.get()\n", "self.updateFileListThread()\n", "bot.sendMessage(chat_id=chat_id, message=\n 'An image is sent to you every {0} seconds.'.format(period), key_markup\n =MMKM)\n", "period = self.userparams.getEntry(chat_id, 'period')\n", "if message == '/unsubscribe' or message == UNSUBSCRIBE_BUTTON:\n", "self.last_filelist_update_time = q[0]\n", "for user in self.userparams.getAllEntries(fields=['subscribed', 
'period',\n", "if self.userparams.getEntry(chat_id, 'subscribed') == 0:\n", "if self.userparams.getEntry(chat_id, 'subscribed') == 1:\n", "if message == '/gimmepic' or message == GIMMEPIC_BUTTON:\n", "if user[0] == 1:\n", "def updateFileListThread(self):...\n", "self.userparams.setEntry(chat_id, 'subscribed', 1)\n", "bot.sendMessage(chat_id=chat_id, message=\n \"\"\"You have already subscribed!\nTo cancel subscription enter /unsubscribe.\nTo change the period of picture sending type a number.\nYour current period is {0} seconds.\"\"\"\n .format(period), key_markup=MMKM)\n", "self.userparams.setEntry(chat_id, 'subscribed', 0)\n", "bot.sendMessage(chat_id=chat_id, message=\n \"You haven't subscribed yet! To subscribe type /subscribe\", key_markup=MMKM\n )\n", "self.startRandomPicThread(chat_id, MMKM)\n", "new_period = int(message)\n", "bot.sendMessage(chat_id=chat_id, message='Unknown command!', key_markup=MMKM)\n", "cur_time = time()\n", "\"\"\"docstring\"\"\"\n", "self.userparams.setEntry(chat_id, 'last_update_time', time())\n", "MMKM = getMainMenu(subscribed=False)\n", "if self.userparams.getEntry(chat_id, 'subscribed') == 0:\n", "if cur_time - user[2] > user[1]:\n", "if not hasattr(self, 'last_filelist_update_time') or time(\n", "MMKM = getMainMenu(subscribed=True)\n", "bot.sendMessage(chat_id=chat_id, message=\n 'You have unsubscribed. To subscribe again type /subscribe', key_markup\n =MMKM)\n", "bot.sendMessage(chat_id=chat_id, message=\n \"You're not subscribed yet! /subscribe first!\", key_markup=MMKM)\n", "if new_period < MIN_PICTURE_SEND_PERIOD:\n", "self.startRandomPicThread(user[3], MMKM=getMainMenu(True))\n", "if not (hasattr(self, 'filelist_updater_thread') and self.\n", "def fileToDB(self, filepath, mod_time):...\n", "bot.sendMessage(chat_id=chat_id, message=\n \"\"\"You're subscribed now! \nAn image will be sent to you every {0} seconds. \nTo cancel subscription enter /unsubscribe. 
\nTo change the period of picture sending type a number.\"\"\"\n .format(period), key_markup=MMKM)\n", "self.userparams.setEntry(chat_id, 'period', MIN_PICTURE_SEND_PERIOD)\n", "if new_period > MAX_PICTURE_SEND_PERIOD:\n", "self.userparams.setEntry(user[3], 'last_update_time', cur_time)\n", "self.filelist_updater_thread = Thread(target=self.updateFileList)\n", "print('updater already running!')\n", "\"\"\"docstring\"\"\"\n", "bot.sendMessage(chat_id=chat_id, message=\n \"\"\"The minimum possible period is {0}.\nSetting period to {0}.\"\"\".format\n (str(MIN_PICTURE_SEND_PERIOD)), key_markup=MMKM)\n", "self.userparams.setEntry(chat_id, 'period', MAX_PICTURE_SEND_PERIOD)\n", "self.userparams.setEntry(chat_id, 'period', new_period)\n", "self.filelist_updater_thread.start()\n", "file_db = self.file_db\n", "self.userparams.setEntry(chat_id, 'last_update_time', int(time()))\n", "bot.sendMessage(chat_id=chat_id, message=\n \"\"\"The maximum possible period is {0}.\nSetting period to {0}.\"\"\".format\n (str(MAX_PICTURE_SEND_PERIOD)), key_markup=MMKM)\n", "bot.sendMessage(chat_id=chat_id, message='Setting period to ' + str(\n new_period) + '.', key_markup=MMKM)\n", "if path.splitext(filepath)[1].replace('.', '').lower() != 'txt':\n", "if not file_db.fileExists(filepath):\n", "if path.basename(filepath) == METADATA_FILENAME:\n", "file_db.addFile(filepath, mod_time=mod_time)\n", "if mod_time > file_db.getModTime(filepath):\n", "def getMetadata():...\n", "def checkFilesForDeletion(self, files):...\n", "file_db.invalidateCached(filepath)\n", "metadata = ''\n", "\"\"\"docstring\"\"\"\n", "file_db.updateModTime(filepath, mod_time)\n", "if not FROM_DROPBOX:\n", "logging.error('Could not read metafile!', full_traceback())\n", "return metadata\n", "file_db = self.file_db\n", "metadata = f.read()\n", "metadata = self.getDropboxFile(filepath).decode()\n", "DB_files = file_db.getFileList()\n", "for f in DB_files:\n", "if not f in files:\n", "def updateFileList(self):...\n", "file_db.deleteFile(f)\n", "\"\"\"docstring\"\"\"\n", "if not FROM_DROPBOX:\n", "files = utils.FolderSearch.getFilepathsInclSubfolders(PIC_FOLDER,\n allowed_extensions=['txt', 'png', 'jpg', 'jpeg'])\n", "files_and_mods = (utils.DropboxFolderSearch.\n getFilepathsInclSubfoldersDropboxPublic(DROPBOX_FOLDER_LINK,\n DROPBOX_APP_KEY, DROPBOX_SECRET_KEY, unixify_mod_time=True))\n", "files_and_mods = list(zip(files, [utils.FileUtils.getModificationTimeUnix(f\n ) for f in files]))\n", "files = [i[0] for i in files_and_mods]\n", "for i in files_and_mods:\n", "self.fileToDB(i[0], i[1])\n", "self.checkFilesForDeletion(files)\n", "last_filelist_update_time = time()\n", "self.update_filelist_thread_queue.put((last_filelist_update_time,))\n", "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "data = None\n", "req = requests.post('https://api.dropbox.com/1/metadata/link', data=dict(\n link=DROPBOX_FOLDER_LINK, client_id=DROPBOX_APP_KEY, client_secret=\n DROPBOX_SECRET_KEY, path=filepath), timeout=5)\n", "if req.ok:\n", "req = json.loads(req.content.decode())['link'].split('?')[0] + '?dl=1'\n", "data = None\n", "req = requests.get(req, timeout=5)\n", "data = None\n", "return data\n", "if req.ok:\n", "data = req.content\n" ]
[ 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Expr'", "Assign'", "Import'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "FunctionDef'", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Condition", "Expr'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Expr'", "Assign'", "Condition", "Assign'", "For", "Condition", "Condition", "Condition", "Condition", "FunctionDef'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Docstring", "Expr'", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Expr'", "Expr'", "Condition", "Expr'", "Condition", "FunctionDef'", "Expr'", "Expr'", "Condition", "Expr'", "Assign'", "Expr'", "Docstring", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Condition", "Condition", "Condition", "Expr'", "Condition", "FunctionDef'", "FunctionDef'", "Expr'", "Assign'", "Docstring", "Expr'", "Condition", "Expr'", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Condition", "FunctionDef'", "Expr'", "Docstring", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Expr'", "Expr'", "Assign'", "Expr'", "Condition", "Docstring", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Return'", "Condition", "Assign'" ]
[ "def FUNC_39(VAR_101):...\n", "VAR_101.message = VAR_73\n", "return VAR_101\n" ]
[ "def decorate(ruleinfo):...\n", "ruleinfo.message = message\n", "return ruleinfo\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_17(self, VAR_23):...\n", "VAR_32 = ', '.join([str(each) for each in VAR_23])\n", "for VAR_16 in VAR_23:\n", "VAR_7 = self.get_directory_content(VAR_16)\n", "VAR_0.delete('directory', 'id in (' + VAR_32 + ')')\n", "self.remove_files([each['id'] for each in VAR_7['files']])\n", "self.remove_directories([each['id'] for each in VAR_7['directories']])\n" ]
[ "def remove_directories(self, directory_ids):...\n", "directory_ids_string = ', '.join([str(each) for each in directory_ids])\n", "for directory_id in directory_ids:\n", "content = self.get_directory_content(directory_id)\n", "db.delete('directory', 'id in (' + directory_ids_string + ')')\n", "self.remove_files([each['id'] for each in content['files']])\n", "self.remove_directories([each['id'] for each in content['directories']])\n" ]
[ 0, 4, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_14(self):...\n", "return super().get_queryset().filter(Q(child_branch__isnull=False) | Q(\n child_leaf__learning_container_year__isnull=False))\n" ]
[ "def get_queryset(self):...\n", "return super().get_queryset().filter(Q(child_branch__isnull=False) | Q(\n child_leaf__learning_container_year__isnull=False))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_23(self, VAR_28=None, VAR_29=None):...\n", "\"\"\"docstring\"\"\"\n", "VAR_42 = set(self.targets(VAR_28))\n", "VAR_43 = defaultdict(set)\n", "for target in self.targets(VAR_29):\n", "for dependency in target.dependencies:\n", "return VAR_43\n", "if dependency in VAR_42:\n", "VAR_43[target].add(dependency)\n" ]
[ "def dependents(self, on_predicate=None, from_predicate=None):...\n", "\"\"\"docstring\"\"\"\n", "core = set(self.targets(on_predicate))\n", "dependees = defaultdict(set)\n", "for target in self.targets(from_predicate):\n", "for dependency in target.dependencies:\n", "return dependees\n", "if dependency in core:\n", "dependees[target].add(dependency)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "For", "For", "Return'", "Condition", "Expr'" ]
[ "@property...\n", "return os.path.exists(self.file)\n" ]
[ "@property...\n", "return os.path.exists(self.file)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_0(VAR_1):...\n", "for key in VAR_6.__all__:\n", "if not hasattr(VAR_1, key):\n", "setattr(VAR_1, key, getattr(VAR_6, key))\n" ]
[ "def _include_filters(obj):...\n", "for key in filters.__all__:\n", "if not hasattr(obj, key):\n", "setattr(obj, key, getattr(filters, key))\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Condition", "Expr'" ]
[ "def FUNC_42(self, VAR_16):...\n", "VAR_25 = self.meta.get_field(VAR_16)\n", "VAR_46 = self.get(VAR_16)\n", "return self.cast(VAR_46, VAR_25)\n" ]
[ "def get_value(self, fieldname):...\n", "df = self.meta.get_field(fieldname)\n", "val = self.get(fieldname)\n", "return self.cast(val, df)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "@VAR_1.route('/')...\n", "VAR_8 = ''\n", "if VAR_18.get('tempFile') is not None:\n", "if VAR_18['tempFile'] != '':\n", "if 'filename' in request.args or VAR_4 != '' or 'currentFile' in VAR_18:\n", "VAR_8 = open(VAR_18['tempFile']).read()\n", "if not VAR_4:\n", "return render_template('editor.html', VAR_8=editor_content)\n", "if 'filename' in request.args:\n", "if 'email' in VAR_18 or 'social' in VAR_18:\n", "VAR_4 = request.args['filename']\n", "VAR_4 = VAR_18['currentFile']\n", "if 'email' in VAR_18:\n", "VAR_12 = VAR_18['email']\n", "if 'social' in VAR_18:\n", "VAR_10 = os.path.join(VAR_1.config['UPLOAD_FOLDER'], VAR_12)\n", "VAR_12 = VAR_18['social']\n", "VAR_22 = os.path.join(VAR_10, VAR_4)\n", "VAR_18['currentFile'] = VAR_4\n", "VAR_8 = f.read()\n", "VAR_8 = ''\n" ]
[ "@app.route('/')...\n", "editor_content = ''\n", "if session.get('tempFile') is not None:\n", "if session['tempFile'] != '':\n", "if 'filename' in request.args or filename != '' or 'currentFile' in session:\n", "editor_content = open(session['tempFile']).read()\n", "if not filename:\n", "return render_template('editor.html', editor_content=editor_content)\n", "if 'filename' in request.args:\n", "if 'email' in session or 'social' in session:\n", "filename = request.args['filename']\n", "filename = session['currentFile']\n", "if 'email' in session:\n", "email = session['email']\n", "if 'social' in session:\n", "userpath = os.path.join(app.config['UPLOAD_FOLDER'], email)\n", "email = session['social']\n", "filepath = os.path.join(userpath, filename)\n", "session['currentFile'] = filename\n", "editor_content = f.read()\n", "editor_content = ''\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Condition", "Return'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_4(VAR_2, VAR_3, VAR_4, VAR_5):...\n", "VAR_11, VAR_10 = FUNC_1()\n", "VAR_12 = FUNC_0()\n", "VAR_11.execute(\"select image, avg(legit) as l FROM npc WHERE race='\" +\n VAR_2 + \"' AND class='\" + VAR_3 + \"' AND sex='\" + VAR_4 +\n \"' GROUP BY image HAVING l > 5 ORDER BY SUM(legit) DESC;\")\n", "VAR_10.commit()\n", "VAR_13 = VAR_11.fetchmany(5)\n", "VAR_10.close()\n", "return VAR_13\n" ]
[ "def findNPC(race, classe, sex, level):...\n", "c, conn = getConnection()\n", "date = now()\n", "c.execute(\"select image, avg(legit) as l FROM npc WHERE race='\" + race +\n \"' AND class='\" + classe + \"' AND sex='\" + sex +\n \"' GROUP BY image HAVING l > 5 ORDER BY SUM(legit) DESC;\")\n", "conn.commit()\n", "out = c.fetchmany(5)\n", "conn.close()\n", "return out\n" ]
[ 0, 0, 0, 4, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Return'" ]
[ "__author__ = 'Johannes Köster'\n", "__copyright__ = 'Copyright 2015, Johannes Köster'\n", "__email__ = '[email protected]'\n", "__license__ = 'MIT'\n", "import os\n", "import re\n", "import sys\n", "import inspect\n", "import sre_constants\n", "from collections import defaultdict\n", "from snakemake.io import IOFile, _IOFile, protected, temp, dynamic, Namedlist\n", "from snakemake.io import expand, InputFiles, OutputFiles, Wildcards, Params, Log\n", "from snakemake.io import apply_wildcards, is_flagged, not_iterable\n", "from snakemake.exceptions import RuleException, IOFileException, WildcardError, InputFunctionException\n", "def __init__(self, *VAR_0, VAR_1=None, VAR_2=None):...\n", "\"\"\"docstring\"\"\"\n", "if len(VAR_0) == 2:\n", "VAR_10, VAR_41 = VAR_0\n", "if len(VAR_0) == 1:\n", "self.name = VAR_10\n", "VAR_17 = VAR_0[0]\n", "def FUNC_0(self, VAR_3, VAR_4=True):...\n", "self.workflow = VAR_41\n", "self.name = VAR_17.name\n", "def FUNC_21(VAR_16):...\n", "self.docstring = None\n", "self.workflow = VAR_17.workflow\n", "return (VAR_16.input, VAR_16.dynamic_input) if VAR_4 else (VAR_16.output,\n VAR_16.dynamic_output)\n", "self.message = None\n", "self.docstring = VAR_17.docstring\n", "self._input = InputFiles()\n", "self.message = VAR_17.message\n", "self._output = OutputFiles()\n", "self._input = InputFiles(VAR_17._input)\n", "self._params = Params()\n", "self._output = OutputFiles(VAR_17._output)\n", "self.dependencies = dict()\n", "self._params = Params(VAR_17._params)\n", "self.dynamic_output = set()\n", "self.dependencies = dict(VAR_17.dependencies)\n", "self.dynamic_input = set()\n", "self.dynamic_output = set(VAR_17.dynamic_output)\n", "self.temp_output = set()\n", "self.dynamic_input = set(VAR_17.dynamic_input)\n", "self.protected_output = set()\n", "self.temp_output = set(VAR_17.temp_output)\n", "self.touch_output = set()\n", "self.protected_output = set(VAR_17.protected_output)\n", "self.subworkflow_input = dict()\n", "self.touch_output = set(VAR_17.touch_output)\n", "self.resources = dict(_cores=1, _nodes=1)\n", "self.subworkflow_input = dict(VAR_17.subworkflow_input)\n", "self.priority = 0\n", "self.resources = VAR_17.resources\n", "self.version = None\n", "self.priority = VAR_17.priority\n", "self._log = Log()\n", "self.version = VAR_17.version\n", "self._benchmark = None\n", "self._log = VAR_17._log\n", "self.wildcard_names = set()\n", "self._benchmark = VAR_17._benchmark\n", "self.lineno = VAR_1\n", "self.wildcard_names = set(VAR_17.wildcard_names)\n", "self.snakefile = VAR_2\n", "self.lineno = VAR_17.lineno\n", "self.run_func = None\n", "self.snakefile = VAR_17.snakefile\n", "self.shellcmd = None\n", "self.run_func = VAR_17.run_func\n", "self.norun = False\n", "self.shellcmd = VAR_17.shellcmd\n", "self.norun = VAR_17.norun\n" ]
[ "__author__ = 'Johannes Köster'\n", "__copyright__ = 'Copyright 2015, Johannes Köster'\n", "__email__ = '[email protected]'\n", "__license__ = 'MIT'\n", "import os\n", "import re\n", "import sys\n", "import inspect\n", "import sre_constants\n", "from collections import defaultdict\n", "from snakemake.io import IOFile, _IOFile, protected, temp, dynamic, Namedlist\n", "from snakemake.io import expand, InputFiles, OutputFiles, Wildcards, Params, Log\n", "from snakemake.io import apply_wildcards, is_flagged, not_iterable\n", "from snakemake.exceptions import RuleException, IOFileException, WildcardError, InputFunctionException\n", "def __init__(self, *args, lineno=None, snakefile=None):...\n", "\"\"\"docstring\"\"\"\n", "if len(args) == 2:\n", "name, workflow = args\n", "if len(args) == 1:\n", "self.name = name\n", "other = args[0]\n", "def dynamic_branch(self, wildcards, input=True):...\n", "self.workflow = workflow\n", "self.name = other.name\n", "def get_io(rule):...\n", "self.docstring = None\n", "self.workflow = other.workflow\n", "return (rule.input, rule.dynamic_input) if input else (rule.output, rule.\n dynamic_output)\n", "self.message = None\n", "self.docstring = other.docstring\n", "self._input = InputFiles()\n", "self.message = other.message\n", "self._output = OutputFiles()\n", "self._input = InputFiles(other._input)\n", "self._params = Params()\n", "self._output = OutputFiles(other._output)\n", "self.dependencies = dict()\n", "self._params = Params(other._params)\n", "self.dynamic_output = set()\n", "self.dependencies = dict(other.dependencies)\n", "self.dynamic_input = set()\n", "self.dynamic_output = set(other.dynamic_output)\n", "self.temp_output = set()\n", "self.dynamic_input = set(other.dynamic_input)\n", "self.protected_output = set()\n", "self.temp_output = set(other.temp_output)\n", "self.touch_output = set()\n", "self.protected_output = set(other.protected_output)\n", "self.subworkflow_input = dict()\n", "self.touch_output = set(other.touch_output)\n", "self.resources = dict(_cores=1, _nodes=1)\n", "self.subworkflow_input = dict(other.subworkflow_input)\n", "self.priority = 0\n", "self.resources = other.resources\n", "self.version = None\n", "self.priority = other.priority\n", "self._log = Log()\n", "self.version = other.version\n", "self._benchmark = None\n", "self._log = other._log\n", "self.wildcard_names = set()\n", "self._benchmark = other._benchmark\n", "self.lineno = lineno\n", "self.wildcard_names = set(other.wildcard_names)\n", "self.snakefile = snakefile\n", "self.lineno = other.lineno\n", "self.run_func = None\n", "self.snakefile = other.snakefile\n", "self.shellcmd = None\n", "self.run_func = other.run_func\n", "self.norun = False\n", "self.shellcmd = other.shellcmd\n", "self.norun = other.norun\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Assign'", "Assign'", "Assign'", "Assign'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Docstring", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Docstring", "Assign'", "Return'", "Assign'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_4(self):...\n", "VAR_4 = '/api/apps'\n", "VAR_5 = self.client.post(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_6 = VAR_5.data['id']\n", "VAR_4 = '/api/apps/{app_id}/containers'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(len(VAR_5.data['results']), 0)\n", "VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_8 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_4 = '/api/apps/{app_id}/scale'.format(**locals())\n", "VAR_8 = {'web': 4, 'worker': 2}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 204)\n", "VAR_4 = '/api/apps/{app_id}/containers'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(len(VAR_5.data['results']), 6)\n", "VAR_4 = '/api/apps/{app_id}'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "VAR_4 = '/api/apps/{app_id}/containers/web'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(len(VAR_5.data['results']), 4)\n", "VAR_9 = VAR_5.data['results'][0]['num']\n", "VAR_4 = '/api/apps/{app_id}/containers/web/{num}'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(VAR_5.data['num'], VAR_9)\n", "VAR_4 = '/api/apps/{app_id}/scale'.format(**locals())\n", "VAR_8 = {'web': 2, 'worker': 1}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 204)\n", "VAR_4 = '/api/apps/{app_id}/containers'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(len(VAR_5.data['results']), 3)\n", "self.assertEqual(max(VAR_7['num'] for VAR_7 in VAR_5.data['results']), 2)\n", "VAR_4 = '/api/apps/{app_id}'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "VAR_4 = '/api/apps/{app_id}/scale'.format(**locals())\n", "VAR_8 = {'web': 0, 'worker': 0}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 204)\n", "VAR_4 = '/api/apps/{app_id}/containers'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(len(VAR_5.data['results']), 0)\n", "VAR_4 = '/api/apps/{app_id}'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n" ]
[ "def test_container_api_heroku(self):...\n", "url = '/api/apps'\n", "response = self.client.post(url)\n", "self.assertEqual(response.status_code, 201)\n", "app_id = response.data['id']\n", "url = '/api/apps/{app_id}/containers'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 0)\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "url = '/api/apps/{app_id}/scale'.format(**locals())\n", "body = {'web': 4, 'worker': 2}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 204)\n", "url = '/api/apps/{app_id}/containers'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 6)\n", "url = '/api/apps/{app_id}'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "url = '/api/apps/{app_id}/containers/web'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 4)\n", "num = response.data['results'][0]['num']\n", "url = '/api/apps/{app_id}/containers/web/{num}'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(response.data['num'], num)\n", "url = '/api/apps/{app_id}/scale'.format(**locals())\n", "body = {'web': 2, 'worker': 1}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 204)\n", "url = '/api/apps/{app_id}/containers'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 3)\n", "self.assertEqual(max(c['num'] for c in response.data['results']), 2)\n", "url = '/api/apps/{app_id}'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "url = '/api/apps/{app_id}/scale'.format(**locals())\n", "body = {'web': 0, 'worker': 0}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 204)\n", "url = '/api/apps/{app_id}/containers'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 0)\n", "url = '/api/apps/{app_id}'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n" ]
[ 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_22(self, VAR_17, VAR_16):...\n", "\"\"\"docstring\"\"\"\n" ]
[ "def remove_export(self, context, volume):...\n", "\"\"\"docstring\"\"\"\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Docstring" ]
[ "import sys\n", "import os\n", "import re\n", "import json\n", "import shlex\n", "import urllib.request\n", "import codecs\n", "VAR_0 = codecs.getreader('utf-8')\n", "VAR_1 = 0\n", "VAR_7 = '\\x1b[94m'\n", "VAR_8 = '\\x1b[94m'\n", "VAR_9 = '\\x1b[92m'\n", "VAR_10 = '\\x1b[93m'\n", "VAR_11 = '\\x1b[96m'\n", "VAR_12 = '\\x1b[91m'\n", "VAR_13 = '\\x1b[0m'\n", "VAR_14 = '\\x1b[1m'\n", "VAR_15 = '\\x1b[4m'\n", "def FUNC_0(VAR_2):...\n", "print('string'.format(header=c.HEADER, bold=c.BOLD, end=c.END, VAR_2=app))\n", "def FUNC_1(VAR_3):...\n", "print('\\n [' + CLASS_0.BOLD + CLASS_0.HEADER + VAR_3.title() + CLASS_0.END +\n ']\\n')\n", "def FUNC_2(VAR_3):...\n", "print(CLASS_0.OKGREEN + '✔', VAR_3, CLASS_0.END)\n", "def FUNC_3(VAR_3):...\n", "print(CLASS_0.WARNING + '!', VAR_3, CLASS_0.END)\n", "def FUNC_4(VAR_3, VAR_4=True):...\n", "if VAR_4:\n", "VAR_1 = 1\n", "print(CLASS_0.MAYBE_FAIL + '?', VAR_3, CLASS_0.END)\n", "print(CLASS_0.FAIL + '✘', VAR_3, CLASS_0.END)\n", "def FUNC_5(VAR_5):...\n", "VAR_20 = urllib.request.urlopen(VAR_5)\n", "return {'content': '', 'code': e.code}\n", "return {'content': VAR_20.read().decode('UTF8'), 'code': 200}\n" ]
[ "import sys\n", "import os\n", "import re\n", "import json\n", "import shlex\n", "import urllib.request\n", "import codecs\n", "reader = codecs.getreader('utf-8')\n", "return_code = 0\n", "HEADER = '\\x1b[94m'\n", "OKBLUE = '\\x1b[94m'\n", "OKGREEN = '\\x1b[92m'\n", "WARNING = '\\x1b[93m'\n", "MAYBE_FAIL = '\\x1b[96m'\n", "FAIL = '\\x1b[91m'\n", "END = '\\x1b[0m'\n", "BOLD = '\\x1b[1m'\n", "UNDERLINE = '\\x1b[4m'\n", "def header(app):...\n", "print(\n \"\"\"\n [{header}{bold}YunoHost App Package Linter{end}]\n\n App packaging documentation - https://yunohost.org/#/packaging_apps\n App package example - https://github.com/YunoHost/example_ynh\n Official helpers - https://yunohost.org/#/packaging_apps_helpers_en\n Experimental helpers - https://github.com/YunoHost-Apps/Experimental_helpers\n\n Analyzing package {header}{app}{end}\"\"\"\n .format(header=c.HEADER, bold=c.BOLD, end=c.END, app=app))\n", "def print_header(str):...\n", "print('\\n [' + c.BOLD + c.HEADER + str.title() + c.END + ']\\n')\n", "def print_right(str):...\n", "print(c.OKGREEN + '✔', str, c.END)\n", "def print_warning(str):...\n", "print(c.WARNING + '!', str, c.END)\n", "def print_error(str, reliable=True):...\n", "if reliable:\n", "return_code = 1\n", "print(c.MAYBE_FAIL + '?', str, c.END)\n", "print(c.FAIL + '✘', str, c.END)\n", "def urlopen(url):...\n", "conn = urllib.request.urlopen(url)\n", "return {'content': '', 'code': e.code}\n", "return {'content': conn.read().decode('UTF8'), 'code': 200}\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "FunctionDef'", "Expr'", "FunctionDef'", "Expr'", "FunctionDef'", "Expr'", "FunctionDef'", "Condition", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Return'", "Return'" ]
[ "import argparse\n", "import logging\n", "import os\n", "import shutil\n", "from .pyweb import DEFAULT_PYWEB_CONTENT_DIR, DEFAULT_PYWEB_LOG_DIR\n", "def __init__(self, VAR_1, VAR_2, VAR_3, VAR_4=None):...\n", "self.logger = VAR_4 or logging.getLogger('ContentInstaller')\n", "VAR_12, VAR_13, VAR_14 = self._sanity_check_path(VAR_1, VAR_2, VAR_3)\n", "self.src_path = VAR_12\n", "self.dst_path = VAR_13\n", "self.www_root = VAR_14\n", "def FUNC_2(self, VAR_5, VAR_6, VAR_3):...\n", "VAR_4 = self.logger\n", "if not os.path.isdir(VAR_5):\n", "VAR_19 = 'Source path: %s does not exist or is not a directory.' % VAR_5\n", "if not os.path.isdir(VAR_3):\n", "VAR_4.critical(VAR_19)\n", "VAR_19 = 'Web root % s does not exist or is not a directory.' % VAR_5\n", "VAR_15 = os.path.abspath(VAR_3)\n", "VAR_4.critical(VAR_19)\n", "VAR_16 = VAR_6\n", "if os.path.isabs(VAR_6):\n", "VAR_20 = os.path.commonprefix([VAR_15, VAR_6])\n", "VAR_13 = os.path.join(VAR_15, VAR_6)\n", "if VAR_20 is not VAR_15:\n", "VAR_13 = os.path.realpath(VAR_13)\n", "VAR_19 = (\n 'Destination path is absolute and is not a subdirectory of web root. {}'\n .format([VAR_3, VAR_6]))\n", "VAR_16 = os.path.relpath(VAR_15, VAR_6)\n", "VAR_20 = os.path.commonprefix([VAR_15, VAR_13])\n", "VAR_4.critical(VAR_19)\n", "VAR_17 = os.path.join(VAR_15, VAR_16)\n", "if VAR_20 is not VAR_15:\n", "if os.path.exists(VAR_17):\n", "VAR_19 = (\n 'Destination is a relative path that resolves outside of web root. {}'.\n format([VAR_15, VAR_6]))\n", "VAR_16 = os.path.relpath(VAR_15, VAR_13)\n", "VAR_19 = 'Destination directory already exists: {}'.format(VAR_17)\n", "return VAR_5, VAR_16, VAR_15\n", "VAR_4.critical(VAR_19)\n", "VAR_4.critical(VAR_19)\n" ]
[ "import argparse\n", "import logging\n", "import os\n", "import shutil\n", "from .pyweb import DEFAULT_PYWEB_CONTENT_DIR, DEFAULT_PYWEB_LOG_DIR\n", "def __init__(self, src_path, dst_path, www_root, logger=None):...\n", "self.logger = logger or logging.getLogger('ContentInstaller')\n", "_src, _dst, _www = self._sanity_check_path(src_path, dst_path, www_root)\n", "self.src_path = _src\n", "self.dst_path = _dst\n", "self.www_root = _www\n", "def _sanity_check_path(self, src, dst, www_root):...\n", "logger = self.logger\n", "if not os.path.isdir(src):\n", "msg = 'Source path: %s does not exist or is not a directory.' % src\n", "if not os.path.isdir(www_root):\n", "logger.critical(msg)\n", "msg = 'Web root % s does not exist or is not a directory.' % src\n", "www_root_abs = os.path.abspath(www_root)\n", "logger.critical(msg)\n", "rel_dst = dst\n", "if os.path.isabs(dst):\n", "_root = os.path.commonprefix([www_root_abs, dst])\n", "_dst = os.path.join(www_root_abs, dst)\n", "if _root is not www_root_abs:\n", "_dst = os.path.realpath(_dst)\n", "msg = ('Destination path is absolute and is not a subdirectory of web root. {}'\n .format([www_root, dst]))\n", "rel_dst = os.path.relpath(www_root_abs, dst)\n", "_root = os.path.commonprefix([www_root_abs, _dst])\n", "logger.critical(msg)\n", "abs_dst = os.path.join(www_root_abs, rel_dst)\n", "if _root is not www_root_abs:\n", "if os.path.exists(abs_dst):\n", "msg = ('Destination is a relative path that resolves outside of web root. {}'\n .format([www_root_abs, dst]))\n", "rel_dst = os.path.relpath(www_root_abs, _dst)\n", "msg = 'Destination directory already exists: {}'.format(abs_dst)\n", "return src, rel_dst, www_root_abs\n", "logger.critical(msg)\n", "logger.critical(msg)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Return'", "Expr'", "Expr'" ]
[ "def FUNC_1():...\n", "\"\"\"docstring\"\"\"\n", "VAR_3 = 'testing inband sql injection on parameter '\n", "VAR_3 += \"'%s'\" % kb.injParameter\n", "logger.info(VAR_3)\n", "VAR_4 = ''\n", "VAR_0 = agent.prefixQuery(' UNION ALL SELECT NULL')\n", "for VAR_1 in (queries[kb.dbms].comment, ''):\n", "VAR_4 = FUNC_0(VAR_0, VAR_1)\n", "if kb.unionCount:\n", "if VAR_4:\n", "VAR_3 = 'the target url could be affected by an '\n", "VAR_8 = 'the target url is not affected by an '\n", "setUnion(VAR_1, VAR_4.count('NULL'))\n", "VAR_3 += 'inband sql injection vulnerability'\n", "VAR_8 += 'inband sql injection vulnerability'\n", "logger.info(VAR_3)\n", "logger.warn(VAR_8)\n", "return VAR_4\n" ]
[ "def unionTest():...\n", "\"\"\"docstring\"\"\"\n", "logMsg = 'testing inband sql injection on parameter '\n", "logMsg += \"'%s'\" % kb.injParameter\n", "logger.info(logMsg)\n", "value = ''\n", "query = agent.prefixQuery(' UNION ALL SELECT NULL')\n", "for comment in (queries[kb.dbms].comment, ''):\n", "value = __effectiveUnionTest(query, comment)\n", "if kb.unionCount:\n", "if value:\n", "logMsg = 'the target url could be affected by an '\n", "warnMsg = 'the target url is not affected by an '\n", "setUnion(comment, value.count('NULL'))\n", "logMsg += 'inband sql injection vulnerability'\n", "warnMsg += 'inband sql injection vulnerability'\n", "logger.info(logMsg)\n", "logger.warn(warnMsg)\n", "return value\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "AugAssign'", "Expr'", "Assign'", "Assign'", "For", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Expr'", "AugAssign'", "AugAssign'", "Expr'", "Expr'", "Return'" ]
[ "async def FUNC_5(self):...\n", "self.username = self.get_argument('username').lower()\n", "self.email = self.get_argument('email').lower()\n", "self.password = self.get_argument('psword').lower()\n", "if re.fullmatch('^(?=.{8,20}$)(?![_.])(?!.*[_.]{2})[a-zA-Z0-9._]+(?<![_.])$',\n", "self.render('signup.html', VAR_6=\n \"Your username doesn't follow our username rules. Please fix it.\")\n", "if re.fullmatch('(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\\\\.[a-zA-Z0-9-.]+$)', self\n", "return\n", "self.render('signup.html', VAR_6=\"Your email doesn't look like a valid email\")\n", "VAR_12 = self.check_if_exists()\n", "return\n", "if VAR_12 != None:\n", "self.render('signup.html', VAR_6=does_it_exist)\n", "VAR_0 = self.hash_password()\n", "return\n", "await self.do_insert(VAR_0)\n", "self.set_secure_cookie('user', self.username)\n", "self.redirect('/postlogin')\n", "return\n" ]
[ "async def post(self):...\n", "self.username = self.get_argument('username').lower()\n", "self.email = self.get_argument('email').lower()\n", "self.password = self.get_argument('psword').lower()\n", "if re.fullmatch('^(?=.{8,20}$)(?![_.])(?!.*[_.]{2})[a-zA-Z0-9._]+(?<![_.])$',\n", "self.render('signup.html', error=\n \"Your username doesn't follow our username rules. Please fix it.\")\n", "if re.fullmatch('(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\\\\.[a-zA-Z0-9-.]+$)', self\n", "return\n", "self.render('signup.html', error=\"Your email doesn't look like a valid email\")\n", "does_it_exist = self.check_if_exists()\n", "return\n", "if does_it_exist != None:\n", "self.render('signup.html', error=does_it_exist)\n", "hashed_password = self.hash_password()\n", "return\n", "await self.do_insert(hashed_password)\n", "self.set_secure_cookie('user', self.username)\n", "self.redirect('/postlogin')\n", "return\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Return'", "Expr'", "Assign'", "Return'", "Condition", "Expr'", "Assign'", "Return'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_9(self, VAR_0, VAR_1, VAR_8, VAR_2, VAR_3=None):...\n", "if not VAR_3:\n", "VAR_3 = self.make_reqid()\n", "VAR_5 = FUNC_9(VAR_0, VAR_1, VAR_8, VAR_3)\n", "self.set_response_handler(VAR_3, VAR_2)\n", "return VAR_5\n" ]
[ "def make_req_msg(self, interface, method, args, fun, reqid=None):...\n", "if not reqid:\n", "reqid = self.make_reqid()\n", "msg = make_req_msg(interface, method, args, reqid)\n", "self.set_response_handler(reqid, fun)\n", "return msg\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_19():...\n", "VAR_28 = getattr(VAR_1, VAR_30, None)\n", "if not VAR_28:\n", "VAR_64 = self.inspect()[VAR_30]\n", "VAR_55 = self.sql_time_limit_ms\n", "if VAR_64['file'] == ':memory:':\n", "if VAR_34 and VAR_34 < VAR_55:\n", "VAR_28 = sqlite3.connect(':memory:')\n", "VAR_28 = sqlite3.connect('file:{}?immutable=1'.format(VAR_64['file']), uri=\n True, check_same_thread=False)\n", "VAR_55 = VAR_34\n", "VAR_70 = VAR_28.cursor()\n", "if e.args == ('interrupted',):\n", "if VAR_33:\n", "self.prepare_connection(VAR_28)\n", "VAR_70.execute(VAR_31, VAR_32 or {})\n", "print('ERROR: conn={}, sql = {}, params = {}: {}'.format(VAR_28, repr(\n VAR_31), VAR_32, e))\n", "return Results(VAR_72, VAR_73, VAR_70.description)\n", "return Results(VAR_72, False, VAR_70.description)\n", "setattr(VAR_1, VAR_30, VAR_28)\n", "VAR_71 = self.max_returned_rows\n", "if VAR_71 == VAR_35:\n", "VAR_71 += 1\n", "if VAR_71 and VAR_33:\n", "VAR_72 = VAR_70.fetchmany(VAR_71 + 1)\n", "VAR_72 = VAR_70.fetchall()\n", "VAR_73 = len(VAR_72) > VAR_71\n", "VAR_73 = False\n", "VAR_72 = VAR_72[:VAR_71]\n" ]
[ "def sql_operation_in_thread():...\n", "conn = getattr(connections, db_name, None)\n", "if not conn:\n", "info = self.inspect()[db_name]\n", "time_limit_ms = self.sql_time_limit_ms\n", "if info['file'] == ':memory:':\n", "if custom_time_limit and custom_time_limit < time_limit_ms:\n", "conn = sqlite3.connect(':memory:')\n", "conn = sqlite3.connect('file:{}?immutable=1'.format(info['file']), uri=True,\n check_same_thread=False)\n", "time_limit_ms = custom_time_limit\n", "cursor = conn.cursor()\n", "if e.args == ('interrupted',):\n", "if truncate:\n", "self.prepare_connection(conn)\n", "cursor.execute(sql, params or {})\n", "print('ERROR: conn={}, sql = {}, params = {}: {}'.format(conn, repr(sql),\n params, e))\n", "return Results(rows, truncated, cursor.description)\n", "return Results(rows, False, cursor.description)\n", "setattr(connections, db_name, conn)\n", "max_returned_rows = self.max_returned_rows\n", "if max_returned_rows == page_size:\n", "max_returned_rows += 1\n", "if max_returned_rows and truncate:\n", "rows = cursor.fetchmany(max_returned_rows + 1)\n", "rows = cursor.fetchall()\n", "truncated = len(rows) > max_returned_rows\n", "truncated = False\n", "rows = rows[:max_returned_rows]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Expr'", "Expr'", "Expr'", "Return'", "Return'", "Expr'", "Assign'", "Condition", "AugAssign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def __str__(self):...\n", "return self.get_name()\n" ]
[ "def __str__(self):...\n", "return self.get_name()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "import psycopg2\n", "import secretConstants\n", "VAR_0 = ('dbname=' + secretConstants.DATABASE_NAME + ' user=' +\n secretConstants.DATABASE_USER + ' host=' + secretConstants.\n DATABASE_HOST + ' password=' + secretConstants.DATABASE_PASSWORD +\n ' port=' + secretConstants.DATABASE_PORT)\n", "VAR_1 = None\n", "VAR_2 = None\n", "def FUNC_0(VAR_3):...\n", "VAR_5 = (\n \"SELECT item_id from twitter_bot_vac_last_replied_id where name = '{0}'\"\n .format(VAR_3))\n", "VAR_1 = psycopg2.connect(VAR_0)\n", "print('Error %s' % e)\n", "if VAR_1:\n", "return VAR_2[0]\n", "VAR_6 = VAR_1.cursor()\n", "VAR_1.close()\n", "VAR_6.execute(VAR_5)\n", "VAR_2 = VAR_6.fetchone()\n" ]
[ "import psycopg2\n", "import secretConstants\n", "connectionString = ('dbname=' + secretConstants.DATABASE_NAME + ' user=' +\n secretConstants.DATABASE_USER + ' host=' + secretConstants.\n DATABASE_HOST + ' password=' + secretConstants.DATABASE_PASSWORD +\n ' port=' + secretConstants.DATABASE_PORT)\n", "conn = None\n", "result = None\n", "def getLastReplied(messageType):...\n", "QUERY = (\n \"SELECT item_id from twitter_bot_vac_last_replied_id where name = '{0}'\"\n .format(messageType))\n", "conn = psycopg2.connect(connectionString)\n", "print('Error %s' % e)\n", "if conn:\n", "return result[0]\n", "cur = conn.cursor()\n", "conn.close()\n", "cur.execute(QUERY)\n", "result = cur.fetchone()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Condition", "Return'", "Assign'", "Expr'", "Expr'", "Assign'" ]
[ "def FUNC_13(self):...\n", "self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n", "VAR_9 = '10.1.2.3'\n", "for feature in ('confirmation', 'paging', 'events', 'formatoutput'):\n", "self.driver._eql_execute('cli-settings', feature, 'off')\n", "self.driver._eql_execute('grpparams', 'show').AndReturn([\n 'Group-Ipaddress: %s' % VAR_9])\n", "self.mox.ReplayAll()\n", "self.driver.do_setup(self._context)\n", "self.assertEqual(VAR_9, self.driver._group_ip)\n" ]
[ "def test_do_setup(self):...\n", "self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n", "fake_group_ip = '10.1.2.3'\n", "for feature in ('confirmation', 'paging', 'events', 'formatoutput'):\n", "self.driver._eql_execute('cli-settings', feature, 'off')\n", "self.driver._eql_execute('grpparams', 'show').AndReturn([\n 'Group-Ipaddress: %s' % fake_group_ip])\n", "self.mox.ReplayAll()\n", "self.driver.do_setup(self._context)\n", "self.assertEqual(fake_group_ip, self.driver._group_ip)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_0():...\n", "\"\"\"docstring\"\"\"\n", "VAR_4 = request.args.get('group_by')\n", "if not VAR_4:\n", "VAR_4 = ''\n", "return VAR_4\n" ]
[ "def get_group_by_args():...\n", "\"\"\"docstring\"\"\"\n", "group_by = request.args.get('group_by')\n", "if not group_by:\n", "group_by = ''\n", "return group_by\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_3(self):...\n", "return 'magnet:?xt=urn:btih:%s&dn=%s' % (str(self.infohash).encode('hex'),\n self.title) + ('&tr=%s' % self.tracker_info if self.tracker_info else '')\n" ]
[ "def get_magnet(self):...\n", "return 'magnet:?xt=urn:btih:%s&dn=%s' % (str(self.infohash).encode('hex'),\n self.title) + ('&tr=%s' % self.tracker_info if self.tracker_info else '')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_35(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = {'SEVERITY': {'LOW': 7}, 'CONFIDENCE': {'HIGH': 7}}\n", "self.check_example('skip.py', VAR_2, VAR_1=True)\n" ]
[ "def test_ignore_skip(self):...\n", "\"\"\"docstring\"\"\"\n", "expect = {'SEVERITY': {'LOW': 7}, 'CONFIDENCE': {'HIGH': 7}}\n", "self.check_example('skip.py', expect, ignore_nosec=True)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'" ]
[ "def FUNC_16(self, VAR_8, **VAR_9):...\n", "\"\"\"docstring\"\"\"\n", "VAR_22 = dict()\n", "VAR_22.update(self.rule.workflow.globals)\n", "VAR_22.update(dict(input=self.input, output=self.output, VAR_24=self.params,\n VAR_16=self._format_wildcards, threads=self.threads, VAR_23=self.\n resources, log=self.log, version=self.rule.version, VAR_3=self.rule.name))\n", "VAR_22.update(VAR_9)\n", "return format(VAR_8, **_variables)\n" ]
[ "def format_wildcards(self, string, **variables):...\n", "\"\"\"docstring\"\"\"\n", "_variables = dict()\n", "_variables.update(self.rule.workflow.globals)\n", "_variables.update(dict(input=self.input, output=self.output, params=self.\n params, wildcards=self._format_wildcards, threads=self.threads,\n resources=self.resources, log=self.log, version=self.rule.version, rule\n =self.rule.name))\n", "_variables.update(variables)\n", "return format(string, **_variables)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_2(self, VAR_0, VAR_2, VAR_1=False):...\n", "\"\"\"docstring\"\"\"\n", "self.b_mgr.scores = []\n", "self.run_example(VAR_0, VAR_1=ignore_nosec)\n", "VAR_5 = 0\n", "VAR_6 = 0\n", "for test_scores in self.b_mgr.scores:\n", "for score_type in test_scores:\n", "self.assertEqual(VAR_5, VAR_6)\n", "self.assertIn(score_type, VAR_2)\n", "for rating in VAR_2[score_type]:\n", "VAR_5 += VAR_2[score_type][rating] * C.RANKING_VALUES[rating]\n", "VAR_6 += sum(test_scores[score_type])\n" ]
[ "def check_example(self, example_script, expect, ignore_nosec=False):...\n", "\"\"\"docstring\"\"\"\n", "self.b_mgr.scores = []\n", "self.run_example(example_script, ignore_nosec=ignore_nosec)\n", "expected = 0\n", "result = 0\n", "for test_scores in self.b_mgr.scores:\n", "for score_type in test_scores:\n", "self.assertEqual(expected, result)\n", "self.assertIn(score_type, expect)\n", "for rating in expect[score_type]:\n", "expected += expect[score_type][rating] * C.RANKING_VALUES[rating]\n", "result += sum(test_scores[score_type])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Assign'", "Assign'", "For", "For", "Expr'", "Expr'", "For", "AugAssign'", "AugAssign'" ]
[ "def FUNC_1():...\n", "" ]
[ "def b():...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "@staticmethod...\n", "return ' '.join(escape_path_argument(s) for s in (sys.executable, os.path.\n join(os.path.dirname(os.path.realpath(__file__)),\n 'run_shell_command_testfiles', VAR_0)))\n" ]
[ "@staticmethod...\n", "return ' '.join(escape_path_argument(s) for s in (sys.executable, os.path.\n join(os.path.dirname(os.path.realpath(__file__)),\n 'run_shell_command_testfiles', scriptname)))\n" ]
[ 0, 2 ]
[ "Condition", "Return'" ]
[ "from . import pathexpr\n", "from . import attrexpr\n", "from . import ugoexpr\n", "from . import fs\n", "import copy\n", "import collections\n", "import itertools\n", "import os\n", "import glob\n", "\"\"\"string\"\"\"\n", "\"\"\"string\"\"\"\n", "VAR_0 = {}\n", "def FUNC_0(VAR_1):...\n", "VAR_0[VAR_1.__name__] = VAR_1()\n", "return VAR_1\n" ]
[ "from . import pathexpr\n", "from . import attrexpr\n", "from . import ugoexpr\n", "from . import fs\n", "import copy\n", "import collections\n", "import itertools\n", "import os\n", "import glob\n", "\"\"\"\n\n{\n \n 'collections' : {\n },\n \n 'globals' : {},\n \n 'rules' : {\n \n 'ROOT' : [\n ['multiple', { \n 'key' : 'department'\n 'bookmarks' : ['workarea'],\n 'localattributes' : {},\n 'treeattributes' : {},\n 'user' : '(parameter user)',\n 'group' : 'vfx',\n 'permissions' : 'rwxr-xr-x' \n }],\n \n ['directory', {\n 'name' : 'value'\n }]\n \n ],\n \n 'alternative' : [\n ],\n \n 'rule2' : [\n ]\n }\n}\n\n\"\"\"\n", "\"\"\"\na rule is a list of directory levels.\na compiled rule has:\n a set of bookmarks under it\n a set of parameters under it\n a set of attributes under it\n\nDirectory level types:\n fixed : one or more fixed names, not parameterized\n fields : bookmarks, local attrs, tree attrs, name, user, group, permissions\n branch : redirects to one or more other rules, IN ORDER, no special attributes of its own\n fields: rules\n parameterized : any number of parameterized directories, there is one key and potentially many values.\n fields : bookmarks, local attrs, tree attrs, key, collection, user group, permissions\n if there is an collection attribute, then the values are restricted.\n regex : can represent zero or more parameters, as defined by the groups in the expression. Also good when\n there is a prefix or suffix or restrictions on the character set.\n fields: bookmarks, local attrs, tree attrs, pattern, collections, user, group, permissions\n regex is TODO\n\"\"\"\n", "FnLevel = {}\n", "def register_level(cls):...\n", "FnLevel[cls.__name__] = cls()\n", "return cls\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "Import'", "Expr'", "Expr'", "Assign'", "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_0():...\n", "\"\"\"docstring\"\"\"\n", "VAR_30 = os.environ.get('OS_CONFIG_APPLIER_TEMPLATES', None)\n", "if VAR_30 is None:\n", "VAR_30 = '/opt/stack/os-apply-config/templates'\n", "return VAR_30\n", "if not os.path.isdir(VAR_30):\n", "VAR_30 = '/opt/stack/os-config-applier/templates'\n", "if os.path.isdir(VAR_30) and not os.path.isdir(VAR_0):\n", "logging.warning(\n 'Template directory %s is deprecated. The recommended location for template files is %s'\n , VAR_30, VAR_0)\n", "VAR_30 = VAR_0\n" ]
[ "def templates_dir():...\n", "\"\"\"docstring\"\"\"\n", "templates_dir = os.environ.get('OS_CONFIG_APPLIER_TEMPLATES', None)\n", "if templates_dir is None:\n", "templates_dir = '/opt/stack/os-apply-config/templates'\n", "return templates_dir\n", "if not os.path.isdir(templates_dir):\n", "templates_dir = '/opt/stack/os-config-applier/templates'\n", "if os.path.isdir(templates_dir) and not os.path.isdir(DEFAULT_TEMPLATES_DIR):\n", "logging.warning(\n 'Template directory %s is deprecated. The recommended location for template files is %s'\n , templates_dir, DEFAULT_TEMPLATES_DIR)\n", "templates_dir = DEFAULT_TEMPLATES_DIR\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Return'", "Condition", "Assign'", "Condition", "Expr'", "Assign'" ]
[ "def FUNC_34(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_80 = map(int, self.request.POST.getlist('case'))\n", "self._update_objects = TestCase.objects.filter(pk__in=case_ids)\n", "return self._update_objects\n" ]
[ "def get_update_targets(self):...\n", "\"\"\"docstring\"\"\"\n", "case_ids = map(int, self.request.POST.getlist('case'))\n", "self._update_objects = TestCase.objects.filter(pk__in=case_ids)\n", "return self._update_objects\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Return'" ]
[ "import time\n", "from urllib.parse import urlencode\n", "from urllib import urlencode\n", "from django.core.exceptions import SuspiciousOperation\n", "from django.core.urlresolvers import reverse\n", "from django.contrib import auth\n", "from django.http import HttpResponseRedirect\n", "from django.utils.crypto import get_random_string\n", "from django.utils.module_loading import import_string\n", "from django.views.generic import View\n", "from mozilla_django_oidc.utils import absolutify, import_from_settings, is_authenticated\n", "\"\"\"OIDC client authentication callback HTTP endpoint\"\"\"\n", "VAR_0 = ['get']\n", "@property...\n", "return import_from_settings('LOGIN_REDIRECT_URL_FAILURE', '/')\n" ]
[ "import time\n", "from urllib.parse import urlencode\n", "from urllib import urlencode\n", "from django.core.exceptions import SuspiciousOperation\n", "from django.core.urlresolvers import reverse\n", "from django.contrib import auth\n", "from django.http import HttpResponseRedirect\n", "from django.utils.crypto import get_random_string\n", "from django.utils.module_loading import import_string\n", "from django.views.generic import View\n", "from mozilla_django_oidc.utils import absolutify, import_from_settings, is_authenticated\n", "\"\"\"OIDC client authentication callback HTTP endpoint\"\"\"\n", "http_method_names = ['get']\n", "@property...\n", "return import_from_settings('LOGIN_REDIRECT_URL_FAILURE', '/')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Expr'", "Assign'", "Condition", "Return'" ]
[ "def FUNC_1(VAR_29, VAR_30, VAR_31, VAR_32, VAR_33):...\n", "VAR_31.pack_forget()\n", "VAR_10.forget()\n", "VAR_40 = VAR_32, VAR_33\n", "for key, value in VAR_25.items():\n", "if value == VAR_40:\n", "FUNC_5(VAR_29, VAR_32, VAR_33)\n", "key.destroy()\n", "FUNC_4(VAR_30)\n", "VAR_41 = Label(VAR_15, text=recipe, bg='#f8f8f8')\n", "VAR_41.grid(VAR_32=row, VAR_33=column)\n", "VAR_41.bind('<Button-1>', lambda event: FUNC_2(VAR_29))\n", "VAR_15.pack()\n" ]
[ "def add_recipe(recipe, ingredients, view, row, column):...\n", "view.pack_forget()\n", "viewRecipeFrame.forget()\n", "searchIndex = row, column\n", "for key, value in buttonDict.items():\n", "if value == searchIndex:\n", "save_weeks_recipes(recipe, row, column)\n", "key.destroy()\n", "save_ingredients(ingredients)\n", "recipeLabel = Label(menu, text=recipe, bg='#f8f8f8')\n", "recipeLabel.grid(row=row, column=column)\n", "recipeLabel.bind('<Button-1>', lambda event: callback(recipe))\n", "menu.pack()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Assign'", "For", "Condition", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "@VAR_7.route('/create-calendar', methods=['GET', 'POST'])...\n", "VAR_10 = FUNC_0()\n", "if not VAR_10:\n", "return 'Unauthorized'\n", "VAR_8, VAR_3 = FUNC_1()\n", "if request.method == 'POST':\n", "VAR_16, VAR_17 = calendar.createCalendar(request.form['calendarName'],\n request.form['day'], VAR_8, VAR_6)\n", "return render_template('calendar/create/create.html')\n", "if VAR_16:\n", "return render_template('calendar/create/success.html', VAR_17=calendarId)\n", "return render_template('calendar/error.html')\n" ]
[ "@app.route('/create-calendar', methods=['GET', 'POST'])...\n", "isAuthorized = isUserAuthorized()\n", "if not isAuthorized:\n", "return 'Unauthorized'\n", "username, password = getUsernameAndPassword()\n", "if request.method == 'POST':\n", "res, calendarId = calendar.createCalendar(request.form['calendarName'],\n request.form['day'], username, mysql)\n", "return render_template('calendar/create/create.html')\n", "if res:\n", "return render_template('calendar/create/success.html', calendarId=calendarId)\n", "return render_template('calendar/error.html')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Assign'", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_0(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "if len(VAR_0) == 22 and VAR_0[16:22] == '.onion':\n", "return True\n", "VAR_2 = str(VAR_0).split(':')\n", "if len(VAR_2) == 2:\n", "VAR_0 = VAR_2[0]\n", "if VAR_0 in GLSetting.accepted_hosts:\n", "return True\n", "log.debug('Error in host requested: %s not accepted between: %s ' % (VAR_0,\n GLSetting.accepted_hosts))\n", "return False\n" ]
[ "def validate_host(host_key):...\n", "\"\"\"docstring\"\"\"\n", "if len(host_key) == 22 and host_key[16:22] == '.onion':\n", "return True\n", "hostchunk = str(host_key).split(':')\n", "if len(hostchunk) == 2:\n", "host_key = hostchunk[0]\n", "if host_key in GLSetting.accepted_hosts:\n", "return True\n", "log.debug('Error in host requested: %s not accepted between: %s ' % (\n host_key, GLSetting.accepted_hosts))\n", "return False\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Assign'", "Condition", "Assign'", "Condition", "Return'", "Expr'", "Return'" ]
[ "def FUNC_0(VAR_1):...\n", "VAR_0[VAR_1.__name__] = VAR_1()\n", "return VAR_1\n" ]
[ "def register_level(cls):...\n", "FnLevel[cls.__name__] = cls()\n", "return cls\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "@VAR_0.simple_tag...\n", "VAR_24 = {'json': {'name': 'json', 'verbose_name': 'JSON'}, 'csv': {'name':\n 'csv', 'verbose_name': 'CSV'}, 'excel.csv': {'name': 'excel.csv',\n 'verbose_name': VAR_29('Excel compatible CSV')}}\n", "return VAR_24[VAR_11]\n" ]
[ "@register.simple_tag...\n", "format_infos = {'json': {'name': 'json', 'verbose_name': 'JSON'}, 'csv': {\n 'name': 'csv', 'verbose_name': 'CSV'}, 'excel.csv': {'name':\n 'excel.csv', 'verbose_name': _('Excel compatible CSV')}}\n", "return format_infos[format]\n" ]
[ 0, 0, 0 ]
[ "Condition", "Assign'", "Return'" ]
[ "def FUNC_7(self, VAR_7, VAR_3=None):...\n", "return self.get(VAR_7, VAR_3=filters, VAR_4=1)[0]\n" ]
[ "def getone(self, key, filters=None):...\n", "return self.get(key, filters=filters, limit=1)[0]\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __init__(self, VAR_2):...\n", "\"\"\"docstring\"\"\"\n", "self.comp_name = VAR_2\n" ]
[ "def __init__(self, comp_name):...\n", "\"\"\"docstring\"\"\"\n", "self.comp_name = comp_name\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'" ]
[ "def FUNC_4(VAR_11, VAR_12=False):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_1['VERBOSITY'] >= 1:\n", "VAR_0.info('$ ' + ' '.join(VAR_11))\n", "VAR_19 = dict()\n", "if VAR_1['VERBOSITY'] >= 3:\n", "VAR_19['stdout'] = io.open(os.devnull, 'wb')\n", "VAR_32 = subprocess.call(VAR_11, **kwargs)\n", "VAR_19['stderr'] = subprocess.STDOUT\n", "if not VAR_12 and VAR_32 != 0:\n" ]
[ "def sh(cmdline, ignore_failure=False):...\n", "\"\"\"docstring\"\"\"\n", "if CONFIG['VERBOSITY'] >= 1:\n", "logger.info('$ ' + ' '.join(cmdline))\n", "kwargs = dict()\n", "if CONFIG['VERBOSITY'] >= 3:\n", "kwargs['stdout'] = io.open(os.devnull, 'wb')\n", "ret = subprocess.call(cmdline, **kwargs)\n", "kwargs['stderr'] = subprocess.STDOUT\n", "if not ignore_failure and ret != 0:\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "VAR_9 = exifread.process_file(VAR_1, details=False)\n", "if not len(VAR_9.keys()):\n", "VAR_28 = \"This picture doesn't contain EXIF.\"\n", "VAR_10 = VAR_9.get('EXIF DateTimeOriginal', None)\n", "log.info(VAR_28)\n", "VAR_10 = str(VAR_10) if VAR_10 else None\n", "VAR_11 = str(VAR_9.get('Image Make', ''))\n", "VAR_12 = str(VAR_9.get('Image Model', ''))\n", "VAR_13 = str(VAR_9.get('EXIF LensMake', ''))\n", "VAR_14 = str(VAR_9.get('EXIF LensModel', ''))\n", "if not any([VAR_10, VAR_11, VAR_12, VAR_13, VAR_14]):\n", "VAR_28 = 'There is no data of interest in this photo'\n", "VAR_29 = str(VAR_9['GPS GPSLatitudeRef'])\n", "log.info(\"This picture doesn't contain coordinates.\")\n", "return VAR_10, VAR_11, VAR_12, VAR_13, VAR_14, VAR_29, VAR_30, VAR_31, VAR_32\n", "log.info(VAR_28)\n", "VAR_30 = VAR_9['GPS GPSLatitude']\n", "return VAR_10, VAR_11, VAR_12, VAR_13, VAR_14\n", "VAR_31 = str(VAR_9['GPS GPSLongitudeRef'])\n", "VAR_32 = VAR_9['GPS GPSLongitude']\n" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "exif = exifread.process_file(file, details=False)\n", "if not len(exif.keys()):\n", "reason = \"This picture doesn't contain EXIF.\"\n", "date_time = exif.get('EXIF DateTimeOriginal', None)\n", "log.info(reason)\n", "date_time = str(date_time) if date_time else None\n", "camera_brand = str(exif.get('Image Make', ''))\n", "camera_model = str(exif.get('Image Model', ''))\n", "lens_brand = str(exif.get('EXIF LensMake', ''))\n", "lens_model = str(exif.get('EXIF LensModel', ''))\n", "if not any([date_time, camera_brand, camera_model, lens_brand, lens_model]):\n", "reason = 'There is no data of interest in this photo'\n", "latitude_reference = str(exif['GPS GPSLatitudeRef'])\n", "log.info(\"This picture doesn't contain coordinates.\")\n", "return date_time, camera_brand, camera_model, lens_brand, lens_model, latitude_reference, raw_latitude, longitude_reference, raw_longitude\n", "log.info(reason)\n", "raw_latitude = exif['GPS GPSLatitude']\n", "return date_time, camera_brand, camera_model, lens_brand, lens_model\n", "longitude_reference = str(exif['GPS GPSLongitudeRef'])\n", "raw_longitude = exif['GPS GPSLongitude']\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Return'", "Expr'", "Assign'", "Return'", "Assign'", "Assign'" ]
[ "def FUNC_4(VAR_3):...\n", "if not VAR_3:\n", "return errors.BAD_USERNAME_CHARS\n", "if len(VAR_3) < 3:\n", "return errors.BAD_USERNAME_SHORT\n", "if len(VAR_3) > 20:\n", "return errors.BAD_USERNAME_LONG\n", "return errors.BAD_USERNAME_CHARS\n" ]
[ "def whyuserbad(x):...\n", "if not x:\n", "return errors.BAD_USERNAME_CHARS\n", "if len(x) < 3:\n", "return errors.BAD_USERNAME_SHORT\n", "if len(x) > 20:\n", "return errors.BAD_USERNAME_LONG\n", "return errors.BAD_USERNAME_CHARS\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_13(self, *VAR_16, **VAR_10):...\n", "if not self.is_group_user(VAR_8):\n", "return VAR_7(self, *VAR_16, **kwargs)\n" ]
[ "def wrapper(self, *args, **kwargs):...\n", "if not self.is_group_user(group):\n", "return method(self, *args, **kwargs)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'" ]
[ "@VAR_0.route('/entry_type')...\n", "VAR_5 = ['Metals', 'Organics']\n", "return render_template('entry.html', VAR_5=methods)\n" ]
[ "@app.route('/entry_type')...\n", "methods = ['Metals', 'Organics']\n", "return render_template('entry.html', methods=methods)\n" ]
[ 0, 0, 0 ]
[ "Condition", "Assign'", "Return'" ]
[ "import discord\n", "from discord.ext import commands\n", "from sys import argv\n", "\"\"\"\n Commands that will mostly be used in #help-and-questions.\n \"\"\"\n", "def __init__(self, VAR_0):...\n", "self.bot = VAR_0\n", "print('Addon \"{}\" loaded'.format(self.__class__.__name__))\n", "async def FUNC_1(self, VAR_1, VAR_2='', VAR_3=discord.Color.default()):...\n", "VAR_7 = discord.Embed(VAR_2=title, VAR_3=color)\n", "VAR_7.description = VAR_1\n", "await self.bot.say('', VAR_7=embed)\n", "@commands.command(pass_context=True, name='sr', hidden=True)...\n", "\"\"\"docstring\"\"\"\n", "VAR_8 = VAR_4.message.author\n", "if self.bot.helpers_role not in VAR_8.roles and self.bot.staff_role not in VAR_8.roles and self.bot.verified_role not in VAR_8.roles and self.bot.trusted_role not in VAR_8.roles:\n", "VAR_9 = 'string'.format(VAR_8.mention)\n", "await self.bot.delete_message(VAR_4.message)\n", "await self.bot.say(VAR_9)\n", "VAR_9 = '❗️ **Assistance requested**: {0} by {1} | {2}#{3} @here'.format(VAR_4\n .message.channel.mention, VAR_8.mention, VAR_8.name, VAR_4.message.\n author.discriminator)\n", "return\n", "if VAR_5 != '':\n", "VAR_7 = discord.Embed(VAR_3=discord.Color.gold())\n", "await self.bot.send_message(self.bot.mods_channel, VAR_9, VAR_7=embed if \n msg_request != '' else None)\n", "VAR_7.description = VAR_5\n", "await self.bot.send_message(VAR_8,\n '✅ Online staff has been notified of your request in {0}.'.format(VAR_4\n .message.channel.mention), VAR_7=embed if msg_request != '' else None)\n", "@commands.command(pass_context=True)...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = VAR_6.lower()\n", "if VAR_6 == '3ds' or VAR_6 == 'auto' and 'wiiu' not in VAR_4.message.channel.name:\n", "VAR_7 = discord.Embed(VAR_2='Guide', VAR_3=discord.Color(13506590))\n", "if (VAR_6 == 'wiiu' or VAR_6 == 'wii u'\n", "VAR_7.set_author(name='Plailect', url='https://3ds.guide/')\n", "VAR_7 = discord.Embed(VAR_2='Guide', VAR_3=discord.Color(39623))\n", "@commands.command()...\n", "VAR_7.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n", "VAR_7.set_author(name='FlimFlam69 & Plailect', url='https://wiiu.guide/')\n", "\"\"\"docstring\"\"\"\n", "VAR_7.url = 'https://3ds.guide/'\n", "VAR_7.set_thumbnail(url='http://i.imgur.com/CpF12I4.png')\n", "VAR_7 = discord.Embed(VAR_2='Soundhax', VAR_3=discord.Color.blue())\n", "VAR_7.description = (\n 'A complete guide to 3DS custom firmware, from stock to boot9strap.')\n", "VAR_7.url = 'https://wiiu.guide/'\n", "VAR_7.set_author(name='Ned Williamson', url='http://soundhax.com/')\n", "await self.bot.say('', VAR_7=embed)\n", "VAR_7.description = (\n \"FlimFlam69 and Plailect's Wii U custom firmware + coldboothax guide\")\n", "VAR_7.set_thumbnail(url='http://i.imgur.com/lYf0jan.png')\n", "await self.bot.say('', VAR_7=embed)\n", "VAR_7.url = 'http://soundhax.com'\n", "VAR_7.description = 'Free 3DS Primary Entrypoint <= 11.3'\n", "await self.bot.say('', VAR_7=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = discord.Embed(VAR_2='Dsp1', VAR_3=discord.Color.green())\n", "VAR_7.set_author(name='zoogie', url='https://github.com/zoogie', icon_url=\n 'https://gbatemp.net/data/avatars/l/357/357147.jpg?1426471484')\n", "VAR_7.description = \"Dump 3DS's DSP component to SD for homebrew audio.\"\n", "VAR_7.set_thumbnail(url=\n 'https://raw.githubusercontent.com/Cruel/DspDump/master/icon.png')\n", "VAR_7.url = 'https://github.com/zoogie/DSP1/releases'\n", "await self.bot.say('', VAR_7=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", 
"VAR_7 = discord.Embed(VAR_2='NTR Streaming Guide', VAR_3=discord.Color.blue())\n", "VAR_7.url = 'string'\n", "VAR_7.description = 'How to use NTR CFW with Nitro Stream to Wirelessly Stream'\n", "VAR_7.add_field(name='4 common fixes', value=\n \"\"\"• Are you connected to the Internet?\n• Is your antivirus program blocking the program?\n• Make sure you are not putting the port (:####) into the IP box of Nitro Stream.\n• Make sure you are on the latest preview for NTR 3.6.\"\"\"\n )\n", "await self.bot.say('', VAR_7=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = discord.Embed(VAR_2='Updating B9S Guide', VAR_3=discord.Color(13506590)\n )\n", "VAR_7.set_author(name='Plailect', url='https://3ds.guide/updating-b9s')\n", "VAR_7.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n", "VAR_7.url = 'https://3ds.guide/updating-b9s'\n", "VAR_7.description = 'A guide for updating to new B9S versions.'\n", "await self.bot.say('', VAR_7=embed)\n", "@commands.command(aliases=['a9lhtob9s', 'updatea9lh'])...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = discord.Embed(VAR_2='Upgrading a9lh to b9s', VAR_3=discord.Color(\n 13506590))\n", "VAR_7.set_author(name='Plailect', url='https://3ds.guide/a9lh-to-b9s')\n", "VAR_7.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n", "VAR_7.url = 'https://3ds.guide/a9lh-to-b9s'\n", "VAR_7.description = (\n 'A guide for upgrading your device from arm9loaderhax to boot9strap.')\n", "await self.bot.say('', VAR_7=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.bot.say('https://3ds.guide/troubleshooting#gw_fbi')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = discord.Embed(VAR_2='Guide - ctrtransfer', VAR_3=discord.Color.orange()\n )\n", "VAR_7.set_author(name='Plailect', url='https://3ds.guide/')\n", "VAR_7.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n", "VAR_7.url = 'https://3ds.guide/ctrtransfer'\n", "VAR_7.description = 'How to do the 11.5.0-38 ctrtransfer'\n", "await self.bot.say('', VAR_7=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string', VAR_3=discord.Color.red())\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = discord.Embed(VAR_2='Why you should not use video guides', VAR_3=\n discord.Color.dark_orange())\n", "VAR_7.description = 'string'\n", "VAR_7.add_field(name='Recommended', value=\n \"The recommended thing to do is to use [Plailect's written complete guide for boot9strap](https://3ds.guide). It is the most up to date one and is recommended for everyone.\"\n )\n", "await self.bot.say('', VAR_7=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.bot.say('https://www.youtube.com/watch?v=miVDKgInzyg')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = discord.Embed(VAR_2='Check your 3DSs IP (CFW)', VAR_3=discord.Color\n .dark_orange())\n", "VAR_7.description = \"\"\"1. FBI\n2. Remote Install\n3. 
Recieve URLs over the network\"\"\"\n", "VAR_7.add_field(name='Check your 3DSs IP (Homebrew)', value=\n \"\"\"1. Open Homebrew Launcher\n2. Press Y\"\"\")\n", "await self.bot.say('', VAR_7=embed)\n", "@commands.command(aliases=['stock115', 'stock'])...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = discord.Embed(VAR_2='Running stock (unmodified) 11.4+ firmware?',\n VAR_3=discord.Color.dark_orange())\n", "VAR_7.description = 'string'\n", "await self.bot.say('', VAR_7=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string', VAR_2='Please read the guide')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string', VAR_2='Big SD cards')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string', VAR_2='SD Card Errors')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string', VAR_2='No. You are not bricked')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string', VAR_2='EmuNAND Recommendation')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string', VAR_3=discord.Color(39623))\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.bot.say('https://www.nintendo.co.jp/netinfo/en_US/index.html')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string', VAR_3=discord.Color.red())\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = discord.Embed(VAR_2='Fix broken TWL', VAR_3=discord.Color(10664672))\n", "VAR_7.set_author(name='Plailect', url=\n 'https://3ds.guide/troubleshooting#twl_broken')\n", "VAR_7.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n", "VAR_7.url = 'https://3ds.guide/troubleshooting#twl_broken'\n", "VAR_7.description = (\n 'Instructions on how to fix a broken TWL after doing the guide')\n", "await self.bot.say('', VAR_7=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string', VAR_2=\n 'If you get a red screen trying to open the Homebrew Launcher')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string', VAR_2='How to clear Home Menu extdata')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string', VAR_2='How to delete Home Menu Theme Data')\n", "@commands.command(aliases=['godmode9'])...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = discord.Embed(VAR_2='GodMode9 Usage', VAR_3=discord.Color(6750207))\n", "VAR_7.set_author(name='Plailect', url='https://3ds.guide/godmode9-usage')\n", "VAR_7.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n", "VAR_7.url = 'https://3ds.guide/godmode9-usage'\n", "VAR_7.description = 'GodMode9 usage guide'\n", "await self.bot.say('', VAR_7=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed('string')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = discord.Embed(VAR_2=\n 'Launcher for old flashcards (r4,m3,dstt,dsx,etc)', VAR_3=discord.Color\n (4387938))\n", "VAR_7.set_author(name='Apache Thunder', url=\n 'https://gbatemp.net/threads/r4-stage2-twl-flashcart-launcher-and-perhaps-other-cards-soon%E2%84%A2.416434/'\n )\n", 
"VAR_7.set_thumbnail(url='https://gbatemp.net/data/avatars/m/105/105648.jpg')\n", "VAR_7.url = 'string'\n", "VAR_7.description = 'Launcher for old flashcards'\n", "await self.bot.say('', VAR_7=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = discord.Embed(VAR_2='Virtual Console Injects for 3DS', VAR_3=\n discord.Color.blue())\n", "VAR_7.set_author(name='Asdolo', url=\n 'https://gbatemp.net/members/asdolo.389539/')\n", "VAR_7.set_thumbnail(url='https://i.imgur.com/rHa76XM.png')\n", "VAR_7.url = (\n 'https://gbatemp.net/search/40920047/?q=injector&t=post&o=date&g=1&c[title_only]=1&c[user][0]=389539'\n )\n", "VAR_7.description = 'The recommended way to play old classics on your 3DS'\n", "await self.bot.say('', VAR_7=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = discord.Embed(VAR_2='GodMode9 dump/build Guide', VAR_3=discord.\n Color(6750207))\n", "VAR_7.set_author(name='ih8ih8sn0w', url='https://pastebin.com/sx8HYULr')\n", "VAR_7.set_thumbnail(url='http://i.imgur.com/QEUfyrp.png')\n", "VAR_7.url = 'https://pastebin.com/sx8HYULr'\n", "VAR_7.description = 'How to dump/build CIAs using GodMode9'\n", "await self.bot.say('', VAR_7=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = discord.Embed(VAR_2='LayeredFs Guide', VAR_3=discord.Color(6750207))\n", "VAR_7.set_author(name='ih8ih8sn0w', url='https://pastebin.com/sx8HYULr')\n", "VAR_7.set_thumbnail(url='http://i.imgur.com/QEUfyrp.png')\n", "VAR_7.url = 'https://pastebin.com/QdzBv4Te'\n", "VAR_7.description = 'How to use Luma 8.0+ LayeredFs for ROM Hacking.'\n", "await self.bot.say('', VAR_7=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = discord.Embed(VAR_2='Sighax Information', VAR_3=discord.Color(255))\n", "VAR_7.set_author(name='SciresM', url=\n 'https://www.reddit.com/r/3dshacks/comments/67f6as/psa_clearing_up_some_misconceptions_about_sighax/'\n )\n", "VAR_7.set_thumbnail(url='https://i.imgur.com/11ajkdJ.jpg')\n", "VAR_7.url = (\n 'https://www.reddit.com/r/3dshacks/comments/67f6as/psa_clearing_up_some_misconceptions_about_sighax/'\n )\n", "VAR_7.description = 'PSA About Sighax'\n", "await self.bot.say('', VAR_7=embed)\n", "@commands.command(pass_context=True, name='7zip')...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = discord.Embed(VAR_2='Download 7zip', VAR_3=discord.Color(255))\n", "VAR_7.set_thumbnail(url='http://i.imgur.com/cX1fuf6.png')\n", "VAR_7.url = 'http://www.7-zip.org/download.html'\n", "VAR_7.description = (\n 'To be able to extract .7z files you need 7zip installed, get it here.')\n", "await self.bot.say('', VAR_7=embed)\n", "def FUNC_0(VAR_0):...\n", "VAR_0.add_cog(CLASS_0(VAR_0))\n" ]
[ "import discord\n", "from discord.ext import commands\n", "from sys import argv\n", "\"\"\"\n Commands that will mostly be used in #help-and-questions.\n \"\"\"\n", "def __init__(self, bot):...\n", "self.bot = bot\n", "print('Addon \"{}\" loaded'.format(self.__class__.__name__))\n", "async def simple_embed(self, text, title='', color=discord.Color.default()):...\n", "embed = discord.Embed(title=title, color=color)\n", "embed.description = text\n", "await self.bot.say('', embed=embed)\n", "@commands.command(pass_context=True, name='sr', hidden=True)...\n", "\"\"\"docstring\"\"\"\n", "author = ctx.message.author\n", "if self.bot.helpers_role not in author.roles and self.bot.staff_role not in author.roles and self.bot.verified_role not in author.roles and self.bot.trusted_role not in author.roles:\n", "msg = (\n '{0} You cannot used this command at this time. Please ask individual staff members if you need help.'\n .format(author.mention))\n", "await self.bot.delete_message(ctx.message)\n", "await self.bot.say(msg)\n", "msg = '❗️ **Assistance requested**: {0} by {1} | {2}#{3} @here'.format(ctx.\n message.channel.mention, author.mention, author.name, ctx.message.\n author.discriminator)\n", "return\n", "if msg_request != '':\n", "embed = discord.Embed(color=discord.Color.gold())\n", "await self.bot.send_message(self.bot.mods_channel, msg, embed=embed if \n msg_request != '' else None)\n", "embed.description = msg_request\n", "await self.bot.send_message(author,\n '✅ Online staff has been notified of your request in {0}.'.format(ctx.\n message.channel.mention), embed=embed if msg_request != '' else None)\n", "@commands.command(pass_context=True)...\n", "\"\"\"docstring\"\"\"\n", "console = console.lower()\n", "if console == '3ds' or console == 'auto' and 'wiiu' not in ctx.message.channel.name:\n", "embed = discord.Embed(title='Guide', color=discord.Color(13506590))\n", "if (console == 'wiiu' or console == 'wii u'\n", "embed.set_author(name='Plailect', url='https://3ds.guide/')\n", "embed = discord.Embed(title='Guide', color=discord.Color(39623))\n", "@commands.command()...\n", "embed.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n", "embed.set_author(name='FlimFlam69 & Plailect', url='https://wiiu.guide/')\n", "\"\"\"docstring\"\"\"\n", "embed.url = 'https://3ds.guide/'\n", "embed.set_thumbnail(url='http://i.imgur.com/CpF12I4.png')\n", "embed = discord.Embed(title='Soundhax', color=discord.Color.blue())\n", "embed.description = (\n 'A complete guide to 3DS custom firmware, from stock to boot9strap.')\n", "embed.url = 'https://wiiu.guide/'\n", "embed.set_author(name='Ned Williamson', url='http://soundhax.com/')\n", "await self.bot.say('', embed=embed)\n", "embed.description = (\n \"FlimFlam69 and Plailect's Wii U custom firmware + coldboothax guide\")\n", "embed.set_thumbnail(url='http://i.imgur.com/lYf0jan.png')\n", "await self.bot.say('', embed=embed)\n", "embed.url = 'http://soundhax.com'\n", "embed.description = 'Free 3DS Primary Entrypoint <= 11.3'\n", "await self.bot.say('', embed=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "embed = discord.Embed(title='Dsp1', color=discord.Color.green())\n", "embed.set_author(name='zoogie', url='https://github.com/zoogie', icon_url=\n 'https://gbatemp.net/data/avatars/l/357/357147.jpg?1426471484')\n", "embed.description = \"Dump 3DS's DSP component to SD for homebrew audio.\"\n", "embed.set_thumbnail(url=\n 'https://raw.githubusercontent.com/Cruel/DspDump/master/icon.png')\n", "embed.url = 
'https://github.com/zoogie/DSP1/releases'\n", "await self.bot.say('', embed=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "embed = discord.Embed(title='NTR Streaming Guide', color=discord.Color.blue())\n", "embed.url = (\n 'https://gbatemp.net/threads/tutorial-3ds-screen-recording-without-a-capture-card-ntr-cfw-method.423445/'\n )\n", "embed.description = 'How to use NTR CFW with Nitro Stream to Wirelessly Stream'\n", "embed.add_field(name='4 common fixes', value=\n \"\"\"• Are you connected to the Internet?\n• Is your antivirus program blocking the program?\n• Make sure you are not putting the port (:####) into the IP box of Nitro Stream.\n• Make sure you are on the latest preview for NTR 3.6.\"\"\"\n )\n", "await self.bot.say('', embed=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n \"\"\"If you have boot9strap and Luma3DS installed after following Plailect's guide, run Luma Updater to make sure it is on the latest Luma3DS normal version and then you can proceed to update your 3DS through system settings. \nNTR CFW works on the latest version.\n; Use this version of BootNTR: \n<https://github.com/Nanquitas/BootNTR/releases>\nNote: if there is a homebrew application that is no longer working, it may exist as a CIA that you can download under the TitleDB option in FBI.\n\n If you still have arm9loaderhax you can update to boot9strap following [this guide](https://3ds.guide/updating-to-boot9strap)\"\"\"\n )\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "embed = discord.Embed(title='Updating B9S Guide', color=discord.Color(13506590)\n )\n", "embed.set_author(name='Plailect', url='https://3ds.guide/updating-b9s')\n", "embed.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n", "embed.url = 'https://3ds.guide/updating-b9s'\n", "embed.description = 'A guide for updating to new B9S versions.'\n", "await self.bot.say('', embed=embed)\n", "@commands.command(aliases=['a9lhtob9s', 'updatea9lh'])...\n", "\"\"\"docstring\"\"\"\n", "embed = discord.Embed(title='Upgrading a9lh to b9s', color=discord.Color(\n 13506590))\n", "embed.set_author(name='Plailect', url='https://3ds.guide/a9lh-to-b9s')\n", "embed.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n", "embed.url = 'https://3ds.guide/a9lh-to-b9s'\n", "embed.description = (\n 'A guide for upgrading your device from arm9loaderhax to boot9strap.')\n", "await self.bot.say('', embed=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.bot.say('https://3ds.guide/troubleshooting#gw_fbi')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n \"Don't want to hardmod yourself? Ask one of the installers on the server! 
<http://pastebin.com/wNr42PtH>\"\n )\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n \"\"\"Astronautlevel's Luma3DS commit builds can be found here: https://astronautlevel2.github.io/Luma3DS \n(Warning: most builds here are meant for developers and are untested, use at your own risk!)\"\"\"\n )\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "embed = discord.Embed(title='Guide - ctrtransfer', color=discord.Color.orange()\n )\n", "embed.set_author(name='Plailect', url='https://3ds.guide/')\n", "embed.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n", "embed.url = 'https://3ds.guide/ctrtransfer'\n", "embed.description = 'How to do the 11.5.0-38 ctrtransfer'\n", "await self.bot.say('', embed=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n 'To install mods for Smash, [Smash Selector](https://gbatemp.net/threads/release-smash-selector.431245/) is recommended. Instructions for use can be found on the page.'\n )\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n 'While on 2.1, **NEVER** shut the N3DS lid, update any model, format a 2DS or attempt to play a game on a cartridge. Doing any of these things *will* brick your system.'\n , color=discord.Color.red())\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n '> Reminder: if you would like someone to help you, please be as descriptive as possible, of your situation, things you have done, as little as they may seem, aswell as assisting materials. Asking to ask wont expedite your process, and may delay assistance.'\n )\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n \" **Please keep the channels clean and on-topic, further derailing will result in intervention. A staff or helper will be the quickest route to resolution; you can contact available staff by private messaging the Mod-mail bot.** A full list of staff and helpers can be found in #welcome-and-rules if you don't know who they are.\"\n )\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "embed = discord.Embed(title='Why you should not use video guides', color=\n discord.Color.dark_orange())\n", "embed.description = \"\"\"\"Video guides\" for custom firmware and arm9loaderhax/boot9strap are not recommended for use. Their contents generally become outdated very quickly for them to be of any use, and they are harder to update unlike a written guide.\n\nWhen this happens, video guides become more complicated than current methods, having users do certain tasks which may not be required anymore.\n\nThere is also a risk of the uploader spreading misinformation or including potentially harmful files, sometimes unintentionally. Using other people's files to install arm9loaderhax can cause serious issues and even brick your system.\"\"\"\n", "embed.add_field(name='Recommended', value=\n \"The recommended thing to do is to use [Plailect's written complete guide for boot9strap](https://3ds.guide). It is the most up to date one and is recommended for everyone.\"\n )\n", "await self.bot.say('', embed=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.bot.say('https://www.youtube.com/watch?v=miVDKgInzyg')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "embed = discord.Embed(title='Check your 3DSs IP (CFW)', color=discord.Color\n .dark_orange())\n", "embed.description = \"\"\"1. FBI\n2. Remote Install\n3. 
Recieve URLs over the network\"\"\"\n", "embed.add_field(name='Check your 3DSs IP (Homebrew)', value=\n \"\"\"1. Open Homebrew Launcher\n2. Press Y\"\"\")\n", "await self.bot.say('', embed=embed)\n", "@commands.command(aliases=['stock115', 'stock'])...\n", "\"\"\"docstring\"\"\"\n", "embed = discord.Embed(title='Running stock (unmodified) 11.4+ firmware?',\n color=discord.Color.dark_orange())\n", "embed.description = \"\"\"You have 3 possible options for installing CFW:\n- [NTRBoot](https://3ds.guide/ntrboot) which needs a compatible DS flashcart and maybe an additional hacked 3DS or DS(i) console depending on the flashcart\n- [DSiWare](https://3ds.guide/installing-boot9strap-\\\\(dsiware\\\\)) which requires a hacked 3DS\n- [Hardmod](https://3ds.guide/installing-boot9strap-\\\\(hardmod\\\\)) which requires soldering **Not for beginners!**\n **Downgrading is impossible on 11.4+!**\"\"\"\n", "await self.bot.say('', embed=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n 'If you are looking for homebrew on your stock 11.4+ 3DS, you will need an entrypoint (like ninjhax, freakyhax, etc) for launching homebrew launcher'\n )\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n 'Asking something that is on the guide will make everyone lose time, so please read and re-read the guide steps 2 or 3 times before coming here.'\n , title='Please read the guide')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n \"\"\"If you want to change your SD card to one bigger than 32GB then you'll have to format it to FAT32.\nYou can do this with the tool of your preference.\nFormatter examples:\n- [guiformat - Windows](http://www.ridgecrop.demon.co.uk/index.htm?guiformat.htm)\n- [gparted - Linux](http://gparted.org/download.php)\"\"\"\n , title='Big SD cards')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n \"\"\"Guide For Checking SD Card For Errors\n- [H2testw Guide - Windows](https://3ds.guide/h2testw-(windows\\\\))\n- [F3 Guide - Linux](https://3ds.guide/f3-(linux\\\\))\n- [F3X Guide - Mac](https://3ds.guide/f3x-(mac\\\\))\"\"\"\n , title='SD Card Errors')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n \"\"\"If your power LED turns on and off after you installed b9s, you are not bricked and are just missing a file called boot.firm in the root of your SD card.\nTo fix this you should:\n1.Check you inserted the SD card in your console\n2.Place/replace the file, downloading it from https://github.com/AuroraWright/Luma3DS/releases\nChecking your SD for errors or corruption:\n\tWindows: https://3ds.guide/h2testw-(windows)#\n\tLinux: https://3ds.guide/f3-(linux)#\n\tMac: https://3ds.guide/f3x-(mac)#\"\"\"\n , title='No. You are not bricked')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n \"If you want to set up an EmuNAND the first thing to know is that you probably don't need it; if you don't know what an EmuNAND is, you don't need one.\"\n , title='EmuNAND Recommendation')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n \"A failed update in Download Management does not mean there is an update and the system is trying to download it. This means your blocking method (DNS etc.) 
is working and the system can't check for an update.\"\n , color=discord.Color(39623))\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.bot.say('https://www.nintendo.co.jp/netinfo/en_US/index.html')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n 'While following the guide, after installing boot9strap, if you get an error that says \"Failed to mount CTRNAND\", just continue on with the guide.'\n )\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n \"\"\"If you have lost the contents of your SD card with CFW, you will need in SD root:\n-Homebrew launcher executable [here](https://smealum.github.io/ninjhax2/boot.3dsx)\n-`boot.firm` from [luma3ds latest release 7z](https://github.com/AuroraWright/Luma3DS/releases/latest)\nThen repeat the [finalizing setup](https://3ds.guide/finalizing-setup) page.\"\"\"\n , color=discord.Color.red())\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "embed = discord.Embed(title='Fix broken TWL', color=discord.Color(10664672))\n", "embed.set_author(name='Plailect', url=\n 'https://3ds.guide/troubleshooting#twl_broken')\n", "embed.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n", "embed.url = 'https://3ds.guide/troubleshooting#twl_broken'\n", "embed.description = (\n 'Instructions on how to fix a broken TWL after doing the guide')\n", "await self.bot.say('', embed=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n \"\"\"A red screen indicates that there is no boot.3dsx on root.\nIf you have a starter folder on root, place the contents of the starter folder on root.\nIf not, redownload the [Homebrew Starter Kit](https://smealum.github.io/ninjhax2/starter.zip) and place the contents of the starter folder inside the .zip on root.\"\"\"\n , title='If you get a red screen trying to open the Homebrew Launcher')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n \"\"\"1. Navigate to the following folder on your SD card: `/Nintendo 3DS/(32 Character ID)/(32 Character ID)/extdata/00000000/`\n2. Delete the corresponding folder for your region:\n USA: `0000008f`\n EUR: `00000098`\n JPN: `00000082`\n KOR: `000000A9`\"\"\"\n , title='How to clear Home Menu extdata')\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n \"\"\"1. Navigate to the following folder on your SD card: `/Nintendo 3DS/(32 Character ID)/(32 Character ID)/extdata/00000000/`\n2. 
Delete the corresponding folder for your region:\n USA: `000002cd`\n EUR: `000002ce`\n JPN: `000002cc`\"\"\"\n , title='How to delete Home Menu Theme Data')\n", "@commands.command(aliases=['godmode9'])...\n", "\"\"\"docstring\"\"\"\n", "embed = discord.Embed(title='GodMode9 Usage', color=discord.Color(6750207))\n", "embed.set_author(name='Plailect', url='https://3ds.guide/godmode9-usage')\n", "embed.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n", "embed.url = 'https://3ds.guide/godmode9-usage'\n", "embed.description = 'GodMode9 usage guide'\n", "await self.bot.say('', embed=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "await self.simple_embed(\n 'If you are receiving a \"PM init failed\" error when attempting to launch safehax and are not on 11.3, use [this version of safehax.](https://github.com/TiniVi/safehax/releases/tag/r19)'\n )\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "embed = discord.Embed(title=\n 'Launcher for old flashcards (r4,m3,dstt,dsx,etc)', color=discord.Color\n (4387938))\n", "embed.set_author(name='Apache Thunder', url=\n 'https://gbatemp.net/threads/r4-stage2-twl-flashcart-launcher-and-perhaps-other-cards-soon%E2%84%A2.416434/'\n )\n", "embed.set_thumbnail(url='https://gbatemp.net/data/avatars/m/105/105648.jpg')\n", "embed.url = (\n 'https://gbatemp.net/threads/r4-stage2-twl-flashcart-launcher-and-perhaps-other-cards-soon%E2%84%A2.416434/'\n )\n", "embed.description = 'Launcher for old flashcards'\n", "await self.bot.say('', embed=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "embed = discord.Embed(title='Virtual Console Injects for 3DS', color=\n discord.Color.blue())\n", "embed.set_author(name='Asdolo', url=\n 'https://gbatemp.net/members/asdolo.389539/')\n", "embed.set_thumbnail(url='https://i.imgur.com/rHa76XM.png')\n", "embed.url = (\n 'https://gbatemp.net/search/40920047/?q=injector&t=post&o=date&g=1&c[title_only]=1&c[user][0]=389539'\n )\n", "embed.description = 'The recommended way to play old classics on your 3DS'\n", "await self.bot.say('', embed=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "embed = discord.Embed(title='GodMode9 dump/build Guide', color=discord.\n Color(6750207))\n", "embed.set_author(name='ih8ih8sn0w', url='https://pastebin.com/sx8HYULr')\n", "embed.set_thumbnail(url='http://i.imgur.com/QEUfyrp.png')\n", "embed.url = 'https://pastebin.com/sx8HYULr'\n", "embed.description = 'How to dump/build CIAs using GodMode9'\n", "await self.bot.say('', embed=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "embed = discord.Embed(title='LayeredFs Guide', color=discord.Color(6750207))\n", "embed.set_author(name='ih8ih8sn0w', url='https://pastebin.com/sx8HYULr')\n", "embed.set_thumbnail(url='http://i.imgur.com/QEUfyrp.png')\n", "embed.url = 'https://pastebin.com/QdzBv4Te'\n", "embed.description = 'How to use Luma 8.0+ LayeredFs for ROM Hacking.'\n", "await self.bot.say('', embed=embed)\n", "@commands.command()...\n", "\"\"\"docstring\"\"\"\n", "embed = discord.Embed(title='Sighax Information', color=discord.Color(255))\n", "embed.set_author(name='SciresM', url=\n 'https://www.reddit.com/r/3dshacks/comments/67f6as/psa_clearing_up_some_misconceptions_about_sighax/'\n )\n", "embed.set_thumbnail(url='https://i.imgur.com/11ajkdJ.jpg')\n", "embed.url = (\n 'https://www.reddit.com/r/3dshacks/comments/67f6as/psa_clearing_up_some_misconceptions_about_sighax/'\n )\n", "embed.description = 'PSA About Sighax'\n", "await self.bot.say('', embed=embed)\n", 
"@commands.command(pass_context=True, name='7zip')...\n", "\"\"\"docstring\"\"\"\n", "embed = discord.Embed(title='Download 7zip', color=discord.Color(255))\n", "embed.set_thumbnail(url='http://i.imgur.com/cX1fuf6.png')\n", "embed.url = 'http://www.7-zip.org/download.html'\n", "embed.description = (\n 'To be able to extract .7z files you need 7zip installed, get it here.')\n", "await self.bot.say('', embed=embed)\n", "def setup(bot):...\n", "bot.add_cog(Assistance(bot))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "ImportFrom'", "ImportFrom'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "AsyncFunctionDef'", "Assign'", "Assign'", "Expr'", "Condition", "Docstring", "Assign'", "Condition", "Assign'", "Expr'", "Expr'", "Assign'", "Return'", "Condition", "Assign'", "Expr'", "Assign'", "Expr'", "Condition", "Docstring", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "Condition", "Expr'", "Expr'", "Docstring", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Condition", "Docstring", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Condition", "Docstring", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Assign'", "Assign'", "Expr'", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Assign'", "Assign'", "Expr'", "Expr'", "Condition", "Docstring", "Assign'", "Assign'", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Condition", "Docstring", "Expr'", "Condition", "Docstring", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Condition", "Docstring", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Condition", "Docstring", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Condition", "Docstring", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Condition", "Docstring", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Condition", "Docstring", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Expr'" ]
[ "import numpy as np\n", "import matplotlib.pyplot as plt\n", "import pandas as pd\n", "VAR_0 = pd.read_csv('Data.csv')\n", "VAR_1 = VAR_0.iloc[:, :-1].values\n", "VAR_2 = VAR_0.iloc[:, (-1)].values\n" ]
[ "import numpy as np\n", "import matplotlib.pyplot as plt\n", "import pandas as pd\n", "dataset = pd.read_csv('Data.csv')\n", "X = dataset.iloc[:, :-1].values\n", "y = dataset.iloc[:, (-1)].values\n" ]
[ 0, 0, 0, 7, 0, 0 ]
[ "Import'", "Import'", "Import'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_20(self):...\n", "VAR_25 = c.get('/test2')\n", "self.assertEqual(VAR_25.status_code, 302)\n", "self.assertTrue(VAR_25.location.startswith(self.app.config[\n 'TOKEN_SERVICE_URL']))\n" ]
[ "def test_authn(self):...\n", "resp = c.get('/test2')\n", "self.assertEqual(resp.status_code, 302)\n", "self.assertTrue(resp.location.startswith(self.app.config['TOKEN_SERVICE_URL']))\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_2(self):...\n", "VAR_6 = self.client.get('/api/apps')\n", "self.assertEqual(VAR_6.status_code, 200)\n", "self.assertEqual(len(VAR_6.data['results']), 2)\n", "VAR_8 = VAR_6.data['results'][0]['id']\n", "self.assertTrue(self.client.login(VAR_1='autotest-2', VAR_2='password'))\n", "VAR_6 = self.client.get('/api/apps')\n", "self.assertEqual(len(VAR_6.data['results']), 1)\n", "for model in ['builds', 'config', 'containers', 'limits', 'releases']:\n", "VAR_6 = self.client.get('/api/apps/{}/{}/'.format(VAR_8, model))\n", "self.assertTrue(self.client.login(VAR_1='autotest-1', VAR_2='password'))\n", "self.assertEqual(VAR_6.data['detail'], 'Not found')\n", "VAR_5 = '/api/apps/{}/perms'.format(VAR_8)\n", "VAR_7 = {'username': 'autotest-2'}\n", "VAR_6 = self.client.post(VAR_5, json.dumps(VAR_7), content_type=\n 'application/json')\n", "self.assertEqual(VAR_6.status_code, 201)\n", "self.assertTrue(self.client.login(VAR_1='autotest-2', VAR_2='password'))\n", "VAR_6 = self.client.get('/api/apps')\n", "self.assertEqual(VAR_6.status_code, 200)\n", "self.assertEqual(len(VAR_6.data['results']), 2)\n", "for model in ['builds', 'containers', 'releases']:\n", "VAR_6 = self.client.get('/api/apps/{}/{}/'.format(VAR_8, model))\n", "self.assertEqual(len(VAR_6.data['results']), 0)\n" ]
[ "def test_create(self):...\n", "response = self.client.get('/api/apps')\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 2)\n", "app_id = response.data['results'][0]['id']\n", "self.assertTrue(self.client.login(username='autotest-2', password='password'))\n", "response = self.client.get('/api/apps')\n", "self.assertEqual(len(response.data['results']), 1)\n", "for model in ['builds', 'config', 'containers', 'limits', 'releases']:\n", "response = self.client.get('/api/apps/{}/{}/'.format(app_id, model))\n", "self.assertTrue(self.client.login(username='autotest-1', password='password'))\n", "self.assertEqual(response.data['detail'], 'Not found')\n", "url = '/api/apps/{}/perms'.format(app_id)\n", "body = {'username': 'autotest-2'}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "self.assertTrue(self.client.login(username='autotest-2', password='password'))\n", "response = self.client.get('/api/apps')\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 2)\n", "for model in ['builds', 'containers', 'releases']:\n", "response = self.client.get('/api/apps/{}/{}/'.format(app_id, model))\n", "self.assertEqual(len(response.data['results']), 0)\n" ]
[ 0, 5, 0, 0, 0, 5, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "For", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "For", "Assign'", "Expr'" ]
[ "def FUNC_26(VAR_20):...\n", "api.reset_local_state()\n", "return VAR_7.get('/request', VAR_20=headers).body\n" ]
[ "def call(headers):...\n", "api.reset_local_state()\n", "return app.get('/request', headers=headers).body\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Return'" ]
[ "from utils.admin import load_credentials\n", "import MySQLdb\n", "import json\n", "VAR_0 = load_credentials()\n", "VAR_1 = VAR_0['dbhost'], VAR_0['dbuser'], VAR_0['dbpass'], VAR_0['dbname']\n", "def __init__(self):...\n", "self.conn = MySQLdb.connect(*self.dsn)\n", "self.cur = self.conn.cursor()\n", "def __enter__(self):...\n", "return CLASS_0()\n" ]
[ "from utils.admin import load_credentials\n", "import MySQLdb\n", "import json\n", "credentials = load_credentials()\n", "dsn = credentials['dbhost'], credentials['dbuser'], credentials['dbpass'\n ], credentials['dbname']\n", "def __init__(self):...\n", "self.conn = MySQLdb.connect(*self.dsn)\n", "self.cur = self.conn.cursor()\n", "def __enter__(self):...\n", "return DBase()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "Import'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Return'" ]
[ "def FUNC_7(self):...\n", "self.assert404(self.client.get(url_for('oauthclient.authorized', remote_app\n ='invalid')))\n", "self.assert404(self.client.get(url_for('oauthclient.disconnect', remote_app\n ='invalid')))\n" ]
[ "def test_no_remote_app(self):...\n", "self.assert404(self.client.get(url_for('oauthclient.authorized', remote_app\n ='invalid')))\n", "self.assert404(self.client.get(url_for('oauthclient.disconnect', remote_app\n ='invalid')))\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'" ]
[ "async def FUNC_8(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_29 = self.bot.ws._trace[0]\n", "VAR_29 = 'the gateway'\n", "await self.bot.change_presence(game=discord.Game(name=gateway, type=2),\n status=discord.Status.dnd)\n" ]
[ "async def on_connect(self):...\n", "\"\"\"docstring\"\"\"\n", "gateway = self.bot.ws._trace[0]\n", "gateway = 'the gateway'\n", "await self.bot.change_presence(game=discord.Game(name=gateway, type=2),\n status=discord.Status.dnd)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_34(self):...\n", "VAR_51 = self.targetsfile\n", "if not os.path.isfile(VAR_51):\n", "return\n", "VAR_52 = pickle.loads(f.read())\n", "if 'targets' in VAR_52:\n", "self.log.debug('Target list was loaded')\n", "if 'forums' in VAR_52:\n", "VAR_5.update(VAR_52['targets'])\n", "self.log.debug('Forum set was loaded')\n", "if 'domains' in VAR_52:\n", "VAR_7.update(VAR_52['forums'])\n", "self.log.debug('Domain set was loaded')\n", "if 'sets' in VAR_52:\n", "VAR_4.update(VAR_52['domains'])\n", "self.log.debug('Other sets were loaded')\n", "self.pc.sets.update(VAR_52['sets'])\n" ]
[ "def load_targets(self):...\n", "fname = self.targetsfile\n", "if not os.path.isfile(fname):\n", "return\n", "data = pickle.loads(f.read())\n", "if 'targets' in data:\n", "self.log.debug('Target list was loaded')\n", "if 'forums' in data:\n", "targets.update(data['targets'])\n", "self.log.debug('Forum set was loaded')\n", "if 'domains' in data:\n", "forums.update(data['forums'])\n", "self.log.debug('Domain set was loaded')\n", "if 'sets' in data:\n", "domains.update(data['domains'])\n", "self.log.debug('Other sets were loaded')\n", "self.pc.sets.update(data['sets'])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Expr'", "For", "Expr'", "Expr'", "Condition", "Expr'", "Expr'", "Condition", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_3(self, VAR_4):...\n", "\"\"\"docstring\"\"\"\n", "VAR_29 = '(?P<var>(pass|key|secret|PASS|KEY|SECRET).*?=)(?P<value>.*?\\\\s)'\n", "return re.sub(VAR_29, '\\\\g<var>****** ', VAR_4)\n" ]
[ "def _sanitize_log_msg(self, msg):...\n", "\"\"\"docstring\"\"\"\n", "reg = '(?P<var>(pass|key|secret|PASS|KEY|SECRET).*?=)(?P<value>.*?\\\\s)'\n", "return re.sub(reg, '\\\\g<var>****** ', msg)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_24():...\n", "\"\"\"docstring\"\"\"\n", "Activity.__default_acl__ = property(FUNC_3)\n", "CancelInvoice.__default_acl__ = property(FUNC_10)\n", "Company.__default_acl__ = property(FUNC_4)\n", "CompetenceGrid.__acl__ = property(FUNC_23)\n", "CompetenceGridItem.__acl__ = property(FUNC_23)\n", "CompetenceGridSubItem.__acl__ = property(FUNC_23)\n", "ConfigFiles.__default_acl__ = [(Allow, Everyone, 'view')]\n", "Customer.__default_acl__ = property(FUNC_18)\n", "DiscountLine.__acl__ = property(FUNC_13)\n", "Estimation.__default_acl__ = property(FUNC_8)\n", "ExpenseSheet.__default_acl__ = property(FUNC_15)\n", "ExpensePayment.__default_acl__ = property(FUNC_17)\n", "File.__default_acl__ = property(FUNC_21)\n", "Invoice.__default_acl__ = property(FUNC_9)\n", "Job.__default_acl__ = VAR_0[:]\n", "Payment.__default_acl__ = property(FUNC_16)\n", "PaymentLine.__acl__ = property(FUNC_14)\n", "Phase.__acl__ = property(FUNC_19)\n", "Project.__default_acl__ = property(FUNC_20)\n", "SaleProductCategory.__acl__ = property(FUNC_22)\n", "SaleProduct.__acl__ = property(FUNC_22)\n", "SaleProductGroup.__acl__ = property(FUNC_22)\n", "StatisticSheet.__acl__ = property(FUNC_0)\n", "StatisticEntry.__acl__ = property(FUNC_0)\n", "BaseStatisticCriterion.__acl__ = property(FUNC_0)\n", "TaskLine.__acl__ = property(FUNC_12)\n", "TaskLineGroup.__acl__ = property(FUNC_11)\n", "Template.__default_acl__ = property(FUNC_0)\n", "TemplatingHistory.__default_acl__ = property(FUNC_0)\n", "Timeslot.__default_acl__ = property(FUNC_0)\n", "User.__default_acl__ = property(FUNC_5)\n", "UserDatas.__default_acl__ = property(FUNC_1)\n", "Workshop.__default_acl__ = property(FUNC_2)\n", "Tva.__acl__ = property(FUNC_0)\n", "ExpenseType.__acl__ = property(FUNC_0)\n", "ExpenseKmType.__acl__ = property(FUNC_0)\n", "ExpenseTelType.__acl__ = property(FUNC_0)\n" ]
[ "def set_models_acl():...\n", "\"\"\"docstring\"\"\"\n", "Activity.__default_acl__ = property(get_activity_acl)\n", "CancelInvoice.__default_acl__ = property(get_cancelinvoice_default_acl)\n", "Company.__default_acl__ = property(get_company_acl)\n", "CompetenceGrid.__acl__ = property(get_competence_acl)\n", "CompetenceGridItem.__acl__ = property(get_competence_acl)\n", "CompetenceGridSubItem.__acl__ = property(get_competence_acl)\n", "ConfigFiles.__default_acl__ = [(Allow, Everyone, 'view')]\n", "Customer.__default_acl__ = property(get_customer_acl)\n", "DiscountLine.__acl__ = property(get_discount_line_acl)\n", "Estimation.__default_acl__ = property(get_estimation_default_acl)\n", "ExpenseSheet.__default_acl__ = property(get_expense_sheet_default_acl)\n", "ExpensePayment.__default_acl__ = property(get_expense_payment_acl)\n", "File.__default_acl__ = property(get_file_acl)\n", "Invoice.__default_acl__ = property(get_invoice_default_acl)\n", "Job.__default_acl__ = DEFAULT_PERM[:]\n", "Payment.__default_acl__ = property(get_payment_default_acl)\n", "PaymentLine.__acl__ = property(get_payment_line_acl)\n", "Phase.__acl__ = property(get_phase_acl)\n", "Project.__default_acl__ = property(get_project_acl)\n", "SaleProductCategory.__acl__ = property(get_product_acl)\n", "SaleProduct.__acl__ = property(get_product_acl)\n", "SaleProductGroup.__acl__ = property(get_product_acl)\n", "StatisticSheet.__acl__ = property(get_base_acl)\n", "StatisticEntry.__acl__ = property(get_base_acl)\n", "BaseStatisticCriterion.__acl__ = property(get_base_acl)\n", "TaskLine.__acl__ = property(get_task_line_acl)\n", "TaskLineGroup.__acl__ = property(get_task_line_group_acl)\n", "Template.__default_acl__ = property(get_base_acl)\n", "TemplatingHistory.__default_acl__ = property(get_base_acl)\n", "Timeslot.__default_acl__ = property(get_base_acl)\n", "User.__default_acl__ = property(get_user_acl)\n", "UserDatas.__default_acl__ = property(get_userdatas_acl)\n", "Workshop.__default_acl__ = property(get_event_acl)\n", "Tva.__acl__ = property(get_base_acl)\n", "ExpenseType.__acl__ = property(get_base_acl)\n", "ExpenseKmType.__acl__ = property(get_base_acl)\n", "ExpenseTelType.__acl__ = property(get_base_acl)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_12(self, VAR_13, VAR_14=None):...\n", "\"\"\"docstring\"\"\"\n", "VAR_36 = self.run_tracker.get_background_root_workunit()\n", "if VAR_14:\n", "VAR_44 = self.run_tracker.new_workunit_under_parent(VAR_17=\n parent_workunit_name, VAR_18=[WorkUnitLabel.MULTITOOL], parent=\n background_root_workunit)\n", "VAR_45 = VAR_36\n", "VAR_45 = VAR_44.__enter__()\n", "VAR_46 = None\n", "VAR_46 = lambda : VAR_44.__exit__(None, None, None)\n", "self.run_tracker.background_worker_pool().submit_async_work_chain(VAR_13,\n VAR_45=workunit_parent, VAR_46=done_hook)\n" ]
[ "def submit_background_work_chain(self, work_chain, parent_workunit_name=None):...\n", "\"\"\"docstring\"\"\"\n", "background_root_workunit = self.run_tracker.get_background_root_workunit()\n", "if parent_workunit_name:\n", "workunit_parent_ctx = self.run_tracker.new_workunit_under_parent(name=\n parent_workunit_name, labels=[WorkUnitLabel.MULTITOOL], parent=\n background_root_workunit)\n", "workunit_parent = background_root_workunit\n", "workunit_parent = workunit_parent_ctx.__enter__()\n", "done_hook = None\n", "done_hook = lambda : workunit_parent_ctx.__exit__(None, None, None)\n", "self.run_tracker.background_worker_pool().submit_async_work_chain(work_chain,\n workunit_parent=workunit_parent, done_hook=done_hook)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_1(VAR_0, VAR_1, VAR_2):...\n", "VAR_3 = VAR_0.get_input('project', default='')\n", "VAR_4 = VAR_0.get_input('username', default='')\n", "VAR_1['GCE_EMAIL'] = VAR_4\n", "VAR_1['GCE_PROJECT'] = VAR_3\n", "VAR_5 = {'type': 'service_account', 'private_key': VAR_0.get_input(\n 'ssh_key_data', default=''), 'client_email': VAR_4, 'project_id': VAR_3}\n", "VAR_6, VAR_7 = tempfile.mkstemp(dir=private_data_dir)\n", "VAR_8 = os.fdopen(VAR_6, 'w')\n", "json.dump(VAR_5, VAR_8)\n", "VAR_8.close()\n", "os.chmod(VAR_7, stat.S_IRUSR | stat.S_IWUSR)\n", "VAR_1['GCE_CREDENTIALS_FILE_PATH'] = VAR_7\n" ]
[ "def gce(cred, env, private_data_dir):...\n", "project = cred.get_input('project', default='')\n", "username = cred.get_input('username', default='')\n", "env['GCE_EMAIL'] = username\n", "env['GCE_PROJECT'] = project\n", "json_cred = {'type': 'service_account', 'private_key': cred.get_input(\n 'ssh_key_data', default=''), 'client_email': username, 'project_id':\n project}\n", "handle, path = tempfile.mkstemp(dir=private_data_dir)\n", "f = os.fdopen(handle, 'w')\n", "json.dump(json_cred, f)\n", "f.close()\n", "os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)\n", "env['GCE_CREDENTIALS_FILE_PATH'] = path\n" ]
[ 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'" ]
[ "def FUNC_1(self):...\n", "self.events.append('on_enter: %s' % self.path)\n" ]
[ "def on_enter(self):...\n", "self.events.append('on_enter: %s' % self.path)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_6(self):...\n", "if self._IsServerAlive():\n", "self._server_popen.terminate()\n", "utils.RemoveIfExists(self._temp_options_filename)\n" ]
[ "def _ServerCleanup(self):...\n", "if self._IsServerAlive():\n", "self._server_popen.terminate()\n", "utils.RemoveIfExists(self._temp_options_filename)\n" ]
[ 0, 0, 0, 7 ]
[ "FunctionDef'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_6(self, VAR_38):...\n", "if not VAR_38:\n", "if self.emp_error is not None:\n", "if len(VAR_38) > self.length:\n", "VAR_101.errors.add(self.emp_error)\n", "VAR_101.errors.add(self.len_error)\n", "return VAR_38\n" ]
[ "def run(self, title):...\n", "if not title:\n", "if self.emp_error is not None:\n", "if len(title) > self.length:\n", "c.errors.add(self.emp_error)\n", "c.errors.add(self.len_error)\n", "return title\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Condition", "Condition", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_0(self):...\n", "return self.submission_count\n" ]
[ "def get_submission_count(self):...\n", "return self.submission_count\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_8(self):...\n", "self.mock(time, 'time', lambda : 126.0)\n", "def FUNC_22(VAR_14, VAR_15):...\n", "VAR_37 = VAR_14.state['sleep_streak']\n", "self.assertEqual(VAR_14.remote, self.server)\n", "if VAR_37 == 5:\n", "return False\n" ]
[ "def test_run_bot(self):...\n", "self.mock(time, 'time', lambda : 126.0)\n", "def poll_server(botobj, _):...\n", "sleep_streak = botobj.state['sleep_streak']\n", "self.assertEqual(botobj.remote, self.server)\n", "if sleep_streak == 5:\n", "return False\n" ]
[ 0, 0, 0, 0, 5, 0, 0 ]
[ "FunctionDef'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Condition", "Return'" ]
[ "def FUNC_16(self):...\n", "self.run_test_case(self.scenario.delete_app(), retry_interval_secs=8,\n max_retries=8)\n" ]
[ "def test_z_delete_app(self):...\n", "self.run_test_case(self.scenario.delete_app(), retry_interval_secs=8,\n max_retries=8)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_22(VAR_37):...\n", "\"\"\"docstring\"\"\"\n", "VAR_64 = {}\n", "for VAR_95, info in iteritems(VAR_37):\n", "if not VAR_95.nostandalone:\n", "return VAR_64\n", "VAR_64[VAR_95] = info\n" ]
[ "def clean_before_output(kw_matches):...\n", "\"\"\"docstring\"\"\"\n", "filtered_kw_matches = {}\n", "for kw_match, info in iteritems(kw_matches):\n", "if not kw_match.nostandalone:\n", "return filtered_kw_matches\n", "filtered_kw_matches[kw_match] = info\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Condition", "Return'", "Assign'" ]
[ "def FUNC_13(self, VAR_13, VAR_12, VAR_22=0):...\n", "VAR_23 = self.wz.make_dealer_rep_msg(VAR_13, VAR_22, wzrpc.status.error, VAR_12\n )\n", "self.wz_sock.send_multipart(VAR_23)\n" ]
[ "def send_wz_error(self, reqid, data, seqid=0):...\n", "msg = self.wz.make_dealer_rep_msg(reqid, seqid, wzrpc.status.error, data)\n", "self.wz_sock.send_multipart(msg)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_4():...\n", "VAR_11 = open('RedditAPIAccess.txt')\n", "VAR_12 = []\n", "for line in VAR_11:\n", "VAR_12.append(line)\n", "return line[2]\n" ]
[ "def getBotUsername():...\n", "inputFile = open('RedditAPIAccess.txt')\n", "lines = []\n", "for line in inputFile:\n", "lines.append(line)\n", "return line[2]\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Expr'", "Return'" ]
[ "def FUNC_18(*VAR_20, **VAR_11):...\n", "\"\"\"docstring\"\"\"\n", "VAR_32 = VAR_20[0]\n", "if len(VAR_20) == 1:\n", "VAR_53 = product\n", "if len(VAR_20) == 2:\n", "if isinstance(VAR_32, str):\n", "VAR_53 = VAR_20[1]\n", "VAR_32 = [VAR_32]\n", "def FUNC_36(VAR_11):...\n", "for VAR_51, VAR_58 in VAR_11.items():\n", "if isinstance(VAR_58, str) or not isinstance(VAR_58, Iterable):\n", "return [VAR_10.format(**comb) for comb in map(dict, VAR_53(*FUNC_36(VAR_11)\n )) for VAR_10 in VAR_32]\n", "VAR_58 = [VAR_58]\n", "yield [(VAR_51, VAR_16) for VAR_16 in VAR_58]\n" ]
[ "def expand(*args, **wildcards):...\n", "\"\"\"docstring\"\"\"\n", "filepatterns = args[0]\n", "if len(args) == 1:\n", "combinator = product\n", "if len(args) == 2:\n", "if isinstance(filepatterns, str):\n", "combinator = args[1]\n", "filepatterns = [filepatterns]\n", "def flatten(wildcards):...\n", "for wildcard, values in wildcards.items():\n", "if isinstance(values, str) or not isinstance(values, Iterable):\n", "return [filepattern.format(**comb) for comb in map(dict, combinator(*\n flatten(wildcards))) for filepattern in filepatterns]\n", "values = [values]\n", "yield [(wildcard, value) for value in values]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "FunctionDef'", "For", "Condition", "Return'", "Assign'", "Expr'" ]
[ "def __init__(self, VAR_0, VAR_1, VAR_2, VAR_3, VAR_4, VAR_5, VAR_6, VAR_7):...\n", "setattr(self, 'name', VAR_1)\n", "setattr(self, 'id', VAR_0)\n", "VAR_17 = {}\n", "VAR_17['type'] = None\n", "VAR_17['issued'] = str(VAR_6)\n", "VAR_17['synopsis'] = VAR_2\n", "VAR_17['description'] = VAR_4\n", "VAR_17['solution'] = VAR_5\n", "VAR_17['severity'] = VAR_3\n", "VAR_17['summary'] = None\n", "VAR_17['updated'] = str(VAR_7)\n", "VAR_17['url'] = 'https://access.redhat.com/errata/%s' % VAR_1\n", "VAR_17['bugzilla_list'] = []\n", "VAR_17['cve_list'] = []\n", "VAR_17['package_list'] = []\n", "VAR_17['reference_list'] = []\n", "setattr(self, 'mydict', VAR_17)\n" ]
[ "def __init__(self, id, name, synopsis, severity, description, solution,...\n", "setattr(self, 'name', name)\n", "setattr(self, 'id', id)\n", "mydict = {}\n", "mydict['type'] = None\n", "mydict['issued'] = str(issued)\n", "mydict['synopsis'] = synopsis\n", "mydict['description'] = description\n", "mydict['solution'] = solution\n", "mydict['severity'] = severity\n", "mydict['summary'] = None\n", "mydict['updated'] = str(updated)\n", "mydict['url'] = 'https://access.redhat.com/errata/%s' % name\n", "mydict['bugzilla_list'] = []\n", "mydict['cve_list'] = []\n", "mydict['package_list'] = []\n", "mydict['reference_list'] = []\n", "setattr(self, 'mydict', mydict)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_3():...\n", "def FUNC_12():...\n", "VAR_18 = requests.get(FUNC_2('healthy'))\n", "VAR_18.raise_for_status()\n", "return VAR_18.json()\n" ]
[ "def _CheckServerIsHealthyWithCache():...\n", "def _ServerIsHealthy():...\n", "response = requests.get(_BuildUri('healthy'))\n", "response.raise_for_status()\n", "return response.json()\n" ]
[ 0, 0, 7, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_0(self):...\n", "if not GenericRequest.test_success(self):\n", "return False\n", "self.get_user_test_info()\n", "return False\n", "return True\n" ]
[ "def test_success(self):...\n", "if not GenericRequest.test_success(self):\n", "return False\n", "self.get_user_test_info()\n", "return False\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Expr'", "Return'", "Return'" ]
[ "def FUNC_17(self, VAR_15):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_15 is None:\n", "return dict()\n", "VAR_37 = 0\n", "VAR_38 = None\n", "for o in self.products:\n", "VAR_47 = o.match(VAR_15)\n", "return VAR_38\n", "if VAR_47:\n", "VAR_50 = self.get_wildcard_len(VAR_47.groupdict())\n", "if not VAR_38 or VAR_37 > VAR_50:\n", "VAR_38 = VAR_47.groupdict()\n", "VAR_37 = VAR_50\n" ]
[ "def get_wildcards(self, requested_output):...\n", "\"\"\"docstring\"\"\"\n", "if requested_output is None:\n", "return dict()\n", "bestmatchlen = 0\n", "bestmatch = None\n", "for o in self.products:\n", "match = o.match(requested_output)\n", "return bestmatch\n", "if match:\n", "l = self.get_wildcard_len(match.groupdict())\n", "if not bestmatch or bestmatchlen > l:\n", "bestmatch = match.groupdict()\n", "bestmatchlen = l\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Assign'", "Assign'", "For", "Assign'", "Return'", "Condition", "Assign'", "Condition", "Assign'", "Assign'" ]
[ "def FUNC_0():...\n", "VAR_1 = config('cuckoo:cuckoo:machinery')\n", "if config('routing:vpn:enabled'):\n", "VAR_4 = config('routing:vpn:vpns')\n", "VAR_4 = []\n", "return {'machine': config('%s:%s:machines' % (VAR_1, VAR_1)), 'package':\n None, 'priority': 2, 'timeout': config('cuckoo:timeouts:default'),\n 'routing': {'route': config('routing:routing:route'), 'inetsim': config\n ('routing:inetsim:enabled'), 'tor': config('routing:tor:enabled'),\n 'vpns': VAR_4}, 'options': {'enable-services': False, 'enforce-timeout':\n False, 'full-memory-dump': config('cuckoo:cuckoo:memory_dump'),\n 'no-injection': False, 'process-memory-dump': True,\n 'simulated-human-interaction': True}}\n" ]
[ "def defaults():...\n", "machinery = config('cuckoo:cuckoo:machinery')\n", "if config('routing:vpn:enabled'):\n", "vpns = config('routing:vpn:vpns')\n", "vpns = []\n", "return {'machine': config('%s:%s:machines' % (machinery, machinery)),\n 'package': None, 'priority': 2, 'timeout': config(\n 'cuckoo:timeouts:default'), 'routing': {'route': config(\n 'routing:routing:route'), 'inetsim': config('routing:inetsim:enabled'),\n 'tor': config('routing:tor:enabled'), 'vpns': vpns}, 'options': {\n 'enable-services': False, 'enforce-timeout': False, 'full-memory-dump':\n config('cuckoo:cuckoo:memory_dump'), 'no-injection': False,\n 'process-memory-dump': True, 'simulated-human-interaction': True}}\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_24(self):...\n", "return self._user_id\n" ]
[ "def user_id(self):...\n", "return self._user_id\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_43(self, VAR_21=None):...\n", "\"\"\"docstring\"\"\"\n", "VAR_32 = list()\n", "VAR_21 = VAR_21 or self.get_columns_list()\n", "for VAR_14 in VAR_21:\n", "if not self.is_relation(VAR_14):\n", "return VAR_32\n", "if hasattr(self.obj, VAR_14):\n", "if not hasattr(getattr(self.obj, VAR_14), '__call__') or hasattr(getattr(\n", "VAR_32.append(VAR_14)\n", "VAR_32.append(VAR_14)\n" ]
[ "def get_order_columns_list(self, list_columns=None):...\n", "\"\"\"docstring\"\"\"\n", "ret_lst = list()\n", "list_columns = list_columns or self.get_columns_list()\n", "for col_name in list_columns:\n", "if not self.is_relation(col_name):\n", "return ret_lst\n", "if hasattr(self.obj, col_name):\n", "if not hasattr(getattr(self.obj, col_name), '__call__') or hasattr(getattr(\n", "ret_lst.append(col_name)\n", "ret_lst.append(col_name)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "For", "Condition", "Return'", "Condition", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_17(VAR_4, VAR_1):...\n", "VAR_38 = 5\n", "if VAR_4 == 'country_ru' or VAR_4 == 'country_en':\n", "VAR_52 = users.find_one(VAR_1).language + VAR_4\n", "VAR_52 = VAR_4\n", "VAR_39 = VAR_14 + timedelta(minutes=cache_time) < datetime.now(\n ) if VAR_14 else True\n", "if not VAR_15.get(VAR_52, None) or not VAR_14 or VAR_39:\n", "VAR_14 = datetime.now()\n", "log.debug('Return cached result of %s...', VAR_3.__name__)\n", "VAR_15[VAR_52] = VAR_3(VAR_4, VAR_1)\n", "VAR_51 = VAR_14 + timedelta(minutes=cache_time) - datetime.now()\n", "return VAR_15[VAR_52]\n", "log.debug('Time to reevaluate result of %s is %s', VAR_3.__name__, str(\n VAR_51)[:-7])\n", "return VAR_15[VAR_52]\n" ]
[ "def function_launcher(item_type, message):...\n", "cache_time = 5\n", "if item_type == 'country_ru' or item_type == 'country_en':\n", "result_id = users.find_one(message).language + item_type\n", "result_id = item_type\n", "high_time = when_was_called + timedelta(minutes=cache_time) < datetime.now(\n ) if when_was_called else True\n", "if not result.get(result_id, None) or not when_was_called or high_time:\n", "when_was_called = datetime.now()\n", "log.debug('Return cached result of %s...', func.__name__)\n", "result[result_id] = func(item_type, message)\n", "time_left = when_was_called + timedelta(minutes=cache_time) - datetime.now()\n", "return result[result_id]\n", "log.debug('Time to reevaluate result of %s is %s', func.__name__, str(\n time_left)[:-7])\n", "return result[result_id]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Return'", "Expr'", "Return'" ]
[ "def FUNC_21(self, VAR_15):...\n", "\"\"\"docstring\"\"\"\n", "for key, value in self.iscsi_ips.items():\n", "if value['nsp'] == VAR_15:\n", "return key\n" ]
[ "def _get_ip_using_nsp(self, nsp):...\n", "\"\"\"docstring\"\"\"\n", "for key, value in self.iscsi_ips.items():\n", "if value['nsp'] == nsp:\n", "return key\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "For", "Condition", "Return'" ]
[ "@functools.wraps(VAR_0)...\n", "if not self.request.headers.get('X-XSRF-Token-Request'):\n", "return VAR_0(self, *VAR_5, **kwargs)\n" ]
[ "@functools.wraps(f)...\n", "if not self.request.headers.get('X-XSRF-Token-Request'):\n", "return f(self, *args, **kwargs)\n" ]
[ 0, 0, 0 ]
[ "Condition", "Condition", "Return'" ]
[ "@staticmethod...\n", "VAR_30 = VAR_12.rfind(',')\n", "if VAR_30 > -1:\n", "return VAR_11[0] % (VAR_12[:VAR_30], VAR_12[VAR_30 + 1:]) + '\\n'\n", "return VAR_11[1] % VAR_12 + '\\n'\n" ]
[ "@staticmethod...\n", "i = content.rfind(',')\n", "if i > -1:\n", "return cmdTemplate[0] % (content[:i], content[i + 1:]) + '\\n'\n", "return cmdTemplate[1] % content + '\\n'\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_18(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_24 = list(self.clusters.values())\n", "for VAR_39 in VAR_24:\n", "VAR_24.remove(VAR_39)\n", "VAR_39.master = self.master\n", "if VAR_39.check_enabled():\n", "VAR_34 = VAR_39.__class__.__name__\n", "self.log_debug('Installation matches %s, checking for layered profiles' %\n VAR_34)\n", "for remaining in VAR_24:\n", "if issubclass(remaining.__class__, VAR_39.__class__):\n", "self.config['cluster'] = VAR_39\n", "VAR_38 = remaining.__class__.__name__\n", "VAR_35 = str(VAR_39.__class__.__name__).lower()\n", "self.log_debug('Layered profile %s found. Checking installation' % VAR_38)\n", "self.config['cluster_type'] = VAR_35\n", "remaining.master = self.master\n", "self.log_info('Cluster type set to %s' % self.config['cluster_type'])\n", "if remaining.check_enabled():\n", "self.log_debug('string' % (VAR_38, VAR_34))\n", "VAR_39 = remaining\n" ]
[ "def determine_cluster(self):...\n", "\"\"\"docstring\"\"\"\n", "checks = list(self.clusters.values())\n", "for cluster in checks:\n", "checks.remove(cluster)\n", "cluster.master = self.master\n", "if cluster.check_enabled():\n", "cname = cluster.__class__.__name__\n", "self.log_debug('Installation matches %s, checking for layered profiles' % cname\n )\n", "for remaining in checks:\n", "if issubclass(remaining.__class__, cluster.__class__):\n", "self.config['cluster'] = cluster\n", "rname = remaining.__class__.__name__\n", "name = str(cluster.__class__.__name__).lower()\n", "self.log_debug('Layered profile %s found. Checking installation' % rname)\n", "self.config['cluster_type'] = name\n", "remaining.master = self.master\n", "self.log_info('Cluster type set to %s' % self.config['cluster_type'])\n", "if remaining.check_enabled():\n", "self.log_debug(\n 'Installation matches both layered profile %s and base profile %s, setting cluster type to layered profile'\n % (rname, cname))\n", "cluster = remaining\n" ]
[ 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Expr'", "Assign'", "Condition", "Assign'", "Expr'", "For", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Condition", "Expr'", "Assign'" ]
[ "def FUNC_0():...\n", "VAR_4 = [legacy_api.BuildBucketApi, config_api.ConfigApi, swarmbucket_api.\n SwarmbucketApi]\n", "VAR_5 = [webapp2.Route('/', CLASS_0), webapp2.Route('/b/<build_id:\\\\d+>',\n CLASS_2), webapp2.Route('/build/<build_id:\\\\d+>', CLASS_3)]\n", "VAR_5.extend(endpoints_webapp2.api_routes(VAR_4))\n", "VAR_5.extend(endpoints_webapp2.api_routes(VAR_4, base_path='/api'))\n", "VAR_6 = prpc.Server()\n", "VAR_6.add_interceptor(auth.prpc_interceptor)\n", "VAR_6.add_service(access.AccessServicer())\n", "VAR_6.add_service(api.BuildsApi())\n", "VAR_5 += VAR_6.get_routes()\n", "return VAR_5\n" ]
[ "def get_frontend_routes():...\n", "endpoints_services = [legacy_api.BuildBucketApi, config_api.ConfigApi,\n swarmbucket_api.SwarmbucketApi]\n", "routes = [webapp2.Route('/', MainHandler), webapp2.Route(\n '/b/<build_id:\\\\d+>', BuildRPCHandler), webapp2.Route(\n '/build/<build_id:\\\\d+>', ViewBuildHandler)]\n", "routes.extend(endpoints_webapp2.api_routes(endpoints_services))\n", "routes.extend(endpoints_webapp2.api_routes(endpoints_services, base_path=\n '/api'))\n", "prpc_server = prpc.Server()\n", "prpc_server.add_interceptor(auth.prpc_interceptor)\n", "prpc_server.add_service(access.AccessServicer())\n", "prpc_server.add_service(api.BuildsApi())\n", "routes += prpc_server.get_routes()\n", "return routes\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "AugAssign'", "Return'" ]
[ "def FUNC_7(self, VAR_1, VAR_9, VAR_10):...\n", "if VAR_10 > 0:\n", "VAR_20 = 'string' % (VAR_1, 0, 0)\n", "self.c.execute(VAR_20)\n", "VAR_20 = 'string' % (VAR_9, VAR_10, VAR_1)\n", "self.c.execute(VAR_20)\n", "self.db.commit()\n" ]
[ "def add_consumption_data_row(self, ts, energy_used, power_used):...\n", "if power_used > 0:\n", "query = (\n \"\"\"\n INSERT OR IGNORE INTO Consumption (\n TimeStamp,\n EnergyUsed,\n PowerUsed \n ) VALUES (\n %s,\n %s,\n %s\n );\n \"\"\"\n % (ts, 0, 0))\n", "self.c.execute(query)\n", "query = (\n \"\"\"\n UPDATE Consumption SET \n EnergyUsed = EnergyUsed + %s,\n PowerUsed = PowerUsed + %s\n WHERE TimeStamp = %s;\n \"\"\"\n % (energy_used, power_used, ts))\n", "self.c.execute(query)\n", "self.db.commit()\n" ]
[ 0, 0, 4, 0, 4, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_10(self):...\n", "self._tmp_dir = TempDir()\n", "self.accounts = {}\n" ]
[ "def _initialize(self):...\n", "self._tmp_dir = TempDir()\n", "self.accounts = {}\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "def FUNC_9(self, VAR_20, VAR_23):...\n", "return self._store\n" ]
[ "def new_connection(self, cluster_api, provider):...\n", "return self._store\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_10(VAR_17):...\n", "VAR_35 = os.path.dirname(VAR_17)\n", "if not os.path.exists(VAR_35):\n", "os.makedirs(VAR_35)\n" ]
[ "def ensure_dir(file_path):...\n", "directory = os.path.dirname(file_path)\n", "if not os.path.exists(directory):\n", "os.makedirs(directory)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Expr'" ]
[ "def FUNC_7(VAR_1, VAR_9=None, **VAR_8):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_9 is None:\n", "VAR_9 = {'hours': 4}\n", "if isinstance(VAR_9, datetime.timedelta):\n", "if isinstance(VAR_9, dict):\n", "VAR_7 = datetime.datetime.now()\n", "VAR_9 = datetime.timedelta(**delta)\n", "VAR_6 = VAR_7 - VAR_9\n", "return FUNC_6(VAR_1, VAR_6, VAR_7, **kwargs)\n" ]
[ "def live(conn, delta=None, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "if delta is None:\n", "delta = {'hours': 4}\n", "if isinstance(delta, datetime.timedelta):\n", "if isinstance(delta, dict):\n", "end = datetime.datetime.now()\n", "delta = datetime.timedelta(**delta)\n", "start = end - delta\n", "return history(conn, start, end, **kwargs)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_4(self, VAR_2, VAR_1, VAR_3):...\n", "self.cursor.execute('update users set money = money - %s where id = %s' % (\n VAR_3, VAR_2))\n", "self.cursor.execute('update projects set money = money + %s where id = %s' %\n (VAR_3, VAR_1))\n", "self.cursor.execute('string' % (VAR_1, VAR_2, VAR_3))\n", "self.db.commit()\n" ]
[ "def save_accepted_transaction(self, user_id, project_id, money):...\n", "self.cursor.execute('update users set money = money - %s where id = %s' % (\n money, user_id))\n", "self.cursor.execute('update projects set money = money + %s where id = %s' %\n (money, project_id))\n", "self.cursor.execute(\n \"insert into transactions (project_id, user_id, money, timestamp, state) values (%s, %s, %s, now(), 'accepted' )\"\n % (project_id, user_id, money))\n", "self.db.commit()\n" ]
[ 0, 4, 4, 4, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "from __future__ import absolute_import\n", "from mock import MagicMock, patch\n", "from six.moves.urllib_parse import quote_plus\n", "from flask import url_for, session\n", "from invenio.testsuite import make_test_suite, run_test_suite\n", "from invenio.ext.sqlalchemy import db\n", "from .helpers import OAuth2ClientTestCase\n", "def FUNC_0(self):...\n", "VAR_8 = lambda x: dict(request_token_params={'scope': ''}, base_url=\n 'https://foo.bar/', request_token_url=None, access_token_url=\n 'https://foo.bar/oauth/access_token', authorize_url=\n 'https://foo.bar/oauth/authorize', consumer_key=x, consumer_secret=\n 'testsecret')\n", "self.app.config['OAUTHCLIENT_REMOTE_APPS'] = dict(test=dict(\n authorized_handler=self.handler, params=params('testid'), title=\n 'MyLinkedTestAccount'), test_invalid=dict(authorized_handler=self.\n handler_invalid, params=params('test_invalidid'), title='Test Invalid'),\n full=dict(params=params('fullid'), title='Full'))\n", "self.handled_resp = None\n", "self.handled_remote = None\n", "self.handled_args = None\n", "self.handled_kwargs = None\n", "from invenio.modules.oauthclient.models import RemoteToken, RemoteAccount\n", "RemoteToken.query.delete()\n", "RemoteAccount.query.delete()\n", "db.session.commit()\n", "def FUNC_1(self):...\n", "self.handled_resp = None\n", "self.handled_remote = None\n", "self.handled_args = None\n", "self.handled_kwargs = None\n", "from invenio.modules.oauthclient.models import RemoteToken, RemoteAccount\n", "RemoteToken.query.delete()\n", "RemoteAccount.query.delete()\n", "db.session.commit()\n", "def FUNC_2(self, VAR_1, VAR_2, *VAR_3, **VAR_4):...\n", "self.handled_resp = VAR_1\n", "self.handled_remote = VAR_2\n", "self.handled_args = VAR_3\n", "self.handled_kwargs = VAR_4\n", "return 'TEST'\n" ]
[ "from __future__ import absolute_import\n", "from mock import MagicMock, patch\n", "from six.moves.urllib_parse import quote_plus\n", "from flask import url_for, session\n", "from invenio.testsuite import make_test_suite, run_test_suite\n", "from invenio.ext.sqlalchemy import db\n", "from .helpers import OAuth2ClientTestCase\n", "def setUp(self):...\n", "params = lambda x: dict(request_token_params={'scope': ''}, base_url=\n 'https://foo.bar/', request_token_url=None, access_token_url=\n 'https://foo.bar/oauth/access_token', authorize_url=\n 'https://foo.bar/oauth/authorize', consumer_key=x, consumer_secret=\n 'testsecret')\n", "self.app.config['OAUTHCLIENT_REMOTE_APPS'] = dict(test=dict(\n authorized_handler=self.handler, params=params('testid'), title=\n 'MyLinkedTestAccount'), test_invalid=dict(authorized_handler=self.\n handler_invalid, params=params('test_invalidid'), title='Test Invalid'),\n full=dict(params=params('fullid'), title='Full'))\n", "self.handled_resp = None\n", "self.handled_remote = None\n", "self.handled_args = None\n", "self.handled_kwargs = None\n", "from invenio.modules.oauthclient.models import RemoteToken, RemoteAccount\n", "RemoteToken.query.delete()\n", "RemoteAccount.query.delete()\n", "db.session.commit()\n", "def tearDown(self):...\n", "self.handled_resp = None\n", "self.handled_remote = None\n", "self.handled_args = None\n", "self.handled_kwargs = None\n", "from invenio.modules.oauthclient.models import RemoteToken, RemoteAccount\n", "RemoteToken.query.delete()\n", "RemoteAccount.query.delete()\n", "db.session.commit()\n", "def handler(self, resp, remote, *args, **kwargs):...\n", "self.handled_resp = resp\n", "self.handled_remote = remote\n", "self.handled_args = args\n", "self.handled_kwargs = kwargs\n", "return 'TEST'\n" ]
[ 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_5(VAR_34, VAR_32, VAR_33):...\n", "print('save weeks')\n", "VAR_39 = 'meal_planner.db'\n", "VAR_50 = 'recipes_' + str(VAR_13)\n", "conn.execute('CREATE TABLE IF NOT EXISTS ' + VAR_50 +\n ' (recipe text, row int, column int)')\n", "conn.execute('INSERT INTO ' + VAR_50 + ' VALUES (?, ?, ?);', (VAR_34,\n VAR_32, VAR_33))\n" ]
[ "def save_weeks_recipes(recipeName, row, column):...\n", "print('save weeks')\n", "database_file = 'meal_planner.db'\n", "tableName = 'recipes_' + str(weekNumber)\n", "conn.execute('CREATE TABLE IF NOT EXISTS ' + tableName +\n ' (recipe text, row int, column int)')\n", "conn.execute('INSERT INTO ' + tableName + ' VALUES (?, ?, ?);', (recipeName,\n row, column))\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_3(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_10 = self.session.get(self.login_url)\n", "VAR_11 = re.findall('_xsrf=([\\\\w|-]+)', VAR_10.headers.get('Set-Cookie'))[0]\n", "return VAR_11\n" ]
[ "def _get_token(self):...\n", "\"\"\"docstring\"\"\"\n", "resp = self.session.get(self.login_url)\n", "token = re.findall('_xsrf=([\\\\w|-]+)', resp.headers.get('Set-Cookie'))[0]\n", "return token\n" ]
[ 0, 0, 0, 5, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_2(self, VAR_9, VAR_10):...\n", "\"\"\"docstring\"\"\"\n" ]
[ "def ensure_export(self, context, volume):...\n", "\"\"\"docstring\"\"\"\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Docstring" ]
[ "def FUNC_1(self, VAR_9):...\n", "\"\"\"docstring\"\"\"\n", "VAR_13 = VAR_9['package_list']\n", "VAR_14 = {}\n", "VAR_15 = {}\n", "if not VAR_13:\n", "return VAR_15\n", "VAR_16 = None\n", "VAR_17 = None\n", "if 'repository_list' in VAR_9:\n", "VAR_17 = VAR_9['repository_list']\n", "self.cursor.execute('SELECT id, epoch, version, release from evr')\n", "VAR_16 = []\n", "VAR_18 = self.cursor.fetchall()\n", "self.cursor.execute('select id from repo where name in %s;', [tuple(VAR_17)])\n", "VAR_19 = {}\n", "for id_tuple in self.cursor.fetchall():\n", "VAR_20 = {}\n", "for VAR_50 in id_tuple:\n", "for VAR_50, VAR_55, VAR_53, VAR_54 in VAR_18:\n", "VAR_16.append(VAR_50)\n", "VAR_49 = VAR_55 + ':' + VAR_53 + ':' + VAR_54\n", "self.cursor.execute('SELECT id, name from arch')\n", "VAR_19[VAR_49] = VAR_50\n", "VAR_21 = self.cursor.fetchall()\n", "VAR_20[VAR_50] = {'epoch': VAR_55, 'version': VAR_53, 'release': VAR_54}\n", "VAR_22 = {}\n", "VAR_23 = {}\n", "for VAR_50, VAR_7 in VAR_21:\n", "VAR_22[VAR_7] = VAR_50\n", "VAR_24 = []\n", "VAR_23[VAR_50] = VAR_7\n", "VAR_25 = []\n", "for VAR_51 in VAR_13:\n", "VAR_51 = str(VAR_51)\n", "self.cursor.execute(\n 'select id, name, evr_id, arch_id from package where evr_id in %s;', [\n tuple(VAR_25)])\n", "if VAR_51 not in VAR_14:\n", "VAR_26 = self.cursor.fetchall()\n", "VAR_52, VAR_53, VAR_54, VAR_55, VAR_56 = FUNC_0(str(VAR_51))\n", "VAR_27 = {}\n", "VAR_14[VAR_51] = {}\n", "for VAR_50, VAR_7, VAR_62, arch_id in VAR_26:\n", "VAR_58 = VAR_55 + ':' + VAR_53 + ':' + VAR_54\n", "VAR_49 = VAR_7 + ':' + str(VAR_62) + ':' + str(arch_id)\n", "VAR_28 = []\n", "if VAR_58 in VAR_19:\n", "if VAR_49 not in VAR_27:\n", "for VAR_51 in VAR_14.keys():\n", "VAR_24.append(VAR_52)\n", "VAR_27[VAR_49] = [VAR_50]\n", "VAR_27[VAR_49].append(VAR_50)\n", "VAR_52, VAR_53, VAR_54, VAR_55, VAR_56 = FUNC_0(str(VAR_51))\n", "self.cursor.execute('select pkg_id, repo_id from pkg_repo where pkg_id in %s;',\n [tuple(VAR_28)])\n", "VAR_14[VAR_51][VAR_52] = []\n", "VAR_49 = str(VAR_52 + ':' + str(VAR_14[VAR_51]['evr_id']) + ':' + str(\n VAR_14[VAR_51]['arch_id']))\n", "VAR_29 = self.cursor.fetchall()\n", "VAR_62 = VAR_19[VAR_58]\n", "VAR_28.extend(VAR_27[VAR_49])\n", "VAR_30 = {}\n", "VAR_25.append(VAR_62)\n", "VAR_14[VAR_51]['pkg_id'].extend(VAR_27[VAR_49])\n", "VAR_31 = []\n", "VAR_14[VAR_51]['evr_id'] = VAR_62\n", "for VAR_59, repo_id in VAR_29:\n", "VAR_14[VAR_51]['arch_id'] = VAR_22[VAR_56]\n", "VAR_31.append(repo_id)\n", "for VAR_51 in VAR_14.keys():\n", "VAR_14[VAR_51]['repo_id'] = []\n", "if VAR_59 in VAR_30:\n", "self.cursor.execute('select name, id from package where name in %s;', [\n tuple(VAR_24)])\n", "for VAR_59 in VAR_14[VAR_51]['pkg_id']:\n", "VAR_14[VAR_51]['pkg_id'] = []\n", "VAR_30[VAR_59].append(repo_id)\n", "VAR_30[VAR_59] = [repo_id]\n", "VAR_32 = self.cursor.fetchall()\n", "VAR_14[VAR_51]['repo_id'].extend(VAR_30[VAR_59])\n", "VAR_14[VAR_51]['update_id'] = []\n", "VAR_33 = {}\n", "for VAR_7, VAR_50 in VAR_32:\n", "if VAR_7 in VAR_33:\n", "for VAR_51 in VAR_14.keys():\n", "VAR_33[VAR_7].append(VAR_50)\n", "VAR_33[VAR_7] = [VAR_50]\n", "VAR_52, VAR_53, VAR_54, VAR_55, VAR_56 = FUNC_0(str(VAR_51))\n", "VAR_34 = []\n", "VAR_14[VAR_51][VAR_52].extend(VAR_33[VAR_52])\n", "for VAR_51 in VAR_14:\n", "VAR_52, VAR_53, VAR_54, VAR_55, VAR_56 = FUNC_0(str(VAR_51))\n", "self.cursor.execute('select id, name, url from repo where id in %s;', [\n tuple(VAR_31)])\n", "if VAR_52 in VAR_14[VAR_51] and VAR_14[VAR_51][VAR_52]:\n", "VAR_35 = self.cursor.fetchall()\n", "VAR_60 = 'string' % 
('%s', str(VAR_14[VAR_51]['evr_id']))\n", "VAR_36 = {}\n", "self.cursor.execute(VAR_60, [tuple(VAR_14[VAR_51][VAR_52])])\n", "for VAR_50, VAR_7, url in VAR_35:\n", "for VAR_50 in self.cursor.fetchall():\n", "VAR_36[VAR_50] = {'name': VAR_7, 'url': url}\n", "self.cursor.execute('select pkg_id, repo_id from pkg_repo where pkg_id in %s;',\n [tuple(VAR_34)])\n", "VAR_14[VAR_51]['update_id'].append(VAR_50[0])\n", "VAR_37 = self.cursor.fetchall()\n", "VAR_34.append(VAR_50[0])\n", "VAR_30 = {}\n", "for VAR_59, repo_id in VAR_37:\n", "if VAR_59 not in VAR_30:\n", "self.cursor.execute(\n 'select pkg_id, errata_id from pkg_errata where pkg_id in %s;', [tuple(\n VAR_34)])\n", "VAR_30[VAR_59] = [repo_id]\n", "VAR_30[VAR_59].append(repo_id)\n", "VAR_38 = self.cursor.fetchall()\n", "VAR_39 = {}\n", "VAR_40 = []\n", "for VAR_59, VAR_61 in VAR_38:\n", "VAR_40.append(VAR_61)\n", "self.cursor.execute('SELECT id, name from errata where id in %s;', [tuple(\n VAR_40)])\n", "if VAR_59 not in VAR_39:\n", "VAR_41 = self.cursor.fetchall()\n", "VAR_39[VAR_59] = [VAR_61]\n", "VAR_39[VAR_59].append(VAR_61)\n", "VAR_42 = {}\n", "VAR_43 = []\n", "for VAR_50, VAR_7 in VAR_41:\n", "VAR_42[VAR_50] = VAR_7\n", "self.cursor.execute(\n 'SELECT errata_id, repo_id from errata_repo where errata_id in %s;', [\n tuple(VAR_43)])\n", "VAR_43.append(VAR_50)\n", "VAR_32 = self.cursor.fetchall()\n", "VAR_44 = {}\n", "for VAR_61, repo_id in VAR_32:\n", "if VAR_61 not in VAR_44:\n", "self.cursor.execute(\n 'SELECT id, name, evr_id, arch_id from package where id in %s;', [tuple\n (VAR_34)])\n", "VAR_44[VAR_61] = [repo_id]\n", "VAR_44[VAR_61].append(repo_id)\n", "VAR_45 = self.cursor.fetchall()\n", "VAR_46 = {}\n", "VAR_47 = {}\n", "for VAR_50, VAR_7, VAR_62, arch_id in VAR_45:\n", "VAR_57 = VAR_7 + '-'\n", "for VAR_51 in VAR_14:\n", "if VAR_20[VAR_62]['epoch'] != '0':\n", "VAR_15[VAR_51] = []\n", "VAR_48 = {'update_list': VAR_15}\n", "VAR_57 += VAR_20[VAR_62]['epoch'] + ':'\n", "VAR_57 += VAR_20[VAR_62]['version'] + '-' + VAR_20[VAR_62]['release'\n ] + '.' + VAR_23[arch_id]\n", "if 'update_id' not in VAR_14[VAR_51]:\n", "if VAR_16 is not None:\n", "VAR_46[VAR_50] = VAR_57\n", "for upd_pkg_id in VAR_14[VAR_51]['update_id']:\n", "VAR_48.update({'repository_list': VAR_17})\n", "return VAR_48\n", "VAR_47[VAR_50] = arch_id\n", "if VAR_14[VAR_51]['arch_id'] == VAR_47[upd_pkg_id]:\n", "for r_id in VAR_30[upd_pkg_id]:\n", "if r_id in VAR_14[VAR_51]['repo_id'] and (VAR_16 is None or r_id in VAR_16):\n", "if upd_pkg_id in VAR_39:\n", "VAR_63 = VAR_39[upd_pkg_id]\n", "for e_id in VAR_63:\n", "if r_id in VAR_44[e_id]:\n", "VAR_64 = VAR_42[e_id]\n", "VAR_65 = VAR_36[r_id]['name']\n", "VAR_15[VAR_51].append({'package': VAR_46[upd_pkg_id], 'erratum': VAR_64,\n 'repository': VAR_65})\n" ]
[ "def process_list(self, data):...\n", "\"\"\"docstring\"\"\"\n", "packages_to_process = data['package_list']\n", "auxiliary_dict = {}\n", "answer = {}\n", "if not packages_to_process:\n", "return answer\n", "provided_repo_ids = None\n", "provided_repo_names = None\n", "if 'repository_list' in data:\n", "provided_repo_names = data['repository_list']\n", "self.cursor.execute('SELECT id, epoch, version, release from evr')\n", "provided_repo_ids = []\n", "evrs = self.cursor.fetchall()\n", "self.cursor.execute('select id from repo where name in %s;', [tuple(\n provided_repo_names)])\n", "evr2id_dict = {}\n", "for id_tuple in self.cursor.fetchall():\n", "id2evr_dict = {}\n", "for id in id_tuple:\n", "for id, e, v, r in evrs:\n", "provided_repo_ids.append(id)\n", "key = e + ':' + v + ':' + r\n", "self.cursor.execute('SELECT id, name from arch')\n", "evr2id_dict[key] = id\n", "archs = self.cursor.fetchall()\n", "id2evr_dict[id] = {'epoch': e, 'version': v, 'release': r}\n", "arch2id_dict = {}\n", "id2arch_dict = {}\n", "for id, name in archs:\n", "arch2id_dict[name] = id\n", "packages_names = []\n", "id2arch_dict[id] = name\n", "packages_evrids = []\n", "for pkg in packages_to_process:\n", "pkg = str(pkg)\n", "self.cursor.execute(\n 'select id, name, evr_id, arch_id from package where evr_id in %s;', [\n tuple(packages_evrids)])\n", "if pkg not in auxiliary_dict:\n", "packs = self.cursor.fetchall()\n", "n, v, r, e, a = split_filename(str(pkg))\n", "nevra2pkg_id = {}\n", "auxiliary_dict[pkg] = {}\n", "for id, name, evr_id, arch_id in packs:\n", "evr_key = e + ':' + v + ':' + r\n", "key = name + ':' + str(evr_id) + ':' + str(arch_id)\n", "pkg_ids = []\n", "if evr_key in evr2id_dict:\n", "if key not in nevra2pkg_id:\n", "for pkg in auxiliary_dict.keys():\n", "packages_names.append(n)\n", "nevra2pkg_id[key] = [id]\n", "nevra2pkg_id[key].append(id)\n", "n, v, r, e, a = split_filename(str(pkg))\n", "self.cursor.execute('select pkg_id, repo_id from pkg_repo where pkg_id in %s;',\n [tuple(pkg_ids)])\n", "auxiliary_dict[pkg][n] = []\n", "key = str(n + ':' + str(auxiliary_dict[pkg]['evr_id']) + ':' + str(\n auxiliary_dict[pkg]['arch_id']))\n", "pack_repo_ids = self.cursor.fetchall()\n", "evr_id = evr2id_dict[evr_key]\n", "pkg_ids.extend(nevra2pkg_id[key])\n", "pkg_id2repo_id = {}\n", "packages_evrids.append(evr_id)\n", "auxiliary_dict[pkg]['pkg_id'].extend(nevra2pkg_id[key])\n", "repo_ids = []\n", "auxiliary_dict[pkg]['evr_id'] = evr_id\n", "for pkg_id, repo_id in pack_repo_ids:\n", "auxiliary_dict[pkg]['arch_id'] = arch2id_dict[a]\n", "repo_ids.append(repo_id)\n", "for pkg in auxiliary_dict.keys():\n", "auxiliary_dict[pkg]['repo_id'] = []\n", "if pkg_id in pkg_id2repo_id:\n", "self.cursor.execute('select name, id from package where name in %s;', [\n tuple(packages_names)])\n", "for pkg_id in auxiliary_dict[pkg]['pkg_id']:\n", "auxiliary_dict[pkg]['pkg_id'] = []\n", "pkg_id2repo_id[pkg_id].append(repo_id)\n", "pkg_id2repo_id[pkg_id] = [repo_id]\n", "sql_result = self.cursor.fetchall()\n", "auxiliary_dict[pkg]['repo_id'].extend(pkg_id2repo_id[pkg_id])\n", "auxiliary_dict[pkg]['update_id'] = []\n", "names2ids = {}\n", "for name, id in sql_result:\n", "if name in names2ids:\n", "for pkg in auxiliary_dict.keys():\n", "names2ids[name].append(id)\n", "names2ids[name] = [id]\n", "n, v, r, e, a = split_filename(str(pkg))\n", "update_pkg_ids = []\n", "auxiliary_dict[pkg][n].extend(names2ids[n])\n", "for pkg in auxiliary_dict:\n", "n, v, r, e, a = split_filename(str(pkg))\n", "self.cursor.execute('select id, name, 
url from repo where id in %s;', [\n tuple(repo_ids)])\n", "if n in auxiliary_dict[pkg] and auxiliary_dict[pkg][n]:\n", "all_repos = self.cursor.fetchall()\n", "sql = (\n \"\"\"\n select package.id from package join evr on package.evr_id = evr.id where package.id in %s and evr.evr > (select evr from evr where id = %s);\n \"\"\"\n % ('%s', str(auxiliary_dict[pkg]['evr_id'])))\n", "repoinfo_dict = {}\n", "self.cursor.execute(sql, [tuple(auxiliary_dict[pkg][n])])\n", "for id, name, url in all_repos:\n", "for id in self.cursor.fetchall():\n", "repoinfo_dict[id] = {'name': name, 'url': url}\n", "self.cursor.execute('select pkg_id, repo_id from pkg_repo where pkg_id in %s;',\n [tuple(update_pkg_ids)])\n", "auxiliary_dict[pkg]['update_id'].append(id[0])\n", "all_pkg_repos = self.cursor.fetchall()\n", "update_pkg_ids.append(id[0])\n", "pkg_id2repo_id = {}\n", "for pkg_id, repo_id in all_pkg_repos:\n", "if pkg_id not in pkg_id2repo_id:\n", "self.cursor.execute(\n 'select pkg_id, errata_id from pkg_errata where pkg_id in %s;', [tuple(\n update_pkg_ids)])\n", "pkg_id2repo_id[pkg_id] = [repo_id]\n", "pkg_id2repo_id[pkg_id].append(repo_id)\n", "all_pkg_errata = self.cursor.fetchall()\n", "pkg_id2errata_id = {}\n", "all_errata = []\n", "for pkg_id, errata_id in all_pkg_errata:\n", "all_errata.append(errata_id)\n", "self.cursor.execute('SELECT id, name from errata where id in %s;', [tuple(\n all_errata)])\n", "if pkg_id not in pkg_id2errata_id:\n", "errata = self.cursor.fetchall()\n", "pkg_id2errata_id[pkg_id] = [errata_id]\n", "pkg_id2errata_id[pkg_id].append(errata_id)\n", "id2errata_dict = {}\n", "all_errata_id = []\n", "for id, name in errata:\n", "id2errata_dict[id] = name\n", "self.cursor.execute(\n 'SELECT errata_id, repo_id from errata_repo where errata_id in %s;', [\n tuple(all_errata_id)])\n", "all_errata_id.append(id)\n", "sql_result = self.cursor.fetchall()\n", "errata_id2repo_id = {}\n", "for errata_id, repo_id in sql_result:\n", "if errata_id not in errata_id2repo_id:\n", "self.cursor.execute(\n 'SELECT id, name, evr_id, arch_id from package where id in %s;', [tuple\n (update_pkg_ids)])\n", "errata_id2repo_id[errata_id] = [repo_id]\n", "errata_id2repo_id[errata_id].append(repo_id)\n", "packages = self.cursor.fetchall()\n", "pkg_id2full_name = {}\n", "pkg_id2arch_id = {}\n", "for id, name, evr_id, arch_id in packages:\n", "full_rpm_name = name + '-'\n", "for pkg in auxiliary_dict:\n", "if id2evr_dict[evr_id]['epoch'] != '0':\n", "answer[pkg] = []\n", "response = {'update_list': answer}\n", "full_rpm_name += id2evr_dict[evr_id]['epoch'] + ':'\n", "full_rpm_name += id2evr_dict[evr_id]['version'] + '-' + id2evr_dict[evr_id][\n 'release'] + '.' 
+ id2arch_dict[arch_id]\n", "if 'update_id' not in auxiliary_dict[pkg]:\n", "if provided_repo_ids is not None:\n", "pkg_id2full_name[id] = full_rpm_name\n", "for upd_pkg_id in auxiliary_dict[pkg]['update_id']:\n", "response.update({'repository_list': provided_repo_names})\n", "return response\n", "pkg_id2arch_id[id] = arch_id\n", "if auxiliary_dict[pkg]['arch_id'] == pkg_id2arch_id[upd_pkg_id]:\n", "for r_id in pkg_id2repo_id[upd_pkg_id]:\n", "if r_id in auxiliary_dict[pkg]['repo_id'] and (provided_repo_ids is None or\n", "if upd_pkg_id in pkg_id2errata_id:\n", "errata_ids = pkg_id2errata_id[upd_pkg_id]\n", "for e_id in errata_ids:\n", "if r_id in errata_id2repo_id[e_id]:\n", "e_name = id2errata_dict[e_id]\n", "r_name = repoinfo_dict[r_id]['name']\n", "answer[pkg].append({'package': pkg_id2full_name[upd_pkg_id], 'erratum':\n e_name, 'repository': r_name})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "For", "Assign'", "For", "For", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Expr'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "For", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "For", "Assign'", "Expr'", "For", "Assign'", "Condition", "Expr'", "For", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "For", "Condition", "For", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "For", "Assign'", "Expr'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "For", "For", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "For", "Condition", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "For", "Expr'", "Expr'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "For", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "For", "Condition", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "For", "Condition", "Assign'", "Assign'", "AugAssign'", "AugAssign'", "Condition", "Condition", "Assign'", "For", "Expr'", "Return'", "Assign'", "Condition", "For", "Condition", "Condition", "Assign'", "For", "Condition", "Assign'", "Assign'", "Expr'" ]
[ "@property...\n", "return base64.b64encode((self.rule.name + ''.join(self.output)).encode('utf-8')\n ).decode('utf-8')\n" ]
[ "@property...\n", "return base64.b64encode((self.rule.name + ''.join(self.output)).encode('utf-8')\n ).decode('utf-8')\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_6(self, VAR_53):...\n", "if VAR_53:\n", "self.error()\n", "VAR_23 = _force_utf8(VAR_53)\n", "return self.error(errors.USER_DOESNT_EXIST)\n", "return Account._by_name(VAR_23)\n" ]
[ "def run(self, username):...\n", "if username:\n", "self.error()\n", "name = _force_utf8(username)\n", "return self.error(errors.USER_DOESNT_EXIST)\n", "return Account._by_name(name)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Assign'", "Return'", "Return'" ]
[ "@validate(VAR_0=VMeetup('id'), VAR_15=VMenu('controller', CommentSortMenu),...\n", "VAR_18 = Link._byID(VAR_0.assoc_link)\n", "VAR_20 = c.user.pref_num_comments or g.num_comments\n", "VAR_21 = g.max_comments if VAR_16 == 'true' else VAR_20\n", "VAR_22 = CommentBuilder(VAR_18, CommentSortMenu.operator(VAR_15), None, None)\n", "VAR_23 = NestedListing(VAR_22, VAR_21=num, parent_name=article._fullname)\n", "VAR_24 = PaneStack()\n", "if c.user_is_loggedin:\n", "VAR_24.append(CommentReplyBox())\n", "VAR_24.append(VAR_23.listing())\n", "VAR_24.append(CommentReplyBox(link_name=article._fullname))\n", "VAR_25 = CommentSortMenu(default=sort, type='dropdown2')\n", "VAR_26 = [VAR_25, NumCommentsMenu(VAR_18.num_comments, default=num_comments)]\n", "VAR_27 = CommentListing(VAR_27=displayPane, VAR_16=article.num_comments,\n VAR_26=nav_menus)\n", "VAR_28 = None\n", "if c.user_is_loggedin:\n", "VAR_29 = VAR_18._getLastClickTime(c.user)\n", "VAR_2 = ShowMeetup(VAR_0=meetup, VAR_27=content, fullname=article._fullname,\n VAR_28=lastViewed)\n", "VAR_28 = VAR_29._date if VAR_29 else None\n", "return BoringPage(pagename=meetup.title, VAR_27=res, body_class='meetup'\n ).render()\n", "VAR_18._click(c.user)\n" ]
[ "@validate(meetup=VMeetup('id'), sort=VMenu('controller', CommentSortMenu),...\n", "article = Link._byID(meetup.assoc_link)\n", "user_num = c.user.pref_num_comments or g.num_comments\n", "num = g.max_comments if num_comments == 'true' else user_num\n", "builder = CommentBuilder(article, CommentSortMenu.operator(sort), None, None)\n", "listing = NestedListing(builder, num=num, parent_name=article._fullname)\n", "displayPane = PaneStack()\n", "if c.user_is_loggedin:\n", "displayPane.append(CommentReplyBox())\n", "displayPane.append(listing.listing())\n", "displayPane.append(CommentReplyBox(link_name=article._fullname))\n", "sort_menu = CommentSortMenu(default=sort, type='dropdown2')\n", "nav_menus = [sort_menu, NumCommentsMenu(article.num_comments, default=\n num_comments)]\n", "content = CommentListing(content=displayPane, num_comments=article.\n num_comments, nav_menus=nav_menus)\n", "lastViewed = None\n", "if c.user_is_loggedin:\n", "clicked = article._getLastClickTime(c.user)\n", "res = ShowMeetup(meetup=meetup, content=content, fullname=article._fullname,\n lastViewed=lastViewed)\n", "lastViewed = clicked._date if clicked else None\n", "return BoringPage(pagename=meetup.title, content=res, body_class='meetup'\n ).render()\n", "article._click(c.user)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Return'", "Expr'" ]
[ "def FUNC_0(self, VAR_0='crimemap'):...\n", "return pymysql.connect(host='localhost', user=dbconfig.db_user, passwd=\n dbconfig.db_password, db=database)\n" ]
[ "def connect(self, database='crimemap'):...\n", "return pymysql.connect(host='localhost', user=dbconfig.db_user, passwd=\n dbconfig.db_password, db=database)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2():...\n", "VAR_3 = {'foo': 'bar'}\n", "VAR_1 = yaml.dump(VAR_3)\n", "VAR_2 = load_source(VAR_1)\n", "assert VAR_2 == VAR_3\n" ]
[ "def test_yaml_string():...\n", "native = {'foo': 'bar'}\n", "source = yaml.dump(native)\n", "result = load_source(source)\n", "assert result == native\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assert'" ]