lines (sequence, lengths 1–444) | raw_lines (sequence, lengths 1–444) | label (sequence, lengths 1–444) | type (sequence, lengths 1–444)
---|---|---|---|
[
"def FUNC_7(VAR_17):...\n",
"@receiver(VAR_5, **decorator_kwargs)...\n",
"if settings.SUSPEND_SIGNALS:\n",
"return\n",
"return VAR_17(VAR_23, **kwargs)\n"
] | [
"def our_wrapper(func):...\n",
"@receiver(signal, **decorator_kwargs)...\n",
"if settings.SUSPEND_SIGNALS:\n",
"return\n",
"return func(sender, **kwargs)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Condition",
"Return'",
"Return'"
] |
[
"from datetime import date, time\n",
"from django.shortcuts import redirect\n",
"from django.http import HttpRequest, HttpResponseBadRequest\n",
"from frontpage.models import Profile, Media, MediaUpload\n",
"from frontpage.management.magic import compile_markdown, get_current_user\n",
"import logging\n",
"import ntpath\n",
"import os\n",
"import math\n",
"import PIL\n",
"from PIL import Image\n",
"PATH_TO_UPLOAD_FOLDER_ON_DISK: str = '/usr/local/www/focweb/'\n",
"VAR_0 = 64\n",
"def FUNC_0(VAR_1: HttpRequest):...\n",
"VAR_14 = int(VAR_1.GET['payload'])\n",
"return redirect('/admin?error=' + str(e))\n",
"return redirect('/admin/users')\n",
"VAR_15 = int(VAR_1.GET['media_id'])\n",
"user: Profile = Profile.objects.get(pk=int(user_id))\n",
"VAR_2: Profile = get_current_user(VAR_1)\n",
"if not VAR_2 == user and VAR_2.rights < 4:\n",
"return redirect(\"/admin?error='You're not allowed to edit other users.'\")\n",
"VAR_16 = Media.objects.get(pk=int(media_id))\n",
"user.avatarMedia = VAR_16\n",
"user.save()\n"
] | [
"from datetime import date, time\n",
"from django.shortcuts import redirect\n",
"from django.http import HttpRequest, HttpResponseBadRequest\n",
"from frontpage.models import Profile, Media, MediaUpload\n",
"from frontpage.management.magic import compile_markdown, get_current_user\n",
"import logging\n",
"import ntpath\n",
"import os\n",
"import math\n",
"import PIL\n",
"from PIL import Image\n",
"PATH_TO_UPLOAD_FOLDER_ON_DISK: str = '/usr/local/www/focweb/'\n",
"IMAGE_SCALE = 64\n",
"def action_change_user_avatar(request: HttpRequest):...\n",
"user_id = int(request.GET['payload'])\n",
"return redirect('/admin?error=' + str(e))\n",
"return redirect('/admin/users')\n",
"media_id = int(request.GET['media_id'])\n",
"user: Profile = Profile.objects.get(pk=int(user_id))\n",
"u: Profile = get_current_user(request)\n",
"if not u == user and u.rights < 4:\n",
"return redirect(\"/admin?error='You're not allowed to edit other users.'\")\n",
"medium = Media.objects.get(pk=int(media_id))\n",
"user.avatarMedia = medium\n",
"user.save()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"AnnAssign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Return'",
"Return'",
"Assign'",
"AnnAssign'",
"AnnAssign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_14(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"self.driver._eql_execute('pool', 'select', self.configuration.eqlx_pool, 'show'\n ).AndReturn(['TotalCapacity: 111GB', 'FreeSpace: 11GB'])\n",
"self.mox.ReplayAll()\n",
"self.driver._update_volume_stats()\n",
"self.assertEqual(self.driver._stats['total_capacity_gb'], 111.0)\n",
"self.assertEqual(self.driver._stats['free_capacity_gb'], 11.0)\n"
] | [
"def test_update_volume_stats(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"self.driver._eql_execute('pool', 'select', self.configuration.eqlx_pool, 'show'\n ).AndReturn(['TotalCapacity: 111GB', 'FreeSpace: 11GB'])\n",
"self.mox.ReplayAll()\n",
"self.driver._update_volume_stats()\n",
"self.assertEqual(self.driver._stats['total_capacity_gb'], 111.0)\n",
"self.assertEqual(self.driver._stats['free_capacity_gb'], 11.0)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_15(VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1.value = beamr.interpreters.Text('\\\\' + VAR_1.value)\n",
"return VAR_1\n"
] | [
"def t_ANTIESCAPE(t):...\n",
"\"\"\"docstring\"\"\"\n",
"t.value = beamr.interpreters.Text('\\\\' + t.value)\n",
"return t\n"
] | [
0,
0,
2,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def __setitem__(self, VAR_7, VAR_15):...\n",
"self.current[VAR_7] = VAR_15\n"
] | [
"def __setitem__(self, path, value):...\n",
"self.current[path] = value\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_6(self, VAR_25):...\n",
"VAR_77 = []\n",
"if VAR_25:\n",
"VAR_77 = [VAR_3 for VAR_3 in self.comma_sep.split(VAR_25) if VAR_3 ==\n _force_ascii(VAR_3)]\n",
"return VAR_77\n"
] | [
"def run(self, tag_field):...\n",
"tags = []\n",
"if tag_field:\n",
"tags = [x for x in self.comma_sep.split(tag_field) if x == _force_ascii(x)]\n",
"return tags\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_2(self):...\n",
"return self.name\n"
] | [
"def get_name(self):...\n",
"return self.name\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_1(self, VAR_11):...\n",
"VAR_17 = self.bot\n",
"VAR_18 = VAR_11.message\n",
"VAR_19 = VAR_18.text\n",
"VAR_20 = VAR_18.message_id\n",
"VAR_15 = VAR_18.chat_id\n",
"VAR_21 = self.userparams\n",
"VAR_21.initializeUser(VAR_15=chat_id, VAR_28=INITIAL_SUBSCRIBER_PARAMS)\n",
"VAR_22 = LanguageSupport(VAR_21.getEntry(VAR_15=chat_id, param='lang'))\n",
"VAR_23 = VAR_22.languageSupport\n",
"VAR_24 = VAR_22.allVariants\n",
"VAR_16 = VAR_23(getMainMenu(VAR_21.getEntry(VAR_15=chat_id, param=\n 'subscribed')))\n",
"if VAR_19 == '/start':\n",
"VAR_17.sendMessage(VAR_15=chat_id, VAR_19=lS(START_MESSAGE), key_markup=MMKM)\n",
"if VAR_19 == '/help' or VAR_19 == HELP_BUTTON:\n",
"VAR_17.sendMessage(VAR_15=chat_id, VAR_19=lS(HELP_MESSAGE).format(str(\n MIN_PICTURE_SEND_PERIOD), str(MAX_PICTURE_SEND_PERIOD)), key_markup=\n MMKM, markdown=True)\n",
"if VAR_19 == '/about' or VAR_19 == ABOUT_BUTTON:\n",
"VAR_17.sendMessage(VAR_15=chat_id, VAR_19=lS(ABOUT_MESSAGE).format('.'.join\n ([str(i) for i in VERSION_NUMBER])), key_markup=MMKM, markdown=True)\n",
"if VAR_19 == '/otherbots' or VAR_19 == VAR_23(OTHER_BOTS_BUTTON):\n",
"VAR_17.sendMessage(VAR_15=chat_id, VAR_19=lS(OTHER_BOTS_MESSAGE),\n key_markup=MMKM, markdown=True)\n",
"if VAR_19 == '/period' or VAR_19 == VAR_23(SHOW_PERIOD_BUTTON):\n",
"VAR_40 = self.userparams.getEntry(VAR_15, 'period')\n",
"if VAR_19 == '/subscribe' or VAR_19 == SUBSCRIBE_BUTTON:\n",
"VAR_17.sendMessage(VAR_15=chat_id, VAR_19=\n 'An image is sent to you every {0} seconds.'.format(period), key_markup\n =MMKM)\n",
"VAR_40 = self.userparams.getEntry(VAR_15, 'period')\n",
"if VAR_19 == '/unsubscribe' or VAR_19 == UNSUBSCRIBE_BUTTON:\n",
"if self.userparams.getEntry(VAR_15, 'subscribed') == 0:\n",
"if self.userparams.getEntry(VAR_15, 'subscribed') == 1:\n",
"if VAR_19 == '/gimmepic' or VAR_19 == GIMMEPIC_BUTTON:\n",
"self.userparams.setEntry(VAR_15, 'subscribed', 1)\n",
"VAR_17.sendMessage(VAR_15=chat_id, VAR_19=\n \"\"\"You have already subscribed!\nTo cancel subscription enter /unsubscribe.\nTo change the period of picture sending type a number.\nYour current period is {0} seconds.\"\"\"\n .format(period), key_markup=MMKM)\n",
"self.userparams.setEntry(VAR_15, 'subscribed', 0)\n",
"VAR_17.sendMessage(VAR_15=chat_id, VAR_19=\n \"You haven't subscribed yet! To subscribe type /subscribe\", key_markup=MMKM\n )\n",
"self.startRandomPicThread(VAR_15, VAR_16)\n",
"VAR_41 = int(VAR_19)\n",
"VAR_17.sendMessage(VAR_15=chat_id, VAR_19='Unknown command!', key_markup=MMKM)\n",
"self.userparams.setEntry(VAR_15, 'last_update_time', time())\n",
"VAR_16 = getMainMenu(subscribed=False)\n",
"if self.userparams.getEntry(VAR_15, 'subscribed') == 0:\n",
"VAR_16 = getMainMenu(subscribed=True)\n",
"VAR_17.sendMessage(VAR_15=chat_id, VAR_19=\n 'You have unsubscribed. To subscribe again type /subscribe', key_markup\n =MMKM)\n",
"VAR_17.sendMessage(VAR_15=chat_id, VAR_19=\n \"You're not subscribed yet! /subscribe first!\", key_markup=MMKM)\n",
"if VAR_41 < VAR_4:\n",
"VAR_17.sendMessage(VAR_15=chat_id, VAR_19=\n \"\"\"You're subscribed now! \nAn image will be sent to you every {0} seconds. \nTo cancel subscription enter /unsubscribe. \nTo change the period of picture sending type a number.\"\"\"\n .format(period), key_markup=MMKM)\n",
"self.userparams.setEntry(VAR_15, 'period', VAR_4)\n",
"if VAR_41 > VAR_5:\n",
"VAR_17.sendMessage(VAR_15=chat_id, VAR_19=\n \"\"\"The minimum possible period is {0}.\nSetting period to {0}.\"\"\".format\n (str(MIN_PICTURE_SEND_PERIOD)), key_markup=MMKM)\n",
"self.userparams.setEntry(VAR_15, 'period', VAR_5)\n",
"self.userparams.setEntry(VAR_15, 'period', VAR_41)\n",
"self.userparams.setEntry(VAR_15, 'last_update_time', int(time()))\n",
"VAR_17.sendMessage(VAR_15=chat_id, VAR_19=\n \"\"\"The maximum possible period is {0}.\nSetting period to {0}.\"\"\".format\n (str(MAX_PICTURE_SEND_PERIOD)), key_markup=MMKM)\n",
"VAR_17.sendMessage(VAR_15=chat_id, VAR_19='Setting period to ' + str(\n new_period) + '.', key_markup=MMKM)\n"
] | [
"def processUpdate(self, u):...\n",
"bot = self.bot\n",
"Message = u.message\n",
"message = Message.text\n",
"message_id = Message.message_id\n",
"chat_id = Message.chat_id\n",
"subs = self.userparams\n",
"subs.initializeUser(chat_id=chat_id, data=INITIAL_SUBSCRIBER_PARAMS)\n",
"LS = LanguageSupport(subs.getEntry(chat_id=chat_id, param='lang'))\n",
"lS = LS.languageSupport\n",
"allv = LS.allVariants\n",
"MMKM = lS(getMainMenu(subs.getEntry(chat_id=chat_id, param='subscribed')))\n",
"if message == '/start':\n",
"bot.sendMessage(chat_id=chat_id, message=lS(START_MESSAGE), key_markup=MMKM)\n",
"if message == '/help' or message == HELP_BUTTON:\n",
"bot.sendMessage(chat_id=chat_id, message=lS(HELP_MESSAGE).format(str(\n MIN_PICTURE_SEND_PERIOD), str(MAX_PICTURE_SEND_PERIOD)), key_markup=\n MMKM, markdown=True)\n",
"if message == '/about' or message == ABOUT_BUTTON:\n",
"bot.sendMessage(chat_id=chat_id, message=lS(ABOUT_MESSAGE).format('.'.join(\n [str(i) for i in VERSION_NUMBER])), key_markup=MMKM, markdown=True)\n",
"if message == '/otherbots' or message == lS(OTHER_BOTS_BUTTON):\n",
"bot.sendMessage(chat_id=chat_id, message=lS(OTHER_BOTS_MESSAGE), key_markup\n =MMKM, markdown=True)\n",
"if message == '/period' or message == lS(SHOW_PERIOD_BUTTON):\n",
"period = self.userparams.getEntry(chat_id, 'period')\n",
"if message == '/subscribe' or message == SUBSCRIBE_BUTTON:\n",
"bot.sendMessage(chat_id=chat_id, message=\n 'An image is sent to you every {0} seconds.'.format(period), key_markup\n =MMKM)\n",
"period = self.userparams.getEntry(chat_id, 'period')\n",
"if message == '/unsubscribe' or message == UNSUBSCRIBE_BUTTON:\n",
"if self.userparams.getEntry(chat_id, 'subscribed') == 0:\n",
"if self.userparams.getEntry(chat_id, 'subscribed') == 1:\n",
"if message == '/gimmepic' or message == GIMMEPIC_BUTTON:\n",
"self.userparams.setEntry(chat_id, 'subscribed', 1)\n",
"bot.sendMessage(chat_id=chat_id, message=\n \"\"\"You have already subscribed!\nTo cancel subscription enter /unsubscribe.\nTo change the period of picture sending type a number.\nYour current period is {0} seconds.\"\"\"\n .format(period), key_markup=MMKM)\n",
"self.userparams.setEntry(chat_id, 'subscribed', 0)\n",
"bot.sendMessage(chat_id=chat_id, message=\n \"You haven't subscribed yet! To subscribe type /subscribe\", key_markup=MMKM\n )\n",
"self.startRandomPicThread(chat_id, MMKM)\n",
"new_period = int(message)\n",
"bot.sendMessage(chat_id=chat_id, message='Unknown command!', key_markup=MMKM)\n",
"self.userparams.setEntry(chat_id, 'last_update_time', time())\n",
"MMKM = getMainMenu(subscribed=False)\n",
"if self.userparams.getEntry(chat_id, 'subscribed') == 0:\n",
"MMKM = getMainMenu(subscribed=True)\n",
"bot.sendMessage(chat_id=chat_id, message=\n 'You have unsubscribed. To subscribe again type /subscribe', key_markup\n =MMKM)\n",
"bot.sendMessage(chat_id=chat_id, message=\n \"You're not subscribed yet! /subscribe first!\", key_markup=MMKM)\n",
"if new_period < MIN_PICTURE_SEND_PERIOD:\n",
"bot.sendMessage(chat_id=chat_id, message=\n \"\"\"You're subscribed now! \nAn image will be sent to you every {0} seconds. \nTo cancel subscription enter /unsubscribe. \nTo change the period of picture sending type a number.\"\"\"\n .format(period), key_markup=MMKM)\n",
"self.userparams.setEntry(chat_id, 'period', MIN_PICTURE_SEND_PERIOD)\n",
"if new_period > MAX_PICTURE_SEND_PERIOD:\n",
"bot.sendMessage(chat_id=chat_id, message=\n \"\"\"The minimum possible period is {0}.\nSetting period to {0}.\"\"\".format\n (str(MIN_PICTURE_SEND_PERIOD)), key_markup=MMKM)\n",
"self.userparams.setEntry(chat_id, 'period', MAX_PICTURE_SEND_PERIOD)\n",
"self.userparams.setEntry(chat_id, 'period', new_period)\n",
"self.userparams.setEntry(chat_id, 'last_update_time', int(time()))\n",
"bot.sendMessage(chat_id=chat_id, message=\n \"\"\"The maximum possible period is {0}.\nSetting period to {0}.\"\"\".format\n (str(MAX_PICTURE_SEND_PERIOD)), key_markup=MMKM)\n",
"bot.sendMessage(chat_id=chat_id, message='Setting period to ' + str(\n new_period) + '.', key_markup=MMKM)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_15(self, VAR_8, VAR_11):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = \"UPDATE {0} SET file_id='{1}' WHERE path='{2}'\".format(VAR_2,\n VAR_11, VAR_8)\n",
"self._run_command(VAR_7)\n"
] | [
"def updateCacheID(self, pth, cacheID):...\n",
"\"\"\"docstring\"\"\"\n",
"command = \"UPDATE {0} SET file_id='{1}' WHERE path='{2}'\".format(TABLE_NAME,\n cacheID, pth)\n",
"self._run_command(command)\n"
] | [
0,
0,
4,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"def FUNC_9(VAR_8):...\n",
"return urllib.parse.unquote_plus(VAR_8)\n"
] | [
"def url_unescape(url):...\n",
"return urllib.parse.unquote_plus(url)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_1(VAR_1, VAR_6):...\n",
""
] | [
"def directory_index(path, fullpath):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def __init__(self, *VAR_4, **VAR_5):...\n",
"super(CLASS_0, self).__init__(*VAR_4, **kwargs)\n",
"self.configuration.append_config_values(VAR_1)\n",
"self._group_ip = None\n",
"self.sshpool = None\n"
] | [
"def __init__(self, *args, **kwargs):...\n",
"super(DellEQLSanISCSIDriver, self).__init__(*args, **kwargs)\n",
"self.configuration.append_config_values(eqlx_opts)\n",
"self._group_ip = None\n",
"self.sshpool = None\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Assign'",
"Assign'"
] |
[
"def FUNC_0(VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_2 <= VAR_23(VAR_1):\n",
"return VAR_1[:VAR_2]\n",
"VAR_3 = (VAR_2 + VAR_23(VAR_1) - 1) / VAR_23(VAR_1)\n",
"VAR_4 = VAR_1 * VAR_3\n",
"return VAR_4[:VAR_2]\n"
] | [
"def _generate_test_string(length):...\n",
"\"\"\"docstring\"\"\"\n",
"if length <= len(_TESTSTR):\n",
"return _TESTSTR[:length]\n",
"c = (length + len(_TESTSTR) - 1) / len(_TESTSTR)\n",
"v = _TESTSTR * c\n",
"return v[:length]\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_25(self, VAR_73):...\n",
"def FUNC_39(VAR_101):...\n",
"VAR_101.message = VAR_73\n",
"return VAR_101\n"
] | [
"def message(self, message):...\n",
"def decorate(ruleinfo):...\n",
"ruleinfo.message = message\n",
"return ruleinfo\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def __init__(self, VAR_0, VAR_1):...\n",
"self.worker = VAR_0\n",
"self.mode = VAR_1\n",
"self.redis_client = VAR_0.redis_client\n"
] | [
"def __init__(self, worker, mode):...\n",
"self.worker = worker\n",
"self.mode = mode\n",
"self.redis_client = worker.redis_client\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_12(self):...\n",
"VAR_4 = 'hubba-bubba'\n",
"def FUNC_29():...\n",
"self.assertIn('reauthn-for-chpass', VAR_32)\n",
"VAR_29 = VAR_32['reauthn-for-chpass']\n",
"VAR_30 = int(time.time())\n",
"self.assertTrue(VAR_30 - VAR_29 < 5)\n",
"self.acs('/chpass', VAR_4, FUNC_29)\n"
] | [
"def test_chpass_assertion_consumer_service(self):...\n",
"eppn = 'hubba-bubba'\n",
"def _check():...\n",
"self.assertIn('reauthn-for-chpass', session)\n",
"then = session['reauthn-for-chpass']\n",
"now = int(time.time())\n",
"self.assertTrue(now - then < 5)\n",
"self.acs('/chpass', eppn, _check)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_20(self):...\n",
"VAR_14 = self.new_mocked_client(client.NSX3Client)\n",
"VAR_14.get('ports')\n",
"FUNC_2('get', VAR_14, 'https://1.2.3.4/api/v1/ports')\n"
] | [
"def test_get_resource(self):...\n",
"api = self.new_mocked_client(client.NSX3Client)\n",
"api.get('ports')\n",
"assert_json_call('get', api, 'https://1.2.3.4/api/v1/ports')\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_8(self):...\n",
"url_helper.urllib2.urlopen(mox.IgnoreArg(), mox.IgnoreArg(), timeout=mox.\n IgnoreArg()).AndRaise(urllib2.URLError('url'))\n",
"self._mox.ReplayAll()\n",
"self.assertIsNone(url_helper.UrlOpen('url', max_tries=1))\n",
"self._mox.VerifyAll()\n"
] | [
"def testUrlOpenFailure(self):...\n",
"url_helper.urllib2.urlopen(mox.IgnoreArg(), mox.IgnoreArg(), timeout=mox.\n IgnoreArg()).AndRaise(urllib2.URLError('url'))\n",
"self._mox.ReplayAll()\n",
"self.assertIsNone(url_helper.UrlOpen('url', max_tries=1))\n",
"self._mox.VerifyAll()\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_3(VAR_3, VAR_4=None):...\n",
"VAR_0 = CLASS_0(VAR_3)\n",
"VAR_0.rule = VAR_4\n",
"return VAR_0\n"
] | [
"def IOFile(file, rule=None):...\n",
"f = _IOFile(file)\n",
"f.rule = rule\n",
"return f\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"VAR_0 = get_connection()\n",
"VAR_1 = VAR_0.cursor()\n",
"VAR_1.execute(\n f\"insert into visitors (ip_address, user_agent, referrer, full_path, visit_time) values ('{self.ip_address}', '{self.user_agent}', '{self.referrer}', '{self.full_path}', '{self.visit_time}');\"\n )\n",
"VAR_0.commit()\n",
"VAR_0.close()\n",
"return 0\n"
] | [
"def on_save(self):...\n",
"connection = get_connection()\n",
"cursor = connection.cursor()\n",
"cursor.execute(\n f\"insert into visitors (ip_address, user_agent, referrer, full_path, visit_time) values ('{self.ip_address}', '{self.user_agent}', '{self.referrer}', '{self.full_path}', '{self.visit_time}');\"\n )\n",
"connection.commit()\n",
"connection.close()\n",
"return 0\n"
] | [
0,
0,
0,
4,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_11(self):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
"def get_entity(self):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
0,
0
] | [
"FunctionDef'",
"Docstring"
] |
[
"def FUNC_4(self):...\n",
"VAR_2 = '/api/apps'\n",
"VAR_5 = {'id': 'autotest'}\n",
"VAR_3 = self.client.post(VAR_2, json.dumps(VAR_5), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_3.status_code, 201)\n",
"VAR_4 = VAR_3.data['id']\n",
"if not os.path.exists(settings.DEIS_LOG_DIR):\n",
"os.mkdir(settings.DEIS_LOG_DIR)\n",
"VAR_6 = os.path.join(settings.DEIS_LOG_DIR, VAR_4 + '.log')\n",
"if os.path.exists(VAR_6):\n",
"os.remove(VAR_6)\n",
"VAR_2 = '/api/apps/{app_id}/logs'.format(**locals())\n",
"VAR_3 = self.client.get(VAR_2)\n",
"self.assertEqual(VAR_3.status_code, 204)\n",
"self.assertEqual(VAR_3.data, 'No logs for {}'.format(VAR_4))\n",
"f.write(VAR_0)\n",
"VAR_3 = self.client.get(VAR_2)\n",
"self.assertEqual(VAR_3.status_code, 200)\n",
"self.assertEqual(VAR_3.data, VAR_0)\n",
"os.remove(VAR_6)\n",
"VAR_2 = '/api/apps/{app_id}/run'.format(**locals())\n",
"VAR_5 = {'command': 'ls -al'}\n",
"VAR_3 = self.client.post(VAR_2, json.dumps(VAR_5), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_3.status_code, 200)\n",
"self.assertEqual(VAR_3.data[0], 0)\n",
"os.remove(VAR_6)\n"
] | [
"def test_app_actions(self):...\n",
"url = '/api/apps'\n",
"body = {'id': 'autotest'}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"if not os.path.exists(settings.DEIS_LOG_DIR):\n",
"os.mkdir(settings.DEIS_LOG_DIR)\n",
"path = os.path.join(settings.DEIS_LOG_DIR, app_id + '.log')\n",
"if os.path.exists(path):\n",
"os.remove(path)\n",
"url = '/api/apps/{app_id}/logs'.format(**locals())\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 204)\n",
"self.assertEqual(response.data, 'No logs for {}'.format(app_id))\n",
"f.write(FAKE_LOG_DATA)\n",
"response = self.client.get(url)\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(response.data, FAKE_LOG_DATA)\n",
"os.remove(path)\n",
"url = '/api/apps/{app_id}/run'.format(**locals())\n",
"body = {'command': 'ls -al'}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 200)\n",
"self.assertEqual(response.data[0], 0)\n",
"os.remove(path)\n"
] | [
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_4(self):...\n",
"self.expected_requests([('http://localhost/swarming/api/v1/bot/handshake',\n {'data': {'attributes': 'b'}, 'headers': {'X-XSRF-Token-Request': '1'}},\n {'expiration_sec': 100, 'ignored': True, 'xsrf_token': 'token'}), (\n 'http://localhost/a', {'data': {'foo': 'bar'}, 'headers': {\n 'X-XSRF-Token': 'token'}}, 'foo', None)])\n",
"VAR_0 = xsrf_client.XsrfRemote('http://localhost/',\n '/swarming/api/v1/bot/handshake')\n",
"VAR_0.xsrf_request_params = {'attributes': 'b'}\n",
"self.assertEqual('foo', VAR_0.url_read('/a', data={'foo': 'bar'}))\n"
] | [
"def testXsrfRemoteCustom(self):...\n",
"self.expected_requests([('http://localhost/swarming/api/v1/bot/handshake',\n {'data': {'attributes': 'b'}, 'headers': {'X-XSRF-Token-Request': '1'}},\n {'expiration_sec': 100, 'ignored': True, 'xsrf_token': 'token'}), (\n 'http://localhost/a', {'data': {'foo': 'bar'}, 'headers': {\n 'X-XSRF-Token': 'token'}}, 'foo', None)])\n",
"remote = xsrf_client.XsrfRemote('http://localhost/',\n '/swarming/api/v1/bot/handshake')\n",
"remote.xsrf_request_params = {'attributes': 'b'}\n",
"self.assertEqual('foo', remote.url_read('/a', data={'foo': 'bar'}))\n"
] | [
0,
5,
5,
5,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"@cached_property...\n",
"return self.child_branch or self.child_leaf\n"
] | [
"@cached_property...\n",
"return self.child_branch or self.child_leaf\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def __init__(self, VAR_16):...\n",
"self.flags = dict()\n"
] | [
"def __init__(self, value):...\n",
"self.flags = dict()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_1(VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_8 = {'Authorization': 'token ' + os.environ['GITHUB_TOKEN'],\n 'Content-Length': '0'}\n",
"VAR_9 = os.environ['BOT_USERNAME'], os.environ['BOT_PASSWORD']\n",
"VAR_10 = 'https://api.github.com/user/following/{}'\n",
"VAR_10 = VAR_10.format(VAR_1)\n",
"VAR_11 = requests.put(VAR_10, VAR_8=headers, VAR_9=auth)\n"
] | [
"def follow_user(user):...\n",
"\"\"\"docstring\"\"\"\n",
"headers = {'Authorization': 'token ' + os.environ['GITHUB_TOKEN'],\n 'Content-Length': '0'}\n",
"auth = os.environ['BOT_USERNAME'], os.environ['BOT_PASSWORD']\n",
"url = 'https://api.github.com/user/following/{}'\n",
"url = url.format(user)\n",
"r = requests.put(url, headers=headers, auth=auth)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_7(self):...\n",
"VAR_7, VAR_23, VAR_19, VAR_5 = super().deconstruct()\n",
"if VAR_23 == 'django.contrib.postgres.fields.array.ArrayField':\n",
"VAR_23 = 'django.contrib.postgres.fields.ArrayField'\n",
"VAR_5.update({'base_field': self.base_field.clone(), 'size': self.size})\n",
"return VAR_7, VAR_23, VAR_19, VAR_5\n"
] | [
"def deconstruct(self):...\n",
"name, path, args, kwargs = super().deconstruct()\n",
"if path == 'django.contrib.postgres.fields.array.ArrayField':\n",
"path = 'django.contrib.postgres.fields.ArrayField'\n",
"kwargs.update({'base_field': self.base_field.clone(), 'size': self.size})\n",
"return name, path, args, kwargs\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_9(self, *VAR_15, **VAR_75):...\n",
""
] | [
"def newfn(self, *a, **env):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_5(self, VAR_10):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5 = {}\n",
"VAR_5['clusterName'] = self.configuration.san_clustername\n",
"if self.configuration.san_thin_provision:\n",
"VAR_5['thinProvision'] = '1'\n",
"VAR_5['thinProvision'] = '0'\n",
"VAR_5['volumeName'] = VAR_10['name']\n",
"if int(VAR_10['size']) == 0:\n",
"VAR_5['size'] = '100MB'\n",
"VAR_5['size'] = '%sGB' % VAR_10['size']\n",
"self._cliq_run_xml('createVolume', VAR_5)\n",
"VAR_27 = self._cliq_get_volume_info(VAR_10['name'])\n",
"VAR_8 = VAR_27['volume.clusterName']\n",
"VAR_28 = VAR_27['volume.iscsiIqn']\n",
"VAR_29 = '1'\n",
"if not self.cluster_vip:\n",
"self.cluster_vip = self._cliq_get_cluster_vip(VAR_8)\n",
"VAR_30 = self.cluster_vip + ':3260,' + VAR_29\n",
"VAR_31 = {}\n",
"VAR_31['provider_location'] = '%s %s %s' % (VAR_30, VAR_28, 0)\n",
"return VAR_31\n"
] | [
"def create_volume(self, volume):...\n",
"\"\"\"docstring\"\"\"\n",
"cliq_args = {}\n",
"cliq_args['clusterName'] = self.configuration.san_clustername\n",
"if self.configuration.san_thin_provision:\n",
"cliq_args['thinProvision'] = '1'\n",
"cliq_args['thinProvision'] = '0'\n",
"cliq_args['volumeName'] = volume['name']\n",
"if int(volume['size']) == 0:\n",
"cliq_args['size'] = '100MB'\n",
"cliq_args['size'] = '%sGB' % volume['size']\n",
"self._cliq_run_xml('createVolume', cliq_args)\n",
"volume_info = self._cliq_get_volume_info(volume['name'])\n",
"cluster_name = volume_info['volume.clusterName']\n",
"iscsi_iqn = volume_info['volume.iscsiIqn']\n",
"cluster_interface = '1'\n",
"if not self.cluster_vip:\n",
"self.cluster_vip = self._cliq_get_cluster_vip(cluster_name)\n",
"iscsi_portal = self.cluster_vip + ':3260,' + cluster_interface\n",
"model_update = {}\n",
"model_update['provider_location'] = '%s %s %s' % (iscsi_portal, iscsi_iqn, 0)\n",
"return model_update\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def __init__(self, VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"self.hostname = VAR_3\n",
"self.message = 'Lost connection to remote host %s' % VAR_3\n"
] | [
"def __init__(self, hostname):...\n",
"\"\"\"docstring\"\"\"\n",
"self.hostname = hostname\n",
"self.message = 'Lost connection to remote host %s' % hostname\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'"
] |
[
"def FUNC_8(VAR_15, VAR_16=None, VAR_17=None):...\n",
"VAR_34 = FUNC_0()\n",
"return VAR_34.do_request(VAR_6 + VAR_15, VAR_16, VAR_17)\n"
] | [
"def admin_req(path, args=None, files=None):...\n",
"browser = get_aws_browser()\n",
"return browser.do_request(AWS_BASE_URL + path, args, files)\n"
] | [
0,
0,
5
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"@VAR_2.route('/data/dailyjobs/')...\n",
"VAR_24, VAR_23 = FUNC_4(request.args)\n",
"VAR_28 = FUNC_0()\n",
"VAR_32 = VAR_28.cursor()\n",
"VAR_41 = 'string' % (VAR_24, VAR_23)\n",
"VAR_32.execute(VAR_41)\n",
"VAR_62 = {}\n",
"for rows in VAR_32.fetchall():\n",
"VAR_52 = str(rows[0])\n",
"return {'dailyjobs': VAR_62}\n",
"VAR_38 = rows[1]\n",
"VAR_55 = rows[2]\n",
"VAR_78 = int(rows[3])\n",
"VAR_79 = int(rows[4])\n",
"VAR_80 = int(rows[5])\n",
"if VAR_52 not in VAR_62:\n",
"VAR_62[VAR_52] = {'mozilla-inbound': [], 'fx-team': [], 'try': [],\n 'autoland': []}\n",
"if 'mozilla-inbound' in VAR_55:\n",
"VAR_62[VAR_52]['mozilla-inbound'].append([VAR_38, VAR_78, VAR_79, VAR_80])\n",
"if 'fx-team' in VAR_55:\n",
"VAR_62[VAR_52]['fx-team'].append([VAR_38, VAR_78, VAR_79, VAR_80])\n",
"if 'try' in VAR_55:\n",
"VAR_62[VAR_52]['try'].append([VAR_38, VAR_78, VAR_79, VAR_80])\n",
"if 'autoland' in VAR_55:\n",
"VAR_62[VAR_52]['autoland'].append([VAR_38, VAR_78, VAR_79, VAR_80])\n"
] | [
"@app.route('/data/dailyjobs/')...\n",
"start_date, end_date = clean_date_params(request.args)\n",
"db = create_db_connnection()\n",
"cursor = db.cursor()\n",
"query = (\n \"select date, platform, branch, numpushes, numjobs, sumduration from dailyjobs where date>='%s' and date <='%s' order by case platform when 'linux' then 1 when 'osx' then 2 when 'win' then 3 when 'android' then 4 end\"\n % (start_date, end_date))\n",
"cursor.execute(query)\n",
"output = {}\n",
"for rows in cursor.fetchall():\n",
"date = str(rows[0])\n",
"return {'dailyjobs': output}\n",
"platform = rows[1]\n",
"branch = rows[2]\n",
"numpushes = int(rows[3])\n",
"numjobs = int(rows[4])\n",
"sumduration = int(rows[5])\n",
"if date not in output:\n",
"output[date] = {'mozilla-inbound': [], 'fx-team': [], 'try': [], 'autoland': []\n }\n",
"if 'mozilla-inbound' in branch:\n",
"output[date]['mozilla-inbound'].append([platform, numpushes, numjobs,\n sumduration])\n",
"if 'fx-team' in branch:\n",
"output[date]['fx-team'].append([platform, numpushes, numjobs, sumduration])\n",
"if 'try' in branch:\n",
"output[date]['try'].append([platform, numpushes, numjobs, sumduration])\n",
"if 'autoland' in branch:\n",
"output[date]['autoland'].append([platform, numpushes, numjobs, sumduration])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_2(VAR_4):...\n",
"VAR_14 = VAR_4['request']\n",
"VAR_4['_'] = VAR_14.translate\n",
"VAR_4['localizer'] = VAR_14.localizer\n",
"VAR_4['h'] = VAR_11\n"
] | [
"def add_render_globals_to_template(event):...\n",
"request = event['request']\n",
"event['_'] = request.translate\n",
"event['localizer'] = request.localizer\n",
"event['h'] = template_helpers\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"@FUNC_7...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_16.pop(VAR_17(VAR_1.name) + '_autoregister', None)\n",
"VAR_5 = FUNC_1(VAR_1, VAR_2)\n",
"VAR_19 = signup_handlers[VAR_1.name]\n",
"if not current_user.is_authenticated():\n",
"VAR_27 = VAR_19['info'](VAR_2)\n",
"if not VAR_5.remote_account.extra_data and VAR_1.name in signup_handlers:\n",
"VAR_11 = oauth_get_user(VAR_1.consumer_key, VAR_27=account_info,\n access_token=token_getter(remote)[0])\n",
"VAR_19['setup'](VAR_5)\n",
"if request.args.get('next', None):\n",
"if VAR_11 is None:\n",
"return redirect(request.args.get('next'))\n",
"return redirect('/')\n",
"VAR_11 = oauth_register(VAR_27)\n",
"if not oauth_authenticate(VAR_1.consumer_key, VAR_11, require_existing_link\n",
"if VAR_11 is None:\n",
"return current_app.login_manager.unauthorized()\n",
"VAR_5 = FUNC_1(VAR_1, VAR_2)\n",
"VAR_16[VAR_17(VAR_1.name) + '_autoregister'] = True\n",
"VAR_16[VAR_17(VAR_1.name) + '_account_info'] = VAR_27\n",
"return redirect(url_for('.signup', VAR_0=remote.name, next=request.args.get\n ('next', '/')))\n"
] | [
"@oauth_error_handler...\n",
"\"\"\"docstring\"\"\"\n",
"session.pop(token_session_key(remote.name) + '_autoregister', None)\n",
"token = response_token_setter(remote, resp)\n",
"handlers = signup_handlers[remote.name]\n",
"if not current_user.is_authenticated():\n",
"account_info = handlers['info'](resp)\n",
"if not token.remote_account.extra_data and remote.name in signup_handlers:\n",
"user = oauth_get_user(remote.consumer_key, account_info=account_info,\n access_token=token_getter(remote)[0])\n",
"handlers['setup'](token)\n",
"if request.args.get('next', None):\n",
"if user is None:\n",
"return redirect(request.args.get('next'))\n",
"return redirect('/')\n",
"user = oauth_register(account_info)\n",
"if not oauth_authenticate(remote.consumer_key, user, require_existing_link=\n",
"if user is None:\n",
"return current_app.login_manager.unauthorized()\n",
"token = response_token_setter(remote, resp)\n",
"session[token_session_key(remote.name) + '_autoregister'] = True\n",
"session[token_session_key(remote.name) + '_account_info'] = account_info\n",
"return redirect(url_for('.signup', remote_app=remote.name, next=request.\n args.get('next', '/')))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
5,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
5
] | [
"Condition",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Return'",
"Return'",
"Assign'",
"Condition",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_5(self, VAR_3, VAR_4, VAR_5, VAR_6, VAR_7=None):...\n",
"VAR_12 = ''\n",
"if VAR_3 is None:\n",
"VAR_29 = VAR_4\n",
"VAR_12 += self._formatCredentials(VAR_3, VAR_8='src')\n",
"if VAR_5 is None:\n",
"VAR_29 = 'src:{}'.format(VAR_4)\n",
"VAR_30 = VAR_6\n",
"VAR_12 += self._formatCredentials(VAR_5, VAR_8='dst')\n",
"VAR_9 = '{credentials} rclone copy {src} {dst} --progress --stats 2s '.format(\n VAR_12=credentials, VAR_29=src, VAR_30=dst)\n",
"VAR_30 = 'dst:{}'.format(VAR_6)\n",
"logging.info(FUNC_0(VAR_9))\n",
"if VAR_7 is None:\n",
"VAR_7 = self._get_next_job_id()\n",
"if self._job_id_exists(VAR_7):\n",
"self._stop_events[VAR_7] = threading.Event()\n",
"self._execute_interactive(VAR_9, VAR_7)\n",
"return VAR_7\n"
] | [
"def copy(self, src_data, src_path, dst_data, dst_path, job_id=None):...\n",
"credentials = ''\n",
"if src_data is None:\n",
"src = src_path\n",
"credentials += self._formatCredentials(src_data, name='src')\n",
"if dst_data is None:\n",
"src = 'src:{}'.format(src_path)\n",
"dst = dst_path\n",
"credentials += self._formatCredentials(dst_data, name='dst')\n",
"command = ('{credentials} rclone copy {src} {dst} --progress --stats 2s '.\n format(credentials=credentials, src=src, dst=dst))\n",
"dst = 'dst:{}'.format(dst_path)\n",
"logging.info(sanitize(command))\n",
"if job_id is None:\n",
"job_id = self._get_next_job_id()\n",
"if self._job_id_exists(job_id):\n",
"self._stop_events[job_id] = threading.Event()\n",
"self._execute_interactive(command, job_id)\n",
"return job_id\n"
] | [
0,
2,
0,
0,
2,
0,
0,
0,
2,
2,
0,
2,
0,
0,
0,
0,
2,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"AugAssign'",
"Condition",
"Assign'",
"Assign'",
"AugAssign'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Return'"
] |
[
"def __init__(self, VAR_2, VAR_3=None):...\n",
"super(CLASS_1, self).__init__(VAR_3)\n",
"self.authenticated = VAR_2\n"
] | [
"def __init__(self, authenticated, exceptions=None):...\n",
"super(RdpPollResult, self).__init__(exceptions)\n",
"self.authenticated = authenticated\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'"
] |
[
"def FUNC_10(VAR_5):...\n",
"if VAR_5 == []:\n",
"return 'true'\n",
"VAR_11 = '('\n",
"for i in range(0, len(VAR_5) - 1):\n",
"VAR_11 += VAR_5[i] + '=1 or '\n",
"VAR_11 += VAR_5[len(VAR_5) - 1] + ' = 1 ) '\n",
"return VAR_11\n"
] | [
"def makeBoolean(list):...\n",
"if list == []:\n",
"return 'true'\n",
"result = '('\n",
"for i in range(0, len(list) - 1):\n",
"result += list[i] + '=1 or '\n",
"result += list[len(list) - 1] + ' = 1 ) '\n",
"return result\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"For",
"AugAssign'",
"AugAssign'",
"Return'"
] |
[
"def FUNC_5(self, VAR_7):...\n",
"if not VAR_7:\n",
"return []\n",
"def FUNC_9(VAR_1):...\n",
"return {'name': str(VAR_1), 'link': VAR_1.get_absolute_url()}\n"
] | [
"def _generate_courses(self, profile):...\n",
"if not profile:\n",
"return []\n",
"def course_entry(instance):...\n",
"return {'name': str(instance), 'link': instance.get_absolute_url()}\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"FunctionDef'",
"Return'"
] |
[
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._raylet_socket_name\n"
] | [
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._raylet_socket_name\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_2(self, VAR_4, *VAR_5, **VAR_6):...\n",
"return self.to_python(VAR_4)\n"
] | [
"def from_db_value(self, value, *args, **kwargs):...\n",
"return self.to_python(value)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_9(self, *VAR_15, **VAR_75):...\n",
""
] | [
"def newfn(self, *a, **env):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_9(self, VAR_8):...\n",
"return {key: value for key, value in VAR_8.items() if key in self.\n _backend_kwargs}\n"
] | [
"def filter_kwargs_for_backend(self, kwargs):...\n",
"return {key: value for key, value in kwargs.items() if key in self.\n _backend_kwargs}\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def __init__(self, *VAR_0, VAR_1=None, VAR_2=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if len(VAR_0) == 2:\n",
"VAR_10, VAR_41 = VAR_0\n",
"if len(VAR_0) == 1:\n",
"self.name = VAR_10\n",
"VAR_17 = VAR_0[0]\n",
"self.workflow = VAR_41\n",
"self.name = VAR_17.name\n",
"self.docstring = None\n",
"self.workflow = VAR_17.workflow\n",
"self.message = None\n",
"self.docstring = VAR_17.docstring\n",
"self._input = InputFiles()\n",
"self.message = VAR_17.message\n",
"self._output = OutputFiles()\n",
"self._input = InputFiles(VAR_17._input)\n",
"self._params = Params()\n",
"self._output = OutputFiles(VAR_17._output)\n",
"self.dependencies = dict()\n",
"self._params = Params(VAR_17._params)\n",
"self.dynamic_output = set()\n",
"self.dependencies = dict(VAR_17.dependencies)\n",
"self.dynamic_input = set()\n",
"self.dynamic_output = set(VAR_17.dynamic_output)\n",
"self.temp_output = set()\n",
"self.dynamic_input = set(VAR_17.dynamic_input)\n",
"self.protected_output = set()\n",
"self.temp_output = set(VAR_17.temp_output)\n",
"self.touch_output = set()\n",
"self.protected_output = set(VAR_17.protected_output)\n",
"self.subworkflow_input = dict()\n",
"self.touch_output = set(VAR_17.touch_output)\n",
"self.resources = dict(_cores=1, _nodes=1)\n",
"self.subworkflow_input = dict(VAR_17.subworkflow_input)\n",
"self.priority = 0\n",
"self.resources = VAR_17.resources\n",
"self.version = None\n",
"self.priority = VAR_17.priority\n",
"self._log = Log()\n",
"self.version = VAR_17.version\n",
"self._benchmark = None\n",
"self._log = VAR_17._log\n",
"self.wildcard_names = set()\n",
"self._benchmark = VAR_17._benchmark\n",
"self.lineno = VAR_1\n",
"self.wildcard_names = set(VAR_17.wildcard_names)\n",
"self.snakefile = VAR_2\n",
"self.lineno = VAR_17.lineno\n",
"self.run_func = None\n",
"self.snakefile = VAR_17.snakefile\n",
"self.shellcmd = None\n",
"self.run_func = VAR_17.run_func\n",
"self.norun = False\n",
"self.shellcmd = VAR_17.shellcmd\n",
"self.norun = VAR_17.norun\n"
] | [
"def __init__(self, *args, lineno=None, snakefile=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if len(args) == 2:\n",
"name, workflow = args\n",
"if len(args) == 1:\n",
"self.name = name\n",
"other = args[0]\n",
"self.workflow = workflow\n",
"self.name = other.name\n",
"self.docstring = None\n",
"self.workflow = other.workflow\n",
"self.message = None\n",
"self.docstring = other.docstring\n",
"self._input = InputFiles()\n",
"self.message = other.message\n",
"self._output = OutputFiles()\n",
"self._input = InputFiles(other._input)\n",
"self._params = Params()\n",
"self._output = OutputFiles(other._output)\n",
"self.dependencies = dict()\n",
"self._params = Params(other._params)\n",
"self.dynamic_output = set()\n",
"self.dependencies = dict(other.dependencies)\n",
"self.dynamic_input = set()\n",
"self.dynamic_output = set(other.dynamic_output)\n",
"self.temp_output = set()\n",
"self.dynamic_input = set(other.dynamic_input)\n",
"self.protected_output = set()\n",
"self.temp_output = set(other.temp_output)\n",
"self.touch_output = set()\n",
"self.protected_output = set(other.protected_output)\n",
"self.subworkflow_input = dict()\n",
"self.touch_output = set(other.touch_output)\n",
"self.resources = dict(_cores=1, _nodes=1)\n",
"self.subworkflow_input = dict(other.subworkflow_input)\n",
"self.priority = 0\n",
"self.resources = other.resources\n",
"self.version = None\n",
"self.priority = other.priority\n",
"self._log = Log()\n",
"self.version = other.version\n",
"self._benchmark = None\n",
"self._log = other._log\n",
"self.wildcard_names = set()\n",
"self._benchmark = other._benchmark\n",
"self.lineno = lineno\n",
"self.wildcard_names = set(other.wildcard_names)\n",
"self.snakefile = snakefile\n",
"self.lineno = other.lineno\n",
"self.run_func = None\n",
"self.snakefile = other.snakefile\n",
"self.shellcmd = None\n",
"self.run_func = other.run_func\n",
"self.norun = False\n",
"self.shellcmd = other.shellcmd\n",
"self.norun = other.norun\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Docstring",
"Assign'",
"Assign'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_17(self):...\n",
"return self.get('__islocal')\n"
] | [
"def is_new(self):...\n",
"return self.get('__islocal')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_39(VAR_101):...\n",
"if VAR_101.input:\n",
"VAR_13.set_input(*VAR_101.input[0], **ruleinfo.input[1])\n",
"if VAR_101.output:\n",
"VAR_13.set_output(*VAR_101.output[0], **ruleinfo.output[1])\n",
"if VAR_101.params:\n",
"VAR_13.set_params(*VAR_101.params[0], **ruleinfo.params[1])\n",
"if VAR_101.threads:\n",
"if not isinstance(VAR_101.threads, int):\n",
"if VAR_101.resources:\n",
"VAR_13.resources['_cores'] = VAR_101.threads\n",
"VAR_76, VAR_52 = VAR_101.resources\n",
"if VAR_101.priority:\n",
"if VAR_76:\n",
"if not isinstance(VAR_101.priority, int) and not isinstance(VAR_101.\n",
"if VAR_101.version:\n",
"if not all(map(lambda r: isinstance(r, int), VAR_52.values())):\n",
"VAR_13.priority = VAR_101.priority\n",
"VAR_13.version = VAR_101.version\n",
"if VAR_101.log:\n",
"VAR_13.resources.update(VAR_52)\n",
"VAR_13.set_log(*VAR_101.log[0], **ruleinfo.log[1])\n",
"if VAR_101.message:\n",
"VAR_13.message = VAR_101.message\n",
"if VAR_101.benchmark:\n",
"VAR_13.benchmark = VAR_101.benchmark\n",
"VAR_13.norun = VAR_101.norun\n",
"VAR_13.docstring = VAR_101.docstring\n",
"VAR_13.run_func = VAR_101.func\n",
"VAR_13.shellcmd = VAR_101.shellcmd\n",
"VAR_101.func.__name__ = '__{}'.format(VAR_10)\n",
"self.globals[VAR_101.func.__name__] = VAR_101.func\n",
"setattr(VAR_86, VAR_10, VAR_13)\n",
"return VAR_101.func\n"
] | [
"def decorate(ruleinfo):...\n",
"if ruleinfo.input:\n",
"rule.set_input(*ruleinfo.input[0], **ruleinfo.input[1])\n",
"if ruleinfo.output:\n",
"rule.set_output(*ruleinfo.output[0], **ruleinfo.output[1])\n",
"if ruleinfo.params:\n",
"rule.set_params(*ruleinfo.params[0], **ruleinfo.params[1])\n",
"if ruleinfo.threads:\n",
"if not isinstance(ruleinfo.threads, int):\n",
"if ruleinfo.resources:\n",
"rule.resources['_cores'] = ruleinfo.threads\n",
"args, resources = ruleinfo.resources\n",
"if ruleinfo.priority:\n",
"if args:\n",
"if not isinstance(ruleinfo.priority, int) and not isinstance(ruleinfo.\n",
"if ruleinfo.version:\n",
"if not all(map(lambda r: isinstance(r, int), resources.values())):\n",
"rule.priority = ruleinfo.priority\n",
"rule.version = ruleinfo.version\n",
"if ruleinfo.log:\n",
"rule.resources.update(resources)\n",
"rule.set_log(*ruleinfo.log[0], **ruleinfo.log[1])\n",
"if ruleinfo.message:\n",
"rule.message = ruleinfo.message\n",
"if ruleinfo.benchmark:\n",
"rule.benchmark = ruleinfo.benchmark\n",
"rule.norun = ruleinfo.norun\n",
"rule.docstring = ruleinfo.docstring\n",
"rule.run_func = ruleinfo.func\n",
"rule.shellcmd = ruleinfo.shellcmd\n",
"ruleinfo.func.__name__ = '__{}'.format(name)\n",
"self.globals[ruleinfo.func.__name__] = ruleinfo.func\n",
"setattr(rules, name, rule)\n",
"return ruleinfo.func\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"@integration_synonym_api...\n",
"FUNC_2(VAR_2)\n",
"FUNC_3(VAR_2, 'CRAZY', VAR_7='1')\n",
"FUNC_3(VAR_2, 'KAIZEN', VAR_7='2')\n",
"FUNC_5(VAR_4, VAR_5, VAR_11='CAYZEN', VAR_10=[{'name': '----CAYZEN'}, {\n 'name': 'KAIZEN'}])\n"
] | [
"@integration_synonym_api...\n",
"clean_database(solr)\n",
"seed_database_with(solr, 'CRAZY', id='1')\n",
"seed_database_with(solr, 'KAIZEN', id='2')\n",
"verify_results(client, jwt, query='CAYZEN', expected=[{'name': '----CAYZEN'\n }, {'name': 'KAIZEN'}])\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_27(VAR_6):...\n",
"if callable(VAR_6):\n",
"return VAR_6()\n"
] | [
"def default_encode(value):...\n",
"if callable(value):\n",
"return value()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"self.common.delete_volume(VAR_6)\n",
"self.common.client_logout()\n"
] | [
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"self.common.delete_volume(volume)\n",
"self.common.client_logout()\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_3(self, VAR_21, VAR_23=True):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_22 = self.fs\n",
"for part in VAR_21.split('/'):\n",
"if not len(part):\n",
"return VAR_22[VAR_8]\n",
"VAR_67 = False\n",
"for c in VAR_22[VAR_8]:\n",
"if c[VAR_1] == part:\n",
"if not VAR_67:\n",
"if c[VAR_2] == VAR_11:\n",
"VAR_22 = self.getfile(c[VAR_9], VAR_23=follow_symlinks)\n",
"VAR_22 = c\n",
"VAR_67 = True\n"
] | [
"def get_path(self, path, follow_symlinks=True):...\n",
"\"\"\"docstring\"\"\"\n",
"cwd = self.fs\n",
"for part in path.split('/'):\n",
"if not len(part):\n",
"return cwd[A_CONTENTS]\n",
"ok = False\n",
"for c in cwd[A_CONTENTS]:\n",
"if c[A_NAME] == part:\n",
"if not ok:\n",
"if c[A_TYPE] == T_LINK:\n",
"cwd = self.getfile(c[A_TARGET], follow_symlinks=follow_symlinks)\n",
"cwd = c\n",
"ok = True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Condition",
"Return'",
"Assign'",
"For",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'"
] |
[
"from osv import osv\n",
"from tools.translate import _\n",
"import time\n",
"VAR_0 = 'pos.open.statement'\n",
"VAR_1 = 'Open Statements'\n",
"def FUNC_0(self, VAR_2, VAR_3, VAR_4, VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = []\n",
"VAR_7 = self.pool.get('ir.model.data')\n",
"VAR_8 = self.pool.get('res.users').browse(VAR_2, VAR_3, VAR_3).company_id.id\n",
"VAR_9 = self.pool.get('account.bank.statement')\n",
"VAR_10 = self.pool.get('ir.sequence')\n",
"VAR_11 = self.pool.get('account.journal')\n",
"VAR_2.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % VAR_3)\n",
"VAR_12 = map(lambda x1: x1[0], VAR_2.fetchall())\n",
"VAR_2.execute('string' % ','.join(map(lambda x: \"'\" + str(x) + \"'\", VAR_12)))\n",
"VAR_13 = map(lambda x1: x1[0], VAR_2.fetchall())\n",
"for journal in VAR_11.browse(VAR_2, VAR_3, VAR_13):\n",
"VAR_4 = VAR_9.search(VAR_2, VAR_3, [('state', '!=', 'confirm'), ('user_id',\n '=', VAR_3), ('journal_id', '=', journal.id)])\n",
"VAR_14 = self.pool.get('ir.model.data')\n",
"if len(VAR_4):\n",
"VAR_15 = VAR_14._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_tree')\n",
"VAR_17 = ''\n",
"VAR_16 = VAR_14._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_form2')\n",
"if journal.sequence_id:\n",
"if VAR_15:\n",
"VAR_17 = VAR_10.get_id(VAR_2, VAR_3, journal.sequence_id.id)\n",
"VAR_17 = VAR_10.get(VAR_2, VAR_3, 'account.bank.statement')\n",
"VAR_15 = VAR_14.browse(VAR_2, VAR_3, VAR_15, VAR_5=context).res_id\n",
"if VAR_16:\n",
"VAR_18 = VAR_9.create(VAR_2, VAR_3, {'journal_id': journal.id, 'company_id':\n VAR_8, 'user_id': VAR_3, 'state': 'open', 'name': VAR_17,\n 'starting_details_ids': VAR_9._get_cash_close_box_lines(VAR_2, VAR_3, [])})\n",
"VAR_16 = VAR_14.browse(VAR_2, VAR_3, VAR_16, VAR_5=context).res_id\n",
"return {'domain': \"[('state','=','open')]\", 'name': 'Open Statement',\n 'view_type': 'form', 'view_mode': 'tree,form', 'res_model':\n 'account.bank.statement', 'views': [(VAR_15, 'tree'), (VAR_16, 'form')],\n 'type': 'ir.actions.act_window'}\n",
"VAR_9.button_open(VAR_2, VAR_3, [VAR_18], VAR_5)\n"
] | [
"from osv import osv\n",
"from tools.translate import _\n",
"import time\n",
"_name = 'pos.open.statement'\n",
"_description = 'Open Statements'\n",
"def open_statement(self, cr, uid, ids, context):...\n",
"\"\"\"docstring\"\"\"\n",
"list_statement = []\n",
"mod_obj = self.pool.get('ir.model.data')\n",
"company_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.id\n",
"statement_obj = self.pool.get('account.bank.statement')\n",
"sequence_obj = self.pool.get('ir.sequence')\n",
"journal_obj = self.pool.get('account.journal')\n",
"cr.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % uid)\n",
"j_ids = map(lambda x1: x1[0], cr.fetchall())\n",
"cr.execute(\n \"\"\" select id from account_journal\n where auto_cash='True' and type='cash'\n and id in (%s)\"\"\"\n % ','.join(map(lambda x: \"'\" + str(x) + \"'\", j_ids)))\n",
"journal_ids = map(lambda x1: x1[0], cr.fetchall())\n",
"for journal in journal_obj.browse(cr, uid, journal_ids):\n",
"ids = statement_obj.search(cr, uid, [('state', '!=', 'confirm'), ('user_id',\n '=', uid), ('journal_id', '=', journal.id)])\n",
"data_obj = self.pool.get('ir.model.data')\n",
"if len(ids):\n",
"id2 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_tree')\n",
"number = ''\n",
"id3 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_form2')\n",
"if journal.sequence_id:\n",
"if id2:\n",
"number = sequence_obj.get_id(cr, uid, journal.sequence_id.id)\n",
"number = sequence_obj.get(cr, uid, 'account.bank.statement')\n",
"id2 = data_obj.browse(cr, uid, id2, context=context).res_id\n",
"if id3:\n",
"statement_id = statement_obj.create(cr, uid, {'journal_id': journal.id,\n 'company_id': company_id, 'user_id': uid, 'state': 'open', 'name':\n number, 'starting_details_ids': statement_obj._get_cash_close_box_lines\n (cr, uid, [])})\n",
"id3 = data_obj.browse(cr, uid, id3, context=context).res_id\n",
"return {'domain': \"[('state','=','open')]\", 'name': 'Open Statement',\n 'view_type': 'form', 'view_mode': 'tree,form', 'res_model':\n 'account.bank.statement', 'views': [(id2, 'tree'), (id3, 'form')],\n 'type': 'ir.actions.act_window'}\n",
"statement_obj.button_open(cr, uid, [statement_id], context)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"Import'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Expr'"
] |
[
"def FUNC_6(VAR_10):...\n",
"\"\"\"docstring\"\"\"\n",
"return '/' + VAR_10[0][1:-1]\n"
] | [
"def _constructRequestURL(django_url_pattern):...\n",
"\"\"\"docstring\"\"\"\n",
"return '/' + django_url_pattern[0][1:-1]\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_11(self, VAR_3):...\n",
""
] | [
"def doneWithConnection(self, conn):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_11(self, VAR_10):...\n",
"if self.version == 'custom':\n",
"return\n",
"VAR_13 = [('scalac', self._create_compiler_jardep), ('scala-library', self.\n _create_runtime_jardep)]\n",
"for spec_key, create_jardep_func in VAR_13:\n",
"VAR_18 = self.injectables_spec_for_key(spec_key)\n",
"VAR_19 = Address.parse(VAR_18)\n",
"if not VAR_10.contains_address(VAR_19):\n",
"VAR_21 = [create_jardep_func(self.version)]\n",
"if not VAR_10.get_target(VAR_19).is_synthetic:\n",
"VAR_10.inject_synthetic_target(VAR_19, JarLibrary, VAR_21=jars, scope='forced')\n"
] | [
"def injectables(self, build_graph):...\n",
"if self.version == 'custom':\n",
"return\n",
"specs_to_create = [('scalac', self._create_compiler_jardep), (\n 'scala-library', self._create_runtime_jardep)]\n",
"for spec_key, create_jardep_func in specs_to_create:\n",
"spec = self.injectables_spec_for_key(spec_key)\n",
"target_address = Address.parse(spec)\n",
"if not build_graph.contains_address(target_address):\n",
"jars = [create_jardep_func(self.version)]\n",
"if not build_graph.get_target(target_address).is_synthetic:\n",
"build_graph.inject_synthetic_target(target_address, JarLibrary, jars=jars,\n scope='forced')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'"
] |
[
"def FUNC_2(self, VAR_4):...\n",
"if self.content_length is not None:\n",
"VAR_4, VAR_31 = VAR_4[:self.content_length], VAR_4[self.content_length:]\n",
"VAR_31 = ''\n",
"self.content_length -= len(VAR_4)\n",
"self._contentbuffer.write(VAR_4)\n",
"if self.content_length == 0 and self._contentbuffer is not None:\n",
"VAR_32 = self._contentbuffer\n",
"self.content_length = self._contentbuffer = None\n",
"self.setLineMode(VAR_31)\n",
"VAR_32.seek(0, 0)\n",
"if self.file_upload:\n",
"self._on_request_body(self.uploaded_file)\n",
"self._on_request_body(VAR_32.read())\n",
"self.file_upload = False\n",
"self.uploaded_file = {}\n"
] | [
"def rawDataReceived(self, data):...\n",
"if self.content_length is not None:\n",
"data, rest = data[:self.content_length], data[self.content_length:]\n",
"rest = ''\n",
"self.content_length -= len(data)\n",
"self._contentbuffer.write(data)\n",
"if self.content_length == 0 and self._contentbuffer is not None:\n",
"tmpbuf = self._contentbuffer\n",
"self.content_length = self._contentbuffer = None\n",
"self.setLineMode(rest)\n",
"tmpbuf.seek(0, 0)\n",
"if self.file_upload:\n",
"self._on_request_body(self.uploaded_file)\n",
"self._on_request_body(tmpbuf.read())\n",
"self.file_upload = False\n",
"self.uploaded_file = {}\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"AugAssign'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assign'"
] |
[
"def FUNC_16(self, VAR_0, VAR_1, VAR_10, VAR_3=None):...\n",
"if not VAR_3:\n",
"VAR_3 = self.make_reqid()\n",
"VAR_8 = [VAR_0, VAR_1, make_auth_hash(VAR_0, VAR_1, VAR_3, VAR_10)]\n",
"return b'Router', b'auth-bind-route', VAR_8, VAR_3\n"
] | [
"def make_auth_bind_route_data(self, interface, method, key, reqid=None):...\n",
"if not reqid:\n",
"reqid = self.make_reqid()\n",
"args = [interface, method, make_auth_hash(interface, method, reqid, key)]\n",
"return b'Router', b'auth-bind-route', args, reqid\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_9(self, VAR_6, VAR_10, VAR_11):...\n",
"if VAR_6 in self.clients:\n",
"VAR_14 = self.clients[VAR_6]\n",
"if VAR_14['status'] == self.RUNNING:\n",
"VAR_23 = self.clients[VAR_6]['instance']\n",
"VAR_23.perform_request(VAR_10, VAR_11)\n"
] | [
"def send_request(self, language, request, params):...\n",
"if language in self.clients:\n",
"language_client = self.clients[language]\n",
"if language_client['status'] == self.RUNNING:\n",
"client = self.clients[language]['instance']\n",
"client.perform_request(request, params)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Expr'"
] |
[
"@VAR_0.route('/<page_name>/history')...\n",
"VAR_3 = VAR_1.query('string' % VAR_2)\n",
"VAR_11 = VAR_3.namedresult()\n",
"return render_template('page_history.html', VAR_2=page_name, VAR_11=\n page_histories)\n"
] | [
"@app.route('/<page_name>/history')...\n",
"query = db.query(\n \"select page_content.timestamp, page_content.id from page, page_content where page.id = page_content.page_id and page.page_name = '%s'\"\n % page_name)\n",
"page_histories = query.namedresult()\n",
"return render_template('page_history.html', page_name=page_name,\n page_histories=page_histories)\n"
] | [
0,
4,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
"def execute(self):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
0,
0
] | [
"FunctionDef'",
"Docstring"
] |
[
"def FUNC_7():...\n",
"VAR_14 = []\n",
"VAR_15 = User.query.all()\n",
"for VAR_8 in VAR_15:\n",
"VAR_5 = ''\n",
"return VAR_14\n",
"VAR_4 = VAR_8.name\n",
"if VAR_8.username:\n",
"VAR_5 = VAR_8.username\n",
"VAR_7 = Name.query.filter_by(VAR_0=person.id)\n",
"VAR_14.append({'id': VAR_8.id, 'username': VAR_5, 'name': VAR_4, 'names':\n VAR_7})\n"
] | [
"def get_people():...\n",
"people = []\n",
"ppl = User.query.all()\n",
"for person in ppl:\n",
"username = ''\n",
"return people\n",
"name = person.name\n",
"if person.username:\n",
"username = person.username\n",
"names = Name.query.filter_by(user_id=person.id)\n",
"people.append({'id': person.id, 'username': username, 'name': name, 'names':\n names})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_2, VAR_3):...\n",
"Frame.__init__(self, VAR_2, bg='#f8f8f8')\n",
"VAR_4 = Frame(self, bg='#e7e7e7')\n",
"VAR_4.pack(fill='both')\n",
"VAR_5 = Image.open('home.jpg')\n",
"VAR_6 = ImageTk.PhotoImage(VAR_5)\n",
"from landingpage import LandingPage\n",
"VAR_7 = Button(VAR_4, image=render, borderwidth=0, highlightthickness=0,\n highlightbackground='#e7e7e7', command=lambda : controller.show_frame(\n LandingPage))\n",
"VAR_7.image = VAR_6\n",
"VAR_7.pack(side=LEFT)\n",
"VAR_8 = Label(VAR_4, text='Meal Planner', font=LARGE_FONT, bg='#e7e7e7', fg\n ='#272822')\n",
"VAR_8.pack(side=LEFT, padx=289)\n",
"VAR_9 = Button(VAR_4, text='Grocery List', highlightbackground='#e7e7e7',\n command=lambda : view_grocery_list())\n",
"VAR_9.pack(side=LEFT)\n",
"VAR_10 = Frame(self, bg='#f8f8f8')\n",
"VAR_11 = datetime.datetime.now()\n",
"VAR_12 = datetime.date(VAR_11.year, VAR_11.month, VAR_11.day)\n",
"VAR_13 = VAR_12.isocalendar()[1]\n",
"VAR_14 = Week(VAR_11.year, VAR_13)\n",
"VAR_15 = Frame(self, bg='#f8f8f8')\n",
"VAR_15.rowconfigure(0, weight=1)\n",
"VAR_15.columnconfigure(0, weight=1)\n",
"VAR_15.rowconfigure(1, weight=3)\n",
"VAR_15.columnconfigure(1, weight=3)\n",
"VAR_15.pack()\n",
"VAR_16 = ['Breakfast', 'Lunch', 'Dinner']\n",
"for VAR_44 in range(len(VAR_16)):\n",
"Label(VAR_15, text=columnLabels[i], font=('Trebuchet MS', 16), bg='#f8f8f8'\n ).grid(VAR_32=0, VAR_33=i + 2, pady=10, padx=85, sticky='nsew')\n",
"VAR_17 = 'Monday ' + str(VAR_14.monday())\n",
"VAR_18 = 'Tuesday ' + str(VAR_14.tuesday())\n",
"VAR_19 = 'Wednesday ' + str(VAR_14.wednesday())\n",
"VAR_20 = 'Thursday ' + str(VAR_14.thursday())\n",
"VAR_21 = 'Friday ' + str(VAR_14.friday())\n",
"VAR_22 = 'Saturday ' + str(VAR_14.saturday())\n",
"VAR_23 = 'Sunday ' + str(VAR_14.sunday())\n",
"VAR_24 = [VAR_17, VAR_18, VAR_19, VAR_20, VAR_21, VAR_22, VAR_23]\n",
"for VAR_44 in range(len(VAR_24)):\n",
"Label(VAR_15, font=('Trebuchet MS', 12), bg='#f8f8f8', text=labels[i]).grid(\n VAR_32=i + 1, VAR_33=0, padx=5, pady=15, sticky='w')\n",
"VAR_25 = {}\n",
"VAR_35 = ttk.Separator(VAR_15, orient='vertical')\n",
"VAR_26 = []\n",
"VAR_35.grid(VAR_32=i + 1, VAR_33=1, padx=5, sticky='nsew')\n",
"for rows in range(len(VAR_24)):\n",
"for columns in range(len(VAR_16)):\n",
"def FUNC_0(VAR_27, VAR_28):...\n",
"VAR_47 = Button(VAR_15, text='Add meal to day', highlightbackground=\n '#f8f8f8', command=lambda x=rows + 1, y=columns + 2: add_meal(x, y))\n",
"VAR_15.pack_forget()\n",
"VAR_47.grid(VAR_32=rows + 1, VAR_33=columns + 2)\n",
"VAR_10.forget()\n",
"VAR_47.position = rows + 1, columns + 2\n",
"VAR_36 = Frame(self, bg='#f8f8f8')\n",
"VAR_25[VAR_47] = VAR_47.position\n",
"VAR_36.rowconfigure(0, weight=1)\n",
"VAR_26.append(VAR_47)\n",
"VAR_36.columnconfigure(0, weight=1)\n",
"VAR_36.rowconfigure(1, weight=3)\n",
"VAR_36.columnconfigure(1, weight=3)\n",
"VAR_36.pack()\n",
"VAR_37 = []\n",
"VAR_38 = []\n",
"VAR_39 = 'meal_planner.db'\n",
"VAR_48 = conn.cursor()\n",
"VAR_49 = VAR_48.execute('SELECT * FROM recipe')\n",
"for result in [VAR_49]:\n",
"for VAR_32 in result.fetchall():\n",
"for VAR_44 in range(len(VAR_37)):\n",
"VAR_51 = VAR_32[0]\n",
"Button(VAR_36, text=recipeNames[i], highlightbackground='#f8f8f8', command=\n lambda x=recipeNames[i], y=ingredientList[i]: add_recipe(x, y,\n add_meal_frame, rowLocation, columnLocation)).grid(VAR_32=i, VAR_33=0)\n",
"def FUNC_1(VAR_29, VAR_30, VAR_31, VAR_32, VAR_33):...\n",
"VAR_30 = VAR_32[4]\n",
"VAR_31.pack_forget()\n",
"VAR_37.append(VAR_51)\n",
"VAR_10.forget()\n",
"VAR_38.append(VAR_30)\n",
"VAR_40 = VAR_32, VAR_33\n",
"for key, value in VAR_25.items():\n",
"if value == VAR_40:\n",
"FUNC_5(VAR_29, VAR_32, VAR_33)\n",
"key.destroy()\n",
"FUNC_4(VAR_30)\n",
"VAR_41 = Label(VAR_15, text=recipe, bg='#f8f8f8')\n",
"VAR_41.grid(VAR_32=row, VAR_33=column)\n",
"VAR_41.bind('<Button-1>', lambda event: FUNC_2(VAR_29))\n",
"VAR_15.pack()\n",
"def FUNC_2(VAR_34):...\n",
"VAR_15.pack_forget()\n",
"VAR_10.pack(expand=True, fill='both')\n",
"VAR_9.pack_forget()\n",
"VAR_39 = 'meal_planner.db'\n",
"print(VAR_34)\n",
"VAR_48 = conn.cursor()\n",
"VAR_49 = VAR_48.execute('SELECT * FROM recipe WHERE name = ' + '\"' + VAR_34 +\n '\"')\n",
"for result in [VAR_49]:\n",
"for VAR_32 in result.fetchall():\n",
"Label(VAR_10, text=string, font=MEDIUM_FONT, bg='#f8f8f8', fg='#000000').pack(\n side=TOP)\n",
"VAR_51 = VAR_32[0]\n",
"Label(VAR_10, text=secondString, font=MEDIUM_FONT, bg='#f8f8f8', fg='#000000'\n ).pack(side=TOP)\n",
"VAR_52 = VAR_32[1]\n",
"Label(VAR_10, text=thirdString, font=MEDIUM_FONT, bg='#f8f8f8', fg='#000000'\n ).pack(side=TOP)\n",
"VAR_53 = VAR_32[2]\n",
"VAR_42 = Button(VAR_4, text='Return to Menu', highlightbackground='#e7e7e7',\n command=lambda : [viewRecipeFrame.pack_forget(), menu.pack(),\n returnButton.pack_forget(), label.configure(text='Meal Planer'),\n groceryButton.pack(side=RIGHT)])\n",
"VAR_30 = VAR_32[4]\n",
"VAR_42.pack(side=RIGHT)\n",
"VAR_54 = VAR_32[5]\n",
"def FUNC_3():...\n",
"VAR_55 = \"\"\"Name: {} \n Cook time: {} \n Number of Servings: {} \n \"\"\".format(\n VAR_51, VAR_52, VAR_53)\n",
"print('grocery== list')\n",
"VAR_56 = 'Ingredients: {}'.format(VAR_30)\n",
"VAR_43 = Frame(self)\n",
"VAR_57 = 'Directions: {}'.format(VAR_54)\n",
"VAR_43.rowconfigure(0, weight=1)\n",
"VAR_43.columnconfigure(0, weight=1)\n",
"VAR_43.rowconfigure(1, weight=3)\n",
"VAR_43.columnconfigure(1, weight=3)\n",
"VAR_43.pack()\n",
"VAR_15.pack_forget()\n",
"VAR_9.pack_forget()\n",
"VAR_8.configure(text='Grocery List')\n",
"VAR_44 = 0\n",
"VAR_39 = 'meal_planner.db'\n",
"VAR_45 = []\n",
"VAR_48 = conn.cursor()\n",
"VAR_50 = 'ingredients_' + str(VAR_13)\n",
"VAR_49 = VAR_48.execute('SELECT * FROM ' + VAR_50)\n",
"for result in [VAR_49]:\n",
"for VAR_32 in result.fetchall():\n",
"VAR_46 = 0\n",
"print(VAR_32)\n",
"for item in VAR_45:\n",
"for ingredient in VAR_32:\n",
"print(item)\n",
"VAR_42 = Button(VAR_4, text='Return to Menu', highlightbackground='#e7e7e7',\n command=lambda : [groceryListFrame.pack_forget(), menu.pack(),\n returnButton.pack_forget(), label.configure(text='Meal Planer'),\n groceryButton.pack(side=RIGHT)])\n",
"print(ingredient)\n",
"VAR_44 = VAR_44 + 1\n",
"VAR_42.pack(side=RIGHT)\n",
"VAR_45.append(str(ingredient).split())\n",
"Label(VAR_43, text=ingredient, font=MEDIUM_FONT, justify=LEFT).grid(VAR_32=\n i, VAR_33=0, sticky='w')\n",
"def FUNC_4(VAR_30):...\n",
"VAR_39 = 'meal_planner.db'\n",
"VAR_50 = 'ingredients_' + str(VAR_13)\n",
"conn.execute('CREATE TABLE IF NOT EXISTS ' + VAR_50 + ' (ingredients text)')\n",
"conn.execute('INSERT INTO ' + VAR_50 + ' VALUES (?);', (VAR_30,))\n",
"def FUNC_5(VAR_34, VAR_32, VAR_33):...\n",
"print('save weeks')\n",
"VAR_39 = 'meal_planner.db'\n",
"VAR_50 = 'recipes_' + str(VAR_13)\n",
"conn.execute('CREATE TABLE IF NOT EXISTS ' + VAR_50 +\n ' (recipe text, row int, column int)')\n",
"conn.execute('INSERT INTO ' + VAR_50 + ' VALUES (?, ?, ?);', (VAR_34,\n VAR_32, VAR_33))\n"
] | [
"def __init__(self, parent, controller):...\n",
"Frame.__init__(self, parent, bg='#f8f8f8')\n",
"menuFrame = Frame(self, bg='#e7e7e7')\n",
"menuFrame.pack(fill='both')\n",
"load = Image.open('home.jpg')\n",
"render = ImageTk.PhotoImage(load)\n",
"from landingpage import LandingPage\n",
"img = Button(menuFrame, image=render, borderwidth=0, highlightthickness=0,\n highlightbackground='#e7e7e7', command=lambda : controller.show_frame(\n LandingPage))\n",
"img.image = render\n",
"img.pack(side=LEFT)\n",
"label = Label(menuFrame, text='Meal Planner', font=LARGE_FONT, bg='#e7e7e7',\n fg='#272822')\n",
"label.pack(side=LEFT, padx=289)\n",
"groceryButton = Button(menuFrame, text='Grocery List', highlightbackground=\n '#e7e7e7', command=lambda : view_grocery_list())\n",
"groceryButton.pack(side=LEFT)\n",
"viewRecipeFrame = Frame(self, bg='#f8f8f8')\n",
"now = datetime.datetime.now()\n",
"dt = datetime.date(now.year, now.month, now.day)\n",
"weekNumber = dt.isocalendar()[1]\n",
"w = Week(now.year, weekNumber)\n",
"menu = Frame(self, bg='#f8f8f8')\n",
"menu.rowconfigure(0, weight=1)\n",
"menu.columnconfigure(0, weight=1)\n",
"menu.rowconfigure(1, weight=3)\n",
"menu.columnconfigure(1, weight=3)\n",
"menu.pack()\n",
"columnLabels = ['Breakfast', 'Lunch', 'Dinner']\n",
"for i in range(len(columnLabels)):\n",
"Label(menu, text=columnLabels[i], font=('Trebuchet MS', 16), bg='#f8f8f8'\n ).grid(row=0, column=i + 2, pady=10, padx=85, sticky='nsew')\n",
"mondayText = 'Monday ' + str(w.monday())\n",
"tuesdayText = 'Tuesday ' + str(w.tuesday())\n",
"wednesdayText = 'Wednesday ' + str(w.wednesday())\n",
"thursdayText = 'Thursday ' + str(w.thursday())\n",
"fridayText = 'Friday ' + str(w.friday())\n",
"saturdayText = 'Saturday ' + str(w.saturday())\n",
"sundayText = 'Sunday ' + str(w.sunday())\n",
"labels = [mondayText, tuesdayText, wednesdayText, thursdayText, fridayText,\n saturdayText, sundayText]\n",
"for i in range(len(labels)):\n",
"Label(menu, font=('Trebuchet MS', 12), bg='#f8f8f8', text=labels[i]).grid(row\n =i + 1, column=0, padx=5, pady=15, sticky='w')\n",
"buttonDict = {}\n",
"sep = ttk.Separator(menu, orient='vertical')\n",
"listOfButtons = []\n",
"sep.grid(row=i + 1, column=1, padx=5, sticky='nsew')\n",
"for rows in range(len(labels)):\n",
"for columns in range(len(columnLabels)):\n",
"def add_meal(rowLocation, columnLocation):...\n",
"buttons = Button(menu, text='Add meal to day', highlightbackground=\n '#f8f8f8', command=lambda x=rows + 1, y=columns + 2: add_meal(x, y))\n",
"menu.pack_forget()\n",
"buttons.grid(row=rows + 1, column=columns + 2)\n",
"viewRecipeFrame.forget()\n",
"buttons.position = rows + 1, columns + 2\n",
"add_meal_frame = Frame(self, bg='#f8f8f8')\n",
"buttonDict[buttons] = buttons.position\n",
"add_meal_frame.rowconfigure(0, weight=1)\n",
"listOfButtons.append(buttons)\n",
"add_meal_frame.columnconfigure(0, weight=1)\n",
"add_meal_frame.rowconfigure(1, weight=3)\n",
"add_meal_frame.columnconfigure(1, weight=3)\n",
"add_meal_frame.pack()\n",
"recipeNames = []\n",
"ingredientList = []\n",
"database_file = 'meal_planner.db'\n",
"cursor = conn.cursor()\n",
"selection = cursor.execute('SELECT * FROM recipe')\n",
"for result in [selection]:\n",
"for row in result.fetchall():\n",
"for i in range(len(recipeNames)):\n",
"name = row[0]\n",
"Button(add_meal_frame, text=recipeNames[i], highlightbackground='#f8f8f8',\n command=lambda x=recipeNames[i], y=ingredientList[i]: add_recipe(x, y,\n add_meal_frame, rowLocation, columnLocation)).grid(row=i, column=0)\n",
"def add_recipe(recipe, ingredients, view, row, column):...\n",
"ingredients = row[4]\n",
"view.pack_forget()\n",
"recipeNames.append(name)\n",
"viewRecipeFrame.forget()\n",
"ingredientList.append(ingredients)\n",
"searchIndex = row, column\n",
"for key, value in buttonDict.items():\n",
"if value == searchIndex:\n",
"save_weeks_recipes(recipe, row, column)\n",
"key.destroy()\n",
"save_ingredients(ingredients)\n",
"recipeLabel = Label(menu, text=recipe, bg='#f8f8f8')\n",
"recipeLabel.grid(row=row, column=column)\n",
"recipeLabel.bind('<Button-1>', lambda event: callback(recipe))\n",
"menu.pack()\n",
"def callback(recipeName):...\n",
"menu.pack_forget()\n",
"viewRecipeFrame.pack(expand=True, fill='both')\n",
"groceryButton.pack_forget()\n",
"database_file = 'meal_planner.db'\n",
"print(recipeName)\n",
"cursor = conn.cursor()\n",
"selection = cursor.execute('SELECT * FROM recipe WHERE name = ' + '\"' +\n recipeName + '\"')\n",
"for result in [selection]:\n",
"for row in result.fetchall():\n",
"Label(viewRecipeFrame, text=string, font=MEDIUM_FONT, bg='#f8f8f8', fg=\n '#000000').pack(side=TOP)\n",
"name = row[0]\n",
"Label(viewRecipeFrame, text=secondString, font=MEDIUM_FONT, bg='#f8f8f8',\n fg='#000000').pack(side=TOP)\n",
"time = row[1]\n",
"Label(viewRecipeFrame, text=thirdString, font=MEDIUM_FONT, bg='#f8f8f8', fg\n ='#000000').pack(side=TOP)\n",
"servings = row[2]\n",
"returnButton = Button(menuFrame, text='Return to Menu', highlightbackground\n ='#e7e7e7', command=lambda : [viewRecipeFrame.pack_forget(), menu.pack(\n ), returnButton.pack_forget(), label.configure(text='Meal Planer'),\n groceryButton.pack(side=RIGHT)])\n",
"ingredients = row[4]\n",
"returnButton.pack(side=RIGHT)\n",
"directions = row[5]\n",
"def view_grocery_list():...\n",
"string = \"\"\"Name: {} \n Cook time: {} \n Number of Servings: {} \n \"\"\".format(name\n , time, servings)\n",
"print('grocery== list')\n",
"secondString = 'Ingredients: {}'.format(ingredients)\n",
"groceryListFrame = Frame(self)\n",
"thirdString = 'Directions: {}'.format(directions)\n",
"groceryListFrame.rowconfigure(0, weight=1)\n",
"groceryListFrame.columnconfigure(0, weight=1)\n",
"groceryListFrame.rowconfigure(1, weight=3)\n",
"groceryListFrame.columnconfigure(1, weight=3)\n",
"groceryListFrame.pack()\n",
"menu.pack_forget()\n",
"groceryButton.pack_forget()\n",
"label.configure(text='Grocery List')\n",
"i = 0\n",
"database_file = 'meal_planner.db'\n",
"item_array = []\n",
"cursor = conn.cursor()\n",
"tableName = 'ingredients_' + str(weekNumber)\n",
"selection = cursor.execute('SELECT * FROM ' + tableName)\n",
"for result in [selection]:\n",
"for row in result.fetchall():\n",
"j = 0\n",
"print(row)\n",
"for item in item_array:\n",
"for ingredient in row:\n",
"print(item)\n",
"returnButton = Button(menuFrame, text='Return to Menu', highlightbackground\n ='#e7e7e7', command=lambda : [groceryListFrame.pack_forget(), menu.pack\n (), returnButton.pack_forget(), label.configure(text='Meal Planer'),\n groceryButton.pack(side=RIGHT)])\n",
"print(ingredient)\n",
"i = i + 1\n",
"returnButton.pack(side=RIGHT)\n",
"item_array.append(str(ingredient).split())\n",
"Label(groceryListFrame, text=ingredient, font=MEDIUM_FONT, justify=LEFT).grid(\n row=i, column=0, sticky='w')\n",
"def save_ingredients(ingredients):...\n",
"database_file = 'meal_planner.db'\n",
"tableName = 'ingredients_' + str(weekNumber)\n",
"conn.execute('CREATE TABLE IF NOT EXISTS ' + tableName + ' (ingredients text)')\n",
"conn.execute('INSERT INTO ' + tableName + ' VALUES (?);', (ingredients,))\n",
"def save_weeks_recipes(recipeName, row, column):...\n",
"print('save weeks')\n",
"database_file = 'meal_planner.db'\n",
"tableName = 'recipes_' + str(weekNumber)\n",
"conn.execute('CREATE TABLE IF NOT EXISTS ' + tableName +\n ' (recipe text, row int, column int)')\n",
"conn.execute('INSERT INTO ' + tableName + ' VALUES (?, ?, ?);', (recipeName,\n row, column))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"ImportFrom'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"For",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"For",
"For",
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"For",
"For",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"For",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"For",
"For",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"For",
"Assign'",
"Expr'",
"For",
"For",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_11(self):...\n",
"self.clear_input_fields()\n",
"keepassgtk.config_manager.create_config_entry_string('history',\n 'last-opened-db', str(self.database_filepath))\n",
"keepassgtk.config_manager.save_config()\n",
"self.unlock_database_stack_box.destroy()\n",
"UnlockedDatabase(self.window, self.parent_widget, self.database_manager)\n"
] | [
"def open_database_page(self):...\n",
"self.clear_input_fields()\n",
"keepassgtk.config_manager.create_config_entry_string('history',\n 'last-opened-db', str(self.database_filepath))\n",
"keepassgtk.config_manager.save_config()\n",
"self.unlock_database_stack_box.destroy()\n",
"UnlockedDatabase(self.window, self.parent_widget, self.database_manager)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_2(self):...\n",
"VAR_1 = bot.Bot(None, {}, 'https://localhost:1/',\n '1234-1a2b3c4-tainted-joe', 'base_dir', None)\n",
"VAR_2 = threading.Event()\n",
"VAR_1.call_later(0.1, VAR_2.set)\n",
"VAR_1.cancel_all_timers()\n",
"self.assertFalse(VAR_2.wait(0.3))\n"
] | [
"def test_bot_call_later_cancel(self):...\n",
"obj = bot.Bot(None, {}, 'https://localhost:1/', '1234-1a2b3c4-tainted-joe',\n 'base_dir', None)\n",
"ev = threading.Event()\n",
"obj.call_later(0.1, ev.set)\n",
"obj.cancel_all_timers()\n",
"self.assertFalse(ev.wait(0.3))\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_0(self):...\n",
"VAR_8 = []\n",
"for VAR_5 in self.groups:\n",
"VAR_8.append(VAR_5.serialize())\n",
"return dict(VAR_2=self.name, vars=self.vars.copy(), address=self.address,\n uuid=self._uuid, VAR_8=groups, implicit=self.implicit)\n"
] | [
"def serialize(self):...\n",
"groups = []\n",
"for group in self.groups:\n",
"groups.append(group.serialize())\n",
"return dict(name=self.name, vars=self.vars.copy(), address=self.address,\n uuid=self._uuid, groups=groups, implicit=self.implicit)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Expr'",
"Return'"
] |
[
"def FUNC_6(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_20 = kijiji_api.KijijiApi()\n",
"VAR_20.login(VAR_0.username, VAR_0.password)\n",
"VAR_20.delete_ad(VAR_0.id)\n"
] | [
"def delete_ad(args):...\n",
"\"\"\"docstring\"\"\"\n",
"api = kijiji_api.KijijiApi()\n",
"api.login(args.username, args.password)\n",
"api.delete_ad(args.id)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_6(self, VAR_11):...\n",
"if VAR_11 and is_banned_domain(VAR_11):\n",
"VAR_101.errors.add(errors.BANNED_DOMAIN)\n"
] | [
"def run(self, url):...\n",
"if url and is_banned_domain(url):\n",
"c.errors.add(errors.BANNED_DOMAIN)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'"
] |
[
"def __init__(self, VAR_1=None, VAR_2=None, VAR_3=None, VAR_4=None, VAR_5=...\n",
"\"\"\"docstring\"\"\"\n",
"self._rules = OrderedDict()\n",
"self.first_rule = None\n",
"self._workdir = None\n",
"self.overwrite_workdir = VAR_6\n",
"self.workdir_init = os.path.abspath(os.curdir)\n",
"self._ruleorder = Ruleorder()\n",
"self._localrules = set()\n",
"self.linemaps = dict()\n",
"self.rule_count = 0\n",
"self.basedir = os.path.dirname(VAR_1)\n",
"self.snakefile = os.path.abspath(VAR_1)\n",
"self.snakemakepath = VAR_2\n",
"self.included = []\n",
"self.included_stack = []\n",
"self.jobscript = VAR_3\n",
"self.persistence = None\n",
"self.global_resources = None\n",
"self.globals = globals()\n",
"self._subworkflows = dict()\n",
"self.overwrite_shellcmd = VAR_4\n",
"self.overwrite_config = VAR_5\n",
"self.overwrite_configfile = VAR_7\n",
"self.config_args = VAR_8\n",
"self._onsuccess = lambda log: None\n",
"self._onerror = lambda log: None\n",
"self.debug = VAR_9\n",
"VAR_85 = dict()\n",
"VAR_85.update(self.overwrite_config)\n",
"VAR_86 = CLASS_3()\n"
] | [
"def __init__(self, snakefile=None, snakemakepath=None, jobscript=None,...\n",
"\"\"\"docstring\"\"\"\n",
"self._rules = OrderedDict()\n",
"self.first_rule = None\n",
"self._workdir = None\n",
"self.overwrite_workdir = overwrite_workdir\n",
"self.workdir_init = os.path.abspath(os.curdir)\n",
"self._ruleorder = Ruleorder()\n",
"self._localrules = set()\n",
"self.linemaps = dict()\n",
"self.rule_count = 0\n",
"self.basedir = os.path.dirname(snakefile)\n",
"self.snakefile = os.path.abspath(snakefile)\n",
"self.snakemakepath = snakemakepath\n",
"self.included = []\n",
"self.included_stack = []\n",
"self.jobscript = jobscript\n",
"self.persistence = None\n",
"self.global_resources = None\n",
"self.globals = globals()\n",
"self._subworkflows = dict()\n",
"self.overwrite_shellcmd = overwrite_shellcmd\n",
"self.overwrite_config = overwrite_config\n",
"self.overwrite_configfile = overwrite_configfile\n",
"self.config_args = config_args\n",
"self._onsuccess = lambda log: None\n",
"self._onerror = lambda log: None\n",
"self.debug = debug\n",
"config = dict()\n",
"config.update(self.overwrite_config)\n",
"rules = Rules()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'"
] |
[
"\"\"\"string\"\"\"\n",
"import copy\n",
"from flask import current_app, session\n",
"from flask.ext.login import current_user\n",
"from invenio.ext.sqlalchemy.utils import session_manager\n",
"VAR_0 = dict(title='ORCID', description=\n 'Connecting Research and Researchers.', icon='', authorized_handler=\n 'invenio.modules.oauthclient.handlers:authorized_signup_handler',\n disconnect_handler=\n 'invenio.modules.oauthclient.handlers:disconnect_handler',\n signup_handler=dict(info=\n 'invenio.modules.oauthclient.contrib.orcid:account_info', setup=\n 'invenio.modules.oauthclient.contrib.orcid:account_setup', view=\n 'invenio.modules.oauthclient.handlers:signup_handler'), params=dict(\n request_token_params={'scope': '/authenticate'}, base_url=\n 'https://pub.orcid.com/', request_token_url=None, access_token_url=\n 'https://pub.orcid.org/oauth/token', access_token_method='POST',\n authorize_url='https://orcid.org/oauth/authorize#show_login', app_key=\n 'ORCID_APP_CREDENTIALS', content_type='application/json'))\n",
"\"\"\" ORCID Remote Application. \"\"\"\n",
"VAR_1 = copy.deepcopy(VAR_0)\n",
"\"\"\"ORCID Sandbox Remote Application.\"\"\"\n",
"VAR_1['params'].update(dict(base_url='https://api.sandbox.orcid.org/',\n access_token_url='https://api.sandbox.orcid.org/oauth/token',\n authorize_url='https://sandbox.orcid.org/oauth/authorize#show_login'))\n",
"def FUNC_0(VAR_2, VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5 = dict(external_id=resp.get('orcid'), external_method='orcid')\n",
"return VAR_5\n"
] | [
"\"\"\"Pre-configured remote application for enabling sign in/up with ORCID.\n\n**Usage:**\n\n1. Edit your configuration and add:\n\n .. code-block:: python\n\n from invenio.modules.oauthclient.contrib import orcid\n OAUTHCLIENT_REMOTE_APPS = dict(\n orcid=orcid.REMOTE_APP,\n )\n\n ORCID_APP_CREDENTIALS = dict(\n consumer_key=\"changeme\",\n consumer_secret=\"changeme\",\n )\n\n Note, if you want to use the ORCID sandbox, use ``orcid.REMOTE_SANDBOX_APP``\n instead of ``orcid.REMOTE_APP``.\n\n2. Register a new application with ORCID. When registering the\n application ensure that the *Redirect URI* points to:\n ``CFG_SITE_SECURE_URL/oauth/authorized/orcid/`` (note, ORCID does not\n allow localhost to be used, thus testing on development machines is\n somewhat complicated by this).\n\n\n3. Grab the *Client ID* and *Client Secret* after registering the application\n and add them to your instance configuration (``invenio.cfg``):\n\n .. code-block:: python\n\n ORCID_APP_CREDENTIALS = dict(\n consumer_key=\"<CLIENT ID>\",\n consumer_secret=\"<CLIENT SECRET>\",\n )\n\n4. Now go to ``CFG_SITE_SECURE_URL/oauth/login/orcid/`` (e.g.\n http://localhost:4000/oauth/login/orcid/)\n\n5. Also, you should see ORCID listed under Linked accounts:\n http://localhost:4000//account/settings/linkedaccounts/\n\nBy default the ORCID module will try first look if a link already exists\nbetween a ORCID account and a user. If no link is found, the user is asked\nto provide an email address to sign-up.\n\nIn templates you can add a sign in/up link:\n\n.. code-block:: jinja\n\n <a href=\"{{url_for('oauthclient.login', remote_app='orcid')}}\">Sign in with ORCID</a>\n\n\"\"\"\n",
"import copy\n",
"from flask import current_app, session\n",
"from flask.ext.login import current_user\n",
"from invenio.ext.sqlalchemy.utils import session_manager\n",
"REMOTE_APP = dict(title='ORCID', description=\n 'Connecting Research and Researchers.', icon='', authorized_handler=\n 'invenio.modules.oauthclient.handlers:authorized_signup_handler',\n disconnect_handler=\n 'invenio.modules.oauthclient.handlers:disconnect_handler',\n signup_handler=dict(info=\n 'invenio.modules.oauthclient.contrib.orcid:account_info', setup=\n 'invenio.modules.oauthclient.contrib.orcid:account_setup', view=\n 'invenio.modules.oauthclient.handlers:signup_handler'), params=dict(\n request_token_params={'scope': '/authenticate'}, base_url=\n 'https://pub.orcid.com/', request_token_url=None, access_token_url=\n 'https://pub.orcid.org/oauth/token', access_token_method='POST',\n authorize_url='https://orcid.org/oauth/authorize#show_login', app_key=\n 'ORCID_APP_CREDENTIALS', content_type='application/json'))\n",
"\"\"\" ORCID Remote Application. \"\"\"\n",
"REMOTE_SANDBOX_APP = copy.deepcopy(REMOTE_APP)\n",
"\"\"\"ORCID Sandbox Remote Application.\"\"\"\n",
"REMOTE_SANDBOX_APP['params'].update(dict(base_url=\n 'https://api.sandbox.orcid.org/', access_token_url=\n 'https://api.sandbox.orcid.org/oauth/token', authorize_url=\n 'https://sandbox.orcid.org/oauth/authorize#show_login'))\n",
"def account_info(remote, resp):...\n",
"\"\"\"docstring\"\"\"\n",
"account_info = dict(external_id=resp.get('orcid'), external_method='orcid')\n",
"return account_info\n"
] | [
0,
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_10(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_21 = self.agent.type_to_payload(\n 'deregisterInstancesFromGoogleLoadBalancerDescription', {\n 'loadBalancerNames': [self.__use_lb_name], 'instanceIds': self.\n use_instance_names[:2], 'region': self.bindings['TEST_GCE_REGION'],\n 'credentials': self.bindings['GCE_CREDENTIALS']})\n",
"VAR_20 = gcp.GceContractBuilder(self.gce_observer)\n",
"VAR_20.new_clause_builder('Instances not in Target Pool', retryable_for_secs=5\n ).list_resources('target-pools', VAR_35=['--region', self.bindings[\n 'TEST_GCE_REGION']]).excludes_pred_list([jc.PathContainsPredicate(\n 'name', self.__use_lb_tp_name), jc.PathElementsContainPredicate(\n 'instances', self.use_instance_names[0]), jc.\n PathElementsContainPredicate('instances', self.use_instance_names[1])])\n",
"return st.OperationContract(self.new_post_operation(title=\n 'deregister_load_balancer_instances', data=payload, VAR_29='ops'),\n contract=builder.build())\n"
] | [
"def deregister_load_balancer_instances(self):...\n",
"\"\"\"docstring\"\"\"\n",
"payload = self.agent.type_to_payload(\n 'deregisterInstancesFromGoogleLoadBalancerDescription', {\n 'loadBalancerNames': [self.__use_lb_name], 'instanceIds': self.\n use_instance_names[:2], 'region': self.bindings['TEST_GCE_REGION'],\n 'credentials': self.bindings['GCE_CREDENTIALS']})\n",
"builder = gcp.GceContractBuilder(self.gce_observer)\n",
"builder.new_clause_builder('Instances not in Target Pool', retryable_for_secs=5\n ).list_resources('target-pools', extra_args=['--region', self.bindings[\n 'TEST_GCE_REGION']]).excludes_pred_list([jc.PathContainsPredicate(\n 'name', self.__use_lb_tp_name), jc.PathElementsContainPredicate(\n 'instances', self.use_instance_names[0]), jc.\n PathElementsContainPredicate('instances', self.use_instance_names[1])])\n",
"return st.OperationContract(self.new_post_operation(title=\n 'deregister_load_balancer_instances', data=payload, path='ops'),\n contract=builder.build())\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"super(CLASS_0, self).setUp()\n",
"self.mock(net, 'url_open', self._url_open)\n",
"self.mock(net, 'url_read_json', self._url_read_json)\n",
"self.mock(net, 'sleep_before_retry', lambda *VAR_10: None)\n",
"self._lock = threading.Lock()\n",
"self._requests = []\n"
] | [
"def setUp(self):...\n",
"super(TestCase, self).setUp()\n",
"self.mock(net, 'url_open', self._url_open)\n",
"self.mock(net, 'url_read_json', self._url_read_json)\n",
"self.mock(net, 'sleep_before_retry', lambda *_: None)\n",
"self._lock = threading.Lock()\n",
"self._requests = []\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'"
] |
[
"import io\n",
"import os\n",
"import sys\n",
"import webbrowser\n",
"from pathlib import Path\n",
"import ipfsapi\n",
"from ipvc.common import CommonAPI, expand_ref, refpath_to_mfs, make_len, atomic\n",
"def __init__(self, *VAR_0, **VAR_1):...\n",
"super().__init__(*VAR_0, **kwargs)\n",
"@atomic...\n",
"VAR_12, VAR_6 = self.common()\n",
"VAR_13 = self.ipfs.files_read(self.get_mfs_path(self.fs_cwd, repo_info=\n 'active_branch_name')).decode('utf-8')\n",
"if not self.quiet:\n",
"print(VAR_13)\n",
"return VAR_13\n"
] | [
"import io\n",
"import os\n",
"import sys\n",
"import webbrowser\n",
"from pathlib import Path\n",
"import ipfsapi\n",
"from ipvc.common import CommonAPI, expand_ref, refpath_to_mfs, make_len, atomic\n",
"def __init__(self, *args, **kwargs):...\n",
"super().__init__(*args, **kwargs)\n",
"@atomic...\n",
"_, branch = self.common()\n",
"active = self.ipfs.files_read(self.get_mfs_path(self.fs_cwd, repo_info=\n 'active_branch_name')).decode('utf-8')\n",
"if not self.quiet:\n",
"print(active)\n",
"return active\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"FunctionDef'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Return'"
] |
[
"def FUNC_17(self):...\n",
"if self.pid:\n",
"os.waitpid(self.pid, 0)\n"
] | [
"def wait(self):...\n",
"if self.pid:\n",
"os.waitpid(self.pid, 0)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'"
] |
[
"def FUNC_4(VAR_7):...\n",
"\"\"\"docstring\"\"\"\n",
"for directory in os.getenv('PATH').split(':'):\n",
"if os.path.exists(os.path.join(directory, VAR_7)):\n",
"return False\n",
"return True\n"
] | [
"def executable_exists(executable):...\n",
"\"\"\"docstring\"\"\"\n",
"for directory in os.getenv('PATH').split(':'):\n",
"if os.path.exists(os.path.join(directory, executable)):\n",
"return False\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"For",
"Condition",
"Return'",
"Return'"
] |
[
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self.config.get(self.section, 'default_language')\n"
] | [
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self.config.get(self.section, 'default_language')\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_9(self, VAR_15, VAR_16):...\n",
"VAR_28 = Gtk.Builder()\n",
"VAR_28.add_from_resource('/run/terminal/KeepassGtk/create_database.ui')\n",
"VAR_6 = VAR_28.get_object('headerbar')\n",
"UnlockDatabase(self, self.create_tab(VAR_15, VAR_6), VAR_16)\n"
] | [
"def start_database_opening_routine(self, tab_title, filepath):...\n",
"builder = Gtk.Builder()\n",
"builder.add_from_resource('/run/terminal/KeepassGtk/create_database.ui')\n",
"headerbar = builder.get_object('headerbar')\n",
"UnlockDatabase(self, self.create_tab(tab_title, headerbar), filepath)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_13(VAR_6):...\n",
"VAR_4.register_jvm_tool(VAR_8, VAR_4._key_for_tool_version('scalac', VAR_6),\n VAR_15=[cls._create_compiler_jardep(version)])\n"
] | [
"def register_scala_compiler_tool(version):...\n",
"cls.register_jvm_tool(register, cls._key_for_tool_version('scalac', version\n ), classpath=[cls._create_compiler_jardep(version)])\n"
] | [
0,
7
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"VAR_4 = '/api/apps'\n",
"VAR_5 = self.client.post(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_6 = VAR_5.data['id']\n",
"VAR_4 = '/api/apps/{app_id}/scale'.format(**locals())\n",
"VAR_8 = {'web': 'not_an_int'}\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n",
"self.assertContains(VAR_5, 'Invalid scaling format', status_code=400)\n",
"VAR_8 = {'invalid': 1}\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n",
"self.assertContains(VAR_5, 'Container type invalid', status_code=400)\n"
] | [
"def test_container_errors(self):...\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"url = '/api/apps/{app_id}/scale'.format(**locals())\n",
"body = {'web': 'not_an_int'}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertContains(response, 'Invalid scaling format', status_code=400)\n",
"body = {'invalid': 1}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertContains(response, 'Container type invalid', status_code=400)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_14(self, VAR_1=0, VAR_2=0, VAR_3='https://localhost:1'):...\n",
"VAR_22 = {'exit_code': VAR_2, 'must_signal_internal_failure': None,\n 'version': 3}\n",
"def __init__(VAR_39, VAR_40, VAR_41, VAR_42, VAR_48, VAR_43, VAR_44, VAR_45,...\n",
"VAR_39.returncode = None\n",
"VAR_39._out_file = VAR_7.path.join(self.root_dir, 'work',\n 'task_runner_out.json')\n",
"VAR_6 = [sys.executable, bot_main.THIS_FILE, 'task_runner',\n '--swarming-server', VAR_3, '--in-file', VAR_7.path.join(self.root_dir,\n 'work', 'task_runner_in.json'), '--out-file', VAR_39._out_file,\n '--cost-usd-hour', '3600.0', '--start', '100.0', '--min-free-space',\n str(int((os_utilities.get_min_free_space(bot_main.THIS_FILE) + 250.0) *\n 1024 * 1024))]\n",
"self.assertEqual(VAR_6, VAR_40)\n",
"self.assertEqual(True, VAR_41)\n",
"self.assertEqual(self.bot.base_dir, VAR_42)\n",
"self.assertEqual('24', VAR_48['SWARMING_TASK_ID'])\n",
"self.assertTrue(VAR_43)\n",
"self.assertEqual(subprocess42.STDOUT, VAR_44)\n",
"self.assertEqual(subprocess42.PIPE, VAR_45)\n",
"self.assertEqual(sys.platform != 'win32', VAR_46)\n",
"def FUNC_32(VAR_39, VAR_49=None):...\n",
"VAR_39.returncode = VAR_1\n",
"json.dump(VAR_22, VAR_32)\n",
"return 0\n"
] | [
"def _mock_popen(self, returncode=0, exit_code=0, url='https://localhost:1'):...\n",
"result = {'exit_code': exit_code, 'must_signal_internal_failure': None,\n 'version': 3}\n",
"def __init__(self2, cmd, detached, cwd, env, stdout, stderr, stdin, close_fds):...\n",
"self2.returncode = None\n",
"self2._out_file = os.path.join(self.root_dir, 'work', 'task_runner_out.json')\n",
"expected = [sys.executable, bot_main.THIS_FILE, 'task_runner',\n '--swarming-server', url, '--in-file', os.path.join(self.root_dir,\n 'work', 'task_runner_in.json'), '--out-file', self2._out_file,\n '--cost-usd-hour', '3600.0', '--start', '100.0', '--min-free-space',\n str(int((os_utilities.get_min_free_space(bot_main.THIS_FILE) + 250.0) *\n 1024 * 1024))]\n",
"self.assertEqual(expected, cmd)\n",
"self.assertEqual(True, detached)\n",
"self.assertEqual(self.bot.base_dir, cwd)\n",
"self.assertEqual('24', env['SWARMING_TASK_ID'])\n",
"self.assertTrue(stdout)\n",
"self.assertEqual(subprocess42.STDOUT, stderr)\n",
"self.assertEqual(subprocess42.PIPE, stdin)\n",
"self.assertEqual(sys.platform != 'win32', close_fds)\n",
"def wait(self2, timeout=None):...\n",
"self2.returncode = returncode\n",
"json.dump(result, f)\n",
"return 0\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_3(VAR_3):...\n",
""
] | [
"def chkuser(x):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_3(self, VAR_5):...\n",
"VAR_13 = '%s_%s_seq' % (VAR_5.name(), VAR_5.sqlSerialColumnName())\n",
"VAR_3, VAR_14 = self.executeSQL(\"select nextval('%s')\" % VAR_13)\n",
"VAR_10 = VAR_14.fetchone()[0]\n",
"assert VAR_10, \"Didn't get next id value from sequence\"\n",
"return VAR_10\n"
] | [
"def retrieveNextInsertId(self, klass):...\n",
"seqname = '%s_%s_seq' % (klass.name(), klass.sqlSerialColumnName())\n",
"conn, curs = self.executeSQL(\"select nextval('%s')\" % seqname)\n",
"value = curs.fetchone()[0]\n",
"assert value, \"Didn't get next id value from sequence\"\n",
"return value\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assert'",
"Return'"
] |
[
"def FUNC_22(self):...\n",
"return bool(self._latest_file_parse_request and self.\n _latest_file_parse_request.Done())\n"
] | [
"def DiagnosticsForCurrentFileReady(self):...\n",
"return bool(self._latest_file_parse_request and self.\n _latest_file_parse_request.Done())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_0(self, **VAR_4):...\n",
"VAR_16 = self.kwargs['pk']\n",
"VAR_17 = Event.objects.get(VAR_16=pk)\n",
"VAR_18 = Object.objects.filter(event=pk)\n",
"VAR_5 = super().get_context_data(**kwargs)\n",
"VAR_5['event'] = VAR_17\n",
"VAR_5['objects'] = VAR_18\n",
"VAR_5['categories'] = VAR_17.getUniqCategory()\n",
"VAR_5['types'] = VAR_17.getUniqType()\n",
"VAR_5['count'] = self.object_list.count()\n",
"return VAR_5\n"
] | [
"def get_context_data(self, **kwargs):...\n",
"pk = self.kwargs['pk']\n",
"event_obj = Event.objects.get(pk=pk)\n",
"objects_obj = Object.objects.filter(event=pk)\n",
"context = super().get_context_data(**kwargs)\n",
"context['event'] = event_obj\n",
"context['objects'] = objects_obj\n",
"context['categories'] = event_obj.getUniqCategory()\n",
"context['types'] = event_obj.getUniqType()\n",
"context['count'] = self.object_list.count()\n",
"return context\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_0(self, VAR_4):...\n",
"VAR_5 = VAR_4.credentials.username\n",
"VAR_6 = VAR_4.credentials.password\n",
"VAR_7 = VAR_4.credentials.domain\n",
"if VAR_7 is None:\n",
"VAR_8 = \"--ignore-certificate --authonly -u '{}' -p '{}' {}:{}\"\n",
"VAR_8 = \"--ignore-certificate --authonly -d {} -u '{}' -p '{}' {}:{}\"\n",
"VAR_9 = VAR_8.format(VAR_5, VAR_6, VAR_4.server, VAR_4.port)\n",
"VAR_9 = VAR_8.format(VAR_7.domain, VAR_5, VAR_6, VAR_4.server, VAR_4.port)\n",
"VAR_10 = subprocess.check_output('xfreerdp {}'.format(VAR_9), shell=True,\n stderr=subprocess.STDOUT)\n",
"if 'connected to' in str(e.output) and 'Authentication failure' not in str(e\n",
"VAR_11 = CLASS_1(True)\n",
"VAR_11 = CLASS_1(True)\n",
"print('{{{{%s}}}}' % e.output)\n",
"return VAR_11\n",
"return VAR_11\n",
"VAR_11 = CLASS_1(False, e)\n",
"return VAR_11\n"
] | [
"def poll(self, poll_input):...\n",
"username = poll_input.credentials.username\n",
"password = poll_input.credentials.password\n",
"domain = poll_input.credentials.domain\n",
"if domain is None:\n",
"opt_str = \"--ignore-certificate --authonly -u '{}' -p '{}' {}:{}\"\n",
"opt_str = \"--ignore-certificate --authonly -d {} -u '{}' -p '{}' {}:{}\"\n",
"options = opt_str.format(username, password, poll_input.server, poll_input.port\n )\n",
"options = opt_str.format(domain.domain, username, password, poll_input.\n server, poll_input.port)\n",
"output = subprocess.check_output('xfreerdp {}'.format(options), shell=True,\n stderr=subprocess.STDOUT)\n",
"if 'connected to' in str(e.output) and 'Authentication failure' not in str(e\n",
"result = RdpPollResult(True)\n",
"result = RdpPollResult(True)\n",
"print('{{{{%s}}}}' % e.output)\n",
"return result\n",
"return result\n",
"result = RdpPollResult(False, e)\n",
"return result\n"
] | [
0,
0,
0,
0,
2,
0,
2,
2,
2,
2,
2,
2,
0,
2,
2,
0,
2,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Return'",
"Assign'",
"Return'"
] |
[
"def FUNC_45(self):...\n",
"return self.__class__.__call__(VAR_37=self, VAR_39=True)\n"
] | [
"def plainstrings(self):...\n",
"return self.__class__.__call__(toclone=self, plainstr=True)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_7(self, VAR_8):...\n",
"VAR_14 = 'SELECT * FROM report WHERE Report_ID = ' + VAR_8\n",
"self.cursor.execute(VAR_14)\n",
"self.connection.commit()\n",
"VAR_15 = self.cursor.fetchone()\n",
"VAR_20 = ' '.join(map(str, VAR_15))\n",
"return VAR_20\n"
] | [
"def get_report(self, reportID):...\n",
"query = 'SELECT * FROM report WHERE Report_ID = ' + reportID\n",
"self.cursor.execute(query)\n",
"self.connection.commit()\n",
"fetch = self.cursor.fetchone()\n",
"report = ' '.join(map(str, fetch))\n",
"return report\n"
] | [
0,
4,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_3(VAR_16, VAR_17):...\n",
"\"\"\"docstring\"\"\"\n",
"return keyworder.get_single_keywords(VAR_16, VAR_17) or {}\n"
] | [
"def extract_single_keywords(skw_db, fulltext):...\n",
"\"\"\"docstring\"\"\"\n",
"return keyworder.get_single_keywords(skw_db, fulltext) or {}\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_0(VAR_0, VAR_1):...\n",
"return chain(*map(attrgetter(VAR_1), VAR_0))\n"
] | [
"def jobfiles(jobs, type):...\n",
"return chain(*map(attrgetter(type), jobs))\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_14(self, VAR_4, VAR_33, VAR_18=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_18 is not None:\n",
"self.flags.ignore_permissions = VAR_18\n",
"if isinstance(VAR_4, dict):\n",
"VAR_4 = [VAR_4]\n",
"for VAR_34 in VAR_4:\n",
"if isinstance(VAR_34, string_types):\n",
"VAR_33.append(VAR_34)\n",
"VAR_33.append(self.prepare_filter_condition(VAR_34))\n"
] | [
"def build_filter_conditions(self, filters, conditions, ignore_permissions=None...\n",
"\"\"\"docstring\"\"\"\n",
"if ignore_permissions is not None:\n",
"self.flags.ignore_permissions = ignore_permissions\n",
"if isinstance(filters, dict):\n",
"filters = [filters]\n",
"for f in filters:\n",
"if isinstance(f, string_types):\n",
"conditions.append(f)\n",
"conditions.append(self.prepare_filter_condition(f))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Condition",
"Assign'",
"For",
"Condition",
"Expr'",
"Expr'"
] |
[
"def __new__(VAR_10, VAR_11, VAR_12, VAR_13):...\n",
"for VAR_47 in webapp2.WSGIApplication.allowed_methods:\n",
"VAR_48 = VAR_13.get(VAR_47.lower())\n",
"return type.__new__(VAR_10, VAR_11, VAR_12, VAR_13)\n",
"if VAR_48 and not api.is_decorated(VAR_48):\n"
] | [
"def __new__(mcs, name, bases, attributes):...\n",
"for method in webapp2.WSGIApplication.allowed_methods:\n",
"func = attributes.get(method.lower())\n",
"return type.__new__(mcs, name, bases, attributes)\n",
"if func and not api.is_decorated(func):\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Assign'",
"Return'",
"Condition"
] |
[
"def FUNC_5(self):...\n",
"VAR_8 = 'test_foo.py:FooTest.test_bar'\n",
"VAR_7 = BokChoyTestSuite('', test_spec=spec)\n",
"VAR_1 = 'tests/{}'.format(VAR_8)\n",
"self.assertEqual(VAR_7.cmd, self._expected_command(VAR_1=name))\n"
] | [
"def test_testcase_spec(self):...\n",
"spec = 'test_foo.py:FooTest.test_bar'\n",
"suite = BokChoyTestSuite('', test_spec=spec)\n",
"name = 'tests/{}'.format(spec)\n",
"self.assertEqual(suite.cmd, self._expected_command(name=name))\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_18(self):...\n",
"return FUNC_18.dumps(self.properties())\n"
] | [
"def json(self):...\n",
"return json.dumps(self.properties())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_4(self, VAR_9):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5 = {}\n",
"VAR_5['volumeName'] = VAR_9\n",
"VAR_18 = self._cliq_run_xml('getVolumeInfo', VAR_5)\n",
"VAR_23 = {}\n",
"VAR_24 = VAR_18.find('response/volume')\n",
"for VAR_43, v in VAR_24.attrib.items():\n",
"VAR_23['volume.' + VAR_43] = v\n",
"VAR_25 = VAR_24.find('status')\n",
"if VAR_25 is not None:\n",
"for VAR_43, v in VAR_25.attrib.items():\n",
"VAR_26 = VAR_24.find('permission')\n",
"VAR_23['status.' + VAR_43] = v\n",
"if VAR_26 is not None:\n",
"for VAR_43, v in VAR_25.attrib.items():\n",
"VAR_0.debug(_('Volume info: %(volume_name)s => %(volume_attributes)s') % {\n 'volume_name': VAR_9, 'volume_attributes': VAR_23})\n",
"VAR_23['permission.' + VAR_43] = v\n",
"return VAR_23\n"
] | [
"def _cliq_get_volume_info(self, volume_name):...\n",
"\"\"\"docstring\"\"\"\n",
"cliq_args = {}\n",
"cliq_args['volumeName'] = volume_name\n",
"result_xml = self._cliq_run_xml('getVolumeInfo', cliq_args)\n",
"volume_attributes = {}\n",
"volume_node = result_xml.find('response/volume')\n",
"for k, v in volume_node.attrib.items():\n",
"volume_attributes['volume.' + k] = v\n",
"status_node = volume_node.find('status')\n",
"if status_node is not None:\n",
"for k, v in status_node.attrib.items():\n",
"permission_node = volume_node.find('permission')\n",
"volume_attributes['status.' + k] = v\n",
"if permission_node is not None:\n",
"for k, v in status_node.attrib.items():\n",
"LOG.debug(_('Volume info: %(volume_name)s => %(volume_attributes)s') % {\n 'volume_name': volume_name, 'volume_attributes': volume_attributes})\n",
"volume_attributes['permission.' + k] = v\n",
"return volume_attributes\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"For",
"Assign'",
"Assign'",
"Condition",
"For",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_7(self):...\n",
"super(CLASS_0, self).validate()\n",
"if not self.blog_intro:\n",
"self.blog_intro = self.content[:140]\n",
"if self.blog_intro:\n",
"self.blog_intro = strip_html_tags(self.blog_intro)\n",
"self.blog_intro = self.blog_intro[:140]\n",
"if self.published and not self.published_on:\n",
"self.published_on = today()\n",
"frappe.db.sql('string', (self.blogger,))\n"
] | [
"def validate(self):...\n",
"super(BlogPost, self).validate()\n",
"if not self.blog_intro:\n",
"self.blog_intro = self.content[:140]\n",
"if self.blog_intro:\n",
"self.blog_intro = strip_html_tags(self.blog_intro)\n",
"self.blog_intro = self.blog_intro[:140]\n",
"if self.published and not self.published_on:\n",
"self.published_on = today()\n",
"frappe.db.sql(\n \"\"\"update tabBlogger set posts=(select count(*) from `tabBlog Post`\n\t\t\twhere ifnull(blogger,'')=tabBlogger.name)\n\t\t\twhere name=%s\"\"\"\n , (self.blogger,))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'"
] |
[
"def FUNC_39(self, VAR_16, VAR_25=None, VAR_26=True):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_47 = self.meta.get_field(VAR_16)\n",
"if VAR_47 and VAR_47.get('__print_hide'):\n",
"return True\n",
"VAR_48 = 0\n",
"if self.get(VAR_16) == 0 and not self.meta.istable:\n",
"VAR_48 = (VAR_25 and VAR_25.print_hide_if_no_value or VAR_47 and VAR_47.\n print_hide_if_no_value)\n",
"if not VAR_48:\n",
"if VAR_25 and VAR_25.print_hide is not None:\n",
"return VAR_48\n",
"VAR_48 = VAR_25.print_hide\n",
"if VAR_47:\n",
"VAR_48 = VAR_47.print_hide\n"
] | [
"def is_print_hide(self, fieldname, df=None, for_print=True):...\n",
"\"\"\"docstring\"\"\"\n",
"meta_df = self.meta.get_field(fieldname)\n",
"if meta_df and meta_df.get('__print_hide'):\n",
"return True\n",
"print_hide = 0\n",
"if self.get(fieldname) == 0 and not self.meta.istable:\n",
"print_hide = (df and df.print_hide_if_no_value or meta_df and meta_df.\n print_hide_if_no_value)\n",
"if not print_hide:\n",
"if df and df.print_hide is not None:\n",
"return print_hide\n",
"print_hide = df.print_hide\n",
"if meta_df:\n",
"print_hide = meta_df.print_hide\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'"
] |
[
"def __init__(self):...\n",
""
] | [
"def __init__(self):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_3(self, VAR_5):...\n",
"VAR_13 = self.connection.cursor()\n",
"VAR_13.execute(\n 'SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = %s)'\n , VAR_5)\n",
"return False\n",
"return True\n"
] | [
"def tableExists(self, tableName):...\n",
"cursor = self.connection.cursor()\n",
"cursor.execute(\n 'SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = %s)'\n , tableName)\n",
"return False\n",
"return True\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Return'",
"Return'"
] |
[
"def FUNC_0(self):...\n",
"return hpcommon.HP3PARCommon(self.configuration)\n"
] | [
"def _init_common(self):...\n",
"return hpcommon.HP3PARCommon(self.configuration)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_10(self, VAR_2, *VAR_3, **VAR_4):...\n",
"self._dispatch_processing()\n",
"return super().dispatch(VAR_2, *VAR_3, **kwargs)\n"
] | [
"def dispatch(self, request, *args, **kwargs):...\n",
"self._dispatch_processing()\n",
"return super().dispatch(request, *args, **kwargs)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Return'"
] |
[
"def FUNC_13(self, VAR_51):...\n",
"\"\"\"docstring\"\"\"\n",
"if not isinstance(VAR_51, dict):\n",
"VAR_73, VAR_54, VAR_52, VAR_74 = self._get_validated_data(VAR_51)\n",
"VAR_75 = self.field_mapping[VAR_52][self.FIELD_NAME]\n",
"VAR_76 = getattr(self.VALUE_OPERATORS, VAR_73)\n",
"VAR_39 = self._get_data_type(VAR_52)\n",
"VAR_77 = self._get_table_name(VAR_52)\n",
"if VAR_76 == self.VALUE_OPERATORS.is_op:\n",
"assert VAR_54.upper(\n ) in self.IS_OPERATOR_VALUE, 'Invalid rhs for `IS` operator'\n",
"self._sanitize_value(VAR_54, VAR_39)\n",
"VAR_91, VAR_92 = VAR_54.upper(), None\n",
"if VAR_74:\n",
"VAR_78 = u'`{table}`.`{field}`'.format(VAR_77=table, VAR_52=field_name)\n",
"self._sanitize_value(VAR_74, VAR_39)\n",
"if VAR_39 == self.STRING:\n",
"if 'aggregate_lhs' in VAR_51:\n",
"VAR_54 = self._sql_injection_proof(VAR_54)\n",
"VAR_91, VAR_92 = self._convert_values([VAR_54, VAR_74], VAR_39)\n",
"VAR_93 = VAR_51['aggregate_lhs'].upper()\n",
"if VAR_76 in [self.VALUE_OPERATORS.is_challenge_completed, self.\n",
"if VAR_74:\n",
"if VAR_93 in self.ALLOWED_AGGREGATE_FUNCTIONS:\n",
"return '{negate} {check}'.format(negate='NOT' if sql_operator == self.\n VALUE_OPERATORS.is_challenge_not_completed else '', check=self.\n CHALLENGE_CHECK_QUERY.format(value=sql_value))\n",
"if VAR_76 == self.BETWEEN:\n",
"VAR_74 = self._sql_injection_proof(VAR_74)\n",
"VAR_78 = u'{func_name}({field_name})'.format(func_name=aggregate_func_name,\n VAR_75=lhs)\n",
"VAR_0.info('Unsupported aggregate functions: {}'.format(VAR_93))\n",
"VAR_63 = u'{lhs} {operator} {primary_value} AND {secondary_value}'.format(\n VAR_78=lhs, VAR_73=sql_operator, VAR_54=sql_value, VAR_74=\n secondary_sql_value)\n",
"VAR_63 = u'{lhs} {operator} {value}'.format(VAR_73=sql_operator, VAR_78=lhs,\n VAR_54=sql_value)\n",
"return VAR_63\n"
] | [
"def _generate_where_phrase(self, where):...\n",
"\"\"\"docstring\"\"\"\n",
"if not isinstance(where, dict):\n",
"operator, value, field, secondary_value = self._get_validated_data(where)\n",
"field_name = self.field_mapping[field][self.FIELD_NAME]\n",
"sql_operator = getattr(self.VALUE_OPERATORS, operator)\n",
"data_type = self._get_data_type(field)\n",
"table = self._get_table_name(field)\n",
"if sql_operator == self.VALUE_OPERATORS.is_op:\n",
"assert value.upper() in self.IS_OPERATOR_VALUE, 'Invalid rhs for `IS` operator'\n",
"self._sanitize_value(value, data_type)\n",
"sql_value, secondary_sql_value = value.upper(), None\n",
"if secondary_value:\n",
"lhs = u'`{table}`.`{field}`'.format(table=table, field=field_name)\n",
"self._sanitize_value(secondary_value, data_type)\n",
"if data_type == self.STRING:\n",
"if 'aggregate_lhs' in where:\n",
"value = self._sql_injection_proof(value)\n",
"sql_value, secondary_sql_value = self._convert_values([value,\n secondary_value], data_type)\n",
"aggregate_func_name = where['aggregate_lhs'].upper()\n",
"if sql_operator in [self.VALUE_OPERATORS.is_challenge_completed, self.\n",
"if secondary_value:\n",
"if aggregate_func_name in self.ALLOWED_AGGREGATE_FUNCTIONS:\n",
"return '{negate} {check}'.format(negate='NOT' if sql_operator == self.\n VALUE_OPERATORS.is_challenge_not_completed else '', check=self.\n CHALLENGE_CHECK_QUERY.format(value=sql_value))\n",
"if sql_operator == self.BETWEEN:\n",
"secondary_value = self._sql_injection_proof(secondary_value)\n",
"lhs = u'{func_name}({field_name})'.format(func_name=aggregate_func_name,\n field_name=lhs)\n",
"logger.info('Unsupported aggregate functions: {}'.format(aggregate_func_name))\n",
"where_phrase = (u'{lhs} {operator} {primary_value} AND {secondary_value}'.\n format(lhs=lhs, operator=sql_operator, value=sql_value, secondary_value\n =secondary_sql_value))\n",
"where_phrase = u'{lhs} {operator} {value}'.format(operator=sql_operator,\n lhs=lhs, value=sql_value)\n",
"return where_phrase\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assert'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_5(VAR_11):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_10[VAR_11]\n",
"return\n",
"VAR_20 = c_onboarding_settings(use_secondary=True).find_one({'name': VAR_11})\n",
"if not VAR_20:\n",
"c_onboarding_settings().insert({'name': VAR_11, 'data': VAR_10[VAR_11]})\n",
"return VAR_20['data']\n",
"return VAR_10[VAR_11]\n"
] | [
"def get_onboarding_setting(setting_name):...\n",
"\"\"\"docstring\"\"\"\n",
"ONBOARDING_SETTINGS_DEFAULT[setting_name]\n",
"return\n",
"onboarding_setting = c_onboarding_settings(use_secondary=True).find_one({\n 'name': setting_name})\n",
"if not onboarding_setting:\n",
"c_onboarding_settings().insert({'name': setting_name, 'data':\n ONBOARDING_SETTINGS_DEFAULT[setting_name]})\n",
"return onboarding_setting['data']\n",
"return ONBOARDING_SETTINGS_DEFAULT[setting_name]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Return'",
"Assign'",
"Condition",
"Expr'",
"Return'",
"Return'"
] |
[
"def FUNC_21(VAR_16):...\n",
"return (VAR_16.input, VAR_16.dynamic_input) if VAR_4 else (VAR_16.output,\n VAR_16.dynamic_output)\n"
] | [
"def get_io(rule):...\n",
"return (rule.input, rule.dynamic_input) if input else (rule.output, rule.\n dynamic_output)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |