lines: sequencelengths [1, 383]
raw_lines: sequencelengths [1, 383]
label: sequencelengths [1, 383]
type: sequencelengths [1, 383]
[ "def FUNC_26(VAR_0, VAR_1):...\n", "create_dir('random')\n", "VAR_8 = VAR_1.post('/folders/rename', data={'current_path': 'random',\n 'new_name': 'renamed_random'}, follow_redirects=True)\n", "assert VAR_8.status_code == 200\n", "assert b'Renamed successfully' in VAR_8.data\n" ]
[ "def test_rename_dir(test_app, client):...\n", "create_dir('random')\n", "resp = client.post('/folders/rename', data={'current_path': 'random',\n 'new_name': 'renamed_random'}, follow_redirects=True)\n", "assert resp.status_code == 200\n", "assert b'Renamed successfully' in resp.data\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assert'", "Assert'" ]
[ "def FUNC_59(VAR_120, *VAR_119):...\n", "\"\"\"docstring\"\"\"\n", "return FUNC_61(VAR_120, *VAR_119)\n" ]
[ "def get_app_path(app_name, *joins):...\n", "\"\"\"docstring\"\"\"\n", "return get_pymodule_path(app_name, *joins)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_17(self, VAR_40):...\n", "VAR_51 = VAR_40[-5:]\n", "FUNC_7([VAR_1('node_modules', '.bin', 'po2json'), '-p', '-F', '-f',\n 'jed1.x', '-d', 'nbjs', VAR_1('notebook', 'i18n', VAR_51, 'LC_MESSAGES',\n 'nbjs.po'), VAR_1('notebook', 'i18n', VAR_51, 'LC_MESSAGES', 'nbjs.json')])\n" ]
[ "def build_jstranslation(self, trd):...\n", "lang = trd[-5:]\n", "run([pjoin('node_modules', '.bin', 'po2json'), '-p', '-F', '-f', 'jed1.x',\n '-d', 'nbjs', pjoin('notebook', 'i18n', lang, 'LC_MESSAGES', 'nbjs.po'),\n pjoin('notebook', 'i18n', lang, 'LC_MESSAGES', 'nbjs.json')])\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_9(VAR_16):...\n", "for VAR_11 in VAR_16:\n", "if isinstance(VAR_11, collections.Iterable) and not isinstance(VAR_11, str\n", "yield from FUNC_9(VAR_11)\n", "yield VAR_11\n" ]
[ "def flatten(arr):...\n", "for el in arr:\n", "if isinstance(el, collections.Iterable) and not isinstance(el, str\n", "yield from flatten(el)\n", "yield el\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_19(VAR_9, VAR_10, VAR_11):...\n", "if VAR_87.role_admin():\n", "VAR_11 = int(VAR_11)\n", "VAR_11 = VAR_87.id\n", "if VAR_87.check_visibility(constants.SIDEBAR_DOWNLOAD):\n", "if VAR_87.show_detail_random():\n", "abort(404)\n", "VAR_68 = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()\n ).order_by(func.random()).limit(config.config_random_books)\n", "VAR_68 = false()\n", "VAR_63, VAR_64, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0, db.Books, ub.\n Downloads.user_id == VAR_11, VAR_10[0], db.books_series_link, db.Books.\n id == db.books_series_link.c.book, db.Series, ub.Downloads, db.Books.id ==\n ub.Downloads.book_id)\n", "for VAR_95 in VAR_63:\n", "if not calibre_db.session.query(db.Books).filter(calibre_db.common_filters()\n", "VAR_104 = ub.session.query(ub.User).filter(ub.User.id == VAR_11).first()\n", "ub.delete_download(VAR_95.id)\n", "return render_title_template('index.html', VAR_68=random, VAR_63=entries,\n VAR_65=pagination, id=user_id, VAR_150=_(\n u'Downloaded books by %(user)s', user=user.name), VAR_9='download',\n VAR_10=order[1])\n" ]
[ "def render_downloaded_books(page, order, user_id):...\n", "if current_user.role_admin():\n", "user_id = int(user_id)\n", "user_id = current_user.id\n", "if current_user.check_visibility(constants.SIDEBAR_DOWNLOAD):\n", "if current_user.show_detail_random():\n", "abort(404)\n", "random = calibre_db.session.query(db.Books).filter(calibre_db.common_filters()\n ).order_by(func.random()).limit(config.config_random_books)\n", "random = false()\n", "entries, __, pagination = calibre_db.fill_indexpage(page, 0, db.Books, ub.\n Downloads.user_id == user_id, order[0], db.books_series_link, db.Books.\n id == db.books_series_link.c.book, db.Series, ub.Downloads, db.Books.id ==\n ub.Downloads.book_id)\n", "for book in entries:\n", "if not calibre_db.session.query(db.Books).filter(calibre_db.common_filters()\n", "user = ub.session.query(ub.User).filter(ub.User.id == user_id).first()\n", "ub.delete_download(book.id)\n", "return render_title_template('index.html', random=random, entries=entries,\n pagination=pagination, id=user_id, title=_(\n u'Downloaded books by %(user)s', user=user.name), page='download',\n order=order[1])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "For", "Condition", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_35(VAR_40, VAR_37):...\n", "VAR_38 = VAR_37.title\n", "VAR_39 = VAR_37.author\n", "VAR_66, VAR_63, VAR_70 = FUNC_34(VAR_38, VAR_39)\n", "VAR_71 = helper.get_valid_filename(VAR_38)\n", "VAR_72 = helper.get_valid_filename(VAR_70.name)\n", "VAR_73 = os.path.join(VAR_72, VAR_71).replace('\\\\', '/')\n", "VAR_42 = db.Books(VAR_38, '', VAR_66, datetime.utcnow(), datetime(101, 1, 1\n ), '1', datetime.utcnow(), VAR_73, VAR_37.cover, VAR_70, [], '')\n", "VAR_40 |= FUNC_7(VAR_63, VAR_42.authors, db.Authors, calibre_db.session,\n 'author')\n", "VAR_40 |= FUNC_18(VAR_37.series_id, VAR_42)\n", "VAR_26 = []\n", "VAR_40 |= FUNC_20(VAR_37.languages, VAR_42, VAR_25=True, VAR_26=invalid)\n", "if VAR_26:\n", "for l in VAR_26:\n", "VAR_40 |= FUNC_16(VAR_37.tags, VAR_42)\n", "flash(_(u\"'%(langname)s' is not a valid language\", langname=l), category=\n 'warning')\n", "VAR_40 |= FUNC_21(VAR_37.publisher, VAR_42)\n", "VAR_40 |= FUNC_17(VAR_37.series, VAR_42)\n", "VAR_74 = os.path.getsize(VAR_37.file_path)\n", "VAR_75 = db.Data(VAR_42, VAR_37.extension.upper()[1:], VAR_74, VAR_71)\n", "VAR_42.data.append(VAR_75)\n", "calibre_db.session.add(VAR_42)\n", "calibre_db.session.flush()\n", "return VAR_42, VAR_63, VAR_71\n" ]
[ "def create_book_on_upload(modif_date, meta):...\n", "title = meta.title\n", "authr = meta.author\n", "sort_authors, input_authors, db_author = prepare_authors_on_upload(title, authr\n )\n", "title_dir = helper.get_valid_filename(title)\n", "author_dir = helper.get_valid_filename(db_author.name)\n", "path = os.path.join(author_dir, title_dir).replace('\\\\', '/')\n", "db_book = db.Books(title, '', sort_authors, datetime.utcnow(), datetime(101,\n 1, 1), '1', datetime.utcnow(), path, meta.cover, db_author, [], '')\n", "modif_date |= modify_database_object(input_authors, db_book.authors, db.\n Authors, calibre_db.session, 'author')\n", "modif_date |= edit_book_series_index(meta.series_id, db_book)\n", "invalid = []\n", "modif_date |= edit_book_languages(meta.languages, db_book, upload=True,\n invalid=invalid)\n", "if invalid:\n", "for l in invalid:\n", "modif_date |= edit_book_tags(meta.tags, db_book)\n", "flash(_(u\"'%(langname)s' is not a valid language\", langname=l), category=\n 'warning')\n", "modif_date |= edit_book_publisher(meta.publisher, db_book)\n", "modif_date |= edit_book_series(meta.series, db_book)\n", "file_size = os.path.getsize(meta.file_path)\n", "db_data = db.Data(db_book, meta.extension.upper()[1:], file_size, title_dir)\n", "db_book.data.append(db_data)\n", "calibre_db.session.add(db_book)\n", "calibre_db.session.flush()\n", "return db_book, input_authors, title_dir\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "AugAssign'", "AugAssign'", "Assign'", "AugAssign'", "Condition", "For", "AugAssign'", "Expr'", "AugAssign'", "AugAssign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_1(self):...\n", "self.normativeRefs = {}\n", "self.informativeRefs = {}\n", "self.refs = ReferenceManager(VAR_10=self.dataFile, VAR_11=self.testing)\n", "self.externalRefsUsed = defaultdict(lambda : defaultdict(dict))\n", "self.md = None\n", "self.mdBaseline = metadata.MetadataManager()\n", "self.mdDocument = None\n", "self.mdCommandLine = metadata.MetadataManager()\n", "self.mdDefaults = None\n", "self.mdOverridingDefaults = None\n", "self.biblios = {}\n", "self.typeExpansions = {}\n", "self.macros = defaultdict(lambda x: '???')\n", "self.canIUse = {}\n", "self.mdnSpecLinks = {}\n", "self.widl = idl.getParser()\n", "self.testSuites = json.loads(self.dataFile.fetch('test-suites.json', str=True))\n", "self.languages = json.loads(self.dataFile.fetch('languages.json', str=True))\n", "self.extraStyles = defaultdict(str)\n", "self.extraStyles['style-colors'] = VAR_0\n", "self.extraStyles['style-darkmode'] = VAR_1\n", "self.extraStyles['style-md-lists'] = VAR_2\n", "self.extraStyles['style-autolinks'] = VAR_3\n", "self.extraStyles['style-selflinks'] = VAR_4\n", "self.extraStyles['style-counters'] = VAR_5\n", "self.extraScripts = defaultdict(str)\n", "VAR_31 = self.inputSource.read()\n", "die(\"Couldn't find the input file at the specified location '{0}'.\", self.\n inputSource)\n", "return True\n", "self.lines = VAR_31.lines\n", "return False\n", "if VAR_31.date is not None:\n", "self.mdBaseline.addParsedData('Date', VAR_31.date)\n" ]
[ "def initializeState(self):...\n", "self.normativeRefs = {}\n", "self.informativeRefs = {}\n", "self.refs = ReferenceManager(fileRequester=self.dataFile, testing=self.testing)\n", "self.externalRefsUsed = defaultdict(lambda : defaultdict(dict))\n", "self.md = None\n", "self.mdBaseline = metadata.MetadataManager()\n", "self.mdDocument = None\n", "self.mdCommandLine = metadata.MetadataManager()\n", "self.mdDefaults = None\n", "self.mdOverridingDefaults = None\n", "self.biblios = {}\n", "self.typeExpansions = {}\n", "self.macros = defaultdict(lambda x: '???')\n", "self.canIUse = {}\n", "self.mdnSpecLinks = {}\n", "self.widl = idl.getParser()\n", "self.testSuites = json.loads(self.dataFile.fetch('test-suites.json', str=True))\n", "self.languages = json.loads(self.dataFile.fetch('languages.json', str=True))\n", "self.extraStyles = defaultdict(str)\n", "self.extraStyles['style-colors'] = styleColors\n", "self.extraStyles['style-darkmode'] = styleDarkMode\n", "self.extraStyles['style-md-lists'] = styleMdLists\n", "self.extraStyles['style-autolinks'] = styleAutolinks\n", "self.extraStyles['style-selflinks'] = styleSelflinks\n", "self.extraStyles['style-counters'] = styleCounters\n", "self.extraScripts = defaultdict(str)\n", "inputContent = self.inputSource.read()\n", "die(\"Couldn't find the input file at the specified location '{0}'.\", self.\n inputSource)\n", "return True\n", "self.lines = inputContent.lines\n", "return False\n", "if inputContent.date is not None:\n", "self.mdBaseline.addParsedData('Date', inputContent.date)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Return'", "Assign'", "Return'", "Condition", "Expr'" ]
[ "def FUNC_21(self):...\n", "VAR_5 = {'not_rooms': ['!secretbase:unknown'], 'rooms': ['!secretbase:unknown']\n }\n", "VAR_6 = FUNC_0(sender='@foo:bar', type='m.room.message', room_id=\n '!secretbase:unknown')\n", "self.assertFalse(Filter(VAR_5).check(VAR_6))\n" ]
[ "def test_definition_not_rooms_takes_priority_over_rooms(self):...\n", "definition = {'not_rooms': ['!secretbase:unknown'], 'rooms': [\n '!secretbase:unknown']}\n", "event = MockEvent(sender='@foo:bar', type='m.room.message', room_id=\n '!secretbase:unknown')\n", "self.assertFalse(Filter(definition).check(event))\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_6(VAR_0):...\n", "VAR_9 = GeneratorError()\n", "VAR_10 = VAR_0.patch('openapi_python_client._get_project_for_url_or_path',\n return_value=error)\n", "VAR_6 = VAR_0.MagicMock()\n", "VAR_7 = VAR_0.MagicMock()\n", "from openapi_python_client import update_existing_client\n", "VAR_11 = update_existing_client(VAR_6=url, VAR_7=path)\n", "VAR_10.assert_called_once_with(VAR_6=url, VAR_7=path)\n", "assert VAR_11 == [VAR_9]\n" ]
[ "def test_update_existing_client_project_error(mocker):...\n", "error = GeneratorError()\n", "_get_project_for_url_or_path = mocker.patch(\n 'openapi_python_client._get_project_for_url_or_path', return_value=error)\n", "url = mocker.MagicMock()\n", "path = mocker.MagicMock()\n", "from openapi_python_client import update_existing_client\n", "result = update_existing_client(url=url, path=path)\n", "_get_project_for_url_or_path.assert_called_once_with(url=url, path=path)\n", "assert result == [error]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Expr'", "Assert'" ]
[ "def FUNC_108(VAR_169, VAR_7='utf-8'):...\n", "" ]
[ "def safe_decode(param, encoding='utf-8'):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_16(self, VAR_44):...\n", "VAR_13 = FUNC_1(VAR_44.group(1))\n", "self.def_links[VAR_13] = {'link': VAR_44.group(2), 'title': VAR_44.group(3)}\n" ]
[ "def parse_def_links(self, m):...\n", "key = _keyify(m.group(1))\n", "self.def_links[key] = {'link': m.group(2), 'title': m.group(3)}\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "def FUNC_1(VAR_15, VAR_16=VAR_11, VAR_17=VAR_12, VAR_18=VAR_13, VAR_19=VAR_14):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_15.tag in VAR_17:\n", "return\n", "VAR_62 = VAR_15.get('class')\n", "if VAR_62:\n", "VAR_62 = VAR_62.split()\n", "for child in list(VAR_15):\n", "for match_class in VAR_19:\n", "FUNC_1(child, VAR_16=link_regexes, VAR_17=avoid_elements, VAR_18=\n avoid_hosts, VAR_19=avoid_classes)\n", "if VAR_15.text:\n", "if match_class in VAR_62:\n", "if child.tail:\n", "VAR_20, VAR_83 = FUNC_2(VAR_15.text, VAR_16, VAR_18, VAR_21=el.makeelement)\n", "return\n", "VAR_20, VAR_92 = FUNC_2(child.tail, VAR_16, VAR_18, VAR_21=el.makeelement)\n", "if VAR_83:\n", "if VAR_92:\n", "VAR_15.text = VAR_20\n", "child.tail = VAR_20\n", "VAR_15[:0] = VAR_83\n", "VAR_99 = VAR_15.index(child)\n", "VAR_15[VAR_99 + 1:VAR_99 + 1] = VAR_92\n" ]
[ "def autolink(el, link_regexes=_link_regexes, avoid_elements=_avoid_elements,...\n", "\"\"\"docstring\"\"\"\n", "if el.tag in avoid_elements:\n", "return\n", "class_name = el.get('class')\n", "if class_name:\n", "class_name = class_name.split()\n", "for child in list(el):\n", "for match_class in avoid_classes:\n", "autolink(child, link_regexes=link_regexes, avoid_elements=avoid_elements,\n avoid_hosts=avoid_hosts, avoid_classes=avoid_classes)\n", "if el.text:\n", "if match_class in class_name:\n", "if child.tail:\n", "text, pre_children = _link_text(el.text, link_regexes, avoid_hosts, factory\n =el.makeelement)\n", "return\n", "text, tail_children = _link_text(child.tail, link_regexes, avoid_hosts,\n factory=el.makeelement)\n", "if pre_children:\n", "if tail_children:\n", "el.text = text\n", "child.tail = text\n", "el[:0] = pre_children\n", "index = el.index(child)\n", "el[index + 1:index + 1] = tail_children\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Assign'", "Condition", "Assign'", "For", "For", "Expr'", "Condition", "Condition", "Condition", "Assign'", "Return'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_15(self, VAR_13, VAR_14):...\n", "VAR_62 = self.useroptions.forUser(self.getUserId())\n", "VAR_62.setOption(VAR_13, VAR_14)\n", "return 'success'\n" ]
[ "def api_setuseroption(self, optionkey, optionval):...\n", "uo = self.useroptions.forUser(self.getUserId())\n", "uo.setOption(optionkey, optionval)\n", "return 'success'\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Return'" ]
[ "from __future__ import unicode_literals\n", "import frappe, json\n", "from frappe.utils import cstr, unique, cint\n", "from frappe.permissions import has_permission\n", "from frappe.handler import is_whitelisted\n", "from frappe import _\n", "from six import string_types\n", "import re\n", "import wrapt\n", "VAR_0 = ['DocType', 'Role']\n", "def FUNC_0(VAR_1):...\n", "VAR_19 = ['select', 'delete', 'drop', 'update', 'case', 'and', 'or', 'like']\n", "def FUNC_7(VAR_1):...\n", "VAR_20.throw(_('Invalid Search Field {0}').format(VAR_1), VAR_20.DataError)\n", "if len(VAR_1) == 1:\n", "VAR_24 = re.compile('^.*[=;*,\\\\\\'\"$\\\\-+%#@()_].*')\n", "if len(VAR_1) >= 3:\n", "if VAR_24.match(VAR_1):\n", "if '=' in VAR_1:\n", "@VAR_20.whitelist()...\n", "FUNC_7(VAR_1)\n", "FUNC_7(VAR_1)\n", "if ' --' in VAR_1:\n", "FUNC_2(VAR_2, VAR_3.strip(), VAR_4, VAR_1=searchfield, VAR_6=page_length,\n VAR_5=filters, VAR_7=reference_doctype, VAR_8=ignore_user_permissions)\n", "FUNC_7(VAR_1)\n", "if any(' {0} '.format(keyword) in VAR_1.split() for keyword in VAR_19):\n", "VAR_20.response['results'] = FUNC_4(VAR_20.response['values'])\n", "FUNC_7(VAR_1)\n", "if any(keyword in VAR_1.split() for keyword in VAR_19):\n", "@VAR_20.whitelist()...\n", "FUNC_7(VAR_1)\n", "VAR_24 = re.compile('^.*[=;*,\\\\\\'\"$\\\\-+%#@()].*')\n", "VAR_9 = cint(VAR_9)\n", "if any(VAR_24.match(f) for f in VAR_1.split()):\n", "if isinstance(VAR_5, string_types):\n", "FUNC_7(VAR_1)\n", "VAR_5 = json.loads(VAR_5)\n", "if VAR_1:\n", "FUNC_0(VAR_1)\n", "if not VAR_1:\n", "VAR_1 = 'name'\n", "VAR_21 = VAR_20.get_hooks().standard_queries or {}\n", "if VAR_4 and VAR_4.split()[0].lower() != 'select':\n", "if not VAR_4 and VAR_2 in VAR_21:\n", "is_whitelisted(VAR_20.get_attr(VAR_4))\n", "if VAR_20.local.conf.developer_mode:\n", "def FUNC_3(VAR_12, VAR_13):...\n", "FUNC_2(VAR_2, VAR_3, VAR_21[VAR_2][0], VAR_1, VAR_9, VAR_6, VAR_5)\n", "VAR_12 = VAR_20.get_meta(VAR_2)\n", "VAR_20.response['values'] = VAR_20.call(VAR_4, VAR_2, VAR_3, VAR_1, VAR_9,\n VAR_6, VAR_5, VAR_11=as_dict)\n", "VAR_20.respond_as_web_page(title='Invalid Method', html='Method not found',\n indicator_color='red', http_status_code=404)\n", "return\n", "VAR_22 = ['name']\n", "if VAR_4:\n", "if VAR_12.search_fields:\n", "VAR_20.throw(_('This query style is discontinued'))\n", "if isinstance(VAR_5, dict):\n", "for d in VAR_12.search_fields.split(','):\n", "if VAR_12.title_field and VAR_12.title_field not in VAR_22:\n", "VAR_34 = VAR_5.items()\n", "if VAR_5 == None:\n", "if d.strip() not in VAR_22:\n", "VAR_22.append(VAR_12.title_field)\n", "if VAR_13 not in VAR_22:\n", "VAR_5 = []\n", "VAR_5 = []\n", "VAR_26 = []\n", "VAR_22.append(d.strip())\n", "VAR_22.append(VAR_13)\n", "return VAR_22\n", "for f in VAR_34:\n", "if VAR_3:\n", "if isinstance(f[1], (list, tuple)):\n", "VAR_35 = ['name']\n", "if VAR_12.get('fields', {'fieldname': 'enabled', 'fieldtype': 'Check'}):\n", "VAR_5.append([VAR_2, f[0], f[1][0], f[1][1]])\n", "VAR_5.append([VAR_2, f[0], '=', f[1]])\n", "if VAR_12.title_field:\n", "VAR_5.append([VAR_2, 'enabled', '=', 1])\n", "if VAR_12.get('fields', {'fieldname': 'disabled', 'fieldtype': 'Check'}):\n", "VAR_35.append(VAR_12.title_field)\n", "if VAR_12.search_fields:\n", "VAR_5.append([VAR_2, 'disabled', '!=', 1])\n", "VAR_27 = FUNC_3(VAR_12, VAR_1 or 'name')\n", "VAR_35.extend(VAR_12.get_search_fields())\n", "for f in VAR_35:\n", "if VAR_10:\n", "VAR_36 = VAR_12.get_field(f.strip())\n", "VAR_27 = list(set(VAR_27 + json.loads(VAR_10)))\n", "VAR_28 = [('`tab%s`.`%s`' % 
(VAR_12.name, f.strip())) for f in VAR_27]\n", "if VAR_2 not in VAR_0 and (f == 'name' or VAR_36 and VAR_36.fieldtype in [\n", "VAR_28.append('locate({_txt}, `tab{doctype}`.`name`) as `_relevance`'.\n format(_txt=frappe.db.escape((txt or '').replace('%', '').replace('@',\n '')), VAR_2=doctype))\n", "VAR_26.append([VAR_2, f.strip(), 'like', '%{0}%'.format(VAR_3)])\n", "from frappe.model.db_query import get_order_by\n", "VAR_29 = get_order_by(VAR_2, VAR_12)\n", "VAR_30 = '_relevance, {0}, `tab{1}`.idx desc'.format(VAR_29, VAR_2)\n", "VAR_31 = 'select' if VAR_20.only_has_select_perm(VAR_2) else 'read'\n", "VAR_32 = True if VAR_2 == 'DocType' else cint(VAR_8) and has_permission(VAR_2,\n VAR_31=ptype)\n", "if VAR_2 in VAR_0:\n", "VAR_6 = None\n", "VAR_33 = VAR_20.get_list(VAR_2, VAR_5=filters, VAR_27=formatted_fields,\n VAR_26=or_filters, limit_start=start, limit_page_length=page_length,\n VAR_30=order_by, VAR_32=ignore_permissions, VAR_7=reference_doctype,\n as_list=not as_dict, strict=False)\n", "if VAR_2 in VAR_0:\n", "VAR_33 = tuple([v for v in list(VAR_33) if re.search(re.escape(VAR_3) +\n '.*', _(v.name) if VAR_11 else _(v[0]), re.IGNORECASE)])\n", "if VAR_11:\n", "for r in VAR_33:\n", "VAR_20.response['values'] = [r[:-1] for r in VAR_33]\n", "r.pop('_relevance')\n", "VAR_20.response['values'] = VAR_33\n" ]
[ "from __future__ import unicode_literals\n", "import frappe, json\n", "from frappe.utils import cstr, unique, cint\n", "from frappe.permissions import has_permission\n", "from frappe.handler import is_whitelisted\n", "from frappe import _\n", "from six import string_types\n", "import re\n", "import wrapt\n", "UNTRANSLATED_DOCTYPES = ['DocType', 'Role']\n", "def sanitize_searchfield(searchfield):...\n", "blacklisted_keywords = ['select', 'delete', 'drop', 'update', 'case', 'and',\n 'or', 'like']\n", "def _raise_exception(searchfield):...\n", "frappe.throw(_('Invalid Search Field {0}').format(searchfield), frappe.\n DataError)\n", "if len(searchfield) == 1:\n", "regex = re.compile('^.*[=;*,\\\\\\'\"$\\\\-+%#@()_].*')\n", "if len(searchfield) >= 3:\n", "if regex.match(searchfield):\n", "if '=' in searchfield:\n", "@frappe.whitelist()...\n", "_raise_exception(searchfield)\n", "_raise_exception(searchfield)\n", "if ' --' in searchfield:\n", "search_widget(doctype, txt.strip(), query, searchfield=searchfield,\n page_length=page_length, filters=filters, reference_doctype=\n reference_doctype, ignore_user_permissions=ignore_user_permissions)\n", "_raise_exception(searchfield)\n", "if any(' {0} '.format(keyword) in searchfield.split() for keyword in\n", "frappe.response['results'] = build_for_autosuggest(frappe.response['values'])\n", "_raise_exception(searchfield)\n", "if any(keyword in searchfield.split() for keyword in blacklisted_keywords):\n", "@frappe.whitelist()...\n", "_raise_exception(searchfield)\n", "regex = re.compile('^.*[=;*,\\\\\\'\"$\\\\-+%#@()].*')\n", "start = cint(start)\n", "if any(regex.match(f) for f in searchfield.split()):\n", "if isinstance(filters, string_types):\n", "_raise_exception(searchfield)\n", "filters = json.loads(filters)\n", "if searchfield:\n", "sanitize_searchfield(searchfield)\n", "if not searchfield:\n", "searchfield = 'name'\n", "standard_queries = frappe.get_hooks().standard_queries or {}\n", "if query and query.split()[0].lower() != 'select':\n", "if not query and doctype in standard_queries:\n", "is_whitelisted(frappe.get_attr(query))\n", "if frappe.local.conf.developer_mode:\n", "def get_std_fields_list(meta, key):...\n", "search_widget(doctype, txt, standard_queries[doctype][0], searchfield,\n start, page_length, filters)\n", "meta = frappe.get_meta(doctype)\n", "frappe.response['values'] = frappe.call(query, doctype, txt, searchfield,\n start, page_length, filters, as_dict=as_dict)\n", "frappe.respond_as_web_page(title='Invalid Method', html='Method not found',\n indicator_color='red', http_status_code=404)\n", "return\n", "sflist = ['name']\n", "if query:\n", "if meta.search_fields:\n", "frappe.throw(_('This query style is discontinued'))\n", "if isinstance(filters, dict):\n", "for d in meta.search_fields.split(','):\n", "if meta.title_field and meta.title_field not in sflist:\n", "filters_items = filters.items()\n", "if filters == None:\n", "if d.strip() not in sflist:\n", "sflist.append(meta.title_field)\n", "if key not in sflist:\n", "filters = []\n", "filters = []\n", "or_filters = []\n", "sflist.append(d.strip())\n", "sflist.append(key)\n", "return sflist\n", "for f in filters_items:\n", "if txt:\n", "if isinstance(f[1], (list, tuple)):\n", "search_fields = ['name']\n", "if meta.get('fields', {'fieldname': 'enabled', 'fieldtype': 'Check'}):\n", "filters.append([doctype, f[0], f[1][0], f[1][1]])\n", "filters.append([doctype, f[0], '=', f[1]])\n", "if meta.title_field:\n", "filters.append([doctype, 'enabled', '=', 1])\n", "if meta.get('fields', 
{'fieldname': 'disabled', 'fieldtype': 'Check'}):\n", "search_fields.append(meta.title_field)\n", "if meta.search_fields:\n", "filters.append([doctype, 'disabled', '!=', 1])\n", "fields = get_std_fields_list(meta, searchfield or 'name')\n", "search_fields.extend(meta.get_search_fields())\n", "for f in search_fields:\n", "if filter_fields:\n", "fmeta = meta.get_field(f.strip())\n", "fields = list(set(fields + json.loads(filter_fields)))\n", "formatted_fields = [('`tab%s`.`%s`' % (meta.name, f.strip())) for f in fields]\n", "if doctype not in UNTRANSLATED_DOCTYPES and (f == 'name' or fmeta and fmeta\n", "formatted_fields.append('locate({_txt}, `tab{doctype}`.`name`) as `_relevance`'\n .format(_txt=frappe.db.escape((txt or '').replace('%', '').replace('@',\n '')), doctype=doctype))\n", "or_filters.append([doctype, f.strip(), 'like', '%{0}%'.format(txt)])\n", "from frappe.model.db_query import get_order_by\n", "order_by_based_on_meta = get_order_by(doctype, meta)\n", "order_by = '_relevance, {0}, `tab{1}`.idx desc'.format(order_by_based_on_meta,\n doctype)\n", "ptype = 'select' if frappe.only_has_select_perm(doctype) else 'read'\n", "ignore_permissions = True if doctype == 'DocType' else cint(\n ignore_user_permissions) and has_permission(doctype, ptype=ptype)\n", "if doctype in UNTRANSLATED_DOCTYPES:\n", "page_length = None\n", "values = frappe.get_list(doctype, filters=filters, fields=formatted_fields,\n or_filters=or_filters, limit_start=start, limit_page_length=page_length,\n order_by=order_by, ignore_permissions=ignore_permissions,\n reference_doctype=reference_doctype, as_list=not as_dict, strict=False)\n", "if doctype in UNTRANSLATED_DOCTYPES:\n", "values = tuple([v for v in list(values) if re.search(re.escape(txt) + '.*',\n _(v.name) if as_dict else _(v[0]), re.IGNORECASE)])\n", "if as_dict:\n", "for r in values:\n", "frappe.response['values'] = [r[:-1] for r in values]\n", "r.pop('_relevance')\n", "frappe.response['values'] = values\n" ]
[ 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "Import'", "Assign'", "FunctionDef'", "Assign'", "FunctionDef'", "Expr'", "Condition", "Assign'", "Condition", "Condition", "Condition", "Condition", "Expr'", "Expr'", "Condition", "Expr'", "Expr'", "For", "Assign'", "Expr'", "For", "Condition", "Expr'", "Assign'", "Assign'", "For", "Condition", "Expr'", "Assign'", "Condition", "Expr'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Expr'", "Condition", "FunctionDef'", "Expr'", "Assign'", "Assign'", "Expr'", "Return'", "Assign'", "Condition", "Condition", "Expr'", "Condition", "For", "Condition", "Assign'", "Condition", "Condition", "Expr'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'", "For", "Condition", "Condition", "Assign'", "Condition", "Expr'", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Assign'", "Expr'", "For", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Expr'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "For", "Assign'", "Expr'", "Assign'" ]
[ "def FUNC_59(VAR_36):...\n", "\"\"\"docstring\"\"\"\n", "VAR_16 = getattr(VAR_36, 'display_name', None)\n", "if VAR_16:\n", "return u'{0} ({1})'.format(VAR_16, VAR_36.location.to_deprecated_string())\n", "return VAR_36.location.to_deprecated_string()\n" ]
[ "def _display_unit(unit):...\n", "\"\"\"docstring\"\"\"\n", "name = getattr(unit, 'display_name', None)\n", "if name:\n", "return u'{0} ({1})'.format(name, unit.location.to_deprecated_string())\n", "return unit.location.to_deprecated_string()\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Return'", "Return'" ]
[ "@classmethod...\n", "if not VAR_21._temp_dir:\n", "VAR_21._temp_dir = tempfile.mkdtemp(dir=click_web.OUTPUT_FOLDER)\n", "VAR_0.info(f'Temp dir: {VAR_21._temp_dir}')\n", "return VAR_21._temp_dir\n" ]
[ "@classmethod...\n", "if not cls._temp_dir:\n", "cls._temp_dir = tempfile.mkdtemp(dir=click_web.OUTPUT_FOLDER)\n", "logger.info(f'Temp dir: {cls._temp_dir}')\n", "return cls._temp_dir\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Assign'", "Expr'", "Return'" ]
[ "\"\"\"A cleanup tool for HTML.\n\nRemoves unwanted tags and content. See the `Cleaner` class for\ndetails.\n\"\"\"\n", "import re\n", "import copy\n", "from urlparse import urlsplit\n", "from urllib.parse import urlsplit\n", "from lxml import etree\n", "from lxml.html import defs\n", "from lxml.html import fromstring, XHTML_NAMESPACE\n", "from lxml.html import xhtml_to_html, _transform_result\n", "VAR_69\n", "VAR_69 = chr\n", "VAR_70\n", "VAR_70 = str\n", "VAR_71\n", "VAR_71 = str\n", "VAR_72\n", "VAR_72 = str, VAR_71\n", "__all__ = ['clean_html', 'clean', 'Cleaner', 'autolink', 'autolink_html',\n 'word_break', 'word_break_html']\n", "VAR_0 = re.compile('expression\\\\s*\\\\(.*?\\\\)', re.S | re.I)\n", "VAR_1 = re.compile('@\\\\s*import', re.I)\n", "VAR_2 = re.compile('^data:image/.+;base64', re.I).search\n", "VAR_3 = re.compile(\n '(?:javascript|jscript|livescript|vbscript|data|about|mocha):', re.I\n ).search\n", "def FUNC_0(VAR_4):...\n", "if VAR_2(VAR_4):\n", "return None\n", "return VAR_3(VAR_4)\n" ]
[ "\"\"\"A cleanup tool for HTML.\n\nRemoves unwanted tags and content. See the `Cleaner` class for\ndetails.\n\"\"\"\n", "import re\n", "import copy\n", "from urlparse import urlsplit\n", "from urllib.parse import urlsplit\n", "from lxml import etree\n", "from lxml.html import defs\n", "from lxml.html import fromstring, XHTML_NAMESPACE\n", "from lxml.html import xhtml_to_html, _transform_result\n", "unichr\n", "unichr = chr\n", "unicode\n", "unicode = str\n", "bytes\n", "bytes = str\n", "basestring\n", "basestring = str, bytes\n", "__all__ = ['clean_html', 'clean', 'Cleaner', 'autolink', 'autolink_html',\n 'word_break', 'word_break_html']\n", "_css_javascript_re = re.compile('expression\\\\s*\\\\(.*?\\\\)', re.S | re.I)\n", "_css_import_re = re.compile('@\\\\s*import', re.I)\n", "_is_image_dataurl = re.compile('^data:image/.+;base64', re.I).search\n", "_is_possibly_malicious_scheme = re.compile(\n '(?:javascript|jscript|livescript|vbscript|data|about|mocha):', re.I\n ).search\n", "def _is_javascript_scheme(s):...\n", "if _is_image_dataurl(s):\n", "return None\n", "return _is_possibly_malicious_scheme(s)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_6():...\n", "\"\"\"docstring\"\"\"\n", "VAR_23 = request.vars.send\n", "if DEMO_MODE:\n", "session.authorized = True\n", "if not VAR_23:\n", "session.last_time = t0\n", "VAR_23 = URL('site')\n", "if session.authorized:\n", "redirect(VAR_23)\n", "if failed_login_count() >= allowed_number_of_attempts:\n", "return dict(VAR_23=send)\n", "time.sleep(2 ** allowed_number_of_attempts)\n", "if request.vars.password:\n", "if verify_password(request.vars.password[:1024]):\n", "session.authorized = True\n", "VAR_210 = login_record(False)\n", "login_record(True)\n", "if VAR_210 >= allowed_number_of_attempts:\n", "if CHECK_VERSION:\n", "VAR_43.flash = T('admin disabled because too many invalid login attempts')\n", "if VAR_210 == allowed_number_of_attempts - 1:\n", "session.check_version = True\n", "session.check_version = False\n", "VAR_43.flash = T('You have one more login attempt before you are locked out')\n", "VAR_43.flash = T('invalid password.')\n", "session.last_time = t0\n", "if isinstance(VAR_23, list):\n", "VAR_23 = str(VAR_23[0])\n", "redirect(VAR_23)\n" ]
[ "def index():...\n", "\"\"\"docstring\"\"\"\n", "send = request.vars.send\n", "if DEMO_MODE:\n", "session.authorized = True\n", "if not send:\n", "session.last_time = t0\n", "send = URL('site')\n", "if session.authorized:\n", "redirect(send)\n", "if failed_login_count() >= allowed_number_of_attempts:\n", "return dict(send=send)\n", "time.sleep(2 ** allowed_number_of_attempts)\n", "if request.vars.password:\n", "if verify_password(request.vars.password[:1024]):\n", "session.authorized = True\n", "times_denied = login_record(False)\n", "login_record(True)\n", "if times_denied >= allowed_number_of_attempts:\n", "if CHECK_VERSION:\n", "response.flash = T('admin disabled because too many invalid login attempts')\n", "if times_denied == allowed_number_of_attempts - 1:\n", "session.check_version = True\n", "session.check_version = False\n", "response.flash = T('You have one more login attempt before you are locked out')\n", "response.flash = T('invalid password.')\n", "session.last_time = t0\n", "if isinstance(send, list):\n", "send = str(send[0])\n", "redirect(send)\n" ]
[ 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Return'", "Expr'", "Condition", "Condition", "Assign'", "Assign'", "Expr'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'" ]
[ "@parameterized.named_parameters(('VariablesToFeedNone', '', 'func2', None),...\n", "if not test.is_built_with_xla():\n", "self.skipTest('Skipping test because XLA is not compiled in.')\n", "VAR_13 = os.path.join(test.get_temp_dir(), 'dummy_model')\n", "VAR_14 = self.AOTCompileDummyModel()\n", "VAR_6 = getattr(VAR_14, VAR_6)\n", "self.evaluate(VAR_14.var.initializer)\n", "self.evaluate(VAR_14.write_var.initializer)\n", "save.save(VAR_14, VAR_13, signatures={'func': func})\n", "self.parser = saved_model_cli.create_parser()\n", "VAR_50 = os.path.join(test.get_temp_dir(), 'aot_compile_cpu_dir/out')\n", "VAR_10 = ['aot_compile_cpu', '--dir', VAR_13, '--tag_set', 'serve',\n '--signature_def_key', 'func', '--output_prefix', VAR_50,\n '--variables_to_feed', VAR_5, '--cpp_class', 'Generated']\n", "if VAR_7:\n", "VAR_10.extend(['--target_triple', VAR_7])\n", "VAR_10 = self.parser.parse_args(VAR_10)\n", "saved_model_cli.aot_compile_cpu(VAR_10)\n", "self.assertRegex(str(captured_warn.call_args),\n \"Signature input key 'y'.*has been pruned while freezing the graph.\")\n", "self.assertTrue(file_io.file_exists('{}.o'.format(VAR_50)))\n", "self.assertTrue(file_io.file_exists('{}.h'.format(VAR_50)))\n", "self.assertTrue(file_io.file_exists('{}_metadata.o'.format(VAR_50)))\n", "self.assertTrue(file_io.file_exists('{}_makefile.inc'.format(VAR_50)))\n", "VAR_51 = file_io.read_file_to_string('{}.h'.format(VAR_50))\n", "self.assertIn('class Generated', VAR_51)\n", "self.assertIn('arg_feed_x_data', VAR_51)\n", "self.assertIn('result_fetch_res_data', VAR_51)\n", "self.assertNotIn('arg_feed_y_data', VAR_51)\n", "if VAR_5:\n", "self.assertIn('set_var_param_my_var_data(const float', VAR_51)\n", "if VAR_6 == VAR_14.func_write:\n", "self.assertNotIn('set_var_param_my_var_data(float', VAR_51)\n", "self.assertIn('set_var_param_write_var_data(float', VAR_51)\n", "VAR_52 = file_io.read_file_to_string('{}_makefile.inc'.format(VAR_50))\n", "self.assertNotIn('set_var_param_write_var_data(const float', VAR_51)\n", "self.assertIn('-D_GLIBCXX_USE_CXX11_ABI=', VAR_52)\n" ]
[ "@parameterized.named_parameters(('VariablesToFeedNone', '', 'func2', None),...\n", "if not test.is_built_with_xla():\n", "self.skipTest('Skipping test because XLA is not compiled in.')\n", "saved_model_dir = os.path.join(test.get_temp_dir(), 'dummy_model')\n", "dummy_model = self.AOTCompileDummyModel()\n", "func = getattr(dummy_model, func)\n", "self.evaluate(dummy_model.var.initializer)\n", "self.evaluate(dummy_model.write_var.initializer)\n", "save.save(dummy_model, saved_model_dir, signatures={'func': func})\n", "self.parser = saved_model_cli.create_parser()\n", "output_prefix = os.path.join(test.get_temp_dir(), 'aot_compile_cpu_dir/out')\n", "args = ['aot_compile_cpu', '--dir', saved_model_dir, '--tag_set', 'serve',\n '--signature_def_key', 'func', '--output_prefix', output_prefix,\n '--variables_to_feed', variables_to_feed, '--cpp_class', 'Generated']\n", "if target_triple:\n", "args.extend(['--target_triple', target_triple])\n", "args = self.parser.parse_args(args)\n", "saved_model_cli.aot_compile_cpu(args)\n", "self.assertRegex(str(captured_warn.call_args),\n \"Signature input key 'y'.*has been pruned while freezing the graph.\")\n", "self.assertTrue(file_io.file_exists('{}.o'.format(output_prefix)))\n", "self.assertTrue(file_io.file_exists('{}.h'.format(output_prefix)))\n", "self.assertTrue(file_io.file_exists('{}_metadata.o'.format(output_prefix)))\n", "self.assertTrue(file_io.file_exists('{}_makefile.inc'.format(output_prefix)))\n", "header_contents = file_io.read_file_to_string('{}.h'.format(output_prefix))\n", "self.assertIn('class Generated', header_contents)\n", "self.assertIn('arg_feed_x_data', header_contents)\n", "self.assertIn('result_fetch_res_data', header_contents)\n", "self.assertNotIn('arg_feed_y_data', header_contents)\n", "if variables_to_feed:\n", "self.assertIn('set_var_param_my_var_data(const float', header_contents)\n", "if func == dummy_model.func_write:\n", "self.assertNotIn('set_var_param_my_var_data(float', header_contents)\n", "self.assertIn('set_var_param_write_var_data(float', header_contents)\n", "makefile_contents = file_io.read_file_to_string('{}_makefile.inc'.format(\n output_prefix))\n", "self.assertNotIn('set_var_param_write_var_data(const float', header_contents)\n", "self.assertIn('-D_GLIBCXX_USE_CXX11_ABI=', makefile_contents)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_4(self):...\n", "\"\"\"docstring\"\"\"\n", "if not getattr(self, '_metaclass', False) and self.meta.issingle:\n", "VAR_80 = frappe.db.get_singles_dict(self.doctype)\n", "VAR_21 = frappe.db.get_value(self.doctype, self.name, '*', as_dict=1,\n for_update=self.flags.for_update)\n", "if not VAR_80:\n", "if not VAR_21:\n", "VAR_80 = frappe.new_doc(self.doctype).as_dict()\n", "super(CLASS_0, self).__init__(VAR_80)\n", "frappe.throw(_('{0} {1} not found').format(_(self.doctype), self.name),\n frappe.DoesNotExistError)\n", "super(CLASS_0, self).__init__(VAR_21)\n", "VAR_80['name'] = self.doctype\n", "self.init_valid_columns()\n", "if self.name == 'DocType' and self.doctype == 'DocType':\n", "self._fix_numeric_types()\n", "from frappe.model.meta import DOCTYPE_TABLE_FIELDS\n", "VAR_81 = self.meta.get_table_fields()\n", "VAR_81 = DOCTYPE_TABLE_FIELDS\n", "for VAR_19 in VAR_81:\n", "VAR_50 = frappe.db.get_values(VAR_19.options, {'parent': self.name,\n 'parenttype': self.doctype, 'parentfield': VAR_19.fieldname}, '*',\n as_dict=True, order_by='idx asc')\n", "if hasattr(self, '__setup__'):\n", "if VAR_50:\n", "self.__setup__()\n", "self.set(VAR_19.fieldname, VAR_50)\n", "self.set(VAR_19.fieldname, [])\n" ]
[ "def load_from_db(self):...\n", "\"\"\"docstring\"\"\"\n", "if not getattr(self, '_metaclass', False) and self.meta.issingle:\n", "single_doc = frappe.db.get_singles_dict(self.doctype)\n", "d = frappe.db.get_value(self.doctype, self.name, '*', as_dict=1, for_update\n =self.flags.for_update)\n", "if not single_doc:\n", "if not d:\n", "single_doc = frappe.new_doc(self.doctype).as_dict()\n", "super(Document, self).__init__(single_doc)\n", "frappe.throw(_('{0} {1} not found').format(_(self.doctype), self.name),\n frappe.DoesNotExistError)\n", "super(Document, self).__init__(d)\n", "single_doc['name'] = self.doctype\n", "self.init_valid_columns()\n", "if self.name == 'DocType' and self.doctype == 'DocType':\n", "self._fix_numeric_types()\n", "from frappe.model.meta import DOCTYPE_TABLE_FIELDS\n", "table_fields = self.meta.get_table_fields()\n", "table_fields = DOCTYPE_TABLE_FIELDS\n", "for df in table_fields:\n", "children = frappe.db.get_values(df.options, {'parent': self.name,\n 'parenttype': self.doctype, 'parentfield': df.fieldname}, '*', as_dict=\n True, order_by='idx asc')\n", "if hasattr(self, '__setup__'):\n", "if children:\n", "self.__setup__()\n", "self.set(df.fieldname, children)\n", "self.set(df.fieldname, [])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Condition", "Expr'", "ImportFrom'", "Assign'", "Assign'", "For", "Assign'", "Condition", "Condition", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_5(self):...\n", "VAR_19 = filescheme.dirbrowser_html(os.getcwd()).decode('utf-8')\n", "VAR_20 = bs4.BeautifulSoup(VAR_19, 'html.parser')\n", "print(VAR_20.prettify())\n", "VAR_21 = VAR_20.div\n", "assert VAR_21['id'] == 'dirbrowserContainer'\n", "VAR_22 = VAR_21('div', id='dirbrowserTitle')[0]\n", "VAR_23 = VAR_22('p', id='dirbrowserTitleText')[0].text\n", "assert VAR_23 == 'Browse directory: {}'.format(os.getcwd())\n" ]
[ "def test_basic(self):...\n", "html = filescheme.dirbrowser_html(os.getcwd()).decode('utf-8')\n", "soup = bs4.BeautifulSoup(html, 'html.parser')\n", "print(soup.prettify())\n", "container = soup.div\n", "assert container['id'] == 'dirbrowserContainer'\n", "title_elem = container('div', id='dirbrowserTitle')[0]\n", "title_text = title_elem('p', id='dirbrowserTitleText')[0].text\n", "assert title_text == 'Browse directory: {}'.format(os.getcwd())\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Assert'", "Assign'", "Assign'", "Assert'" ]
[ "def FUNC_79(VAR_325):...\n", "VAR_30, VAR_29 = VAR_325\n", "return int((VAR_30 - VAR_84 + VAR_88) * VAR_95), int((VAR_29 - VAR_85 +\n VAR_90) * VAR_95)\n" ]
[ "def resizeXY(xy):...\n", "x, y = xy\n", "return int((x - newX + left_xs) * factor), int((y - newY + top_xs) * factor)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_4(VAR_0):...\n", "VAR_9 = GeneratorError()\n", "VAR_10 = VAR_0.patch('openapi_python_client._get_project_for_url_or_path',\n return_value=error)\n", "VAR_6 = VAR_0.MagicMock()\n", "VAR_7 = VAR_0.MagicMock()\n", "from openapi_python_client import create_new_client\n", "VAR_11 = create_new_client(VAR_6=url, VAR_7=path)\n", "VAR_10.assert_called_once_with(VAR_6=url, VAR_7=path)\n", "assert VAR_11 == [VAR_9]\n" ]
[ "def test_create_new_client_project_error(mocker):...\n", "error = GeneratorError()\n", "_get_project_for_url_or_path = mocker.patch(\n 'openapi_python_client._get_project_for_url_or_path', return_value=error)\n", "url = mocker.MagicMock()\n", "path = mocker.MagicMock()\n", "from openapi_python_client import create_new_client\n", "result = create_new_client(url=url, path=path)\n", "_get_project_for_url_or_path.assert_called_once_with(url=url, path=path)\n", "assert result == [error]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Expr'", "Assert'" ]
[ "def FUNC_44(self):...\n", "return json.dumps(audiotranscode.getEncoders())\n" ]
[ "def api_getencoders(self):...\n", "return json.dumps(audiotranscode.getEncoders())\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_9(self):...\n", "assert method_name_to_url('fooBar') == '/foo-bar'\n", "assert method_name_to_url('foo_bar') == '/foo-bar'\n", "assert method_name_to_url('fooBar_baz') == '/foo-bar-baz'\n", "assert method_name_to_url('_FooBar_baz-booFoo_') == '/foo-bar-baz-boo-foo'\n" ]
[ "def test_it_works(self):...\n", "assert method_name_to_url('fooBar') == '/foo-bar'\n", "assert method_name_to_url('foo_bar') == '/foo-bar'\n", "assert method_name_to_url('fooBar_baz') == '/foo-bar-baz'\n", "assert method_name_to_url('_FooBar_baz-booFoo_') == '/foo-bar-baz-boo-foo'\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assert'", "Assert'", "Assert'", "Assert'" ]
[ "def FUNC_26():...\n", "VAR_52 = request.vars.keywords or ''\n", "VAR_3 = FUNC_5()\n", "def FUNC_59(VAR_5, VAR_52):...\n", "VAR_5 = os.path.join(apath(VAR_3, VAR_122=request), VAR_5)\n", "if VAR_52 in read_file(VAR_5, 'r'):\n", "return True\n", "return False\n" ]
[ "def search():...\n", "keywords = request.vars.keywords or ''\n", "app = get_app()\n", "def match(filename, keywords):...\n", "filename = os.path.join(apath(app, r=request), filename)\n", "if keywords in read_file(filename, 'r'):\n", "return True\n", "return False\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Condition", "Return'", "Return'" ]
[ "@VAR_1.url_value_preprocessor...\n", "g.auth_token = VAR_7.pop('auth_token')\n" ]
[ "@kobo.url_value_preprocessor...\n", "g.auth_token = values.pop('auth_token')\n" ]
[ 0, 0 ]
[ "Condition", "Assign'" ]
[ "@FUNC_0...\n", "return Distributor()\n" ]
[ "@cache_in_self...\n", "return Distributor()\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "@abc.abstractmethod...\n", "\"\"\"docstring\"\"\"\n" ]
[ "@abc.abstractmethod...\n", "\"\"\"docstring\"\"\"\n" ]
[ 0, 0 ]
[ "Condition", "Docstring" ]
[ "def FUNC_36(self):...\n", "return helpdesk_settings.HELPDESK_KBITEM_TEAM_GETTER(self)\n" ]
[ "def get_team(self):...\n", "return helpdesk_settings.HELPDESK_KBITEM_TEAM_GETTER(self)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@FUNC_0...\n", "return E2eRoomKeysHandler(self)\n" ]
[ "@cache_in_self...\n", "return E2eRoomKeysHandler(self)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "@log_function...\n", "VAR_101 = await self.auth.check_host_in_room(VAR_11, VAR_5)\n", "if not VAR_101:\n", "VAR_17 = min(VAR_17, 100)\n", "VAR_20 = await self.store.get_backfill_events(VAR_11, VAR_31, VAR_17)\n", "VAR_20 = await filter_events_for_server(self.storage, VAR_5, VAR_20)\n", "return VAR_20\n" ]
[ "@log_function...\n", "in_room = await self.auth.check_host_in_room(room_id, origin)\n", "if not in_room:\n", "limit = min(limit, 100)\n", "events = await self.store.get_backfill_events(room_id, pdu_list, limit)\n", "events = await filter_events_for_server(self.storage, origin, events)\n", "return events\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_36(self, VAR_44):...\n", "VAR_14 = VAR_44.group(2) or VAR_44.group(1)\n", "VAR_14 = self.output(VAR_14)\n", "return self.renderer.double_emphasis(VAR_14)\n" ]
[ "def output_double_emphasis(self, m):...\n", "text = m.group(2) or m.group(1)\n", "text = self.output(text)\n", "return self.renderer.double_emphasis(text)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "@VAR_8.route('/file/<path:path>', methods=['GET'])...\n", "VAR_17 = secure_filename(VAR_17)\n", "if VAR_8.interface.encrypt and isinstance(VAR_8.interface.examples, str\n", "VAR_95 = encrypted_file.read()\n", "return send_file(os.path.join(VAR_8.cwd, VAR_17))\n", "VAR_87 = encryptor.decrypt(VAR_8.interface.encryption_key, VAR_95)\n", "return send_file(io.BytesIO(VAR_87), attachment_filename=os.path.basename(path)\n )\n" ]
[ "@app.route('/file/<path:path>', methods=['GET'])...\n", "path = secure_filename(path)\n", "if app.interface.encrypt and isinstance(app.interface.examples, str\n", "encrypted_data = encrypted_file.read()\n", "return send_file(os.path.join(app.cwd, path))\n", "file_data = encryptor.decrypt(app.interface.encryption_key, encrypted_data)\n", "return send_file(io.BytesIO(file_data), attachment_filename=os.path.\n basename(path))\n" ]
[ 0, 1, 0, 0, 1, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Assign'", "Return'", "Assign'", "Return'" ]
[ "def __init__(self, VAR_1, VAR_2=None):...\n", "\"\"\"docstring\"\"\"\n", "self.request = VAR_1\n", "self.ajax = self.request.is_ajax()\n", "self.basket = VAR_2 or VAR_1.basket\n" ]
[ "def __init__(self, request, basket=None):...\n", "\"\"\"docstring\"\"\"\n", "self.request = request\n", "self.ajax = self.request.is_ajax()\n", "self.basket = basket or request.basket\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_63(self):...\n", "\"\"\"docstring\"\"\"\n", "return self.s\n" ]
[ "def xml(self):...\n", "\"\"\"docstring\"\"\"\n", "return self.s\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_32(self):...\n", "provideUtility(PreferredCharsetResolver)\n", "VAR_11 = PageTemplate()\n", "self.assert_expected(VAR_11, 'UnicodeResolution.html')\n" ]
[ "def test_unicode_conflict_resolution(self):...\n", "provideUtility(PreferredCharsetResolver)\n", "t = PageTemplate()\n", "self.assert_expected(t, 'UnicodeResolution.html')\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'" ]
[ "from datetime import timedelta\n", "from django.urls import reverse\n", "from django.utils import timezone\n", "from weblate.trans.tests.test_views import ViewTestCase\n", "from weblate.trans.views.reports import generate_counts, generate_credits\n", "VAR_0 = [{'count': 1, 'count_edit': 0, 'count_new': 1, 'name':\n 'Weblate Test', 'words': 2, 'words_edit': 0, 'words_new': 2, 'chars': \n 14, 'chars_edit': 0, 'chars_new': 14, 'email': '[email protected]',\n 't_chars': 14, 't_chars_edit': 0, 't_chars_new': 14, 't_words': 2,\n 't_words_edit': 0, 't_words_new': 2, 'count_approve': 0,\n 'words_approve': 0, 'chars_approve': 0, 't_chars_approve': 0,\n 't_words_approve': 0, 'edits': 14, 'edits_approve': 0, 'edits_edit': 0,\n 'edits_new': 14}]\n", "def FUNC_0(self):...\n", "super().setUp()\n", "self.user.is_superuser = True\n", "self.user.save()\n", "def FUNC_1(self):...\n", "self.edit_unit('Hello, world!\\n', 'Nazdar svete!\\n')\n", "def FUNC_2(self):...\n", "VAR_4 = generate_credits(None, timezone.now() - timedelta(days=1), timezone\n .now() + timedelta(days=1), translation__component=self.component)\n", "self.assertEqual(VAR_4, [])\n", "def FUNC_3(self, VAR_1=1):...\n", "self.add_change()\n", "VAR_4 = generate_credits(None, timezone.now() - timedelta(days=1), timezone\n .now() + timedelta(days=1), translation__component=self.component)\n", "self.assertEqual(VAR_4, [{'Czech': [('[email protected]', 'Weblate Test',\n VAR_1)]}])\n", "def FUNC_4(self):...\n", "self.edit_unit('Hello, world!\\n', 'Nazdar svete2!\\n')\n", "self.test_credits_one(VAR_1=2)\n", "def FUNC_5(self):...\n", "self.add_change()\n", "VAR_4 = generate_counts(None, timezone.now() - timedelta(days=1), timezone.\n now() + timedelta(days=1), component=self.component)\n", "self.assertEqual(VAR_4, VAR_0)\n", "def FUNC_6(self):...\n", "return self.kw_component\n" ]
[ "from datetime import timedelta\n", "from django.urls import reverse\n", "from django.utils import timezone\n", "from weblate.trans.tests.test_views import ViewTestCase\n", "from weblate.trans.views.reports import generate_counts, generate_credits\n", "COUNTS_DATA = [{'count': 1, 'count_edit': 0, 'count_new': 1, 'name':\n 'Weblate Test', 'words': 2, 'words_edit': 0, 'words_new': 2, 'chars': \n 14, 'chars_edit': 0, 'chars_new': 14, 'email': '[email protected]',\n 't_chars': 14, 't_chars_edit': 0, 't_chars_new': 14, 't_words': 2,\n 't_words_edit': 0, 't_words_new': 2, 'count_approve': 0,\n 'words_approve': 0, 'chars_approve': 0, 't_chars_approve': 0,\n 't_words_approve': 0, 'edits': 14, 'edits_approve': 0, 'edits_edit': 0,\n 'edits_new': 14}]\n", "def setUp(self):...\n", "super().setUp()\n", "self.user.is_superuser = True\n", "self.user.save()\n", "def add_change(self):...\n", "self.edit_unit('Hello, world!\\n', 'Nazdar svete!\\n')\n", "def test_credits_empty(self):...\n", "data = generate_credits(None, timezone.now() - timedelta(days=1), timezone.\n now() + timedelta(days=1), translation__component=self.component)\n", "self.assertEqual(data, [])\n", "def test_credits_one(self, expected_count=1):...\n", "self.add_change()\n", "data = generate_credits(None, timezone.now() - timedelta(days=1), timezone.\n now() + timedelta(days=1), translation__component=self.component)\n", "self.assertEqual(data, [{'Czech': [('[email protected]', 'Weblate Test',\n expected_count)]}])\n", "def test_credits_more(self):...\n", "self.edit_unit('Hello, world!\\n', 'Nazdar svete2!\\n')\n", "self.test_credits_one(expected_count=2)\n", "def test_counts_one(self):...\n", "self.add_change()\n", "data = generate_counts(None, timezone.now() - timedelta(days=1), timezone.\n now() + timedelta(days=1), component=self.component)\n", "self.assertEqual(data, COUNTS_DATA)\n", "def get_kwargs(self):...\n", "return self.kw_component\n" ]
[ 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "FunctionDef'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Expr'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Return'" ]
[ "from builtins import next\n", "from builtins import range\n", "import os\n", "import datetime\n", "from xml.sax.saxutils import quoteattr\n", "import sys\n", "import logging\n", "import random\n", "import glob\n", "from itertools import cycle\n", "from flask import Blueprint, url_for, Response, stream_with_context, send_file, jsonify\n", "from werkzeug.datastructures import Headers\n", "from opendiamond.dataretriever.test_utils import *\n", "VAR_0 = 'cocktailtest'\n", "VAR_1 = False\n", "VAR_2 = True\n", "VAR_3 = VAR_4 = None\n", "VAR_5 = int(10000.0)\n", "\"\"\"string\"\"\"\n", "def FUNC_0(VAR_6):...\n", "VAR_3 = '/srv/diamond/STREAM'\n", "VAR_4 = VAR_6.dataroot\n", "VAR_7 = Blueprint('test_store', __name__)\n", "VAR_8 = logging.getLogger(__name__)\n", "@VAR_7.route('/base/<baseidx>/mixers/<mixeridx>/keywords/<params>')...\n", "VAR_17 = FUNC_4(VAR_10, VAR_14)\n", "VAR_21 = int((VAR_11 - 1) * (1.0 / VAR_12) * len(VAR_17))\n", "VAR_22 = int(VAR_11 * (1.0 / VAR_12) * len(VAR_17))\n", "VAR_17 = VAR_17[VAR_21:VAR_22]\n", "VAR_8.info('Mixer Size {}'.format(len(VAR_17)))\n", "sys.stdout.flush()\n", "return FUNC_2(VAR_9, VAR_13, VAR_17, VAR_15, VAR_16)\n" ]
[ "from builtins import next\n", "from builtins import range\n", "import os\n", "import datetime\n", "from xml.sax.saxutils import quoteattr\n", "import sys\n", "import logging\n", "import random\n", "import glob\n", "from itertools import cycle\n", "from flask import Blueprint, url_for, Response, stream_with_context, send_file, jsonify\n", "from werkzeug.datastructures import Headers\n", "from opendiamond.dataretriever.test_utils import *\n", "BASEURL = 'cocktailtest'\n", "STYLE = False\n", "LOCAL_OBJ_URI = True\n", "INDEXDIR = DATAROOT = None\n", "ITEMS_PER_ITERATION = int(10000.0)\n", "\"\"\"\n Example cocktail url:\n /cocktail/base/FFFFFFFFFFFFFFFF/distrbuted/1of2/ mixers/FFFFFFFFFFFFFFFC/keywords/d_42_1.0/classes/gull,cardinal\n /cocktail/base/\"0\"/mixers/FFFFFFFFFFFFFFFC/keywords/d_42_1.0\n /cocktail/base/FFFFFFFFFFFFFFFF/keywords/d_42_1.0\n\"\"\"\n", "def init(config):...\n", "INDEXDIR = '/srv/diamond/STREAM'\n", "DATAROOT = config.dataroot\n", "scope_blueprint = Blueprint('test_store', __name__)\n", "_log = logging.getLogger(__name__)\n", "@scope_blueprint.route('/base/<baseidx>/mixers/<mixeridx>/keywords/<params>')...\n", "mixer_list = get_mixer_list(mixeridx, classes)\n", "start_idx = int((index - 1) * (1.0 / total) * len(mixer_list))\n", "end_idx = int(index * (1.0 / total) * len(mixer_list))\n", "mixer_list = mixer_list[start_idx:end_idx]\n", "_log.info('Mixer Size {}'.format(len(mixer_list)))\n", "sys.stdout.flush()\n", "return get_scope(baseidx, params, mixer_list, start, limit)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "Import'", "Import'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "if self.path is None:\n", "return None\n", "return os.path.abspath(self.path)\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "if self.path is None:\n", "return None\n", "return os.path.abspath(self.path)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Return'", "Return'" ]
[ "def FUNC_15(self, VAR_3):...\n", "VAR_4 = {}\n", "VAR_4['label'] = VAR_3.label\n", "VAR_4['help_text'] = VAR_3.help_text\n", "VAR_4['required'] = VAR_3.required\n", "VAR_4['initial'] = VAR_3.default_value\n", "return VAR_4\n" ]
[ "def get_field_options(self, field):...\n", "options = {}\n", "options['label'] = field.label\n", "options['help_text'] = field.help_text\n", "options['required'] = field.required\n", "options['initial'] = field.default_value\n", "return options\n" ]
[ 0, 0, 0, 2, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "@log_function...\n", "\"\"\"docstring\"\"\"\n", "VAR_0.debug('get_room_state_ids dest=%s, room=%s', VAR_5, VAR_6)\n", "VAR_2 = FUNC_1('/state_ids/%s', VAR_6)\n", "return self.client.get_json(VAR_5, VAR_2=path, VAR_3={'event_id': event_id},\n try_trailing_slash_on_400=True)\n" ]
[ "@log_function...\n", "\"\"\"docstring\"\"\"\n", "logger.debug('get_room_state_ids dest=%s, room=%s', destination, room_id)\n", "path = _create_v1_path('/state_ids/%s', room_id)\n", "return self.client.get_json(destination, path=path, args={'event_id':\n event_id}, try_trailing_slash_on_400=True)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_0(VAR_2):...\n", "\"\"\"docstring\"\"\"\n", "VAR_5 = VAR_2.split('/', 2)\n", "if len(VAR_5) != 3:\n", "VAR_31 = 'Spec is not of the form \"user/repo/ref\", provided: \"{spec}\".'.format(\n VAR_2=spec)\n", "return VAR_5\n", "if len(VAR_5) == 2 and VAR_5[-1] != 'master':\n", "VAR_31 += ' Did you mean \"{spec}/master\"?'.format(VAR_2=spec)\n" ]
[ "def tokenize_spec(spec):...\n", "\"\"\"docstring\"\"\"\n", "spec_parts = spec.split('/', 2)\n", "if len(spec_parts) != 3:\n", "msg = 'Spec is not of the form \"user/repo/ref\", provided: \"{spec}\".'.format(\n spec=spec)\n", "return spec_parts\n", "if len(spec_parts) == 2 and spec_parts[-1] != 'master':\n", "msg += ' Did you mean \"{spec}/master\"?'.format(spec=spec)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Return'", "Condition", "AugAssign'" ]
[ "def FUNC_21(VAR_23, VAR_24, VAR_25):...\n", "if VAR_23 != 'SECRET_KEY':\n", "if type(VAR_24) is dict:\n", "for k, v in VAR_24.items():\n", "VAR_25[VAR_23] = VAR_24\n", "FUNC_21(k, v, VAR_25[VAR_23])\n" ]
[ "def update_config_value(key, val, dictionary):...\n", "if key != 'SECRET_KEY':\n", "if type(val) is dict:\n", "for k, v in val.items():\n", "dictionary[key] = val\n", "update_config_value(k, v, dictionary[key])\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Condition", "For", "Assign'", "Expr'" ]
[ "def FUNC_22(VAR_45):...\n", "\"\"\"docstring\"\"\"\n", "VAR_1.session.user = VAR_45\n", "VAR_1.session.sid = VAR_45\n", "VAR_1.cache = {}\n", "VAR_1.form_dict = CLASS_0()\n", "VAR_1.jenv = None\n", "VAR_1.session.data = CLASS_0()\n", "VAR_1.role_permissions = {}\n", "VAR_1.new_doc_templates = {}\n", "VAR_1.user_perms = None\n" ]
[ "def set_user(username):...\n", "\"\"\"docstring\"\"\"\n", "local.session.user = username\n", "local.session.sid = username\n", "local.cache = {}\n", "local.form_dict = _dict()\n", "local.jenv = None\n", "local.session.data = _dict()\n", "local.role_permissions = {}\n", "local.new_doc_templates = {}\n", "local.user_perms = None\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_11(VAR_0, VAR_1: FlaskClient):...\n", "VAR_12 = {'title': 'Testing the create route', 'tags': 'testing,note',\n 'path': '', 'submit': 'true'}\n", "VAR_8 = VAR_1.post('/notes/new', data=note_data)\n", "assert VAR_8.status_code == 302\n", "assert not b'invalid' in VAR_8.data\n", "VAR_8 = VAR_1.post('/notes/new', data=note_data, follow_redirects=True)\n", "assert VAR_8.status_code == 200\n", "assert b'<span class=\"post-tag\">note</span>' in VAR_8.data\n", "assert b'<span class=\"post-tag\">testing</span>' in VAR_8.data\n", "assert b'Testing the create route' in VAR_8.data\n" ]
[ "def test_create_note(test_app, client: FlaskClient):...\n", "note_data = {'title': 'Testing the create route', 'tags': 'testing,note',\n 'path': '', 'submit': 'true'}\n", "resp = client.post('/notes/new', data=note_data)\n", "assert resp.status_code == 302\n", "assert not b'invalid' in resp.data\n", "resp = client.post('/notes/new', data=note_data, follow_redirects=True)\n", "assert resp.status_code == 200\n", "assert b'<span class=\"post-tag\">note</span>' in resp.data\n", "assert b'<span class=\"post-tag\">testing</span>' in resp.data\n", "assert b'Testing the create route' in resp.data\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assert'", "Assert'", "Assign'", "Assert'", "Assert'", "Assert'", "Assert'" ]
[ "def FUNC_101(VAR_6):...\n", "return '<b>{0}</b>'.format(VAR_6)\n" ]
[ "def bold(text):...\n", "return '<b>{0}</b>'.format(text)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __init__(self, **VAR_28):...\n", "VAR_28['appbuilder'] = current_app.appbuilder\n", "super().__init__(**kwargs)\n" ]
[ "def __init__(self, **kwargs):...\n", "kwargs['appbuilder'] = current_app.appbuilder\n", "super().__init__(**kwargs)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "@staticmethod...\n", "VAR_148 = VAR_72.split('-')\n", "VAR_48 = '-'.join(VAR_148[0:-1])\n", "return VAR_48, VAR_148[-1]\n" ]
[ "@staticmethod...\n", "parts = query.split('-')\n", "queue = '-'.join(parts[0:-1])\n", "return queue, parts[-1]\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Return'" ]
[ "import abc\n", "import functools\n", "import logging\n", "import os\n", "from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, TypeVar, cast\n", "import twisted.internet.base\n", "import twisted.internet.tcp\n", "from twisted.mail.smtp import sendmail\n", "from twisted.web.iweb import IPolicyForHTTPS\n", "from synapse.api.auth import Auth\n", "from synapse.api.filtering import Filtering\n", "from synapse.api.ratelimiting import Ratelimiter\n", "from synapse.appservice.api import ApplicationServiceApi\n", "from synapse.appservice.scheduler import ApplicationServiceScheduler\n", "from synapse.config.homeserver import HomeServerConfig\n", "from synapse.crypto import context_factory\n", "from synapse.crypto.context_factory import RegularPolicyForHTTPS\n", "from synapse.crypto.keyring import Keyring\n", "from synapse.events.builder import EventBuilderFactory\n", "from synapse.events.spamcheck import SpamChecker\n", "from synapse.events.third_party_rules import ThirdPartyEventRules\n", "from synapse.events.utils import EventClientSerializer\n", "from synapse.federation.federation_client import FederationClient\n", "from synapse.federation.federation_server import FederationHandlerRegistry, FederationServer\n", "from synapse.federation.send_queue import FederationRemoteSendQueue\n", "from synapse.federation.sender import FederationSender\n", "from synapse.federation.transport.client import TransportLayerClient\n", "from synapse.groups.attestations import GroupAttestationSigning, GroupAttestionRenewer\n", "from synapse.groups.groups_server import GroupsServerHandler, GroupsServerWorkerHandler\n", "from synapse.handlers.account_validity import AccountValidityHandler\n", "from synapse.handlers.acme import AcmeHandler\n", "from synapse.handlers.admin import AdminHandler\n", "from synapse.handlers.appservice import ApplicationServicesHandler\n", "from synapse.handlers.auth import AuthHandler, MacaroonGenerator\n", "from synapse.handlers.cas_handler import CasHandler\n", "from synapse.handlers.deactivate_account import DeactivateAccountHandler\n", "from synapse.handlers.device import DeviceHandler, DeviceWorkerHandler\n", "from synapse.handlers.devicemessage import DeviceMessageHandler\n", "from synapse.handlers.directory import DirectoryHandler\n", "from synapse.handlers.e2e_keys import E2eKeysHandler\n", "from synapse.handlers.e2e_room_keys import E2eRoomKeysHandler\n", "from synapse.handlers.events import EventHandler, EventStreamHandler\n", "from synapse.handlers.federation import FederationHandler\n", "from synapse.handlers.groups_local import GroupsLocalHandler, GroupsLocalWorkerHandler\n", "from synapse.handlers.identity import IdentityHandler\n", "from synapse.handlers.initial_sync import InitialSyncHandler\n", "from synapse.handlers.message import EventCreationHandler, MessageHandler\n", "from synapse.handlers.pagination import PaginationHandler\n", "from synapse.handlers.password_policy import PasswordPolicyHandler\n", "from synapse.handlers.presence import PresenceHandler\n", "from synapse.handlers.profile import ProfileHandler\n", "from synapse.handlers.read_marker import ReadMarkerHandler\n", "from synapse.handlers.receipts import ReceiptsHandler\n", "from synapse.handlers.register import RegistrationHandler\n", "from synapse.handlers.room import RoomContextHandler, RoomCreationHandler, RoomShutdownHandler\n", "from synapse.handlers.room_list import RoomListHandler\n", "from synapse.handlers.room_member import RoomMemberMasterHandler\n", "from 
synapse.handlers.room_member_worker import RoomMemberWorkerHandler\n", "from synapse.handlers.search import SearchHandler\n", "from synapse.handlers.set_password import SetPasswordHandler\n", "from synapse.handlers.sso import SsoHandler\n", "from synapse.handlers.stats import StatsHandler\n", "from synapse.handlers.sync import SyncHandler\n", "from synapse.handlers.typing import FollowerTypingHandler, TypingWriterHandler\n", "from synapse.handlers.user_directory import UserDirectoryHandler\n", "from synapse.http.client import InsecureInterceptableContextFactory, SimpleHttpClient\n", "from synapse.http.matrixfederationclient import MatrixFederationHttpClient\n", "from synapse.module_api import ModuleApi\n", "from synapse.notifier import Notifier\n", "from synapse.push.action_generator import ActionGenerator\n", "from synapse.push.pusherpool import PusherPool\n", "from synapse.replication.tcp.client import ReplicationDataHandler\n", "from synapse.replication.tcp.handler import ReplicationCommandHandler\n", "from synapse.replication.tcp.resource import ReplicationStreamer\n", "from synapse.replication.tcp.streams import STREAMS_MAP, Stream\n", "from synapse.rest.media.v1.media_repository import MediaRepository, MediaRepositoryResource\n", "from synapse.secrets import Secrets\n", "from synapse.server_notices.server_notices_manager import ServerNoticesManager\n", "from synapse.server_notices.server_notices_sender import ServerNoticesSender\n", "from synapse.server_notices.worker_server_notices_sender import WorkerServerNoticesSender\n", "from synapse.state import StateHandler, StateResolutionHandler\n", "from synapse.storage import Databases, DataStore, Storage\n", "from synapse.streams.events import EventSources\n", "from synapse.types import DomainSpecificString\n", "from synapse.util import Clock\n", "from synapse.util.distributor import Distributor\n", "from synapse.util.ratelimitutils import FederationRateLimiter\n", "from synapse.util.stringutils import random_string\n", "VAR_0 = logging.getLogger(__name__)\n", "if TYPE_CHECKING:\n", "from synapse.handlers.oidc_handler import OidcHandler\n", "VAR_1 = TypeVar('T', bound=Callable[..., Any])\n", "from synapse.handlers.saml_handler import SamlHandler\n", "def FUNC_0(VAR_2: T) ->VAR_1:...\n", "\"\"\"docstring\"\"\"\n", "if not VAR_2.__name__.startswith('get_'):\n", "VAR_3 = VAR_2.__name__[len('get'):]\n", "VAR_4 = [False]\n", "@functools.wraps(VAR_2)...\n", "return getattr(self, VAR_3)\n", "if VAR_4[0]:\n", "VAR_4[0] = True\n", "VAR_18 = VAR_2(self)\n", "VAR_4[0] = False\n", "return VAR_18\n", "setattr(self, VAR_3, VAR_18)\n" ]
[ "import abc\n", "import functools\n", "import logging\n", "import os\n", "from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, TypeVar, cast\n", "import twisted.internet.base\n", "import twisted.internet.tcp\n", "from twisted.mail.smtp import sendmail\n", "from twisted.web.iweb import IPolicyForHTTPS\n", "from synapse.api.auth import Auth\n", "from synapse.api.filtering import Filtering\n", "from synapse.api.ratelimiting import Ratelimiter\n", "from synapse.appservice.api import ApplicationServiceApi\n", "from synapse.appservice.scheduler import ApplicationServiceScheduler\n", "from synapse.config.homeserver import HomeServerConfig\n", "from synapse.crypto import context_factory\n", "from synapse.crypto.context_factory import RegularPolicyForHTTPS\n", "from synapse.crypto.keyring import Keyring\n", "from synapse.events.builder import EventBuilderFactory\n", "from synapse.events.spamcheck import SpamChecker\n", "from synapse.events.third_party_rules import ThirdPartyEventRules\n", "from synapse.events.utils import EventClientSerializer\n", "from synapse.federation.federation_client import FederationClient\n", "from synapse.federation.federation_server import FederationHandlerRegistry, FederationServer\n", "from synapse.federation.send_queue import FederationRemoteSendQueue\n", "from synapse.federation.sender import FederationSender\n", "from synapse.federation.transport.client import TransportLayerClient\n", "from synapse.groups.attestations import GroupAttestationSigning, GroupAttestionRenewer\n", "from synapse.groups.groups_server import GroupsServerHandler, GroupsServerWorkerHandler\n", "from synapse.handlers.account_validity import AccountValidityHandler\n", "from synapse.handlers.acme import AcmeHandler\n", "from synapse.handlers.admin import AdminHandler\n", "from synapse.handlers.appservice import ApplicationServicesHandler\n", "from synapse.handlers.auth import AuthHandler, MacaroonGenerator\n", "from synapse.handlers.cas_handler import CasHandler\n", "from synapse.handlers.deactivate_account import DeactivateAccountHandler\n", "from synapse.handlers.device import DeviceHandler, DeviceWorkerHandler\n", "from synapse.handlers.devicemessage import DeviceMessageHandler\n", "from synapse.handlers.directory import DirectoryHandler\n", "from synapse.handlers.e2e_keys import E2eKeysHandler\n", "from synapse.handlers.e2e_room_keys import E2eRoomKeysHandler\n", "from synapse.handlers.events import EventHandler, EventStreamHandler\n", "from synapse.handlers.federation import FederationHandler\n", "from synapse.handlers.groups_local import GroupsLocalHandler, GroupsLocalWorkerHandler\n", "from synapse.handlers.identity import IdentityHandler\n", "from synapse.handlers.initial_sync import InitialSyncHandler\n", "from synapse.handlers.message import EventCreationHandler, MessageHandler\n", "from synapse.handlers.pagination import PaginationHandler\n", "from synapse.handlers.password_policy import PasswordPolicyHandler\n", "from synapse.handlers.presence import PresenceHandler\n", "from synapse.handlers.profile import ProfileHandler\n", "from synapse.handlers.read_marker import ReadMarkerHandler\n", "from synapse.handlers.receipts import ReceiptsHandler\n", "from synapse.handlers.register import RegistrationHandler\n", "from synapse.handlers.room import RoomContextHandler, RoomCreationHandler, RoomShutdownHandler\n", "from synapse.handlers.room_list import RoomListHandler\n", "from synapse.handlers.room_member import RoomMemberMasterHandler\n", "from 
synapse.handlers.room_member_worker import RoomMemberWorkerHandler\n", "from synapse.handlers.search import SearchHandler\n", "from synapse.handlers.set_password import SetPasswordHandler\n", "from synapse.handlers.sso import SsoHandler\n", "from synapse.handlers.stats import StatsHandler\n", "from synapse.handlers.sync import SyncHandler\n", "from synapse.handlers.typing import FollowerTypingHandler, TypingWriterHandler\n", "from synapse.handlers.user_directory import UserDirectoryHandler\n", "from synapse.http.client import InsecureInterceptableContextFactory, SimpleHttpClient\n", "from synapse.http.matrixfederationclient import MatrixFederationHttpClient\n", "from synapse.module_api import ModuleApi\n", "from synapse.notifier import Notifier\n", "from synapse.push.action_generator import ActionGenerator\n", "from synapse.push.pusherpool import PusherPool\n", "from synapse.replication.tcp.client import ReplicationDataHandler\n", "from synapse.replication.tcp.handler import ReplicationCommandHandler\n", "from synapse.replication.tcp.resource import ReplicationStreamer\n", "from synapse.replication.tcp.streams import STREAMS_MAP, Stream\n", "from synapse.rest.media.v1.media_repository import MediaRepository, MediaRepositoryResource\n", "from synapse.secrets import Secrets\n", "from synapse.server_notices.server_notices_manager import ServerNoticesManager\n", "from synapse.server_notices.server_notices_sender import ServerNoticesSender\n", "from synapse.server_notices.worker_server_notices_sender import WorkerServerNoticesSender\n", "from synapse.state import StateHandler, StateResolutionHandler\n", "from synapse.storage import Databases, DataStore, Storage\n", "from synapse.streams.events import EventSources\n", "from synapse.types import DomainSpecificString\n", "from synapse.util import Clock\n", "from synapse.util.distributor import Distributor\n", "from synapse.util.ratelimitutils import FederationRateLimiter\n", "from synapse.util.stringutils import random_string\n", "logger = logging.getLogger(__name__)\n", "if TYPE_CHECKING:\n", "from synapse.handlers.oidc_handler import OidcHandler\n", "T = TypeVar('T', bound=Callable[..., Any])\n", "from synapse.handlers.saml_handler import SamlHandler\n", "def cache_in_self(builder: T) ->T:...\n", "\"\"\"docstring\"\"\"\n", "if not builder.__name__.startswith('get_'):\n", "depname = builder.__name__[len('get'):]\n", "building = [False]\n", "@functools.wraps(builder)...\n", "return getattr(self, depname)\n", "if building[0]:\n", "building[0] = True\n", "dep = builder(self)\n", "building[0] = False\n", "return dep\n", "setattr(self, depname, dep)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Condition", "ImportFrom'", "Assign'", "ImportFrom'", "FunctionDef'", "Docstring", "Condition", "Assign'", "Assign'", "Condition", "Return'", "Condition", "Assign'", "Assign'", "Assign'", "Return'", "Expr'" ]
[ "def FUNC_4(VAR_9):...\n", "\"\"\"docstring\"\"\"\n", "if ':' not in VAR_9:\n", "return False\n", "VAR_42 = VAR_9.split(':', 1)[0].lower()\n", "return VAR_42 in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes\n" ]
[ "def is_url(name):...\n", "\"\"\"docstring\"\"\"\n", "if ':' not in name:\n", "return False\n", "scheme = name.split(':', 1)[0].lower()\n", "return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Assign'", "Return'" ]
[ "@VAR_0.simple_tag...\n", "\"\"\"docstring\"\"\"\n", "VAR_53 = reverse('rest_framework:logout')\n", "VAR_41 = format_html('<li class=\"navbar-text\">{user}</li>', VAR_14=escape(user)\n )\n", "VAR_41 = 'string'\n", "return mark_safe(VAR_41)\n", "VAR_41 = format_html(VAR_41, VAR_14=escape(user), href=logout_url, next=\n escape(request.path))\n", "return mark_safe(VAR_41)\n" ]
[ "@register.simple_tag...\n", "\"\"\"docstring\"\"\"\n", "logout_url = reverse('rest_framework:logout')\n", "snippet = format_html('<li class=\"navbar-text\">{user}</li>', user=escape(user))\n", "snippet = \"\"\"<li class=\"dropdown\">\n <a href=\"#\" class=\"dropdown-toggle\" data-toggle=\"dropdown\">\n {user}\n <b class=\"caret\"></b>\n </a>\n <ul class=\"dropdown-menu\">\n <li><a href='{href}?next={next}'>Log out</a></li>\n </ul>\n </li>\"\"\"\n", "return mark_safe(snippet)\n", "snippet = format_html(snippet, user=escape(user), href=logout_url, next=\n escape(request.path))\n", "return mark_safe(snippet)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Return'", "Assign'", "Return'" ]
[ "def FUNC_9(self, VAR_15: str) ->tuple[str, str]:...\n", "\"\"\"docstring\"\"\"\n", "if VAR_15 not in self.ledger.options['include']:\n", "VAR_28 = file.read()\n", "VAR_10 = sha256(VAR_28).hexdigest()\n", "VAR_16 = decode(VAR_28)\n", "return VAR_16, VAR_10\n" ]
[ "def get_source(self, path: str) ->tuple[str, str]:...\n", "\"\"\"docstring\"\"\"\n", "if path not in self.ledger.options['include']:\n", "contents = file.read()\n", "sha256sum = sha256(contents).hexdigest()\n", "source = decode(contents)\n", "return source, sha256sum\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_29(self, VAR_44):...\n", "VAR_49 = VAR_44.group(1)\n", "if self._in_link:\n", "return self.renderer.text(VAR_49)\n", "return self.renderer.autolink(VAR_49, False)\n" ]
[ "def output_url(self, m):...\n", "link = m.group(1)\n", "if self._in_link:\n", "return self.renderer.text(link)\n", "return self.renderer.autolink(link, False)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def __eq__(self, VAR_8) ->bool:...\n", "\"\"\"docstring\"\"\"\n", "return isinstance(VAR_8, CLASS_1\n ) and self.name == VAR_8.name and self.path == VAR_8.path and self.url == VAR_8.url\n" ]
[ "def __eq__(self, other) ->bool:...\n", "\"\"\"docstring\"\"\"\n", "return isinstance(other, CustomComponent\n ) and self.name == other.name and self.path == other.path and self.url == other.url\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_1(self):...\n", "VAR_16 = 'some_media_id'\n", "VAR_17 = 'Test\\n'\n", "VAR_18 = self.filepaths.local_media_filepath_rel(VAR_16)\n", "VAR_19 = os.path.join(self.secondary_base_path, VAR_18)\n", "os.makedirs(os.path.dirname(VAR_19))\n", "f.write(VAR_17)\n", "VAR_20 = FileInfo(None, VAR_16)\n", "VAR_21 = defer.ensureDeferred(self.media_storage.\n ensure_media_is_in_local_cache(VAR_20))\n", "self.wait_on_thread(VAR_21)\n", "VAR_22 = self.get_success(VAR_21)\n", "self.assertTrue(os.path.exists(VAR_22))\n", "self.assertEquals(os.path.commonprefix([self.primary_base_path, VAR_22]),\n self.primary_base_path)\n", "VAR_36 = f.read()\n", "self.assertEqual(VAR_17, VAR_36)\n" ]
[ "def test_ensure_media_is_in_local_cache(self):...\n", "media_id = 'some_media_id'\n", "test_body = 'Test\\n'\n", "rel_path = self.filepaths.local_media_filepath_rel(media_id)\n", "secondary_path = os.path.join(self.secondary_base_path, rel_path)\n", "os.makedirs(os.path.dirname(secondary_path))\n", "f.write(test_body)\n", "file_info = FileInfo(None, media_id)\n", "x = defer.ensureDeferred(self.media_storage.ensure_media_is_in_local_cache(\n file_info))\n", "self.wait_on_thread(x)\n", "local_path = self.get_success(x)\n", "self.assertTrue(os.path.exists(local_path))\n", "self.assertEquals(os.path.commonprefix([self.primary_base_path, local_path]\n ), self.primary_base_path)\n", "body = f.read()\n", "self.assertEqual(test_body, body)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_17(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_15 = '#test2:test'\n", "VAR_16 = self._add_alias(VAR_15)\n", "self._set_canonical_alias({'alias': self.test_alias, 'alt_aliases': [self.\n test_alias, VAR_15]})\n", "VAR_14 = self._get_canonical_alias()\n", "self.assertEqual(VAR_14['content']['alias'], self.test_alias)\n", "self.assertEqual(VAR_14['content']['alt_aliases'], [self.test_alias, VAR_15])\n", "self.get_success(self.handler.delete_association(create_requester(self.\n admin_user), VAR_16))\n", "VAR_14 = self._get_canonical_alias()\n", "self.assertEqual(VAR_14['content']['alias'], self.test_alias)\n", "self.assertEqual(VAR_14['content']['alt_aliases'], [self.test_alias])\n" ]
[ "def test_remove_other_alias(self):...\n", "\"\"\"docstring\"\"\"\n", "other_test_alias = '#test2:test'\n", "other_room_alias = self._add_alias(other_test_alias)\n", "self._set_canonical_alias({'alias': self.test_alias, 'alt_aliases': [self.\n test_alias, other_test_alias]})\n", "data = self._get_canonical_alias()\n", "self.assertEqual(data['content']['alias'], self.test_alias)\n", "self.assertEqual(data['content']['alt_aliases'], [self.test_alias,\n other_test_alias])\n", "self.get_success(self.handler.delete_association(create_requester(self.\n admin_user), other_room_alias))\n", "data = self._get_canonical_alias()\n", "self.assertEqual(data['content']['alias'], self.test_alias)\n", "self.assertEqual(data['content']['alt_aliases'], [self.test_alias])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "from __future__ import unicode_literals, print_function\n", "import frappe\n", "import time\n", "from frappe import _, msgprint\n", "from frappe.utils import flt, cstr, now, get_datetime_str, file_lock, date_diff\n", "from frappe.model.base_document import BaseDocument, get_controller\n", "from frappe.model.naming import set_new_name\n", "from six import iteritems, string_types\n", "from werkzeug.exceptions import NotFound, Forbidden\n", "import hashlib, json\n", "from frappe.model import optional_fields, table_fields\n", "from frappe.model.workflow import validate_workflow\n", "from frappe.model.workflow import set_workflow_state_on_action\n", "from frappe.utils.global_search import update_global_search\n", "from frappe.integrations.doctype.webhook import run_webhooks\n", "from frappe.desk.form.document_follow import follow_document\n", "from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event\n", "def FUNC_0(*VAR_0, **VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_0:\n", "if isinstance(VAR_0[0], BaseDocument):\n", "if len(VAR_0) < 2 and VAR_1:\n", "return VAR_0[0]\n", "if isinstance(VAR_0[0], string_types):\n", "if 'doctype' in VAR_1:\n", "VAR_5 = get_controller(VAR_2)\n", "VAR_2 = VAR_0[0]\n", "if isinstance(VAR_0[0], dict):\n", "VAR_2 = VAR_1['doctype']\n", "if VAR_5:\n", "VAR_1 = VAR_0[0]\n", "return VAR_5(*VAR_0, **kwargs)\n", "\"\"\"All controllers inherit from `Document`.\"\"\"\n", "def __init__(self, *VAR_0, **VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "self.doctype = self.name = None\n", "self._default_new_docs = {}\n", "self.flags = frappe._dict()\n", "if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], string_types):\n", "if len(VAR_0) == 1:\n", "if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], dict):\n", "self.doctype = self.name = VAR_0[0]\n", "self.doctype = VAR_0[0]\n", "VAR_1 = VAR_0[0]\n", "if VAR_1:\n", "self.load_from_db()\n", "if isinstance(VAR_0[1], dict):\n", "super(CLASS_0, self).__init__(VAR_1)\n", "@staticmethod...\n", "return\n", "self.name = frappe.db.get_value(VAR_0[0], VAR_0[1], 'name')\n", "self.name = VAR_0[1]\n", "self.init_valid_columns()\n", "\"\"\"docstring\"\"\"\n", "if self.name is None:\n", "if 'for_update' in VAR_1:\n", "VAR_6.whitelisted = True\n", "frappe.throw(_('{0} {1} not found').format(_(VAR_0[0]), VAR_0[1]), frappe.\n DoesNotExistError)\n", "self.flags.for_update = VAR_1.get('for_update')\n", "return VAR_6\n" ]
[ "from __future__ import unicode_literals, print_function\n", "import frappe\n", "import time\n", "from frappe import _, msgprint\n", "from frappe.utils import flt, cstr, now, get_datetime_str, file_lock, date_diff\n", "from frappe.model.base_document import BaseDocument, get_controller\n", "from frappe.model.naming import set_new_name\n", "from six import iteritems, string_types\n", "from werkzeug.exceptions import NotFound, Forbidden\n", "import hashlib, json\n", "from frappe.model import optional_fields, table_fields\n", "from frappe.model.workflow import validate_workflow\n", "from frappe.model.workflow import set_workflow_state_on_action\n", "from frappe.utils.global_search import update_global_search\n", "from frappe.integrations.doctype.webhook import run_webhooks\n", "from frappe.desk.form.document_follow import follow_document\n", "from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event\n", "def get_doc(*args, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "if args:\n", "if isinstance(args[0], BaseDocument):\n", "if len(args) < 2 and kwargs:\n", "return args[0]\n", "if isinstance(args[0], string_types):\n", "if 'doctype' in kwargs:\n", "controller = get_controller(doctype)\n", "doctype = args[0]\n", "if isinstance(args[0], dict):\n", "doctype = kwargs['doctype']\n", "if controller:\n", "kwargs = args[0]\n", "return controller(*args, **kwargs)\n", "\"\"\"All controllers inherit from `Document`.\"\"\"\n", "def __init__(self, *args, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "self.doctype = self.name = None\n", "self._default_new_docs = {}\n", "self.flags = frappe._dict()\n", "if args and args[0] and isinstance(args[0], string_types):\n", "if len(args) == 1:\n", "if args and args[0] and isinstance(args[0], dict):\n", "self.doctype = self.name = args[0]\n", "self.doctype = args[0]\n", "kwargs = args[0]\n", "if kwargs:\n", "self.load_from_db()\n", "if isinstance(args[1], dict):\n", "super(Document, self).__init__(kwargs)\n", "@staticmethod...\n", "return\n", "self.name = frappe.db.get_value(args[0], args[1], 'name')\n", "self.name = args[1]\n", "self.init_valid_columns()\n", "\"\"\"docstring\"\"\"\n", "if self.name is None:\n", "if 'for_update' in kwargs:\n", "f.whitelisted = True\n", "frappe.throw(_('{0} {1} not found').format(_(args[0]), args[1]), frappe.\n DoesNotExistError)\n", "self.flags.for_update = kwargs.get('for_update')\n", "return f\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 2 ]
[ "ImportFrom'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Docstring", "Condition", "Condition", "Condition", "Return'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Return'", "Expr'", "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Return'", "Assign'", "Assign'", "Expr'", "Docstring", "Condition", "For", "Assign'", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_6(self, VAR_0, VAR_1, VAR_11=None):...\n", "def FUNC_12(VAR_4):...\n", "if not self._check_permission([VAR_0], VAR_1, VAR_4):\n", "if callable(VAR_11):\n", "return VAR_4\n", "VAR_11()\n", "self._deny_hook()\n" ]
[ "def check_perm(self, role, method, callback=None):...\n", "def decorator(view_func):...\n", "if not self._check_permission([role], method, view_func):\n", "if callable(callback):\n", "return view_func\n", "callback()\n", "self._deny_hook()\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Condition", "Condition", "Return'", "Expr'", "Expr'" ]
[ "def FUNC_15(VAR_3, **VAR_4):...\n", "return 200, {'result': True}\n" ]
[ "def _callback(request, **kwargs):...\n", "return 200, {'result': True}\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_66(VAR_6):...\n", "if VAR_122 is not None:\n", "return reverse(VAR_120, VAR_116=(iid, thumbsize))\n", "return reverse(VAR_120, VAR_116=(iid,))\n" ]
[ "def get_thumb_url(iid):...\n", "if thumbsize is not None:\n", "return reverse(prefix, args=(iid, thumbsize))\n", "return reverse(prefix, args=(iid,))\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_24(VAR_21):...\n", "\"\"\"docstring\"\"\"\n", "VAR_49 = 'string'\n", "VAR_50 = VAR_21.add_parser('run', description=run_msg, formatter_class=\n argparse.RawTextHelpFormatter)\n", "VAR_50.add_argument('--dir', type=str, required=True, help=\n 'directory containing the SavedModel to execute')\n", "VAR_50.add_argument('--tag_set', type=str, required=True, help=\n \"tag-set of graph in SavedModel to load, separated by ','\")\n", "VAR_50.add_argument('--signature_def', type=str, required=True, metavar=\n 'SIGNATURE_DEF_KEY', help='key of SignatureDef to run')\n", "VAR_51 = 'string'\n", "VAR_50.add_argument('--inputs', type=str, default='', help=msg)\n", "VAR_51 = 'string'\n", "VAR_50.add_argument('--input_exprs', type=str, default='', help=msg)\n", "VAR_51 = 'string'\n", "VAR_50.add_argument('--input_examples', type=str, default='', help=msg)\n", "VAR_50.add_argument('--outdir', type=str, default=None, help=\n 'if specified, output tensor(s) will be saved to given directory')\n", "VAR_50.add_argument('--overwrite', action='store_true', help=\n 'if set, output file will be overwritten if it already exists.')\n", "VAR_50.add_argument('--tf_debug', action='store_true', help=\n 'if set, will use TensorFlow Debugger (tfdbg) to watch the intermediate Tensors and runtime GraphDefs while running the SavedModel.'\n )\n", "VAR_50.add_argument('--worker', type=str, default=None, help=\n 'if specified, a Session will be run on the worker. Valid worker specification is a bns or gRPC path.'\n )\n", "VAR_50.add_argument('--init_tpu', action='store_true', default=None, help=\n 'if specified, tpu.initialize_system will be called on the Session. This option should be only used if the worker is a TPU job.'\n )\n", "VAR_50.set_defaults(func=run)\n" ]
[ "def add_run_subparser(subparsers):...\n", "\"\"\"docstring\"\"\"\n", "run_msg = \"\"\"Usage example:\nTo run input tensors from files through a MetaGraphDef and save the output tensors to files:\n$saved_model_cli show --dir /tmp/saved_model --tag_set serve \\\\\n --signature_def serving_default \\\\\n --inputs input1_key=/tmp/124.npz[x],input2_key=/tmp/123.npy \\\\\n --input_exprs 'input3_key=np.ones(2)' \\\\\n --input_examples 'input4_key=[{\"id\":[26],\"weights\":[0.5, 0.5]}]' \\\\\n --outdir=/out\n\nFor more information about input file format, please see:\nhttps://www.tensorflow.org/guide/saved_model_cli\n\"\"\"\n", "parser_run = subparsers.add_parser('run', description=run_msg,\n formatter_class=argparse.RawTextHelpFormatter)\n", "parser_run.add_argument('--dir', type=str, required=True, help=\n 'directory containing the SavedModel to execute')\n", "parser_run.add_argument('--tag_set', type=str, required=True, help=\n \"tag-set of graph in SavedModel to load, separated by ','\")\n", "parser_run.add_argument('--signature_def', type=str, required=True, metavar\n ='SIGNATURE_DEF_KEY', help='key of SignatureDef to run')\n", "msg = (\n \"Loading inputs from files, in the format of '<input_key>=<filename>, or '<input_key>=<filename>[<variable_name>]', separated by ';'. The file format can only be from .npy, .npz or pickle.\"\n )\n", "parser_run.add_argument('--inputs', type=str, default='', help=msg)\n", "msg = (\n 'Specifying inputs by python expressions, in the format of \"<input_key>=\\'<python expression>\\'\", separated by \\';\\'. numpy module is available as \\'np\\'. Will override duplicate input keys from --inputs option.'\n )\n", "parser_run.add_argument('--input_exprs', type=str, default='', help=msg)\n", "msg = (\n 'Specifying tf.Example inputs as list of dictionaries. For example: <input_key>=[{feature0:value_list,feature1:value_list}]. Use \";\" to separate input keys. Will override duplicate input keys from --inputs and --input_exprs option.'\n )\n", "parser_run.add_argument('--input_examples', type=str, default='', help=msg)\n", "parser_run.add_argument('--outdir', type=str, default=None, help=\n 'if specified, output tensor(s) will be saved to given directory')\n", "parser_run.add_argument('--overwrite', action='store_true', help=\n 'if set, output file will be overwritten if it already exists.')\n", "parser_run.add_argument('--tf_debug', action='store_true', help=\n 'if set, will use TensorFlow Debugger (tfdbg) to watch the intermediate Tensors and runtime GraphDefs while running the SavedModel.'\n )\n", "parser_run.add_argument('--worker', type=str, default=None, help=\n 'if specified, a Session will be run on the worker. Valid worker specification is a bns or gRPC path.'\n )\n", "parser_run.add_argument('--init_tpu', action='store_true', default=None,\n help=\n 'if specified, tpu.initialize_system will be called on the Session. This option should be only used if the worker is a TPU job.'\n )\n", "parser_run.set_defaults(func=run)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_28():...\n", "def FUNC_116(VAR_129):...\n", "def FUNC_121(*VAR_79, **VAR_42):...\n", "if VAR_13.read_from_replica:\n", "FUNC_6()\n", "VAR_225 = VAR_129(*VAR_79, **get_newargs(fn, kwargs))\n", "if VAR_1 and hasattr(VAR_1, 'primary_db'):\n", "return VAR_225\n", "VAR_1.db.close()\n", "VAR_1.db = VAR_1.primary_db\n" ]
[ "def read_only():...\n", "def innfn(fn):...\n", "def wrapper_fn(*args, **kwargs):...\n", "if conf.read_from_replica:\n", "connect_replica()\n", "retval = fn(*args, **get_newargs(fn, kwargs))\n", "if local and hasattr(local, 'primary_db'):\n", "return retval\n", "local.db.close()\n", "local.db = local.primary_db\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "FunctionDef'", "Condition", "Expr'", "Assign'", "Condition", "Return'", "Expr'", "Assign'" ]
[ "@VAR_1.route('/get_requests', methods=['POST'])...\n", "VAR_4 = VAR_2.sentences_stats('get_requests')\n", "return json.dumps({'status': 'OK', 'd': VAR_4})\n" ]
[ "@app.route('/get_requests', methods=['POST'])...\n", "d = db.sentences_stats('get_requests')\n", "return json.dumps({'status': 'OK', 'd': d})\n" ]
[ 0, 0, 0 ]
[ "Condition", "Assign'", "Return'" ]
[ "def FUNC_22(self, VAR_19=None):...\n", "if self.end:\n", "return True\n", "return not self.same_part(VAR_19, self.item, self._next)\n" ]
[ "def last(self, name=None):...\n", "if self.end:\n", "return True\n", "return not self.same_part(name, self.item, self._next)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_6(VAR_7, VAR_8='Argument', VAR_6=0):...\n", "\"\"\"docstring\"\"\"\n", "VAR_26 = ' ' * VAR_6\n", "def FUNC_31(VAR_31):...\n", "VAR_63 = \"'\" * isinstance(VAR_31, str)\n", "return VAR_63 + str(VAR_31) + VAR_63\n" ]
[ "def _print_args(arguments, argument_type='Argument', indent=0):...\n", "\"\"\"docstring\"\"\"\n", "indent_str = ' ' * indent\n", "def _maybe_add_quotes(value):...\n", "is_quotes = \"'\" * isinstance(value, str)\n", "return is_quotes + str(value) + is_quotes\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_82(VAR_67):...\n", "if not VAR_67.name in self.flags.notifications_executed:\n", "evaluate_alert(self, VAR_67.name, VAR_67.event)\n", "self.flags.notifications_executed.append(VAR_67.name)\n" ]
[ "def _evaluate_alert(alert):...\n", "if not alert.name in self.flags.notifications_executed:\n", "evaluate_alert(self, alert.name, alert.event)\n", "self.flags.notifications_executed.append(alert.name)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_169(VAR_212, VAR_215, VAR_101=VAR_101):...\n", "for VAR_440 in (VAR_212.tags or []):\n", "VAR_440 = VAR_440.strip().lower()\n", "if VAR_440:\n", "VAR_101.wiki_tag.insert(VAR_148=tag, wiki_page=id)\n" ]
[ "def update_tags_insert(page, id, db=db):...\n", "for tag in (page.tags or []):\n", "tag = tag.strip().lower()\n", "if tag:\n", "db.wiki_tag.insert(name=tag, wiki_page=id)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Assign'", "Condition", "Expr'" ]
[ "@transaction.non_atomic_requests...\n", "\"\"\"docstring\"\"\"\n", "VAR_38 = CourseKey.from_string(VAR_10)\n", "VAR_41, VAR_40 = FUNC_74(VAR_9, VAR_38)\n", "return JsonResponse({'success': False, 'message': VAR_169.message}, status=400)\n", "if VAR_9.method == 'POST':\n", "if VAR_9.method == 'DELETE':\n", "VAR_154 = FUNC_72(VAR_38, VAR_40, VAR_41)\n", "return JsonResponse({'success': False, 'message': VAR_169.message}, status=400)\n", "return JsonResponse(VAR_154)\n", "FUNC_73(VAR_38, VAR_40)\n", "return JsonResponse({'success': False, 'message': VAR_169.message}, status=400)\n", "return JsonResponse({}, status=204)\n" ]
[ "@transaction.non_atomic_requests...\n", "\"\"\"docstring\"\"\"\n", "course_key = CourseKey.from_string(course_id)\n", "certificate_exception, student = parse_request_data_and_get_user(request,\n course_key)\n", "return JsonResponse({'success': False, 'message': error.message}, status=400)\n", "if request.method == 'POST':\n", "if request.method == 'DELETE':\n", "exception = add_certificate_exception(course_key, student,\n certificate_exception)\n", "return JsonResponse({'success': False, 'message': error.message}, status=400)\n", "return JsonResponse(exception)\n", "remove_certificate_exception(course_key, student)\n", "return JsonResponse({'success': False, 'message': error.message}, status=400)\n", "return JsonResponse({}, status=204)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Return'", "Condition", "Condition", "Assign'", "Return'", "Return'", "Expr'", "Return'", "Return'" ]
[ "def FUNC_48(self, VAR_67, VAR_68=True):...\n", "\"\"\"docstring\"\"\"\n", "VAR_82 = 'ul'\n", "if VAR_68:\n", "VAR_82 = 'ol'\n", "return '<%s>\\n%s</%s>\\n' % (VAR_82, VAR_67, VAR_82)\n" ]
[ "def list(self, body, ordered=True):...\n", "\"\"\"docstring\"\"\"\n", "tag = 'ul'\n", "if ordered:\n", "tag = 'ol'\n", "return '<%s>\\n%s</%s>\\n' % (tag, body, tag)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Return'" ]
[ "@VAR_2.route('/show/<int:book_id>/<book_format>', defaults={'anyname': 'None'})...\n", "VAR_6 = VAR_6.split('.')[0]\n", "VAR_95 = calibre_db.get_book(VAR_5)\n", "VAR_8 = calibre_db.get_book_format(VAR_5, VAR_6.upper())\n", "if not VAR_8:\n", "return 'File not in Database'\n", "VAR_3.info('Serving book: %s', VAR_8.name)\n", "if config.config_use_google_drive:\n", "if VAR_6.upper() == 'TXT':\n", "VAR_133 = Headers()\n", "VAR_3.debug_or_exception(ex)\n", "return send_from_directory(os.path.join(config.config_calibre_dir, VAR_95.\n path), VAR_8.name + '.' + VAR_6)\n", "VAR_145 = open(os.path.join(config.config_calibre_dir, VAR_95.path, VAR_8.\n name + '.' + VAR_6), 'rb').read()\n", "VAR_3.error('File Not Found')\n", "VAR_133['Content-Type'] = mimetypes.types_map.get('.' + VAR_6,\n 'application/octet-stream')\n", "return 'File Not Found'\n", "VAR_135 = chardet.detect(VAR_145)\n", "return 'File Not Found'\n", "VAR_134 = getFileFromEbooksFolder(VAR_95.path, VAR_8.name + '.' + VAR_6)\n", "return make_response(VAR_145.decode(VAR_135['encoding'], 'surrogatepass').\n encode('utf-8', 'surrogatepass'))\n", "return do_gdrive_download(VAR_134, VAR_133, VAR_6.upper() == 'TXT')\n" ]
[ "@web.route('/show/<int:book_id>/<book_format>', defaults={'anyname': 'None'})...\n", "book_format = book_format.split('.')[0]\n", "book = calibre_db.get_book(book_id)\n", "data = calibre_db.get_book_format(book_id, book_format.upper())\n", "if not data:\n", "return 'File not in Database'\n", "log.info('Serving book: %s', data.name)\n", "if config.config_use_google_drive:\n", "if book_format.upper() == 'TXT':\n", "headers = Headers()\n", "log.debug_or_exception(ex)\n", "return send_from_directory(os.path.join(config.config_calibre_dir, book.\n path), data.name + '.' + book_format)\n", "rawdata = open(os.path.join(config.config_calibre_dir, book.path, data.name +\n '.' + book_format), 'rb').read()\n", "log.error('File Not Found')\n", "headers['Content-Type'] = mimetypes.types_map.get('.' + book_format,\n 'application/octet-stream')\n", "return 'File Not Found'\n", "result = chardet.detect(rawdata)\n", "return 'File Not Found'\n", "df = getFileFromEbooksFolder(book.path, data.name + '.' + book_format)\n", "return make_response(rawdata.decode(result['encoding'], 'surrogatepass').\n encode('utf-8', 'surrogatepass'))\n", "return do_gdrive_download(df, headers, book_format.upper() == 'TXT')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "For", "Assign'", "Assign'", "Assign'", "Condition", "Return'", "Expr'", "Condition", "Condition", "Assign'", "Expr'", "Return'", "Assign'", "Expr'", "Assign'", "Return'", "Assign'", "Return'", "Assign'", "Return'", "Return'" ]
[ "def __init__(self, VAR_2):...\n", "self.hs = VAR_2\n", "self.auth = VAR_2.get_auth()\n", "self.client = VAR_2.get_http_client()\n", "self.clock = VAR_2.get_clock()\n", "self.server_name = VAR_2.hostname\n", "self.store = VAR_2.get_datastore()\n", "self.max_upload_size = VAR_2.config.max_upload_size\n", "self.max_image_pixels = VAR_2.config.max_image_pixels\n", "self.primary_base_path = VAR_2.config.media_store_path\n", "self.filepaths = MediaFilePaths(self.primary_base_path)\n", "self.dynamic_thumbnails = VAR_2.config.dynamic_thumbnails\n", "self.thumbnail_requirements = VAR_2.config.thumbnail_requirements\n", "self.remote_media_linearizer = Linearizer(VAR_11='media_remote')\n", "self.recently_accessed_remotes = set()\n", "self.recently_accessed_locals = set()\n", "self.federation_domain_whitelist = VAR_2.config.federation_domain_whitelist\n", "VAR_23 = []\n", "for clz, provider_config, wrapper_config in VAR_2.config.media_storage_providers:\n", "VAR_42 = clz(VAR_2, provider_config)\n", "self.media_storage = MediaStorage(self.hs, self.primary_base_path, self.\n filepaths, VAR_23)\n", "VAR_43 = StorageProviderWrapper(VAR_42, store_local=wrapper_config.\n store_local, store_remote=wrapper_config.store_remote,\n store_synchronous=wrapper_config.store_synchronous)\n", "self.clock.looping_call(self._start_update_recently_accessed, VAR_1)\n", "VAR_23.append(VAR_43)\n" ]
[ "def __init__(self, hs):...\n", "self.hs = hs\n", "self.auth = hs.get_auth()\n", "self.client = hs.get_http_client()\n", "self.clock = hs.get_clock()\n", "self.server_name = hs.hostname\n", "self.store = hs.get_datastore()\n", "self.max_upload_size = hs.config.max_upload_size\n", "self.max_image_pixels = hs.config.max_image_pixels\n", "self.primary_base_path = hs.config.media_store_path\n", "self.filepaths = MediaFilePaths(self.primary_base_path)\n", "self.dynamic_thumbnails = hs.config.dynamic_thumbnails\n", "self.thumbnail_requirements = hs.config.thumbnail_requirements\n", "self.remote_media_linearizer = Linearizer(name='media_remote')\n", "self.recently_accessed_remotes = set()\n", "self.recently_accessed_locals = set()\n", "self.federation_domain_whitelist = hs.config.federation_domain_whitelist\n", "storage_providers = []\n", "for clz, provider_config, wrapper_config in hs.config.media_storage_providers:\n", "backend = clz(hs, provider_config)\n", "self.media_storage = MediaStorage(self.hs, self.primary_base_path, self.\n filepaths, storage_providers)\n", "provider = StorageProviderWrapper(backend, store_local=wrapper_config.\n store_local, store_remote=wrapper_config.store_remote,\n store_synchronous=wrapper_config.store_synchronous)\n", "self.clock.looping_call(self._start_update_recently_accessed,\n UPDATE_RECENTLY_ACCESSED_TS)\n", "storage_providers.append(provider)\n" ]
[ 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_2(self, VAR_8, VAR_10, VAR_11):...\n", "\"\"\"docstring\"\"\"\n", "VAR_8 = minisix.long(str(VAR_8), VAR_11)\n", "if VAR_10 == 10:\n", "return str(VAR_8)\n", "return self._convertDecimalToBase(VAR_8, VAR_10)\n" ]
[ "def _convertBaseToBase(self, number, toBase, fromBase):...\n", "\"\"\"docstring\"\"\"\n", "number = minisix.long(str(number), fromBase)\n", "if toBase == 10:\n", "return str(number)\n", "return self._convertDecimalToBase(number, toBase)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_39(VAR_62, VAR_101=None, VAR_102=None, VAR_103=False):...\n", "\"\"\"docstring\"\"\"\n", "from frappe.model.create_new import get_new_doc\n", "return get_new_doc(VAR_62, VAR_101, VAR_102, VAR_103=as_dict)\n" ]
[ "def new_doc(doctype, parent_doc=None, parentfield=None, as_dict=False):...\n", "\"\"\"docstring\"\"\"\n", "from frappe.model.create_new import get_new_doc\n", "return get_new_doc(doctype, parent_doc, parentfield, as_dict=as_dict)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "ImportFrom'", "Return'" ]
[ "@property...\n", "return True\n" ]
[ "@property...\n", "return True\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_24(VAR_21):...\n", "\"\"\"docstring\"\"\"\n", "VAR_49 = 'string'\n", "VAR_50 = VAR_21.add_parser('run', description=run_msg, formatter_class=\n argparse.RawTextHelpFormatter)\n", "VAR_50.add_argument('--dir', type=str, required=True, help=\n 'directory containing the SavedModel to execute')\n", "VAR_50.add_argument('--tag_set', type=str, required=True, help=\n \"tag-set of graph in SavedModel to load, separated by ','\")\n", "VAR_50.add_argument('--signature_def', type=str, required=True, metavar=\n 'SIGNATURE_DEF_KEY', help='key of SignatureDef to run')\n", "VAR_51 = 'string'\n", "VAR_50.add_argument('--inputs', type=str, default='', help=msg)\n", "VAR_51 = 'string'\n", "VAR_50.add_argument('--input_exprs', type=str, default='', help=msg)\n", "VAR_51 = 'string'\n", "VAR_50.add_argument('--input_examples', type=str, default='', help=msg)\n", "VAR_50.add_argument('--outdir', type=str, default=None, help=\n 'if specified, output tensor(s) will be saved to given directory')\n", "VAR_50.add_argument('--overwrite', action='store_true', help=\n 'if set, output file will be overwritten if it already exists.')\n", "VAR_50.add_argument('--tf_debug', action='store_true', help=\n 'if set, will use TensorFlow Debugger (tfdbg) to watch the intermediate Tensors and runtime GraphDefs while running the SavedModel.'\n )\n", "VAR_50.add_argument('--worker', type=str, default=None, help=\n 'if specified, a Session will be run on the worker. Valid worker specification is a bns or gRPC path.'\n )\n", "VAR_50.add_argument('--init_tpu', action='store_true', default=None, help=\n 'if specified, tpu.initialize_system will be called on the Session. This option should be only used if the worker is a TPU job.'\n )\n", "VAR_50.set_defaults(func=run)\n" ]
[ "def add_run_subparser(subparsers):...\n", "\"\"\"docstring\"\"\"\n", "run_msg = \"\"\"Usage example:\nTo run input tensors from files through a MetaGraphDef and save the output tensors to files:\n$saved_model_cli show --dir /tmp/saved_model --tag_set serve \\\\\n --signature_def serving_default \\\\\n --inputs input1_key=/tmp/124.npz[x],input2_key=/tmp/123.npy \\\\\n --input_exprs 'input3_key=np.ones(2)' \\\\\n --input_examples 'input4_key=[{\"id\":[26],\"weights\":[0.5, 0.5]}]' \\\\\n --outdir=/out\n\nFor more information about input file format, please see:\nhttps://www.tensorflow.org/guide/saved_model_cli\n\"\"\"\n", "parser_run = subparsers.add_parser('run', description=run_msg,\n formatter_class=argparse.RawTextHelpFormatter)\n", "parser_run.add_argument('--dir', type=str, required=True, help=\n 'directory containing the SavedModel to execute')\n", "parser_run.add_argument('--tag_set', type=str, required=True, help=\n \"tag-set of graph in SavedModel to load, separated by ','\")\n", "parser_run.add_argument('--signature_def', type=str, required=True, metavar\n ='SIGNATURE_DEF_KEY', help='key of SignatureDef to run')\n", "msg = (\n \"Loading inputs from files, in the format of '<input_key>=<filename>, or '<input_key>=<filename>[<variable_name>]', separated by ';'. The file format can only be from .npy, .npz or pickle.\"\n )\n", "parser_run.add_argument('--inputs', type=str, default='', help=msg)\n", "msg = (\n 'Specifying inputs by python expressions, in the format of \"<input_key>=\\'<python expression>\\'\", separated by \\';\\'. numpy module is available as \\'np\\'. Will override duplicate input keys from --inputs option.'\n )\n", "parser_run.add_argument('--input_exprs', type=str, default='', help=msg)\n", "msg = (\n 'Specifying tf.Example inputs as list of dictionaries. For example: <input_key>=[{feature0:value_list,feature1:value_list}]. Use \";\" to separate input keys. Will override duplicate input keys from --inputs and --input_exprs option.'\n )\n", "parser_run.add_argument('--input_examples', type=str, default='', help=msg)\n", "parser_run.add_argument('--outdir', type=str, default=None, help=\n 'if specified, output tensor(s) will be saved to given directory')\n", "parser_run.add_argument('--overwrite', action='store_true', help=\n 'if set, output file will be overwritten if it already exists.')\n", "parser_run.add_argument('--tf_debug', action='store_true', help=\n 'if set, will use TensorFlow Debugger (tfdbg) to watch the intermediate Tensors and runtime GraphDefs while running the SavedModel.'\n )\n", "parser_run.add_argument('--worker', type=str, default=None, help=\n 'if specified, a Session will be run on the worker. Valid worker specification is a bns or gRPC path.'\n )\n", "parser_run.add_argument('--init_tpu', action='store_true', default=None,\n help=\n 'if specified, tpu.initialize_system will be called on the Session. This option should be only used if the worker is a TPU job.'\n )\n", "parser_run.set_defaults(func=run)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_29(self, VAR_10=None, VAR_11=None, VAR_96=None):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_11 is None:\n", "VAR_11 = []\n", "if VAR_96 is None:\n", "VAR_96 = {}\n", "return URL(VAR_14=self.settings.controller, VAR_10=f, VAR_11=args, VAR_96=vars)\n" ]
[ "def url(self, f=None, args=None, vars=None):...\n", "\"\"\"docstring\"\"\"\n", "if args is None:\n", "args = []\n", "if vars is None:\n", "vars = {}\n", "return URL(c=self.settings.controller, f=f, args=args, vars=vars)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_21(self):...\n", "\"\"\"docstring\"\"\"\n", "return u'%s-%s' % (self.queue.slug, self.id)\n" ]
[ "def _get_ticket_for_url(self):...\n", "\"\"\"docstring\"\"\"\n", "return u'%s-%s' % (self.queue.slug, self.id)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_4(self, VAR_13):...\n", "if not VAR_13:\n", "if self.required:\n", "parse_query(VAR_13)\n", "report_error()\n", "return ''\n", "return VAR_13\n" ]
[ "def clean(self, value):...\n", "if not value:\n", "if self.required:\n", "parse_query(value)\n", "report_error()\n", "return ''\n", "return value\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Condition", "Expr'", "Expr'", "Return'", "Return'" ]
[ "\"\"\"string\"\"\"\n", "from django.contrib.auth.models import Permission\n", "from django.contrib.auth import get_user_model\n", "from django.contrib.contenttypes.models import ContentType\n", "from django.core.exceptions import ObjectDoesNotExist, ValidationError\n", "from django.db import models\n", "from django.conf import settings\n", "from django.utils import timezone\n", "from django.utils.translation import ugettext_lazy as _, ugettext\n", "from io import StringIO\n", "import re\n", "import os\n", "import mimetypes\n", "import datetime\n", "from django.utils.safestring import mark_safe\n", "from markdown import markdown\n", "from markdown.extensions import Extension\n", "import uuid\n", "from helpdesk import settings as helpdesk_settings\n", "from .validators import validate_file_extension\n", "from .templated_email import send_templated_mail\n", "def FUNC_0(VAR_0):...\n", "if VAR_0:\n", "VAR_0 = '{0:02d}h:{1:02d}m'.format(VAR_0.seconds // 3600, VAR_0.seconds % \n 3600 // 60)\n", "VAR_0 = ''\n", "return VAR_0\n" ]
[ "\"\"\"\ndjango-helpdesk - A Django powered ticket tracker for small enterprise.\n\n(c) Copyright 2008 Jutda. All Rights Reserved. See LICENSE for details.\n\nmodels.py - Model (and hence database) definitions. This is the core of the\n helpdesk structure.\n\"\"\"\n", "from django.contrib.auth.models import Permission\n", "from django.contrib.auth import get_user_model\n", "from django.contrib.contenttypes.models import ContentType\n", "from django.core.exceptions import ObjectDoesNotExist, ValidationError\n", "from django.db import models\n", "from django.conf import settings\n", "from django.utils import timezone\n", "from django.utils.translation import ugettext_lazy as _, ugettext\n", "from io import StringIO\n", "import re\n", "import os\n", "import mimetypes\n", "import datetime\n", "from django.utils.safestring import mark_safe\n", "from markdown import markdown\n", "from markdown.extensions import Extension\n", "import uuid\n", "from helpdesk import settings as helpdesk_settings\n", "from .validators import validate_file_extension\n", "from .templated_email import send_templated_mail\n", "def format_time_spent(time_spent):...\n", "if time_spent:\n", "time_spent = '{0:02d}h:{1:02d}m'.format(time_spent.seconds // 3600, \n time_spent.seconds % 3600 // 60)\n", "time_spent = ''\n", "return time_spent\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Condition", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_2(VAR_3, VAR_4, VAR_5):...\n", "VAR_7 = []\n", "for c_elements in VAR_3:\n", "VAR_83 = False\n", "return VAR_7\n", "if VAR_4 == 'languages':\n", "VAR_100 = c_elements.lang_code\n", "if VAR_4 == 'custom':\n", "for inp_element in VAR_5:\n", "VAR_100 = c_elements.value\n", "VAR_100 = c_elements.name\n", "if inp_element.lower() == VAR_100.lower():\n", "if not VAR_83:\n", "VAR_83 = True\n", "VAR_7.append(c_elements)\n" ]
[ "def search_objects_remove(db_book_object, db_type, input_elements):...\n", "del_elements = []\n", "for c_elements in db_book_object:\n", "found = False\n", "return del_elements\n", "if db_type == 'languages':\n", "type_elements = c_elements.lang_code\n", "if db_type == 'custom':\n", "for inp_element in input_elements:\n", "type_elements = c_elements.value\n", "type_elements = c_elements.name\n", "if inp_element.lower() == type_elements.lower():\n", "if not found:\n", "found = True\n", "del_elements.append(c_elements)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Assign'", "Return'", "Condition", "Assign'", "Condition", "For", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Expr'" ]
[ "def FUNC_12(self):...\n", "return os.path.join(self.runtime_dir, 'notebook_cookie_secret')\n" ]
[ "def _cookie_secret_file_default(self):...\n", "return os.path.join(self.runtime_dir, 'notebook_cookie_secret')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_4(VAR_15, VAR_27=40, VAR_17=VAR_25, VAR_19=VAR_26, VAR_28=VAR_69(8203)...\n", "\"\"\"docstring\"\"\"\n", "if VAR_15.tag in VAR_25:\n", "return\n", "VAR_62 = VAR_15.get('class')\n", "if VAR_62:\n", "VAR_89 = False\n", "if VAR_15.text:\n", "VAR_62 = VAR_62.split()\n", "VAR_15.text = FUNC_6(VAR_15.text, VAR_27, VAR_28)\n", "for child in VAR_15:\n", "for avoid in VAR_19:\n", "FUNC_4(child, VAR_27=max_width, VAR_17=avoid_elements, VAR_19=avoid_classes,\n VAR_28=break_character)\n", "if avoid in VAR_62:\n", "if VAR_89:\n", "if child.tail:\n", "VAR_89 = True\n", "return\n", "child.tail = FUNC_6(child.tail, VAR_27, VAR_28)\n" ]
[ "def word_break(el, max_width=40, avoid_elements=_avoid_word_break_elements,...\n", "\"\"\"docstring\"\"\"\n", "if el.tag in _avoid_word_break_elements:\n", "return\n", "class_name = el.get('class')\n", "if class_name:\n", "dont_break = False\n", "if el.text:\n", "class_name = class_name.split()\n", "el.text = _break_text(el.text, max_width, break_character)\n", "for child in el:\n", "for avoid in avoid_classes:\n", "word_break(child, max_width=max_width, avoid_elements=avoid_elements,\n avoid_classes=avoid_classes, break_character=break_character)\n", "if avoid in class_name:\n", "if dont_break:\n", "if child.tail:\n", "dont_break = True\n", "return\n", "child.tail = _break_text(child.tail, max_width, break_character)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Return'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "For", "For", "Expr'", "Condition", "Condition", "Condition", "Assign'", "Return'", "Assign'" ]
[ "def FUNC_4(self):...\n", "\"\"\"docstring\"\"\"\n", "self.assertTrue('_known_servers_count' not in self.store.__dict__.keys())\n", "self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)\n", "self.inject_room_member(self.room, self.u_bob, Membership.JOIN)\n", "self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)\n", "self.pump()\n", "self.assertTrue('_known_servers_count' not in self.store.__dict__.keys())\n" ]
[ "def test_count_known_servers_stat_counter_disabled(self):...\n", "\"\"\"docstring\"\"\"\n", "self.assertTrue('_known_servers_count' not in self.store.__dict__.keys())\n", "self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)\n", "self.inject_room_member(self.room, self.u_bob, Membership.JOIN)\n", "self.inject_room_member(self.room, self.u_charlie.to_string(), Membership.JOIN)\n", "self.pump()\n", "self.assertTrue('_known_servers_count' not in self.store.__dict__.keys())\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_113(self):...\n", "return CLASS_0(dict(self).copy())\n" ]
[ "def copy(self):...\n", "return _dict(dict(self).copy())\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_108(self, VAR_11=None):...\n", "VAR_56 = VAR_263.request\n", "VAR_244 = VAR_263.response\n", "if not VAR_11:\n", "VAR_11 = VAR_56.args\n", "if VAR_11 and VAR_11[0] in self.rss_procedures:\n", "VAR_392 = self.call_service_function(self.rss_procedures[VAR_11[0]], *\n VAR_11[1:], **dict(request.vars))\n", "self.error()\n", "VAR_244.headers['Content-Type'] = 'application/rss+xml'\n", "return serializers.rss(VAR_392)\n" ]
[ "def serve_rss(self, args=None):...\n", "request = current.request\n", "response = current.response\n", "if not args:\n", "args = request.args\n", "if args and args[0] in self.rss_procedures:\n", "feed = self.call_service_function(self.rss_procedures[args[0]], *args[1:],\n **dict(request.vars))\n", "self.error()\n", "response.headers['Content-Type'] = 'application/rss+xml'\n", "return serializers.rss(feed)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Return'" ]
[ "def __call__(self, VAR_10, VAR_13, VAR_14, VAR_12):...\n", "VAR_11 = VAR_13.get('request')\n", "VAR_10 = self.traverse(VAR_10, VAR_11, VAR_12)\n", "if VAR_14 is False:\n", "return VAR_10\n", "if getattr(VAR_10, '__call__', VAR_0) is not VAR_0 or isinstance(VAR_10, type):\n", "return VAR_10()\n", "return VAR_10\n" ]
[ "def __call__(self, base, econtext, call, path_items):...\n", "request = econtext.get('request')\n", "base = self.traverse(base, request, path_items)\n", "if call is False:\n", "return base\n", "if getattr(base, '__call__', _marker) is not _marker or isinstance(base, type):\n", "return base()\n", "return base\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_2(self, VAR_1, VAR_2=None):...\n", "" ]
[ "def get(self, arg, word=None):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "\"\"\"string\"\"\"\n", "import StringIO\n", "import json\n", "import logging\n", "import re\n", "import time\n", "from django.conf import settings\n", "from django.views.decorators.csrf import ensure_csrf_cookie, csrf_exempt\n", "from django.views.decorators.http import require_POST, require_http_methods\n", "from django.views.decorators.cache import cache_control\n", "from django.core.exceptions import ValidationError, PermissionDenied\n", "from django.core.mail.message import EmailMessage\n", "from django.core.exceptions import ObjectDoesNotExist\n", "from django.db import IntegrityError, transaction\n", "from django.core.urlresolvers import reverse\n", "from django.core.validators import validate_email\n", "from django.utils.translation import ugettext as _\n", "from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseNotFound\n", "from django.utils.html import strip_tags\n", "from django.shortcuts import redirect\n", "import string\n", "import random\n", "import unicodecsv\n", "import decimal\n", "from student import auth\n", "from student.roles import GlobalStaff, CourseSalesAdminRole, CourseFinanceAdminRole\n", "from util.file import store_uploaded_file, course_and_time_based_filename_generator, FileValidationException, UniversalNewlineIterator\n", "from util.json_request import JsonResponse, JsonResponseBadRequest\n", "from instructor.views.instructor_task_helpers import extract_email_features, extract_task_features\n", "from microsite_configuration import microsite\n", "from courseware.access import has_access\n", "from courseware.courses import get_course_with_access, get_course_by_id\n", "from django.contrib.auth.models import User\n", "from django_comment_client.utils import has_forum_access\n", "from django_comment_common.models import Role, FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA\n", "from edxmako.shortcuts import render_to_string\n", "from courseware.models import StudentModule\n", "from shoppingcart.models import Coupon, CourseRegistrationCode, RegistrationCodeRedemption, Invoice, CourseMode, CourseRegistrationCodeInvoiceItem\n", "from student.models import CourseEnrollment, unique_id_for_user, anonymous_id_for_user, UserProfile, Registration, EntranceExamConfiguration, ManualEnrollmentAudit, UNENROLLED_TO_ALLOWEDTOENROLL, ALLOWEDTOENROLL_TO_ENROLLED, ENROLLED_TO_ENROLLED, ENROLLED_TO_UNENROLLED, UNENROLLED_TO_ENROLLED, UNENROLLED_TO_UNENROLLED, ALLOWEDTOENROLL_TO_UNENROLLED, DEFAULT_TRANSITION_STATE\n", "import instructor_task.api\n", "from instructor_task.api_helper import AlreadyRunningError\n", "from instructor_task.models import ReportStore\n", "import instructor.enrollment as enrollment\n", "from instructor.enrollment import get_user_email_language, enroll_email, send_mail_to_student, get_email_params, send_beta_role_email, unenroll_email\n", "from instructor.access import list_with_level, allow_access, revoke_access, ROLES, update_forum_role\n", "import instructor_analytics.basic\n", "import instructor_analytics.distributions\n", "import instructor_analytics.csvs\n", "import csv\n", "from openedx.core.djangoapps.user_api.preferences.api import get_user_preference, set_user_preference\n", "from openedx.core.djangolib.markup import HTML, Text\n", "from instructor.views import INVOICE_KEY\n", "from submissions import api as sub_api\n", "from certificates import api as certs_api\n", "from certificates.models import CertificateWhitelist, GeneratedCertificate, CertificateStatuses, 
CertificateInvalidation\n", "from bulk_email.models import CourseEmail, BulkEmailFlag\n", "from student.models import get_user_by_username_or_email\n", "from .tools import dump_student_extensions, dump_module_extensions, find_unit, get_student_from_identifier, require_student_from_identifier, handle_dashboard_error, parse_datetime, set_due_date_extension, strip_if_string\n", "from opaque_keys.edx.keys import CourseKey, UsageKey\n", "from opaque_keys.edx.locations import SlashSeparatedCourseKey\n", "from opaque_keys import InvalidKeyError\n", "from openedx.core.djangoapps.course_groups.cohorts import is_course_cohorted\n", "from openedx.core.djangoapps.theming import helpers as theming_helpers\n", "VAR_0 = logging.getLogger(__name__)\n", "def FUNC_0(VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "def FUNC_83(VAR_9, *VAR_2, **VAR_3):...\n", "VAR_164 = VAR_9.is_ajax() or VAR_9.META.get('HTTP_ACCEPT', '').startswith(\n 'application/json')\n", "return VAR_1(VAR_9, *VAR_2, **kwargs)\n", "VAR_74 = _('User does not exist.')\n", "return FUNC_83\n", "if VAR_164:\n", "return JsonResponse({'error': VAR_74}, 400)\n", "return HttpResponseBadRequest(VAR_74)\n" ]
[ "\"\"\"\nInstructor Dashboard API views\n\nJSON views which the instructor dashboard requests.\n\nMany of these GETs may become PUTs in the future.\n\"\"\"\n", "import StringIO\n", "import json\n", "import logging\n", "import re\n", "import time\n", "from django.conf import settings\n", "from django.views.decorators.csrf import ensure_csrf_cookie, csrf_exempt\n", "from django.views.decorators.http import require_POST, require_http_methods\n", "from django.views.decorators.cache import cache_control\n", "from django.core.exceptions import ValidationError, PermissionDenied\n", "from django.core.mail.message import EmailMessage\n", "from django.core.exceptions import ObjectDoesNotExist\n", "from django.db import IntegrityError, transaction\n", "from django.core.urlresolvers import reverse\n", "from django.core.validators import validate_email\n", "from django.utils.translation import ugettext as _\n", "from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseNotFound\n", "from django.utils.html import strip_tags\n", "from django.shortcuts import redirect\n", "import string\n", "import random\n", "import unicodecsv\n", "import decimal\n", "from student import auth\n", "from student.roles import GlobalStaff, CourseSalesAdminRole, CourseFinanceAdminRole\n", "from util.file import store_uploaded_file, course_and_time_based_filename_generator, FileValidationException, UniversalNewlineIterator\n", "from util.json_request import JsonResponse, JsonResponseBadRequest\n", "from instructor.views.instructor_task_helpers import extract_email_features, extract_task_features\n", "from microsite_configuration import microsite\n", "from courseware.access import has_access\n", "from courseware.courses import get_course_with_access, get_course_by_id\n", "from django.contrib.auth.models import User\n", "from django_comment_client.utils import has_forum_access\n", "from django_comment_common.models import Role, FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA\n", "from edxmako.shortcuts import render_to_string\n", "from courseware.models import StudentModule\n", "from shoppingcart.models import Coupon, CourseRegistrationCode, RegistrationCodeRedemption, Invoice, CourseMode, CourseRegistrationCodeInvoiceItem\n", "from student.models import CourseEnrollment, unique_id_for_user, anonymous_id_for_user, UserProfile, Registration, EntranceExamConfiguration, ManualEnrollmentAudit, UNENROLLED_TO_ALLOWEDTOENROLL, ALLOWEDTOENROLL_TO_ENROLLED, ENROLLED_TO_ENROLLED, ENROLLED_TO_UNENROLLED, UNENROLLED_TO_ENROLLED, UNENROLLED_TO_UNENROLLED, ALLOWEDTOENROLL_TO_UNENROLLED, DEFAULT_TRANSITION_STATE\n", "import instructor_task.api\n", "from instructor_task.api_helper import AlreadyRunningError\n", "from instructor_task.models import ReportStore\n", "import instructor.enrollment as enrollment\n", "from instructor.enrollment import get_user_email_language, enroll_email, send_mail_to_student, get_email_params, send_beta_role_email, unenroll_email\n", "from instructor.access import list_with_level, allow_access, revoke_access, ROLES, update_forum_role\n", "import instructor_analytics.basic\n", "import instructor_analytics.distributions\n", "import instructor_analytics.csvs\n", "import csv\n", "from openedx.core.djangoapps.user_api.preferences.api import get_user_preference, set_user_preference\n", "from openedx.core.djangolib.markup import HTML, Text\n", "from instructor.views import INVOICE_KEY\n", "from submissions import api as sub_api\n", "from certificates import 
api as certs_api\n", "from certificates.models import CertificateWhitelist, GeneratedCertificate, CertificateStatuses, CertificateInvalidation\n", "from bulk_email.models import CourseEmail, BulkEmailFlag\n", "from student.models import get_user_by_username_or_email\n", "from .tools import dump_student_extensions, dump_module_extensions, find_unit, get_student_from_identifier, require_student_from_identifier, handle_dashboard_error, parse_datetime, set_due_date_extension, strip_if_string\n", "from opaque_keys.edx.keys import CourseKey, UsageKey\n", "from opaque_keys.edx.locations import SlashSeparatedCourseKey\n", "from opaque_keys import InvalidKeyError\n", "from openedx.core.djangoapps.course_groups.cohorts import is_course_cohorted\n", "from openedx.core.djangoapps.theming import helpers as theming_helpers\n", "log = logging.getLogger(__name__)\n", "def common_exceptions_400(func):...\n", "\"\"\"docstring\"\"\"\n", "def wrapped(request, *args, **kwargs):...\n", "use_json = request.is_ajax() or request.META.get('HTTP_ACCEPT', '').startswith(\n 'application/json')\n", "return func(request, *args, **kwargs)\n", "message = _('User does not exist.')\n", "return wrapped\n", "if use_json:\n", "return JsonResponse({'error': message}, 400)\n", "return HttpResponseBadRequest(message)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "FunctionDef'", "Docstring", "FunctionDef'", "Assign'", "Return'", "Assign'", "Return'", "Condition", "Return'", "Return'" ]
[ "@VAR_2.route('/me', methods=['GET', 'POST'])...\n", "VAR_48 = calibre_db.speaking_language()\n", "VAR_47 = babel.list_translations() + [LC('en')]\n", "VAR_44 = VAR_0['kobo'] and config.config_kobo_sync\n", "if VAR_0['oauth'] and config.config_login_type == 2:\n", "VAR_46 = get_oauth_status()\n", "VAR_46 = None\n", "VAR_45 = VAR_100\n", "VAR_45 = {}\n", "if request.method == 'POST':\n", "FUNC_67(VAR_44, VAR_45, VAR_46, VAR_47, VAR_48)\n", "return render_title_template('user_edit.html', VAR_47=translations, profile\n =1, VAR_48=languages, VAR_116=current_user, VAR_44=kobo_support,\n VAR_149=_(u\"%(name)s's profile\", name=current_user.name), VAR_9='me',\n registered_oauth=local_oauth_check, VAR_46=oauth_status)\n" ]
[ "@web.route('/me', methods=['GET', 'POST'])...\n", "languages = calibre_db.speaking_language()\n", "translations = babel.list_translations() + [LC('en')]\n", "kobo_support = feature_support['kobo'] and config.config_kobo_sync\n", "if feature_support['oauth'] and config.config_login_type == 2:\n", "oauth_status = get_oauth_status()\n", "oauth_status = None\n", "local_oauth_check = oauth_check\n", "local_oauth_check = {}\n", "if request.method == 'POST':\n", "change_profile(kobo_support, local_oauth_check, oauth_status, translations,\n languages)\n", "return render_title_template('user_edit.html', translations=translations,\n profile=1, languages=languages, content=current_user, kobo_support=\n kobo_support, title=_(u\"%(name)s's profile\", name=current_user.name),\n page='me', registered_oauth=local_oauth_check, oauth_status=oauth_status)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Return'" ]
[ "import os\n", "import shutil\n", "import tempfile\n", "from binascii import unhexlify\n", "from io import BytesIO\n", "from typing import Optional\n", "from urllib import parse\n", "from mock import Mock\n", "import attr\n", "from parameterized import parameterized_class\n", "from PIL import Image as Image\n", "from twisted.internet import defer\n", "from twisted.internet.defer import Deferred\n", "from synapse.logging.context import make_deferred_yieldable\n", "from synapse.rest.media.v1._base import FileInfo\n", "from synapse.rest.media.v1.filepath import MediaFilePaths\n", "from synapse.rest.media.v1.media_storage import MediaStorage\n", "from synapse.rest.media.v1.storage_provider import FileStorageProviderBackend\n", "from tests import unittest\n", "from tests.server import FakeSite, make_request\n", "VAR_0 = True\n", "def FUNC_0(self, VAR_1, VAR_2, VAR_3):...\n", "self.test_dir = tempfile.mkdtemp(prefix='synapse-tests-')\n", "self.addCleanup(shutil.rmtree, self.test_dir)\n", "self.primary_base_path = os.path.join(self.test_dir, 'primary')\n", "self.secondary_base_path = os.path.join(self.test_dir, 'secondary')\n", "VAR_3.config.media_store_path = self.primary_base_path\n", "VAR_15 = [FileStorageProviderBackend(VAR_3, self.secondary_base_path)]\n", "self.filepaths = MediaFilePaths(self.primary_base_path)\n", "self.media_storage = MediaStorage(VAR_3, self.primary_base_path, self.\n filepaths, VAR_15)\n", "def FUNC_1(self):...\n", "VAR_16 = 'some_media_id'\n", "VAR_17 = 'Test\\n'\n", "VAR_18 = self.filepaths.local_media_filepath_rel(VAR_16)\n", "VAR_19 = os.path.join(self.secondary_base_path, VAR_18)\n", "os.makedirs(os.path.dirname(VAR_19))\n", "f.write(VAR_17)\n", "VAR_20 = FileInfo(None, VAR_16)\n", "VAR_21 = defer.ensureDeferred(self.media_storage.\n ensure_media_is_in_local_cache(VAR_20))\n", "self.wait_on_thread(VAR_21)\n", "VAR_22 = self.get_success(VAR_21)\n", "self.assertTrue(os.path.exists(VAR_22))\n", "self.assertEquals(os.path.commonprefix([self.primary_base_path, VAR_22]),\n self.primary_base_path)\n", "VAR_36 = f.read()\n", "self.assertEqual(VAR_17, VAR_36)\n", "\"\"\"string\"\"\"\n", "VAR_4 = attr.ib(type=bytes)\n", "VAR_5 = attr.ib(type=bytes)\n", "VAR_6 = attr.ib(type=bytes)\n", "VAR_7 = attr.ib(type=Optional[bytes])\n", "VAR_8 = attr.ib(type=Optional[bytes])\n", "VAR_9 = attr.ib(default=True, type=bool)\n", "VAR_10 = True\n", "VAR_11 = '@test:user'\n", "def FUNC_2(self, VAR_1, VAR_2):...\n", "self.fetches = []\n", "def FUNC_10(VAR_23, VAR_24, VAR_25, VAR_26=None, VAR_27=None):...\n", "\"\"\"docstring\"\"\"\n", "def FUNC_11(VAR_37):...\n", "VAR_4, VAR_39 = VAR_37\n", "VAR_25.write(VAR_4)\n", "return VAR_39\n" ]
[ "import os\n", "import shutil\n", "import tempfile\n", "from binascii import unhexlify\n", "from io import BytesIO\n", "from typing import Optional\n", "from urllib import parse\n", "from mock import Mock\n", "import attr\n", "from parameterized import parameterized_class\n", "from PIL import Image as Image\n", "from twisted.internet import defer\n", "from twisted.internet.defer import Deferred\n", "from synapse.logging.context import make_deferred_yieldable\n", "from synapse.rest.media.v1._base import FileInfo\n", "from synapse.rest.media.v1.filepath import MediaFilePaths\n", "from synapse.rest.media.v1.media_storage import MediaStorage\n", "from synapse.rest.media.v1.storage_provider import FileStorageProviderBackend\n", "from tests import unittest\n", "from tests.server import FakeSite, make_request\n", "needs_threadpool = True\n", "def prepare(self, reactor, clock, hs):...\n", "self.test_dir = tempfile.mkdtemp(prefix='synapse-tests-')\n", "self.addCleanup(shutil.rmtree, self.test_dir)\n", "self.primary_base_path = os.path.join(self.test_dir, 'primary')\n", "self.secondary_base_path = os.path.join(self.test_dir, 'secondary')\n", "hs.config.media_store_path = self.primary_base_path\n", "storage_providers = [FileStorageProviderBackend(hs, self.secondary_base_path)]\n", "self.filepaths = MediaFilePaths(self.primary_base_path)\n", "self.media_storage = MediaStorage(hs, self.primary_base_path, self.\n filepaths, storage_providers)\n", "def test_ensure_media_is_in_local_cache(self):...\n", "media_id = 'some_media_id'\n", "test_body = 'Test\\n'\n", "rel_path = self.filepaths.local_media_filepath_rel(media_id)\n", "secondary_path = os.path.join(self.secondary_base_path, rel_path)\n", "os.makedirs(os.path.dirname(secondary_path))\n", "f.write(test_body)\n", "file_info = FileInfo(None, media_id)\n", "x = defer.ensureDeferred(self.media_storage.ensure_media_is_in_local_cache(\n file_info))\n", "self.wait_on_thread(x)\n", "local_path = self.get_success(x)\n", "self.assertTrue(os.path.exists(local_path))\n", "self.assertEquals(os.path.commonprefix([self.primary_base_path, local_path]\n ), self.primary_base_path)\n", "body = f.read()\n", "self.assertEqual(test_body, body)\n", "\"\"\"An image for testing thumbnailing with the expected results\n\n Attributes:\n data: The raw image to thumbnail\n content_type: The type of the image as a content type, e.g. \"image/png\"\n extension: The extension associated with the format, e.g. \".png\"\n expected_cropped: The expected bytes from cropped thumbnailing, or None if\n test should just check for success.\n expected_scaled: The expected bytes from scaled thumbnailing, or None if\n test should just check for a valid image returned.\n \"\"\"\n", "data = attr.ib(type=bytes)\n", "content_type = attr.ib(type=bytes)\n", "extension = attr.ib(type=bytes)\n", "expected_cropped = attr.ib(type=Optional[bytes])\n", "expected_scaled = attr.ib(type=Optional[bytes])\n", "expected_found = attr.ib(default=True, type=bool)\n", "hijack_auth = True\n", "user_id = '@test:user'\n", "def make_homeserver(self, reactor, clock):...\n", "self.fetches = []\n", "def get_file(destination, path, output_stream, args=None, max_size=None):...\n", "\"\"\"docstring\"\"\"\n", "def write_to(r):...\n", "data, response = r\n", "output_stream.write(data)\n", "return response\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "FunctionDef'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "FunctionDef'", "Docstring", "FunctionDef'", "Assign'", "Expr'", "Return'" ]
[ "@VAR_2.route('/ajax/view', methods=['POST'])...\n", "VAR_55 = request.get_json()\n", "for VAR_113 in VAR_55:\n", "VAR_3.error('Could not save view_settings: %r %r: %e', request, VAR_55, ex)\n", "return '1', 200\n", "for VAR_111 in VAR_55[VAR_113]:\n", "return 'Invalid request', 400\n", "VAR_87.set_view_property(VAR_113, VAR_111, VAR_55[VAR_113][VAR_111])\n" ]
[ "@web.route('/ajax/view', methods=['POST'])...\n", "to_save = request.get_json()\n", "for element in to_save:\n", "log.error('Could not save view_settings: %r %r: %e', request, to_save, ex)\n", "return '1', 200\n", "for param in to_save[element]:\n", "return 'Invalid request', 400\n", "current_user.set_view_property(element, param, to_save[element][param])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "For", "Expr'", "Return'", "For", "Return'", "Expr'" ]
[ "def FUNC_1(self):...\n", "return FUNC_1(self.answer)\n" ]
[ "def get_markdown(self):...\n", "return get_markdown(self.answer)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@VAR_25.whitelist(allow_guest=True)...\n", "VAR_25.local.login_manager.logout()\n", "VAR_25.db.commit()\n" ]
[ "@frappe.whitelist(allow_guest=True)...\n", "frappe.local.login_manager.logout()\n", "frappe.db.commit()\n" ]
[ 0, 0, 0 ]
[ "Condition", "Expr'", "Expr'" ]
[ "def FUNC_83(*VAR_2, **VAR_3):...\n", "VAR_9 = VAR_2[0]\n", "VAR_65 = get_course_by_id(CourseKey.from_string(VAR_3['course_id']))\n", "if has_access(VAR_9.user, VAR_4, VAR_65):\n", "return VAR_1(*VAR_2, **kwargs)\n", "return HttpResponseForbidden()\n" ]
[ "def wrapped(*args, **kwargs):...\n", "request = args[0]\n", "course = get_course_by_id(CourseKey.from_string(kwargs['course_id']))\n", "if has_access(request.user, level, course):\n", "return func(*args, **kwargs)\n", "return HttpResponseForbidden()\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Return'", "Return'" ]
[ "@CLASS_4('version')...\n", "\"\"\"docstring\"\"\"\n", "VAR_20 = jinja.render('version.html', title='Version info', version=version\n .version(), copyright=qutebrowser.__copyright__)\n", "return 'text/html', VAR_20\n" ]
[ "@add_handler('version')...\n", "\"\"\"docstring\"\"\"\n", "html = jinja.render('version.html', title='Version info', version=version.\n version(), copyright=qutebrowser.__copyright__)\n", "return 'text/html', html\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_20():...\n", "VAR_14 = request.view_args['locale']\n", "VAR_31 = request.view_args['domain']\n", "VAR_89 = util.flask.check_etag(FUNC_17(request.view_args['locale'], request\n .view_args['domain']))\n", "VAR_34 = FUNC_18(VAR_14, VAR_31)\n", "VAR_90 = VAR_34 is None or util.flask.check_lastmodified(VAR_34)\n", "return VAR_89 and VAR_90\n" ]
[ "def _check_etag_and_lastmodified_for_i18n():...\n", "locale = request.view_args['locale']\n", "domain = request.view_args['domain']\n", "etag_ok = util.flask.check_etag(_compute_etag_for_i18n(request.view_args[\n 'locale'], request.view_args['domain']))\n", "lastmodified = _compute_date_for_i18n(locale, domain)\n", "lastmodified_ok = lastmodified is None or util.flask.check_lastmodified(\n lastmodified)\n", "return etag_ok and lastmodified_ok\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def __init__(self, VAR_27, VAR_9):...\n", "self._base_builder = VAR_27\n", "self._event_id = VAR_9\n" ]
[ "def __init__(self, base_builder, event_id):...\n", "self._base_builder = base_builder\n", "self._event_id = event_id\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "def FUNC_8(VAR_9: Union[int, float, str]) ->Union[int, float]:...\n", "if '.' in str(VAR_9):\n", "return float(VAR_9)\n", "return int(VAR_9)\n" ]
[ "def _int_or_float_type(x: Union[int, float, str]) ->Union[int, float]:...\n", "if '.' in str(x):\n", "return float(x)\n", "return int(x)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_3():...\n", "assert utils.snake_case('httpResponseLowerCamel'\n ) == 'http_response_lower_camel'\n" ]
[ "def test_snake_case_from_camel():...\n", "assert utils.snake_case('httpResponseLowerCamel'\n ) == 'http_response_lower_camel'\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assert'" ]