lines (sequencelengths 1–383) | raw_lines (sequencelengths 1–383) | label (sequencelengths 1–383) | type (sequencelengths 1–383) |
---|---|---|---|
[
"@VAR_1.get('/', response_model=List[AModel], operation_id='getUserList')...\n",
"\"\"\"docstring\"\"\"\n",
"return\n"
] | [
"@test_router.get('/', response_model=List[AModel], operation_id='getUserList')...\n",
"\"\"\"docstring\"\"\"\n",
"return\n"
] | [
5,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_42():...\n",
"\"\"\"docstring\"\"\"\n",
"import operator\n",
"import os\n",
"import hashlib\n",
"VAR_3 = FUNC_5()\n",
"if is_gae:\n",
"VAR_161 = 'dbold' if 'old' in (request.args(1) or '') else 'dbnew'\n",
"VAR_161 = request.args(1) or 'new'\n",
"VAR_101 = {}\n",
"VAR_101['status'] = FUNC_43(VAR_3)\n",
"VAR_101['errmessage'] = T('No ticket_storage.txt found under /private folder')\n",
"VAR_101['errlink'\n ] = 'http://web2py.com/books/default/chapter/29/13#Collecting-tickets'\n",
"if VAR_161 == 'new':\n",
"VAR_162 = apath('%s/errors' % VAR_3, VAR_122=request)\n",
"if VAR_161 == 'dbnew':\n",
"VAR_163 = []\n",
"VAR_162 = apath('%s/errors' % VAR_3, VAR_122=request)\n",
"if VAR_161 == 'dbold':\n",
"for VAR_70 in request.vars:\n",
"VAR_192, VAR_193 = FUNC_43(VAR_3)\n",
"VAR_192, VAR_193 = FUNC_43(VAR_3)\n",
"for VAR_70 in request.vars:\n",
"if VAR_70[:7] == 'delete_':\n",
"VAR_164 = dict()\n",
"VAR_163 = []\n",
"for VAR_70 in request.vars:\n",
"if VAR_70[:7] == 'delete_' and not VAR_70 == 'delete_all}':\n",
"VAR_209 = lambda p: os.stat(apath('%s/errors/%s' % (VAR_3, p), VAR_122=request)\n ).st_mtime\n",
"VAR_163.append(VAR_70[7:])\n",
"for fn in listdir(VAR_162, '^[a-fA-F0-9.\\\\-]+$'):\n",
"for VAR_70 in request.vars:\n",
"if VAR_70[:7] == 'delete_':\n",
"VAR_206 = VAR_192(VAR_193.id > 0).select(VAR_193.ticket_id, VAR_193.\n created_datetime, orderby=~tk_table.created_datetime)\n",
"os.unlink(apath('%s/errors/%s' % (VAR_3, VAR_70[7:]), VAR_122=request))\n",
"VAR_207 = sorted(listdir(apath('%s/errors/' % VAR_3, VAR_122=request),\n '^\\\\w.*'), VAR_143=func, reverse=True)\n",
"VAR_190 = os.path.join(VAR_162, fn)\n",
"VAR_165 = [(x['count'], x) for x in VAR_164.values()]\n",
"if VAR_70[:7] == 'delete_':\n",
"VAR_164 = dict()\n",
"VAR_192(VAR_193.ticket_id == VAR_70[7:]).delete()\n",
"VAR_207 = [row.ticket_id for row in VAR_206]\n",
"return dict(VAR_3=app, VAR_207=tickets, VAR_161=method, VAR_101=db_ready)\n",
"if not os.path.isfile(VAR_190):\n",
"VAR_165.sort(VAR_143=operator.itemgetter(0), reverse=True)\n",
"VAR_163.append(VAR_70[7:])\n",
"for fn in VAR_192(VAR_193.id > 0).select():\n",
"VAR_192.commit()\n",
"VAR_208 = dict([(row.ticket_id, row.created_datetime) for row in VAR_206])\n",
"VAR_205 = FUNC_2(VAR_190, 'rb')\n",
"VAR_191 = hashlib.md5(to_bytes(VAR_125['traceback'])).hexdigest()\n",
"return dict(errors=[x[1] for x in decorated], VAR_3=app, VAR_161=method,\n VAR_101=db_ready)\n",
"VAR_165 = [(x['count'], x) for x in VAR_164.values()]\n",
"VAR_125 = pickle.loads(fn.ticket_data)\n",
"VAR_192(VAR_193.id == fn.id).delete()\n",
"return dict(VAR_3=app, VAR_207=tickets, VAR_161=method, VAR_208=times,\n VAR_101=db_ready)\n",
"VAR_125 = pickle.load(VAR_205)\n",
"VAR_205.close()\n",
"if VAR_191 in VAR_163:\n",
"VAR_165.sort(VAR_143=operator.itemgetter(0), reverse=True)\n",
"VAR_191 = hashlib.md5(VAR_125['traceback']).hexdigest()\n",
"VAR_192.commit()\n",
"os.unlink(VAR_190)\n",
"VAR_164[VAR_191]['count'] += 1\n",
"VAR_218 = VAR_125['traceback'].split('\\n')\n",
"return dict(errors=[x[1] for x in decorated], VAR_3=app, VAR_161=method,\n VAR_101=db_ready)\n",
"if VAR_191 in VAR_163:\n",
"VAR_219 = VAR_218[-2] if len(VAR_218) > 1 else 'unknown'\n",
"VAR_192(VAR_193.id == fn.id).delete()\n",
"VAR_164[VAR_191]['count'] += 1\n",
"VAR_218 = VAR_125['traceback'].split('\\n')\n",
"VAR_220 = os.path.split(VAR_125['layer'])[1]\n",
"VAR_192.commit()\n",
"VAR_219 = VAR_218[-2]\n",
"VAR_164[VAR_191] = dict(count=1, pickel=error, causer=error_causer, VAR_219\n =last_line, VAR_191=hash, VAR_113=fn)\n",
"VAR_220 = os.path.split(VAR_125['layer'])[1]\n",
"VAR_164[VAR_191] = dict(count=1, pickel=error, causer=error_causer, VAR_219\n =last_line, VAR_191=hash, VAR_113=fn.ticket_id)\n"
] | [
"def errors():...\n",
"\"\"\"docstring\"\"\"\n",
"import operator\n",
"import os\n",
"import hashlib\n",
"app = get_app()\n",
"if is_gae:\n",
"method = 'dbold' if 'old' in (request.args(1) or '') else 'dbnew'\n",
"method = request.args(1) or 'new'\n",
"db_ready = {}\n",
"db_ready['status'] = get_ticket_storage(app)\n",
"db_ready['errmessage'] = T('No ticket_storage.txt found under /private folder')\n",
"db_ready['errlink'\n ] = 'http://web2py.com/books/default/chapter/29/13#Collecting-tickets'\n",
"if method == 'new':\n",
"errors_path = apath('%s/errors' % app, r=request)\n",
"if method == 'dbnew':\n",
"delete_hashes = []\n",
"errors_path = apath('%s/errors' % app, r=request)\n",
"if method == 'dbold':\n",
"for item in request.vars:\n",
"tk_db, tk_table = get_ticket_storage(app)\n",
"tk_db, tk_table = get_ticket_storage(app)\n",
"for item in request.vars:\n",
"if item[:7] == 'delete_':\n",
"hash2error = dict()\n",
"delete_hashes = []\n",
"for item in request.vars:\n",
"if item[:7] == 'delete_' and not item == 'delete_all}':\n",
"func = lambda p: os.stat(apath('%s/errors/%s' % (app, p), r=request)).st_mtime\n",
"delete_hashes.append(item[7:])\n",
"for fn in listdir(errors_path, '^[a-fA-F0-9.\\\\-]+$'):\n",
"for item in request.vars:\n",
"if item[:7] == 'delete_':\n",
"tickets_ = tk_db(tk_table.id > 0).select(tk_table.ticket_id, tk_table.\n created_datetime, orderby=~tk_table.created_datetime)\n",
"os.unlink(apath('%s/errors/%s' % (app, item[7:]), r=request))\n",
"tickets = sorted(listdir(apath('%s/errors/' % app, r=request), '^\\\\w.*'),\n key=func, reverse=True)\n",
"fullpath = os.path.join(errors_path, fn)\n",
"decorated = [(x['count'], x) for x in hash2error.values()]\n",
"if item[:7] == 'delete_':\n",
"hash2error = dict()\n",
"tk_db(tk_table.ticket_id == item[7:]).delete()\n",
"tickets = [row.ticket_id for row in tickets_]\n",
"return dict(app=app, tickets=tickets, method=method, db_ready=db_ready)\n",
"if not os.path.isfile(fullpath):\n",
"decorated.sort(key=operator.itemgetter(0), reverse=True)\n",
"delete_hashes.append(item[7:])\n",
"for fn in tk_db(tk_table.id > 0).select():\n",
"tk_db.commit()\n",
"times = dict([(row.ticket_id, row.created_datetime) for row in tickets_])\n",
"fullpath_file = safe_open(fullpath, 'rb')\n",
"hash = hashlib.md5(to_bytes(error['traceback'])).hexdigest()\n",
"return dict(errors=[x[1] for x in decorated], app=app, method=method,\n db_ready=db_ready)\n",
"decorated = [(x['count'], x) for x in hash2error.values()]\n",
"error = pickle.loads(fn.ticket_data)\n",
"tk_db(tk_table.id == fn.id).delete()\n",
"return dict(app=app, tickets=tickets, method=method, times=times, db_ready=\n db_ready)\n",
"error = pickle.load(fullpath_file)\n",
"fullpath_file.close()\n",
"if hash in delete_hashes:\n",
"decorated.sort(key=operator.itemgetter(0), reverse=True)\n",
"hash = hashlib.md5(error['traceback']).hexdigest()\n",
"tk_db.commit()\n",
"os.unlink(fullpath)\n",
"hash2error[hash]['count'] += 1\n",
"error_lines = error['traceback'].split('\\n')\n",
"return dict(errors=[x[1] for x in decorated], app=app, method=method,\n db_ready=db_ready)\n",
"if hash in delete_hashes:\n",
"last_line = error_lines[-2] if len(error_lines) > 1 else 'unknown'\n",
"tk_db(tk_table.id == fn.id).delete()\n",
"hash2error[hash]['count'] += 1\n",
"error_lines = error['traceback'].split('\\n')\n",
"error_causer = os.path.split(error['layer'])[1]\n",
"tk_db.commit()\n",
"last_line = error_lines[-2]\n",
"hash2error[hash] = dict(count=1, pickel=error, causer=error_causer,\n last_line=last_line, hash=hash, ticket=fn)\n",
"error_causer = os.path.split(error['layer'])[1]\n",
"hash2error[hash] = dict(count=1, pickel=error, causer=error_causer,\n last_line=last_line, hash=hash, ticket=fn.ticket_id)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Import'",
"Import'",
"Import'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"For",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Expr'",
"For",
"For",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Return'",
"Condition",
"Expr'",
"Expr'",
"For",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"AugAssign'",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Expr'",
"AugAssign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"@default('client_id')...\n",
"return os.getenv('GITHUB_CLIENT_ID', '')\n"
] | [
"@default('client_id')...\n",
"return os.getenv('GITHUB_CLIENT_ID', '')\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def __init__(self, VAR_6=None, VAR_7=None):...\n",
"self.type = VAR_6\n",
"if self.type not in ('readonly', 'latest'):\n",
"self.fallback = VAR_7\n"
] | [
"def __init__(self, type=None, fallback=None):...\n",
"self.type = type\n",
"if self.type not in ('readonly', 'latest'):\n",
"self.fallback = fallback\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'"
] |
[
"def FUNC_14(self, VAR_0, VAR_1, VAR_2):...\n",
"self.federation_sender = VAR_2.get_federation_sender()\n",
"self.event_builder_factory = VAR_2.get_event_builder_factory()\n",
"self.federation_handler = VAR_2.get_federation_handler()\n",
"self.presence_handler = VAR_2.get_presence_handler()\n",
"self.store = VAR_2.get_datastore()\n",
"self.state = VAR_2.get_state_handler()\n",
"self.auth = VAR_2.get_auth()\n",
"self.random_signing_key = generate_signing_key('ver')\n"
] | [
"def prepare(self, reactor, clock, hs):...\n",
"self.federation_sender = hs.get_federation_sender()\n",
"self.event_builder_factory = hs.get_event_builder_factory()\n",
"self.federation_handler = hs.get_federation_handler()\n",
"self.presence_handler = hs.get_presence_handler()\n",
"self.store = hs.get_datastore()\n",
"self.state = hs.get_state_handler()\n",
"self.auth = hs.get_auth()\n",
"self.random_signing_key = generate_signing_key('ver')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_50(self, VAR_112=VAR_3, VAR_140=VAR_3, VAR_136=VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"self._reset_two_factor_auth(VAR_263.session)\n",
"if VAR_112 is VAR_3:\n",
"VAR_112 = self.get_vars_next() or self.settings.logout_next\n",
"if VAR_140 is VAR_3:\n",
"VAR_140 = self.settings.logout_onlogout\n",
"if VAR_140:\n",
"VAR_140(self.user)\n",
"if VAR_136 is VAR_3:\n",
"VAR_136 = self.messages['logout_log']\n",
"if self.user:\n",
"self.log_event(VAR_136, self.user)\n",
"if self.settings.login_form != self:\n",
"VAR_370 = self.settings.login_form\n",
"VAR_263.session.auth = None\n",
"VAR_371 = VAR_370.get_user()\n",
"self.user = None\n",
"if VAR_371:\n",
"if self.settings.renew_session_onlogout:\n",
"VAR_112 = VAR_370.logout_url(VAR_112)\n",
"VAR_263.session.renew(clear_session=not self.settings.keep_session_onlogout)\n",
"VAR_263.session.flash = self.messages.logged_out\n",
"if VAR_112 is not None:\n",
"redirect(VAR_112)\n"
] | [
"def logout(self, next=DEFAULT, onlogout=DEFAULT, log=DEFAULT):...\n",
"\"\"\"docstring\"\"\"\n",
"self._reset_two_factor_auth(current.session)\n",
"if next is DEFAULT:\n",
"next = self.get_vars_next() or self.settings.logout_next\n",
"if onlogout is DEFAULT:\n",
"onlogout = self.settings.logout_onlogout\n",
"if onlogout:\n",
"onlogout(self.user)\n",
"if log is DEFAULT:\n",
"log = self.messages['logout_log']\n",
"if self.user:\n",
"self.log_event(log, self.user)\n",
"if self.settings.login_form != self:\n",
"cas = self.settings.login_form\n",
"current.session.auth = None\n",
"cas_user = cas.get_user()\n",
"self.user = None\n",
"if cas_user:\n",
"if self.settings.renew_session_onlogout:\n",
"next = cas.logout_url(next)\n",
"current.session.renew(clear_session=not self.settings.keep_session_onlogout)\n",
"current.session.flash = self.messages.logged_out\n",
"if next is not None:\n",
"redirect(next)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Expr'"
] |
[
"def FUNC_2(self, VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = False\n",
"if VAR_5.session.get('callback') is None:\n",
"VAR_5.session['callback'] = dict()\n",
"if VAR_5.session.get('shares') is None:\n",
"VAR_11 = True\n",
"VAR_5.session['shares'] = dict()\n",
"if VAR_11:\n",
"VAR_11 = True\n",
"VAR_5.session.modified = True\n"
] | [
"def prepare_session(self, request):...\n",
"\"\"\"docstring\"\"\"\n",
"changes = False\n",
"if request.session.get('callback') is None:\n",
"request.session['callback'] = dict()\n",
"if request.session.get('shares') is None:\n",
"changes = True\n",
"request.session['shares'] = dict()\n",
"if changes:\n",
"changes = True\n",
"request.session.modified = True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_25(self):...\n",
"self.assert_expected(self.folder.t, 'BooleanAttributes.html')\n"
] | [
"def testBooleanAttributes(self):...\n",
"self.assert_expected(self.folder.t, 'BooleanAttributes.html')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"async def FUNC_6(self, VAR_14: str, VAR_15: str, VAR_16: str, VAR_17: str...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_36 = {'token': VAR_15, 'client_secret': VAR_16, 'sid': VAR_17}\n",
"VAR_37 = (self.hs.config.public_baseurl + \n '_matrix/client/unstable/add_threepid/email/submit_token?%s' % urllib.\n parse.urlencode(VAR_36))\n",
"VAR_38 = {'link': VAR_37}\n",
"await self.send_email(VAR_14, self.email_subjects.email_validation % {\n 'server_name': self.hs.config.server_name}, VAR_38)\n"
] | [
"async def send_add_threepid_mail(self, email_address: str, token: str,...\n",
"\"\"\"docstring\"\"\"\n",
"params = {'token': token, 'client_secret': client_secret, 'sid': sid}\n",
"link = (self.hs.config.public_baseurl + \n '_matrix/client/unstable/add_threepid/email/submit_token?%s' % urllib.\n parse.urlencode(params))\n",
"template_vars = {'link': link}\n",
"await self.send_email(email_address, self.email_subjects.email_validation %\n {'server_name': self.hs.config.server_name}, template_vars)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_4(self, VAR_4, VAR_5, VAR_6):...\n",
"VAR_16 = self.get_success(self.handler.on_make_join_request(VAR_4, VAR_6,\n VAR_5))\n",
"VAR_16.signatures[VAR_4] = {'x': 'y'}\n",
"VAR_11 = run_in_background(self.handler.on_send_join_request, VAR_4, VAR_16)\n",
"self.get_success(VAR_11)\n",
"VAR_21 = self.get_success(self.store.get_current_state_ids(VAR_6))\n",
"self.assertEqual(VAR_21[EventTypes.Member, VAR_5], VAR_16.event_id)\n",
"return VAR_16\n"
] | [
"def _build_and_send_join_event(self, other_server, other_user, room_id):...\n",
"join_event = self.get_success(self.handler.on_make_join_request(\n other_server, room_id, other_user))\n",
"join_event.signatures[other_server] = {'x': 'y'}\n",
"d = run_in_background(self.handler.on_send_join_request, other_server,\n join_event)\n",
"self.get_success(d)\n",
"r = self.get_success(self.store.get_current_state_ids(room_id))\n",
"self.assertEqual(r[EventTypes.Member, other_user], join_event.event_id)\n",
"return join_event\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_0(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2, VAR_3 = qutescheme.data_for_url(VAR_0.url())\n",
"VAR_5 = 'No handler found for {}!'.format(VAR_0.url().toDisplayString())\n",
"return networkreply.FixedDataNetworkReply(VAR_0, VAR_3, VAR_2)\n",
"return networkreply.ErrorNetworkReply(VAR_0, VAR_5, QNetworkReply.\n ContentNotFoundError)\n"
] | [
"def handler(request):...\n",
"\"\"\"docstring\"\"\"\n",
"mimetype, data = qutescheme.data_for_url(request.url())\n",
"errorstr = 'No handler found for {}!'.format(request.url().toDisplayString())\n",
"return networkreply.FixedDataNetworkReply(request, data, mimetype)\n",
"return networkreply.ErrorNetworkReply(request, errorstr, QNetworkReply.\n ContentNotFoundError)\n"
] | [
0,
0,
3,
3,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Return'",
"Return'"
] |
[
"def FUNC_26(self):...\n",
"VAR_29 = coreapi.Document(VAR_5='', title='Example API', content={'animals':\n {'dog': {'vet': {'list': coreapi.Link(url='/animals/dog/{id}/vet',\n action='get', fields=[coreapi.Field('id', required=True, location=\n 'path', schema=coreschema.String())])}, 'read': coreapi.Link(url=\n '/animals/dog/{id}', action='get', fields=[coreapi.Field('id', required\n =True, location='path', schema=coreschema.String())])}, 'cat': {'list':\n coreapi.Link(url='/animals/cat/', action='get', fields=[coreapi.Field(\n 'id', required=True, location='path', schema=coreschema.String())]),\n 'create': coreapi.Link(url='/aniamls/cat', action='post', fields=[])}}})\n",
"VAR_30 = VAR_29['animals']\n",
"VAR_31 = schema_links(VAR_30)\n",
"assert len(VAR_31) is 4\n",
"assert 'cat > create' in VAR_31\n",
"assert 'cat > list' in VAR_31\n",
"assert 'dog > read' in VAR_31\n",
"assert 'dog > vet > list' in VAR_31\n"
] | [
"def test_multiple_nested_routes(self):...\n",
"schema = coreapi.Document(url='', title='Example API', content={'animals':\n {'dog': {'vet': {'list': coreapi.Link(url='/animals/dog/{id}/vet',\n action='get', fields=[coreapi.Field('id', required=True, location=\n 'path', schema=coreschema.String())])}, 'read': coreapi.Link(url=\n '/animals/dog/{id}', action='get', fields=[coreapi.Field('id', required\n =True, location='path', schema=coreschema.String())])}, 'cat': {'list':\n coreapi.Link(url='/animals/cat/', action='get', fields=[coreapi.Field(\n 'id', required=True, location='path', schema=coreschema.String())]),\n 'create': coreapi.Link(url='/aniamls/cat', action='post', fields=[])}}})\n",
"section = schema['animals']\n",
"flat_links = schema_links(section)\n",
"assert len(flat_links) is 4\n",
"assert 'cat > create' in flat_links\n",
"assert 'cat > list' in flat_links\n",
"assert 'dog > read' in flat_links\n",
"assert 'dog > vet > list' in flat_links\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assert'",
"Assert'",
"Assert'",
"Assert'",
"Assert'"
] |
[
"def FUNC_7():...\n",
"\"\"\"docstring\"\"\"\n",
"session.forget()\n",
"session._unlock(VAR_43)\n",
"VAR_24, VAR_25 = check_new_version(request.env.web2py_version,\n WEB2PY_VERSION_URL)\n",
"if VAR_24 in (-1, -2):\n",
"return A(T('Unable to check for upgrades'), _href=WEB2PY_URL)\n",
"if not VAR_24:\n",
"return A(T('web2py is up to date'), _href=WEB2PY_URL)\n",
"if platform.system().lower() in ('windows', 'win32', 'win64'\n",
"return SPAN('You should upgrade to %s' % VAR_25.split('(')[0])\n",
"return sp_button(URL('upgrade_web2py'), T('upgrade now to %s') % VAR_25.\n split('(')[0])\n"
] | [
"def check_version():...\n",
"\"\"\"docstring\"\"\"\n",
"session.forget()\n",
"session._unlock(response)\n",
"new_version, version = check_new_version(request.env.web2py_version,\n WEB2PY_VERSION_URL)\n",
"if new_version in (-1, -2):\n",
"return A(T('Unable to check for upgrades'), _href=WEB2PY_URL)\n",
"if not new_version:\n",
"return A(T('web2py is up to date'), _href=WEB2PY_URL)\n",
"if platform.system().lower() in ('windows', 'win32', 'win64'\n",
"return SPAN('You should upgrade to %s' % version.split('(')[0])\n",
"return sp_button(URL('upgrade_web2py'), T('upgrade now to %s') % version.\n split('(')[0])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'",
"For",
"Return'",
"Return'"
] |
[
"@login_required...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_7 is None:\n",
"VAR_18 = None\n",
"if VAR_8 is None:\n",
"VAR_5 = {}\n",
"VAR_18 = get_project(VAR_6, VAR_7)\n",
"VAR_18 = get_component(VAR_6, VAR_7, VAR_8)\n",
"VAR_12 = ReportsForm(VAR_6.POST)\n",
"VAR_5 = {'project': VAR_18}\n",
"VAR_5 = {'component': VAR_18}\n",
"if not VAR_12.is_valid():\n",
"show_form_errors(VAR_6, VAR_12)\n",
"VAR_13 = FUNC_2(None if VAR_6.user.has_perm('reports.view', VAR_18) else\n VAR_6.user, VAR_12.cleaned_data['start_date'], VAR_12.cleaned_data[\n 'end_date'], **kwargs)\n",
"return redirect_param(VAR_18 or 'home', '#reports')\n",
"if VAR_12.cleaned_data['style'] == 'json':\n",
"return JsonResponse(VAR_13=data, safe=False)\n",
"VAR_16 = ('Name', 'Email', 'Count total', 'Edits total',\n 'Source words total', 'Source chars total', 'Target words total',\n 'Target chars total', 'Count new', 'Edits new', 'Source words new',\n 'Source chars new', 'Target words new', 'Target chars new',\n 'Count approved', 'Edits approved', 'Source words approved',\n 'Source chars approved', 'Target words approved',\n 'Target chars approved', 'Count edited', 'Edits edited',\n 'Source words edited', 'Source chars edited', 'Target words edited',\n 'Target chars edited')\n",
"if VAR_12.cleaned_data['style'] == 'html':\n",
"VAR_19 = VAR_1.format(''.join(f'<th>{h}</th>' for h in VAR_16))\n",
"VAR_19 = '{0}\\n{1} {2}\\n{0}'.format(VAR_0, ' '.join(f'{h:40}' for h in\n VAR_16[:2]), ' '.join(f'{h:24}' for h in VAR_16[2:]))\n",
"VAR_20 = '<tr>'\n",
"VAR_20 = ''\n",
"VAR_37 = VAR_38 = '<td>{0}</td>\\n'\n",
"VAR_37 = '{0:40} '\n",
"VAR_25 = '</tr>'\n",
"VAR_38 = '{0:24} '\n",
"VAR_26 = 'text/html'\n",
"VAR_25 = ''\n",
"VAR_27 = '</table>'\n",
"VAR_26 = 'text/plain'\n",
"VAR_10 = [VAR_19]\n",
"VAR_27 = VAR_0\n",
"for item in VAR_13:\n",
"if VAR_20:\n",
"VAR_10.append(VAR_27)\n",
"VAR_10.append(VAR_20)\n",
"VAR_10.append(''.join((VAR_37.format(item['name'] or 'Anonymous'), VAR_37.\n format(item['email'] or ''), VAR_38.format(item['count']), VAR_38.\n format(item['edits']), VAR_38.format(item['words']), VAR_38.format(item\n ['chars']), VAR_38.format(item['t_words']), VAR_38.format(item[\n 't_chars']), VAR_38.format(item['count_new']), VAR_38.format(item[\n 'edits_new']), VAR_38.format(item['words_new']), VAR_38.format(item[\n 'chars_new']), VAR_38.format(item['t_words_new']), VAR_38.format(item[\n 't_chars_new']), VAR_38.format(item['count_approve']), VAR_38.format(\n item['edits_approve']), VAR_38.format(item['words_approve']), VAR_38.\n format(item['chars_approve']), VAR_38.format(item['t_words_approve']),\n VAR_38.format(item['t_chars_approve']), VAR_38.format(item['count_edit'\n ]), VAR_38.format(item['edits_edit']), VAR_38.format(item['words_edit']\n ), VAR_38.format(item['chars_edit']), VAR_38.format(item['t_words_edit'\n ]), VAR_38.format(item['t_chars_edit']))))\n",
"return HttpResponse('\\n'.join(VAR_10), content_type=f'{mime}; charset=utf-8')\n",
"if VAR_25:\n",
"VAR_10.append(VAR_25)\n"
] | [
"@login_required...\n",
"\"\"\"docstring\"\"\"\n",
"if project is None:\n",
"obj = None\n",
"if component is None:\n",
"kwargs = {}\n",
"obj = get_project(request, project)\n",
"obj = get_component(request, project, component)\n",
"form = ReportsForm(request.POST)\n",
"kwargs = {'project': obj}\n",
"kwargs = {'component': obj}\n",
"if not form.is_valid():\n",
"show_form_errors(request, form)\n",
"data = generate_counts(None if request.user.has_perm('reports.view', obj) else\n request.user, form.cleaned_data['start_date'], form.cleaned_data[\n 'end_date'], **kwargs)\n",
"return redirect_param(obj or 'home', '#reports')\n",
"if form.cleaned_data['style'] == 'json':\n",
"return JsonResponse(data=data, safe=False)\n",
"headers = ('Name', 'Email', 'Count total', 'Edits total',\n 'Source words total', 'Source chars total', 'Target words total',\n 'Target chars total', 'Count new', 'Edits new', 'Source words new',\n 'Source chars new', 'Target words new', 'Target chars new',\n 'Count approved', 'Edits approved', 'Source words approved',\n 'Source chars approved', 'Target words approved',\n 'Target chars approved', 'Count edited', 'Edits edited',\n 'Source words edited', 'Source chars edited', 'Target words edited',\n 'Target chars edited')\n",
"if form.cleaned_data['style'] == 'html':\n",
"start = HTML_HEADING.format(''.join(f'<th>{h}</th>' for h in headers))\n",
"start = '{0}\\n{1} {2}\\n{0}'.format(RST_HEADING, ' '.join(f'{h:40}' for h in\n headers[:2]), ' '.join(f'{h:24}' for h in headers[2:]))\n",
"row_start = '<tr>'\n",
"row_start = ''\n",
"cell_name = cell_count = '<td>{0}</td>\\n'\n",
"cell_name = '{0:40} '\n",
"row_end = '</tr>'\n",
"cell_count = '{0:24} '\n",
"mime = 'text/html'\n",
"row_end = ''\n",
"end = '</table>'\n",
"mime = 'text/plain'\n",
"result = [start]\n",
"end = RST_HEADING\n",
"for item in data:\n",
"if row_start:\n",
"result.append(end)\n",
"result.append(row_start)\n",
"result.append(''.join((cell_name.format(item['name'] or 'Anonymous'),\n cell_name.format(item['email'] or ''), cell_count.format(item['count']),\n cell_count.format(item['edits']), cell_count.format(item['words']),\n cell_count.format(item['chars']), cell_count.format(item['t_words']),\n cell_count.format(item['t_chars']), cell_count.format(item['count_new']\n ), cell_count.format(item['edits_new']), cell_count.format(item[\n 'words_new']), cell_count.format(item['chars_new']), cell_count.format(\n item['t_words_new']), cell_count.format(item['t_chars_new']),\n cell_count.format(item['count_approve']), cell_count.format(item[\n 'edits_approve']), cell_count.format(item['words_approve']), cell_count\n .format(item['chars_approve']), cell_count.format(item[\n 't_words_approve']), cell_count.format(item['t_chars_approve']),\n cell_count.format(item['count_edit']), cell_count.format(item[\n 'edits_edit']), cell_count.format(item['words_edit']), cell_count.\n format(item['chars_edit']), cell_count.format(item['t_words_edit']),\n cell_count.format(item['t_chars_edit']))))\n",
"return HttpResponse('\\n'.join(result), content_type=f'{mime}; charset=utf-8')\n",
"if row_end:\n",
"result.append(row_end)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
0,
0,
0
] | [
"Condition",
"Docstring",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Return'",
"Condition",
"Expr'"
] |
[
"def FUNC_25(VAR_18):...\n",
"return VAR_18 is not None and bool(VAR_18['wizard']['order'])\n"
] | [
"def wizard_active(templates):...\n",
"return templates is not None and bool(templates['wizard']['order'])\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@VAR_25.whitelist(allow_guest=True)...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_14 = VAR_25.get_doc(VAR_9, VAR_10)\n",
"if getattr(VAR_14, VAR_11, VAR_25._dict()).is_whitelisted:\n",
"VAR_25.call(getattr(VAR_14, VAR_11), **frappe.local.form_dict)\n",
"VAR_25.throw(_('Not permitted'), VAR_25.PermissionError)\n"
] | [
"@frappe.whitelist(allow_guest=True)...\n",
"\"\"\"docstring\"\"\"\n",
"doc = frappe.get_doc(doctype, name)\n",
"if getattr(doc, custom_method, frappe._dict()).is_whitelisted:\n",
"frappe.call(getattr(doc, custom_method), **frappe.local.form_dict)\n",
"frappe.throw(_('Not permitted'), frappe.PermissionError)\n"
] | [
2,
0,
2,
2,
2,
2
] | [
"Condition",
"Docstring",
"Assign'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_120(self, VAR_201):...\n",
"VAR_324 = []\n",
"VAR_327 = SPAN()\n",
"VAR_327.append(A(VAR_201, _href=URL()))\n",
"for arg in self.args:\n",
"VAR_327.append('/')\n",
"return VAR_327\n",
"VAR_324.append(arg)\n",
"VAR_327.append(A(arg, _href=URL(args='/'.join(path))))\n"
] | [
"def breadcrumbs(self, basename):...\n",
"path = []\n",
"span = SPAN()\n",
"span.append(A(basename, _href=URL()))\n",
"for arg in self.args:\n",
"span.append('/')\n",
"return span\n",
"path.append(arg)\n",
"span.append(A(arg, _href=URL(args='/'.join(path))))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"For",
"Expr'",
"Return'",
"Expr'",
"Expr'"
] |
[
"@parameterized.named_parameters(('non_tfrt', False))...\n",
"self.parser = saved_model_cli.create_parser()\n",
"VAR_10 = test.test_src_dir_path(VAR_0)\n",
"VAR_45 = np.array([[1], [2]])\n",
"VAR_46 = np.zeros((6, 3))\n",
"VAR_32 = os.path.join(test.get_temp_dir(), 'testRunCommandNewOutdir_inputs.npz'\n )\n",
"VAR_42 = os.path.join(test.get_temp_dir(), 'new_dir')\n",
"if os.path.isdir(VAR_42):\n",
"shutil.rmtree(VAR_42)\n",
"np.savez(VAR_32, VAR_27=x, VAR_28=x_notused)\n",
"VAR_11 = self.parser.parse_args(['run', '--dir', VAR_10, '--tag_set',\n 'serve', '--signature_def', 'serving_default', '--inputs', 'x=' +\n VAR_32 + '[x0]', '--outdir', VAR_42, '--tf_debug'] + (['--use_tfrt'] if\n VAR_5 else []))\n",
"def FUNC_40(VAR_48):...\n",
"return VAR_48\n"
] | [
"@parameterized.named_parameters(('non_tfrt', False))...\n",
"self.parser = saved_model_cli.create_parser()\n",
"base_path = test.test_src_dir_path(SAVED_MODEL_PATH)\n",
"x = np.array([[1], [2]])\n",
"x_notused = np.zeros((6, 3))\n",
"input_path = os.path.join(test.get_temp_dir(),\n 'testRunCommandNewOutdir_inputs.npz')\n",
"output_dir = os.path.join(test.get_temp_dir(), 'new_dir')\n",
"if os.path.isdir(output_dir):\n",
"shutil.rmtree(output_dir)\n",
"np.savez(input_path, x0=x, x1=x_notused)\n",
"args = self.parser.parse_args(['run', '--dir', base_path, '--tag_set',\n 'serve', '--signature_def', 'serving_default', '--inputs', 'x=' +\n input_path + '[x0]', '--outdir', output_dir, '--tf_debug'] + ([\n '--use_tfrt'] if use_tfrt else []))\n",
"def fake_wrapper_session(sess):...\n",
"return sess\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_86(VAR_71):...\n",
"for VAR_433 in ['Dataset', 'Image', 'Plate']:\n",
"VAR_187 = VAR_71.POST.get(VAR_433, None)\n",
"if VAR_187 is not None:\n",
"for o in list(VAR_5.getObjects(VAR_433, VAR_187.split(','))):\n",
"return o.getDetails().owner.id.val\n"
] | [
"def getObjectOwnerId(r):...\n",
"for t in ['Dataset', 'Image', 'Plate']:\n",
"ids = r.POST.get(t, None)\n",
"if ids is not None:\n",
"for o in list(conn.getObjects(t, ids.split(','))):\n",
"return o.getDetails().owner.id.val\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Assign'",
"Condition",
"For",
"Return'"
] |
[
"def FUNC_6():...\n",
"VAR_26 = mysql.connector.connect(user=DB_USER, password=DB_PASSWORD, host=\n DB_HOST, database=DB_DBNAME, port=DB_PORT)\n",
"VAR_27 = VAR_26.cursor()\n",
"VAR_27.execute(VAR_20, VAR_22)\n",
"yield '<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\\n'\n",
"if VAR_1:\n",
"yield '<?xml-stylesheet type=\"text/xsl\" href=\"/scopelist.xsl\" ?>\\n'\n",
"yield '<objectlist>\\n'\n",
"for VAR_17, VAR_18, VAR_19 in VAR_27:\n",
"yield '<count adjust=\"1\"/>\\n'\n",
"yield '</objectlist>\\n'\n",
"yield FUNC_5(VAR_13, VAR_17, VAR_18, VAR_19) + '\\n'\n"
] | [
"def generate():...\n",
"cnx = mysql.connector.connect(user=DB_USER, password=DB_PASSWORD, host=\n DB_HOST, database=DB_DBNAME, port=DB_PORT)\n",
"cursor = cnx.cursor()\n",
"cursor.execute(query, substitutes)\n",
"yield '<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\\n'\n",
"if STYLE:\n",
"yield '<?xml-stylesheet type=\"text/xsl\" href=\"/scopelist.xsl\" ?>\\n'\n",
"yield '<objectlist>\\n'\n",
"for seq_no, rel_path, download_link in cursor:\n",
"yield '<count adjust=\"1\"/>\\n'\n",
"yield '</objectlist>\\n'\n",
"yield _get_object_element(dataset, seq_no, rel_path, download_link) + '\\n'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"For",
"Expr'",
"Expr'",
"Expr'"
] |
[
"async def FUNC_28(VAR_40, VAR_41, VAR_48, **VAR_3):...\n",
"self.assertEqual(VAR_40, self.mock_perspective_server.server_name)\n",
"self.assertEqual(VAR_41, '/_matrix/key/v2/query')\n",
"VAR_51 = VAR_48['server_keys']\n",
"self.assertEqual(list(VAR_51[VAR_14].keys()), [VAR_15])\n",
"return {'server_keys': [VAR_16]}\n"
] | [
"async def post_json(destination, path, data, **kwargs):...\n",
"self.assertEqual(destination, self.mock_perspective_server.server_name)\n",
"self.assertEqual(path, '/_matrix/key/v2/query')\n",
"q = data['server_keys']\n",
"self.assertEqual(list(q[expected_server_name].keys()), [expected_key_id])\n",
"return {'server_keys': [response]}\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"async def FUNC_6(VAR_6):...\n",
""
] | [
"async def middleware_handler(request):...\n",
""
] | [
0,
0
] | [
"AsyncFunctionDef'",
"Condition"
] |
[
"def FUNC_0(VAR_6=None, VAR_7=None):...\n",
"if VAR_6 is None:\n",
"VAR_6 = request.url_root\n",
"VAR_35 = not settings().getBoolean(['devel', 'cache', 'preemptive']\n ) or VAR_6 in settings().get(['server', 'preemptiveCache', 'exceptions']\n ) or not (VAR_6.startswith('http://') or VAR_6.startswith('https://'))\n",
"VAR_36 = request.headers.get('X-Preemptive-Recording', 'no') == 'yes'\n",
"if callable(VAR_7):\n",
"return VAR_36 or VAR_35 or VAR_7()\n",
"return VAR_36 or VAR_35\n"
] | [
"def _preemptive_unless(base_url=None, additional_unless=None):...\n",
"if base_url is None:\n",
"base_url = request.url_root\n",
"disabled_for_root = not settings().getBoolean(['devel', 'cache', 'preemptive']\n ) or base_url in settings().get(['server', 'preemptiveCache', 'exceptions']\n ) or not (base_url.startswith('http://') or base_url.startswith('https://')\n )\n",
"recording_disabled = request.headers.get('X-Preemptive-Recording', 'no'\n ) == 'yes'\n",
"if callable(additional_unless):\n",
"return recording_disabled or disabled_for_root or additional_unless()\n",
"return recording_disabled or disabled_for_root\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_17(self, VAR_0):...\n",
"from openapi_python_client import Project\n",
"VAR_8 = Project(VAR_3=mocker.MagicMock(title='My Test API'))\n",
"VAR_8.project_dir = VAR_0.MagicMock()\n",
"VAR_8.project_dir.mkdir.side_effect = FileExistsError\n",
"VAR_11 = VAR_8.build()\n",
"VAR_8.project_dir.mkdir.assert_called_once()\n",
"assert VAR_11 == [GeneratorError(detail=\n 'Directory already exists. Delete it or use the update command.')]\n"
] | [
"def test_build_file_exists(self, mocker):...\n",
"from openapi_python_client import Project\n",
"project = Project(openapi=mocker.MagicMock(title='My Test API'))\n",
"project.project_dir = mocker.MagicMock()\n",
"project.project_dir.mkdir.side_effect = FileExistsError\n",
"result = project.build()\n",
"project.project_dir.mkdir.assert_called_once()\n",
"assert result == [GeneratorError(detail=\n 'Directory already exists. Delete it or use the update command.')]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assert'"
] |
[
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_5.operationId is None:\n",
"return ParseError(VAR_5=data, detail=\n 'Path operations with operationId are not yet supported')\n",
"VAR_7 = CLASS_2(VAR_8=path, VAR_9=method, description=data.description,\n VAR_11=data.operationId, requires_security=bool(data.security), VAR_10=tag)\n",
"VAR_18 = CLASS_2._add_parameters(VAR_7, VAR_5)\n",
"if isinstance(VAR_18, ParseError):\n",
"return VAR_18\n",
"VAR_18 = CLASS_2._add_responses(VAR_18, VAR_5.responses)\n",
"if isinstance(VAR_18, ParseError):\n",
"return VAR_18\n",
"VAR_18 = CLASS_2._add_body(VAR_18, VAR_5)\n",
"return VAR_18\n"
] | [
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"if data.operationId is None:\n",
"return ParseError(data=data, detail=\n 'Path operations with operationId are not yet supported')\n",
"endpoint = Endpoint(path=path, method=method, description=data.description,\n name=data.operationId, requires_security=bool(data.security), tag=tag)\n",
"result = Endpoint._add_parameters(endpoint, data)\n",
"if isinstance(result, ParseError):\n",
"return result\n",
"result = Endpoint._add_responses(result, data.responses)\n",
"if isinstance(result, ParseError):\n",
"return result\n",
"result = Endpoint._add_body(result, data)\n",
"return result\n"
] | [
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Return'"
] |
[
"@VAR_0.route('/plugin/<plugin_name>')...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_111 = FUNC_58(f'/internal/plugins/{VAR_12}', 'get')\n",
"flash(str(err), 'danger')\n",
"return render_template('plugin.html', title=f'{plugin_name}', VAR_111=\n plugin_info)\n",
"return redirect(url_for('plugins_page'))\n"
] | [
"@gui.route('/plugin/<plugin_name>')...\n",
"\"\"\"docstring\"\"\"\n",
"plugin_info = query_internal_api(f'/internal/plugins/{plugin_name}', 'get')\n",
"flash(str(err), 'danger')\n",
"return render_template('plugin.html', title=f'{plugin_name}', plugin_info=\n plugin_info)\n",
"return redirect(url_for('plugins_page'))\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Expr'",
"Return'",
"Return'"
] |
[
"@VAR_7.route('/id/<path:object_path>')...\n",
"VAR_28 = Headers([('Content-Type', 'text/xml')])\n",
"return Response(FUNC_7(VAR_19=object_path), '200 OK', VAR_28=headers)\n"
] | [
"@scope_blueprint.route('/id/<path:object_path>')...\n",
"headers = Headers([('Content-Type', 'text/xml')])\n",
"return Response(_get_object_element(object_path=object_path), '200 OK',\n headers=headers)\n"
] | [
0,
0,
0
] | [
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_37(self, VAR_44):...\n",
"VAR_14 = VAR_44.group(2) or VAR_44.group(1)\n",
"VAR_14 = self.output(VAR_14)\n",
"return self.renderer.emphasis(VAR_14)\n"
] | [
"def output_emphasis(self, m):...\n",
"text = m.group(2) or m.group(1)\n",
"text = self.output(text)\n",
"return self.renderer.emphasis(text)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@VAR_2.route('/get_series_json', methods=['GET'])...\n",
"return calibre_db.get_typeahead(db.Series, request.args.get('q'))\n"
] | [
"@web.route('/get_series_json', methods=['GET'])...\n",
"return calibre_db.get_typeahead(db.Series, request.args.get('q'))\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_12(VAR_4):...\n",
"if not self._check_permission([VAR_0], VAR_1, VAR_4):\n",
"if callable(VAR_11):\n",
"return VAR_4\n",
"VAR_11()\n",
"self._deny_hook()\n"
] | [
"def decorator(view_func):...\n",
"if not self._check_permission([role], method, view_func):\n",
"if callable(callback):\n",
"return view_func\n",
"callback()\n",
"self._deny_hook()\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Condition",
"Return'",
"Expr'",
"Expr'"
] |
[
"def FUNC_3(self):...\n",
"super().tearDown()\n",
"SecurityManager.setSecurityPolicy(self.oldPolicy)\n",
"noSecurityManager()\n"
] | [
"def tearDown(self):...\n",
"super().tearDown()\n",
"SecurityManager.setSecurityPolicy(self.oldPolicy)\n",
"noSecurityManager()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_3(VAR_0):...\n",
"VAR_0.DEBUG = True\n",
"VAR_6 = Token(TokenType.TEXT,\n \"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view key='blob'\"\n )\n",
"VAR_7 = unicorn(None, VAR_6)\n",
"VAR_10 = CLASS_0(component_name='test', component_id='asdf')\n",
"VAR_8 = {'view': VAR_10}\n",
"VAR_7.render(VAR_8)\n",
"assert VAR_7.component_id == 'asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:blob'\n"
] | [
"def test_unicorn_render_parent_with_key(settings):...\n",
"settings.DEBUG = True\n",
"token = Token(TokenType.TEXT,\n \"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentKwargs' parent=view key='blob'\"\n )\n",
"unicorn_node = unicorn(None, token)\n",
"view = FakeComponentParent(component_name='test', component_id='asdf')\n",
"context = {'view': view}\n",
"unicorn_node.render(context)\n",
"assert unicorn_node.component_id == 'asdf:tests.templatetags.test_unicorn_render.FakeComponentKwargs:blob'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assert'"
] |
[
"@VAR_7.route('/meta/<path:object_path>')...\n",
"VAR_34 = FUNC_10(VAR_19)\n",
"VAR_35 = dict()\n",
"VAR_35['_gt_label'] = VAR_34.split('/')[-2]\n",
"return jsonify(VAR_35)\n"
] | [
"@scope_blueprint.route('/meta/<path:object_path>')...\n",
"path = _get_obj_absolute_path(object_path)\n",
"attrs = dict()\n",
"attrs['_gt_label'] = path.split('/')[-2]\n",
"return jsonify(attrs)\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@ensure_csrf_cookie...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)\n",
"def FUNC_86(VAR_123, VAR_86, VAR_124):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_185 = HttpResponse(content_type='text/csv')\n",
"VAR_185['Content-Disposition'] = 'attachment; filename={0}'.format(unicode(\n VAR_123).encode('utf-8'))\n",
"VAR_186 = VAR_26.writer(VAR_185, dialect='excel', quotechar='\"', quoting=\n csv.QUOTE_ALL)\n",
"VAR_187 = [unicode(s).encode('utf-8') for s in VAR_86]\n",
"VAR_186.writerow(VAR_187)\n",
"for row in VAR_124:\n",
"VAR_187 = [unicode(s).encode('utf-8') for s in row]\n",
"return VAR_185\n",
"VAR_186.writerow(VAR_187)\n"
] | [
"@ensure_csrf_cookie...\n",
"\"\"\"docstring\"\"\"\n",
"course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)\n",
"def csv_response(filename, header, rows):...\n",
"\"\"\"docstring\"\"\"\n",
"response = HttpResponse(content_type='text/csv')\n",
"response['Content-Disposition'] = 'attachment; filename={0}'.format(unicode\n (filename).encode('utf-8'))\n",
"writer = csv.writer(response, dialect='excel', quotechar='\"', quoting=csv.\n QUOTE_ALL)\n",
"encoded = [unicode(s).encode('utf-8') for s in header]\n",
"writer.writerow(encoded)\n",
"for row in rows:\n",
"encoded = [unicode(s).encode('utf-8') for s in row]\n",
"return response\n",
"writer.writerow(encoded)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"For",
"Assign'",
"Return'",
"Expr'"
] |
[
"\"\"\"QtWebKit specific qute://* handlers and glue code.\"\"\"\n",
"import mimetypes\n",
"from PyQt5.QtNetwork import QNetworkReply\n",
"from qutebrowser.browser import pdfjs, qutescheme\n",
"from qutebrowser.browser.webkit.network import networkreply\n",
"from qutebrowser.utils import log, usertypes, qtutils\n",
"def FUNC_0(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2, VAR_3 = qutescheme.data_for_url(VAR_0.url())\n",
"VAR_5 = 'No handler found for {}!'.format(VAR_0.url().toDisplayString())\n",
"return networkreply.FixedDataNetworkReply(VAR_0, VAR_3, VAR_2)\n",
"return networkreply.ErrorNetworkReply(VAR_0, VAR_5, QNetworkReply.\n ContentNotFoundError)\n"
] | [
"\"\"\"QtWebKit specific qute://* handlers and glue code.\"\"\"\n",
"import mimetypes\n",
"from PyQt5.QtNetwork import QNetworkReply\n",
"from qutebrowser.browser import pdfjs, qutescheme\n",
"from qutebrowser.browser.webkit.network import networkreply\n",
"from qutebrowser.utils import log, usertypes, qtutils\n",
"def handler(request):...\n",
"\"\"\"docstring\"\"\"\n",
"mimetype, data = qutescheme.data_for_url(request.url())\n",
"errorstr = 'No handler found for {}!'.format(request.url().toDisplayString())\n",
"return networkreply.FixedDataNetworkReply(request, data, mimetype)\n",
"return networkreply.ErrorNetworkReply(request, errorstr, QNetworkReply.\n ContentNotFoundError)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
3,
3,
0,
0
] | [
"Expr'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Return'",
"Return'"
] |
[
"def FUNC_0(VAR_2: T) ->VAR_1:...\n",
"\"\"\"docstring\"\"\"\n",
"if not VAR_2.__name__.startswith('get_'):\n",
"VAR_3 = VAR_2.__name__[len('get'):]\n",
"VAR_4 = [False]\n",
"@functools.wraps(VAR_2)...\n",
"return getattr(self, VAR_3)\n",
"if VAR_4[0]:\n",
"VAR_4[0] = True\n",
"VAR_18 = VAR_2(self)\n",
"VAR_4[0] = False\n",
"return VAR_18\n",
"setattr(self, VAR_3, VAR_18)\n"
] | [
"def cache_in_self(builder: T) ->T:...\n",
"\"\"\"docstring\"\"\"\n",
"if not builder.__name__.startswith('get_'):\n",
"depname = builder.__name__[len('get'):]\n",
"building = [False]\n",
"@functools.wraps(builder)...\n",
"return getattr(self, depname)\n",
"if building[0]:\n",
"building[0] = True\n",
"dep = builder(self)\n",
"building[0] = False\n",
"return dep\n",
"setattr(self, depname, dep)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Expr'"
] |
[
"def FUNC_13(self, VAR_5, VAR_39):...\n",
"if self.force or not os.path.exists(VAR_39):\n",
"return True\n",
"VAR_50 = FUNC_6(VAR_39)\n",
"for source in self.sources(VAR_5):\n",
"if FUNC_6(source) > VAR_50:\n",
"return False\n",
"print(source, VAR_39)\n",
"return True\n"
] | [
"def should_run(self, name, target):...\n",
"if self.force or not os.path.exists(target):\n",
"return True\n",
"target_mtime = mtime(target)\n",
"for source in self.sources(name):\n",
"if mtime(source) > target_mtime:\n",
"return False\n",
"print(source, target)\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Return'",
"Assign'",
"For",
"Condition",
"Return'",
"Expr'",
"Return'"
] |
[
"def __call__(self, VAR_14, VAR_43=None):...\n",
"return self.parse(VAR_14, VAR_43)\n"
] | [
"def __call__(self, text, rules=None):...\n",
"return self.parse(text, rules)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_138(self):...\n",
"return True\n"
] | [
"def can_search(self):...\n",
"return True\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"async def FUNC_4(self, VAR_14: str, VAR_15: str, VAR_16: str, VAR_17: str...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_36 = {'token': VAR_15, 'client_secret': VAR_16, 'sid': VAR_17}\n",
"VAR_37 = (self.hs.config.public_baseurl + \n '_synapse/client/password_reset/email/submit_token?%s' % urllib.parse.\n urlencode(VAR_36))\n",
"VAR_38 = {'link': VAR_37}\n",
"await self.send_email(VAR_14, self.email_subjects.password_reset % {\n 'server_name': self.hs.config.server_name}, VAR_38)\n"
] | [
"async def send_password_reset_mail(self, email_address: str, token: str,...\n",
"\"\"\"docstring\"\"\"\n",
"params = {'token': token, 'client_secret': client_secret, 'sid': sid}\n",
"link = (self.hs.config.public_baseurl + \n '_synapse/client/password_reset/email/submit_token?%s' % urllib.parse.\n urlencode(params))\n",
"template_vars = {'link': link}\n",
"await self.send_email(email_address, self.email_subjects.password_reset % {\n 'server_name': self.hs.config.server_name}, template_vars)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_55(VAR_118):...\n",
"\"\"\"docstring\"\"\"\n",
"return importlib.import_module(VAR_118)\n"
] | [
"def get_module(modulename):...\n",
"\"\"\"docstring\"\"\"\n",
"return importlib.import_module(modulename)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def __iter__(self):...\n",
"if self.field.empty_label is not None:\n",
"yield '', self.field.empty_label\n",
"for VAR_55 in self.queryset:\n",
"yield self.choice(VAR_55)\n"
] | [
"def __iter__(self):...\n",
"if self.field.empty_label is not None:\n",
"yield '', self.field.empty_label\n",
"for obj in self.queryset:\n",
"yield self.choice(obj)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"For",
"Expr'"
] |
[
"@VAR_0.route('/jobs/<int:job_id>/edit', methods=['GET', 'POST'])...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_57 = os.path.join(VAR_0.config['UPLOAD_FOLDER'], 'loadfile.txt')\n",
"VAR_56 = os.path.join(VAR_0.config['UPLOAD_FOLDER'], 'export.txt')\n",
"if request.method == 'POST':\n",
"VAR_99 = request.form.get('edited-job-info')\n",
"VAR_98 = FUNC_58(f'/internal/jobs/{VAR_9}/export', 'get', VAR_73={'path':\n export_path})\n",
"flash(str(err), 'danger')\n",
"return render_template('edit_job.html', title=f'Edit Job {job_id}', VAR_9=\n job_id, VAR_121=exported_data)\n",
"VAR_14.write(VAR_99)\n",
"VAR_121 = VAR_14.read()\n",
"return redirect(url_for('job_page', VAR_9=job_id))\n",
"VAR_98 = FUNC_58('/internal/load', 'get', VAR_73={'path': loadfile_path})\n",
"flash(str(err), 'danger')\n",
"flash(VAR_98['message'], 'success')\n",
"return redirect(request.url)\n"
] | [
"@gui.route('/jobs/<int:job_id>/edit', methods=['GET', 'POST'])...\n",
"\"\"\"docstring\"\"\"\n",
"loadfile_path = os.path.join(gui.config['UPLOAD_FOLDER'], 'loadfile.txt')\n",
"export_path = os.path.join(gui.config['UPLOAD_FOLDER'], 'export.txt')\n",
"if request.method == 'POST':\n",
"edited_job_info = request.form.get('edited-job-info')\n",
"response_info = query_internal_api(f'/internal/jobs/{job_id}/export', 'get',\n params={'path': export_path})\n",
"flash(str(err), 'danger')\n",
"return render_template('edit_job.html', title=f'Edit Job {job_id}', job_id=\n job_id, exported_data=exported_data)\n",
"f.write(edited_job_info)\n",
"exported_data = f.read()\n",
"return redirect(url_for('job_page', job_id=job_id))\n",
"response_info = query_internal_api('/internal/load', 'get', params={'path':\n loadfile_path})\n",
"flash(str(err), 'danger')\n",
"flash(response_info['message'], 'success')\n",
"return redirect(request.url)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Expr'",
"Assign'",
"Return'",
"Assign'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_34(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_61 = False\n",
"self._action = 'save'\n",
"if not self.get('__islocal'):\n",
"if self.meta.issingle:\n",
"self.check_docstatus_transition(0)\n",
"VAR_100 = frappe.db.sql(\n \"\"\"select value from tabSingles\n\t\t\t\t\twhere doctype=%s and field='modified' for update\"\"\"\n , self.doctype)\n",
"VAR_101 = frappe.db.sql(\n \"\"\"select modified, docstatus from `tab{0}`\n\t\t\t\t\twhere name = %s for update\"\"\"\n .format(self.doctype), self.name, as_dict=True)\n",
"VAR_100 = VAR_100 and VAR_100[0][0]\n",
"if not VAR_101:\n",
"if VAR_100 and VAR_100 != cstr(self._original_modified):\n",
"frappe.throw(_('Record does not exist'))\n",
"VAR_101 = VAR_101[0]\n",
"VAR_61 = True\n",
"if VAR_61:\n",
"VAR_100 = cstr(VAR_101.modified)\n",
"frappe.msgprint(_(\n 'Error: Document has been modified after you have opened it') + \n ' (%s, %s). ' % (VAR_100, self.modified) + _(\n 'Please refresh to get the latest document.'), VAR_33=frappe.\n TimestampMismatchError)\n",
"if VAR_100 and VAR_100 != cstr(self._original_modified):\n",
"VAR_61 = True\n",
"self.check_docstatus_transition(VAR_101.docstatus)\n"
] | [
"def check_if_latest(self):...\n",
"\"\"\"docstring\"\"\"\n",
"conflict = False\n",
"self._action = 'save'\n",
"if not self.get('__islocal'):\n",
"if self.meta.issingle:\n",
"self.check_docstatus_transition(0)\n",
"modified = frappe.db.sql(\n \"\"\"select value from tabSingles\n\t\t\t\t\twhere doctype=%s and field='modified' for update\"\"\"\n , self.doctype)\n",
"tmp = frappe.db.sql(\n \"\"\"select modified, docstatus from `tab{0}`\n\t\t\t\t\twhere name = %s for update\"\"\"\n .format(self.doctype), self.name, as_dict=True)\n",
"modified = modified and modified[0][0]\n",
"if not tmp:\n",
"if modified and modified != cstr(self._original_modified):\n",
"frappe.throw(_('Record does not exist'))\n",
"tmp = tmp[0]\n",
"conflict = True\n",
"if conflict:\n",
"modified = cstr(tmp.modified)\n",
"frappe.msgprint(_(\n 'Error: Document has been modified after you have opened it') + \n ' (%s, %s). ' % (modified, self.modified) + _(\n 'Please refresh to get the latest document.'), raise_exception=frappe.\n TimestampMismatchError)\n",
"if modified and modified != cstr(self._original_modified):\n",
"conflict = True\n",
"self.check_docstatus_transition(tmp.docstatus)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Expr'"
] |
[
"def FUNC_2(VAR_4: List[str]):...\n",
"\"\"\"docstring\"\"\"\n",
"def FUNC_6():...\n",
"yield '<!-- CLICK_WEB START HEADER -->'\n",
"yield '<div class=\"command-line\">Executing: {}</div>'.format('/'.join(VAR_4))\n",
"yield '<!-- CLICK_WEB END HEADER -->'\n",
"VAR_10 = '\\n'.join(FUNC_6())\n",
"return VAR_10\n"
] | [
"def _create_cmd_header(commands: List[str]):...\n",
"\"\"\"docstring\"\"\"\n",
"def generate():...\n",
"yield '<!-- CLICK_WEB START HEADER -->'\n",
"yield '<div class=\"command-line\">Executing: {}</div>'.format('/'.join(commands)\n )\n",
"yield '<!-- CLICK_WEB END HEADER -->'\n",
"html_str = '\\n'.join(generate())\n",
"return html_str\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Return'"
] |
[
"async def FUNC_11(VAR_6, VAR_7, VAR_20):...\n",
"self.assertEqual(VAR_6, self.hs.hostname)\n",
"self.assertEqual(VAR_7, '/_matrix/key/v2/query')\n",
"VAR_10 = FakeChannel(self.site, self.reactor)\n",
"VAR_11 = SynapseRequest(VAR_10)\n",
"VAR_11.content = BytesIO(encode_canonical_json(VAR_20))\n",
"VAR_11.requestReceived(b'POST', VAR_7.encode('utf-8'), b'1.1')\n",
"VAR_10.await_result()\n",
"self.assertEqual(VAR_10.code, 200)\n",
"VAR_12 = VAR_10.json_body\n",
"return VAR_12\n"
] | [
"async def post_json(destination, path, data):...\n",
"self.assertEqual(destination, self.hs.hostname)\n",
"self.assertEqual(path, '/_matrix/key/v2/query')\n",
"channel = FakeChannel(self.site, self.reactor)\n",
"req = SynapseRequest(channel)\n",
"req.content = BytesIO(encode_canonical_json(data))\n",
"req.requestReceived(b'POST', path.encode('utf-8'), b'1.1')\n",
"channel.await_result()\n",
"self.assertEqual(channel.code, 200)\n",
"resp = channel.json_body\n",
"return resp\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_3(VAR_23, *VAR_24, **VAR_25):...\n",
"VAR_66 = type(VAR_23)\n",
"if isinstance(VAR_23, VAR_72):\n",
"VAR_56 = fromstring(VAR_23)\n",
"VAR_56 = copy.deepcopy(VAR_23)\n",
"FUNC_1(VAR_56, *VAR_24, **kw)\n",
"return _transform_result(VAR_66, VAR_56)\n"
] | [
"def autolink_html(html, *args, **kw):...\n",
"result_type = type(html)\n",
"if isinstance(html, basestring):\n",
"doc = fromstring(html)\n",
"doc = copy.deepcopy(html)\n",
"autolink(doc, *args, **kw)\n",
"return _transform_result(result_type, doc)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"import logging\n",
"from typing import Any, Callable, List, Optional, Tuple\n",
"import attr\n",
"from twisted.internet.interfaces import IConsumer, IPullProducer, IReactorTime\n",
"from twisted.internet.protocol import Protocol\n",
"from twisted.internet.task import LoopingCall\n",
"from twisted.web.http import HTTPChannel\n",
"from synapse.app.generic_worker import GenericWorkerReplicationHandler, GenericWorkerServer\n",
"from synapse.http.server import JsonResource\n",
"from synapse.http.site import SynapseRequest, SynapseSite\n",
"from synapse.replication.http import ReplicationRestResource, streams\n",
"from synapse.replication.tcp.handler import ReplicationCommandHandler\n",
"from synapse.replication.tcp.protocol import ClientReplicationStreamProtocol\n",
"from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory\n",
"from synapse.server import HomeServer\n",
"from synapse.util import Clock\n",
"from tests import unittest\n",
"from tests.server import FakeTransport\n",
"import hiredis\n",
"VAR_28 = None\n",
"VAR_0 = logging.getLogger(__name__)\n",
"\"\"\"Base class for tests of the replication streams\"\"\"\n",
"if not VAR_28:\n",
"VAR_29 = 'Requires hiredis'\n",
"VAR_1 = [streams.register_servlets]\n",
"def FUNC_0(self, VAR_2, VAR_3, VAR_4):...\n",
"VAR_30 = ReplicationStreamProtocolFactory(VAR_4)\n",
"self.streamer = VAR_4.get_replication_streamer()\n",
"self.server = VAR_30.buildProtocol(None)\n",
"self.reactor.lookups['testserv'] = '1.2.3.4'\n",
"self.worker_hs = self.setup_test_homeserver(http_client=None,\n homeserver_to_use=GenericWorkerServer, VAR_32=self.\n _get_worker_hs_config(), VAR_2=self.reactor)\n",
"self.worker_hs.get_datastore().db_pool = VAR_4.get_datastore().db_pool\n",
"self.test_handler = self._build_replication_data_handler()\n",
"self.worker_hs._replication_data_handler = self.test_handler\n",
"VAR_31 = ReplicationCommandHandler(self.worker_hs)\n",
"self.client = ClientReplicationStreamProtocol(self.worker_hs, 'client',\n 'test', VAR_3, VAR_31)\n",
"self._client_transport = None\n",
"self._server_transport = None\n",
"def FUNC_1(self) ->dict:...\n",
"VAR_32 = self.default_config()\n",
"VAR_32['worker_app'] = 'synapse.app.generic_worker'\n",
"VAR_32['worker_replication_host'] = 'testserv'\n",
"VAR_32['worker_replication_http_port'] = '8765'\n",
"return VAR_32\n"
] | [
"import logging\n",
"from typing import Any, Callable, List, Optional, Tuple\n",
"import attr\n",
"from twisted.internet.interfaces import IConsumer, IPullProducer, IReactorTime\n",
"from twisted.internet.protocol import Protocol\n",
"from twisted.internet.task import LoopingCall\n",
"from twisted.web.http import HTTPChannel\n",
"from synapse.app.generic_worker import GenericWorkerReplicationHandler, GenericWorkerServer\n",
"from synapse.http.server import JsonResource\n",
"from synapse.http.site import SynapseRequest, SynapseSite\n",
"from synapse.replication.http import ReplicationRestResource, streams\n",
"from synapse.replication.tcp.handler import ReplicationCommandHandler\n",
"from synapse.replication.tcp.protocol import ClientReplicationStreamProtocol\n",
"from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory\n",
"from synapse.server import HomeServer\n",
"from synapse.util import Clock\n",
"from tests import unittest\n",
"from tests.server import FakeTransport\n",
"import hiredis\n",
"hiredis = None\n",
"logger = logging.getLogger(__name__)\n",
"\"\"\"Base class for tests of the replication streams\"\"\"\n",
"if not hiredis:\n",
"skip = 'Requires hiredis'\n",
"servlets = [streams.register_servlets]\n",
"def prepare(self, reactor, clock, hs):...\n",
"server_factory = ReplicationStreamProtocolFactory(hs)\n",
"self.streamer = hs.get_replication_streamer()\n",
"self.server = server_factory.buildProtocol(None)\n",
"self.reactor.lookups['testserv'] = '1.2.3.4'\n",
"self.worker_hs = self.setup_test_homeserver(http_client=None,\n homeserver_to_use=GenericWorkerServer, config=self.\n _get_worker_hs_config(), reactor=self.reactor)\n",
"self.worker_hs.get_datastore().db_pool = hs.get_datastore().db_pool\n",
"self.test_handler = self._build_replication_data_handler()\n",
"self.worker_hs._replication_data_handler = self.test_handler\n",
"repl_handler = ReplicationCommandHandler(self.worker_hs)\n",
"self.client = ClientReplicationStreamProtocol(self.worker_hs, 'client',\n 'test', clock, repl_handler)\n",
"self._client_transport = None\n",
"self._server_transport = None\n",
"def _get_worker_hs_config(self) ->dict:...\n",
"config = self.default_config()\n",
"config['worker_app'] = 'synapse.app.generic_worker'\n",
"config['worker_replication_host'] = 'testserv'\n",
"config['worker_replication_http_port'] = '8765'\n",
"return config\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"async def FUNC_27(VAR_40, VAR_41, **VAR_3):...\n",
"self.assertEqual(VAR_40, VAR_34)\n",
"self.assertEqual(VAR_41, '/_matrix/key/v2/server/key1')\n",
"return VAR_16\n"
] | [
"async def get_json(destination, path, **kwargs):...\n",
"self.assertEqual(destination, SERVER_NAME)\n",
"self.assertEqual(path, '/_matrix/key/v2/server/key1')\n",
"return response\n"
] | [
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_9(self, VAR_21):...\n",
"VAR_21.password = generate_password_hash(VAR_21.password)\n"
] | [
"def pre_add(self, item):...\n",
"item.password = generate_password_hash(item.password)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_12(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_33 = self.reactor.tcpClients\n",
"self.assertEqual(len(VAR_33), 1)\n",
"VAR_34, VAR_35, VAR_36, VAR_37, VAR_38 = VAR_33.pop(0)\n",
"self.assertEqual(VAR_34, 'localhost')\n",
"self.assertEqual(VAR_35, 6379)\n",
"VAR_39 = VAR_36.buildProtocol(None)\n",
"VAR_47 = self._redis_server.buildProtocol(None)\n",
"VAR_41 = FakeTransport(VAR_47, self.reactor, VAR_39)\n",
"VAR_39.makeConnection(VAR_41)\n",
"VAR_42 = FakeTransport(VAR_39, self.reactor, VAR_47)\n",
"VAR_47.makeConnection(VAR_42)\n",
"return VAR_41, VAR_42\n"
] | [
"def connect_any_redis_attempts(self):...\n",
"\"\"\"docstring\"\"\"\n",
"clients = self.reactor.tcpClients\n",
"self.assertEqual(len(clients), 1)\n",
"host, port, client_factory, _timeout, _bindAddress = clients.pop(0)\n",
"self.assertEqual(host, 'localhost')\n",
"self.assertEqual(port, 6379)\n",
"client_protocol = client_factory.buildProtocol(None)\n",
"server_protocol = self._redis_server.buildProtocol(None)\n",
"client_to_server_transport = FakeTransport(server_protocol, self.reactor,\n client_protocol)\n",
"client_protocol.makeConnection(client_to_server_transport)\n",
"server_to_client_transport = FakeTransport(client_protocol, self.reactor,\n server_protocol)\n",
"server_protocol.makeConnection(server_to_client_transport)\n",
"return client_to_server_transport, server_to_client_transport\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_25(self, VAR_13, VAR_14):...\n",
"VAR_30 = []\n",
"for fieldname, VAR_4 in VAR_13:\n",
"VAR_5 = frappe.get_meta(VAR_4)\n",
"return VAR_30\n",
"if VAR_5.get_field(fieldname):\n",
"VAR_41 = VAR_5.get_field(fieldname)\n",
"if fieldname == '_aggregate_column':\n",
"VAR_30.append(VAR_41)\n",
"VAR_32 = FUNC_3(VAR_14, VAR_5)\n",
"VAR_32 = VAR_5.get_label(fieldname)\n",
"VAR_41 = frappe._dict(fieldname=fieldname, VAR_32=label)\n",
"if fieldname == 'name':\n",
"VAR_41.fieldtype = 'Link'\n",
"VAR_41.options = VAR_4\n"
] | [
"def build_standard_report_columns(self, columns, group_by_args):...\n",
"_columns = []\n",
"for fieldname, doctype in columns:\n",
"meta = frappe.get_meta(doctype)\n",
"return _columns\n",
"if meta.get_field(fieldname):\n",
"field = meta.get_field(fieldname)\n",
"if fieldname == '_aggregate_column':\n",
"_columns.append(field)\n",
"label = get_group_by_column_label(group_by_args, meta)\n",
"label = meta.get_label(fieldname)\n",
"field = frappe._dict(fieldname=fieldname, label=label)\n",
"if fieldname == 'name':\n",
"field.fieldtype = 'Link'\n",
"field.options = doctype\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_57(self, VAR_18):...\n",
"\"\"\"docstring\"\"\"\n",
"return frappe.db.get_value(self.doctype, self.name, VAR_18)\n"
] | [
"def db_get(self, fieldname):...\n",
"\"\"\"docstring\"\"\"\n",
"return frappe.db.get_value(self.doctype, self.name, fieldname)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_26(VAR_32, VAR_17):...\n",
"if 'btn-upload-cover' in VAR_32.files:\n",
"VAR_40 = VAR_32.files['btn-upload-cover']\n",
"return None\n",
"if VAR_40.filename != '':\n",
"if not current_user.role_upload():\n",
"abort(403)\n",
"VAR_80, VAR_111 = helper.save_cover(VAR_40, VAR_17.path)\n",
"if VAR_80 is True:\n",
"return True\n",
"flash(VAR_111, category='error')\n",
"return False\n"
] | [
"def upload_cover(request, book):...\n",
"if 'btn-upload-cover' in request.files:\n",
"requested_file = request.files['btn-upload-cover']\n",
"return None\n",
"if requested_file.filename != '':\n",
"if not current_user.role_upload():\n",
"abort(403)\n",
"ret, message = helper.save_cover(requested_file, book.path)\n",
"if ret is True:\n",
"return True\n",
"flash(message, category='error')\n",
"return False\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Return'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Expr'",
"Return'"
] |
[
"@require_POST...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_224 = {}\n",
"VAR_225 = ['Project', 'Dataset', 'Image', 'Screen', 'Plate', 'Fileset']\n",
"for VAR_215 in VAR_225:\n",
"VAR_154 = VAR_2.POST.get(VAR_215, None)\n",
"if VAR_27 == 'chgrp':\n",
"if VAR_154 is not None:\n",
"VAR_332 = getIntOrDefault(VAR_2, 'group_id', None)\n",
"if VAR_27 == 'chown':\n",
"VAR_144 = [int(VAR_40) for VAR_40 in VAR_154.split(',')]\n",
"VAR_84 = VAR_5.submitDryRun(VAR_27, VAR_224, VAR_332)\n",
"VAR_332 = getIntOrDefault(VAR_2, 'owner_id', None)\n",
"VAR_224[VAR_215] = VAR_144\n",
"VAR_198 = VAR_345(VAR_84)\n",
"return HttpResponse(VAR_198)\n"
] | [
"@require_POST...\n",
"\"\"\"docstring\"\"\"\n",
"targetObjects = {}\n",
"dtypes = ['Project', 'Dataset', 'Image', 'Screen', 'Plate', 'Fileset']\n",
"for dtype in dtypes:\n",
"oids = request.POST.get(dtype, None)\n",
"if action == 'chgrp':\n",
"if oids is not None:\n",
"target_id = getIntOrDefault(request, 'group_id', None)\n",
"if action == 'chown':\n",
"obj_ids = [int(oid) for oid in oids.split(',')]\n",
"handle = conn.submitDryRun(action, targetObjects, target_id)\n",
"target_id = getIntOrDefault(request, 'owner_id', None)\n",
"targetObjects[dtype] = obj_ids\n",
"jobId = str(handle)\n",
"return HttpResponse(jobId)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"For",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@FUNC_0...\n",
"return InsecureInterceptableContextFactory(\n ) if self.config.use_insecure_ssl_client_just_for_testing_do_not_use else RegularPolicyForHTTPS(\n )\n"
] | [
"@cache_in_self...\n",
"return InsecureInterceptableContextFactory(\n ) if self.config.use_insecure_ssl_client_just_for_testing_do_not_use else RegularPolicyForHTTPS(\n )\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"@log_function...\n",
"VAR_2 = FUNC_2('/invite/%s/%s', VAR_6, VAR_7)\n",
"VAR_37 = await self.client.put_json(VAR_5=destination, VAR_2=path, VAR_39=\n content, VAR_15=True)\n",
"return VAR_37\n"
] | [
"@log_function...\n",
"path = _create_v2_path('/invite/%s/%s', room_id, event_id)\n",
"response = await self.client.put_json(destination=destination, path=path,\n data=content, ignore_backoff=True)\n",
"return response\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_4(VAR_0, VAR_1: FlaskClient):...\n",
"VAR_5 = VAR_1.get('/notes/new')\n",
"assert VAR_5.status_code == 200\n"
] | [
"def test_get_new_note(test_app, client: FlaskClient):...\n",
"response = client.get('/notes/new')\n",
"assert response.status_code == 200\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assert'"
] |
[
"@VAR_0.simple_tag...\n",
"return VAR_8.to_html()\n"
] | [
"@register.simple_tag...\n",
"return pager.to_html()\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"import tornado.ioloop\n",
"import tornado.web\n",
"import os\n",
"import json\n",
"import sys\n",
"import rtxcomplete\n",
"import traceback\n",
"VAR_0 = os.path.dirname(os.path.abspath(__file__))\n",
"rtxcomplete.load()\n",
"def FUNC_2(self, VAR_1, VAR_2=None):...\n",
"VAR_8 = self.get_argument('limit')\n",
"print(sys.exc_info()[:])\n",
"def FUNC_2(self, VAR_1, VAR_2=None):...\n",
"VAR_2 = self.get_argument('word')\n",
"traceback.print_tb(sys.exc_info()[-1])\n",
"VAR_8 = self.get_argument('limit')\n",
"print(sys.exc_info()[:])\n",
"def FUNC_2(self, VAR_1, VAR_2=None):...\n",
"VAR_9 = self.get_argument('callback')\n",
"self.write('error')\n",
"VAR_2 = self.get_argument('word')\n",
"traceback.print_tb(sys.exc_info()[-1])\n",
"VAR_8 = self.get_argument('limit')\n",
"print(sys.exc_info()[:])\n",
"def FUNC_2(self, VAR_1, VAR_2=None):...\n",
"VAR_10 = rtxcomplete.prefix(VAR_2, VAR_8)\n",
"VAR_9 = self.get_argument('callback')\n",
"self.write('error')\n",
"VAR_2 = self.get_argument('word')\n",
"traceback.print_tb(sys.exc_info()[-1])\n",
"if 1 == 1:\n",
"VAR_10 = VAR_9 + '(' + json.dumps(VAR_10) + ');'\n",
"VAR_10 = rtxcomplete.fuzzy(VAR_2, VAR_8)\n",
"VAR_9 = self.get_argument('callback')\n",
"self.write('error')\n",
"VAR_8 = self.get_argument('limit')\n",
"def FUNC_2(self, VAR_1, VAR_2=None):...\n",
"self.write(VAR_10)\n",
"VAR_10 = VAR_9 + '(' + json.dumps(VAR_10) + ');'\n",
"VAR_10 = rtxcomplete.autofuzzy(VAR_2, VAR_8)\n",
"VAR_2 = self.get_argument('word')\n",
"print('matched define search: not implemented')\n",
"self.write(VAR_10)\n",
"VAR_10 = VAR_9 + '(' + json.dumps(VAR_10) + ');'\n",
"VAR_9 = self.get_argument('callback')\n",
"self.write('')\n",
"self.write(VAR_10)\n",
"VAR_10 = rtxcomplete.get_nodes_like(VAR_2, VAR_8)\n",
"def FUNC_0():...\n",
"VAR_10 = VAR_9 + '(' + json.dumps(VAR_10) + ');'\n",
"return tornado.web.Application([('/autofuzzy(.*)', CLASS_2), ('/auto(.*)',\n CLASS_0), ('/fuzzy(.*)', CLASS_1), ('/define(.*)', CLASS_4), (\n '/nodeslike(.*)', CLASS_3), ('/(.*)', tornado.web.StaticFileHandler, {\n 'path': VAR_0, 'default_filename': 'rtxcomplete.html'})],\n compress_response=True)\n",
"self.write(VAR_10)\n"
] | [
"import tornado.ioloop\n",
"import tornado.web\n",
"import os\n",
"import json\n",
"import sys\n",
"import rtxcomplete\n",
"import traceback\n",
"root = os.path.dirname(os.path.abspath(__file__))\n",
"rtxcomplete.load()\n",
"def get(self, arg, word=None):...\n",
"limit = self.get_argument('limit')\n",
"print(sys.exc_info()[:])\n",
"def get(self, arg, word=None):...\n",
"word = self.get_argument('word')\n",
"traceback.print_tb(sys.exc_info()[-1])\n",
"limit = self.get_argument('limit')\n",
"print(sys.exc_info()[:])\n",
"def get(self, arg, word=None):...\n",
"callback = self.get_argument('callback')\n",
"self.write('error')\n",
"word = self.get_argument('word')\n",
"traceback.print_tb(sys.exc_info()[-1])\n",
"limit = self.get_argument('limit')\n",
"print(sys.exc_info()[:])\n",
"def get(self, arg, word=None):...\n",
"result = rtxcomplete.prefix(word, limit)\n",
"callback = self.get_argument('callback')\n",
"self.write('error')\n",
"word = self.get_argument('word')\n",
"traceback.print_tb(sys.exc_info()[-1])\n",
"if 1 == 1:\n",
"result = callback + '(' + json.dumps(result) + ');'\n",
"result = rtxcomplete.fuzzy(word, limit)\n",
"callback = self.get_argument('callback')\n",
"self.write('error')\n",
"limit = self.get_argument('limit')\n",
"def get(self, arg, word=None):...\n",
"self.write(result)\n",
"result = callback + '(' + json.dumps(result) + ');'\n",
"result = rtxcomplete.autofuzzy(word, limit)\n",
"word = self.get_argument('word')\n",
"print('matched define search: not implemented')\n",
"self.write(result)\n",
"result = callback + '(' + json.dumps(result) + ');'\n",
"callback = self.get_argument('callback')\n",
"self.write('')\n",
"self.write(result)\n",
"result = rtxcomplete.get_nodes_like(word, limit)\n",
"def make_https_app():...\n",
"result = callback + '(' + json.dumps(result) + ');'\n",
"return tornado.web.Application([('/autofuzzy(.*)', autofuzzySearch), (\n '/auto(.*)', autoSearch), ('/fuzzy(.*)', fuzzySearch), ('/define(.*)',\n defineSearch), ('/nodeslike(.*)', nodesLikeSearch), ('/(.*)', tornado.\n web.StaticFileHandler, {'path': root, 'default_filename':\n 'rtxcomplete.html'})], compress_response=True)\n",
"self.write(result)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
0,
0,
0,
2,
0,
0,
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"FunctionDef'",
"Assign'",
"Return'",
"Expr'"
] |
[
"async def FUNC_12(self, VAR_2, VAR_6):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_31 = {}\n",
"for requested_key_id in VAR_6:\n",
"if requested_key_id in VAR_31:\n",
"return VAR_31\n",
"VAR_39 = self.clock.time_msec()\n",
"VAR_21 = await self.client.get_json(destination=server_name, path=\n '/_matrix/key/v2/server/' + urllib.parse.quote(requested_key_id),\n ignore_backoff=True, timeout=10000)\n",
"if VAR_21['server_name'] != VAR_2:\n",
"VAR_51 = await self.process_v2_response(VAR_17=server_name, VAR_18=response,\n VAR_19=time_now_ms)\n",
"await self.store.store_server_verify_keys(VAR_2, VAR_39, ((VAR_2, VAR_43,\n VAR_49) for VAR_43, VAR_49 in VAR_51.items()))\n",
"VAR_31.update(VAR_51)\n"
] | [
"async def get_server_verify_key_v2_direct(self, server_name, key_ids):...\n",
"\"\"\"docstring\"\"\"\n",
"keys = {}\n",
"for requested_key_id in key_ids:\n",
"if requested_key_id in keys:\n",
"return keys\n",
"time_now_ms = self.clock.time_msec()\n",
"response = await self.client.get_json(destination=server_name, path=\n '/_matrix/key/v2/server/' + urllib.parse.quote(requested_key_id),\n ignore_backoff=True, timeout=10000)\n",
"if response['server_name'] != server_name:\n",
"response_keys = await self.process_v2_response(from_server=server_name,\n response_json=response, time_added_ms=time_now_ms)\n",
"await self.store.store_server_verify_keys(server_name, time_now_ms, ((\n server_name, key_id, key) for key_id, key in response_keys.items()))\n",
"keys.update(response_keys)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Docstring",
"Assign'",
"For",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Expr'"
] |
[
"@VAR_0.route('/token', methods=['POST'])...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_67 = request.json if request.json else {}\n",
"VAR_68 = VAR_67.get('username')\n",
"VAR_69 = VAR_67.get('password')\n",
"if not VAR_68 or not VAR_69:\n",
"VAR_70 = {'success': False, 'message': 'Could not verify user.'}\n",
"VAR_37 = CLASS_0.query.filter_by(VAR_37=request_user).first()\n",
"return jsonify(VAR_70), 401\n",
"if VAR_37 and VAR_37.verify_password(VAR_69):\n",
"VAR_112 = VAR_37.generate_auth_token().decode('UTF-8')\n",
"VAR_70 = {'success': False, 'message': 'Could not verify user.'}\n",
"VAR_70 = {'token': VAR_112}\n",
"return jsonify(VAR_70), 401\n",
"return jsonify(VAR_70)\n"
] | [
"@gui.route('/token', methods=['POST'])...\n",
"\"\"\"docstring\"\"\"\n",
"request_json = request.json if request.json else {}\n",
"request_user = request_json.get('username')\n",
"request_password = request_json.get('password')\n",
"if not request_user or not request_password:\n",
"response_data = {'success': False, 'message': 'Could not verify user.'}\n",
"user = User.query.filter_by(user=request_user).first()\n",
"return jsonify(response_data), 401\n",
"if user and user.verify_password(request_password):\n",
"token = user.generate_auth_token().decode('UTF-8')\n",
"response_data = {'success': False, 'message': 'Could not verify user.'}\n",
"response_data = {'token': token}\n",
"return jsonify(response_data), 401\n",
"return jsonify(response_data)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Return'"
] |
[
"def FUNC_1(VAR_3=VAR_3, VAR_4=True, **VAR_5):...\n",
"VAR_24 = CLASS_8()\n",
"def FUNC_9(VAR_25):...\n",
"return VAR_25[:-1] if VAR_25.endswith('_') else VAR_25\n"
] | [
"def createChameleonEngine(types=types, untrusted=True, **overrides):...\n",
"e = ChameleonEngine()\n",
"def norm(k):...\n",
"return k[:-1] if k.endswith('_') else k\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"FunctionDef'",
"Return'"
] |
[
"@property...\n",
"VAR_38 = self.ip if self.ip else '[all ip addresses on your system]'\n",
"return self._url(VAR_38)\n"
] | [
"@property...\n",
"ip = self.ip if self.ip else '[all ip addresses on your system]'\n",
"return self._url(ip)\n"
] | [
0,
0,
0
] | [
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_164(VAR_179):...\n",
"if isinstance(VAR_179, unicodeT):\n",
"return VAR_179.encode('utf8')\n",
"if hasattr(VAR_179, 'isoformat'):\n",
"return VAR_179.isoformat()[:19].replace('T', ' ')\n",
"if VAR_179 is None:\n",
"return '<NULL>'\n",
"return VAR_179\n"
] | [
"def none_exception(value):...\n",
"if isinstance(value, unicodeT):\n",
"return value.encode('utf8')\n",
"if hasattr(value, 'isoformat'):\n",
"return value.isoformat()[:19].replace('T', ' ')\n",
"if value is None:\n",
"return '<NULL>'\n",
"return value\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"For",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"\"\"\"string\"\"\"\n",
"import argparse\n",
"import os\n",
"import re\n",
"import sys\n",
"from absl import app\n",
"import numpy as np\n",
"import six\n",
"from tensorflow.core.example import example_pb2\n",
"from tensorflow.core.framework import types_pb2\n",
"from tensorflow.python.client import session\n",
"from tensorflow.python.debug.wrappers import local_cli_wrapper\n",
"from tensorflow.python.eager import def_function\n",
"from tensorflow.python.eager import function as defun\n",
"from tensorflow.python.framework import meta_graph as meta_graph_lib\n",
"from tensorflow.python.framework import ops as ops_lib\n",
"from tensorflow.python.framework import tensor_spec\n",
"from tensorflow.python.lib.io import file_io\n",
"from tensorflow.python.platform import tf_logging as logging\n",
"from tensorflow.python.saved_model import load\n",
"from tensorflow.python.saved_model import loader\n",
"from tensorflow.python.saved_model import save\n",
"from tensorflow.python.saved_model import signature_constants\n",
"from tensorflow.python.tools import saved_model_aot_compile\n",
"from tensorflow.python.tools import saved_model_utils\n",
"from tensorflow.python.tpu import tpu\n",
"from tensorflow.python.util.compat import collections_abc\n",
"VAR_0 = (\n 'https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/debug_options_flags.cc'\n )\n",
"VAR_1 = set(['WriteFile', 'ReadFile', 'PrintV2'])\n",
"def FUNC_0(VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_22 = saved_model_utils.get_saved_model_tag_sets(VAR_2)\n",
"print('The given SavedModel contains the following tag-sets:')\n",
"for VAR_3 in sorted(VAR_22):\n",
"print('%r' % ', '.join(sorted(VAR_3)))\n",
"def FUNC_1(VAR_2, VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_23 = FUNC_10(VAR_2, VAR_3)\n",
"print(\n 'The given SavedModel MetaGraphDef contains SignatureDefs with the following keys:'\n )\n",
"for VAR_5 in sorted(VAR_23.keys()):\n",
"print('SignatureDef key: \"%s\"' % VAR_5)\n",
"def FUNC_2(VAR_4, VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_5 not in VAR_4.signature_def:\n",
"return VAR_4.signature_def[VAR_5].inputs\n"
] | [
"\"\"\"Command-line interface to inspect and execute a graph in a SavedModel.\n\nFor detailed usages and examples, please refer to:\nhttps://www.tensorflow.org/guide/saved_model#cli_to_inspect_and_execute_savedmodel\n\n\"\"\"\n",
"import argparse\n",
"import os\n",
"import re\n",
"import sys\n",
"from absl import app\n",
"import numpy as np\n",
"import six\n",
"from tensorflow.core.example import example_pb2\n",
"from tensorflow.core.framework import types_pb2\n",
"from tensorflow.python.client import session\n",
"from tensorflow.python.debug.wrappers import local_cli_wrapper\n",
"from tensorflow.python.eager import def_function\n",
"from tensorflow.python.eager import function as defun\n",
"from tensorflow.python.framework import meta_graph as meta_graph_lib\n",
"from tensorflow.python.framework import ops as ops_lib\n",
"from tensorflow.python.framework import tensor_spec\n",
"from tensorflow.python.lib.io import file_io\n",
"from tensorflow.python.platform import tf_logging as logging\n",
"from tensorflow.python.saved_model import load\n",
"from tensorflow.python.saved_model import loader\n",
"from tensorflow.python.saved_model import save\n",
"from tensorflow.python.saved_model import signature_constants\n",
"from tensorflow.python.tools import saved_model_aot_compile\n",
"from tensorflow.python.tools import saved_model_utils\n",
"from tensorflow.python.tpu import tpu\n",
"from tensorflow.python.util.compat import collections_abc\n",
"_XLA_DEBUG_OPTIONS_URL = (\n 'https://github.com/tensorflow/tensorflow/blob/master/tensorflow/compiler/xla/debug_options_flags.cc'\n )\n",
"_OP_DENYLIST = set(['WriteFile', 'ReadFile', 'PrintV2'])\n",
"def _show_tag_sets(saved_model_dir):...\n",
"\"\"\"docstring\"\"\"\n",
"tag_sets = saved_model_utils.get_saved_model_tag_sets(saved_model_dir)\n",
"print('The given SavedModel contains the following tag-sets:')\n",
"for tag_set in sorted(tag_sets):\n",
"print('%r' % ', '.join(sorted(tag_set)))\n",
"def _show_signature_def_map_keys(saved_model_dir, tag_set):...\n",
"\"\"\"docstring\"\"\"\n",
"signature_def_map = get_signature_def_map(saved_model_dir, tag_set)\n",
"print(\n 'The given SavedModel MetaGraphDef contains SignatureDefs with the following keys:'\n )\n",
"for signature_def_key in sorted(signature_def_map.keys()):\n",
"print('SignatureDef key: \"%s\"' % signature_def_key)\n",
"def _get_inputs_tensor_info_from_meta_graph_def(meta_graph_def,...\n",
"\"\"\"docstring\"\"\"\n",
"if signature_def_key not in meta_graph_def.signature_def:\n",
"return meta_graph_def.signature_def[signature_def_key].inputs\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"For",
"Expr'",
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"For",
"Expr'",
"FunctionDef'",
"Docstring",
"Condition",
"Return'"
] |
[
"def FUNC_2(self, VAR_5, VAR_6, VAR_3=None):...\n",
"VAR_31 = self.rel.to\n",
"if VAR_3 is None:\n",
"VAR_3 = {}\n",
"VAR_32 = []\n",
"if VAR_31 in self.admin_site._registry:\n",
"VAR_38 = reverse('admin:%s_%s_changelist' % (VAR_31._meta.app_label, VAR_31\n ._meta.model_name), current_app=self.admin_site.name)\n",
"VAR_27 = [super(CLASS_7, self).render(VAR_5, VAR_6, VAR_3)] + VAR_32\n",
"VAR_12 = self.url_parameters()\n",
"if VAR_6:\n",
"if VAR_12:\n",
"VAR_27.append(self.label_for_value(VAR_6))\n",
"return mark_safe(''.join(VAR_27))\n",
"VAR_40 = '?' + '&'.join([('%s=%s' % (k, VAR_39)) for k, VAR_39 in\n VAR_12.items()])\n",
"VAR_40 = ''\n",
"if 'class' not in VAR_3:\n",
"VAR_3['class'] = 'vForeignKeyRawIdAdminField'\n",
"VAR_32.append('string' % (VAR_38, VAR_40, VAR_5))\n",
"VAR_32.append('<img src=\"%s\" width=\"16\" height=\"16\" alt=\"%s\" /></a>' % (\n static('admin/img/selector-search.gif'), _('Lookup')))\n"
] | [
"def render(self, name, value, attrs=None):...\n",
"rel_to = self.rel.to\n",
"if attrs is None:\n",
"attrs = {}\n",
"extra = []\n",
"if rel_to in self.admin_site._registry:\n",
"related_url = reverse('admin:%s_%s_changelist' % (rel_to._meta.app_label,\n rel_to._meta.model_name), current_app=self.admin_site.name)\n",
"output = [super(ForeignKeyRawIdWidget, self).render(name, value, attrs)\n ] + extra\n",
"params = self.url_parameters()\n",
"if value:\n",
"if params:\n",
"output.append(self.label_for_value(value))\n",
"return mark_safe(''.join(output))\n",
"url = '?' + '&'.join([('%s=%s' % (k, v)) for k, v in params.items()])\n",
"url = ''\n",
"if 'class' not in attrs:\n",
"attrs['class'] = 'vForeignKeyRawIdAdminField'\n",
"extra.append(\n '<a href=\"%s%s\" class=\"related-lookup\" id=\"lookup_id_%s\" onclick=\"return showRelatedObjectLookupPopup(this);\"> '\n % (related_url, url, name))\n",
"extra.append('<img src=\"%s\" width=\"16\" height=\"16\" alt=\"%s\" /></a>' % (\n static('admin/img/selector-search.gif'), _('Lookup')))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def __repr__(self):...\n",
"return '<Book %r>' % self.id\n"
] | [
"def __repr__(self):...\n",
"return '<Book %r>' % self.id\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_20():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3 = FUNC_5()\n",
"remove_compiled_application(apath(VAR_3, VAR_122=request))\n",
"session.flash = T('compiled application removed')\n",
"redirect(URL('site'))\n"
] | [
"def remove_compiled_app():...\n",
"\"\"\"docstring\"\"\"\n",
"app = get_app()\n",
"remove_compiled_application(apath(app, r=request))\n",
"session.flash = T('compiled application removed')\n",
"redirect(URL('site'))\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_9(self, VAR_3, VAR_4, VAR_5):...\n",
"self.requester = self.register_user('requester', 'pass')\n",
"self.requester_tok = self.login('requester', 'pass')\n"
] | [
"def prepare(self, reactor, clock, hs):...\n",
"self.requester = self.register_user('requester', 'pass')\n",
"self.requester_tok = self.login('requester', 'pass')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"def FUNC_9(self, VAR_13, VAR_14, VAR_9):...\n",
"VAR_35 = '?width=32&height=32&method=' + VAR_13\n",
"VAR_31, VAR_32 = make_request(self.reactor, FakeSite(self.\n thumbnail_resource), 'GET', self.media_id + VAR_35, shorthand=False,\n await_result=False)\n",
"self.pump()\n",
"VAR_33 = {b'Content-Length': [b'%d' % len(self.test_image.data)],\n b'Content-Type': [self.test_image.content_type]}\n",
"self.fetches[0][0].callback((self.test_image.data, (len(self.test_image.\n data), VAR_33)))\n",
"self.pump()\n",
"if VAR_9:\n",
"self.assertEqual(VAR_32.code, 200)\n",
"self.assertEqual(VAR_32.code, 404)\n",
"if VAR_14 is not None:\n",
"self.assertEqual(VAR_32.json_body, {'errcode': 'M_NOT_FOUND', 'error':\n \"Not found [b'example.com', b'12345']\"})\n",
"self.assertEqual(VAR_32.result['body'], VAR_14, VAR_32.result['body'])\n",
"Image.open(BytesIO(VAR_32.result['body']))\n"
] | [
"def _test_thumbnail(self, method, expected_body, expected_found):...\n",
"params = '?width=32&height=32&method=' + method\n",
"request, channel = make_request(self.reactor, FakeSite(self.\n thumbnail_resource), 'GET', self.media_id + params, shorthand=False,\n await_result=False)\n",
"self.pump()\n",
"headers = {b'Content-Length': [b'%d' % len(self.test_image.data)],\n b'Content-Type': [self.test_image.content_type]}\n",
"self.fetches[0][0].callback((self.test_image.data, (len(self.test_image.\n data), headers)))\n",
"self.pump()\n",
"if expected_found:\n",
"self.assertEqual(channel.code, 200)\n",
"self.assertEqual(channel.code, 404)\n",
"if expected_body is not None:\n",
"self.assertEqual(channel.json_body, {'errcode': 'M_NOT_FOUND', 'error':\n \"Not found [b'example.com', b'12345']\"})\n",
"self.assertEqual(channel.result['body'], expected_body, channel.result['body'])\n",
"Image.open(BytesIO(channel.result['body']))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@FUNC_0...\n",
"return SpamChecker(self)\n"
] | [
"@cache_in_self...\n",
"return SpamChecker(self)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_11(self, VAR_56):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_82 = VAR_7.search\n",
"self._kill_elements(VAR_56, lambda VAR_16: VAR_82(VAR_16.text), etree.Comment)\n"
] | [
"def kill_conditional_comments(self, doc):...\n",
"\"\"\"docstring\"\"\"\n",
"has_conditional_comment = _conditional_comment_re.search\n",
"self._kill_elements(doc, lambda el: has_conditional_comment(el.text), etree\n .Comment)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"def FUNC_4(self):...\n",
"\"\"\"Model with a callable concrete function.\"\"\"\n",
"def __init__(self):...\n",
"VAR_56 = def_function.function(self.multiply, input_signature=[tensor_spec.\n TensorSpec(shape=(), dtype=dtypes.float32), tensor_spec.TensorSpec(\n shape=(), dtype=dtypes.float32)])\n",
"self.pure_concrete_function = VAR_56.get_concrete_function()\n",
"super(CLASS_2, self).__init__()\n",
"def FUNC_43(self, VAR_53, VAR_54):...\n",
"return VAR_53 * VAR_54\n"
] | [
"def testShowAllWithPureConcreteFunction(self):...\n",
"\"\"\"Model with a callable concrete function.\"\"\"\n",
"def __init__(self):...\n",
"function = def_function.function(self.multiply, input_signature=[\n tensor_spec.TensorSpec(shape=(), dtype=dtypes.float32), tensor_spec.\n TensorSpec(shape=(), dtype=dtypes.float32)])\n",
"self.pure_concrete_function = function.get_concrete_function()\n",
"super(DummyModel, self).__init__()\n",
"def multiply(self, a, b):...\n",
"return a * b\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_39(self):...\n",
"VAR_131 = self.cleaned_data['expires']\n",
"VAR_131 = VAR_131.replace(hour=23, minute=59, second=59, microsecond=999999)\n",
"if VAR_131 < timezone.now():\n",
"return VAR_131\n"
] | [
"def clean_expires(self):...\n",
"expires = self.cleaned_data['expires']\n",
"expires = expires.replace(hour=23, minute=59, second=59, microsecond=999999)\n",
"if expires < timezone.now():\n",
"return expires\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_27(VAR_0, VAR_1):...\n",
"create_dir('random')\n",
"create_dir('random2')\n",
"VAR_8 = VAR_1.post('/folders/rename', data={'current_path': 'inexisting',\n 'new_name': 'random3'}, follow_redirects=True)\n",
"assert b'Directory not found' in VAR_8.data\n",
"VAR_8 = VAR_1.post('/folders/rename', data={'current_path': 'random',\n 'new_name': 'random2'}, follow_redirects=True)\n",
"assert b'Target directory exists.' in VAR_8.data\n",
"VAR_16 = ['../adarnad', '~/adasd', '/illegal_dir', '.']\n",
"for p in VAR_16:\n",
"print(p)\n",
"VAR_8 = VAR_1.post('/folders/rename', data={'current_path': 'random',\n 'new_name': p}, follow_redirects=True)\n",
"assert b'Invalid input' in VAR_8.data\n"
] | [
"def test_invalid_inputs_fail_renaming(test_app, client):...\n",
"create_dir('random')\n",
"create_dir('random2')\n",
"resp = client.post('/folders/rename', data={'current_path': 'inexisting',\n 'new_name': 'random3'}, follow_redirects=True)\n",
"assert b'Directory not found' in resp.data\n",
"resp = client.post('/folders/rename', data={'current_path': 'random',\n 'new_name': 'random2'}, follow_redirects=True)\n",
"assert b'Target directory exists.' in resp.data\n",
"faulty_paths = ['../adarnad', '~/adasd', '/illegal_dir', '.']\n",
"for p in faulty_paths:\n",
"print(p)\n",
"resp = client.post('/folders/rename', data={'current_path': 'random',\n 'new_name': p}, follow_redirects=True)\n",
"assert b'Invalid input' in resp.data\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Assign'",
"Assert'",
"Assign'",
"Assert'",
"Assign'",
"For",
"Expr'",
"Assign'",
"Assert'"
] |
[
"def FUNC_18(VAR_22, VAR_16):...\n",
"VAR_40 = False\n",
"VAR_22 = VAR_22 or '1'\n",
"if not VAR_22.replace('.', '', 1).isdigit():\n",
"flash(_('%(seriesindex)s is not a valid number, skipping', seriesindex=\n series_index), category='warning')\n",
"if str(VAR_16.series_index) != VAR_22:\n",
"return False\n",
"VAR_16.series_index = VAR_22\n",
"return VAR_40\n",
"VAR_40 = True\n"
] | [
"def edit_book_series_index(series_index, book):...\n",
"modif_date = False\n",
"series_index = series_index or '1'\n",
"if not series_index.replace('.', '', 1).isdigit():\n",
"flash(_('%(seriesindex)s is not a valid number, skipping', seriesindex=\n series_index), category='warning')\n",
"if str(book.series_index) != series_index:\n",
"return False\n",
"book.series_index = series_index\n",
"return modif_date\n",
"modif_date = True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Return'",
"Assign'",
"Return'",
"Assign'"
] |
[
"def __init__(self):...\n",
"VAR_57 = def_function.function(self.multiply, input_signature=[tensor_spec.\n TensorSpec(shape=(), dtype=dtypes.float32), tensor_spec.TensorSpec(\n shape=(), dtype=dtypes.float32)])\n",
"self.pure_concrete_function = VAR_57.get_concrete_function()\n",
"super(CLASS_2, self).__init__()\n"
] | [
"def __init__(self):...\n",
"function = def_function.function(self.multiply, input_signature=[\n tensor_spec.TensorSpec(shape=(), dtype=dtypes.float32), tensor_spec.\n TensorSpec(shape=(), dtype=dtypes.float32)])\n",
"self.pure_concrete_function = function.get_concrete_function()\n",
"super(DummyModel, self).__init__()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def __init__(self):...\n",
"self.server_name = 'mock_server'\n",
"self.key = signedjson.key.generate_signing_key(0)\n"
] | [
"def __init__(self):...\n",
"self.server_name = 'mock_server'\n",
"self.key = signedjson.key.generate_signing_key(0)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"@default('client_secret')...\n",
"return os.getenv('GITHUB_CLIENT_SECRET', '')\n"
] | [
"@default('client_secret')...\n",
"return os.getenv('GITHUB_CLIENT_SECRET', '')\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_8(VAR_17):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_17['is_paginated']:\n",
"\"\"\" Initialize variables \"\"\"\n",
"VAR_28 = VAR_29 = False\n",
"VAR_30 = VAR_31 = range(0)\n",
"if VAR_17['pages'] <= VAR_11:\n",
"VAR_28 = VAR_29 = True\n",
"if VAR_17['current_page_number'] <= VAR_13:\n",
"VAR_34 = [n for n in range(1, VAR_17['pages'] + 1) if n > 0 and n <= VAR_17\n ['pages']]\n",
"VAR_28 = True\n",
"if VAR_17['current_page_number'] > VAR_17['pages'] - VAR_14:\n",
"VAR_32 = VAR_17['page_object']\n",
"VAR_34 = [n for n in range(1, VAR_11 + 1) if n > 0 and n <= VAR_17['pages']]\n",
"VAR_29 = True\n",
"VAR_34 = [n for n in range(VAR_17['current_page_number'] - VAR_16, VAR_17[\n 'current_page_number'] + VAR_16 + 1) if n > 0 and n <= VAR_17['pages']]\n",
"if VAR_32.has_previous():\n",
"VAR_30 = [(n + VAR_17['pages']) for n in range(0, -VAR_15, -1)]\n",
"VAR_34 = [n for n in range(VAR_17['pages'] - VAR_12 + 1, VAR_17['pages'] + \n 1) if n > 0 and n <= VAR_17['pages']]\n",
"VAR_30 = [(n + VAR_17['pages']) for n in range(0, -VAR_15, -1)]\n",
"VAR_35 = VAR_32.previous_page_number()\n",
"VAR_35 = None\n",
"VAR_31 = [(n + 1) for n in range(0, VAR_15)]\n",
"VAR_31 = [(n + 1) for n in range(0, VAR_15)]\n",
"if VAR_32.has_next():\n",
"VAR_36 = VAR_32.next_page_number()\n",
"VAR_36 = None\n",
"return {'base_url': VAR_17['base_url'], 'is_paginated': VAR_17[\n 'is_paginated'], 'previous': VAR_35, 'has_previous': VAR_32.\n has_previous(), 'next': VAR_36, 'has_next': VAR_32.has_next(), 'page':\n VAR_17['current_page_number'], 'pages': VAR_17['pages'], 'page_numbers':\n VAR_34, 'in_leading_range': VAR_28, 'in_trailing_range': VAR_29,\n 'pages_outside_leading_range': VAR_30, 'pages_outside_trailing_range':\n VAR_31}\n"
] | [
"def setup_paginator(context):...\n",
"\"\"\"docstring\"\"\"\n",
"if context['is_paginated']:\n",
"\"\"\" Initialize variables \"\"\"\n",
"in_leading_range = in_trailing_range = False\n",
"pages_outside_leading_range = pages_outside_trailing_range = range(0)\n",
"if context['pages'] <= LEADING_PAGE_RANGE_DISPLAYED:\n",
"in_leading_range = in_trailing_range = True\n",
"if context['current_page_number'] <= LEADING_PAGE_RANGE:\n",
"page_numbers = [n for n in range(1, context['pages'] + 1) if n > 0 and n <=\n context['pages']]\n",
"in_leading_range = True\n",
"if context['current_page_number'] > context['pages'] - TRAILING_PAGE_RANGE:\n",
"page_object = context['page_object']\n",
"page_numbers = [n for n in range(1, LEADING_PAGE_RANGE_DISPLAYED + 1) if n >\n 0 and n <= context['pages']]\n",
"in_trailing_range = True\n",
"page_numbers = [n for n in range(context['current_page_number'] -\n ADJACENT_PAGES, context['current_page_number'] + ADJACENT_PAGES + 1) if\n n > 0 and n <= context['pages']]\n",
"if page_object.has_previous():\n",
"pages_outside_leading_range = [(n + context['pages']) for n in range(0, -\n NUM_PAGES_OUTSIDE_RANGE, -1)]\n",
"page_numbers = [n for n in range(context['pages'] -\n TRAILING_PAGE_RANGE_DISPLAYED + 1, context['pages'] + 1) if n > 0 and n <=\n context['pages']]\n",
"pages_outside_leading_range = [(n + context['pages']) for n in range(0, -\n NUM_PAGES_OUTSIDE_RANGE, -1)]\n",
"previous_page_number = page_object.previous_page_number()\n",
"previous_page_number = None\n",
"pages_outside_trailing_range = [(n + 1) for n in range(0,\n NUM_PAGES_OUTSIDE_RANGE)]\n",
"pages_outside_trailing_range = [(n + 1) for n in range(0,\n NUM_PAGES_OUTSIDE_RANGE)]\n",
"if page_object.has_next():\n",
"next_page_number = page_object.next_page_number()\n",
"next_page_number = None\n",
"return {'base_url': context['base_url'], 'is_paginated': context[\n 'is_paginated'], 'previous': previous_page_number, 'has_previous':\n page_object.has_previous(), 'next': next_page_number, 'has_next':\n page_object.has_next(), 'page': context['current_page_number'], 'pages':\n context['pages'], 'page_numbers': page_numbers, 'in_leading_range':\n in_leading_range, 'in_trailing_range': in_trailing_range,\n 'pages_outside_leading_range': pages_outside_leading_range,\n 'pages_outside_trailing_range': pages_outside_trailing_range}\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"@transaction.non_atomic_requests...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_38 = SlashSeparatedCourseKey.from_string(VAR_10)\n",
"def FUNC_88(VAR_176, VAR_177):...\n",
"return JsonResponse({'error': unicode(err)}, status=400)\n",
"return JsonResponse()\n",
"\"\"\"docstring\"\"\"\n",
"VAR_207 = unicodecsv.reader(UniversalNewlineIterator(f), encoding='utf-8')\n",
"VAR_210 = next(VAR_207)\n",
"VAR_210 = []\n",
"VAR_180 = None\n",
"if 'cohort' not in VAR_210:\n",
"VAR_180 = _(\"The file must contain a 'cohort' column containing cohort names.\")\n",
"if 'email' not in VAR_210 and 'username' not in VAR_210:\n",
"if VAR_180:\n",
"VAR_180 = _(\n \"The file must contain a 'username' column, an 'email' column, or both.\")\n",
"VAR_72, VAR_123 = store_uploaded_file(VAR_9, 'uploaded-file', ['.csv'],\n course_and_time_based_filename_generator(VAR_38, 'cohorts'),\n max_file_size=2000000, validator=validator)\n",
"instructor_task.api.submit_cohort_students(VAR_9, VAR_38, VAR_123)\n"
] | [
"@transaction.non_atomic_requests...\n",
"\"\"\"docstring\"\"\"\n",
"course_key = SlashSeparatedCourseKey.from_string(course_id)\n",
"def validator(file_storage, file_to_validate):...\n",
"return JsonResponse({'error': unicode(err)}, status=400)\n",
"return JsonResponse()\n",
"\"\"\"docstring\"\"\"\n",
"reader = unicodecsv.reader(UniversalNewlineIterator(f), encoding='utf-8')\n",
"fieldnames = next(reader)\n",
"fieldnames = []\n",
"msg = None\n",
"if 'cohort' not in fieldnames:\n",
"msg = _(\"The file must contain a 'cohort' column containing cohort names.\")\n",
"if 'email' not in fieldnames and 'username' not in fieldnames:\n",
"if msg:\n",
"msg = _(\n \"The file must contain a 'username' column, an 'email' column, or both.\")\n",
"__, filename = store_uploaded_file(request, 'uploaded-file', ['.csv'],\n course_and_time_based_filename_generator(course_key, 'cohorts'),\n max_file_size=2000000, validator=validator)\n",
"instructor_task.api.submit_cohort_students(request, course_key, filename)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"FunctionDef'",
"Return'",
"Return'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_7(self, VAR_11) ->str:...\n",
"return VAR_11.getClientIP()\n"
] | [
"def get_ip_from_request(self, request) ->str:...\n",
"return request.getClientIP()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_5(self):...\n",
"self.set_doctype_roles()\n"
] | [
"def before_insert(self):...\n",
"self.set_doctype_roles()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"@property...\n",
"return True\n"
] | [
"@property...\n",
"return True\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"@VAR_0.register...\n",
"VAR_2 = pickle.loads(bz2.decompress(base64.urlsafe_b64decode(VAR_1)))\n",
"VAR_3, VAR_4 = read_host_config(SESSION, VAR_2)\n",
"if VAR_3 is not None:\n",
"return VAR_4 + 'checked in successful'\n",
"return VAR_4 + 'error checking in'\n"
] | [
"@XMLRPC.register...\n",
"config = pickle.loads(bz2.decompress(base64.urlsafe_b64decode(pickledata)))\n",
"r, message = read_host_config(SESSION, config)\n",
"if r is not None:\n",
"return message + 'checked in successful'\n",
"return message + 'error checking in'\n"
] | [
0,
5,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_65(self):...\n",
""
] | [
"def close(self):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_15(self, VAR_0):...\n",
"VAR_40 = VAR_0 / 'foo'\n",
"VAR_40.ensure()\n",
"VAR_36 = QUrl.fromLocalFile(str(VAR_40))\n",
"VAR_37 = QNetworkRequest(VAR_36)\n",
"VAR_38 = filescheme.handler(VAR_37)\n",
"assert VAR_38 is None\n"
] | [
"def test_file(self, tmpdir):...\n",
"filename = tmpdir / 'foo'\n",
"filename.ensure()\n",
"url = QUrl.fromLocalFile(str(filename))\n",
"req = QNetworkRequest(url)\n",
"reply = filescheme.handler(req)\n",
"assert reply is None\n"
] | [
0,
0,
0,
0,
0,
3,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assert'"
] |
[
"def FUNC_26(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_52 = self.meta.get_set_only_once_fields()\n",
"if VAR_52 and self._doc_before_save:\n",
"for field in VAR_52:\n",
"return False\n",
"VAR_97 = False\n",
"VAR_26 = self.get(field.fieldname)\n",
"VAR_53 = self._doc_before_save.get(field.fieldname)\n",
"if field.fieldtype in VAR_81:\n",
"VAR_97 = not self.is_child_table_same(field.fieldname)\n",
"if field.fieldtype in ('Date', 'Datetime', 'Time'):\n",
"if VAR_97:\n",
"VAR_97 = str(VAR_26) != str(VAR_53)\n",
"VAR_97 = VAR_26 != VAR_53\n",
"frappe.throw(_('Value cannot be changed for {0}').format(self.meta.\n get_label(field.fieldname)), frappe.CannotChangeConstantError)\n"
] | [
"def validate_set_only_once(self):...\n",
"\"\"\"docstring\"\"\"\n",
"set_only_once_fields = self.meta.get_set_only_once_fields()\n",
"if set_only_once_fields and self._doc_before_save:\n",
"for field in set_only_once_fields:\n",
"return False\n",
"fail = False\n",
"value = self.get(field.fieldname)\n",
"original_value = self._doc_before_save.get(field.fieldname)\n",
"if field.fieldtype in table_fields:\n",
"fail = not self.is_child_table_same(field.fieldname)\n",
"if field.fieldtype in ('Date', 'Datetime', 'Time'):\n",
"if fail:\n",
"fail = str(value) != str(original_value)\n",
"fail = value != original_value\n",
"frappe.throw(_('Value cannot be changed for {0}').format(self.meta.\n get_label(field.fieldname)), frappe.CannotChangeConstantError)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"For",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_135(self, VAR_212):...\n",
"if 'everybody' in VAR_212.can_read or not self.settings.manage_permissions:\n",
"return True\n",
"if self.auth.user:\n",
"VAR_167 = self.settings.groups\n",
"return False\n",
"if 'wiki_editor' in VAR_167 or set(VAR_167).intersection(set(VAR_212.\n",
"return True\n"
] | [
"def can_read(self, page):...\n",
"if 'everybody' in page.can_read or not self.settings.manage_permissions:\n",
"return True\n",
"if self.auth.user:\n",
"groups = self.settings.groups\n",
"return False\n",
"if 'wiki_editor' in groups or set(groups).intersection(set(page.can_read +\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Return'",
"Condition",
"Return'"
] |
[
"def FUNC_19(self):...\n",
"VAR_27 = np.array([[1], [2]])\n",
"VAR_32 = os.path.join(test.get_temp_dir(), 'input.npz')\n",
"np.savez(VAR_32, VAR_54=x0)\n",
"VAR_28 = np.ones([2, 10])\n",
"VAR_21 = 'x0=' + VAR_32 + '[a]'\n",
"VAR_22 = 'x1=np.ones([2,10])'\n",
"VAR_31 = saved_model_cli.load_inputs_from_input_arg_string(VAR_21, VAR_22, '')\n",
"self.assertTrue(np.all(VAR_31['x0'] == VAR_27))\n",
"self.assertTrue(np.all(VAR_31['x1'] == VAR_28))\n"
] | [
"def testInputParserBoth(self):...\n",
"x0 = np.array([[1], [2]])\n",
"input_path = os.path.join(test.get_temp_dir(), 'input.npz')\n",
"np.savez(input_path, a=x0)\n",
"x1 = np.ones([2, 10])\n",
"input_str = 'x0=' + input_path + '[a]'\n",
"input_expr_str = 'x1=np.ones([2,10])'\n",
"feed_dict = saved_model_cli.load_inputs_from_input_arg_string(input_str,\n input_expr_str, '')\n",
"self.assertTrue(np.all(feed_dict['x0'] == x0))\n",
"self.assertTrue(np.all(feed_dict['x1'] == x1))\n"
] | [
0,
5,
5,
5,
5,
5,
5,
5,
5,
5
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"async def FUNC_13(self, VAR_6, VAR_11, VAR_12, VAR_13):...\n",
"await super().on_rdata(VAR_6, VAR_11, VAR_12, VAR_13)\n",
"for r in VAR_13:\n",
"self.received_rdata_rows.append((VAR_6, VAR_12, r))\n"
] | [
"async def on_rdata(self, stream_name, instance_name, token, rows):...\n",
"await super().on_rdata(stream_name, instance_name, token, rows)\n",
"for r in rows:\n",
"self.received_rdata_rows.append((stream_name, token, r))\n"
] | [
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Expr'",
"For",
"Expr'"
] |
[
"def FUNC_75(self):...\n",
"return self.renderer.header(self.inline(self.token['text']), self.token[\n 'level'], self.token['text'])\n"
] | [
"def output_heading(self):...\n",
"return self.renderer.header(self.inline(self.token['text']), self.token[\n 'level'], self.token['text'])\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"async def FUNC_2(*, VAR_10: Dependant, VAR_11: Dict[str, Any], VAR_9: bool...\n",
"assert VAR_10.call is not None, 'dependant.call must be a function'\n",
"if VAR_9:\n",
"return await VAR_10.call(**values)\n",
"return await run_in_threadpool(VAR_10.call, **values)\n"
] | [
"async def run_endpoint_function(*, dependant: Dependant, values: Dict[str,...\n",
"assert dependant.call is not None, 'dependant.call must be a function'\n",
"if is_coroutine:\n",
"return await dependant.call(**values)\n",
"return await run_in_threadpool(dependant.call, **values)\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Assert'",
"Condition",
"Return'",
"Return'"
] |
[
"def __init__(self):...\n",
"self._allowed = []\n",
"self._exempt = []\n",
"self.seted = False\n"
] | [
"def __init__(self):...\n",
"self._allowed = []\n",
"self._exempt = []\n",
"self.seted = False\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def __init__(self, VAR_6='OMERO.web', VAR_7=False, VAR_8=False, VAR_9=True,...\n",
"\"\"\"docstring\"\"\"\n",
"self.useragent = VAR_6\n",
"self.isAdmin = VAR_7\n",
"self.isGroupOwner = VAR_8\n",
"self.doConnectionCleanup = VAR_9\n",
"self.omero_group = VAR_10\n",
"self.allowPublic = VAR_11\n"
] | [
"def __init__(self, useragent='OMERO.web', isAdmin=False, isGroupOwner=False,...\n",
"\"\"\"docstring\"\"\"\n",
"self.useragent = useragent\n",
"self.isAdmin = isAdmin\n",
"self.isGroupOwner = isGroupOwner\n",
"self.doConnectionCleanup = doConnectionCleanup\n",
"self.omero_group = omero_group\n",
"self.allowPublic = allowPublic\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_34(self):...\n",
"VAR_27 = widgets.AdminURLFieldWidget()\n",
"self.assertHTMLEqual(conditional_escape(VAR_27.render('test',\n 'http://example-äüö.com')), 'string')\n"
] | [
"def test_render_idn(self):...\n",
"w = widgets.AdminURLFieldWidget()\n",
"self.assertHTMLEqual(conditional_escape(w.render('test',\n 'http://example-äüö.com')),\n '<p class=\"url\">Currently:<a href=\"http://xn--example--7za4pnc.com\">http://example-äüö.com</a><br />Change:<input class=\"vURLField\" name=\"test\" type=\"url\" value=\"http://example-äüö.com\" /></p>'\n )\n"
] | [
0,
0,
2
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_3(self, *, VAR_4: str) ->Set[str]:...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_8 = super().get_imports(VAR_4=prefix)\n",
"VAR_8.update({'from datetime import date', 'from typing import cast'})\n",
"return VAR_8\n"
] | [
"def get_imports(self, *, prefix: str) ->Set[str]:...\n",
"\"\"\"docstring\"\"\"\n",
"imports = super().get_imports(prefix=prefix)\n",
"imports.update({'from datetime import date', 'from typing import cast'})\n",
"return imports\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Return'"
] |
[
"@VAR_4.route('/images/<path:path>')...\n",
"def FUNC_8(VAR_5):...\n",
"import os\n",
"VAR_28 = os.path.dirname(__file__)\n",
"VAR_29 = VAR_5\n",
"VAR_30 = os.path.join(VAR_28, VAR_29)\n",
"return VAR_30\n"
] | [
"@app.route('/images/<path:path>')...\n",
"def get_absolute_path(path):...\n",
"import os\n",
"script_dir = os.path.dirname(__file__)\n",
"rel_path = path\n",
"abs_file_path = os.path.join(script_dir, rel_path)\n",
"return abs_file_path\n"
] | [
0,
0,
0,
0,
0,
1,
0
] | [
"Condition",
"FunctionDef'",
"Import'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |