lines (sequence, lengths 1–383) | raw_lines (sequence, lengths 1–383) | label (sequence, lengths 1–383) | type (sequence, lengths 1–383) |
---|---|---|---|
[
"def FUNC_54():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3 = FUNC_5()\n",
"if not VAR_19:\n",
"session.flash = VAR_117\n",
"VAR_46 = FORM.confirm(T('Pull'), {T('Cancel'): URL('site')})\n",
"redirect(URL('site'))\n",
"if VAR_46.accepted:\n",
"if 'cancel' in request.vars:\n",
"VAR_195 = git.Repo(os.path.join(apath(VAR_122=request), VAR_3))\n",
"session.flash = T(\n 'Pull failed, certain files could not be checked out. Check logs for details.'\n )\n",
"return dict(VAR_3=app, VAR_46=dialog)\n",
"redirect(URL('site'))\n",
"VAR_196 = VAR_195.remotes.origin\n",
"redirect(URL('site'))\n",
"VAR_196.fetch()\n",
"session.flash = T('string')\n",
"VAR_196.pull()\n",
"redirect(URL('site'))\n",
"session.flash = T('Application updated via git pull')\n",
"session.flash = T('Pull failed, git exited abnormally. See logs for details.')\n",
"redirect(URL('site'))\n",
"redirect(URL('site'))\n",
"session.flash = T('string')\n",
"redirect(URL('site'))\n"
] | [
"def git_pull():...\n",
"\"\"\"docstring\"\"\"\n",
"app = get_app()\n",
"if not have_git:\n",
"session.flash = GIT_MISSING\n",
"dialog = FORM.confirm(T('Pull'), {T('Cancel'): URL('site')})\n",
"redirect(URL('site'))\n",
"if dialog.accepted:\n",
"if 'cancel' in request.vars:\n",
"repo = git.Repo(os.path.join(apath(r=request), app))\n",
"session.flash = T(\n 'Pull failed, certain files could not be checked out. Check logs for details.'\n )\n",
"return dict(app=app, dialog=dialog)\n",
"redirect(URL('site'))\n",
"origin = repo.remotes.origin\n",
"redirect(URL('site'))\n",
"origin.fetch()\n",
"session.flash = T(\n 'Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.'\n )\n",
"origin.pull()\n",
"redirect(URL('site'))\n",
"session.flash = T('Application updated via git pull')\n",
"session.flash = T('Pull failed, git exited abnormally. See logs for details.')\n",
"redirect(URL('site'))\n",
"redirect(URL('site'))\n",
"session.flash = T(\n 'Pull is not possible because you have unmerged files. Fix them up in the work tree, and then try again.'\n )\n",
"redirect(URL('site'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_212 = VAR_2.GET.get('Image', None)\n",
"VAR_213 = VAR_2.GET.get('Dataset', None)\n",
"VAR_214 = VAR_2.GET.get('Well', None)\n",
"if VAR_214 is not None:\n",
"VAR_214 = [VAR_241(VAR_318) for VAR_318 in VAR_214.split(',')]\n",
"if VAR_212 is None and VAR_213 is None:\n",
"VAR_113 = VAR_5.getObjects('Well', VAR_214)\n",
"return HttpResponse(\n 'Need to specify /?Image=1,2 or /?Dataset=1,2 or /?Well=1,2')\n",
"def FUNC_85(VAR_215, VAR_187):...\n",
"VAR_322 = getIntOrDefault(VAR_2, 'Index', 0)\n",
"VAR_323 = [int(VAR_40) for VAR_40 in VAR_187.split(',')]\n",
"VAR_212 = [VAR_345(w.getImage(VAR_322).getId()) for w in VAR_113]\n",
"VAR_324 = {}\n",
"VAR_212 = ','.join(VAR_212)\n",
"for VAR_38 in VAR_5.getObjects(VAR_215, VAR_323):\n",
"VAR_324[VAR_38.id] = VAR_38\n",
"VAR_325 = [VAR_41 for VAR_41 in VAR_323 if VAR_41 in VAR_324.keys()]\n",
"if len(VAR_325) == 0:\n",
"VAR_335 = list(VAR_324.values())[0].getDetails().group.id.val\n",
"return VAR_325, VAR_324\n",
"VAR_5.SERVICE_OPTS.setOmeroGroup(VAR_335)\n"
] | [
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"imageIds = request.GET.get('Image', None)\n",
"datasetIds = request.GET.get('Dataset', None)\n",
"wellIds = request.GET.get('Well', None)\n",
"if wellIds is not None:\n",
"wellIds = [long(i) for i in wellIds.split(',')]\n",
"if imageIds is None and datasetIds is None:\n",
"wells = conn.getObjects('Well', wellIds)\n",
"return HttpResponse(\n 'Need to specify /?Image=1,2 or /?Dataset=1,2 or /?Well=1,2')\n",
"def validateIds(dtype, ids):...\n",
"wellIdx = getIntOrDefault(request, 'Index', 0)\n",
"ints = [int(oid) for oid in ids.split(',')]\n",
"imageIds = [str(w.getImage(wellIdx).getId()) for w in wells]\n",
"validObjs = {}\n",
"imageIds = ','.join(imageIds)\n",
"for obj in conn.getObjects(dtype, ints):\n",
"validObjs[obj.id] = obj\n",
"filteredIds = [iid for iid in ints if iid in validObjs.keys()]\n",
"if len(filteredIds) == 0:\n",
"gid = list(validObjs.values())[0].getDetails().group.id.val\n",
"return filteredIds, validObjs\n",
"conn.SERVICE_OPTS.setOmeroGroup(gid)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Return'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Expr'"
] |
[
"def FUNC_6(VAR_20=None):...\n",
"\"\"\"docstring\"\"\"\n",
"return formatdate(VAR_20, usegmt=True)\n"
] | [
"def http_date(epoch_seconds=None):...\n",
"\"\"\"docstring\"\"\"\n",
"return formatdate(epoch_seconds, usegmt=True)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_0(VAR_0, VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = VAR_0.POST.get('path') or VAR_0.META.get('HTTP_REFERER') or '/'\n",
"if VAR_1 == 'edit_on' or VAR_1 == 'edit_off':\n",
"set_edit_mode(VAR_0, VAR_1.endswith('_on'))\n",
"return HttpResponseRedirect(VAR_2)\n"
] | [
"def handle_command(request, command):...\n",
"\"\"\"docstring\"\"\"\n",
"path = request.POST.get('path') or request.META.get('HTTP_REFERER') or '/'\n",
"if command == 'edit_on' or command == 'edit_off':\n",
"set_edit_mode(request, command.endswith('_on'))\n",
"return HttpResponseRedirect(path)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Expr'",
"Return'"
] |
[
"def FUNC_2(VAR_21, VAR_17, VAR_19, VAR_22):...\n",
"VAR_64 = ''\n",
"VAR_38 = []\n",
"VAR_65 = 0\n",
"while 1:\n",
"VAR_86, VAR_87 = None, None\n",
"for regex in VAR_17:\n",
"VAR_97 = VAR_65\n",
"if VAR_86 is None:\n",
"while 1:\n",
"if VAR_38:\n",
"VAR_61 = VAR_86.group(0)\n",
"VAR_103 = regex.search(VAR_21, pos=regex_pos)\n",
"assert not VAR_38[-1].tail\n",
"assert not VAR_64\n",
"VAR_88 = VAR_86.end()\n",
"if VAR_103 is None:\n",
"VAR_38[-1].tail = VAR_21\n",
"VAR_64 = VAR_21\n",
"if VAR_61.endswith('.') or VAR_61.endswith(','):\n",
"VAR_104 = VAR_103.group('host')\n",
"if VAR_103 is None:\n",
"return VAR_64, VAR_38\n",
"VAR_88 -= 1\n",
"VAR_89 = VAR_21[:VAR_86.start()]\n",
"for host_regex in VAR_19:\n",
"if VAR_87 is None or VAR_103.start() < VAR_87:\n",
"VAR_61 = VAR_61[:-1]\n",
"if VAR_38:\n",
"if host_regex.search(VAR_104):\n",
"VAR_86 = VAR_103\n",
"assert not VAR_38[-1].tail\n",
"assert not VAR_64\n",
"VAR_97 = VAR_103.end()\n",
"VAR_87 = VAR_103.start()\n",
"VAR_38[-1].tail = VAR_89\n",
"VAR_64 = VAR_89\n",
"VAR_57 = VAR_22('a')\n",
"VAR_57.set('href', VAR_61)\n",
"VAR_90 = VAR_86.group('body')\n",
"if not VAR_90:\n",
"VAR_90 = VAR_61\n",
"if VAR_90.endswith('.') or VAR_90.endswith(','):\n",
"VAR_90 = VAR_90[:-1]\n",
"VAR_57.text = VAR_90\n",
"VAR_38.append(VAR_57)\n",
"VAR_21 = VAR_21[VAR_88:]\n"
] | [
"def _link_text(text, link_regexes, avoid_hosts, factory):...\n",
"leading_text = ''\n",
"links = []\n",
"last_pos = 0\n",
"while 1:\n",
"best_match, best_pos = None, None\n",
"for regex in link_regexes:\n",
"regex_pos = last_pos\n",
"if best_match is None:\n",
"while 1:\n",
"if links:\n",
"link = best_match.group(0)\n",
"match = regex.search(text, pos=regex_pos)\n",
"assert not links[-1].tail\n",
"assert not leading_text\n",
"end = best_match.end()\n",
"if match is None:\n",
"links[-1].tail = text\n",
"leading_text = text\n",
"if link.endswith('.') or link.endswith(','):\n",
"host = match.group('host')\n",
"if match is None:\n",
"return leading_text, links\n",
"end -= 1\n",
"prev_text = text[:best_match.start()]\n",
"for host_regex in avoid_hosts:\n",
"if best_pos is None or match.start() < best_pos:\n",
"link = link[:-1]\n",
"if links:\n",
"if host_regex.search(host):\n",
"best_match = match\n",
"assert not links[-1].tail\n",
"assert not leading_text\n",
"regex_pos = match.end()\n",
"best_pos = match.start()\n",
"links[-1].tail = prev_text\n",
"leading_text = prev_text\n",
"anchor = factory('a')\n",
"anchor.set('href', link)\n",
"body = best_match.group('body')\n",
"if not body:\n",
"body = link\n",
"if body.endswith('.') or body.endswith(','):\n",
"body = body[:-1]\n",
"anchor.text = body\n",
"links.append(anchor)\n",
"text = text[end:]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"For",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assert'",
"Assert'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Return'",
"AugAssign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assert'",
"Assert'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'"
] |
[
"def FUNC_37(self, VAR_90):...\n",
"self._queryset = VAR_90\n",
"self.widget.choices = self.choices\n"
] | [
"def _set_queryset(self, queryset):...\n",
"self._queryset = queryset\n",
"self.widget.choices = self.choices\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"def FUNC_7(self):...\n",
"return f'https://{self.hostname}/{self.namespace}.git'\n"
] | [
"def get_repo_url(self):...\n",
"return f'https://{self.hostname}/{self.namespace}.git'\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@VAR_0.route('/api/query/star', methods=['POST'])...\n",
"if get_user() is None:\n",
"return 'Unauthorized access', 403\n",
"VAR_14 = g.conn.session.query(Query).get(request.form['query_id'])\n",
"if VAR_14:\n",
"VAR_34 = Star()\n",
"return 'Query not found', 404\n",
"VAR_34.user = get_user()\n",
"VAR_34.query = VAR_14\n",
"g.conn.session.add(VAR_34)\n",
"g.conn.session.commit()\n",
"if e.args[0] == 1062:\n",
"return ''\n",
"g.conn.session.rollback()\n"
] | [
"@app.route('/api/query/star', methods=['POST'])...\n",
"if get_user() is None:\n",
"return 'Unauthorized access', 403\n",
"query = g.conn.session.query(Query).get(request.form['query_id'])\n",
"if query:\n",
"star = Star()\n",
"return 'Query not found', 404\n",
"star.user = get_user()\n",
"star.query = query\n",
"g.conn.session.add(star)\n",
"g.conn.session.commit()\n",
"if e.args[0] == 1062:\n",
"return ''\n",
"g.conn.session.rollback()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Return'",
"Expr'"
] |
[
"def FUNC_1(self, VAR_3, VAR_4, VAR_6):...\n",
"self.room_id = self.helper.create_room_as(self.user_id)\n"
] | [
"def prepare(self, reactor, clock, hs):...\n",
"self.room_id = self.helper.create_room_as(self.user_id)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Assign'"
] |
[
"def FUNC_28(VAR_14):...\n",
"\"\"\"docstring\"\"\"\n",
"@wraps(VAR_14)...\n",
"VAR_112 = None\n",
"if 'X-Access-Token' in request.headers:\n",
"VAR_112 = request.headers['X-Access-Token']\n",
"if not VAR_112:\n",
"return jsonify({'success': False, 'message': 'Token is missing'}), 401\n",
"VAR_13 = jwt.decode(VAR_112, VAR_0.config['SECRET_KEY'], algorithms=['HS256'])\n",
"return jsonify({'success': False, 'message': 'Token is expired'}), 401\n",
"return VAR_14(VAR_15, *VAR_25, **kwargs)\n",
"VAR_15 = CLASS_0.query.filter_by(VAR_36=data['public_id']).first()\n",
"if VAR_15 is None:\n",
"return jsonify({'success': False, 'message': 'Token is old. Please renew'}\n ), 401\n"
] | [
"def token_required(f):...\n",
"\"\"\"docstring\"\"\"\n",
"@wraps(f)...\n",
"token = None\n",
"if 'X-Access-Token' in request.headers:\n",
"token = request.headers['X-Access-Token']\n",
"if not token:\n",
"return jsonify({'success': False, 'message': 'Token is missing'}), 401\n",
"data = jwt.decode(token, gui.config['SECRET_KEY'], algorithms=['HS256'])\n",
"return jsonify({'success': False, 'message': 'Token is expired'}), 401\n",
"return f(current_api_user, *args, **kwargs)\n",
"current_api_user = User.query.filter_by(public_id=data['public_id']).first()\n",
"if current_api_user is None:\n",
"return jsonify({'success': False, 'message': 'Token is old. Please renew'}\n ), 401\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Return'",
"Return'",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_3(self, VAR_4, VAR_5):...\n",
"print(storage.location)\n",
"VAR_18 = storage.save('test.jpg', f)\n",
"VAR_12 = VAR_4.post(reverse('upload'), {'file': json.dumps([VAR_18]),\n 's3file': '[\"file\"]'})\n",
"assert VAR_12.status_code == 201\n"
] | [
"def test_value_from_datadict(self, client, upload_file):...\n",
"print(storage.location)\n",
"uploaded_file = storage.save('test.jpg', f)\n",
"response = client.post(reverse('upload'), {'file': json.dumps([\n uploaded_file]), 's3file': '[\"file\"]'})\n",
"assert response.status_code == 201\n"
] | [
0,
1,
1,
1,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assert'"
] |
[
"def FUNC_156():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_355 = {'prefix': VAR_113, 'user': VAR_116 if self.user_id else None}\n",
"for i in VAR_250:\n",
"if i['name'] == VAR_28('Log In'):\n",
"self.bar = VAR_355\n",
"VAR_346 = 'login'\n",
"if i['name'] == VAR_28('Sign Up'):\n",
"VAR_355[VAR_346] = i['href']\n",
"VAR_346 = 'register'\n",
"if i['name'] == VAR_28('Lost password?'):\n",
"VAR_346 = 'request_reset_password'\n",
"if i['name'] == VAR_28('Forgot username?'):\n",
"VAR_346 = 'retrieve_username'\n",
"if i['name'] == VAR_28('Log Out'):\n",
"VAR_346 = 'logout'\n",
"if i['name'] == VAR_28('Profile'):\n",
"VAR_346 = 'profile'\n",
"if i['name'] == VAR_28('Password'):\n",
"VAR_346 = 'change_password'\n"
] | [
"def bare():...\n",
"\"\"\"docstring\"\"\"\n",
"bare = {'prefix': prefix, 'user': user_identifier if self.user_id else None}\n",
"for i in items:\n",
"if i['name'] == T('Log In'):\n",
"self.bar = bare\n",
"k = 'login'\n",
"if i['name'] == T('Sign Up'):\n",
"bare[k] = i['href']\n",
"k = 'register'\n",
"if i['name'] == T('Lost password?'):\n",
"k = 'request_reset_password'\n",
"if i['name'] == T('Forgot username?'):\n",
"k = 'retrieve_username'\n",
"if i['name'] == T('Log Out'):\n",
"k = 'logout'\n",
"if i['name'] == T('Profile'):\n",
"k = 'profile'\n",
"if i['name'] == T('Password'):\n",
"k = 'change_password'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'"
] |
[
"@VAR_8.route('/config/', methods=['GET'])...\n",
"if VAR_8.interface.auth is None or current_user.is_authenticated:\n",
"return jsonify(VAR_8.interface.config)\n",
"return {'auth_required': True, 'auth_message': VAR_8.interface.auth_message}\n"
] | [
"@app.route('/config/', methods=['GET'])...\n",
"if app.interface.auth is None or current_user.is_authenticated:\n",
"return jsonify(app.interface.config)\n",
"return {'auth_required': True, 'auth_message': app.interface.auth_message}\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_10(self):...\n",
"VAR_15 = self._makeOne()\n",
"VAR_16 = CLASS_1()\n",
"VAR_18 = {VAR_15.cookie_name: 'deleted', 'RESPONSE': VAR_16}\n",
"VAR_17 = CLASS_0(**req_data)\n",
"self.assertEqual(len(VAR_16.cookies), 0)\n",
"self.assertEqual(VAR_15.extractCredentials(VAR_17), {})\n"
] | [
"def test_extractCredentials_with_deleted_cookie(self):...\n",
"helper = self._makeOne()\n",
"response = FauxCookieResponse()\n",
"req_data = {helper.cookie_name: 'deleted', 'RESPONSE': response}\n",
"request = FauxSettableRequest(**req_data)\n",
"self.assertEqual(len(response.cookies), 0)\n",
"self.assertEqual(helper.extractCredentials(request), {})\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_0(VAR_1):...\n",
"@VAR_1.url_value_preprocessor...\n",
"g.auth_token = VAR_7.pop('auth_token')\n"
] | [
"def register_url_value_preprocessor(kobo):...\n",
"@kobo.url_value_preprocessor...\n",
"g.auth_token = values.pop('auth_token')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'"
] |
[
"def FUNC_21(self):...\n",
"VAR_29 = coreapi.Document(VAR_5='', title='Example API', content={'users':\n {'list': {}}})\n",
"VAR_30 = VAR_29['users']\n",
"VAR_31 = schema_links(VAR_30)\n",
"assert len(VAR_31) is 0\n"
] | [
"def test_schema_with_empty_links(self):...\n",
"schema = coreapi.Document(url='', title='Example API', content={'users': {\n 'list': {}}})\n",
"section = schema['users']\n",
"flat_links = schema_links(section)\n",
"assert len(flat_links) is 0\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assert'"
] |
[
"async def FUNC_5(self, VAR_11: str, VAR_6: str, VAR_12: int, VAR_13:...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_43 = await self.store.get_threepid_validation_session('email', VAR_6,\n VAR_20=email_address, validated=False)\n",
"if VAR_43 and VAR_43.get('validated_at') is None:\n",
"VAR_33 = VAR_43['session_id']\n",
"VAR_33 = random_string(16)\n",
"VAR_58 = VAR_43['last_send_attempt']\n",
"if VAR_14:\n",
"if VAR_12 <= VAR_58:\n",
"if '?' in VAR_14:\n",
"VAR_18 = random_string(32)\n",
"return VAR_33\n",
"VAR_14 += '&'\n",
"VAR_14 += '?'\n",
"await VAR_13(VAR_11, VAR_18, VAR_6, VAR_33)\n",
"VAR_0.exception('Error sending threepid validation email to %s', VAR_11)\n",
"VAR_44 = self.hs.get_clock().time_msec(\n ) + self.hs.config.email_validation_token_lifetime\n",
"VAR_14 += 'sid=' + urllib.parse.quote(VAR_33)\n",
"await self.store.start_or_continue_validation_session('email', VAR_11,\n VAR_33, VAR_6, VAR_12, VAR_14, VAR_18, VAR_44)\n",
"return VAR_33\n"
] | [
"async def send_threepid_validation(self, email_address: str, client_secret:...\n",
"\"\"\"docstring\"\"\"\n",
"session = await self.store.get_threepid_validation_session('email',\n client_secret, address=email_address, validated=False)\n",
"if session and session.get('validated_at') is None:\n",
"session_id = session['session_id']\n",
"session_id = random_string(16)\n",
"last_send_attempt = session['last_send_attempt']\n",
"if next_link:\n",
"if send_attempt <= last_send_attempt:\n",
"if '?' in next_link:\n",
"token = random_string(32)\n",
"return session_id\n",
"next_link += '&'\n",
"next_link += '?'\n",
"await send_email_func(email_address, token, client_secret, session_id)\n",
"logger.exception('Error sending threepid validation email to %s', email_address\n )\n",
"token_expires = self.hs.get_clock().time_msec(\n ) + self.hs.config.email_validation_token_lifetime\n",
"next_link += 'sid=' + urllib.parse.quote(session_id)\n",
"await self.store.start_or_continue_validation_session('email',\n email_address, session_id, client_secret, send_attempt, next_link,\n token, token_expires)\n",
"return session_id\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Return'",
"AugAssign'",
"AugAssign'",
"Expr'",
"Expr'",
"Assign'",
"AugAssign'",
"Expr'",
"Return'"
] |
[
"@FUNC_0...\n",
"return IdentityHandler(self)\n"
] | [
"@cache_in_self...\n",
"return IdentityHandler(self)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"import urllib2\n",
"from flask import Flask, render_template, session, request, json\n",
"from core.trape import Trape\n",
"from core.db import Database\n",
"VAR_0 = Trape()\n",
"VAR_1 = Flask(__name__, template_folder='../templates', static_folder=\n '../static')\n",
"VAR_2 = Database()\n",
"VAR_0.header()\n",
"@VAR_1.route('/' + VAR_0.stats_path)...\n",
"return render_template('/login.html')\n"
] | [
"import urllib2\n",
"from flask import Flask, render_template, session, request, json\n",
"from core.trape import Trape\n",
"from core.db import Database\n",
"trape = Trape()\n",
"app = Flask(__name__, template_folder='../templates', static_folder='../static'\n )\n",
"db = Database()\n",
"trape.header()\n",
"@app.route('/' + trape.stats_path)...\n",
"return render_template('/login.html')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Return'"
] |
[
"def __init__(self, VAR_11: IReactorPluggableNameResolver, VAR_8: Optional[...\n",
"\"\"\"docstring\"\"\"\n",
"self._reactor = VAR_11\n",
"self._ip_whitelist = VAR_8\n",
"self._ip_blacklist = VAR_9\n"
] | [
"def __init__(self, reactor: IReactorPluggableNameResolver, ip_whitelist:...\n",
"\"\"\"docstring\"\"\"\n",
"self._reactor = reactor\n",
"self._ip_whitelist = ip_whitelist\n",
"self._ip_blacklist = ip_blacklist\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_25(self, VAR_15, **VAR_14):...\n",
"if VAR_15.status_code != 401:\n",
"return VAR_15\n",
"if not self.prompting:\n",
"return VAR_15\n",
"VAR_75 = urllib_parse.urlparse(VAR_15.url)\n",
"VAR_4, VAR_73, VAR_76 = self._prompt_for_password(VAR_75.netloc)\n",
"self._credentials_to_save = None\n",
"if VAR_4 is not None and VAR_73 is not None:\n",
"self.passwords[VAR_75.netloc] = VAR_4, VAR_73\n",
"VAR_15.content\n",
"if VAR_76 and self._should_save_password_to_keyring():\n",
"VAR_15.raw.release_conn()\n",
"self._credentials_to_save = VAR_75.netloc, VAR_4, VAR_73\n",
"VAR_31 = HTTPBasicAuth(VAR_4 or '', VAR_73 or '')(VAR_15.request)\n",
"VAR_31.register_hook('response', self.warn_on_401)\n",
"if self._credentials_to_save:\n",
"VAR_31.register_hook('response', self.save_credentials)\n",
"VAR_77 = VAR_15.connection.send(VAR_31, **kwargs)\n",
"VAR_77.history.append(VAR_15)\n",
"return VAR_77\n"
] | [
"def handle_401(self, resp, **kwargs):...\n",
"if resp.status_code != 401:\n",
"return resp\n",
"if not self.prompting:\n",
"return resp\n",
"parsed = urllib_parse.urlparse(resp.url)\n",
"username, password, save = self._prompt_for_password(parsed.netloc)\n",
"self._credentials_to_save = None\n",
"if username is not None and password is not None:\n",
"self.passwords[parsed.netloc] = username, password\n",
"resp.content\n",
"if save and self._should_save_password_to_keyring():\n",
"resp.raw.release_conn()\n",
"self._credentials_to_save = parsed.netloc, username, password\n",
"req = HTTPBasicAuth(username or '', password or '')(resp.request)\n",
"req.register_hook('response', self.warn_on_401)\n",
"if self._credentials_to_save:\n",
"req.register_hook('response', self.save_credentials)\n",
"new_resp = resp.connection.send(req, **kwargs)\n",
"new_resp.history.append(resp)\n",
"return new_resp\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_68(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.jwt_handler:\n",
"VAR_380 = self.jwt_handler.jwt_token_manager()\n"
] | [
"def jwt(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.jwt_handler:\n",
"rtn = self.jwt_handler.jwt_token_manager()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'"
] |
[
"def FUNC_30(VAR_63):...\n",
"VAR_63 = io.StringIO(VAR_63)\n",
"VAR_82 = list(csv.reader(VAR_63))\n",
"VAR_83 = VAR_82[0]\n",
"VAR_84 = VAR_83.index('flag')\n",
"VAR_82[VAR_24][VAR_84] = VAR_23\n",
"VAR_47 = io.StringIO()\n",
"VAR_85 = csv.writer(VAR_47)\n",
"VAR_85.writerows(VAR_82)\n",
"return VAR_47.getvalue()\n"
] | [
"def replace_flag_at_index(file_content):...\n",
"file_content = io.StringIO(file_content)\n",
"content = list(csv.reader(file_content))\n",
"header = content[0]\n",
"flag_col_index = header.index('flag')\n",
"content[flag_index][flag_col_index] = flag_option\n",
"output = io.StringIO()\n",
"writer = csv.writer(output)\n",
"writer.writerows(content)\n",
"return output.getvalue()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_0(VAR_11):...\n",
"VAR_3 = VAR_11.dataroot\n",
"VAR_4 = VAR_11.yfcc100m_db_host\n",
"VAR_5 = VAR_11.yfcc100m_db_dbname\n",
"VAR_6 = VAR_11.yfcc100m_db_user\n",
"VAR_7 = VAR_11.yfcc100m_db_password\n",
"VAR_8 = VAR_11.yfcc100m_db_port\n"
] | [
"def init(config):...\n",
"DATAROOT = config.dataroot\n",
"DB_HOST = config.yfcc100m_db_host\n",
"DB_DBNAME = config.yfcc100m_db_dbname\n",
"DB_USER = config.yfcc100m_db_user\n",
"DB_PASSWORD = config.yfcc100m_db_password\n",
"DB_PORT = config.yfcc100m_db_port\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_74(self, VAR_44=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_77 = self.get_signature()\n",
"if file_lock.lock_exists(VAR_77):\n",
"VAR_96 = True\n",
"file_lock.create_lock(VAR_77)\n",
"if VAR_44:\n",
"for i in range(VAR_44):\n",
"if VAR_96:\n",
"time.sleep(1)\n",
"if not file_lock.lock_exists(VAR_77):\n",
"VAR_96 = False\n"
] | [
"def lock(self, timeout=None):...\n",
"\"\"\"docstring\"\"\"\n",
"signature = self.get_signature()\n",
"if file_lock.lock_exists(signature):\n",
"lock_exists = True\n",
"file_lock.create_lock(signature)\n",
"if timeout:\n",
"for i in range(timeout):\n",
"if lock_exists:\n",
"time.sleep(1)\n",
"if not file_lock.lock_exists(signature):\n",
"lock_exists = False\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"For",
"Condition",
"Expr'",
"Condition",
"Assign'"
] |
[
"@pytest.fixture(scope='session')...\n",
"VAR_2 = webdriver.ChromeOptions()\n",
"VAR_2.headless = True\n",
"VAR_5 = webdriver.Chrome(options=chrome_options)\n",
"pytest.skip(force_str(e))\n",
"yield VAR_5\n",
"VAR_5.quit()\n"
] | [
"@pytest.fixture(scope='session')...\n",
"chrome_options = webdriver.ChromeOptions()\n",
"chrome_options.headless = True\n",
"b = webdriver.Chrome(options=chrome_options)\n",
"pytest.skip(force_str(e))\n",
"yield b\n",
"b.quit()\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_0(*VAR_0, **VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_0:\n",
"if isinstance(VAR_0[0], BaseDocument):\n",
"if len(VAR_0) < 2 and VAR_1:\n",
"return VAR_0[0]\n",
"if isinstance(VAR_0[0], string_types):\n",
"if 'doctype' in VAR_1:\n",
"VAR_5 = get_controller(VAR_2)\n",
"VAR_2 = VAR_0[0]\n",
"if isinstance(VAR_0[0], dict):\n",
"VAR_2 = VAR_1['doctype']\n",
"if VAR_5:\n",
"VAR_1 = VAR_0[0]\n",
"return VAR_5(*VAR_0, **kwargs)\n"
] | [
"def get_doc(*args, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"if args:\n",
"if isinstance(args[0], BaseDocument):\n",
"if len(args) < 2 and kwargs:\n",
"return args[0]\n",
"if isinstance(args[0], string_types):\n",
"if 'doctype' in kwargs:\n",
"controller = get_controller(doctype)\n",
"doctype = args[0]\n",
"if isinstance(args[0], dict):\n",
"doctype = kwargs['doctype']\n",
"if controller:\n",
"kwargs = args[0]\n",
"return controller(*args, **kwargs)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Condition",
"Condition",
"Return'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_4(self, VAR_13: Text, VAR_14: Text) ->None:...\n",
"\"\"\"docstring\"\"\"\n",
"self.s3.Object(self.bucket_name, VAR_13).put(Body=f)\n"
] | [
"def _persist_tar(self, file_key: Text, tar_path: Text) ->None:...\n",
"\"\"\"docstring\"\"\"\n",
"self.s3.Object(self.bucket_name, file_key).put(Body=f)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"def FUNC_9(self):...\n",
"assert set(ClearableFileInput().build_attrs({}).keys()) == {'class',\n 'data-url', 'data-fields-x-amz-algorithm', 'data-fields-x-amz-date',\n 'data-fields-x-amz-signature', 'data-fields-x-amz-credential',\n 'data-fields-policy', 'data-fields-key'}\n",
"assert ClearableFileInput().build_attrs({})['class'] == 's3file'\n",
"assert ClearableFileInput().build_attrs({'class': 'my-class'})['class'\n ] == 'my-class s3file'\n"
] | [
"def test_build_attr(self):...\n",
"assert set(ClearableFileInput().build_attrs({}).keys()) == {'class',\n 'data-url', 'data-fields-x-amz-algorithm', 'data-fields-x-amz-date',\n 'data-fields-x-amz-signature', 'data-fields-x-amz-credential',\n 'data-fields-policy', 'data-fields-key'}\n",
"assert ClearableFileInput().build_attrs({})['class'] == 's3file'\n",
"assert ClearableFileInput().build_attrs({'class': 'my-class'})['class'\n ] == 'my-class s3file'\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assert'",
"Assert'",
"Assert'"
] |
[
"def FUNC_43(self, VAR_9):...\n",
"VAR_84 = cherry.config['media.basedir']\n",
"VAR_85 = os.path.join(VAR_84, VAR_9)\n",
"return json.dumps(metainfo.getSongInfo(VAR_85).dict())\n"
] | [
"def api_getsonginfo(self, path):...\n",
"basedir = cherry.config['media.basedir']\n",
"abspath = os.path.join(basedir, path)\n",
"return json.dumps(metainfo.getSongInfo(abspath).dict())\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_27(VAR_8, VAR_60, VAR_10=None, VAR_11=None, VAR_7=None):...\n",
"if VAR_10 is None and VAR_11 is None or g.locale is None:\n",
"return VAR_60\n",
"VAR_37 = FUNC_1(VAR_8, VAR_10=data, VAR_11=additional_request_data)\n",
"def FUNC_35():...\n",
"return FUNC_0(VAR_6=request.url_root, VAR_7=additional_unless)\n"
] | [
"def get_preemptively_cached_view(key, view, data=None,...\n",
"if data is None and additional_request_data is None or g.locale is None:\n",
"return view\n",
"d = _preemptive_data(key, data=data, additional_request_data=\n additional_request_data)\n",
"def unless():...\n",
"return _preemptive_unless(base_url=request.url_root, additional_unless=\n additional_unless)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_15(VAR_2, VAR_4, VAR_6):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_29 = VAR_2 / 'session.yml'\n",
"VAR_29.write('windows: []')\n",
"VAR_11 = FUNC_0(VAR_4.config) + ['--temp-basedir', '-r', str(VAR_29)]\n",
"VAR_6.start(VAR_11)\n",
"VAR_6.compare_session('string')\n",
"VAR_6.send_cmd(':quit')\n",
"VAR_6.wait_for_quit()\n"
] | [
"def test_loading_empty_session(tmpdir, request, quteproc_new):...\n",
"\"\"\"docstring\"\"\"\n",
"session = tmpdir / 'session.yml'\n",
"session.write('windows: []')\n",
"args = _base_args(request.config) + ['--temp-basedir', '-r', str(session)]\n",
"quteproc_new.start(args)\n",
"quteproc_new.compare_session(\n \"\"\"\n windows:\n - tabs:\n - history:\n - url: about:blank\n \"\"\"\n )\n",
"quteproc_new.send_cmd(':quit')\n",
"quteproc_new.wait_for_quit()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@VAR_4.route('/')...\n",
"VAR_15 = copy.deepcopy(VAR_3)\n",
"VAR_16 = {'total_munhak': len(VAR_15), 'source_list': sorted(set([\n munhak_row.source for munhak_row in VAR_15]))}\n",
"print(VAR_16)\n",
"VAR_17['quiz_count'] = 0\n",
"return render_template('quiz/index.html', VAR_16=data)\n"
] | [
"@app.route('/')...\n",
"munhak_rows = copy.deepcopy(munhak_rows_data)\n",
"data = {'total_munhak': len(munhak_rows), 'source_list': sorted(set([\n munhak_row.source for munhak_row in munhak_rows]))}\n",
"print(data)\n",
"session['quiz_count'] = 0\n",
"return render_template('quiz/index.html', data=data)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_13(VAR_16):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_39 = {}\n",
"VAR_40 = VAR_16.split(';')\n",
"for input_raw in filter(bool, VAR_40):\n",
"VAR_67 = re.match('([^=]+)=([^\\\\[\\\\]]+)\\\\[([^\\\\[\\\\]]+)\\\\]$', input_raw)\n",
"return VAR_39\n",
"if VAR_67:\n",
"VAR_39[VAR_67.group(1)] = VAR_67.group(2), VAR_67.group(3)\n",
"VAR_67 = re.match('([^=]+)=([^\\\\[\\\\]]+)$', input_raw)\n",
"if VAR_67:\n",
"VAR_39[VAR_67.group(1)] = VAR_67.group(2), None\n"
] | [
"def preprocess_inputs_arg_string(inputs_str):...\n",
"\"\"\"docstring\"\"\"\n",
"input_dict = {}\n",
"inputs_raw = inputs_str.split(';')\n",
"for input_raw in filter(bool, inputs_raw):\n",
"match = re.match('([^=]+)=([^\\\\[\\\\]]+)\\\\[([^\\\\[\\\\]]+)\\\\]$', input_raw)\n",
"return input_dict\n",
"if match:\n",
"input_dict[match.group(1)] = match.group(2), match.group(3)\n",
"match = re.match('([^=]+)=([^\\\\[\\\\]]+)$', input_raw)\n",
"if match:\n",
"input_dict[match.group(1)] = match.group(2), None\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"For",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'"
] |
[
"def FUNC_39(VAR_33, VAR_34, VAR_101=None):...\n",
"if callable(VAR_64):\n",
"if VAR_34 and not isinstance(VAR_34, str):\n",
"VAR_143 = VAR_64()\n",
"VAR_0.exception(\n 'Error while trying to retrieve custom ETag value for plugin {}'.format\n (VAR_8))\n",
"from werkzeug.http import http_date\n",
"if VAR_101 is None:\n",
"if VAR_143:\n",
"VAR_34 = http_date(VAR_34)\n",
"VAR_101 = []\n",
"import hashlib\n",
"return VAR_143\n",
"VAR_86 = hashlib.sha1()\n",
"def FUNC_32(VAR_87):...\n",
"VAR_86.update(to_bytes(VAR_87, encoding='utf-8', errors='replace'))\n",
"FUNC_32(octoprint.__version__)\n",
"FUNC_32(get_python_version_string())\n",
"FUNC_32(','.join(sorted(VAR_33)))\n",
"if VAR_34:\n",
"FUNC_32(VAR_34)\n",
"for add in VAR_101:\n",
"FUNC_32(add)\n",
"return VAR_86.hexdigest()\n"
] | [
"def compute_etag(files, lastmodified, additional=None):...\n",
"if callable(custom_etag):\n",
"if lastmodified and not isinstance(lastmodified, str):\n",
"etag = custom_etag()\n",
"_logger.exception(\n 'Error while trying to retrieve custom ETag value for plugin {}'.format\n (key))\n",
"from werkzeug.http import http_date\n",
"if additional is None:\n",
"if etag:\n",
"lastmodified = http_date(lastmodified)\n",
"additional = []\n",
"import hashlib\n",
"return etag\n",
"hash = hashlib.sha1()\n",
"def hash_update(value):...\n",
"hash.update(to_bytes(value, encoding='utf-8', errors='replace'))\n",
"hash_update(octoprint.__version__)\n",
"hash_update(get_python_version_string())\n",
"hash_update(','.join(sorted(files)))\n",
"if lastmodified:\n",
"hash_update(lastmodified)\n",
"for add in additional:\n",
"hash_update(add)\n",
"return hash.hexdigest()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Condition",
"Assign'",
"Expr'",
"ImportFrom'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Import'",
"Return'",
"Assign'",
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"For",
"Expr'",
"Return'"
] |
[
"def FUNC_6(self, VAR_2, VAR_14, VAR_15):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_14.SERVICE_OPTS.setOmeroShare()\n",
"if VAR_15 is None:\n",
"return None\n",
"VAR_26 = VAR_14.getShare(VAR_15)\n",
"if VAR_26.getOwner().id != VAR_14.getUserId():\n",
"VAR_0.error('Error retrieving share connection.', exc_info=True)\n",
"if VAR_26.active and not VAR_26.isExpired():\n",
"return None\n",
"return self.get_share_connection(VAR_2, VAR_14, VAR_15)\n",
"VAR_0.debug('Share is unavailable.')\n",
"return None\n"
] | [
"def prepare_share_connection(self, request, conn, share_id):...\n",
"\"\"\"docstring\"\"\"\n",
"conn.SERVICE_OPTS.setOmeroShare()\n",
"if share_id is None:\n",
"return None\n",
"share = conn.getShare(share_id)\n",
"if share.getOwner().id != conn.getUserId():\n",
"logger.error('Error retrieving share connection.', exc_info=True)\n",
"if share.active and not share.isExpired():\n",
"return None\n",
"return self.get_share_connection(request, conn, share_id)\n",
"logger.debug('Share is unavailable.')\n",
"return None\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Return'",
"Return'",
"Expr'",
"Return'"
] |
[
"def FUNC_1(self, *VAR_8, **VAR_9):...\n",
"VAR_12 = VAR_9.get('str', False)\n",
"VAR_13 = VAR_9.get('okayToFail', False)\n",
"VAR_10 = VAR_9.get('type', self.type)\n",
"VAR_11 = self._buildPath(VAR_8=segs, VAR_10=fileType)\n",
"if VAR_12:\n",
"if self.fallback:\n",
"return fh.read()\n",
"return open(VAR_11, encoding='utf-8')\n",
"return self._fail(VAR_11, VAR_12, VAR_13)\n",
"return self.fallback.fetch(*VAR_8, VAR_12=str, VAR_13=okayToFail)\n",
"return self._fail(VAR_11, VAR_12, VAR_13)\n"
] | [
"def fetch(self, *segs, **kwargs):...\n",
"str = kwargs.get('str', False)\n",
"okayToFail = kwargs.get('okayToFail', False)\n",
"fileType = kwargs.get('type', self.type)\n",
"location = self._buildPath(segs=segs, fileType=fileType)\n",
"if str:\n",
"if self.fallback:\n",
"return fh.read()\n",
"return open(location, encoding='utf-8')\n",
"return self._fail(location, str, okayToFail)\n",
"return self.fallback.fetch(*segs, str=str, okayToFail=okayToFail)\n",
"return self._fail(location, str, okayToFail)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Return'",
"Return'",
"Return'",
"Return'",
"Return'"
] |
[
"def FUNC_82(self, VAR_2, VAR_57=None, VAR_22=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_22 is None:\n",
"VAR_337 = VAR_2.GET.get('server', VAR_2.POST.get('server'))\n",
"VAR_53 = {'version': omero_version, 'build_year': build_year, 'error':\n VAR_57, 'form': VAR_22}\n",
"if VAR_337 is not None:\n",
"VAR_7 = VAR_2.GET.get('url')\n",
"VAR_115 = {'server': unicode(VAR_337)}\n",
"VAR_22 = LoginForm()\n",
"if VAR_7 is not None and len(VAR_7) != 0:\n",
"VAR_22 = LoginForm(VAR_115=initial)\n",
"VAR_53['url'] = urlencode({'url': VAR_7})\n",
"if hasattr(settings, 'LOGIN_LOGO'):\n",
"VAR_53['LOGIN_LOGO'] = settings.LOGIN_LOGO\n",
"if settings.PUBLIC_ENABLED:\n",
"VAR_338 = VAR_350('webindex')\n",
"VAR_53['show_download_links'] = settings.SHOW_CLIENT_DOWNLOADS\n",
"if settings.PUBLIC_URL_FILTER.search(VAR_338):\n",
"if settings.SHOW_CLIENT_DOWNLOADS:\n",
"VAR_53['public_enabled'] = True\n",
"VAR_339 = re.match(\n '(?P<major>\\\\d+)\\\\.(?P<minor>\\\\d+)\\\\.(?P<patch>\\\\d+\\\\.?)?(?P<dev>(dev|a|b|rc)\\\\d+)?.*'\n , omero_version)\n",
"return render(VAR_2, self.template, VAR_53)\n",
"VAR_53['public_login_redirect'] = VAR_338\n",
"VAR_340 = '^v%s\\\\.%s\\\\.[^-]+$' % (VAR_339.group('major'), VAR_339.group(\n 'minor'))\n",
"VAR_53['client_download_tag_re'] = VAR_340\n",
"VAR_53['client_download_repo'] = settings.CLIENT_DOWNLOAD_GITHUB_REPO\n"
] | [
"def handle_not_logged_in(self, request, error=None, form=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if form is None:\n",
"server_id = request.GET.get('server', request.POST.get('server'))\n",
"context = {'version': omero_version, 'build_year': build_year, 'error':\n error, 'form': form}\n",
"if server_id is not None:\n",
"url = request.GET.get('url')\n",
"initial = {'server': unicode(server_id)}\n",
"form = LoginForm()\n",
"if url is not None and len(url) != 0:\n",
"form = LoginForm(initial=initial)\n",
"context['url'] = urlencode({'url': url})\n",
"if hasattr(settings, 'LOGIN_LOGO'):\n",
"context['LOGIN_LOGO'] = settings.LOGIN_LOGO\n",
"if settings.PUBLIC_ENABLED:\n",
"redirect = reverse('webindex')\n",
"context['show_download_links'] = settings.SHOW_CLIENT_DOWNLOADS\n",
"if settings.PUBLIC_URL_FILTER.search(redirect):\n",
"if settings.SHOW_CLIENT_DOWNLOADS:\n",
"context['public_enabled'] = True\n",
"ver = re.match(\n '(?P<major>\\\\d+)\\\\.(?P<minor>\\\\d+)\\\\.(?P<patch>\\\\d+\\\\.?)?(?P<dev>(dev|a|b|rc)\\\\d+)?.*'\n , omero_version)\n",
"return render(request, self.template, context)\n",
"context['public_login_redirect'] = redirect\n",
"client_download_tag_re = '^v%s\\\\.%s\\\\.[^-]+$' % (ver.group('major'), ver.\n group('minor'))\n",
"context['client_download_tag_re'] = client_download_tag_re\n",
"context['client_download_repo'] = settings.CLIENT_DOWNLOAD_GITHUB_REPO\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"@FUNC_2.__func__...\n",
"\"\"\"docstring\"\"\"\n",
"self._submit()\n"
] | [
"@whitelist.__func__...\n",
"\"\"\"docstring\"\"\"\n",
"self._submit()\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'"
] |
[
"def FUNC_49(VAR_21, VAR_22, VAR_23):...\n",
"if VAR_87.filter_language() != 'all':\n",
"VAR_21 = VAR_21.filter(db.Books.languages.any(db.Languages.lang_code ==\n VAR_87.filter_language()))\n",
"for language in VAR_22:\n",
"return VAR_21\n",
"VAR_21 = VAR_21.filter(db.Books.languages.any(db.Languages.id == language))\n",
"for language in VAR_23:\n",
"VAR_21 = VAR_21.filter(not_(db.Books.series.any(db.Languages.id == language)))\n"
] | [
"def adv_search_language(q, include_languages_inputs, exclude_languages_inputs):...\n",
"if current_user.filter_language() != 'all':\n",
"q = q.filter(db.Books.languages.any(db.Languages.lang_code == current_user.\n filter_language()))\n",
"for language in include_languages_inputs:\n",
"return q\n",
"q = q.filter(db.Books.languages.any(db.Languages.id == language))\n",
"for language in exclude_languages_inputs:\n",
"q = q.filter(not_(db.Books.series.any(db.Languages.id == language)))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"For",
"Return'",
"Assign'",
"For",
"Assign'"
] |
[
"def __init__(self, VAR_7, VAR_8):...\n",
"self.errorstring = VAR_7\n",
"self.error = VAR_8\n",
"super().__init__(VAR_7)\n"
] | [
"def __init__(self, errorstring, error):...\n",
"self.errorstring = errorstring\n",
"self.error = error\n",
"super().__init__(errorstring)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_118(*VAR_11, **VAR_351):...\n",
""
] | [
"def f(*args, **kwargs):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_26(VAR_66):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_81 = StringIO()\n",
"VAR_82 = csv.writer(VAR_81)\n",
"VAR_82.writerow([VAR_63, VAR_64])\n",
"for csv_line in VAR_66:\n",
"VAR_82.writerow([str(csv_line[0][:-4]).replace('T', ' '), csv_line[1]])\n",
"VAR_81.seek(0)\n",
"yield VAR_81.read()\n",
"VAR_81.truncate(0)\n",
"VAR_81.seek(0)\n"
] | [
"def iter_csv(data):...\n",
"\"\"\"docstring\"\"\"\n",
"line = StringIO()\n",
"writer = csv.writer(line)\n",
"writer.writerow([col_1, col_2])\n",
"for csv_line in data:\n",
"writer.writerow([str(csv_line[0][:-4]).replace('T', ' '), csv_line[1]])\n",
"line.seek(0)\n",
"yield line.read()\n",
"line.truncate(0)\n",
"line.seek(0)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"For",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_72(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._set_canonical_alias({'alias': self.alias, 'alt_aliases': [self.alias]})\n",
"VAR_61 = self._get_canonical_alias()\n",
"self.assertEqual(VAR_61, {'alias': self.alias, 'alt_aliases': [self.alias]})\n",
"self._set_canonical_alias({'alias': self.alias})\n",
"VAR_61 = self._get_canonical_alias()\n",
"self.assertEqual(VAR_61, {'alias': self.alias})\n"
] | [
"def test_partial_modify(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._set_canonical_alias({'alias': self.alias, 'alt_aliases': [self.alias]})\n",
"res = self._get_canonical_alias()\n",
"self.assertEqual(res, {'alias': self.alias, 'alt_aliases': [self.alias]})\n",
"self._set_canonical_alias({'alias': self.alias})\n",
"res = self._get_canonical_alias()\n",
"self.assertEqual(res, {'alias': self.alias})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_119(VAR_174):...\n",
"for df in VAR_174.meta.get('fields', {'no_copy': 1}):\n",
"if hasattr(VAR_174, df.fieldname):\n",
"VAR_174.set(df.fieldname, None)\n"
] | [
"def remove_no_copy_fields(d):...\n",
"for df in d.meta.get('fields', {'no_copy': 1}):\n",
"if hasattr(d, df.fieldname):\n",
"d.set(df.fieldname, None)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Condition",
"Expr'"
] |
[
"def FUNC_3(VAR_0):...\n",
"from MoinMoin import search\n",
"VAR_8 = VAR_0.values.get('pagename', '')\n",
"if VAR_8:\n",
"VAR_30 = search.searchPages(VAR_0, 't:\"%s\"' % VAR_8)\n",
"VAR_31 = [VAR_8]\n",
"VAR_31 = [p.page_name for p in VAR_30.hits]\n",
"VAR_0.write('string' % ''.join([('<option>%s</option>\\n' % wikiutil.escape(\n p)) for p in VAR_31]))\n"
] | [
"def page_list(request):...\n",
"from MoinMoin import search\n",
"name = request.values.get('pagename', '')\n",
"if name:\n",
"searchresult = search.searchPages(request, 't:\"%s\"' % name)\n",
"pages = [name]\n",
"pages = [p.page_name for p in searchresult.hits]\n",
"request.write(\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0 Transitional//EN\">\n<html>\n <head>\n <title>Insert Page Link</title>\n <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\n <meta content=\"noindex,nofollow\" name=\"robots\">\n </head>\n <body scroll=\"no\" style=\"OVERFLOW: hidden\">\n <table height=\"100%%\" cellSpacing=\"0\" cellPadding=\"0\" width=\"100%%\" border=\"0\">\n <tr>\n <td>\n <table cellSpacing=\"0\" cellPadding=\"0\" align=\"center\" border=\"0\">\n <tr>\n <td>\n <span fckLang=\"PageDlgName\">Page name</span><br>\n <select id=\"txtName\" size=\"1\">\n %s\n </select>\n </td>\n </tr>\n </table>\n </td>\n </tr>\n</table>\n</body>\n</html>\n\"\"\"\n % ''.join([('<option>%s</option>\\n' % wikiutil.escape(p)) for p in pages])\n )\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"ImportFrom'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_8(self, VAR_20):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.require_authentication:\n",
"if not is_authenticated(VAR_20.user):\n",
"VAR_35 = get_missing_permissions(VAR_20.user, self.permissions)\n",
"return _('Sign in to continue.')\n",
"if not getattr(VAR_20.user, 'is_staff', False):\n",
"if VAR_35:\n",
"return _(\n 'Your account must have `Access to Admin Panel` permissions to access this page.'\n )\n",
"if not get_shop(VAR_20):\n",
"return _('You do not have the required permissions: %s') % ', '.join(VAR_35)\n",
"return _('There is no active shop available. Contact support for more details.'\n )\n"
] | [
"def _get_unauth_reason(self, request):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.require_authentication:\n",
"if not is_authenticated(request.user):\n",
"missing_permissions = get_missing_permissions(request.user, self.permissions)\n",
"return _('Sign in to continue.')\n",
"if not getattr(request.user, 'is_staff', False):\n",
"if missing_permissions:\n",
"return _(\n 'Your account must have `Access to Admin Panel` permissions to access this page.'\n )\n",
"if not get_shop(request):\n",
"return _('You do not have the required permissions: %s') % ', '.join(\n missing_permissions)\n",
"return _('There is no active shop available. Contact support for more details.'\n )\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Condition",
"Assign'",
"Return'",
"Condition",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_121(*VAR_79, **VAR_42):...\n",
"if VAR_13.read_from_replica:\n",
"FUNC_6()\n",
"VAR_225 = VAR_129(*VAR_79, **get_newargs(fn, kwargs))\n",
"if VAR_1 and hasattr(VAR_1, 'primary_db'):\n",
"return VAR_225\n",
"VAR_1.db.close()\n",
"VAR_1.db = VAR_1.primary_db\n"
] | [
"def wrapper_fn(*args, **kwargs):...\n",
"if conf.read_from_replica:\n",
"connect_replica()\n",
"retval = fn(*args, **get_newargs(fn, kwargs))\n",
"if local and hasattr(local, 'primary_db'):\n",
"return retval\n",
"local.db.close()\n",
"local.db = local.primary_db\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Expr'",
"Assign'"
] |
[
"async def FUNC_17(self, VAR_3):...\n",
"await self.callback(VAR_3)\n"
] | [
"async def _async_render_GET(self, request):...\n",
"await self.callback(request)\n"
] | [
0,
0
] | [
"AsyncFunctionDef'",
"Expr'"
] |
[
"def FUNC_5(VAR_23=None, VAR_26=None, VAR_27=True):...\n",
"\"\"\"docstring\"\"\"\n",
"from frappe.database import get_db\n",
"if VAR_23:\n",
"FUNC_4(VAR_23)\n",
"VAR_1.db = get_db(VAR_10=db_name or local.conf.db_name)\n",
"if VAR_27:\n",
"FUNC_22('Administrator')\n"
] | [
"def connect(site=None, db_name=None, set_admin_as_user=True):...\n",
"\"\"\"docstring\"\"\"\n",
"from frappe.database import get_db\n",
"if site:\n",
"init(site)\n",
"local.db = get_db(user=db_name or local.conf.db_name)\n",
"if set_admin_as_user:\n",
"set_user('Administrator')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"ImportFrom'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Expr'"
] |
[
"def FUNC_6(self):...\n",
"self.export_doc()\n"
] | [
"def on_update(self):...\n",
"self.export_doc()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_118(VAR_120=None):...\n",
"VAR_184 = {}\n",
"for VAR_219 in ([VAR_120] if VAR_120 else FUNC_64(VAR_122=True)):\n",
"VAR_219 = 'frappe' if VAR_219 == 'webnotes' else VAR_219\n",
"return VAR_184\n",
"VAR_226 = FUNC_55(VAR_219 + '.hooks')\n",
"if VAR_1.flags.in_install_app:\n",
"for VAR_46 in dir(VAR_226):\n",
"print('Could not find app \"{0}\"'.format(VAR_120))\n",
"if not VAR_46.startswith('_'):\n",
"if not VAR_16:\n",
"FUNC_67(VAR_184, VAR_46, getattr(VAR_226, VAR_46))\n",
"sys.exit(1)\n"
] | [
"def load_app_hooks(app_name=None):...\n",
"hooks = {}\n",
"for app in ([app_name] if app_name else get_installed_apps(sort=True)):\n",
"app = 'frappe' if app == 'webnotes' else app\n",
"return hooks\n",
"app_hooks = get_module(app + '.hooks')\n",
"if local.flags.in_install_app:\n",
"for key in dir(app_hooks):\n",
"print('Could not find app \"{0}\"'.format(app_name))\n",
"if not key.startswith('_'):\n",
"if not request:\n",
"append_hook(hooks, key, getattr(app_hooks, key))\n",
"sys.exit(1)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Return'",
"Assign'",
"Condition",
"For",
"Expr'",
"Condition",
"Condition",
"Expr'",
"Expr'"
] |
[
"import logging\n",
"import urllib.parse\n",
"from typing import List, Optional\n",
"from netaddr import AddrFormatError, IPAddress\n",
"from zope.interface import implementer\n",
"from twisted.internet import defer\n",
"from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS\n",
"from twisted.internet.interfaces import IProtocolFactory, IReactorCore, IStreamClientEndpoint\n",
"from twisted.web.client import URI, Agent, HTTPConnectionPool\n",
"from twisted.web.http_headers import Headers\n",
"from twisted.web.iweb import IAgent, IAgentEndpointFactory, IBodyProducer\n",
"from synapse.crypto.context_factory import FederationPolicyForHTTPS\n",
"from synapse.http.federation.srv_resolver import Server, SrvResolver\n",
"from synapse.http.federation.well_known_resolver import WellKnownResolver\n",
"from synapse.logging.context import make_deferred_yieldable, run_in_background\n",
"from synapse.util import Clock\n",
"VAR_0 = logging.getLogger(__name__)\n",
"\"\"\"string\"\"\"\n",
"def __init__(self, VAR_2: IReactorCore, VAR_3: Optional[...\n",
"self._reactor = VAR_2\n",
"self._clock = Clock(VAR_2)\n",
"self._pool = HTTPConnectionPool(VAR_2)\n",
"self._pool.retryAutomatically = False\n",
"self._pool.maxPersistentPerHost = 5\n",
"self._pool.cachedConnectionTimeout = 2 * 60\n",
"self._agent = Agent.usingEndpointFactory(self._reactor, CLASS_1(VAR_2,\n VAR_3, VAR_5), pool=self._pool)\n",
"self.user_agent = VAR_4\n",
"if VAR_6 is None:\n",
"VAR_6 = WellKnownResolver(self._reactor, agent=Agent(self._reactor, pool=\n self._pool, contextFactory=tls_client_options_factory), VAR_4=self.\n user_agent)\n",
"self._well_known_resolver = VAR_6\n",
"@defer.inlineCallbacks...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_12 = urllib.parse.urlparse(VAR_8)\n",
"assert VAR_12.hostname\n",
"VAR_15 = None\n",
"if VAR_12.scheme == b'matrix' and not FUNC_0(VAR_12.hostname\n",
"VAR_20 = yield defer.ensureDeferred(self._well_known_resolver.\n get_well_known(VAR_12.hostname))\n",
"if VAR_15:\n",
"VAR_15 = VAR_20.delegated_server\n",
"VAR_8 = urllib.parse.urlunparse((VAR_12.scheme, VAR_15, VAR_12.path, VAR_12\n .params, VAR_12.query, VAR_12.fragment))\n",
"if VAR_9 is None:\n",
"VAR_12 = urllib.parse.urlparse(VAR_8)\n",
"VAR_9 = Headers()\n",
"VAR_9 = VAR_9.copy()\n",
"if not VAR_9.hasHeader(b'host'):\n",
"VAR_9.addRawHeader(b'host', VAR_12.netloc)\n",
"if not VAR_9.hasHeader(b'user-agent'):\n",
"VAR_9.addRawHeader(b'user-agent', self.user_agent)\n",
"VAR_16 = yield make_deferred_yieldable(self._agent.request(VAR_7, VAR_8,\n VAR_9, VAR_10))\n",
"return VAR_16\n"
] | [
"import logging\n",
"import urllib.parse\n",
"from typing import List, Optional\n",
"from netaddr import AddrFormatError, IPAddress\n",
"from zope.interface import implementer\n",
"from twisted.internet import defer\n",
"from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS\n",
"from twisted.internet.interfaces import IProtocolFactory, IReactorCore, IStreamClientEndpoint\n",
"from twisted.web.client import URI, Agent, HTTPConnectionPool\n",
"from twisted.web.http_headers import Headers\n",
"from twisted.web.iweb import IAgent, IAgentEndpointFactory, IBodyProducer\n",
"from synapse.crypto.context_factory import FederationPolicyForHTTPS\n",
"from synapse.http.federation.srv_resolver import Server, SrvResolver\n",
"from synapse.http.federation.well_known_resolver import WellKnownResolver\n",
"from synapse.logging.context import make_deferred_yieldable, run_in_background\n",
"from synapse.util import Clock\n",
"logger = logging.getLogger(__name__)\n",
"\"\"\"An Agent-like thing which provides a `request` method which correctly\n handles resolving matrix server names when using matrix://. Handles standard\n https URIs as normal.\n\n Doesn't implement any retries. (Those are done in MatrixFederationHttpClient.)\n\n Args:\n reactor: twisted reactor to use for underlying requests\n\n tls_client_options_factory:\n factory to use for fetching client tls options, or none to disable TLS.\n\n user_agent:\n The user agent header to use for federation requests.\n\n _srv_resolver:\n SrvResolver implementation to use for looking up SRV records. None\n to use a default implementation.\n\n _well_known_resolver:\n WellKnownResolver to use to perform well-known lookups. None to use a\n default implementation.\n \"\"\"\n",
"def __init__(self, reactor: IReactorCore, tls_client_options_factory:...\n",
"self._reactor = reactor\n",
"self._clock = Clock(reactor)\n",
"self._pool = HTTPConnectionPool(reactor)\n",
"self._pool.retryAutomatically = False\n",
"self._pool.maxPersistentPerHost = 5\n",
"self._pool.cachedConnectionTimeout = 2 * 60\n",
"self._agent = Agent.usingEndpointFactory(self._reactor,\n MatrixHostnameEndpointFactory(reactor, tls_client_options_factory,\n _srv_resolver), pool=self._pool)\n",
"self.user_agent = user_agent\n",
"if _well_known_resolver is None:\n",
"_well_known_resolver = WellKnownResolver(self._reactor, agent=Agent(self.\n _reactor, pool=self._pool, contextFactory=tls_client_options_factory),\n user_agent=self.user_agent)\n",
"self._well_known_resolver = _well_known_resolver\n",
"@defer.inlineCallbacks...\n",
"\"\"\"docstring\"\"\"\n",
"parsed_uri = urllib.parse.urlparse(uri)\n",
"assert parsed_uri.hostname\n",
"delegated_server = None\n",
"if parsed_uri.scheme == b'matrix' and not _is_ip_literal(parsed_uri.hostname\n",
"well_known_result = yield defer.ensureDeferred(self._well_known_resolver.\n get_well_known(parsed_uri.hostname))\n",
"if delegated_server:\n",
"delegated_server = well_known_result.delegated_server\n",
"uri = urllib.parse.urlunparse((parsed_uri.scheme, delegated_server,\n parsed_uri.path, parsed_uri.params, parsed_uri.query, parsed_uri.fragment))\n",
"if headers is None:\n",
"parsed_uri = urllib.parse.urlparse(uri)\n",
"headers = Headers()\n",
"headers = headers.copy()\n",
"if not headers.hasHeader(b'host'):\n",
"headers.addRawHeader(b'host', parsed_uri.netloc)\n",
"if not headers.hasHeader(b'user-agent'):\n",
"headers.addRawHeader(b'user-agent', self.user_agent)\n",
"res = yield make_deferred_yieldable(self._agent.request(method, uri,\n headers, bodyProducer))\n",
"return res\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Docstring",
"Assign'",
"Assert'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_21():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_47 = {'image_location': VAR_35, 'image_path': VAR_34}\n",
"VAR_10['properties']['image_state'] = 'downloading'\n",
"self.service.update(VAR_5, VAR_7, VAR_10)\n",
"VAR_49 = []\n",
"VAR_0.exception(_('Failed to download %(image_location)s to %(image_path)s'\n ), VAR_47)\n",
"VAR_10['properties']['image_state'] = 'decrypting'\n",
"VAR_50 = VAR_16.find('image').getiterator('filename')\n",
"VAR_10['properties']['image_state'] = 'failed_download'\n",
"self.service.update(VAR_5, VAR_7, VAR_10)\n",
"for fn_element in VAR_50:\n",
"self.service.update(VAR_5, VAR_7, VAR_10)\n",
"VAR_52 = VAR_16.find('image/ec2_encrypted_key').text\n",
"VAR_0.exception(_('Failed to decrypt %(image_location)s to %(image_path)s'),\n VAR_47)\n",
"VAR_10['properties']['image_state'] = 'untarring'\n",
"VAR_57 = self._download_file(VAR_13, fn_element.text, VAR_34)\n",
"VAR_51 = os.path.join(VAR_34, 'image.encrypted')\n",
"return\n",
"VAR_18 = binascii.a2b_hex(VAR_52)\n",
"VAR_10['properties']['image_state'] = 'failed_decrypt'\n",
"self.service.update(VAR_5, VAR_7, VAR_10)\n",
"VAR_49.append(VAR_57)\n",
"for VAR_14 in VAR_49:\n",
"VAR_53 = VAR_16.find('image/ec2_encrypted_iv').text\n",
"self.service.update(VAR_5, VAR_7, VAR_10)\n",
"VAR_56 = self._untarzip_image(VAR_34, VAR_55)\n",
"VAR_0.exception(_('Failed to untar %(image_location)s to %(image_path)s'),\n VAR_47)\n",
"VAR_10['properties']['image_state'] = 'uploading'\n",
"shutil.copyfileobj(VAR_57, combined)\n",
"VAR_19 = binascii.a2b_hex(VAR_53)\n",
"return\n",
"VAR_10['properties']['image_state'] = 'failed_untar'\n",
"self.service.update(VAR_5, VAR_7, VAR_10)\n",
"VAR_54 = crypto.key_path(VAR_5.project_id)\n",
"self.service.update(VAR_5, VAR_7, VAR_10)\n",
"self.service.update(VAR_5, VAR_7, VAR_10, VAR_42)\n",
"VAR_0.exception(_('Failed to upload %(image_location)s to %(image_path)s'),\n VAR_47)\n",
"VAR_10['properties']['image_state'] = 'available'\n",
"VAR_55 = os.path.join(VAR_34, 'image.tar.gz')\n",
"return\n",
"VAR_10['properties']['image_state'] = 'failed_upload'\n",
"VAR_10['status'] = 'active'\n",
"self._decrypt_image(VAR_51, VAR_18, VAR_19, VAR_54, VAR_55)\n",
"self.service.update(VAR_5, VAR_7, VAR_10)\n",
"self.service.update(VAR_5, VAR_7, VAR_10)\n",
"return\n",
"shutil.rmtree(VAR_34)\n"
] | [
"def delayed_create():...\n",
"\"\"\"docstring\"\"\"\n",
"log_vars = {'image_location': image_location, 'image_path': image_path}\n",
"metadata['properties']['image_state'] = 'downloading'\n",
"self.service.update(context, image_uuid, metadata)\n",
"parts = []\n",
"LOG.exception(_('Failed to download %(image_location)s to %(image_path)s'),\n log_vars)\n",
"metadata['properties']['image_state'] = 'decrypting'\n",
"elements = manifest.find('image').getiterator('filename')\n",
"metadata['properties']['image_state'] = 'failed_download'\n",
"self.service.update(context, image_uuid, metadata)\n",
"for fn_element in elements:\n",
"self.service.update(context, image_uuid, metadata)\n",
"hex_key = manifest.find('image/ec2_encrypted_key').text\n",
"LOG.exception(_('Failed to decrypt %(image_location)s to %(image_path)s'),\n log_vars)\n",
"metadata['properties']['image_state'] = 'untarring'\n",
"part = self._download_file(bucket, fn_element.text, image_path)\n",
"enc_filename = os.path.join(image_path, 'image.encrypted')\n",
"return\n",
"encrypted_key = binascii.a2b_hex(hex_key)\n",
"metadata['properties']['image_state'] = 'failed_decrypt'\n",
"self.service.update(context, image_uuid, metadata)\n",
"parts.append(part)\n",
"for filename in parts:\n",
"hex_iv = manifest.find('image/ec2_encrypted_iv').text\n",
"self.service.update(context, image_uuid, metadata)\n",
"unz_filename = self._untarzip_image(image_path, dec_filename)\n",
"LOG.exception(_('Failed to untar %(image_location)s to %(image_path)s'),\n log_vars)\n",
"metadata['properties']['image_state'] = 'uploading'\n",
"shutil.copyfileobj(part, combined)\n",
"encrypted_iv = binascii.a2b_hex(hex_iv)\n",
"return\n",
"metadata['properties']['image_state'] = 'failed_untar'\n",
"self.service.update(context, image_uuid, metadata)\n",
"cloud_pk = crypto.key_path(context.project_id)\n",
"self.service.update(context, image_uuid, metadata)\n",
"self.service.update(context, image_uuid, metadata, image_file)\n",
"LOG.exception(_('Failed to upload %(image_location)s to %(image_path)s'),\n log_vars)\n",
"metadata['properties']['image_state'] = 'available'\n",
"dec_filename = os.path.join(image_path, 'image.tar.gz')\n",
"return\n",
"metadata['properties']['image_state'] = 'failed_upload'\n",
"metadata['status'] = 'active'\n",
"self._decrypt_image(enc_filename, encrypted_key, encrypted_iv, cloud_pk,\n dec_filename)\n",
"self.service.update(context, image_uuid, metadata)\n",
"self.service.update(context, image_uuid, metadata)\n",
"return\n",
"shutil.rmtree(image_path)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"For",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"For",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Return'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Return'",
"Expr'"
] |
[
"def FUNC_29(VAR_16):...\n",
"VAR_41 = calibre_db.session.query(db.Tags).join(db.books_tags_link).join(db\n .Books).filter(calibre_db.common_filters()).group_by(text(\n 'books_tags_link.tag')).order_by(db.Tags.name).all()\n",
"VAR_73 = calibre_db.session.query(db.Series).join(db.books_series_link).join(db\n .Books).filter(calibre_db.common_filters()).group_by(text(\n 'books_series_link.series')).order_by(db.Series.name).filter(calibre_db\n .common_filters()).all()\n",
"VAR_74 = ub.session.query(ub.Shelf).filter(or_(ub.Shelf.is_public == 1, ub.\n Shelf.user_id == int(VAR_87.id))).order_by(ub.Shelf.name).all()\n",
"VAR_75 = calibre_db.session.query(db.Data).join(db.Books).filter(calibre_db\n .common_filters()).group_by(db.Data.format).order_by(db.Data.format).all()\n",
"if VAR_87.filter_language() == u'all':\n",
"VAR_48 = calibre_db.speaking_language()\n",
"VAR_48 = None\n",
"return render_title_template('search_form.html', VAR_41=tags, VAR_48=\n languages, VAR_75=extensions, VAR_73=series, VAR_74=shelves, VAR_150=_(\n u'Advanced Search'), VAR_16=cc, VAR_9='advsearch')\n"
] | [
"def render_prepare_search_form(cc):...\n",
"tags = calibre_db.session.query(db.Tags).join(db.books_tags_link).join(db.Books\n ).filter(calibre_db.common_filters()).group_by(text('books_tags_link.tag')\n ).order_by(db.Tags.name).all()\n",
"series = calibre_db.session.query(db.Series).join(db.books_series_link).join(db\n .Books).filter(calibre_db.common_filters()).group_by(text(\n 'books_series_link.series')).order_by(db.Series.name).filter(calibre_db\n .common_filters()).all()\n",
"shelves = ub.session.query(ub.Shelf).filter(or_(ub.Shelf.is_public == 1, ub\n .Shelf.user_id == int(current_user.id))).order_by(ub.Shelf.name).all()\n",
"extensions = calibre_db.session.query(db.Data).join(db.Books).filter(calibre_db\n .common_filters()).group_by(db.Data.format).order_by(db.Data.format).all()\n",
"if current_user.filter_language() == u'all':\n",
"languages = calibre_db.speaking_language()\n",
"languages = None\n",
"return render_title_template('search_form.html', tags=tags, languages=\n languages, extensions=extensions, series=series, shelves=shelves, title\n =_(u'Advanced Search'), cc=cc, page='advsearch')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_19(self):...\n",
"VAR_26 = np.array([[1], [2]])\n",
"VAR_31 = os.path.join(test.get_temp_dir(), 'input.npz')\n",
"np.savez(VAR_31, VAR_53=x0)\n",
"VAR_27 = np.ones([2, 10])\n",
"VAR_20 = 'x0=' + VAR_31 + '[a]'\n",
"VAR_21 = 'x0=np.ones([2,10])'\n",
"VAR_30 = saved_model_cli.load_inputs_from_input_arg_string(VAR_20, VAR_21, '')\n",
"self.assertTrue(np.all(VAR_30['x0'] == VAR_27))\n"
] | [
"def testInputParserBothDuplicate(self):...\n",
"x0 = np.array([[1], [2]])\n",
"input_path = os.path.join(test.get_temp_dir(), 'input.npz')\n",
"np.savez(input_path, a=x0)\n",
"x1 = np.ones([2, 10])\n",
"input_str = 'x0=' + input_path + '[a]'\n",
"input_expr_str = 'x0=np.ones([2,10])'\n",
"feed_dict = saved_model_cli.load_inputs_from_input_arg_string(input_str,\n input_expr_str, '')\n",
"self.assertTrue(np.all(feed_dict['x0'] == x1))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_65(self, VAR_112=VAR_3, VAR_134=VAR_3, VAR_135=VAR_3, VAR_136=VAR_3):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.is_logged_in():\n",
"redirect(self.settings.login_url, client_side=self.settings.client_side)\n",
"if self.settings.login_form != self:\n",
"VAR_370 = self.settings.login_form\n",
"VAR_101 = self.db\n",
"if hasattr(VAR_370, 'change_password_url'):\n",
"VAR_254 = self.table_user()\n",
"VAR_112 = VAR_370.change_password_url(VAR_112)\n",
"VAR_278 = VAR_101(VAR_254.id == self.user.id)\n",
"if VAR_112 is not None:\n",
"VAR_56 = VAR_263.request\n",
"redirect(VAR_112)\n",
"VAR_139 = VAR_263.session\n",
"if VAR_112 is VAR_3:\n",
"VAR_112 = self.get_vars_next() or self.settings.change_password_next\n",
"if VAR_134 is VAR_3:\n",
"VAR_134 = self.settings.change_password_onvalidation\n",
"if VAR_135 is VAR_3:\n",
"VAR_135 = self.settings.change_password_onaccept\n",
"if VAR_136 is VAR_3:\n",
"VAR_136 = self.messages['change_password_log']\n",
"VAR_261 = self.settings.password_field\n",
"VAR_279 = VAR_254[VAR_261].requires\n",
"if not isinstance(VAR_279, (list, tuple)):\n",
"VAR_279 = [VAR_279]\n",
"VAR_279 = [VAR_445 for VAR_445 in VAR_279 if isinstance(VAR_445, CRYPT)]\n",
"if VAR_279:\n",
"VAR_279[0] = CRYPT(**requires[0].__dict__)\n",
"VAR_7 = SQLFORM.factory(VAR_1('old_password', 'password', VAR_279=requires,\n VAR_60=self.messages.old_password), VAR_1('new_password', 'password',\n VAR_60=self.messages.new_password, VAR_279=table_user[passfield].\n requires), VAR_1('new_password2', 'password', VAR_60=self.messages.\n verify_password, VAR_279=[IS_EXPR('value==%s' % repr(request.vars.\n new_password), self.messages.mismatched_password)]), submit_button=self\n .messages.password_change_button, hidden=dict(_next=next), VAR_273=self\n .settings.formstyle, separator=self.settings.label_separator)\n",
"VAR_279[0].min_length = 0\n",
"if VAR_7.accepts(VAR_56, VAR_139, VAR_171='change_password', VAR_134=\n",
"VAR_379 = VAR_278.select(VAR_175=(0, 1), orderby_on_limitby=False).first()\n",
"return VAR_7\n",
"if not VAR_7.vars['old_password'] == VAR_379[VAR_261]:\n",
"VAR_7.errors['old_password'] = self.messages.invalid_password\n",
"VAR_27 = {VAR_261: str(VAR_7.vars.new_password)}\n",
"VAR_278.update(**d)\n",
"VAR_139.flash = self.messages.password_changed\n",
"self.log_event(VAR_136, self.user)\n",
"VAR_26(VAR_135, VAR_7)\n",
"if not VAR_112:\n",
"VAR_112 = self.url(VAR_11=request.args)\n",
"VAR_112 = FUNC_4(VAR_112, VAR_7)\n",
"redirect(VAR_112, client_side=self.settings.client_side)\n"
] | [
"def change_password(self, next=DEFAULT, onvalidation=DEFAULT, onaccept=...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.is_logged_in():\n",
"redirect(self.settings.login_url, client_side=self.settings.client_side)\n",
"if self.settings.login_form != self:\n",
"cas = self.settings.login_form\n",
"db = self.db\n",
"if hasattr(cas, 'change_password_url'):\n",
"table_user = self.table_user()\n",
"next = cas.change_password_url(next)\n",
"s = db(table_user.id == self.user.id)\n",
"if next is not None:\n",
"request = current.request\n",
"redirect(next)\n",
"session = current.session\n",
"if next is DEFAULT:\n",
"next = self.get_vars_next() or self.settings.change_password_next\n",
"if onvalidation is DEFAULT:\n",
"onvalidation = self.settings.change_password_onvalidation\n",
"if onaccept is DEFAULT:\n",
"onaccept = self.settings.change_password_onaccept\n",
"if log is DEFAULT:\n",
"log = self.messages['change_password_log']\n",
"passfield = self.settings.password_field\n",
"requires = table_user[passfield].requires\n",
"if not isinstance(requires, (list, tuple)):\n",
"requires = [requires]\n",
"requires = [t for t in requires if isinstance(t, CRYPT)]\n",
"if requires:\n",
"requires[0] = CRYPT(**requires[0].__dict__)\n",
"form = SQLFORM.factory(Field('old_password', 'password', requires=requires,\n label=self.messages.old_password), Field('new_password', 'password',\n label=self.messages.new_password, requires=table_user[passfield].\n requires), Field('new_password2', 'password', label=self.messages.\n verify_password, requires=[IS_EXPR('value==%s' % repr(request.vars.\n new_password), self.messages.mismatched_password)]), submit_button=self\n .messages.password_change_button, hidden=dict(_next=next), formstyle=\n self.settings.formstyle, separator=self.settings.label_separator)\n",
"requires[0].min_length = 0\n",
"if form.accepts(request, session, formname='change_password', onvalidation=\n",
"current_user = s.select(limitby=(0, 1), orderby_on_limitby=False).first()\n",
"return form\n",
"if not form.vars['old_password'] == current_user[passfield]:\n",
"form.errors['old_password'] = self.messages.invalid_password\n",
"d = {passfield: str(form.vars.new_password)}\n",
"s.update(**d)\n",
"session.flash = self.messages.password_changed\n",
"self.log_event(log, self.user)\n",
"callback(onaccept, form)\n",
"if not next:\n",
"next = self.url(args=request.args)\n",
"next = replace_id(next, form)\n",
"redirect(next, client_side=self.settings.client_side)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_22(self):...\n",
"VAR_29 = coreapi.Document(VAR_5='', title='Example API', content={'users':\n {'list': coreapi.Link(url='/users/', action='get', fields=[])}})\n",
"VAR_30 = VAR_29['users']\n",
"VAR_31 = schema_links(VAR_30)\n",
"assert len(VAR_31) is 1\n",
"assert 'list' in VAR_31\n"
] | [
"def test_single_action(self):...\n",
"schema = coreapi.Document(url='', title='Example API', content={'users': {\n 'list': coreapi.Link(url='/users/', action='get', fields=[])}})\n",
"section = schema['users']\n",
"flat_links = schema_links(section)\n",
"assert len(flat_links) is 1\n",
"assert 'list' in flat_links\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assert'",
"Assert'"
] |
[
"def FUNC_11(VAR_37):...\n",
"VAR_4, VAR_39 = VAR_37\n",
"VAR_25.write(VAR_4)\n",
"return VAR_39\n"
] | [
"def write_to(r):...\n",
"data, response = r\n",
"output_stream.write(data)\n",
"return response\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_111(VAR_129):...\n",
"from frappe.desk.search import validate_and_sanitize_search_inputs as func\n",
"return func(VAR_129)\n"
] | [
"def validate_and_sanitize_search_inputs(fn):...\n",
"from frappe.desk.search import validate_and_sanitize_search_inputs as func\n",
"return func(fn)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"ImportFrom'",
"Return'"
] |
[
"def __str__(self):...\n",
"return '%s' % self.name\n"
] | [
"def __str__(self):...\n",
"return '%s' % self.name\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def __init__(self, VAR_4, VAR_58, *VAR_6, **VAR_7):...\n",
"\"\"\"docstring\"\"\"\n",
"super().__init__(*VAR_6, **kwargs)\n",
"self.fields['lang'].choices += VAR_58.languages.as_choices()\n",
"self.fields['component'].choices += VAR_58.component_set.filter_access(VAR_4\n ).order().values_list('slug', 'name')\n"
] | [
"def __init__(self, user, project, *args, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"super().__init__(*args, **kwargs)\n",
"self.fields['lang'].choices += project.languages.as_choices()\n",
"self.fields['component'].choices += project.component_set.filter_access(user\n ).order().values_list('slug', 'name')\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"AugAssign'",
"AugAssign'"
] |
[
"def FUNC_12(self, VAR_3, VAR_4):...\n",
"return django.forms.CharField(widget=django.forms.HiddenInput, **options)\n"
] | [
"def create_hidden_field(self, field, options):...\n",
"return django.forms.CharField(widget=django.forms.HiddenInput, **options)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_61(VAR_70):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_137 = {' ': 'normal', '+': 'plus', '-': 'minus'}\n",
"return VAR_137[VAR_70[0]]\n"
] | [
"def getclass(item):...\n",
"\"\"\"docstring\"\"\"\n",
"operators = {' ': 'normal', '+': 'plus', '-': 'minus'}\n",
"return operators[item[0]]\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_30(self, VAR_26):...\n",
"\"\"\"docstring\"\"\"\n",
"if isinstance(VAR_26, bytes):\n",
"VAR_26 = VAR_26.decode('utf-8')\n",
"if isinstance(VAR_26, str):\n",
"return '${len}\\r\\n{str}\\r\\n'.format(len=len(obj), str=obj)\n",
"if isinstance(VAR_26, int):\n",
"return ':{val}\\r\\n'.format(val=obj)\n",
"if isinstance(VAR_26, (list, tuple)):\n",
"VAR_54 = ''.join(self.encode(a) for a in VAR_26)\n",
"return '*{len}\\r\\n{items}'.format(len=len(obj), VAR_54=items)\n"
] | [
"def encode(self, obj):...\n",
"\"\"\"docstring\"\"\"\n",
"if isinstance(obj, bytes):\n",
"obj = obj.decode('utf-8')\n",
"if isinstance(obj, str):\n",
"return '${len}\\r\\n{str}\\r\\n'.format(len=len(obj), str=obj)\n",
"if isinstance(obj, int):\n",
"return ':{val}\\r\\n'.format(val=obj)\n",
"if isinstance(obj, (list, tuple)):\n",
"items = ''.join(self.encode(a) for a in obj)\n",
"return '*{len}\\r\\n{items}'.format(len=len(obj), items=items)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Return'"
] |
[
"async def FUNC_8(self, VAR_10: str, VAR_11: str, VAR_20: Iterable[str]):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_28 = await self.store.get_room_version(VAR_11)\n",
"VAR_60 = {}\n",
"async def FUNC_48(VAR_12: str):...\n",
"VAR_1 = await self.federation_client.get_pdu([VAR_10], VAR_12, VAR_28,\n outlier=True)\n",
"VAR_0.warning('Error fetching missing state/auth event %s: %s %s', VAR_12,\n type(VAR_170), VAR_170)\n",
"await concurrently_execute(FUNC_48, VAR_20, 5)\n",
"if VAR_1 is None:\n",
"VAR_3 = [aid for VAR_1 in VAR_60.values() for aid in VAR_1.auth_event_ids() if\n aid not in VAR_60]\n",
"VAR_0.warning(\"Server %s didn't return event %s\", VAR_10, VAR_12)\n",
"VAR_60[VAR_1.event_id] = VAR_1\n",
"VAR_88 = await self.store.get_events(VAR_3, allow_rejected=True)\n",
"return\n",
"VAR_34 = []\n",
"for VAR_1 in VAR_60.values():\n",
"VAR_89 = {}\n",
"await self._handle_new_events(VAR_10, VAR_11, VAR_34)\n",
"for auth_event_id in VAR_1.auth_event_ids():\n",
"VAR_187 = VAR_88.get(auth_event_id) or VAR_60.get(auth_event_id)\n",
"VAR_34.append(CLASS_0(VAR_1, None, VAR_89))\n",
"if VAR_187:\n",
"VAR_89[VAR_187.type, VAR_187.state_key] = VAR_187\n",
"VAR_0.info('Missing auth event %s', auth_event_id)\n"
] | [
"async def _get_events_and_persist(self, destination: str, room_id: str,...\n",
"\"\"\"docstring\"\"\"\n",
"room_version = await self.store.get_room_version(room_id)\n",
"event_map = {}\n",
"async def get_event(event_id: str):...\n",
"event = await self.federation_client.get_pdu([destination], event_id,\n room_version, outlier=True)\n",
"logger.warning('Error fetching missing state/auth event %s: %s %s',\n event_id, type(e), e)\n",
"await concurrently_execute(get_event, events, 5)\n",
"if event is None:\n",
"auth_events = [aid for event in event_map.values() for aid in event.\n auth_event_ids() if aid not in event_map]\n",
"logger.warning(\"Server %s didn't return event %s\", destination, event_id)\n",
"event_map[event.event_id] = event\n",
"persisted_events = await self.store.get_events(auth_events, allow_rejected=True\n )\n",
"return\n",
"event_infos = []\n",
"for event in event_map.values():\n",
"auth = {}\n",
"await self._handle_new_events(destination, room_id, event_infos)\n",
"for auth_event_id in event.auth_event_ids():\n",
"ae = persisted_events.get(auth_event_id) or event_map.get(auth_event_id)\n",
"event_infos.append(_NewEventInfo(event, None, auth))\n",
"if ae:\n",
"auth[ae.type, ae.state_key] = ae\n",
"logger.info('Missing auth event %s', auth_event_id)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"AsyncFunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"For",
"Assign'",
"Expr'",
"For",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Expr'"
] |
[
"def FUNC_174(VAR_212):...\n",
"if VAR_212.render:\n",
"if VAR_212.render in self.settings.render.keys():\n",
"VAR_472 = self.markmin_render\n",
"VAR_472 = self.settings.render[VAR_212.render]\n",
"VAR_472 = getattr(self, '%s_render' % VAR_212.render)\n",
"return VAR_472(VAR_212)\n"
] | [
"def custom_render(page):...\n",
"if page.render:\n",
"if page.render in self.settings.render.keys():\n",
"my_render = self.markmin_render\n",
"my_render = self.settings.render[page.render]\n",
"my_render = getattr(self, '%s_render' % page.render)\n",
"return my_render(page)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_33(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return json.dumps(cherry.config['media.playable'])\n"
] | [
"def api_getplayables(self):...\n",
"\"\"\"docstring\"\"\"\n",
"return json.dumps(cherry.config['media.playable'])\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_2(VAR_2, VAR_3, VAR_4, **VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_10 = {}\n",
"VAR_14 = {Change.ACTION_NEW: 'new', Change.ACTION_APPROVE: 'approve'}\n",
"VAR_11 = Change.objects.content().filter(unit__isnull=False)\n",
"if VAR_2:\n",
"VAR_11 = VAR_11.filter(author=user)\n",
"VAR_11 = VAR_11.filter(author__isnull=False)\n",
"VAR_15 = VAR_11.filter(timestamp__range=(start_date, end_date), **kwargs\n ).prefetch_related('author', 'unit')\n",
"for change in VAR_15:\n",
"VAR_30 = change.author.email\n",
"return list(VAR_10.values())\n",
"if VAR_30 not in VAR_10:\n",
"VAR_10[VAR_30] = VAR_39 = {'name': change.author.full_name, 'email': VAR_30}\n",
"VAR_39 = VAR_10[VAR_30]\n",
"VAR_39.update(VAR_9)\n",
"VAR_31 = len(change.unit.source)\n",
"VAR_32 = change.unit.num_words\n",
"VAR_33 = len(change.target)\n",
"VAR_34 = len(change.target.split())\n",
"VAR_35 = change.get_distance()\n",
"VAR_39['chars'] += VAR_31\n",
"VAR_39['words'] += VAR_32\n",
"VAR_39['t_chars'] += VAR_33\n",
"VAR_39['t_words'] += VAR_34\n",
"VAR_39['edits'] += VAR_35\n",
"VAR_39['count'] += 1\n",
"VAR_36 = VAR_14.get(change.action, 'edit')\n",
"VAR_39['t_chars_' + VAR_36] += VAR_33\n",
"VAR_39['t_words_' + VAR_36] += VAR_34\n",
"VAR_39['chars_' + VAR_36] += VAR_31\n",
"VAR_39['words_' + VAR_36] += VAR_32\n",
"VAR_39['edits_' + VAR_36] += VAR_35\n",
"VAR_39['count_' + VAR_36] += 1\n"
] | [
"def generate_counts(user, start_date, end_date, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"result = {}\n",
"action_map = {Change.ACTION_NEW: 'new', Change.ACTION_APPROVE: 'approve'}\n",
"base = Change.objects.content().filter(unit__isnull=False)\n",
"if user:\n",
"base = base.filter(author=user)\n",
"base = base.filter(author__isnull=False)\n",
"changes = base.filter(timestamp__range=(start_date, end_date), **kwargs\n ).prefetch_related('author', 'unit')\n",
"for change in changes:\n",
"email = change.author.email\n",
"return list(result.values())\n",
"if email not in result:\n",
"result[email] = current = {'name': change.author.full_name, 'email': email}\n",
"current = result[email]\n",
"current.update(COUNT_DEFAULTS)\n",
"src_chars = len(change.unit.source)\n",
"src_words = change.unit.num_words\n",
"tgt_chars = len(change.target)\n",
"tgt_words = len(change.target.split())\n",
"edits = change.get_distance()\n",
"current['chars'] += src_chars\n",
"current['words'] += src_words\n",
"current['t_chars'] += tgt_chars\n",
"current['t_words'] += tgt_words\n",
"current['edits'] += edits\n",
"current['count'] += 1\n",
"suffix = action_map.get(change.action, 'edit')\n",
"current['t_chars_' + suffix] += tgt_chars\n",
"current['t_words_' + suffix] += tgt_words\n",
"current['chars_' + suffix] += src_chars\n",
"current['words_' + suffix] += src_words\n",
"current['edits_' + suffix] += edits\n",
"current['count_' + suffix] += 1\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"Assign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'"
] |
[
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_138 = datetime.timedelta(0)\n",
"for val in self.followup_set.all():\n",
"if val.time_spent:\n",
"return VAR_138\n",
"VAR_138 = VAR_138 + val.time_spent\n"
] | [
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"total = datetime.timedelta(0)\n",
"for val in self.followup_set.all():\n",
"if val.time_spent:\n",
"return total\n",
"total = total + val.time_spent\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"For",
"Condition",
"Return'",
"Assign'"
] |
[
"def FUNC_75(VAR_79, VAR_130=False, VAR_131=True):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_79 = CLASS_0(VAR_79)\n",
"if not VAR_79.doctype_or_field:\n",
"VAR_79.doctype_or_field = 'DocField'\n",
"if not VAR_79.doctype:\n",
"if not VAR_79.property_type:\n",
"VAR_211 = VAR_12.sql_list(\n 'select distinct parent from tabDocField where fieldname=%s', VAR_79.\n fieldname)\n",
"VAR_211 = [VAR_79.doctype]\n",
"VAR_79.property_type = VAR_12.get_value('DocField', {'parent': 'DocField',\n 'fieldname': VAR_79.property}, 'fieldtype') or 'Data'\n",
"for VAR_62 in VAR_211:\n",
"if not VAR_79.property_type:\n",
"VAR_79.property_type = VAR_12.get_value('DocField', {'parent': VAR_62,\n 'fieldname': VAR_79.fieldname}, 'fieldtype') or 'Data'\n",
"VAR_212 = FUNC_45({'doctype': 'Property Setter', 'doctype_or_field': VAR_79\n .doctype_or_field, 'doc_type': VAR_62, 'field_name': VAR_79.fieldname,\n 'row_name': VAR_79.row_name, 'property': VAR_79.property, 'value':\n VAR_79.value, 'property_type': VAR_79.property_type or 'Data',\n '__islocal': 1})\n",
"VAR_212.flags.ignore_validate = VAR_130\n",
"VAR_212.flags.validate_fields_for_doctype = VAR_131\n",
"VAR_212.validate_fieldtype_change()\n",
"VAR_212.insert()\n"
] | [
"def make_property_setter(args, ignore_validate=False,...\n",
"\"\"\"docstring\"\"\"\n",
"args = _dict(args)\n",
"if not args.doctype_or_field:\n",
"args.doctype_or_field = 'DocField'\n",
"if not args.doctype:\n",
"if not args.property_type:\n",
"doctype_list = db.sql_list(\n 'select distinct parent from tabDocField where fieldname=%s', args.\n fieldname)\n",
"doctype_list = [args.doctype]\n",
"args.property_type = db.get_value('DocField', {'parent': 'DocField',\n 'fieldname': args.property}, 'fieldtype') or 'Data'\n",
"for doctype in doctype_list:\n",
"if not args.property_type:\n",
"args.property_type = db.get_value('DocField', {'parent': doctype,\n 'fieldname': args.fieldname}, 'fieldtype') or 'Data'\n",
"ps = get_doc({'doctype': 'Property Setter', 'doctype_or_field': args.\n doctype_or_field, 'doc_type': doctype, 'field_name': args.fieldname,\n 'row_name': args.row_name, 'property': args.property, 'value': args.\n value, 'property_type': args.property_type or 'Data', '__islocal': 1})\n",
"ps.flags.ignore_validate = ignore_validate\n",
"ps.flags.validate_fields_for_doctype = validate_fields_for_doctype\n",
"ps.validate_fieldtype_change()\n",
"ps.insert()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_20(self):...\n",
"return [os.path.join(d, 'custom') for d in (self.config_dir, os.path.join(\n get_ipython_dir(), 'profile_default', 'static'), DEFAULT_STATIC_FILES_PATH)\n ]\n"
] | [
"def _static_custom_path_default(self):...\n",
"return [os.path.join(d, 'custom') for d in (self.config_dir, os.path.join(\n get_ipython_dir(), 'profile_default', 'static'), DEFAULT_STATIC_FILES_PATH)\n ]\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"super(CLASS_0, self).setUp()\n",
"if platform.system() == 'Windows':\n",
"self.skipTest('Skipping failing tests on Windows.')\n"
] | [
"def setUp(self):...\n",
"super(SavedModelCLITestCase, self).setUp()\n",
"if platform.system() == 'Windows':\n",
"self.skipTest('Skipping failing tests on Windows.')\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"For",
"Expr'"
] |
[
"def FUNC_24(self):...\n",
"VAR_5, VAR_15 = self._test_confirm_start()\n",
"VAR_3 = self.client.post(VAR_15, {'new_password1': 'anewpassword',\n 'new_password2': 'anewpassword'})\n",
"self.assertEqual(VAR_3.status_code, 302)\n",
"self.assertURLEqual(VAR_3.url, '/reset/done/')\n"
] | [
"def test_confirm_redirect_default(self):...\n",
"url, path = self._test_confirm_start()\n",
"response = self.client.post(path, {'new_password1': 'anewpassword',\n 'new_password2': 'anewpassword'})\n",
"self.assertEqual(response.status_code, 302)\n",
"self.assertURLEqual(response.url, '/reset/done/')\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_13(VAR_20):...\n",
"if VAR_8.interface.analytics_enabled:\n",
"requests.post(VAR_4, VAR_64={'ip_address': IP_ADDRESS, 'feature': feature},\n timeout=3)\n"
] | [
"def log_feature_analytics(feature):...\n",
"if app.interface.analytics_enabled:\n",
"requests.post(GRADIO_FEATURE_ANALYTICS_URL, data={'ip_address': IP_ADDRESS,\n 'feature': feature}, timeout=3)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'"
] |
[
"def FUNC_8(VAR_12):...\n",
""
] | [
"def migrate_guest_password(engine):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_27():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3 = FUNC_5(request.vars.app)\n",
"VAR_56 = apath(VAR_3, VAR_122=request)\n",
"VAR_57 = {'theme': 'web2py', 'editor': 'default', 'closetag': 'true',\n 'codefolding': 'false', 'tabwidth': '4', 'indentwithtabs': 'false',\n 'linenumbers': 'true', 'highlightline': 'true'}\n",
"VAR_58 = Config(os.path.join(request.folder, 'settings.cfg'), section=\n 'editor', default_values={})\n",
"VAR_57.update(VAR_58.read())\n",
"if not request.ajax and not is_mobile:\n",
"VAR_43.title = T('Editing %s') % VAR_3\n",
"if 'settings' in request.vars:\n",
"return VAR_43.render('default/edit.html', dict(VAR_3=app, editor_settings=\n preferences))\n",
"if request.post_vars:\n",
"\"\"\" File edit handler \"\"\"\n",
"if PY2:\n",
"VAR_178 = {'realfilename': 'settings', 'filename': 'settings', 'id':\n 'editor_settings', 'force': False}\n",
"VAR_3 = FUNC_5(request.vars.app)\n",
"VAR_201 = request.post_vars.items()\n",
"VAR_201 = list(request.post_vars.items())\n",
"VAR_178['plain_html'] = VAR_43.render('default/editor_settings.html', {\n 'editor_settings': VAR_57})\n",
"VAR_5 = '/'.join(request.args)\n",
"VAR_201 += [(opt, 'false') for opt in VAR_57 if opt not in request.post_vars]\n",
"return VAR_43.json(VAR_178)\n",
"VAR_59 = request.args[-1]\n",
"if VAR_58.save(VAR_201):\n",
"if request.vars.app:\n",
"VAR_43.headers['web2py-component-flash'] = T('Preferences saved correctly')\n",
"VAR_43.headers['web2py-component-flash'] = T(\n 'Preferences saved on session only')\n",
"VAR_15 = abspath(VAR_5)\n",
"VAR_15 = apath(VAR_5, VAR_122=request)\n",
"VAR_43.headers['web2py-component-command'] = (\n \"update_editor(%s);$('a[href=#editor_settings] button.close').click();\" %\n VAR_43.json(VAR_58.read()))\n",
"if VAR_5[-3:] == '.py':\n",
"return\n",
"VAR_127 = 'python'\n",
"if VAR_5[-5:] == '.html':\n",
"if 'revert' in request.vars and os.path.exists(VAR_15 + '.bak'):\n",
"VAR_127 = 'html'\n",
"if VAR_5[-5:] == '.load':\n",
"VAR_2 = FUNC_3(VAR_15 + '.bak')\n",
"session.flash = T('Invalid action')\n",
"FUNC_4(VAR_15, VAR_2)\n",
"VAR_2 = FUNC_3(VAR_15)\n",
"session.flash = T('Invalid action')\n",
"VAR_130 = FUNC_0(VAR_2)\n",
"VAR_127 = 'html'\n",
"if VAR_5[-4:] == '.css':\n",
"VAR_179 = FUNC_3(VAR_15)\n",
"if 'from_ajax' in request.vars:\n",
"VAR_128 = md5_hash(VAR_2)\n",
"if 'from_ajax' in request.vars:\n",
"VAR_128 = md5_hash(VAR_2)\n",
"VAR_127 = 'css'\n",
"if VAR_5[-3:] == '.js':\n",
"return VAR_43.json({'error': str(T('Invalid action'))})\n",
"redirect(URL('site'))\n",
"VAR_129 = time.ctime(os.stat(VAR_15)[stat.ST_MTIME])\n",
"return VAR_43.json({'error': str(T('Invalid action'))})\n",
"redirect(URL('site'))\n",
"VAR_129 = time.ctime(os.stat(VAR_15)[stat.ST_MTIME])\n",
"VAR_127 = 'javascript'\n",
"VAR_127 = 'html'\n",
"FUNC_4(VAR_15 + '.bak', VAR_179)\n",
"if request.vars.file_hash and request.vars.file_hash != VAR_128:\n",
"VAR_43.flash = T('file \"%s\" of %s restored', (VAR_5, VAR_129))\n",
"session.flash = T('file changed on disk')\n",
"if request.vars.data:\n",
"VAR_60 = request.vars.data or request.vars.revert\n",
"VAR_2 = request.vars.data.replace('\\r\\n', '\\n').strip() + '\\n'\n",
"FUNC_4(VAR_15 + '.bak', VAR_2)\n",
"VAR_61 = None\n",
"FUNC_4(VAR_15 + '.1', VAR_2)\n",
"VAR_2 = request.vars.data.replace('\\r\\n', '\\n').strip() + '\\n'\n",
"if VAR_127 == 'python' and request.vars.data:\n",
"if 'from_ajax' in request.vars:\n",
"FUNC_4(VAR_15, VAR_2)\n",
"import _ast\n",
"if VAR_60 and request.args[1] == 'modules':\n",
"return VAR_43.json({'error': str(T('file changed on disk')), 'redirect':\n URL('resolve', VAR_98=request.args)})\n",
"redirect(URL('resolve', VAR_98=request.args))\n",
"VAR_202 = FUNC_0(VAR_2)\n",
"VAR_180 = request.vars.data.rstrip().replace('\\r\\n', '\\n') + '\\n'\n",
"VAR_203 = sum([(len(VAR_69) + 1) for l, VAR_69 in enumerate(request.vars.\n data.split('\\n')) if l < VAR_114.lineno - 1])\n",
"VAR_62 = None\n",
"VAR_181 = '.'.join(request.args[2:])[:-3]\n",
"VAR_43.flash = DIV(T('failed to reload module because:'), PRE(repr(VAR_114)))\n",
"FUNC_1(VAR_3, 'EDIT', VAR_5, VAR_6=lineno_new - lineno_old)\n",
"compile(VAR_180, VAR_15, 'exec', _ast.PyCF_ONLY_AST)\n",
"if VAR_114.text and VAR_114.offset:\n",
"VAR_63 = None\n",
"exec('import applications.%s.modules.%s' % (request.args[0], VAR_181))\n",
"VAR_128 = md5_hash(VAR_2)\n",
"VAR_213 = VAR_114.offset - (len(VAR_114.text) - len(VAR_114.text.splitlines\n ()[-1]))\n",
"VAR_213 = 0\n",
"VAR_64 = None\n",
"reload(sys.modules['applications.%s.modules.%s' % (request.args[0], VAR_181)])\n",
"VAR_129 = time.ctime(os.stat(VAR_15)[stat.ST_MTIME])\n",
"VAR_61 = {'start': VAR_203, 'end': VAR_203 + VAR_213 + 1, 'lineno': VAR_114\n .lineno, 'offset': VAR_213}\n",
"if VAR_127 == 'html' and len(request.args) >= 3:\n",
"VAR_43.flash = T('file saved on %s', VAR_129)\n",
"VAR_214 = VAR_114.__class__.__name__\n",
"VAR_214 = 'unknown exception!'\n",
"VAR_43.flash = DIV(T('failed to compile file because:'), BR(), B(VAR_214), \n ' ' + T('at line %s', VAR_114.lineno), VAR_213 and ' ' + T('at char %s',\n VAR_213) or '', PRE(repr(VAR_114)))\n",
"VAR_131 = os.path.join(request.args[0], 'controllers', request.args[2] + '.py')\n",
"if VAR_127 == 'python' and request.args[1] == 'controllers':\n",
"if os.path.exists(apath(VAR_131, VAR_122=request)):\n",
"VAR_3 = FUNC_5()\n",
"if len(request.args) > 2 and request.args[1] == 'controllers':\n",
"VAR_62 = URL('edit', VAR_98=[cfilename.replace(os.sep, '/')])\n",
"VAR_183 = os.path.splitext(request.args[2])[0]\n",
"VAR_132 = request.args[2][:-3]\n",
"VAR_132, VAR_83 = None, None\n",
"VAR_182 = request.args[3].replace('.html', '')\n",
"VAR_184 = os.path.join(VAR_3, 'views', VAR_183)\n",
"VAR_83 = find_exposed_functions(VAR_2)\n",
"VAR_83 = ['SyntaxError:Line:%d' % err.lineno]\n",
"if 'from_ajax' in request.vars:\n",
"VAR_64 = URL(request.args[0], request.args[2], VAR_182)\n",
"VAR_185 = apath(VAR_184, VAR_122=request)\n",
"VAR_83 = VAR_83 and sorted(VAR_83) or []\n",
"return VAR_43.json({'file_hash': VAR_128, 'saved_on': VAR_129, 'functions':\n VAR_83, 'controller': VAR_132, 'application': request.args[0],\n 'highlight': VAR_61})\n",
"VAR_133 = dict(VAR_3=request.args[0], VAR_160=request.vars.lineno or 1,\n editor_settings=preferences, VAR_5=filename, VAR_59=realfilename,\n VAR_127=filetype, VAR_2=data, VAR_62=edit_controller, VAR_128=file_hash,\n VAR_129=saved_on, VAR_132=controller, VAR_83=functions, VAR_64=\n view_link, VAR_63=editviewlinks, id=IS_SLUG()(filename)[0], force=True if\n request.vars.restore or request.vars.revert else False)\n",
"VAR_186 = []\n",
"VAR_134 = VAR_43.render('default/edit_js.html', VAR_133)\n",
"if os.path.exists(VAR_185):\n",
"VAR_133['plain_html'] = VAR_134\n",
"if os.path.isdir(VAR_185):\n",
"if os.path.exists(VAR_185 + '.html'):\n",
"if is_mobile:\n",
"VAR_186 = glob(os.path.join(VAR_185, '*.html'))\n",
"if len(VAR_186):\n",
"VAR_186.append(VAR_185 + '.html')\n",
"return VAR_43.render('default.mobile/edit.html', VAR_133, editor_settings=\n preferences)\n",
"return VAR_43.json(VAR_133)\n",
"VAR_63 = []\n",
"for v in sorted(VAR_186):\n",
"VAR_215 = os.path.split(v)[-1]\n",
"VAR_216 = '/'.join([VAR_184.replace(os.sep, '/'), VAR_215])\n",
"VAR_63.append(A(VAR_215.split('.')[0], VAR_144='editor_filelink', _href=URL\n ('edit', args=[vargs])))\n"
] | [
"def edit():...\n",
"\"\"\"docstring\"\"\"\n",
"app = get_app(request.vars.app)\n",
"app_path = apath(app, r=request)\n",
"preferences = {'theme': 'web2py', 'editor': 'default', 'closetag': 'true',\n 'codefolding': 'false', 'tabwidth': '4', 'indentwithtabs': 'false',\n 'linenumbers': 'true', 'highlightline': 'true'}\n",
"config = Config(os.path.join(request.folder, 'settings.cfg'), section=\n 'editor', default_values={})\n",
"preferences.update(config.read())\n",
"if not request.ajax and not is_mobile:\n",
"response.title = T('Editing %s') % app\n",
"if 'settings' in request.vars:\n",
"return response.render('default/edit.html', dict(app=app, editor_settings=\n preferences))\n",
"if request.post_vars:\n",
"\"\"\" File edit handler \"\"\"\n",
"if PY2:\n",
"details = {'realfilename': 'settings', 'filename': 'settings', 'id':\n 'editor_settings', 'force': False}\n",
"app = get_app(request.vars.app)\n",
"post_vars = request.post_vars.items()\n",
"post_vars = list(request.post_vars.items())\n",
"details['plain_html'] = response.render('default/editor_settings.html', {\n 'editor_settings': preferences})\n",
"filename = '/'.join(request.args)\n",
"post_vars += [(opt, 'false') for opt in preferences if opt not in request.\n post_vars]\n",
"return response.json(details)\n",
"realfilename = request.args[-1]\n",
"if config.save(post_vars):\n",
"if request.vars.app:\n",
"response.headers['web2py-component-flash'] = T('Preferences saved correctly')\n",
"response.headers['web2py-component-flash'] = T(\n 'Preferences saved on session only')\n",
"path = abspath(filename)\n",
"path = apath(filename, r=request)\n",
"response.headers['web2py-component-command'] = (\n \"update_editor(%s);$('a[href=#editor_settings] button.close').click();\" %\n response.json(config.read()))\n",
"if filename[-3:] == '.py':\n",
"return\n",
"filetype = 'python'\n",
"if filename[-5:] == '.html':\n",
"if 'revert' in request.vars and os.path.exists(path + '.bak'):\n",
"filetype = 'html'\n",
"if filename[-5:] == '.load':\n",
"data = safe_read(path + '.bak')\n",
"session.flash = T('Invalid action')\n",
"safe_write(path, data)\n",
"data = safe_read(path)\n",
"session.flash = T('Invalid action')\n",
"lineno_old = count_lines(data)\n",
"filetype = 'html'\n",
"if filename[-4:] == '.css':\n",
"data1 = safe_read(path)\n",
"if 'from_ajax' in request.vars:\n",
"file_hash = md5_hash(data)\n",
"if 'from_ajax' in request.vars:\n",
"file_hash = md5_hash(data)\n",
"filetype = 'css'\n",
"if filename[-3:] == '.js':\n",
"return response.json({'error': str(T('Invalid action'))})\n",
"redirect(URL('site'))\n",
"saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])\n",
"return response.json({'error': str(T('Invalid action'))})\n",
"redirect(URL('site'))\n",
"saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])\n",
"filetype = 'javascript'\n",
"filetype = 'html'\n",
"safe_write(path + '.bak', data1)\n",
"if request.vars.file_hash and request.vars.file_hash != file_hash:\n",
"response.flash = T('file \"%s\" of %s restored', (filename, saved_on))\n",
"session.flash = T('file changed on disk')\n",
"if request.vars.data:\n",
"data_or_revert = request.vars.data or request.vars.revert\n",
"data = request.vars.data.replace('\\r\\n', '\\n').strip() + '\\n'\n",
"safe_write(path + '.bak', data)\n",
"highlight = None\n",
"safe_write(path + '.1', data)\n",
"data = request.vars.data.replace('\\r\\n', '\\n').strip() + '\\n'\n",
"if filetype == 'python' and request.vars.data:\n",
"if 'from_ajax' in request.vars:\n",
"safe_write(path, data)\n",
"import _ast\n",
"if data_or_revert and request.args[1] == 'modules':\n",
"return response.json({'error': str(T('file changed on disk')), 'redirect':\n URL('resolve', args=request.args)})\n",
"redirect(URL('resolve', args=request.args))\n",
"lineno_new = count_lines(data)\n",
"code = request.vars.data.rstrip().replace('\\r\\n', '\\n') + '\\n'\n",
"start = sum([(len(line) + 1) for l, line in enumerate(request.vars.data.\n split('\\n')) if l < e.lineno - 1])\n",
"edit_controller = None\n",
"mopath = '.'.join(request.args[2:])[:-3]\n",
"response.flash = DIV(T('failed to reload module because:'), PRE(repr(e)))\n",
"log_progress(app, 'EDIT', filename, progress=lineno_new - lineno_old)\n",
"compile(code, path, 'exec', _ast.PyCF_ONLY_AST)\n",
"if e.text and e.offset:\n",
"editviewlinks = None\n",
"exec('import applications.%s.modules.%s' % (request.args[0], mopath))\n",
"file_hash = md5_hash(data)\n",
"offset = e.offset - (len(e.text) - len(e.text.splitlines()[-1]))\n",
"offset = 0\n",
"view_link = None\n",
"reload(sys.modules['applications.%s.modules.%s' % (request.args[0], mopath)])\n",
"saved_on = time.ctime(os.stat(path)[stat.ST_MTIME])\n",
"highlight = {'start': start, 'end': start + offset + 1, 'lineno': e.lineno,\n 'offset': offset}\n",
"if filetype == 'html' and len(request.args) >= 3:\n",
"response.flash = T('file saved on %s', saved_on)\n",
"ex_name = e.__class__.__name__\n",
"ex_name = 'unknown exception!'\n",
"response.flash = DIV(T('failed to compile file because:'), BR(), B(ex_name),\n ' ' + T('at line %s', e.lineno), offset and ' ' + T('at char %s',\n offset) or '', PRE(repr(e)))\n",
"cfilename = os.path.join(request.args[0], 'controllers', request.args[2] +\n '.py')\n",
"if filetype == 'python' and request.args[1] == 'controllers':\n",
"if os.path.exists(apath(cfilename, r=request)):\n",
"app = get_app()\n",
"if len(request.args) > 2 and request.args[1] == 'controllers':\n",
"edit_controller = URL('edit', args=[cfilename.replace(os.sep, '/')])\n",
"viewname = os.path.splitext(request.args[2])[0]\n",
"controller = request.args[2][:-3]\n",
"controller, functions = None, None\n",
"view = request.args[3].replace('.html', '')\n",
"viewpath = os.path.join(app, 'views', viewname)\n",
"functions = find_exposed_functions(data)\n",
"functions = ['SyntaxError:Line:%d' % err.lineno]\n",
"if 'from_ajax' in request.vars:\n",
"view_link = URL(request.args[0], request.args[2], view)\n",
"aviewpath = apath(viewpath, r=request)\n",
"functions = functions and sorted(functions) or []\n",
"return response.json({'file_hash': file_hash, 'saved_on': saved_on,\n 'functions': functions, 'controller': controller, 'application':\n request.args[0], 'highlight': highlight})\n",
"file_details = dict(app=request.args[0], lineno=request.vars.lineno or 1,\n editor_settings=preferences, filename=filename, realfilename=\n realfilename, filetype=filetype, data=data, edit_controller=\n edit_controller, file_hash=file_hash, saved_on=saved_on, controller=\n controller, functions=functions, view_link=view_link, editviewlinks=\n editviewlinks, id=IS_SLUG()(filename)[0], force=True if request.vars.\n restore or request.vars.revert else False)\n",
"viewlist = []\n",
"plain_html = response.render('default/edit_js.html', file_details)\n",
"if os.path.exists(aviewpath):\n",
"file_details['plain_html'] = plain_html\n",
"if os.path.isdir(aviewpath):\n",
"if os.path.exists(aviewpath + '.html'):\n",
"if is_mobile:\n",
"viewlist = glob(os.path.join(aviewpath, '*.html'))\n",
"if len(viewlist):\n",
"viewlist.append(aviewpath + '.html')\n",
"return response.render('default.mobile/edit.html', file_details,\n editor_settings=preferences)\n",
"return response.json(file_details)\n",
"editviewlinks = []\n",
"for v in sorted(viewlist):\n",
"vf = os.path.split(v)[-1]\n",
"vargs = '/'.join([viewpath.replace(os.sep, '/'), vf])\n",
"editviewlinks.append(A(vf.split('.')[0], _class='editor_filelink', _href=\n URL('edit', args=[vargs])))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Return'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"AugAssign'",
"Return'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Expr'",
"Assign'",
"Return'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Import'",
"Condition",
"Return'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Return'",
"Return'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_0(VAR_5):...\n",
"VAR_3 = VAR_5.indexdir\n",
"VAR_4 = VAR_5.dataroot\n"
] | [
"def init(config):...\n",
"INDEXDIR = config.indexdir\n",
"DATAROOT = config.dataroot\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"def FUNC_26(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_52 = self.meta.get_set_only_once_fields()\n",
"if VAR_52 and self._doc_before_save:\n",
"for field in VAR_52:\n",
"return False\n",
"VAR_97 = False\n",
"VAR_26 = self.get(field.fieldname)\n",
"VAR_53 = self._doc_before_save.get(field.fieldname)\n",
"if field.fieldtype in VAR_81:\n",
"VAR_97 = not self.is_child_table_same(field.fieldname)\n",
"if field.fieldtype in ('Date', 'Datetime', 'Time'):\n",
"if VAR_97:\n",
"VAR_97 = str(VAR_26) != str(VAR_53)\n",
"VAR_97 = VAR_26 != VAR_53\n",
"frappe.throw(_('Value cannot be changed for {0}').format(self.meta.\n get_label(field.fieldname)), frappe.CannotChangeConstantError)\n"
] | [
"def validate_set_only_once(self):...\n",
"\"\"\"docstring\"\"\"\n",
"set_only_once_fields = self.meta.get_set_only_once_fields()\n",
"if set_only_once_fields and self._doc_before_save:\n",
"for field in set_only_once_fields:\n",
"return False\n",
"fail = False\n",
"value = self.get(field.fieldname)\n",
"original_value = self._doc_before_save.get(field.fieldname)\n",
"if field.fieldtype in table_fields:\n",
"fail = not self.is_child_table_same(field.fieldname)\n",
"if field.fieldtype in ('Date', 'Datetime', 'Time'):\n",
"if fail:\n",
"fail = str(value) != str(original_value)\n",
"fail = value != original_value\n",
"frappe.throw(_('Value cannot be changed for {0}').format(self.meta.\n get_label(field.fieldname)), frappe.CannotChangeConstantError)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"For",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_17(self):...\n",
"self.reactor.advance(1000000000000)\n",
"VAR_5 = self.helper.create_room_as(self.user_id)\n",
"self.helper.join(VAR_5, '@test2:server')\n",
"self.get_success(self.presence_handler.set_state(UserID.from_string(\n '@test2:server'), {'presence': PresenceState.ONLINE}))\n",
"self.reactor.pump([0])\n",
"self.federation_sender.reset_mock()\n",
"self._add_new_user(VAR_5, '@alice:server2')\n",
"self.federation_sender.send_presence.assert_not_called()\n",
"VAR_14 = self.get_success(self.presence_handler.current_state_for_user(\n '@test2:server'))\n",
"self.assertEqual(VAR_14.state, PresenceState.ONLINE)\n",
"self.federation_sender.send_presence_to_destinations.assert_called_once_with(\n destinations=['server2'], states=[expected_state])\n",
"self.federation_sender.reset_mock()\n",
"self._add_new_user(VAR_5, '@bob:server3')\n",
"self.federation_sender.send_presence.assert_not_called()\n",
"self.federation_sender.send_presence_to_destinations.assert_called_once_with(\n destinations=['server3'], states=[expected_state])\n"
] | [
"def test_remote_joins(self):...\n",
"self.reactor.advance(1000000000000)\n",
"room_id = self.helper.create_room_as(self.user_id)\n",
"self.helper.join(room_id, '@test2:server')\n",
"self.get_success(self.presence_handler.set_state(UserID.from_string(\n '@test2:server'), {'presence': PresenceState.ONLINE}))\n",
"self.reactor.pump([0])\n",
"self.federation_sender.reset_mock()\n",
"self._add_new_user(room_id, '@alice:server2')\n",
"self.federation_sender.send_presence.assert_not_called()\n",
"expected_state = self.get_success(self.presence_handler.\n current_state_for_user('@test2:server'))\n",
"self.assertEqual(expected_state.state, PresenceState.ONLINE)\n",
"self.federation_sender.send_presence_to_destinations.assert_called_once_with(\n destinations=['server2'], states=[expected_state])\n",
"self.federation_sender.reset_mock()\n",
"self._add_new_user(room_id, '@bob:server3')\n",
"self.federation_sender.send_presence.assert_not_called()\n",
"self.federation_sender.send_presence_to_destinations.assert_called_once_with(\n destinations=['server3'], states=[expected_state])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_0(self, VAR_1, **VAR_3):...\n",
"self.enable_registration = strtobool(str(VAR_1.get('enable_registration', \n False)))\n",
"if 'disable_registration' in VAR_1:\n",
"self.enable_registration = not strtobool(str(VAR_1['disable_registration']))\n",
"self.account_validity = CLASS_0(VAR_1.get('account_validity') or {}, VAR_1)\n",
"self.registrations_require_3pid = VAR_1.get('registrations_require_3pid', [])\n",
"self.allowed_local_3pids = VAR_1.get('allowed_local_3pids', [])\n",
"self.enable_3pid_lookup = VAR_1.get('enable_3pid_lookup', True)\n",
"self.registration_shared_secret = VAR_1.get('registration_shared_secret')\n",
"self.bcrypt_rounds = VAR_1.get('bcrypt_rounds', 12)\n",
"self.trusted_third_party_id_servers = VAR_1.get(\n 'trusted_third_party_id_servers', ['matrix.org', 'vector.im'])\n",
"VAR_8 = VAR_1.get('account_threepid_delegates') or {}\n",
"self.account_threepid_delegate_email = VAR_8.get('email')\n",
"self.account_threepid_delegate_msisdn = VAR_8.get('msisdn')\n",
"self.default_identity_server = VAR_1.get('default_identity_server')\n",
"self.allow_guest_access = VAR_1.get('allow_guest_access', False)\n",
"if VAR_1.get('invite_3pid_guest', False):\n",
"self.auto_join_rooms = VAR_1.get('auto_join_rooms', [])\n",
"for room_alias in self.auto_join_rooms:\n",
"if not RoomAlias.is_valid(room_alias):\n",
"self.autocreate_auto_join_rooms = VAR_1.get('autocreate_auto_join_rooms', True)\n",
"self.autocreate_auto_join_rooms_federated = VAR_1.get(\n 'autocreate_auto_join_rooms_federated', True)\n",
"self.autocreate_auto_join_room_preset = VAR_1.get(\n 'autocreate_auto_join_room_preset') or RoomCreationPreset.PUBLIC_CHAT\n",
"self.auto_join_room_requires_invite = (self.\n autocreate_auto_join_room_preset in {RoomCreationPreset.PRIVATE_CHAT,\n RoomCreationPreset.TRUSTED_PRIVATE_CHAT})\n",
"VAR_9 = VAR_1.get('auto_join_mxid_localpart')\n",
"self.auto_join_user_id = None\n",
"if VAR_9:\n",
"self.auto_join_user_id = UserID(VAR_9, self.server_name).to_string()\n",
"if self.autocreate_auto_join_rooms:\n",
"if self.autocreate_auto_join_room_preset not in {RoomCreationPreset.\n",
"self.auto_join_rooms_for_guests = VAR_1.get('auto_join_rooms_for_guests', True)\n",
"if self.auto_join_room_requires_invite:\n",
"self.enable_set_displayname = VAR_1.get('enable_set_displayname', True)\n",
"if not VAR_9:\n",
"self.enable_set_avatar_url = VAR_1.get('enable_set_avatar_url', True)\n",
"self.enable_3pid_changes = VAR_1.get('enable_3pid_changes', True)\n",
"self.disable_msisdn_registration = VAR_1.get('disable_msisdn_registration',\n False)\n",
"VAR_10 = VAR_1.get('session_lifetime')\n",
"if VAR_10 is not None:\n",
"VAR_10 = self.parse_duration(VAR_10)\n",
"self.session_lifetime = VAR_10\n",
"self.fallback_success_template = self.read_templates(['auth_success.html'],\n autoescape=True)[0]\n"
] | [
"def read_config(self, config, **kwargs):...\n",
"self.enable_registration = strtobool(str(config.get('enable_registration', \n False)))\n",
"if 'disable_registration' in config:\n",
"self.enable_registration = not strtobool(str(config['disable_registration']))\n",
"self.account_validity = AccountValidityConfig(config.get('account_validity'\n ) or {}, config)\n",
"self.registrations_require_3pid = config.get('registrations_require_3pid', [])\n",
"self.allowed_local_3pids = config.get('allowed_local_3pids', [])\n",
"self.enable_3pid_lookup = config.get('enable_3pid_lookup', True)\n",
"self.registration_shared_secret = config.get('registration_shared_secret')\n",
"self.bcrypt_rounds = config.get('bcrypt_rounds', 12)\n",
"self.trusted_third_party_id_servers = config.get(\n 'trusted_third_party_id_servers', ['matrix.org', 'vector.im'])\n",
"account_threepid_delegates = config.get('account_threepid_delegates') or {}\n",
"self.account_threepid_delegate_email = account_threepid_delegates.get('email')\n",
"self.account_threepid_delegate_msisdn = account_threepid_delegates.get('msisdn'\n )\n",
"self.default_identity_server = config.get('default_identity_server')\n",
"self.allow_guest_access = config.get('allow_guest_access', False)\n",
"if config.get('invite_3pid_guest', False):\n",
"self.auto_join_rooms = config.get('auto_join_rooms', [])\n",
"for room_alias in self.auto_join_rooms:\n",
"if not RoomAlias.is_valid(room_alias):\n",
"self.autocreate_auto_join_rooms = config.get('autocreate_auto_join_rooms', True\n )\n",
"self.autocreate_auto_join_rooms_federated = config.get(\n 'autocreate_auto_join_rooms_federated', True)\n",
"self.autocreate_auto_join_room_preset = config.get(\n 'autocreate_auto_join_room_preset') or RoomCreationPreset.PUBLIC_CHAT\n",
"self.auto_join_room_requires_invite = (self.\n autocreate_auto_join_room_preset in {RoomCreationPreset.PRIVATE_CHAT,\n RoomCreationPreset.TRUSTED_PRIVATE_CHAT})\n",
"mxid_localpart = config.get('auto_join_mxid_localpart')\n",
"self.auto_join_user_id = None\n",
"if mxid_localpart:\n",
"self.auto_join_user_id = UserID(mxid_localpart, self.server_name).to_string()\n",
"if self.autocreate_auto_join_rooms:\n",
"if self.autocreate_auto_join_room_preset not in {RoomCreationPreset.\n",
"self.auto_join_rooms_for_guests = config.get('auto_join_rooms_for_guests', True\n )\n",
"if self.auto_join_room_requires_invite:\n",
"self.enable_set_displayname = config.get('enable_set_displayname', True)\n",
"if not mxid_localpart:\n",
"self.enable_set_avatar_url = config.get('enable_set_avatar_url', True)\n",
"self.enable_3pid_changes = config.get('enable_3pid_changes', True)\n",
"self.disable_msisdn_registration = config.get('disable_msisdn_registration',\n False)\n",
"session_lifetime = config.get('session_lifetime')\n",
"if session_lifetime is not None:\n",
"session_lifetime = self.parse_duration(session_lifetime)\n",
"self.session_lifetime = session_lifetime\n",
"self.fallback_success_template = self.read_templates(['auth_success.html'],\n autoescape=True)[0]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"For",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_1(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = Mock(spec=['put_json'])\n",
"VAR_6.put_json.return_value = make_awaitable({})\n",
"self.make_worker_hs('synapse.app.federation_sender', {'send_federation': \n True}, http_client=mock_client)\n",
"VAR_2 = self.register_user('user', 'pass')\n",
"VAR_3 = self.login('user', 'pass')\n",
"VAR_7 = self.create_room_with_remote_server(VAR_2, VAR_3)\n",
"VAR_6.put_json.reset_mock()\n",
"self.create_and_send_event(VAR_7, UserID.from_string(VAR_2))\n",
"self.replicate()\n",
"VAR_6.put_json.assert_called()\n",
"self.assertEqual(VAR_6.put_json.call_args[0][0], 'other_server')\n",
"self.assertTrue(VAR_6.put_json.call_args[1]['data'].get('pdus'))\n"
] | [
"def test_send_event_single_sender(self):...\n",
"\"\"\"docstring\"\"\"\n",
"mock_client = Mock(spec=['put_json'])\n",
"mock_client.put_json.return_value = make_awaitable({})\n",
"self.make_worker_hs('synapse.app.federation_sender', {'send_federation': \n True}, http_client=mock_client)\n",
"user = self.register_user('user', 'pass')\n",
"token = self.login('user', 'pass')\n",
"room = self.create_room_with_remote_server(user, token)\n",
"mock_client.put_json.reset_mock()\n",
"self.create_and_send_event(room, UserID.from_string(user))\n",
"self.replicate()\n",
"mock_client.put_json.assert_called()\n",
"self.assertEqual(mock_client.put_json.call_args[0][0], 'other_server')\n",
"self.assertTrue(mock_client.put_json.call_args[1]['data'].get('pdus'))\n"
] | [
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@require_POST...\n",
"VAR_5 = get_object_or_404(TopicNotification, VAR_2=pk, user=request.user)\n",
"VAR_4 = NotificationForm(data=request.POST, instance=notification)\n",
"if VAR_4.is_valid():\n",
"VAR_4.save()\n",
"messages.error(VAR_0, utils.render_form_errors(VAR_4))\n",
"return redirect(VAR_0.POST.get('next', VAR_5.topic.get_absolute_url()))\n"
] | [
"@require_POST...\n",
"notification = get_object_or_404(TopicNotification, pk=pk, user=request.user)\n",
"form = NotificationForm(data=request.POST, instance=notification)\n",
"if form.is_valid():\n",
"form.save()\n",
"messages.error(request, utils.render_form_errors(form))\n",
"return redirect(request.POST.get('next', notification.topic.get_absolute_url())\n )\n"
] | [
0,
0,
0,
0,
0,
0,
4
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Return'"
] |
[
"@TimeIt(logging.INFO)...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_19 = VAR_2.session['connector'].server_id\n",
"VAR_136 = FUNC_35(VAR_2)\n",
"VAR_54 = []\n",
"VAR_1.debug('searchObjects(%s)' % VAR_136['search'])\n",
"def FUNC_65(VAR_6):...\n",
"return reverse('webgateway_render_thumbnail', VAR_116=(iid,))\n"
] | [
"@TimeIt(logging.INFO)...\n",
"\"\"\"docstring\"\"\"\n",
"server_id = request.session['connector'].server_id\n",
"opts = searchOptFromRequest(request)\n",
"rv = []\n",
"logger.debug('searchObjects(%s)' % opts['search'])\n",
"def urlprefix(iid):...\n",
"return reverse('webgateway_render_thumbnail', args=(iid,))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Return'"
] |
[
"@VAR_0.filter...\n",
"if VAR_17 is None:\n",
"return ''\n",
"return '%s' % VAR_17\n"
] | [
"@register.filter...\n",
"if value is None:\n",
"return ''\n",
"return '%s' % value\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_11(VAR_51):...\n",
"self.reactor.callLater(_EPSILON, VAR_51)\n"
] | [
"def schedule(x):...\n",
"self.reactor.callLater(_EPSILON, x)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"async def FUNC_15(VAR_41):...\n",
"VAR_2, VAR_6 = VAR_41\n",
"VAR_31 = await self.get_server_verify_key_v2_direct(VAR_2, VAR_6)\n",
"VAR_0.warning('Error looking up keys %s from %s: %s', VAR_6, VAR_2, e)\n",
"VAR_26[VAR_2] = VAR_31\n",
"VAR_0.exception('Error getting keys %s from %s', VAR_6, VAR_2)\n"
] | [
"async def get_key(key_to_fetch_item):...\n",
"server_name, key_ids = key_to_fetch_item\n",
"keys = await self.get_server_verify_key_v2_direct(server_name, key_ids)\n",
"logger.warning('Error looking up keys %s from %s: %s', key_ids, server_name, e)\n",
"results[server_name] = keys\n",
"logger.exception('Error getting keys %s from %s', key_ids, server_name)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_28(VAR_5):...\n",
""
] | [
"def mock_smart_urlquote(url):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_7(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_12 = self.helper.create_room_as(self.test_user, tok=self.test_user_tok)\n",
"self.get_success(self.handler.create_association(create_requester(self.\n admin_user), self.room_alias, VAR_12))\n"
] | [
"def test_create_alias_admin(self):...\n",
"\"\"\"docstring\"\"\"\n",
"other_room_id = self.helper.create_room_as(self.test_user, tok=self.\n test_user_tok)\n",
"self.get_success(self.handler.create_association(create_requester(self.\n admin_user), self.room_alias, other_room_id))\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"def FUNC_21(self) ->bool:...\n",
"\"\"\"docstring\"\"\"\n",
"return self.should_retrain_core() or self.nlg\n"
] | [
"def should_retrain_nlg(self) ->bool:...\n",
"\"\"\"docstring\"\"\"\n",
"return self.should_retrain_core() or self.nlg\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_95(*VAR_79, **VAR_42):...\n",
"\"\"\"docstring\"\"\"\n",
"import frappe.utils.background_jobs\n",
"return frappe.utils.background_jobs.enqueue(*VAR_79, **kwargs)\n"
] | [
"def enqueue(*args, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"import frappe.utils.background_jobs\n",
"return frappe.utils.background_jobs.enqueue(*args, **kwargs)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Import'",
"Return'"
] |
[
"def FUNC_19(VAR_20):...\n",
"\"\"\"docstring\"\"\"\n",
"if not VAR_20.inputs and not VAR_20.input_exprs and not VAR_20.input_examples:\n",
"VAR_42 = FUNC_17(VAR_20.inputs, VAR_20.input_exprs, VAR_20.input_examples)\n",
"FUNC_12(VAR_20.dir, VAR_20.tag_set, VAR_20.signature_def, VAR_42, VAR_20.\n outdir, VAR_20.overwrite, VAR_13=args.worker, VAR_14=args.init_tpu,\n VAR_15=args.tf_debug)\n"
] | [
"def run(args):...\n",
"\"\"\"docstring\"\"\"\n",
"if not args.inputs and not args.input_exprs and not args.input_examples:\n",
"tensor_key_feed_dict = load_inputs_from_input_arg_string(args.inputs, args.\n input_exprs, args.input_examples)\n",
"run_saved_model_with_feed_dict(args.dir, args.tag_set, args.signature_def,\n tensor_key_feed_dict, args.outdir, args.overwrite, worker=args.worker,\n init_tpu=args.init_tpu, tf_debug=args.tf_debug)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Expr'"
] |
[
"def FUNC_34(VAR_9, VAR_14, VAR_31):...\n",
"VAR_83 = {}\n",
"VAR_85 = read_po(FUNC_47, VAR_14=locale, VAR_31=domain)\n",
"for message in VAR_85:\n",
"VAR_141 = message.id\n",
"return VAR_83, VAR_85.plural_expr\n",
"if isinstance(VAR_141, (list, tuple)):\n",
"VAR_141 = VAR_141[0]\n",
"if message.string:\n",
"VAR_83[VAR_141] = message.string\n"
] | [
"def messages_from_po(path, locale, domain):...\n",
"messages = {}\n",
"catalog = read_po(f, locale=locale, domain=domain)\n",
"for message in catalog:\n",
"message_id = message.id\n",
"return messages, catalog.plural_expr\n",
"if isinstance(message_id, (list, tuple)):\n",
"message_id = message_id[0]\n",
"if message.string:\n",
"messages[message_id] = message.string\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Condition",
"Assign'"
] |
[
"def FUNC_83(self, VAR_37, VAR_96):...\n",
"if self.settings.logger:\n",
"self.settings.logger.log_event(VAR_37, VAR_96, origin='crud')\n"
] | [
"def log_event(self, message, vars):...\n",
"if self.settings.logger:\n",
"self.settings.logger.log_event(message, vars, origin='crud')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'"
] |
[
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_139 = 'false'\n",
"VAR_53 = VAR_2.GET\n",
"if VAR_8 is None:\n",
"VAR_92 = None\n",
"VAR_92 = VAR_8.getObject('Image', VAR_6)\n",
"if VAR_92 is not None:\n",
"VAR_115 = []\n",
"if VAR_53.get('callback', None):\n",
"for VAR_132 in VAR_92.getProject().listChildren():\n",
"VAR_139 = '%s(%s)' % (VAR_53['callback'], VAR_139)\n",
"return HttpJavascriptResponse(VAR_139)\n",
"VAR_115.extend(VAR_132.listChildren())\n",
"VAR_218 = VAR_92.getPrimaryPixels().getPixelsType().getValue()\n",
"VAR_219 = VAR_92.getSizeC()\n",
"VAR_220 = [VAR_30.getLabel() for VAR_30 in VAR_92.getChannels()]\n",
"VAR_220.sort()\n",
"def FUNC_76(VAR_212):...\n",
"if VAR_178(VAR_212.getId()) == VAR_178(VAR_6):\n",
"return False\n",
"VAR_285 = VAR_212.getPrimaryPixels()\n",
"if VAR_285 is None or VAR_212.getPrimaryPixels().getPixelsType().getValue(\n",
"return False\n",
"VAR_286 = [VAR_30.getLabel() for VAR_30 in VAR_212.getChannels()]\n",
"VAR_286.sort()\n",
"if VAR_286 != VAR_220:\n",
"return False\n",
"return True\n"
] | [
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"json_data = 'false'\n",
"r = request.GET\n",
"if conn is None:\n",
"img = None\n",
"img = conn.getObject('Image', iid)\n",
"if img is not None:\n",
"imgs = []\n",
"if r.get('callback', None):\n",
"for ds in img.getProject().listChildren():\n",
"json_data = '%s(%s)' % (r['callback'], json_data)\n",
"return HttpJavascriptResponse(json_data)\n",
"imgs.extend(ds.listChildren())\n",
"img_ptype = img.getPrimaryPixels().getPixelsType().getValue()\n",
"img_ccount = img.getSizeC()\n",
"img_ew = [x.getLabel() for x in img.getChannels()]\n",
"img_ew.sort()\n",
"def compat(i):...\n",
"if long(i.getId()) == long(iid):\n",
"return False\n",
"pp = i.getPrimaryPixels()\n",
"if pp is None or i.getPrimaryPixels().getPixelsType().getValue(\n",
"return False\n",
"ew = [x.getLabel() for x in i.getChannels()]\n",
"ew.sort()\n",
"if ew != img_ew:\n",
"return False\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"For",
"Assign'",
"Return'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Expr'",
"Condition",
"Return'",
"Return'"
] |
[
"@property...\n",
"VAR_38 = self.ip if self.ip else 'localhost'\n",
"return self._url(VAR_38)\n"
] | [
"@property...\n",
"ip = self.ip if self.ip else 'localhost'\n",
"return self._url(ip)\n"
] | [
0,
0,
0
] | [
"Condition",
"Assign'",
"Return'"
] |
[
"def __getattr__(self, VAR_46):...\n",
"VAR_205 = self.get(VAR_46)\n",
"if not VAR_205 and VAR_46.startswith('__'):\n",
"return VAR_205\n"
] | [
"def __getattr__(self, key):...\n",
"ret = self.get(key)\n",
"if not ret and key.startswith('__'):\n",
"return ret\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Return'"
] |
[
"from __future__ import unicode_literals\n",
"import unittest\n",
"from django.template import Context, Template\n",
"from django.test import TestCase\n",
"from rest_framework.compat import coreapi, coreschema\n",
"from rest_framework.relations import Hyperlink\n",
"from rest_framework.templatetags import rest_framework\n",
"from rest_framework.templatetags.rest_framework import add_nested_class, add_query_param, as_string, break_long_headers, format_value, get_pagination_html, schema_links, urlize_quoted_links\n",
"from rest_framework.test import APIRequestFactory\n",
"VAR_0 = APIRequestFactory()\n",
"def FUNC_0(VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"return VAR_1.replace('\\n', '').replace(' ', '')\n"
] | [
"from __future__ import unicode_literals\n",
"import unittest\n",
"from django.template import Context, Template\n",
"from django.test import TestCase\n",
"from rest_framework.compat import coreapi, coreschema\n",
"from rest_framework.relations import Hyperlink\n",
"from rest_framework.templatetags import rest_framework\n",
"from rest_framework.templatetags.rest_framework import add_nested_class, add_query_param, as_string, break_long_headers, format_value, get_pagination_html, schema_links, urlize_quoted_links\n",
"from rest_framework.test import APIRequestFactory\n",
"factory = APIRequestFactory()\n",
"def format_html(html):...\n",
"\"\"\"docstring\"\"\"\n",
"return html.replace('\\n', '').replace(' ', '')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_20(self, VAR_0):...\n",
"from openapi_python_client import Project\n",
"VAR_8 = Project(VAR_3=mocker.MagicMock(title='My Test API'))\n",
"VAR_8.package_dir = VAR_0.MagicMock()\n",
"VAR_17 = VAR_0.MagicMock()\n",
"VAR_8.env = VAR_0.MagicMock()\n",
"VAR_8.env.get_template.return_value = VAR_17\n",
"VAR_18 = VAR_0.MagicMock(autospec=pathlib.Path)\n",
"VAR_19 = VAR_0.MagicMock(autospec=pathlib.Path)\n",
"VAR_20 = {'__init__.py': VAR_18, 'py.typed': VAR_19}\n",
"VAR_8.package_dir.__truediv__.side_effect = lambda VAR_40: VAR_20[VAR_40]\n",
"VAR_8._create_package()\n",
"VAR_8.package_dir.mkdir.assert_called_once()\n",
"VAR_8.env.get_template.assert_called_once_with('package_init.pyi')\n",
"VAR_17.render.assert_called_once_with(description=project.package_description)\n",
"VAR_18.write_text.assert_called_once_with(VAR_17.render())\n",
"VAR_19.write_text.assert_called_once_with('# Marker file for PEP 561')\n"
] | [
"def test__create_package(self, mocker):...\n",
"from openapi_python_client import Project\n",
"project = Project(openapi=mocker.MagicMock(title='My Test API'))\n",
"project.package_dir = mocker.MagicMock()\n",
"package_init_template = mocker.MagicMock()\n",
"project.env = mocker.MagicMock()\n",
"project.env.get_template.return_value = package_init_template\n",
"package_init_path = mocker.MagicMock(autospec=pathlib.Path)\n",
"pytyped_path = mocker.MagicMock(autospec=pathlib.Path)\n",
"paths = {'__init__.py': package_init_path, 'py.typed': pytyped_path}\n",
"project.package_dir.__truediv__.side_effect = lambda x: paths[x]\n",
"project._create_package()\n",
"project.package_dir.mkdir.assert_called_once()\n",
"project.env.get_template.assert_called_once_with('package_init.pyi')\n",
"package_init_template.render.assert_called_once_with(description=project.\n package_description)\n",
"package_init_path.write_text.assert_called_once_with(package_init_template.\n render())\n",
"pytyped_path.write_text.assert_called_once_with('# Marker file for PEP 561')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_15(VAR_35, VAR_36=''):...\n",
"return VAR_35\n"
] | [
"def deserialize_component(ui_value, widget_id=''):...\n",
"return ui_value\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |