lines (sequence, lengths 1–383) | raw_lines (sequence, lengths 1–383) | label (sequence, lengths 1–383) | type (sequence, lengths 1–383) |
---|---|---|---|
[
"def FUNC_6(self):...\n",
"self._record_users()\n",
"VAR_10 = self.get_success(self.handler.get_devices_by_user(VAR_0))\n",
"self.assertEqual(3, len(VAR_10))\n",
"VAR_14 = {d['device_id']: d for d in VAR_10}\n",
"self.assertDictContainsSubset({'user_id': VAR_0, 'device_id': 'xyz',\n 'display_name': 'display 0', 'last_seen_ip': None, 'last_seen_ts': None\n }, VAR_14['xyz'])\n",
"self.assertDictContainsSubset({'user_id': VAR_0, 'device_id': 'fco',\n 'display_name': 'display 1', 'last_seen_ip': 'ip1', 'last_seen_ts': \n 1000000}, VAR_14['fco'])\n",
"self.assertDictContainsSubset({'user_id': VAR_0, 'device_id': 'abc',\n 'display_name': 'display 2', 'last_seen_ip': 'ip3', 'last_seen_ts': \n 3000000}, VAR_14['abc'])\n"
] | [
"def test_get_devices_by_user(self):...\n",
"self._record_users()\n",
"res = self.get_success(self.handler.get_devices_by_user(user1))\n",
"self.assertEqual(3, len(res))\n",
"device_map = {d['device_id']: d for d in res}\n",
"self.assertDictContainsSubset({'user_id': user1, 'device_id': 'xyz',\n 'display_name': 'display 0', 'last_seen_ip': None, 'last_seen_ts': None\n }, device_map['xyz'])\n",
"self.assertDictContainsSubset({'user_id': user1, 'device_id': 'fco',\n 'display_name': 'display 1', 'last_seen_ip': 'ip1', 'last_seen_ts': \n 1000000}, device_map['fco'])\n",
"self.assertDictContainsSubset({'user_id': user1, 'device_id': 'abc',\n 'display_name': 'display 2', 'last_seen_ip': 'ip3', 'last_seen_ts': \n 3000000}, device_map['abc'])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_10(self, VAR_26: str, VAR_28: Optional[str]=None) ->Callable[[...\n",
"def FUNC_20(VAR_66: DecoratedCallable) ->DecoratedCallable:...\n",
"self.add_api_websocket_route(VAR_26, VAR_66, VAR_28=name)\n",
"return VAR_66\n"
] | [
"def websocket(self, path: str, name: Optional[str]=None) ->Callable[[...\n",
"def decorator(func: DecoratedCallable) ->DecoratedCallable:...\n",
"self.add_api_websocket_route(path, func, name=name)\n",
"return func\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"FunctionDef'",
"Expr'",
"Return'"
] |
[
"@VAR_2.route('/ajax/deleteuser', methods=['POST'])...\n",
"VAR_53 = request.form.to_dict(flat=False)\n",
"VAR_54 = None\n",
"if 'userid[]' in VAR_53:\n",
"VAR_54 = ub.session.query(ub.User).filter(ub.User.id.in_(VAR_53['userid[]'])\n ).all()\n",
"if 'userid' in VAR_53:\n",
"VAR_55 = 0\n",
"VAR_54 = ub.session.query(ub.User).filter(ub.User.id == VAR_53['userid'][0]\n ).all()\n",
"VAR_56 = list()\n",
"VAR_57 = list()\n",
"if not VAR_54:\n",
"VAR_0.error('User not found')\n",
"for VAR_12 in VAR_54:\n",
"return Response(json.dumps({'type': 'danger', 'message': _('User not found'\n )}), mimetype='application/json')\n",
"if VAR_55 == 1:\n",
"VAR_71 = FUNC_53(VAR_12)\n",
"VAR_0.error(ex)\n",
"VAR_0.info('User {} deleted'.format(VAR_53))\n",
"if VAR_55 > 1:\n",
"VAR_55 += 1\n",
"VAR_56.append({'type': 'danger', 'message': str(ex)})\n",
"VAR_57 = [{'type': 'success', 'message': VAR_71}]\n",
"VAR_0.info('Users {} deleted'.format(VAR_53))\n",
"VAR_57.extend(VAR_56)\n",
"VAR_57 = [{'type': 'success', 'message': _('{} users deleted successfully')\n .format(VAR_55)}]\n",
"return Response(json.dumps(VAR_57), mimetype='application/json')\n"
] | [
"@admi.route('/ajax/deleteuser', methods=['POST'])...\n",
"user_ids = request.form.to_dict(flat=False)\n",
"users = None\n",
"if 'userid[]' in user_ids:\n",
"users = ub.session.query(ub.User).filter(ub.User.id.in_(user_ids['userid[]'])\n ).all()\n",
"if 'userid' in user_ids:\n",
"count = 0\n",
"users = ub.session.query(ub.User).filter(ub.User.id == user_ids['userid'][0]\n ).all()\n",
"errors = list()\n",
"success = list()\n",
"if not users:\n",
"log.error('User not found')\n",
"for user in users:\n",
"return Response(json.dumps({'type': 'danger', 'message': _('User not found'\n )}), mimetype='application/json')\n",
"if count == 1:\n",
"message = _delete_user(user)\n",
"log.error(ex)\n",
"log.info('User {} deleted'.format(user_ids))\n",
"if count > 1:\n",
"count += 1\n",
"errors.append({'type': 'danger', 'message': str(ex)})\n",
"success = [{'type': 'success', 'message': message}]\n",
"log.info('Users {} deleted'.format(user_ids))\n",
"success.extend(errors)\n",
"success = [{'type': 'success', 'message': _('{} users deleted successfully'\n ).format(count)}]\n",
"return Response(json.dumps(success), mimetype='application/json')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"For",
"Return'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"AugAssign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_6(VAR_2: Directive, VAR_11: str, VAR_12: list[InsertEntryOption],...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3, VAR_4 = FUNC_8(VAR_2, VAR_12, VAR_11)\n",
"VAR_23 = FUNC_7(VAR_2, VAR_13, VAR_5)\n",
"VAR_28 = file.readlines()\n",
"if VAR_4 is None:\n",
"VAR_28 += '\\n' + VAR_23\n",
"VAR_28.insert(VAR_4, VAR_23 + '\\n')\n",
"file.writelines(VAR_28)\n",
"if VAR_4 is None:\n",
"return VAR_12\n",
"VAR_24 = VAR_23.count('\\n') + 1\n",
"return [(option._replace(VAR_4=option.lineno + added_lines) if option.\n filename == VAR_3 and option.lineno > VAR_4 else option) for option in\n VAR_12]\n"
] | [
"def insert_entry(entry: Directive, default_filename: str, insert_options:...\n",
"\"\"\"docstring\"\"\"\n",
"filename, lineno = find_insert_position(entry, insert_options, default_filename\n )\n",
"content = _format_entry(entry, currency_column, indent)\n",
"contents = file.readlines()\n",
"if lineno is None:\n",
"contents += '\\n' + content\n",
"contents.insert(lineno, content + '\\n')\n",
"file.writelines(contents)\n",
"if lineno is None:\n",
"return insert_options\n",
"added_lines = content.count('\\n') + 1\n",
"return [(option._replace(lineno=option.lineno + added_lines) if option.\n filename == filename and option.lineno > lineno else option) for option in\n insert_options]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"AugAssign'",
"Expr'",
"Expr'",
"Condition",
"Return'",
"Assign'",
"Return'"
] |
[
"@FUNC_27(VAR_87=True)...\n",
"return 'pong'\n"
] | [
"@whitelist(allow_guest=True)...\n",
"return 'pong'\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"@FUNC_0...\n",
"return SimpleHttpClient(self, http_proxy=os.getenvb(b'http_proxy'),\n https_proxy=os.getenvb(b'HTTPS_PROXY'))\n"
] | [
"@cache_in_self...\n",
"return SimpleHttpClient(self, http_proxy=os.getenvb(b'http_proxy'),\n https_proxy=os.getenvb(b'HTTPS_PROXY'))\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_0(VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_25 = saved_model_utils.get_saved_model_tag_sets(VAR_2)\n",
"print('The given SavedModel contains the following tag-sets:')\n",
"for VAR_3 in sorted(VAR_25):\n",
"print('%r' % ', '.join(sorted(VAR_3)))\n"
] | [
"def _show_tag_sets(saved_model_dir):...\n",
"\"\"\"docstring\"\"\"\n",
"tag_sets = saved_model_utils.get_saved_model_tag_sets(saved_model_dir)\n",
"print('The given SavedModel contains the following tag-sets:')\n",
"for tag_set in sorted(tag_sets):\n",
"print('%r' % ', '.join(sorted(tag_set)))\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"For",
"Expr'"
] |
[
"import base64\n",
"import logging\n",
"import pathlib\n",
"import uuid\n",
"from django.conf import settings\n",
"from django.utils.functional import cached_property\n",
"from storages.utils import safe_join\n",
"from s3file.storages import storage\n",
"VAR_0 = logging.getLogger('s3file')\n",
"\"\"\"FileInput that uses JavaScript to directly upload to Amazon S3.\"\"\"\n",
"VAR_1 = False\n",
"VAR_2 = str(getattr(settings, 'S3FILE_UPLOAD_PATH', pathlib.PurePosixPath(\n 'tmp', 's3file')))\n",
"VAR_2 = safe_join(str(storage.location), VAR_2)\n",
"VAR_3 = settings.SESSION_COOKIE_AGE\n",
"@property...\n",
"return storage.bucket.name\n"
] | [
"import base64\n",
"import logging\n",
"import pathlib\n",
"import uuid\n",
"from django.conf import settings\n",
"from django.utils.functional import cached_property\n",
"from storages.utils import safe_join\n",
"from s3file.storages import storage\n",
"logger = logging.getLogger('s3file')\n",
"\"\"\"FileInput that uses JavaScript to directly upload to Amazon S3.\"\"\"\n",
"needs_multipart_form = False\n",
"upload_path = str(getattr(settings, 'S3FILE_UPLOAD_PATH', pathlib.\n PurePosixPath('tmp', 's3file')))\n",
"upload_path = safe_join(str(storage.location), upload_path)\n",
"expires = settings.SESSION_COOKIE_AGE\n",
"@property...\n",
"return storage.bucket.name\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
1,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'"
] |
[
"@expose('/login/')...\n",
"VAR_47 = request.environ.get('REMOTE_USER')\n",
"if g.user is not None and g.user.is_authenticated:\n",
"return redirect(self.appbuilder.get_url_for_index)\n",
"if VAR_47:\n",
"VAR_50 = self.appbuilder.sm.auth_user_remote_user(VAR_47)\n",
"flash(as_unicode(self.invalid_login_message), 'warning')\n",
"if VAR_50 is None:\n",
"return redirect(self.appbuilder.get_url_for_index)\n",
"flash(as_unicode(self.invalid_login_message), 'warning')\n",
"login_user(VAR_50)\n"
] | [
"@expose('/login/')...\n",
"username = request.environ.get('REMOTE_USER')\n",
"if g.user is not None and g.user.is_authenticated:\n",
"return redirect(self.appbuilder.get_url_for_index)\n",
"if username:\n",
"user = self.appbuilder.sm.auth_user_remote_user(username)\n",
"flash(as_unicode(self.invalid_login_message), 'warning')\n",
"if user is None:\n",
"return redirect(self.appbuilder.get_url_for_index)\n",
"flash(as_unicode(self.invalid_login_message), 'warning')\n",
"login_user(user)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Return'",
"Expr'",
"Expr'"
] |
[
"@VAR_0.filter...\n",
"\"\"\"docstring\"\"\"\n",
"if not VAR_4:\n",
"return ''\n",
"return mark_safe(FUNC_1(VAR_4)[1:-1] + VAR_5)\n"
] | [
"@register.filter...\n",
"\"\"\"docstring\"\"\"\n",
"if not d:\n",
"return ''\n",
"return mark_safe(json_dumps(d)[1:-1] + append)\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Condition",
"Return'",
"Return'"
] |
[
"from __future__ import unicode_literals\n",
"import base64\n",
"import calendar\n",
"import datetime\n",
"import re\n",
"import sys\n",
"import unicodedata\n",
"from binascii import Error as BinasciiError\n",
"from email.utils import formatdate\n",
"from django.utils import six\n",
"from django.utils.datastructures import MultiValueDict\n",
"from django.utils.encoding import force_bytes, force_str, force_text\n",
"from django.utils.functional import keep_lazy_text\n",
"from django.utils.six.moves.urllib.parse import quote, quote_plus, unquote, unquote_plus, urlencode as original_urlencode, urlparse\n",
"VAR_0 = re.compile('(?:W/)?\"((?:\\\\\\\\.|[^\"])*)\"')\n",
"VAR_1 = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()\n",
"VAR_2 = '(?P<day>\\\\d{2})'\n",
"VAR_3 = '(?P<day>[ \\\\d]\\\\d)'\n",
"VAR_4 = '(?P<mon>\\\\w{3})'\n",
"VAR_5 = '(?P<year>\\\\d{4})'\n",
"VAR_6 = '(?P<year>\\\\d{2})'\n",
"VAR_7 = '(?P<hour>\\\\d{2}):(?P<min>\\\\d{2}):(?P<sec>\\\\d{2})'\n",
"VAR_8 = re.compile('^\\\\w{3}, %s %s %s %s GMT$' % (VAR_2, VAR_4, VAR_5, VAR_7))\n",
"VAR_9 = re.compile('^\\\\w{6,9}, %s-%s-%s %s GMT$' % (VAR_2, VAR_4, VAR_6, VAR_7)\n )\n",
"VAR_10 = re.compile('^\\\\w{3} %s %s %s %s$' % (VAR_4, VAR_3, VAR_7, VAR_5))\n",
"VAR_11 = str(':/?#[]@')\n",
"VAR_12 = str(\"!$&'()*+,;=\")\n",
"VAR_13 = {'http': 80, 'https': 443}\n",
"@keep_lazy_text...\n",
"\"\"\"docstring\"\"\"\n",
"return force_text(quote(force_str(VAR_14), force_str(VAR_15)))\n"
] | [
"from __future__ import unicode_literals\n",
"import base64\n",
"import calendar\n",
"import datetime\n",
"import re\n",
"import sys\n",
"import unicodedata\n",
"from binascii import Error as BinasciiError\n",
"from email.utils import formatdate\n",
"from django.utils import six\n",
"from django.utils.datastructures import MultiValueDict\n",
"from django.utils.encoding import force_bytes, force_str, force_text\n",
"from django.utils.functional import keep_lazy_text\n",
"from django.utils.six.moves.urllib.parse import quote, quote_plus, unquote, unquote_plus, urlencode as original_urlencode, urlparse\n",
"ETAG_MATCH = re.compile('(?:W/)?\"((?:\\\\\\\\.|[^\"])*)\"')\n",
"MONTHS = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()\n",
"__D = '(?P<day>\\\\d{2})'\n",
"__D2 = '(?P<day>[ \\\\d]\\\\d)'\n",
"__M = '(?P<mon>\\\\w{3})'\n",
"__Y = '(?P<year>\\\\d{4})'\n",
"__Y2 = '(?P<year>\\\\d{2})'\n",
"__T = '(?P<hour>\\\\d{2}):(?P<min>\\\\d{2}):(?P<sec>\\\\d{2})'\n",
"RFC1123_DATE = re.compile('^\\\\w{3}, %s %s %s %s GMT$' % (__D, __M, __Y, __T))\n",
"RFC850_DATE = re.compile('^\\\\w{6,9}, %s-%s-%s %s GMT$' % (__D, __M, __Y2, __T))\n",
"ASCTIME_DATE = re.compile('^\\\\w{3} %s %s %s %s$' % (__M, __D2, __T, __Y))\n",
"RFC3986_GENDELIMS = str(':/?#[]@')\n",
"RFC3986_SUBDELIMS = str(\"!$&'()*+,;=\")\n",
"PROTOCOL_TO_PORT = {'http': 80, 'https': 443}\n",
"@keep_lazy_text...\n",
"\"\"\"docstring\"\"\"\n",
"return force_text(quote(force_str(url), force_str(safe)))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_29(self):...\n",
"self.parser = saved_model_cli.create_parser()\n",
"VAR_9 = test.test_src_dir_path(VAR_0)\n",
"VAR_41 = os.path.join(test.get_temp_dir(), 'new_dir')\n",
"VAR_10 = self.parser.parse_args(['run', '--dir', VAR_9, '--tag_set',\n 'serve', '--signature_def', 'regress_x_to_y', '--input_examples',\n 'inputs=[{\"x\":8.0,\"x2\":5.0}]', '--outdir', VAR_41])\n",
"saved_model_cli.run(VAR_10)\n"
] | [
"def testRunCommandInputExamplesFeatureValueNotListError(self):...\n",
"self.parser = saved_model_cli.create_parser()\n",
"base_path = test.test_src_dir_path(SAVED_MODEL_PATH)\n",
"output_dir = os.path.join(test.get_temp_dir(), 'new_dir')\n",
"args = self.parser.parse_args(['run', '--dir', base_path, '--tag_set',\n 'serve', '--signature_def', 'regress_x_to_y', '--input_examples',\n 'inputs=[{\"x\":8.0,\"x2\":5.0}]', '--outdir', output_dir])\n",
"saved_model_cli.run(args)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_23=None):...\n",
"\"\"\"docstring\"\"\"\n",
"self.site = VAR_23 or ''\n"
] | [
"def __init__(self, site=None):...\n",
"\"\"\"docstring\"\"\"\n",
"self.site = site or ''\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'"
] |
[
"def FUNC_12(VAR_2, VAR_17, VAR_18=0):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_106 = None\n",
"if 'maps' in VAR_2:\n",
"VAR_196 = VAR_2['maps']\n",
"return VAR_106\n",
"VAR_106 = []\n",
"if isinstance(VAR_196, (unicode, str)):\n",
"VAR_1.debug('Invalid json for query ?maps=%s' % VAR_196)\n",
"VAR_196 = json.loads(VAR_196)\n",
"VAR_18 = max(len(VAR_196), VAR_18)\n",
"VAR_106 = None\n",
"for VAR_203 in range(VAR_18):\n",
"VAR_308 = None\n",
"if len(VAR_196) > VAR_203:\n",
"VAR_282 = VAR_196[VAR_203].get(VAR_17)\n",
"VAR_106.append(VAR_308)\n",
"if VAR_282 is not None:\n",
"VAR_308 = VAR_282.get('enabled') in (True, 'true')\n"
] | [
"def _get_maps_enabled(request, name, sizeC=0):...\n",
"\"\"\"docstring\"\"\"\n",
"codomains = None\n",
"if 'maps' in request:\n",
"map_json = request['maps']\n",
"return codomains\n",
"codomains = []\n",
"if isinstance(map_json, (unicode, str)):\n",
"logger.debug('Invalid json for query ?maps=%s' % map_json)\n",
"map_json = json.loads(map_json)\n",
"sizeC = max(len(map_json), sizeC)\n",
"codomains = None\n",
"for c in range(sizeC):\n",
"enabled = None\n",
"if len(map_json) > c:\n",
"m = map_json[c].get(name)\n",
"codomains.append(enabled)\n",
"if m is not None:\n",
"enabled = m.get('enabled') in (True, 'true')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Assign'"
] |
[
"def FUNC_30(VAR_23):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_62 = '\\n'.join(['Usage example:',\n 'To compile a SavedModel signature via (CPU) XLA AOT:',\n '$saved_model_cli aot_compile_cpu \\\\', ' --dir /tmp/saved_model \\\\',\n ' --tag_set serve \\\\', ' --output_dir /tmp/saved_model_xla_aot', '',\n '',\n 'Note: Additional XLA compilation options are available by setting the ',\n 'XLA_FLAGS environment variable. See the XLA debug options flags for ',\n 'all the options: ', ' {}'.format(VAR_0), '',\n 'For example, to disable XLA fast math when compiling:', '',\n 'XLA_FLAGS=\"--xla_cpu_enable_fast_math=false\" $saved_model_cli aot_compile_cpu ...'\n , '', 'Some possibly useful flags:',\n ' --xla_cpu_enable_fast_math=false',\n ' --xla_force_host_platform_device_count=<num threads>',\n ' (useful in conjunction with disabling multi threading)'])\n",
"VAR_24 = VAR_23.add_parser('aot_compile_cpu', description=compile_msg,\n formatter_class=argparse.RawTextHelpFormatter)\n",
"FUNC_28(VAR_24)\n",
"VAR_24.add_argument('--target_triple', type=str, default='x86_64-pc-linux',\n help=\n 'Target triple for LLVM during AOT compilation. Examples: x86_64-none-darwin, x86_64-apple-ios, arm64-none-ios, armv7-none-android. More examples are available in tfcompile.bzl in the tensorflow codebase.'\n )\n",
"VAR_24.add_argument('--target_cpu', type=str, default='', help=\n 'Target cpu name for LLVM during AOT compilation. Examples: x86_64, skylake, haswell, westmere, <empty> (unknown). For a complete list of options, run (for x86 targets): `llc -march=x86 -mcpu=help`'\n )\n",
"VAR_24.add_argument('--cpp_class', type=str, required=True, help=\n 'The name of the generated C++ class, wrapping the generated function. The syntax of this flag is [[<optional_namespace>::],...]<class_name>. This mirrors the C++ syntax for referring to a class, where multiple namespaces may precede the class name, separated by double-colons. The class will be generated in the given namespace(s), or if no namespaces are given, within the global namespace.'\n )\n",
"VAR_24.add_argument('--multithreading', type=str, default='False', help=\n 'Enable multithreading in the compiled computation. Note that if using this option, the resulting object files may have external dependencies on multithreading libraries like nsync.'\n )\n",
"VAR_24.set_defaults(func=aot_compile_cpu)\n"
] | [
"def add_aot_compile_cpu_subparser(subparsers):...\n",
"\"\"\"docstring\"\"\"\n",
"compile_msg = '\\n'.join(['Usage example:',\n 'To compile a SavedModel signature via (CPU) XLA AOT:',\n '$saved_model_cli aot_compile_cpu \\\\', ' --dir /tmp/saved_model \\\\',\n ' --tag_set serve \\\\', ' --output_dir /tmp/saved_model_xla_aot', '',\n '',\n 'Note: Additional XLA compilation options are available by setting the ',\n 'XLA_FLAGS environment variable. See the XLA debug options flags for ',\n 'all the options: ', ' {}'.format(_XLA_DEBUG_OPTIONS_URL), '',\n 'For example, to disable XLA fast math when compiling:', '',\n 'XLA_FLAGS=\"--xla_cpu_enable_fast_math=false\" $saved_model_cli aot_compile_cpu ...'\n , '', 'Some possibly useful flags:',\n ' --xla_cpu_enable_fast_math=false',\n ' --xla_force_host_platform_device_count=<num threads>',\n ' (useful in conjunction with disabling multi threading)'])\n",
"parser_compile = subparsers.add_parser('aot_compile_cpu', description=\n compile_msg, formatter_class=argparse.RawTextHelpFormatter)\n",
"_parse_common_freeze_and_aot(parser_compile)\n",
"parser_compile.add_argument('--target_triple', type=str, default=\n 'x86_64-pc-linux', help=\n 'Target triple for LLVM during AOT compilation. Examples: x86_64-none-darwin, x86_64-apple-ios, arm64-none-ios, armv7-none-android. More examples are available in tfcompile.bzl in the tensorflow codebase.'\n )\n",
"parser_compile.add_argument('--target_cpu', type=str, default='', help=\n 'Target cpu name for LLVM during AOT compilation. Examples: x86_64, skylake, haswell, westmere, <empty> (unknown). For a complete list of options, run (for x86 targets): `llc -march=x86 -mcpu=help`'\n )\n",
"parser_compile.add_argument('--cpp_class', type=str, required=True, help=\n 'The name of the generated C++ class, wrapping the generated function. The syntax of this flag is [[<optional_namespace>::],...]<class_name>. This mirrors the C++ syntax for referring to a class, where multiple namespaces may precede the class name, separated by double-colons. The class will be generated in the given namespace(s), or if no namespaces are given, within the global namespace.'\n )\n",
"parser_compile.add_argument('--multithreading', type=str, default='False',\n help=\n 'Enable multithreading in the compiled computation. Note that if using this option, the resulting object files may have external dependencies on multithreading libraries like nsync.'\n )\n",
"parser_compile.set_defaults(func=aot_compile_cpu)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_11(self):...\n",
"VAR_24 = 'inputs=[{\"text\":[\"foo\"], \"bytes\":[b\"bar\"]}]'\n",
"VAR_22 = saved_model_cli.preprocess_input_examples_arg_string(VAR_24)\n",
"VAR_25 = example_pb2.Example.FromString(VAR_22['inputs'][0])\n",
"self.assertProtoEquals('string', VAR_25)\n"
] | [
"def testInputPreProcessExamplesWithStrAndBytes(self):...\n",
"input_examples_str = 'inputs=[{\"text\":[\"foo\"], \"bytes\":[b\"bar\"]}]'\n",
"input_dict = saved_model_cli.preprocess_input_examples_arg_string(\n input_examples_str)\n",
"feature = example_pb2.Example.FromString(input_dict['inputs'][0])\n",
"self.assertProtoEquals(\n \"\"\"\n features {\n feature {\n key: \"bytes\"\n value {\n bytes_list {\n value: \"bar\"\n }\n }\n }\n feature {\n key: \"text\"\n value {\n bytes_list {\n value: \"foo\"\n }\n }\n }\n }\n \"\"\"\n , feature)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"import itertools\n",
"import os\n",
"import re\n",
"from urllib.parse import urlparse, ParseResult\n",
"from urlparse import urlparse, ParseResult\n",
"from django.conf import global_settings, settings\n",
"from django.contrib.sites.models import Site, RequestSite\n",
"from django.contrib.admin.models import LogEntry\n",
"from django.contrib.auth.models import User\n",
"from django.core import mail\n",
"from django.core.urlresolvers import reverse, NoReverseMatch\n",
"from django.http import QueryDict, HttpRequest\n",
"from django.utils.encoding import force_text\n",
"from django.utils.http import urlquote\n",
"from django.utils._os import upath\n",
"from django.test import TestCase\n",
"from django.test.utils import override_settings, patch_logger\n",
"from django.middleware.csrf import CsrfViewMiddleware\n",
"from django.contrib.sessions.middleware import SessionMiddleware\n",
"from django.contrib.auth import SESSION_KEY, REDIRECT_FIELD_NAME\n",
"from django.contrib.auth.forms import AuthenticationForm, PasswordChangeForm, SetPasswordForm\n",
"from django.contrib.auth.tests.utils import skipIfCustomUser\n",
"from django.contrib.auth.views import login as login_view\n",
"\"\"\"\n Helper base class for all the follow test cases.\n \"\"\"\n",
"VAR_0 = ['authtestdata.json']\n",
"VAR_1 = 'django.contrib.auth.tests.urls'\n",
"def FUNC_0(self, VAR_2='password'):...\n",
"VAR_3 = self.client.post('/login/', {'username': 'testclient', 'password':\n VAR_2})\n",
"self.assertTrue(SESSION_KEY in self.client.session)\n",
"return VAR_3\n"
] | [
"import itertools\n",
"import os\n",
"import re\n",
"from urllib.parse import urlparse, ParseResult\n",
"from urlparse import urlparse, ParseResult\n",
"from django.conf import global_settings, settings\n",
"from django.contrib.sites.models import Site, RequestSite\n",
"from django.contrib.admin.models import LogEntry\n",
"from django.contrib.auth.models import User\n",
"from django.core import mail\n",
"from django.core.urlresolvers import reverse, NoReverseMatch\n",
"from django.http import QueryDict, HttpRequest\n",
"from django.utils.encoding import force_text\n",
"from django.utils.http import urlquote\n",
"from django.utils._os import upath\n",
"from django.test import TestCase\n",
"from django.test.utils import override_settings, patch_logger\n",
"from django.middleware.csrf import CsrfViewMiddleware\n",
"from django.contrib.sessions.middleware import SessionMiddleware\n",
"from django.contrib.auth import SESSION_KEY, REDIRECT_FIELD_NAME\n",
"from django.contrib.auth.forms import AuthenticationForm, PasswordChangeForm, SetPasswordForm\n",
"from django.contrib.auth.tests.utils import skipIfCustomUser\n",
"from django.contrib.auth.views import login as login_view\n",
"\"\"\"\n Helper base class for all the follow test cases.\n \"\"\"\n",
"fixtures = ['authtestdata.json']\n",
"urls = 'django.contrib.auth.tests.urls'\n",
"def login(self, password='password'):...\n",
"response = self.client.post('/login/', {'username': 'testclient',\n 'password': password})\n",
"self.assertTrue(SESSION_KEY in self.client.session)\n",
"return response\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Expr'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_39(self):...\n",
"VAR_53.lib.sessions.expire()\n"
] | [
"def api_logout(self):...\n",
"cherrypy.lib.sessions.expire()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_75(self, VAR_90=None):...\n",
"if self.settings.enable_tokens is True:\n",
"VAR_141 = None\n",
"return self.requires(True, VAR_90=otherwise)\n",
"VAR_56 = VAR_263.request\n",
"VAR_85 = VAR_56.env.http_web2py_user_token or VAR_56.vars._token\n",
"VAR_277 = self.table_token()\n",
"VAR_254 = self.table_user()\n",
"from gluon.settings import global_settings\n",
"if global_settings.web2py_runtime_gae:\n",
"VAR_266 = VAR_277(VAR_85=token)\n",
"VAR_266 = self.db(VAR_277.token == VAR_85)(VAR_254.id == VAR_277.user_id\n ).select().first()\n",
"if VAR_266:\n",
"if VAR_266:\n",
"VAR_141 = VAR_254(VAR_266.user_id)\n",
"if VAR_141:\n",
"VAR_141 = VAR_266[VAR_254._tablename]\n",
"self.login_user(VAR_141)\n"
] | [
"def requires_login_or_token(self, otherwise=None):...\n",
"if self.settings.enable_tokens is True:\n",
"user = None\n",
"return self.requires(True, otherwise=otherwise)\n",
"request = current.request\n",
"token = request.env.http_web2py_user_token or request.vars._token\n",
"table_token = self.table_token()\n",
"table_user = self.table_user()\n",
"from gluon.settings import global_settings\n",
"if global_settings.web2py_runtime_gae:\n",
"row = table_token(token=token)\n",
"row = self.db(table_token.token == token)(table_user.id == table_token.user_id\n ).select().first()\n",
"if row:\n",
"if row:\n",
"user = table_user(row.user_id)\n",
"if user:\n",
"user = row[table_user._tablename]\n",
"self.login_user(user)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"ImportFrom'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Expr'"
] |
[
"@login_required()...\n",
""
] | [
"@login_required()...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"def FUNC_75(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._set_canonical_alias({'alias': '@unknown:test'}, VAR_17=400)\n",
"self._set_canonical_alias({'alt_aliases': ['@unknown:test']}, VAR_17=400)\n"
] | [
"def test_bad_alias(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._set_canonical_alias({'alias': '@unknown:test'}, expected_code=400)\n",
"self._set_canonical_alias({'alt_aliases': ['@unknown:test']}, expected_code=400\n )\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'"
] |
[
"from builtins import range, str\n",
"import datetime\n",
"import json\n",
"import os\n",
"import subprocess\n",
"import sys\n",
"from math import ceil\n",
"from flask import Blueprint, Response, request, stream_with_context, url_for\n",
"from opendiamond.dataretriever.util import DiamondTextAttr\n",
"from werkzeug.datastructures import Headers\n",
"VAR_0 = 'video'\n",
"VAR_1 = False\n",
"VAR_2 = VAR_3 = None\n",
"def FUNC_0(VAR_4):...\n",
"VAR_2 = VAR_4.indexdir\n",
"VAR_3 = VAR_4.dataroot\n",
"VAR_5 = Blueprint('video_store', __name__)\n",
"@VAR_5.route('/scope/<gididx>')...\n",
"VAR_12 = 'GIDIDX' + VAR_6.upper()\n",
"def FUNC_9():...\n",
"yield '<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\\n'\n",
"if VAR_1:\n",
"yield '<?xml-stylesheet type=\"text/xsl\" href=\"/scopelist.xsl\" ?>\\n'\n",
"yield '<objectlist>\\n'\n",
"for line in f:\n",
"VAR_10 = line.strip()\n",
"yield '</objectlist>\\n'\n",
"VAR_13 = str(FUNC_5(VAR_10))\n",
"VAR_16 = Headers([('Content-Type', 'text/xml')])\n",
"VAR_25 = FUNC_7(VAR_13)\n",
"print('Error parsing {}. {}. Skip.'.format(VAR_10, str(e)), file=sys.stderr)\n",
"return Response(stream_with_context(FUNC_9()), status='200 OK', VAR_16=headers)\n",
"VAR_26 = float(VAR_25['format']['duration'])\n",
"VAR_27 = int(ceil(VAR_26 / VAR_7))\n",
"yield '<count adjust=\"{}\"/>\\n'.format(VAR_27)\n",
"for clip in range(VAR_27):\n",
"yield FUNC_4(VAR_9=clip * stride, VAR_8=span, VAR_10=video) + '\\n'\n"
] | [
"from builtins import range, str\n",
"import datetime\n",
"import json\n",
"import os\n",
"import subprocess\n",
"import sys\n",
"from math import ceil\n",
"from flask import Blueprint, Response, request, stream_with_context, url_for\n",
"from opendiamond.dataretriever.util import DiamondTextAttr\n",
"from werkzeug.datastructures import Headers\n",
"BASEURL = 'video'\n",
"STYLE = False\n",
"INDEXDIR = DATAROOT = None\n",
"def init(config):...\n",
"INDEXDIR = config.indexdir\n",
"DATAROOT = config.dataroot\n",
"scope_blueprint = Blueprint('video_store', __name__)\n",
"@scope_blueprint.route('/scope/<gididx>')...\n",
"index = 'GIDIDX' + gididx.upper()\n",
"def generate():...\n",
"yield '<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\\n'\n",
"if STYLE:\n",
"yield '<?xml-stylesheet type=\"text/xsl\" href=\"/scopelist.xsl\" ?>\\n'\n",
"yield '<objectlist>\\n'\n",
"for line in f:\n",
"video = line.strip()\n",
"yield '</objectlist>\\n'\n",
"video_path = str(_get_obj_absolute_path(video))\n",
"headers = Headers([('Content-Type', 'text/xml')])\n",
"video_meta = _ffprobe(video_path)\n",
"print('Error parsing {}. {}. Skip.'.format(video, str(e)), file=sys.stderr)\n",
"return Response(stream_with_context(generate()), status='200 OK', headers=\n headers)\n",
"length_sec = float(video_meta['format']['duration'])\n",
"num_clips = int(ceil(length_sec / stride))\n",
"yield '<count adjust=\"{}\"/>\\n'.format(num_clips)\n",
"for clip in range(num_clips):\n",
"yield _get_object_element(start=clip * stride, span=span, video=video) + '\\n'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"FunctionDef'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"For",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"For",
"Expr'"
] |
[
"def FUNC_41(self, VAR_5, VAR_7=False):...\n",
"VAR_3 = self.client.get('/login_required/')\n",
"self.assertEqual(VAR_3.status_code, 302)\n",
"self.assertURLEqual(VAR_3.url, VAR_5, VAR_7=parse_qs)\n"
] | [
"def assertLoginURLEquals(self, url, parse_qs=False):...\n",
"response = self.client.get('/login_required/')\n",
"self.assertEqual(response.status_code, 302)\n",
"self.assertURLEqual(response.url, url, parse_qs=parse_qs)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_50(VAR_21):...\n",
"return VAR_21.depth, VAR_21.event_id\n"
] | [
"def sort_fun(ev):...\n",
"return ev.depth, ev.event_id\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_0():...\n",
"VAR_0 = {'a': 'clsdict'}\n",
"VAR_4 = 'first'\n",
"VAR_5 = 'first'\n",
"VAR_5 = 'second'\n",
"VAR_6 = 'second'\n",
"VAR_1 = CLASS_6, CLASS_7\n",
"assert deep_getattr(VAR_0, VAR_1, 'a') == 'clsdict'\n",
"assert deep_getattr(VAR_0, VAR_1, 'b') == 'first'\n",
"assert deep_getattr(VAR_0, VAR_1, 'c') == 'second'\n",
"deep_getattr(VAR_0, VAR_1, 'd')\n",
"assert deep_getattr(VAR_0, VAR_1, 'a', 'default') == 'clsdict'\n",
"assert deep_getattr(VAR_0, VAR_1, 'b', 'default') == 'first'\n",
"assert deep_getattr(VAR_0, VAR_1, 'c', 'default') == 'second'\n",
"assert deep_getattr(VAR_0, VAR_1, 'd', 'default') == 'default'\n"
] | [
"def test_deep_getattr():...\n",
"clsdict = {'a': 'clsdict'}\n",
"a = 'first'\n",
"b = 'first'\n",
"b = 'second'\n",
"c = 'second'\n",
"bases = First, Second\n",
"assert deep_getattr(clsdict, bases, 'a') == 'clsdict'\n",
"assert deep_getattr(clsdict, bases, 'b') == 'first'\n",
"assert deep_getattr(clsdict, bases, 'c') == 'second'\n",
"deep_getattr(clsdict, bases, 'd')\n",
"assert deep_getattr(clsdict, bases, 'a', 'default') == 'clsdict'\n",
"assert deep_getattr(clsdict, bases, 'b', 'default') == 'first'\n",
"assert deep_getattr(clsdict, bases, 'c', 'default') == 'second'\n",
"assert deep_getattr(clsdict, bases, 'd', 'default') == 'default'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assert'",
"Assert'",
"Assert'",
"Expr'",
"Assert'",
"Assert'",
"Assert'",
"Assert'"
] |
[
"def FUNC_0(self, VAR_3, VAR_4):...\n",
"VAR_51 = self.default_config()\n",
"VAR_51['allow_per_room_profiles'] = False\n",
"self.hs = self.setup_test_homeserver(VAR_51=config)\n",
"return self.hs\n"
] | [
"def make_homeserver(self, reactor, clock):...\n",
"config = self.default_config()\n",
"config['allow_per_room_profiles'] = False\n",
"self.hs = self.setup_test_homeserver(config=config)\n",
"return self.hs\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_8(self):...\n",
"VAR_3 = '@foo:bar'\n",
"VAR_7 = 5000000\n",
"VAR_10 = UserPresenceState.default(VAR_3)\n",
"VAR_10 = VAR_10.copy_and_replace(VAR_10=PresenceState.ONLINE,\n last_active_ts=0, last_user_sync_ts=now - SYNC_ONLINE_TIMEOUT - 1)\n",
"VAR_9 = handle_timeout(VAR_10, is_mine=True, syncing_user_ids=set(), VAR_7=now)\n",
"self.assertIsNotNone(VAR_9)\n",
"self.assertEquals(VAR_9.state, PresenceState.OFFLINE)\n"
] | [
"def test_sync_timeout(self):...\n",
"user_id = '@foo:bar'\n",
"now = 5000000\n",
"state = UserPresenceState.default(user_id)\n",
"state = state.copy_and_replace(state=PresenceState.ONLINE, last_active_ts=0,\n last_user_sync_ts=now - SYNC_ONLINE_TIMEOUT - 1)\n",
"new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now\n )\n",
"self.assertIsNotNone(new_state)\n",
"self.assertEquals(new_state.state, PresenceState.OFFLINE)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_1():...\n",
"assert utils.snake_case('HTTPResponse') == 'http_response'\n",
"assert utils.snake_case('APIClientHTTPResponse') == 'api_client_http_response'\n",
"assert utils.snake_case('OAuthClientHTTPResponse'\n ) == 'o_auth_client_http_response'\n"
] | [
"def test_snake_case_from_pascal_with_acronyms():...\n",
"assert utils.snake_case('HTTPResponse') == 'http_response'\n",
"assert utils.snake_case('APIClientHTTPResponse') == 'api_client_http_response'\n",
"assert utils.snake_case('OAuthClientHTTPResponse'\n ) == 'o_auth_client_http_response'\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assert'",
"Assert'",
"Assert'"
] |
[
"@app.before_request...\n",
"VAR_6 = request.path.startswith('/login') or request.path.startswith('/static'\n ) or request.path.startswith('/api/login')\n",
"if not current_user.is_authenticated and not VAR_6:\n",
"return redirect(url_for('login', next=request.path))\n",
"return\n"
] | [
"@app.before_request...\n",
"allowed_path = request.path.startswith('/login') or request.path.startswith(\n '/static') or request.path.startswith('/api/login')\n",
"if not current_user.is_authenticated and not allowed_path:\n",
"return redirect(url_for('login', next=request.path))\n",
"return\n"
] | [
0,
0,
0,
0,
0
] | [
"For",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_1(self, VAR_3, VAR_4, VAR_11):...\n",
"self.creator = self.register_user('creator', 'test')\n",
"self.creator_tok = self.login('creator', 'test')\n",
"self.second_user_id = self.register_user('second', 'test')\n",
"self.second_tok = self.login('second', 'test')\n",
"self.room_id = self.helper.create_room_as(self.creator, VAR_52=self.creator_tok\n )\n"
] | [
"def prepare(self, reactor, clock, homeserver):...\n",
"self.creator = self.register_user('creator', 'test')\n",
"self.creator_tok = self.login('creator', 'test')\n",
"self.second_user_id = self.register_user('second', 'test')\n",
"self.second_tok = self.login('second', 'test')\n",
"self.room_id = self.helper.create_room_as(self.creator, tok=self.creator_tok)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_20(self):...\n",
"from zope.tales.expressions import DeferWrapper\n",
"VAR_5 = self._makeContext()\n",
"VAR_7 = VAR_5.evaluate('defer: b')\n",
"self.assertIsInstance(VAR_7, DeferWrapper)\n"
] | [
"def test_defer_expression_returns_wrapper(self):...\n",
"from zope.tales.expressions import DeferWrapper\n",
"ec = self._makeContext()\n",
"defer = ec.evaluate('defer: b')\n",
"self.assertIsInstance(defer, DeferWrapper)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"ImportFrom'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_49(self):...\n",
"self.admin_login(username='super', password='secret', login_url='/')\n",
"self.selenium.get('%s%s' % (self.live_server_url, '/admin_widgets/event/add/'))\n",
"VAR_51 = self.selenium.current_window_handle\n",
"self.assertEqual(self.selenium.find_element_by_id('id_supporting_bands').\n get_attribute('value'), '')\n",
"self.selenium.find_element_by_id('lookup_id_supporting_bands').click()\n",
"self.selenium.switch_to_window('id_supporting_bands')\n",
"self.wait_page_loaded()\n",
"VAR_52 = self.selenium.find_element_by_link_text('Bogey Blues')\n",
"self.assertTrue('/band/42/' in VAR_52.get_attribute('href'))\n",
"VAR_52.click()\n",
"self.selenium.switch_to_window(VAR_51)\n",
"self.assertEqual(self.selenium.find_element_by_id('id_supporting_bands').\n get_attribute('value'), '42')\n",
"self.selenium.find_element_by_id('lookup_id_supporting_bands').click()\n",
"self.selenium.switch_to_window('id_supporting_bands')\n",
"self.wait_page_loaded()\n",
"VAR_52 = self.selenium.find_element_by_link_text('Green Potatoes')\n",
"self.assertTrue('/band/98/' in VAR_52.get_attribute('href'))\n",
"VAR_52.click()\n",
"self.selenium.switch_to_window(VAR_51)\n",
"self.assertEqual(self.selenium.find_element_by_id('id_supporting_bands').\n get_attribute('value'), '42,98')\n"
] | [
"def test_many_to_many(self):...\n",
"self.admin_login(username='super', password='secret', login_url='/')\n",
"self.selenium.get('%s%s' % (self.live_server_url, '/admin_widgets/event/add/'))\n",
"main_window = self.selenium.current_window_handle\n",
"self.assertEqual(self.selenium.find_element_by_id('id_supporting_bands').\n get_attribute('value'), '')\n",
"self.selenium.find_element_by_id('lookup_id_supporting_bands').click()\n",
"self.selenium.switch_to_window('id_supporting_bands')\n",
"self.wait_page_loaded()\n",
"link = self.selenium.find_element_by_link_text('Bogey Blues')\n",
"self.assertTrue('/band/42/' in link.get_attribute('href'))\n",
"link.click()\n",
"self.selenium.switch_to_window(main_window)\n",
"self.assertEqual(self.selenium.find_element_by_id('id_supporting_bands').\n get_attribute('value'), '42')\n",
"self.selenium.find_element_by_id('lookup_id_supporting_bands').click()\n",
"self.selenium.switch_to_window('id_supporting_bands')\n",
"self.wait_page_loaded()\n",
"link = self.selenium.find_element_by_link_text('Green Potatoes')\n",
"self.assertTrue('/band/98/' in link.get_attribute('href'))\n",
"link.click()\n",
"self.selenium.switch_to_window(main_window)\n",
"self.assertEqual(self.selenium.find_element_by_id('id_supporting_bands').\n get_attribute('value'), '42,98')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_16(*VAR_8):...\n",
""
] | [
"def _throw(*args):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_7(self):...\n",
"VAR_5 = {'types': ['m.room.message', 'org.matrix.foo.bar']}\n",
"VAR_6 = FUNC_0(sender='@foo:bar', type=\n 'now.for.something.completely.different', room_id='!foo:bar')\n",
"self.assertFalse(Filter(VAR_5).check(VAR_6))\n"
] | [
"def test_definition_types_works_with_unknowns(self):...\n",
"definition = {'types': ['m.room.message', 'org.matrix.foo.bar']}\n",
"event = MockEvent(sender='@foo:bar', type=\n 'now.for.something.completely.different', room_id='!foo:bar')\n",
"self.assertFalse(Filter(definition).check(event))\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"\"\"\"Our own QNetworkAccessManager.\"\"\"\n",
"import collections\n",
"import html\n",
"import attr\n",
"from PyQt5.QtCore import pyqtSlot, pyqtSignal, QCoreApplication, QUrl, QByteArray\n",
"from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkReply, QSslSocket\n",
"from qutebrowser.config import config\n",
"from qutebrowser.utils import message, log, usertypes, utils, objreg, urlutils, debug\n",
"from qutebrowser.browser import shared\n",
"from qutebrowser.browser.webkit import certificateerror\n",
"from qutebrowser.browser.webkit.network import webkitqutescheme, networkreply, filescheme\n",
"VAR_0 = '%HOSTBLOCK%'\n",
"VAR_1 = {}\n",
"\"\"\"Information identifying a proxy server.\"\"\"\n",
"VAR_3 = attr.ib()\n",
"VAR_4 = attr.ib()\n",
"VAR_5 = attr.ib()\n",
"def FUNC_0(VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = [e.upper() for e in VAR_2.name().split('-')]\n",
"if VAR_2.usedBits() < 128:\n",
"return False\n",
"if VAR_2.keyExchangeMethod() == 'DH' and utils.is_windows:\n",
"return False\n",
"if VAR_2.encryptionMethod().upper().startswith('RC4'):\n",
"return False\n",
"if VAR_2.encryptionMethod().upper().startswith('DES'):\n",
"return False\n",
"if 'MD5' in VAR_6:\n",
"return False\n",
"if VAR_2.authenticationMethod() in ['aNULL', 'NULL']:\n",
"return False\n",
"if VAR_2.encryptionMethod() in ['eNULL', 'NULL']:\n",
"return False\n",
"if 'EXP' in VAR_6 or 'EXPORT' in VAR_6:\n",
"return False\n",
"if 'ADH' in VAR_6:\n",
"return False\n",
"return True\n"
] | [
"\"\"\"Our own QNetworkAccessManager.\"\"\"\n",
"import collections\n",
"import html\n",
"import attr\n",
"from PyQt5.QtCore import pyqtSlot, pyqtSignal, QCoreApplication, QUrl, QByteArray\n",
"from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkReply, QSslSocket\n",
"from qutebrowser.config import config\n",
"from qutebrowser.utils import message, log, usertypes, utils, objreg, urlutils, debug\n",
"from qutebrowser.browser import shared\n",
"from qutebrowser.browser.webkit import certificateerror\n",
"from qutebrowser.browser.webkit.network import webkitqutescheme, networkreply, filescheme\n",
"HOSTBLOCK_ERROR_STRING = '%HOSTBLOCK%'\n",
"_proxy_auth_cache = {}\n",
"\"\"\"Information identifying a proxy server.\"\"\"\n",
"type = attr.ib()\n",
"hostname = attr.ib()\n",
"port = attr.ib()\n",
"def _is_secure_cipher(cipher):...\n",
"\"\"\"docstring\"\"\"\n",
"tokens = [e.upper() for e in cipher.name().split('-')]\n",
"if cipher.usedBits() < 128:\n",
"return False\n",
"if cipher.keyExchangeMethod() == 'DH' and utils.is_windows:\n",
"return False\n",
"if cipher.encryptionMethod().upper().startswith('RC4'):\n",
"return False\n",
"if cipher.encryptionMethod().upper().startswith('DES'):\n",
"return False\n",
"if 'MD5' in tokens:\n",
"return False\n",
"if cipher.authenticationMethod() in ['aNULL', 'NULL']:\n",
"return False\n",
"if cipher.encryptionMethod() in ['eNULL', 'NULL']:\n",
"return False\n",
"if 'EXP' in tokens or 'EXPORT' in tokens:\n",
"return False\n",
"if 'ADH' in tokens:\n",
"return False\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_0():...\n",
"\"\"\"docstring\"\"\"\n",
"import zmq\n",
"return True\n",
"VAR_18 = zmq.__version__\n",
"VAR_19 = re.match('^(\\\\d+)\\\\.(\\\\d+)(?:\\\\.(\\\\d+))?', VAR_18)\n",
"if not VAR_19:\n",
"VAR_32 = \"Using untested zmq python bindings version: '{0}'\".format(VAR_18)\n",
"VAR_20, VAR_21, VAR_22 = VAR_19.groups()\n",
"if is_console_configured():\n",
"if VAR_20.isdigit():\n",
"VAR_0.warning(VAR_32)\n",
"sys.stderr.write('WARNING {0}\\n'.format(VAR_32))\n",
"VAR_20 = int(VAR_20)\n",
"if VAR_21.isdigit():\n",
"return True\n",
"VAR_21 = int(VAR_21)\n",
"if VAR_22 and VAR_22.isdigit():\n",
"VAR_22 = int(VAR_22)\n",
"if VAR_20 == 2 and VAR_21 == 1:\n",
"if 'dev' in VAR_18 and not VAR_22:\n",
"if VAR_20 > 2 or VAR_20 == 2 and VAR_21 > 1:\n",
"VAR_32 = 'Using dev zmq module, please report unexpected results'\n",
"if VAR_22 and VAR_22 >= 9:\n",
"return True\n",
"VAR_0.critical('ZeroMQ python bindings >= 2.1.9 are required')\n",
"if is_console_configured():\n",
"return True\n",
"if 'salt-master' in sys.argv[0]:\n",
"VAR_0.warning(VAR_32)\n",
"sys.stderr.write('WARNING: {0}\\n'.format(VAR_32))\n",
"VAR_32 = 'string'\n",
"return False\n",
"return True\n",
"if is_console_configured():\n",
"VAR_0.critical(VAR_32)\n",
"sys.stderr.write('CRITICAL {0}\\n'.format(VAR_32))\n"
] | [
"def zmq_version():...\n",
"\"\"\"docstring\"\"\"\n",
"import zmq\n",
"return True\n",
"ver = zmq.__version__\n",
"match = re.match('^(\\\\d+)\\\\.(\\\\d+)(?:\\\\.(\\\\d+))?', ver)\n",
"if not match:\n",
"msg = \"Using untested zmq python bindings version: '{0}'\".format(ver)\n",
"major, minor, point = match.groups()\n",
"if is_console_configured():\n",
"if major.isdigit():\n",
"log.warning(msg)\n",
"sys.stderr.write('WARNING {0}\\n'.format(msg))\n",
"major = int(major)\n",
"if minor.isdigit():\n",
"return True\n",
"minor = int(minor)\n",
"if point and point.isdigit():\n",
"point = int(point)\n",
"if major == 2 and minor == 1:\n",
"if 'dev' in ver and not point:\n",
"if major > 2 or major == 2 and minor > 1:\n",
"msg = 'Using dev zmq module, please report unexpected results'\n",
"if point and point >= 9:\n",
"return True\n",
"log.critical('ZeroMQ python bindings >= 2.1.9 are required')\n",
"if is_console_configured():\n",
"return True\n",
"if 'salt-master' in sys.argv[0]:\n",
"log.warning(msg)\n",
"sys.stderr.write('WARNING: {0}\\n'.format(msg))\n",
"msg = (\n 'The Salt Master is unstable using a ZeroMQ version lower than 2.1.11 and requires this fix: http://lists.zeromq.org/pipermail/zeromq-dev/2011-June/012094.html'\n )\n",
"return False\n",
"return True\n",
"if is_console_configured():\n",
"log.critical(msg)\n",
"sys.stderr.write('CRITICAL {0}\\n'.format(msg))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Import'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Condition",
"Return'",
"Expr'",
"Condition",
"Return'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Return'",
"Return'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_25(VAR_36):...\n",
""
] | [
"def url_request(url):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_38(self):...\n",
"if self.flags.ignore_validate_update_after_submit:\n",
"return\n",
"self._validate_update_after_submit()\n",
"for VAR_21 in self.get_all_children():\n",
"if VAR_21.is_new() and self.meta.get_field(VAR_21.parentfield).allow_on_submit:\n",
"VAR_21._validate_update_after_submit()\n"
] | [
"def validate_update_after_submit(self):...\n",
"if self.flags.ignore_validate_update_after_submit:\n",
"return\n",
"self._validate_update_after_submit()\n",
"for d in self.get_all_children():\n",
"if d.is_new() and self.meta.get_field(d.parentfield).allow_on_submit:\n",
"d._validate_update_after_submit()\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Expr'",
"For",
"Condition",
"Expr'"
] |
[
"async def FUNC_21():...\n",
"context_11.request = 'context_11'\n",
"VAR_52 = VAR_4.verify_json_objects_for_server([('server10', VAR_21, 0,\n 'test10'), ('server11', {}, 0, 'test11')])\n",
"self.assertTrue(VAR_52[1].called)\n",
"await VAR_52[1]\n",
"self.assertFalse(VAR_52[0].called)\n",
"self.assertFalse(\"unsigned json didn't cause a failure\")\n",
"VAR_52[0].addBoth(self.check_context, None)\n",
"await make_deferred_yieldable(VAR_52[0])\n"
] | [
"async def first_lookup():...\n",
"context_11.request = 'context_11'\n",
"res_deferreds = kr.verify_json_objects_for_server([('server10', json1, 0,\n 'test10'), ('server11', {}, 0, 'test11')])\n",
"self.assertTrue(res_deferreds[1].called)\n",
"await res_deferreds[1]\n",
"self.assertFalse(res_deferreds[0].called)\n",
"self.assertFalse(\"unsigned json didn't cause a failure\")\n",
"res_deferreds[0].addBoth(self.check_context, None)\n",
"await make_deferred_yieldable(res_deferreds[0])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@FUNC_0...\n",
"return GroupAttestationSigning(self)\n"
] | [
"@cache_in_self...\n",
"return GroupAttestationSigning(self)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_30(self):...\n",
"self.assert_expected(self.folder.t, 'Default.html')\n"
] | [
"def testDefaultKeywordHandling(self):...\n",
"self.assert_expected(self.folder.t, 'Default.html')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_0(VAR_6):...\n",
"VAR_3 = '/srv/diamond/STREAM'\n",
"VAR_4 = VAR_6.dataroot\n"
] | [
"def init(config):...\n",
"INDEXDIR = '/srv/diamond/STREAM'\n",
"DATAROOT = config.dataroot\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"def FUNC_6(self):...\n",
"\"\"\"docstring\"\"\"\n",
"for value in [-2 ** 53, 2 ** 53, 1.0, float('inf'), float('-inf'), float('nan')\n",
"event_from_pdu_json({'type': EventTypes.Message, 'content': {'foo': value},\n 'room_id': '!room:test', 'sender': '@user:test', 'depth': 1,\n 'prev_events': [], 'auth_events': [], 'origin_server_ts': 1234},\n RoomVersions.V6)\n"
] | [
"def test_invalid_numbers(self):...\n",
"\"\"\"docstring\"\"\"\n",
"for value in [-2 ** 53, 2 ** 53, 1.0, float('inf'), float('-inf'), float('nan')\n",
"event_from_pdu_json({'type': EventTypes.Message, 'content': {'foo': value},\n 'room_id': '!room:test', 'sender': '@user:test', 'depth': 1,\n 'prev_events': [], 'auth_events': [], 'origin_server_ts': 1234},\n RoomVersions.V6)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"For",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._accepted_ssl_errors.clear()\n",
"self._rejected_ssl_errors.clear()\n"
] | [
"def clear_all_ssl_errors(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._accepted_ssl_errors.clear()\n",
"self._rejected_ssl_errors.clear()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'"
] |
[
"def FUNC_17(VAR_17, VAR_18, VAR_20):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_46 = {}\n",
"VAR_47 = FUNC_13(VAR_17)\n",
"VAR_48 = FUNC_14(VAR_18, VAR_19=False)\n",
"VAR_49 = FUNC_15(VAR_20)\n",
"for VAR_74, (filename, variable_name) in VAR_47.items():\n",
"VAR_73 = np.load(file_io.FileIO(filename, mode='rb'), allow_pickle=True)\n",
"for VAR_74, py_expr_evaluated in VAR_48.items():\n",
"if variable_name:\n",
"if VAR_74 in VAR_46:\n",
"for VAR_74, VAR_45 in VAR_49.items():\n",
"if isinstance(VAR_73, np.ndarray):\n",
"if isinstance(VAR_73, np.lib.npyio.NpzFile):\n",
"logging.warn('string' % VAR_74)\n",
"VAR_46[VAR_74] = py_expr_evaluated\n",
"if VAR_74 in VAR_46:\n",
"return VAR_46\n",
"logging.warn(\n 'Input file %s contains a single ndarray. Name key \"%s\" ignored.' % (\n filename, variable_name))\n",
"if variable_name in VAR_73:\n",
"VAR_83 = VAR_73.files\n",
"VAR_46[VAR_74] = VAR_73\n",
"logging.warn(\n 'input_key %s has been specified in multiple options. Value in --input_examples will be used.'\n % VAR_74)\n",
"VAR_46[VAR_74] = VAR_45\n",
"VAR_46[VAR_74] = VAR_73\n",
"VAR_46[VAR_74] = VAR_73[variable_name]\n",
"if len(VAR_83) != 1:\n",
"VAR_46[VAR_74] = VAR_73[VAR_83[0]]\n"
] | [
"def load_inputs_from_input_arg_string(inputs_str, input_exprs_str,...\n",
"\"\"\"docstring\"\"\"\n",
"tensor_key_feed_dict = {}\n",
"inputs = preprocess_inputs_arg_string(inputs_str)\n",
"input_exprs = preprocess_input_exprs_arg_string(input_exprs_str, safe=False)\n",
"input_examples = preprocess_input_examples_arg_string(input_examples_str)\n",
"for input_tensor_key, (filename, variable_name) in inputs.items():\n",
"data = np.load(file_io.FileIO(filename, mode='rb'), allow_pickle=True)\n",
"for input_tensor_key, py_expr_evaluated in input_exprs.items():\n",
"if variable_name:\n",
"if input_tensor_key in tensor_key_feed_dict:\n",
"for input_tensor_key, example in input_examples.items():\n",
"if isinstance(data, np.ndarray):\n",
"if isinstance(data, np.lib.npyio.NpzFile):\n",
"logging.warn(\n 'input_key %s has been specified with both --inputs and --input_exprs options. Value in --input_exprs will be used.'\n % input_tensor_key)\n",
"tensor_key_feed_dict[input_tensor_key] = py_expr_evaluated\n",
"if input_tensor_key in tensor_key_feed_dict:\n",
"return tensor_key_feed_dict\n",
"logging.warn(\n 'Input file %s contains a single ndarray. Name key \"%s\" ignored.' % (\n filename, variable_name))\n",
"if variable_name in data:\n",
"variable_name_list = data.files\n",
"tensor_key_feed_dict[input_tensor_key] = data\n",
"logging.warn(\n 'input_key %s has been specified in multiple options. Value in --input_examples will be used.'\n % input_tensor_key)\n",
"tensor_key_feed_dict[input_tensor_key] = example\n",
"tensor_key_feed_dict[input_tensor_key] = data\n",
"tensor_key_feed_dict[input_tensor_key] = data[variable_name]\n",
"if len(variable_name_list) != 1:\n",
"tensor_key_feed_dict[input_tensor_key] = data[variable_name_list[0]]\n"
] | [
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"For",
"Condition",
"Condition",
"For",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'"
] |
[
"def FUNC_5(self, VAR_3, VAR_4):...\n",
"return django.forms.URLField(**options)\n"
] | [
"def create_url_field(self, field, options):...\n",
"return django.forms.URLField(**options)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@FUNC_0...\n",
"return MacaroonGenerator(self)\n"
] | [
"@cache_in_self...\n",
"return MacaroonGenerator(self)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_10(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.get_success(self.inject_room_member(self.room1, self.u_alice,\n Membership.JOIN))\n",
"VAR_13 = self.event_builder_factory.for_room_version(RoomVersions.V1, {\n 'type': EventTypes.Redaction, 'sender': self.u_alice.to_string(),\n 'room_id': self.room1.to_string(), 'content': {'reason': 'foo'}})\n",
"VAR_26, VAR_15 = self.get_success(self.event_creation_handler.\n create_new_client_event(VAR_13))\n",
"self.get_success(self.storage.persistence.persist_event(VAR_26, VAR_15))\n",
"self.reactor.advance(60 * 60 * 24 * 31)\n",
"self.get_success(self.store.get_event(VAR_26.event_id, allow_none=True))\n"
] | [
"def test_store_redacted_redaction(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.get_success(self.inject_room_member(self.room1, self.u_alice,\n Membership.JOIN))\n",
"builder = self.event_builder_factory.for_room_version(RoomVersions.V1, {\n 'type': EventTypes.Redaction, 'sender': self.u_alice.to_string(),\n 'room_id': self.room1.to_string(), 'content': {'reason': 'foo'}})\n",
"redaction_event, context = self.get_success(self.event_creation_handler.\n create_new_client_event(builder))\n",
"self.get_success(self.storage.persistence.persist_event(redaction_event,\n context))\n",
"self.reactor.advance(60 * 60 * 24 * 31)\n",
"self.get_success(self.store.get_event(redaction_event.event_id, allow_none=\n True))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"async def FUNC_12(VAR_16):...\n",
"return {str(u) for u in self.room_members}\n"
] | [
"async def get_users_in_room(room_id):...\n",
"return {str(u) for u in self.room_members}\n"
] | [
0,
0
] | [
"AsyncFunctionDef'",
"Return'"
] |
[
"\"\"\"string\"\"\"\n",
"import os\n",
"import json\n",
"import cherrypy\n",
"import codecs\n",
"import sys\n",
"from urllib.parse import unquote\n",
"from backport.urllib.parse import unquote\n",
"from urllib import parse\n",
"from backport.urllib import parse\n",
"import audiotranscode\n",
"from tinytag import TinyTag\n",
"from cherrymusicserver import userdb\n",
"from cherrymusicserver import log\n",
"from cherrymusicserver import albumartfetcher\n",
"from cherrymusicserver import service\n",
"from cherrymusicserver.pathprovider import readRes\n",
"from cherrymusicserver.pathprovider import albumArtFilePath\n",
"import cherrymusicserver as cherry\n",
"import cherrymusicserver.metainfo as metainfo\n",
"from cherrymusicserver.util import Performance, MemoryZipFile\n",
"from cherrymusicserver.ext import zipstream\n",
"import time\n",
"VAR_0 = True\n",
"def __init__(self, VAR_1):...\n",
"self.config = VAR_1\n",
"VAR_42 = 'res/dist/main.html'\n",
"VAR_43 = 'res/login.html'\n",
"VAR_44 = 'res/firstrun.html'\n",
"self.mainpage = readRes(VAR_42)\n",
"self.loginpage = readRes(VAR_43)\n",
"self.firstrunpage = readRes(VAR_44)\n",
"self.handlers = {'search': self.api_search, 'rememberplaylist': self.\n api_rememberplaylist, 'saveplaylist': self.api_saveplaylist,\n 'loadplaylist': self.api_loadplaylist, 'generaterandomplaylist': self.\n api_generaterandomplaylist, 'deleteplaylist': self.api_deleteplaylist,\n 'getmotd': self.api_getmotd, 'restoreplaylist': self.\n api_restoreplaylist, 'getplayables': self.api_getplayables,\n 'getuserlist': self.api_getuserlist, 'adduser': self.api_adduser,\n 'userdelete': self.api_userdelete, 'userchangepassword': self.\n api_userchangepassword, 'showplaylists': self.api_showplaylists,\n 'logout': self.api_logout, 'downloadpls': self.api_downloadpls,\n 'downloadm3u': self.api_downloadm3u, 'getsonginfo': self.\n api_getsonginfo, 'getencoders': self.api_getencoders, 'getdecoders':\n self.api_getdecoders, 'transcodingenabled': self.api_transcodingenabled,\n 'updatedb': self.api_updatedb, 'getconfiguration': self.\n api_getconfiguration, 'compactlistdir': self.api_compactlistdir,\n 'listdir': self.api_listdir, 'fetchalbumart': self.api_fetchalbumart,\n 'fetchalbumarturls': self.api_fetchalbumarturls, 'albumart_set': self.\n api_albumart_set, 'heartbeat': self.api_heartbeat, 'getuseroptions':\n self.api_getuseroptions, 'setuseroption': self.api_setuseroption,\n 'changeplaylist': self.api_changeplaylist, 'downloadcheck': self.\n api_downloadcheck, 'setuseroptionfor': self.api_setuseroptionfor}\n",
"def FUNC_1(self, VAR_2):...\n",
"return parse.urlparse(VAR_2).scheme == 'https'\n"
] | [
"\"\"\"This class provides the api to talk to the client.\nIt will then call the cherrymodel, to get the\nrequested information\"\"\"\n",
"import os\n",
"import json\n",
"import cherrypy\n",
"import codecs\n",
"import sys\n",
"from urllib.parse import unquote\n",
"from backport.urllib.parse import unquote\n",
"from urllib import parse\n",
"from backport.urllib import parse\n",
"import audiotranscode\n",
"from tinytag import TinyTag\n",
"from cherrymusicserver import userdb\n",
"from cherrymusicserver import log\n",
"from cherrymusicserver import albumartfetcher\n",
"from cherrymusicserver import service\n",
"from cherrymusicserver.pathprovider import readRes\n",
"from cherrymusicserver.pathprovider import albumArtFilePath\n",
"import cherrymusicserver as cherry\n",
"import cherrymusicserver.metainfo as metainfo\n",
"from cherrymusicserver.util import Performance, MemoryZipFile\n",
"from cherrymusicserver.ext import zipstream\n",
"import time\n",
"debug = True\n",
"def __init__(self, config):...\n",
"self.config = config\n",
"template_main = 'res/dist/main.html'\n",
"template_login = 'res/login.html'\n",
"template_firstrun = 'res/firstrun.html'\n",
"self.mainpage = readRes(template_main)\n",
"self.loginpage = readRes(template_login)\n",
"self.firstrunpage = readRes(template_firstrun)\n",
"self.handlers = {'search': self.api_search, 'rememberplaylist': self.\n api_rememberplaylist, 'saveplaylist': self.api_saveplaylist,\n 'loadplaylist': self.api_loadplaylist, 'generaterandomplaylist': self.\n api_generaterandomplaylist, 'deleteplaylist': self.api_deleteplaylist,\n 'getmotd': self.api_getmotd, 'restoreplaylist': self.\n api_restoreplaylist, 'getplayables': self.api_getplayables,\n 'getuserlist': self.api_getuserlist, 'adduser': self.api_adduser,\n 'userdelete': self.api_userdelete, 'userchangepassword': self.\n api_userchangepassword, 'showplaylists': self.api_showplaylists,\n 'logout': self.api_logout, 'downloadpls': self.api_downloadpls,\n 'downloadm3u': self.api_downloadm3u, 'getsonginfo': self.\n api_getsonginfo, 'getencoders': self.api_getencoders, 'getdecoders':\n self.api_getdecoders, 'transcodingenabled': self.api_transcodingenabled,\n 'updatedb': self.api_updatedb, 'getconfiguration': self.\n api_getconfiguration, 'compactlistdir': self.api_compactlistdir,\n 'listdir': self.api_listdir, 'fetchalbumart': self.api_fetchalbumart,\n 'fetchalbumarturls': self.api_fetchalbumarturls, 'albumart_set': self.\n api_albumart_set, 'heartbeat': self.api_heartbeat, 'getuseroptions':\n self.api_getuseroptions, 'setuseroption': self.api_setuseroption,\n 'changeplaylist': self.api_changeplaylist, 'downloadcheck': self.\n api_downloadcheck, 'setuseroptionfor': self.api_setuseroptionfor}\n",
"def issecure(self, url):...\n",
"return parse.urlparse(url).scheme == 'https'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Return'"
] |
[
"def __post_init__(self, VAR_5: str) ->None:...\n",
"super().__post_init__()\n",
"VAR_11 = Reference.from_ref(VAR_5)\n",
"VAR_12 = 0\n",
"while VAR_11.class_name in VAR_14:\n",
"VAR_17 = VAR_14[VAR_11.class_name]\n",
"self.reference = VAR_11\n",
"if self.values == VAR_17.values:\n",
"VAR_13 = {v: k for k, v in self.values.items()}\n",
"VAR_12 += 1\n",
"if self.default is not None:\n",
"VAR_11 = Reference.from_ref(f'{VAR_11.class_name}{VAR_12}')\n",
"self.default = f'{self.reference.class_name}.{VAR_13[self.default]}'\n",
"VAR_14[self.reference.class_name] = self\n"
] | [
"def __post_init__(self, title: str) ->None:...\n",
"super().__post_init__()\n",
"reference = Reference.from_ref(title)\n",
"dedup_counter = 0\n",
"while reference.class_name in _existing_enums:\n",
"existing = _existing_enums[reference.class_name]\n",
"self.reference = reference\n",
"if self.values == existing.values:\n",
"inverse_values = {v: k for k, v in self.values.items()}\n",
"dedup_counter += 1\n",
"if self.default is not None:\n",
"reference = Reference.from_ref(f'{reference.class_name}{dedup_counter}')\n",
"self.default = f'{self.reference.class_name}.{inverse_values[self.default]}'\n",
"_existing_enums[self.reference.class_name] = self\n"
] | [
0,
5,
0,
0,
0,
0,
0,
0,
5,
0,
5,
0,
5,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"AugAssign'",
"Condition",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def __init__(self, VAR_0):...\n",
"self.password = VAR_0\n",
"self.vault = VaultLib(VAR_0)\n"
] | [
"def __init__(self, password):...\n",
"self.password = password\n",
"self.vault = VaultLib(password)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"def FUNC_10(self):...\n",
"\"\"\"docstring\"\"\"\n",
"from frappe.utils import has_common\n",
"VAR_17 = [d.role for d in frappe.get_all('Has Role', fields=['role'], VAR_6\n ={'parent': self.name})]\n",
"VAR_18 = get_custom_allowed_roles('report', self.name)\n",
"VAR_17.extend(VAR_18)\n",
"if not VAR_17:\n",
"return True\n",
"if has_common(frappe.get_roles(), VAR_17):\n",
"return True\n"
] | [
"def is_permitted(self):...\n",
"\"\"\"docstring\"\"\"\n",
"from frappe.utils import has_common\n",
"allowed = [d.role for d in frappe.get_all('Has Role', fields=['role'],\n filters={'parent': self.name})]\n",
"custom_roles = get_custom_allowed_roles('report', self.name)\n",
"allowed.extend(custom_roles)\n",
"if not allowed:\n",
"return True\n",
"if has_common(frappe.get_roles(), allowed):\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"ImportFrom'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Return'",
"Condition",
"Return'"
] |
[
"def FUNC_1(VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"return urlutils.file_url(str(VAR_5))\n"
] | [
"def _file_url(path):...\n",
"\"\"\"docstring\"\"\"\n",
"return urlutils.file_url(str(path))\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"async def FUNC_2(self, VAR_6: str, VAR_7: str, VAR_8: str, VAR_4: str,...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_0.debug('Proxying threepid bind request for %s to %s', VAR_8, VAR_4)\n",
"if VAR_2 is None:\n",
"VAR_9 = False\n",
"VAR_36 = {}\n",
"VAR_37 = {'sid': VAR_7, 'client_secret': VAR_6, 'mxid': VAR_8}\n",
"if VAR_9:\n",
"VAR_56 = 'https://%s/_matrix/identity/v2/3pid/bind' % (VAR_4,)\n",
"VAR_56 = 'https://%s/_matrix/identity/api/v1/3pid/bind' % (VAR_4,)\n",
"VAR_36['Authorization'] = FUNC_0(VAR_2)\n",
"VAR_46 = await self.blacklisting_http_client.post_json_get_json(VAR_56,\n VAR_37, VAR_36=headers)\n",
"if e.code != 404 or not VAR_9:\n",
"VAR_0.info('Got 404 when POSTing JSON %s, falling back to v1 URL', VAR_56)\n",
"await self.store.add_user_bound_threepid(user_id=mxid, VAR_19=data['medium'\n ], VAR_20=data['address'], VAR_4=id_server)\n",
"VAR_0.error('3PID bind failed with Matrix error: %r', e)\n",
"VAR_46 = json_decoder.decode(e.msg)\n",
"VAR_38 = await self.bind_threepid(VAR_6, VAR_7, VAR_8, VAR_4, VAR_2, VAR_9=\n False)\n",
"return VAR_46\n",
"return VAR_46\n",
"return VAR_38\n"
] | [
"async def bind_threepid(self, client_secret: str, sid: str, mxid: str,...\n",
"\"\"\"docstring\"\"\"\n",
"logger.debug('Proxying threepid bind request for %s to %s', mxid, id_server)\n",
"if id_access_token is None:\n",
"use_v2 = False\n",
"headers = {}\n",
"bind_data = {'sid': sid, 'client_secret': client_secret, 'mxid': mxid}\n",
"if use_v2:\n",
"bind_url = 'https://%s/_matrix/identity/v2/3pid/bind' % (id_server,)\n",
"bind_url = 'https://%s/_matrix/identity/api/v1/3pid/bind' % (id_server,)\n",
"headers['Authorization'] = create_id_access_token_header(id_access_token)\n",
"data = await self.blacklisting_http_client.post_json_get_json(bind_url,\n bind_data, headers=headers)\n",
"if e.code != 404 or not use_v2:\n",
"logger.info('Got 404 when POSTing JSON %s, falling back to v1 URL', bind_url)\n",
"await self.store.add_user_bound_threepid(user_id=mxid, medium=data['medium'\n ], address=data['address'], id_server=id_server)\n",
"logger.error('3PID bind failed with Matrix error: %r', e)\n",
"data = json_decoder.decode(e.msg)\n",
"res = await self.bind_threepid(client_secret, sid, mxid, id_server,\n id_access_token, use_v2=False)\n",
"return data\n",
"return data\n",
"return res\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Return'",
"Return'",
"Return'"
] |
[
"def FUNC_0(self, VAR_0, VAR_1):...\n",
"VAR_2 = self.setup_test_homeserver(http_client=None, homeserver_to_use=\n GenericWorkerServer)\n",
"return VAR_2\n"
] | [
"def make_homeserver(self, reactor, clock):...\n",
"hs = self.setup_test_homeserver(http_client=None, homeserver_to_use=\n GenericWorkerServer)\n",
"return hs\n"
] | [
0,
4,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"@functools.wraps(VAR_2)...\n",
""
] | [
"@functools.wraps(builder)...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"def FUNC_7(self, VAR_2, VAR_3, VAR_16=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_2.is_ajax():\n",
"VAR_0.debug('Request is Ajax, returning HTTP 403.')\n",
"for VAR_1 in settings.LOGIN_REDIRECT['redirect']:\n",
"VAR_0.error('Error while redirection on not logged in.', exc_info=True)\n",
"VAR_23 = {'url': VAR_3}\n",
"return HttpResponseForbidden()\n",
"if VAR_3 == reverse(VAR_1):\n",
"resolve(VAR_1)\n",
"VAR_0.error('Cannot resolve url %s' % VAR_1)\n",
"VAR_0.debug('Request is not Ajax, redirecting to %s?%s' % (self.login_url,\n urlencode(VAR_23)))\n",
"VAR_3 = FUNC_0(settings.LOGIN_REDIRECT)\n",
"if VAR_3 == VAR_1:\n",
"return HttpResponseRedirect('%s?%s' % (self.login_url, urlencode(VAR_23)))\n",
"VAR_3 = FUNC_0(settings.LOGIN_REDIRECT)\n"
] | [
"def on_not_logged_in(self, request, url, error=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if request.is_ajax():\n",
"logger.debug('Request is Ajax, returning HTTP 403.')\n",
"for lookup_view in settings.LOGIN_REDIRECT['redirect']:\n",
"logger.error('Error while redirection on not logged in.', exc_info=True)\n",
"args = {'url': url}\n",
"return HttpResponseForbidden()\n",
"if url == reverse(lookup_view):\n",
"resolve(lookup_view)\n",
"logger.error('Cannot resolve url %s' % lookup_view)\n",
"logger.debug('Request is not Ajax, redirecting to %s?%s' % (self.login_url,\n urlencode(args)))\n",
"url = parse_url(settings.LOGIN_REDIRECT)\n",
"if url == lookup_view:\n",
"return HttpResponseRedirect('%s?%s' % (self.login_url, urlencode(args)))\n",
"url = parse_url(settings.LOGIN_REDIRECT)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'",
"For",
"Expr'",
"Assign'",
"Return'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Assign'"
] |
[
"@def_function.function...\n",
"if VAR_55:\n",
"return VAR_53 + VAR_54\n",
"return VAR_53 * VAR_54\n"
] | [
"@def_function.function...\n",
"if c:\n",
"return a + b\n",
"return a * b\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Return'",
"Return'"
] |
[
"@login_required()...\n",
"VAR_13 = int(VAR_13)\n",
"VAR_14 = int(VAR_14)\n",
"VAR_65 = VAR_8.getQueryService().findByQuery(\n 'select shape from Roi as roi join roi.shapes as shape where roi.id = %d and shape.id = %d'\n % (VAR_13, VAR_14), None)\n",
"VAR_1.debug('Shape: %r' % VAR_65)\n",
"if VAR_65 is None:\n",
"VAR_1.debug('No such shape: %r' % VAR_14)\n",
"return JsonResponse(shapeMarshal(VAR_65))\n"
] | [
"@login_required()...\n",
"roiId = int(roiId)\n",
"shapeId = int(shapeId)\n",
"shape = conn.getQueryService().findByQuery(\n 'select shape from Roi as roi join roi.shapes as shape where roi.id = %d and shape.id = %d'\n % (roiId, shapeId), None)\n",
"logger.debug('Shape: %r' % shape)\n",
"if shape is None:\n",
"logger.debug('No such shape: %r' % shapeId)\n",
"return JsonResponse(shapeMarshal(shape))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Return'"
] |
[
"@VAR_0.route('/')...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_44 = {}\n",
"VAR_45 = []\n",
"VAR_46 = []\n",
"VAR_44 = FUNC_58('/internal/jobs/statistics', 'get')\n",
"flash(str(err), 'danger')\n",
"return render_template('dashboard.html', title='Dashboard', VAR_44=\n quick_statistics, VAR_45=recent_jobs_info, VAR_46=pinned_jobs_info,\n VAR_5=status_color)\n",
"VAR_45 = FUNC_58('/internal/jobs/recent', 'get')\n",
"VAR_72 = current_user\n",
"VAR_46 = FUNC_58('/internal/jobs', 'get', VAR_73={'ids': u.pinned_jobs if u\n .pinned_jobs is not None else json.dumps([]), 'auto-validate-ids': True})\n"
] | [
"@gui.route('/')...\n",
"\"\"\"docstring\"\"\"\n",
"quick_statistics = {}\n",
"recent_jobs_info = []\n",
"pinned_jobs_info = []\n",
"quick_statistics = query_internal_api('/internal/jobs/statistics', 'get')\n",
"flash(str(err), 'danger')\n",
"return render_template('dashboard.html', title='Dashboard',\n quick_statistics=quick_statistics, recent_jobs_info=recent_jobs_info,\n pinned_jobs_info=pinned_jobs_info, status_color=status_color)\n",
"recent_jobs_info = query_internal_api('/internal/jobs/recent', 'get')\n",
"u = current_user\n",
"pinned_jobs_info = query_internal_api('/internal/jobs', 'get', params={\n 'ids': u.pinned_jobs if u.pinned_jobs is not None else json.dumps([]),\n 'auto-validate-ids': True})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_27(self):...\n",
"return self._has_role(constants.ROLE_PASSWD)\n"
] | [
"def role_passwd(self):...\n",
"return self._has_role(constants.ROLE_PASSWD)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_33(VAR_19, VAR_16):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_67 = 'identifier-type-'\n",
"VAR_68 = 'identifier-val-'\n",
"VAR_69 = []\n",
"for type_key, type_value in VAR_19.items():\n",
"if not type_key.startswith(VAR_67):\n",
"return VAR_69\n",
"VAR_95 = VAR_68 + type_key[len(VAR_67):]\n",
"if VAR_95 not in VAR_19.keys():\n",
"VAR_69.append(db.Identifiers(VAR_19[VAR_95], type_value, VAR_16.id))\n"
] | [
"def identifier_list(to_save, book):...\n",
"\"\"\"docstring\"\"\"\n",
"id_type_prefix = 'identifier-type-'\n",
"id_val_prefix = 'identifier-val-'\n",
"result = []\n",
"for type_key, type_value in to_save.items():\n",
"if not type_key.startswith(id_type_prefix):\n",
"return result\n",
"val_key = id_val_prefix + type_key[len(id_type_prefix):]\n",
"if val_key not in to_save.keys():\n",
"result.append(db.Identifiers(to_save[val_key], type_value, book.id))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Return'",
"Assign'",
"Condition",
"Expr'"
] |
[
"def FUNC_45(VAR_16):...\n",
"VAR_83 = False\n",
"VAR_83 |= FUNC_41(VAR_16, 'config_ldap_provider_url')\n",
"VAR_83 |= FUNC_38(VAR_16, 'config_ldap_port')\n",
"VAR_83 |= FUNC_38(VAR_16, 'config_ldap_authentication')\n",
"VAR_83 |= FUNC_41(VAR_16, 'config_ldap_dn')\n",
"VAR_83 |= FUNC_41(VAR_16, 'config_ldap_serv_username')\n",
"VAR_83 |= FUNC_41(VAR_16, 'config_ldap_user_object')\n",
"VAR_83 |= FUNC_41(VAR_16, 'config_ldap_group_object_filter')\n",
"VAR_83 |= FUNC_41(VAR_16, 'config_ldap_group_members_field')\n",
"VAR_83 |= FUNC_41(VAR_16, 'config_ldap_member_user_object')\n",
"VAR_83 |= FUNC_39(VAR_16, 'config_ldap_openldap')\n",
"VAR_83 |= FUNC_38(VAR_16, 'config_ldap_encryption')\n",
"VAR_83 |= FUNC_41(VAR_16, 'config_ldap_cacert_path')\n",
"VAR_83 |= FUNC_41(VAR_16, 'config_ldap_cert_path')\n",
"VAR_83 |= FUNC_41(VAR_16, 'config_ldap_key_path')\n",
"FUNC_41(VAR_16, 'config_ldap_group_name')\n",
"if VAR_16.get('config_ldap_serv_password', '') != '':\n",
"VAR_83 |= 1\n",
"config.save()\n",
"config.set_from_dictionary(VAR_16, 'config_ldap_serv_password', base64.\n b64encode, encode='UTF-8')\n",
"if not config.config_ldap_provider_url or not config.config_ldap_port or not config.config_ldap_dn or not config.config_ldap_user_object:\n",
"return VAR_83, FUNC_50(_(\n 'Please Enter a LDAP Provider, Port, DN and User Object Identifier'))\n",
"if config.config_ldap_authentication > constants.LDAP_AUTH_ANONYMOUS:\n",
"if config.config_ldap_authentication > constants.LDAP_AUTH_UNAUTHENTICATE:\n",
"if config.config_ldap_group_object_filter:\n",
"if not config.config_ldap_serv_username or not bool(config.\n",
"if not config.config_ldap_serv_username:\n",
"if config.config_ldap_group_object_filter.count('%s') != 1:\n",
"if config.config_ldap_user_object.count('%s') != 1:\n",
"return VAR_83, FUNC_50(_('Please Enter a LDAP Service Account and Password'))\n",
"return VAR_83, FUNC_50(_('Please Enter a LDAP Service Account'))\n",
"return VAR_83, FUNC_50(_(\n 'LDAP Group Object Filter Needs to Have One \"%s\" Format Identifier'))\n",
"if config.config_ldap_group_object_filter.count('('\n",
"return VAR_83, FUNC_50(_(\n 'LDAP User Object Filter needs to Have One \"%s\" Format Identifier'))\n",
"if config.config_ldap_user_object.count('('\n",
"return VAR_83, FUNC_50(_('LDAP Group Object Filter Has Unmatched Parenthesis'))\n",
"return VAR_83, FUNC_50(_('LDAP User Object Filter Has Unmatched Parenthesis'))\n",
"if VAR_16.get('ldap_import_user_filter') == '0':\n",
"config.config_ldap_member_user_object = ''\n",
"if config.config_ldap_member_user_object.count('%s') != 1:\n",
"if config.config_ldap_cacert_path or config.config_ldap_cert_path or config.config_ldap_key_path:\n",
"return VAR_83, FUNC_50(_(\n 'LDAP Member User Filter needs to Have One \"%s\" Format Identifier'))\n",
"if config.config_ldap_member_user_object.count('('\n",
"if not (os.path.isfile(config.config_ldap_cacert_path) and os.path.isfile(\n",
"return VAR_83, None\n",
"return VAR_83, FUNC_50(_('LDAP Member User Filter Has Unmatched Parenthesis'))\n",
"return VAR_83, FUNC_50(_(\n 'LDAP CACertificate, Certificate or Key Location is not Valid, Please Enter Correct Path'\n ))\n"
] | [
"def _configuration_ldap_helper(to_save):...\n",
"reboot_required = False\n",
"reboot_required |= _config_string(to_save, 'config_ldap_provider_url')\n",
"reboot_required |= _config_int(to_save, 'config_ldap_port')\n",
"reboot_required |= _config_int(to_save, 'config_ldap_authentication')\n",
"reboot_required |= _config_string(to_save, 'config_ldap_dn')\n",
"reboot_required |= _config_string(to_save, 'config_ldap_serv_username')\n",
"reboot_required |= _config_string(to_save, 'config_ldap_user_object')\n",
"reboot_required |= _config_string(to_save, 'config_ldap_group_object_filter')\n",
"reboot_required |= _config_string(to_save, 'config_ldap_group_members_field')\n",
"reboot_required |= _config_string(to_save, 'config_ldap_member_user_object')\n",
"reboot_required |= _config_checkbox(to_save, 'config_ldap_openldap')\n",
"reboot_required |= _config_int(to_save, 'config_ldap_encryption')\n",
"reboot_required |= _config_string(to_save, 'config_ldap_cacert_path')\n",
"reboot_required |= _config_string(to_save, 'config_ldap_cert_path')\n",
"reboot_required |= _config_string(to_save, 'config_ldap_key_path')\n",
"_config_string(to_save, 'config_ldap_group_name')\n",
"if to_save.get('config_ldap_serv_password', '') != '':\n",
"reboot_required |= 1\n",
"config.save()\n",
"config.set_from_dictionary(to_save, 'config_ldap_serv_password', base64.\n b64encode, encode='UTF-8')\n",
"if not config.config_ldap_provider_url or not config.config_ldap_port or not config.config_ldap_dn or not config.config_ldap_user_object:\n",
"return reboot_required, _configuration_result(_(\n 'Please Enter a LDAP Provider, Port, DN and User Object Identifier'))\n",
"if config.config_ldap_authentication > constants.LDAP_AUTH_ANONYMOUS:\n",
"if config.config_ldap_authentication > constants.LDAP_AUTH_UNAUTHENTICATE:\n",
"if config.config_ldap_group_object_filter:\n",
"if not config.config_ldap_serv_username or not bool(config.\n",
"if not config.config_ldap_serv_username:\n",
"if config.config_ldap_group_object_filter.count('%s') != 1:\n",
"if config.config_ldap_user_object.count('%s') != 1:\n",
"return reboot_required, _configuration_result(_(\n 'Please Enter a LDAP Service Account and Password'))\n",
"return reboot_required, _configuration_result(_(\n 'Please Enter a LDAP Service Account'))\n",
"return reboot_required, _configuration_result(_(\n 'LDAP Group Object Filter Needs to Have One \"%s\" Format Identifier'))\n",
"if config.config_ldap_group_object_filter.count('('\n",
"return reboot_required, _configuration_result(_(\n 'LDAP User Object Filter needs to Have One \"%s\" Format Identifier'))\n",
"if config.config_ldap_user_object.count('('\n",
"return reboot_required, _configuration_result(_(\n 'LDAP Group Object Filter Has Unmatched Parenthesis'))\n",
"return reboot_required, _configuration_result(_(\n 'LDAP User Object Filter Has Unmatched Parenthesis'))\n",
"if to_save.get('ldap_import_user_filter') == '0':\n",
"config.config_ldap_member_user_object = ''\n",
"if config.config_ldap_member_user_object.count('%s') != 1:\n",
"if config.config_ldap_cacert_path or config.config_ldap_cert_path or config.config_ldap_key_path:\n",
"return reboot_required, _configuration_result(_(\n 'LDAP Member User Filter needs to Have One \"%s\" Format Identifier'))\n",
"if config.config_ldap_member_user_object.count('('\n",
"if not (os.path.isfile(config.config_ldap_cacert_path) and os.path.isfile(\n",
"return reboot_required, None\n",
"return reboot_required, _configuration_result(_(\n 'LDAP Member User Filter Has Unmatched Parenthesis'))\n",
"return reboot_required, _configuration_result(_(\n 'LDAP CACertificate, Certificate or Key Location is not Valid, Please Enter Correct Path'\n ))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"Expr'",
"Condition",
"AugAssign'",
"Expr'",
"Expr'",
"Condition",
"Return'",
"Condition",
"Condition",
"Condition",
"Condition",
"Condition",
"Condition",
"Condition",
"Return'",
"Return'",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Return'",
"Condition",
"Condition",
"Return'",
"Return'",
"Return'"
] |
[
"def FUNC_33(self):...\n",
"VAR_11 = self.folder.t\n",
"VAR_11.write('<p tal:define=\"p context/__class__\" />')\n",
"VAR_11()\n",
"VAR_11.write('<p tal:define=\"p nocall: random/_itertools/repeat\"/>')\n",
"VAR_11()\n",
"VAR_11.write('<p tal:content=\"random/_itertools/repeat/foobar\"/>')\n",
"VAR_11()\n"
] | [
"def test_underscore_traversal(self):...\n",
"t = self.folder.t\n",
"t.write('<p tal:define=\"p context/__class__\" />')\n",
"t()\n",
"t.write('<p tal:define=\"p nocall: random/_itertools/repeat\"/>')\n",
"t()\n",
"t.write('<p tal:content=\"random/_itertools/repeat/foobar\"/>')\n",
"t()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"return self.repo\n"
] | [
"def get_repo_url(self):...\n",
"return self.repo\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_4(VAR_16: Optional[Mapping[str, Union[str, List[str]]]]) ->bytes:...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_16 is None:\n",
"return b''\n",
"VAR_38 = {}\n",
"for VAR_50, VAR_57 in VAR_16.items():\n",
"if isinstance(VAR_57, str):\n",
"VAR_39 = urllib.parse.urlencode(VAR_38, True)\n",
"VAR_57 = [VAR_57]\n",
"VAR_38[VAR_50] = [v.encode('utf8') for v in VAR_57]\n",
"return VAR_39.encode('utf8')\n"
] | [
"def encode_query_args(args: Optional[Mapping[str, Union[str, List[str]]]]...\n",
"\"\"\"docstring\"\"\"\n",
"if args is None:\n",
"return b''\n",
"encoded_args = {}\n",
"for k, vs in args.items():\n",
"if isinstance(vs, str):\n",
"query_str = urllib.parse.urlencode(encoded_args, True)\n",
"vs = [vs]\n",
"encoded_args[k] = [v.encode('utf8') for v in vs]\n",
"return query_str.encode('utf8')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"For",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_66(self):...\n",
"VAR_3 = self.client.post('/admin/auth/user/%s/password/' % self.admin.pk, {\n 'password1': 'password1', 'password2': 'password1'})\n",
"self.assertRedirects(VAR_3, '/admin/auth/user/%s/' % self.admin.pk)\n",
"VAR_28 = LogEntry.objects.latest('id')\n",
"self.assertEqual(VAR_28.change_message, 'Changed password.')\n",
"self.logout()\n",
"self.login(VAR_2='password1')\n"
] | [
"def test_user_change_password(self):...\n",
"response = self.client.post('/admin/auth/user/%s/password/' % self.admin.pk,\n {'password1': 'password1', 'password2': 'password1'})\n",
"self.assertRedirects(response, '/admin/auth/user/%s/' % self.admin.pk)\n",
"row = LogEntry.objects.latest('id')\n",
"self.assertEqual(row.change_message, 'Changed password.')\n",
"self.logout()\n",
"self.login(password='password1')\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_14(VAR_13, VAR_7):...\n",
"VAR_82 = VAR_1.query(CLASS_14).filter(CLASS_14.user_id == VAR_7).filter(\n CLASS_14.book_id == VAR_13).first()\n",
"if not VAR_82:\n",
"VAR_90 = CLASS_14(VAR_7=user_id, VAR_13=book_id)\n",
"VAR_1.add(VAR_90)\n",
"VAR_1.commit()\n",
"VAR_1.rollback()\n"
] | [
"def update_download(book_id, user_id):...\n",
"check = session.query(Downloads).filter(Downloads.user_id == user_id).filter(\n Downloads.book_id == book_id).first()\n",
"if not check:\n",
"new_download = Downloads(user_id=user_id, book_id=book_id)\n",
"session.add(new_download)\n",
"session.commit()\n",
"session.rollback()\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
""
] | [
""
] | [
0
] | [
"Condition"
] |
[
"@VAR_0.route('/api/jobs/<int:job_id>', methods=['DELETE'])...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_98 = FUNC_58(f'/internal/jobs/{VAR_9}', 'delete')\n",
"return jsonify({'success': False, 'message': str(err)}), 400\n",
"return jsonify(VAR_98)\n"
] | [
"@gui.route('/api/jobs/<int:job_id>', methods=['DELETE'])...\n",
"\"\"\"docstring\"\"\"\n",
"response_info = query_internal_api(f'/internal/jobs/{job_id}', 'delete')\n",
"return jsonify({'success': False, 'message': str(err)}), 400\n",
"return jsonify(response_info)\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Return'",
"Return'"
] |
[
"def FUNC_24(VAR_14, VAR_16, VAR_31, VAR_19):...\n",
"VAR_32 = calibre_db.session.query(db.Custom_Columns).filter(db.\n Custom_Columns.datatype.notin_(db.cc_exceptions)).filter(db.\n Custom_Columns.id == VAR_31).all()\n",
"return FUNC_26(VAR_14, VAR_16, VAR_19, VAR_32)\n"
] | [
"def edit_single_cc_data(book_id, book, column_id, to_save):...\n",
"cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.\n datatype.notin_(db.cc_exceptions)).filter(db.Custom_Columns.id == column_id\n ).all()\n",
"return edit_cc_data(book_id, book, to_save, cc)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_14():...\n",
"VAR_47 = f.readline()\n",
"VAR_47 = '/'.join(VAR_47.split('/')[:-2])\n",
"VAR_35 = []\n",
"for c in VAR_34:\n",
"VAR_35.append(FUNC_10(VAR_47 + '/' + c.strip()))\n",
"return VAR_35\n"
] | [
"def get_class_path():...\n",
"dataset_path = f.readline()\n",
"dataset_path = '/'.join(dataset_path.split('/')[:-2])\n",
"class_paths = []\n",
"for c in classes_list:\n",
"class_paths.append(_get_obj_absolute_path(dataset_path + '/' + c.strip()))\n",
"return class_paths\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Return'"
] |
[
"def FUNC_29(self):...\n",
"VAR_5 = self._makeContext()\n",
"VAR_5.evaluate('nocall:open')\n"
] | [
"def test_open_in_path_expr(self):...\n",
"ec = self._makeContext()\n",
"ec.evaluate('nocall:open')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"@pytest.mark.linux...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_17 = VAR_2 / 'home'\n",
"VAR_17.ensure(dir=True)\n",
"VAR_8['HOME'] = str(VAR_17)\n",
"assert VAR_8['XDG_CONFIG_HOME'] == VAR_2 / 'config'\n",
"(VAR_2 / 'config' / 'user-dirs.dirs').write('XDG_DOWNLOAD_DIR=\"relative\"',\n ensure=True)\n",
"VAR_6.start(FUNC_0(VAR_4.config), VAR_16=temp_basedir_env)\n",
"VAR_6.set_setting('downloads.location.prompt', 'false')\n",
"VAR_7 = 'http://localhost:{port}/data/downloads/download.bin'.format(VAR_24\n =server.port)\n",
"VAR_6.send_cmd(':download {}'.format(VAR_7))\n",
"VAR_18 = VAR_6.wait_for(loglevel=logging.ERROR, category='message', message\n =\n 'XDG_DOWNLOAD_DIR points to a relative path - please check your ~/.config/user-dirs.dirs. The download is saved in your home directory.'\n )\n",
"VAR_18.expected = True\n",
"VAR_6.wait_for(category='downloads', message='Download download.bin finished')\n",
"assert (VAR_17 / 'download.bin').exists()\n"
] | [
"@pytest.mark.linux...\n",
"\"\"\"docstring\"\"\"\n",
"home = tmpdir / 'home'\n",
"home.ensure(dir=True)\n",
"temp_basedir_env['HOME'] = str(home)\n",
"assert temp_basedir_env['XDG_CONFIG_HOME'] == tmpdir / 'config'\n",
"(tmpdir / 'config' / 'user-dirs.dirs').write('XDG_DOWNLOAD_DIR=\"relative\"',\n ensure=True)\n",
"quteproc_new.start(_base_args(request.config), env=temp_basedir_env)\n",
"quteproc_new.set_setting('downloads.location.prompt', 'false')\n",
"url = 'http://localhost:{port}/data/downloads/download.bin'.format(port=\n server.port)\n",
"quteproc_new.send_cmd(':download {}'.format(url))\n",
"line = quteproc_new.wait_for(loglevel=logging.ERROR, category='message',\n message=\n 'XDG_DOWNLOAD_DIR points to a relative path - please check your ~/.config/user-dirs.dirs. The download is saved in your home directory.'\n )\n",
"line.expected = True\n",
"quteproc_new.wait_for(category='downloads', message=\n 'Download download.bin finished')\n",
"assert (home / 'download.bin').exists()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Expr'",
"Assign'",
"Assert'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assert'"
] |
[
"@VAR_2.route('/ajax/deleterestriction/<int:res_type>', methods=['POST'])...\n",
"return FUNC_33(VAR_8, 0)\n"
] | [
"@admi.route('/ajax/deleterestriction/<int:res_type>', methods=['POST'])...\n",
"return delete_restriction(res_type, 0)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"@VAR_2.route('/ajax/table_settings', methods=['POST'])...\n",
"VAR_87.view_settings['table'] = json.loads(request.data)\n",
"VAR_3.error('Invalid request received: %r ', request)\n",
"return ''\n",
"flag_modified(VAR_87, 'view_settings')\n",
"ub.session.commit()\n",
"return 'Invalid request', 400\n"
] | [
"@web.route('/ajax/table_settings', methods=['POST'])...\n",
"current_user.view_settings['table'] = json.loads(request.data)\n",
"log.error('Invalid request received: %r ', request)\n",
"return ''\n",
"flag_modified(current_user, 'view_settings')\n",
"ub.session.commit()\n",
"return 'Invalid request', 400\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Expr'",
"Return'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_29(self):...\n",
"return [entry.to_dict() for entry in self.model.randomMusicEntries(50)]\n"
] | [
"def api_generaterandomplaylist(self):...\n",
"return [entry.to_dict() for entry in self.model.randomMusicEntries(50)]\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_8(self, VAR_8, *VAR_9, **VAR_10):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.isAuthorized():\n",
"VAR_53.session.release_lock()\n",
"if cherry.config['media.transcode'] and VAR_9:\n",
"VAR_93 = VAR_10.pop('bitrate', None) or None\n",
"if VAR_93:\n",
"VAR_9 = os.path.sep.join(VAR_9)\n",
"VAR_93 = max(0, int(VAR_93)) or None\n",
"if sys.version_info < (3, 0):\n",
"VAR_9 = VAR_9.decode('utf-8')\n",
"VAR_9 = codecs.decode(codecs.encode(VAR_9, 'latin1'), 'utf-8')\n",
"VAR_94 = os.path.join(cherry.config['media.basedir'], VAR_9)\n",
"VAR_95 = int(VAR_10.pop('starttime', 0))\n",
"VAR_96 = audiotranscode.AudioTranscode()\n",
"VAR_97 = audiotranscode.mime_type(VAR_8)\n",
"VAR_53.response.headers['Content-Type'] = VAR_97\n",
"return VAR_96.transcode_stream(VAR_94, VAR_8, VAR_93=bitrate, VAR_95=starttime)\n"
] | [
"def trans(self, newformat, *path, **params):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.isAuthorized():\n",
"cherrypy.session.release_lock()\n",
"if cherry.config['media.transcode'] and path:\n",
"bitrate = params.pop('bitrate', None) or None\n",
"if bitrate:\n",
"path = os.path.sep.join(path)\n",
"bitrate = max(0, int(bitrate)) or None\n",
"if sys.version_info < (3, 0):\n",
"path = path.decode('utf-8')\n",
"path = codecs.decode(codecs.encode(path, 'latin1'), 'utf-8')\n",
"fullpath = os.path.join(cherry.config['media.basedir'], path)\n",
"starttime = int(params.pop('starttime', 0))\n",
"transcoder = audiotranscode.AudioTranscode()\n",
"mimetype = audiotranscode.mime_type(newformat)\n",
"cherrypy.response.headers['Content-Type'] = mimetype\n",
"return transcoder.transcode_stream(fullpath, newformat, bitrate=bitrate,\n starttime=starttime)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"async def FUNC_6(VAR_6):...\n",
""
] | [
"async def middleware_handler(request):...\n",
""
] | [
0,
0
] | [
"AsyncFunctionDef'",
"Condition"
] |
[
"async def FUNC_22(self, VAR_11: str, VAR_12: str) ->List[str]:...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1 = await self.store.get_event(VAR_12, check_room_id=room_id)\n",
"VAR_100 = await self.state_store.get_state_groups_ids(VAR_11, [VAR_12])\n",
"if VAR_100:\n",
"VAR_166, VAR_2 = list(VAR_100.items()).pop()\n",
"return []\n",
"VAR_167 = VAR_2\n",
"if VAR_1.is_state():\n",
"if 'replaces_state' in VAR_1.unsigned:\n",
"return list(VAR_167.values())\n",
"VAR_198 = VAR_1.unsigned['replaces_state']\n",
"VAR_167.pop((VAR_1.type, VAR_1.state_key), None)\n",
"if VAR_198 != VAR_1.event_id:\n",
"VAR_167[VAR_1.type, VAR_1.state_key] = VAR_198\n"
] | [
"async def get_state_ids_for_pdu(self, room_id: str, event_id: str) ->List[str]:...\n",
"\"\"\"docstring\"\"\"\n",
"event = await self.store.get_event(event_id, check_room_id=room_id)\n",
"state_groups = await self.state_store.get_state_groups_ids(room_id, [event_id])\n",
"if state_groups:\n",
"_, state = list(state_groups.items()).pop()\n",
"return []\n",
"results = state\n",
"if event.is_state():\n",
"if 'replaces_state' in event.unsigned:\n",
"return list(results.values())\n",
"prev_id = event.unsigned['replaces_state']\n",
"results.pop((event.type, event.state_key), None)\n",
"if prev_id != event.event_id:\n",
"results[event.type, event.state_key] = prev_id\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Condition",
"Return'",
"Assign'",
"Expr'",
"Condition",
"Assign'"
] |
[
"import json\n",
"from mock import ANY, Mock, call\n",
"from twisted.internet import defer\n",
"from synapse.api.errors import AuthError\n",
"from synapse.types import UserID, create_requester\n",
"from tests import unittest\n",
"from tests.test_utils import make_awaitable\n",
"from tests.unittest import override_config\n",
"from tests.utils import register_federation_servlets\n",
"VAR_0 = UserID.from_string('@apple:test')\n",
"VAR_1 = UserID.from_string('@banana:test')\n",
"VAR_2 = UserID.from_string('@onion:farm')\n",
"VAR_3 = 'a-room'\n",
"def FUNC_0(VAR_4, VAR_5, VAR_6='test'):...\n",
"return {'origin': VAR_6, 'origin_server_ts': 1000000, 'pdus': [], 'edus': [\n {'edu_type': VAR_4, 'content': VAR_5}]}\n"
] | [
"import json\n",
"from mock import ANY, Mock, call\n",
"from twisted.internet import defer\n",
"from synapse.api.errors import AuthError\n",
"from synapse.types import UserID, create_requester\n",
"from tests import unittest\n",
"from tests.test_utils import make_awaitable\n",
"from tests.unittest import override_config\n",
"from tests.utils import register_federation_servlets\n",
"U_APPLE = UserID.from_string('@apple:test')\n",
"U_BANANA = UserID.from_string('@banana:test')\n",
"U_ONION = UserID.from_string('@onion:farm')\n",
"ROOM_ID = 'a-room'\n",
"def _expect_edu_transaction(edu_type, content, origin='test'):...\n",
"return {'origin': origin, 'origin_server_ts': 1000000, 'pdus': [], 'edus':\n [{'edu_type': edu_type, 'content': content}]}\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_25(VAR_5, VAR_17):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_78 = {'success': False}\n",
"for VAR_12, VAR_82 in VAR_17.items():\n",
"if VAR_12 == 'orphaned':\n",
"VAR_78['success'] = True\n",
"for VAR_13, children in VAR_82.items():\n",
"return JsonResponse(VAR_78)\n",
"for VAR_14, VAR_15 in children.items():\n",
"VAR_386 = FUNC_20(VAR_5, VAR_12, VAR_13, VAR_14, VAR_15)\n",
"if VAR_386 is None:\n",
"VAR_387, VAR_240 = VAR_386\n",
"VAR_388 = [VAR_71.id.val for VAR_71 in VAR_240]\n",
"VAR_0.info('api_link: Deleting %s links' % len(VAR_388))\n",
"VAR_5.deleteObjects(VAR_387, VAR_388, wait=True)\n",
"VAR_387, VAR_389 = FUNC_20(VAR_5, VAR_12, None, VAR_14, VAR_15)\n",
"for rl in VAR_389:\n",
"VAR_359 = rl.parent.id.val\n",
"VAR_422 = rl.child.id.val\n",
"if VAR_359 == int(VAR_13):\n",
"if VAR_12 not in VAR_78:\n",
"VAR_78[VAR_12] = {}\n",
"if VAR_359 not in VAR_78[VAR_12]:\n",
"VAR_78[VAR_12][VAR_359] = {VAR_14: []}\n",
"VAR_78[VAR_12][VAR_359][VAR_14].append(VAR_422)\n"
] | [
"def _api_links_DELETE(conn, json_data):...\n",
"\"\"\"docstring\"\"\"\n",
"response = {'success': False}\n",
"for parent_type, parents in json_data.items():\n",
"if parent_type == 'orphaned':\n",
"response['success'] = True\n",
"for parent_id, children in parents.items():\n",
"return JsonResponse(response)\n",
"for child_type, child_ids in children.items():\n",
"objLnks = get_object_links(conn, parent_type, parent_id, child_type, child_ids)\n",
"if objLnks is None:\n",
"linkType, links = objLnks\n",
"linkIds = [r.id.val for r in links]\n",
"logger.info('api_link: Deleting %s links' % len(linkIds))\n",
"conn.deleteObjects(linkType, linkIds, wait=True)\n",
"linkType, remainingLinks = get_object_links(conn, parent_type, None,\n child_type, child_ids)\n",
"for rl in remainingLinks:\n",
"pid = rl.parent.id.val\n",
"cid = rl.child.id.val\n",
"if pid == int(parent_id):\n",
"if parent_type not in response:\n",
"response[parent_type] = {}\n",
"if pid not in response[parent_type]:\n",
"response[parent_type][pid] = {child_type: []}\n",
"response[parent_type][pid][child_type].append(cid)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Condition",
"Assign'",
"For",
"Return'",
"For",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Expr'"
] |
[
"def FUNC_10():...\n",
"\"\"\"docstring\"\"\"\n",
"if not VAR_29:\n",
"from frappe.utils.redis_wrapper import RedisWrapper\n",
"return VAR_29\n",
"VAR_29 = RedisWrapper.from_url(VAR_13.get('redis_cache') or\n 'redis://localhost:11311')\n"
] | [
"def cache():...\n",
"\"\"\"docstring\"\"\"\n",
"if not redis_server:\n",
"from frappe.utils.redis_wrapper import RedisWrapper\n",
"return redis_server\n",
"redis_server = RedisWrapper.from_url(conf.get('redis_cache') or\n 'redis://localhost:11311')\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"ImportFrom'",
"Return'",
"Assign'"
] |
[
"def FUNC_54(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if getattr(self.meta, 'track_seen', False):\n",
"frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps([frappe.\n session.user]), VAR_27=False)\n"
] | [
"def reset_seen(self):...\n",
"\"\"\"docstring\"\"\"\n",
"if getattr(self.meta, 'track_seen', False):\n",
"frappe.db.set_value(self.doctype, self.name, '_seen', json.dumps([frappe.\n session.user]), update_modified=False)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'"
] |
[
"def FUNC_9():...\n",
"\"\"\"docstring\"\"\"\n",
"from django.contrib.auth.models import User\n",
"return User.objects.filter(is_superuser=True)[0]\n"
] | [
"def get_admin():...\n",
"\"\"\"docstring\"\"\"\n",
"from django.contrib.auth.models import User\n",
"return User.objects.filter(is_superuser=True)[0]\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"ImportFrom'",
"Return'"
] |
[
"def FUNC_28(self):...\n",
"VAR_5 = self._makeContext()\n",
"self.assertIs(VAR_5.evaluate('True'), True)\n",
"self.assertIs(VAR_5.evaluate('False'), False)\n",
"self.assertIs(VAR_5.evaluate('nocall: test'), safe_builtins['test'])\n"
] | [
"def test_builtin_in_path_expr(self):...\n",
"ec = self._makeContext()\n",
"self.assertIs(ec.evaluate('True'), True)\n",
"self.assertIs(ec.evaluate('False'), False)\n",
"self.assertIs(ec.evaluate('nocall: test'), safe_builtins['test'])\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_1(self):...\n",
"super(CLASS_0, self).setUp()\n",
"if platform.system() == 'Windows':\n",
"self.skipTest('Skipping failing tests on Windows.')\n"
] | [
"def setUp(self):...\n",
"super(SavedModelCLITestCase, self).setUp()\n",
"if platform.system() == 'Windows':\n",
"self.skipTest('Skipping failing tests on Windows.')\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"For",
"Expr'"
] |
[
"@VAR_0.route('/ajax/delete/<int:book_id>')...\n",
"return Response(FUNC_13(VAR_14, '', True), mimetype='application/json')\n"
] | [
"@editbook.route('/ajax/delete/<int:book_id>')...\n",
"return Response(delete_book_from_table(book_id, '', True), mimetype=\n 'application/json')\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_21(VAR_20):...\n",
"\"\"\"docstring\"\"\"\n",
"from tensorflow.python.compiler.tensorrt import trt_convert as trt\n",
"if not VAR_20.convert_tf1_model:\n",
"VAR_73 = trt.DEFAULT_TRT_CONVERSION_PARAMS._replace(max_workspace_size_bytes\n =args.max_workspace_size_bytes, precision_mode=args.precision_mode,\n minimum_segment_size=args.minimum_segment_size)\n",
"trt.create_inference_graph(None, None, max_batch_size=1,\n max_workspace_size_bytes=args.max_workspace_size_bytes, precision_mode=\n args.precision_mode, minimum_segment_size=args.minimum_segment_size,\n is_dynamic_op=True, input_saved_model_dir=args.dir,\n input_saved_model_tags=args.tag_set.split(','), output_saved_model_dir=\n args.output_dir)\n",
"VAR_74 = trt.TrtGraphConverterV2(input_saved_model_dir=args.dir,\n input_saved_model_tags=args.tag_set.split(','), conversion_params=params)\n",
"VAR_74.convert()\n",
"VAR_74.save(output_saved_model_dir=args.output_dir)\n"
] | [
"def convert_with_tensorrt(args):...\n",
"\"\"\"docstring\"\"\"\n",
"from tensorflow.python.compiler.tensorrt import trt_convert as trt\n",
"if not args.convert_tf1_model:\n",
"params = trt.DEFAULT_TRT_CONVERSION_PARAMS._replace(max_workspace_size_bytes\n =args.max_workspace_size_bytes, precision_mode=args.precision_mode,\n minimum_segment_size=args.minimum_segment_size)\n",
"trt.create_inference_graph(None, None, max_batch_size=1,\n max_workspace_size_bytes=args.max_workspace_size_bytes, precision_mode=\n args.precision_mode, minimum_segment_size=args.minimum_segment_size,\n is_dynamic_op=True, input_saved_model_dir=args.dir,\n input_saved_model_tags=args.tag_set.split(','), output_saved_model_dir=\n args.output_dir)\n",
"converter = trt.TrtGraphConverterV2(input_saved_model_dir=args.dir,\n input_saved_model_tags=args.tag_set.split(','), conversion_params=params)\n",
"converter.convert()\n",
"converter.save(output_saved_model_dir=args.output_dir)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"ImportFrom'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"@staticmethod...\n",
"return 'admin/projects', f'project-{VAR_39.name}'\n"
] | [
"@staticmethod...\n",
"return 'admin/projects', f'project-{field.name}'\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_6() ->Optional[Text]:...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_66 = check_output(['git', 'remote', 'get-url', 'origin'], stderr=DEVNULL)\n",
"return None\n",
"return hashlib.sha256(VAR_66).hexdigest()\n"
] | [
"def project_fingerprint() ->Optional[Text]:...\n",
"\"\"\"docstring\"\"\"\n",
"remote = check_output(['git', 'remote', 'get-url', 'origin'], stderr=DEVNULL)\n",
"return None\n",
"return hashlib.sha256(remote).hexdigest()\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'",
"Return'"
] |
[
"def FUNC_9(self):...\n",
"VAR_5 = {'not_types': ['m.room.message', 'org.matrix.*']}\n",
"VAR_6 = FUNC_0(sender='@foo:bar', type='org.matrix.custom.event', room_id=\n '!foo:bar')\n",
"self.assertFalse(Filter(VAR_5).check(VAR_6))\n"
] | [
"def test_definition_not_types_works_with_wildcards(self):...\n",
"definition = {'not_types': ['m.room.message', 'org.matrix.*']}\n",
"event = MockEvent(sender='@foo:bar', type='org.matrix.custom.event',\n room_id='!foo:bar')\n",
"self.assertFalse(Filter(definition).check(event))\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"@log_function...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = FUNC_1('/groups/local/%s/users/%s/invite', VAR_30, VAR_16)\n",
"return self.client.post_json(VAR_5=destination, VAR_2=path, VAR_39=content,\n VAR_15=True)\n"
] | [
"@log_function...\n",
"\"\"\"docstring\"\"\"\n",
"path = _create_v1_path('/groups/local/%s/users/%s/invite', group_id, user_id)\n",
"return self.client.post_json(destination=destination, path=path, data=\n content, ignore_backoff=True)\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_22(self) ->bool:...\n",
"\"\"\"docstring\"\"\"\n",
"return self.force_training or self.nlu\n"
] | [
"def should_retrain_nlu(self) ->bool:...\n",
"\"\"\"docstring\"\"\"\n",
"return self.force_training or self.nlu\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"@login_required(doConnectionCleanup=False)...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_176 = VAR_2.GET.get('query', '*')\n",
"VAR_177 = FUNC_0(VAR_2, 'offset', 0)\n",
"VAR_88 = FUNC_0(VAR_2, 'limit', settings.PAGE)\n",
"VAR_178 = None\n",
"VAR_178 = VAR_350('omero_iviewer_index')\n",
"VAR_32 = VAR_241(VAR_32)\n",
"VAR_173 = VAR_5.getObject('OriginalFile', VAR_32)\n",
"if VAR_173 is None:\n",
"VAR_179 = VAR_33 == 'csv'\n",
"VAR_53 = webgateway_views._table_query(VAR_2, VAR_32, VAR_5=conn, VAR_176=\n query, VAR_177=offset, VAR_88=limit, VAR_179=lazy)\n",
"if VAR_53.get('error') or not VAR_53.get('data'):\n",
"return JsonResponse(VAR_53)\n",
"if VAR_33 == 'csv':\n",
"VAR_301 = VAR_53.get('data')\n",
"VAR_53['data']['name'] = VAR_173.name\n",
"def FUNC_87():...\n",
"VAR_53['data']['path'] = VAR_173.path\n",
"VAR_364 = ','.join(VAR_301.get('columns'))\n",
"VAR_53['data']['id'] = VAR_32\n",
"yield VAR_364\n",
"VAR_53['meta']['query'] = VAR_176\n",
"for rows in VAR_301.get('lazy_rows'):\n",
"if VAR_177 == 0 or VAR_177 / VAR_88 == VAR_177 // VAR_88:\n",
"yield '\\n' + '\\n'.join([','.join([VAR_345(VAR_362) for VAR_362 in VAR_378]) for\n VAR_378 in rows])\n",
"VAR_300 = VAR_173.name.replace(' ', '_').replace(',', '.')\n",
"VAR_53['meta']['page'] = VAR_177 // VAR_88 + 1 if VAR_177 > 0 else 1\n",
"VAR_7 = VAR_350('omero_table', args=[file_id])\n",
"VAR_300 = VAR_300 + '.csv'\n",
"VAR_53['meta']['url'] = VAR_7\n",
"VAR_174 = TableClosingHttpResponse(FUNC_87(), content_type='text/csv')\n",
"VAR_7 += '?limit=%s' % VAR_88\n",
"VAR_174.conn = VAR_5\n",
"if VAR_176 != '*':\n",
"VAR_174.table = VAR_53.get('table')\n",
"VAR_7 += '&query=%s' % VAR_176\n",
"if VAR_177 + VAR_88 < VAR_53['meta']['totalCount']:\n",
"VAR_174['Content-Type'] = 'application/force-download'\n",
"VAR_53['meta']['next'] = VAR_7 + '&offset=%s' % (VAR_177 + VAR_88)\n",
"if VAR_177 > 0:\n",
"VAR_174['Content-Disposition'] = 'attachment; filename=%s' % VAR_300\n",
"VAR_53['meta']['prev'] = VAR_7 + '&offset=%s' % max(0, VAR_177 - VAR_88)\n",
"if VAR_33 is None:\n",
"return VAR_174\n",
"VAR_53['template'] = 'webclient/annotations/omero_table.html'\n",
"return VAR_53\n",
"VAR_53['iviewer_url'] = VAR_178\n",
"VAR_302 = VAR_53['data']['column_types']\n",
"if 'ImageColumn' in VAR_302:\n",
"VAR_53['image_column_index'] = VAR_302.index('ImageColumn')\n",
"if 'WellColumn' in VAR_302:\n",
"VAR_53['well_column_index'] = VAR_302.index('WellColumn')\n",
"if 'RoiColumn' in VAR_302:\n",
"VAR_53['roi_column_index'] = VAR_302.index('RoiColumn')\n",
"for idx, VAR_23 in enumerate(VAR_302):\n",
"if VAR_23 in ('DoubleColumn', 'LongColumn'):\n",
"VAR_406 = VAR_53['data']['columns'][idx]\n",
"VAR_407 = []\n",
"for VAR_378 in VAR_53['data']['rows']:\n",
"if VAR_378[idx]:\n",
"if ' ' in VAR_406 or len(VAR_407) < 2:\n",
"VAR_407.append(VAR_378[idx])\n",
"if len(VAR_407) > 3:\n",
"VAR_53['example_column'] = VAR_406\n",
"VAR_53['example_min_value'] = min(VAR_407)\n",
"VAR_53['example_max_value'] = max(VAR_407)\n"
] | [
"@login_required(doConnectionCleanup=False)...\n",
"\"\"\"docstring\"\"\"\n",
"query = request.GET.get('query', '*')\n",
"offset = get_long_or_default(request, 'offset', 0)\n",
"limit = get_long_or_default(request, 'limit', settings.PAGE)\n",
"iviewer_url = None\n",
"iviewer_url = reverse('omero_iviewer_index')\n",
"file_id = long(file_id)\n",
"orig_file = conn.getObject('OriginalFile', file_id)\n",
"if orig_file is None:\n",
"lazy = mtype == 'csv'\n",
"context = webgateway_views._table_query(request, file_id, conn=conn, query=\n query, offset=offset, limit=limit, lazy=lazy)\n",
"if context.get('error') or not context.get('data'):\n",
"return JsonResponse(context)\n",
"if mtype == 'csv':\n",
"table_data = context.get('data')\n",
"context['data']['name'] = orig_file.name\n",
"def csv_gen():...\n",
"context['data']['path'] = orig_file.path\n",
"csv_cols = ','.join(table_data.get('columns'))\n",
"context['data']['id'] = file_id\n",
"yield csv_cols\n",
"context['meta']['query'] = query\n",
"for rows in table_data.get('lazy_rows'):\n",
"if offset == 0 or offset / limit == offset // limit:\n",
"yield '\\n' + '\\n'.join([','.join([str(d) for d in row]) for row in rows])\n",
"downloadName = orig_file.name.replace(' ', '_').replace(',', '.')\n",
"context['meta']['page'] = offset // limit + 1 if offset > 0 else 1\n",
"url = reverse('omero_table', args=[file_id])\n",
"downloadName = downloadName + '.csv'\n",
"context['meta']['url'] = url\n",
"rsp = TableClosingHttpResponse(csv_gen(), content_type='text/csv')\n",
"url += '?limit=%s' % limit\n",
"rsp.conn = conn\n",
"if query != '*':\n",
"rsp.table = context.get('table')\n",
"url += '&query=%s' % query\n",
"if offset + limit < context['meta']['totalCount']:\n",
"rsp['Content-Type'] = 'application/force-download'\n",
"context['meta']['next'] = url + '&offset=%s' % (offset + limit)\n",
"if offset > 0:\n",
"rsp['Content-Disposition'] = 'attachment; filename=%s' % downloadName\n",
"context['meta']['prev'] = url + '&offset=%s' % max(0, offset - limit)\n",
"if mtype is None:\n",
"return rsp\n",
"context['template'] = 'webclient/annotations/omero_table.html'\n",
"return context\n",
"context['iviewer_url'] = iviewer_url\n",
"col_types = context['data']['column_types']\n",
"if 'ImageColumn' in col_types:\n",
"context['image_column_index'] = col_types.index('ImageColumn')\n",
"if 'WellColumn' in col_types:\n",
"context['well_column_index'] = col_types.index('WellColumn')\n",
"if 'RoiColumn' in col_types:\n",
"context['roi_column_index'] = col_types.index('RoiColumn')\n",
"for idx, c_type in enumerate(col_types):\n",
"if c_type in ('DoubleColumn', 'LongColumn'):\n",
"col_name = context['data']['columns'][idx]\n",
"vals = []\n",
"for row in context['data']['rows']:\n",
"if row[idx]:\n",
"if ' ' in col_name or len(vals) < 2:\n",
"vals.append(row[idx])\n",
"if len(vals) > 3:\n",
"context['example_column'] = col_name\n",
"context['example_min_value'] = min(vals)\n",
"context['example_max_value'] = max(vals)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"AugAssign'",
"Assign'",
"Condition",
"Assign'",
"AugAssign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"For",
"Condition",
"Assign'",
"Assign'",
"For",
"Condition",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def __getattr__(VAR_59, VAR_60):...\n",
"if VAR_60 == 'nameResolver':\n",
"return VAR_49\n",
"return getattr(VAR_48, VAR_60)\n"
] | [
"def __getattr__(_self, attr):...\n",
"if attr == 'nameResolver':\n",
"return nameResolver\n",
"return getattr(real_reactor, attr)\n"
] | [
0,
4,
4,
4
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_12(self):...\n",
"VAR_20 = (\n 'inputx=C:\\\\Program Files\\\\data.npz[v:0];input:0=c:\\\\PROGRA~1\\\\data.npy')\n",
"VAR_22 = saved_model_cli.preprocess_inputs_arg_string(VAR_20)\n",
"self.assertTrue(VAR_22['inputx'] == ('C:\\\\Program Files\\\\data.npz', 'v:0'))\n",
"self.assertTrue(VAR_22['input:0'] == ('c:\\\\PROGRA~1\\\\data.npy', None))\n"
] | [
"def testInputPreProcessFileNames(self):...\n",
"input_str = (\n 'inputx=C:\\\\Program Files\\\\data.npz[v:0];input:0=c:\\\\PROGRA~1\\\\data.npy')\n",
"input_dict = saved_model_cli.preprocess_inputs_arg_string(input_str)\n",
"self.assertTrue(input_dict['inputx'] == ('C:\\\\Program Files\\\\data.npz', 'v:0'))\n",
"self.assertTrue(input_dict['input:0'] == ('c:\\\\PROGRA~1\\\\data.npy', None))\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_9(*VAR_15):...\n",
"return defer.succeed(None)\n"
] | [
"def get_received_txn_response(*args):...\n",
"return defer.succeed(None)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@def_function.function...\n",
"return VAR_49 + 2 * VAR_55\n"
] | [
"@def_function.function...\n",
"return y + 2 * c\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |