lines: sequencelengths [ 1, 383 ]
raw_lines: sequencelengths [ 1, 383 ]
label: sequencelengths [ 1, 383 ]
type: sequencelengths [ 1, 383 ]
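Each record below carries four parallel sequences of equal length: "lines", "raw_lines", "label", and "type". A minimal sketch (not from the source) of how one record lines up index by index follows; the field semantics assumed here — "lines" as identifier-obfuscated code, "raw_lines" as the original code, "label" as a per-line integer tag, and "type" as a per-line AST node kind — are inferred from the first record shown below and should be treated as assumptions.

# Sketch: walk one record's four parallel sequences in lockstep.
# The example values are copied from the first record in this dump;
# the interpretation of each field is an assumption, not documented fact.
record = {
    "lines": ["def FUNC_12(VAR_0):...\n", "VAR_0.DEBUG = True\n"],
    "raw_lines": ["def test_unicorn_render_calls(settings):...\n",
                  "settings.DEBUG = True\n"],
    "label": [0, 0],
    "type": ["FunctionDef'", "Assign'"],
}

# Index i describes the same source line in four views:
# AST node kind, label, obfuscated text, original text.
for obf, raw, lab, kind in zip(record["lines"], record["raw_lines"],
                               record["label"], record["type"]):
    print(f"{kind:<14} label={lab}  {obf.rstrip():<40} | {raw.rstrip()}")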
[ "def FUNC_12(VAR_0):...\n", "VAR_0.DEBUG = True\n", "VAR_6 = Token(TokenType.TEXT,\n \"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentCalls'\")\n", "VAR_7 = unicorn(None, VAR_6)\n", "VAR_8 = {}\n", "VAR_12 = VAR_7.render(VAR_8)\n", "assert '<script type=\"module\"' in VAR_12\n", "assert len(re.findall('<script type=\"module\"', VAR_12)) == 1\n", "assert '\"calls\":[{\"fn\":\"testCall\",\"args\":[]}]' in VAR_12\n" ]
[ "def test_unicorn_render_calls(settings):...\n", "settings.DEBUG = True\n", "token = Token(TokenType.TEXT,\n \"unicorn 'tests.templatetags.test_unicorn_render.FakeComponentCalls'\")\n", "unicorn_node = unicorn(None, token)\n", "context = {}\n", "html = unicorn_node.render(context)\n", "assert '<script type=\"module\"' in html\n", "assert len(re.findall('<script type=\"module\"', html)) == 1\n", "assert '\"calls\":[{\"fn\":\"testCall\",\"args\":[]}]' in html\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assert'", "Assert'", "Assert'" ]
[ "def __init__(self, VAR_4, VAR_17, *VAR_6, **VAR_7):...\n", "super().__init__(VAR_4, VAR_17, *VAR_6, **kwargs)\n", "self.helper.form_action = reverse('save_zen', VAR_7=unit.translation.\n get_reverse_url_kwargs())\n", "self.helper.form_tag = True\n", "self.helper.disable_csrf = False\n", "self.helper.layout.append(Field('checksum'))\n" ]
[ "def __init__(self, user, unit, *args, **kwargs):...\n", "super().__init__(user, unit, *args, **kwargs)\n", "self.helper.form_action = reverse('save_zen', kwargs=unit.translation.\n get_reverse_url_kwargs())\n", "self.helper.form_tag = True\n", "self.helper.disable_csrf = False\n", "self.helper.layout.append(Field('checksum'))\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_42(VAR_62, VAR_9):...\n", "return '{0}::{1}'.format(VAR_62, VAR_9)\n" ]
[ "def get_document_cache_key(doctype, name):...\n", "return '{0}::{1}'.format(doctype, name)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_4(self) ->str:...\n", "\"\"\"docstring\"\"\"\n" ]
[ "def directory(self) ->str:...\n", "\"\"\"docstring\"\"\"\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Docstring" ]
[ "def FUNC_148(self):...\n", "VAR_101 = self.auth.db\n", "VAR_343 = VAR_101.wiki_tag.wiki_page.count(distinct=True)\n", "VAR_344 = VAR_101(VAR_101.wiki_tag).select(VAR_101.wiki_tag.name, VAR_343,\n distinct=True, groupby=db.wiki_tag.name, VAR_174=~count, VAR_175=(0, 20))\n", "if VAR_344:\n", "VAR_9, VAR_13 = VAR_344[0](VAR_343), VAR_344[-1](VAR_343)\n", "def FUNC_171(VAR_14):...\n", "VAR_415 = 'padding:0 0.2em;line-height:%.2fem;font-size:%.2fem'\n", "VAR_416 = 1.5 * (VAR_14 - VAR_13) / max(VAR_9 - VAR_13, 1) + 1.3\n", "return VAR_415 % (1.3, VAR_416)\n" ]
[ "def cloud(self):...\n", "db = self.auth.db\n", "count = db.wiki_tag.wiki_page.count(distinct=True)\n", "ids = db(db.wiki_tag).select(db.wiki_tag.name, count, distinct=True,\n groupby=db.wiki_tag.name, orderby=~count, limitby=(0, 20))\n", "if ids:\n", "a, b = ids[0](count), ids[-1](count)\n", "def style(c):...\n", "STYLE = 'padding:0 0.2em;line-height:%.2fem;font-size:%.2fem'\n", "size = 1.5 * (c - b) / max(a - b, 1) + 1.3\n", "return STYLE % (1.3, size)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def __init__(self, VAR_1: str, VAR_2: Optional[str]=None, VAR_3: Optional[...\n", "if VAR_2 is None and VAR_3 is None or VAR_2 is not None and VAR_3 is not None:\n", "self.name = VAR_1\n", "self.path = VAR_2\n", "self.url = VAR_3\n" ]
[ "def __init__(self, name: str, path: Optional[str]=None, url: Optional[str]=None...\n", "if path is None and url is None or path is not None and url is not None:\n", "self.name = name\n", "self.path = path\n", "self.url = url\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_23(VAR_9, VAR_5, VAR_10):...\n", "VAR_13 = calibre_db.session.query(db.Ratings).filter(db.Ratings.id == VAR_5\n ).first()\n", "VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0, db.Books, db.\n Books.ratings.any(db.Ratings.id == VAR_5), [VAR_10[0]])\n", "if VAR_13 and VAR_13.rating <= 10:\n", "return render_title_template('index.html', VAR_68=random, VAR_65=pagination,\n VAR_63=entries, id=book_id, VAR_149=_(u'Rating: %(rating)s stars',\n rating=int(name.rating / 2)), VAR_9='ratings')\n", "abort(404)\n" ]
[ "def render_ratings_books(page, book_id, order):...\n", "name = calibre_db.session.query(db.Ratings).filter(db.Ratings.id == book_id\n ).first()\n", "entries, random, pagination = calibre_db.fill_indexpage(page, 0, db.Books,\n db.Books.ratings.any(db.Ratings.id == book_id), [order[0]])\n", "if name and name.rating <= 10:\n", "return render_title_template('index.html', random=random, pagination=\n pagination, entries=entries, id=book_id, title=_(\n u'Rating: %(rating)s stars', rating=int(name.rating / 2)), page='ratings')\n", "abort(404)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Return'", "Expr'" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "VAR_81 = {'image': 'dataset', 'dataset': 'project', 'plate': 'screen'}\n", "VAR_82 = []\n", "for VAR_14, VAR_12 in VAR_81.items():\n", "VAR_187 = VAR_2.GET.getlist(VAR_14)\n", "return JsonResponse({'data': VAR_82})\n", "if len(VAR_187) == 0:\n", "VAR_15 = []\n", "for id in VAR_187:\n", "for VAR_318 in id.split(','):\n", "VAR_72, VAR_223 = FUNC_20(VAR_5, VAR_12, None, VAR_14, VAR_15)\n", "VAR_15.append(VAR_318)\n", "for VAR_344 in VAR_223:\n", "VAR_82.append({'id': VAR_344.id.val, 'parent': {'type': VAR_12, 'id':\n VAR_344.parent.id.val}, 'child': {'type': VAR_14, 'id': VAR_344.child.\n id.val}})\n" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "parent_types = {'image': 'dataset', 'dataset': 'project', 'plate': 'screen'}\n", "parents = []\n", "for child_type, parent_type in parent_types.items():\n", "ids = request.GET.getlist(child_type)\n", "return JsonResponse({'data': parents})\n", "if len(ids) == 0:\n", "child_ids = []\n", "for id in ids:\n", "for i in id.split(','):\n", "link_type, result = get_object_links(conn, parent_type, None, child_type,\n child_ids)\n", "child_ids.append(i)\n", "for link in result:\n", "parents.append({'id': link.id.val, 'parent': {'type': parent_type, 'id':\n link.parent.id.val}, 'child': {'type': child_type, 'id': link.child.id.\n val}})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "For", "Assign'", "Return'", "Condition", "Assign'", "For", "For", "Assign'", "Expr'", "For", "Expr'" ]
[ "def FUNC_8(self):...\n", "VAR_5 = self._makeContext()\n", "self.assertEqual(VAR_5.evaluate('dummy'), 'dummy')\n" ]
[ "def test_evaluate_with_render_simple_callable(self):...\n", "ec = self._makeContext()\n", "self.assertEqual(ec.evaluate('dummy'), 'dummy')\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_30(self):...\n", "VAR_5 = self._makeContext()\n", "self.assertIs(VAR_5.evaluate('nocall: list'), list)\n" ]
[ "def test_list_in_path_expr(self):...\n", "ec = self._makeContext()\n", "self.assertIs(ec.evaluate('nocall: list'), list)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_57(self):...\n", "\"\"\"docstring\"\"\"\n", "self.login()\n", "VAR_3 = self.client.get('/logout/next_page/')\n", "self.assertEqual(VAR_3.status_code, 302)\n", "self.assertURLEqual(VAR_3.url, '/somewhere/')\n", "self.confirm_logged_out()\n" ]
[ "def test_logout_with_next_page_specified(self):...\n", "\"\"\"docstring\"\"\"\n", "self.login()\n", "response = self.client.get('/logout/next_page/')\n", "self.assertEqual(response.status_code, 302)\n", "self.assertURLEqual(response.url, '/somewhere/')\n", "self.confirm_logged_out()\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "@VAR_2.route('/admin/mailsettings', methods=['POST'])...\n", "VAR_16 = request.form.to_dict()\n", "FUNC_38(VAR_16, 'mail_server_type')\n", "if VAR_16.get('invalidate'):\n", "config.mail_gmail_token = {}\n", "if VAR_16.get('gmail'):\n", "flag_modified(config, 'mail_gmail_token')\n", "config.save()\n", "ub.session.rollback()\n", "if VAR_16.get('test'):\n", "FUNC_41(VAR_16, 'mail_server')\n", "config.mail_gmail_token = services.gmail.setup_gmail(config.mail_gmail_token)\n", "flash(str(ex), category='error')\n", "VAR_0.error('Settings DB is not Writeable')\n", "if VAR_62.email:\n", "flash(_(u'E-mail server settings updated'), category='success')\n", "FUNC_38(VAR_16, 'mail_port')\n", "flash(_(u'Gmail Account Verification Successful'), category='success')\n", "VAR_0.error(ex)\n", "flash(_('Settings DB is not Writeable'), category='error')\n", "VAR_126 = send_test_mail(VAR_62.email, VAR_62.name)\n", "flash(_(u'Please configure your e-mail address first...'), category='error')\n", "return FUNC_56()\n", "FUNC_38(VAR_16, 'mail_use_ssl')\n", "return FUNC_56()\n", "return FUNC_56()\n", "if VAR_126 is None:\n", "FUNC_41(VAR_16, 'mail_login')\n", "flash(_(\n u'Test e-mail queued for sending to %(email)s, please check Tasks for result'\n , email=current_user.email), category='info')\n", "flash(_(u'There was an error sending the Test e-mail: %(res)s', res=result),\n category='error')\n", "FUNC_41(VAR_16, 'mail_password')\n", "FUNC_41(VAR_16, 'mail_from')\n", "FUNC_38(VAR_16, 'mail_size', lambda y: VAR_119(y) * 1024 * 1024)\n" ]
[ "@admi.route('/admin/mailsettings', methods=['POST'])...\n", "to_save = request.form.to_dict()\n", "_config_int(to_save, 'mail_server_type')\n", "if to_save.get('invalidate'):\n", "config.mail_gmail_token = {}\n", "if to_save.get('gmail'):\n", "flag_modified(config, 'mail_gmail_token')\n", "config.save()\n", "ub.session.rollback()\n", "if to_save.get('test'):\n", "_config_string(to_save, 'mail_server')\n", "config.mail_gmail_token = services.gmail.setup_gmail(config.mail_gmail_token)\n", "flash(str(ex), category='error')\n", "log.error('Settings DB is not Writeable')\n", "if current_user.email:\n", "flash(_(u'E-mail server settings updated'), category='success')\n", "_config_int(to_save, 'mail_port')\n", "flash(_(u'Gmail Account Verification Successful'), category='success')\n", "log.error(ex)\n", "flash(_('Settings DB is not Writeable'), category='error')\n", "result = send_test_mail(current_user.email, current_user.name)\n", "flash(_(u'Please configure your e-mail address first...'), category='error')\n", "return edit_mailsettings()\n", "_config_int(to_save, 'mail_use_ssl')\n", "return edit_mailsettings()\n", "return edit_mailsettings()\n", "if result is None:\n", "_config_string(to_save, 'mail_login')\n", "flash(_(\n u'Test e-mail queued for sending to %(email)s, please check Tasks for result'\n , email=current_user.email), category='info')\n", "flash(_(u'There was an error sending the Test e-mail: %(res)s', res=result),\n category='error')\n", "_config_string(to_save, 'mail_password')\n", "_config_string(to_save, 'mail_from')\n", "_config_int(to_save, 'mail_size', lambda y: int(y) * 1024 * 1024)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Condition", "Expr'", "Expr'", "Expr'", "Condition", "Expr'", "Assign'", "Expr'", "Expr'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Return'", "Expr'", "Return'", "Return'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_82(VAR_67):...\n", "if not VAR_67.name in self.flags.notifications_executed:\n", "evaluate_alert(self, VAR_67.name, VAR_67.event)\n", "self.flags.notifications_executed.append(VAR_67.name)\n" ]
[ "def _evaluate_alert(alert):...\n", "if not alert.name in self.flags.notifications_executed:\n", "evaluate_alert(self, alert.name, alert.event)\n", "self.flags.notifications_executed.append(alert.name)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Expr'" ]
[ "@VAR_0.simple_tag...\n", "VAR_40 = VAR_12.get('renderer', HTMLFormRenderer())\n", "return VAR_40.render_field(VAR_11, VAR_12)\n" ]
[ "@register.simple_tag...\n", "renderer = style.get('renderer', HTMLFormRenderer())\n", "return renderer.render_field(field, style)\n" ]
[ 0, 0, 0 ]
[ "Condition", "Assign'", "Return'" ]
[ "def FUNC_120():...\n", "VAR_214 = VAR_1.db.get_value('DocType', VAR_62, 'module')\n", "return VAR_1.module_app[FUNC_56(VAR_214)]\n" ]
[ "def _get_doctype_app():...\n", "doctype_module = local.db.get_value('DocType', doctype, 'module')\n", "return local.module_app[scrub(doctype_module)]\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_0(VAR_1, VAR_2, VAR_3=None, VAR_4=None, VAR_5=True):...\n", "if VAR_3 is None and VAR_1.md.group is not None:\n", "VAR_3 = VAR_1.md.group.lower()\n", "if VAR_4 is None:\n", "if VAR_1.md.status is not None:\n", "VAR_14, VAR_4 = splitStatus(VAR_4)\n", "VAR_4 = VAR_1.md.status\n", "if VAR_1.md.rawStatus is not None:\n", "VAR_15 = VAR_1.md.localBoilerplate[VAR_2]\n", "VAR_4 = VAR_1.md.rawStatus\n", "def FUNC_5(*VAR_8):...\n", "return scriptPath('boilerplate', *VAR_8)\n" ]
[ "def retrieveBoilerplateFile(doc, name, group=None, status=None, error=True):...\n", "if group is None and doc.md.group is not None:\n", "group = doc.md.group.lower()\n", "if status is None:\n", "if doc.md.status is not None:\n", "megaGroup, status = splitStatus(status)\n", "status = doc.md.status\n", "if doc.md.rawStatus is not None:\n", "searchLocally = doc.md.localBoilerplate[name]\n", "status = doc.md.rawStatus\n", "def boilerplatePath(*segs):...\n", "return scriptPath('boilerplate', *segs)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "FunctionDef'", "Return'" ]
[ "def FUNC_11(VAR_23, VAR_24, VAR_25, VAR_26):...\n", "from jinja2.exceptions import TemplateNotFound\n", "VAR_80 = defaultdict(lambda : 1)\n", "VAR_81 = defaultdict(list)\n", "for VAR_28 in VAR_25:\n", "if not isinstance(VAR_28, dict):\n", "for VAR_57 in VAR_26:\n", "if 'type' not in VAR_28:\n", "if len(VAR_81[VAR_57]) == 0:\n", "return VAR_81\n", "VAR_57 = VAR_28['type']\n", "VAR_27 = VAR_26[VAR_57]\n", "if VAR_57 not in VAR_26:\n", "VAR_10 = FUNC_12(VAR_23, VAR_24, VAR_27)\n", "VAR_27 = VAR_26[VAR_57]\n", "if VAR_10 is not None:\n", "VAR_10 = FUNC_12(VAR_23, VAR_24, VAR_27, VAR_28=config, VAR_29=counters[\n template_type])\n", "app.jinja_env.get_or_select_template(VAR_10['template'])\n", "VAR_0.exception('Error in template {}, not going to include it'.format(\n VAR_10['template']))\n", "VAR_81[VAR_57].append(VAR_27['to_entry'](VAR_10))\n", "if VAR_10 is None:\n", "VAR_81[VAR_57].append(VAR_27['to_entry'](VAR_10))\n", "VAR_80[VAR_57] += 1\n" ]
[ "def _process_template_configs(name, implementation, configs, rules):...\n", "from jinja2.exceptions import TemplateNotFound\n", "counters = defaultdict(lambda : 1)\n", "includes = defaultdict(list)\n", "for config in configs:\n", "if not isinstance(config, dict):\n", "for template_type in rules:\n", "if 'type' not in config:\n", "if len(includes[template_type]) == 0:\n", "return includes\n", "template_type = config['type']\n", "rule = rules[template_type]\n", "if template_type not in rules:\n", "data = _process_template_config(name, implementation, rule)\n", "rule = rules[template_type]\n", "if data is not None:\n", "data = _process_template_config(name, implementation, rule, config=config,\n counter=counters[template_type])\n", "app.jinja_env.get_or_select_template(data['template'])\n", "_logger.exception('Error in template {}, not going to include it'.format(\n data['template']))\n", "includes[template_type].append(rule['to_entry'](data))\n", "if data is None:\n", "includes[template_type].append(rule['to_entry'](data))\n", "counters[template_type] += 1\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "ImportFrom'", "Assign'", "Assign'", "For", "Condition", "For", "Condition", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Expr'", "Expr'", "Condition", "Expr'", "AugAssign'" ]
[ "async def FUNC_0(*, VAR_0: Client, VAR_1: List[AnEnum], VAR_2: Union[date,...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = '{}/tests/'.format(VAR_0.base_url)\n", "VAR_12: Dict[str, Any] = VAR_0.get_headers()\n", "VAR_7 = []\n", "for an_enum_value_item_data in VAR_1:\n", "VAR_9 = an_enum_value_item_data.value\n", "if isinstance(VAR_2, date):\n", "VAR_7.append(VAR_9)\n", "VAR_10 = VAR_2.isoformat()\n", "VAR_10 = VAR_2.isoformat()\n", "params: Dict[str, Any] = {'an_enum_value': VAR_7, 'some_date': VAR_10}\n", "VAR_11 = await _client.get(VAR_6=url, VAR_12=headers, params=params)\n", "if VAR_11.status_code == 200:\n", "return [AModel.from_dict(item) for item in cast(List[Dict[str, Any]],\n VAR_11.json())]\n", "if VAR_11.status_code == 422:\n", "return HTTPValidationError.from_dict(cast(Dict[str, Any], VAR_11.json()))\n" ]
[ "async def get_user_list(*, client: Client, an_enum_value: List[AnEnum],...\n", "\"\"\"docstring\"\"\"\n", "url = '{}/tests/'.format(client.base_url)\n", "headers: Dict[str, Any] = client.get_headers()\n", "json_an_enum_value = []\n", "for an_enum_value_item_data in an_enum_value:\n", "an_enum_value_item = an_enum_value_item_data.value\n", "if isinstance(some_date, date):\n", "json_an_enum_value.append(an_enum_value_item)\n", "json_some_date = some_date.isoformat()\n", "json_some_date = some_date.isoformat()\n", "params: Dict[str, Any] = {'an_enum_value': json_an_enum_value, 'some_date':\n json_some_date}\n", "response = await _client.get(url=url, headers=headers, params=params)\n", "if response.status_code == 200:\n", "return [AModel.from_dict(item) for item in cast(List[Dict[str, Any]],\n response.json())]\n", "if response.status_code == 422:\n", "return HTTPValidationError.from_dict(cast(Dict[str, Any], response.json()))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "AnnAssign'", "Assign'", "For", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "AnnAssign'", "Assign'", "Condition", "Return'", "Condition", "Return'" ]
[ "from mock import Mock, call\n", "from signedjson.key import generate_signing_key\n", "from synapse.api.constants import EventTypes, Membership, PresenceState\n", "from synapse.api.presence import UserPresenceState\n", "from synapse.api.room_versions import KNOWN_ROOM_VERSIONS\n", "from synapse.events.builder import EventBuilder\n", "from synapse.handlers.presence import EXTERNAL_PROCESS_EXPIRY, FEDERATION_PING_INTERVAL, FEDERATION_TIMEOUT, IDLE_TIMER, LAST_ACTIVE_GRANULARITY, SYNC_ONLINE_TIMEOUT, handle_timeout, handle_update\n", "from synapse.rest.client.v1 import room\n", "from synapse.types import UserID, get_domain_from_id\n", "from tests import unittest\n", "def FUNC_0(self):...\n", "VAR_6 = Mock()\n", "VAR_3 = '@foo:bar'\n", "VAR_7 = 5000000\n", "VAR_8 = UserPresenceState.default(VAR_3)\n", "VAR_9 = VAR_8.copy_and_replace(VAR_10=PresenceState.ONLINE, last_active_ts=now)\n", "VAR_10, VAR_11, VAR_12 = handle_update(VAR_8, VAR_9, is_mine=True, VAR_6=\n wheel_timer, VAR_7=now)\n", "self.assertTrue(VAR_11)\n", "self.assertTrue(VAR_10.currently_active)\n", "self.assertEquals(VAR_9.state, VAR_10.state)\n", "self.assertEquals(VAR_9.status_msg, VAR_10.status_msg)\n", "self.assertEquals(VAR_10.last_federation_update_ts, VAR_7)\n", "self.assertEquals(VAR_6.insert.call_count, 3)\n", "VAR_6.insert.assert_has_calls([call(VAR_7=now, obj=user_id, then=new_state.\n last_active_ts + IDLE_TIMER), call(VAR_7=now, obj=user_id, then=\n new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT), call(VAR_7=now, obj\n =user_id, then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY)],\n any_order=True)\n", "def FUNC_1(self):...\n", "VAR_6 = Mock()\n", "VAR_3 = '@foo:bar'\n", "VAR_7 = 5000000\n", "VAR_8 = UserPresenceState.default(VAR_3)\n", "VAR_8 = VAR_8.copy_and_replace(VAR_10=PresenceState.ONLINE, last_active_ts=\n now, currently_active=True)\n", "VAR_9 = VAR_8.copy_and_replace(VAR_10=PresenceState.ONLINE, last_active_ts=now)\n", "VAR_10, VAR_11, VAR_12 = handle_update(VAR_8, VAR_9, is_mine=True, VAR_6=\n wheel_timer, VAR_7=now)\n", "self.assertFalse(VAR_11)\n", "self.assertTrue(VAR_12)\n", "self.assertTrue(VAR_10.currently_active)\n", "self.assertEquals(VAR_9.state, VAR_10.state)\n", "self.assertEquals(VAR_9.status_msg, VAR_10.status_msg)\n", "self.assertEquals(VAR_10.last_federation_update_ts, VAR_7)\n", "self.assertEquals(VAR_6.insert.call_count, 3)\n", "VAR_6.insert.assert_has_calls([call(VAR_7=now, obj=user_id, then=new_state.\n last_active_ts + IDLE_TIMER), call(VAR_7=now, obj=user_id, then=\n new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT), call(VAR_7=now, obj\n =user_id, then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY)],\n any_order=True)\n", "def FUNC_2(self):...\n", "VAR_6 = Mock()\n", "VAR_3 = '@foo:bar'\n", "VAR_7 = 5000000\n", "VAR_8 = UserPresenceState.default(VAR_3)\n", "VAR_8 = VAR_8.copy_and_replace(VAR_10=PresenceState.ONLINE, last_active_ts=\n now - LAST_ACTIVE_GRANULARITY - 10, currently_active=True)\n", "VAR_9 = VAR_8.copy_and_replace(VAR_10=PresenceState.ONLINE, last_active_ts=now)\n", "VAR_10, VAR_11, VAR_12 = handle_update(VAR_8, VAR_9, is_mine=True, VAR_6=\n wheel_timer, VAR_7=now)\n", "self.assertFalse(VAR_11)\n", "self.assertTrue(VAR_12)\n", "self.assertTrue(VAR_10.currently_active)\n", "self.assertEquals(VAR_9.state, VAR_10.state)\n", "self.assertEquals(VAR_9.status_msg, VAR_10.status_msg)\n", "self.assertEquals(VAR_10.last_federation_update_ts, VAR_7)\n", "self.assertEquals(VAR_6.insert.call_count, 3)\n", "VAR_6.insert.assert_has_calls([call(VAR_7=now, obj=user_id, 
then=new_state.\n last_active_ts + IDLE_TIMER), call(VAR_7=now, obj=user_id, then=\n new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT), call(VAR_7=now, obj\n =user_id, then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY)],\n any_order=True)\n", "def FUNC_3(self):...\n", "VAR_6 = Mock()\n", "VAR_3 = '@foo:bar'\n", "VAR_7 = 5000000\n", "VAR_8 = UserPresenceState.default(VAR_3)\n", "VAR_8 = VAR_8.copy_and_replace(VAR_10=PresenceState.ONLINE, last_active_ts=\n now - LAST_ACTIVE_GRANULARITY - 1, currently_active=True)\n", "VAR_9 = VAR_8.copy_and_replace(VAR_10=PresenceState.ONLINE)\n", "VAR_10, VAR_11, VAR_12 = handle_update(VAR_8, VAR_9, is_mine=True, VAR_6=\n wheel_timer, VAR_7=now)\n", "self.assertTrue(VAR_11)\n", "self.assertFalse(VAR_10.currently_active)\n", "self.assertEquals(VAR_9.state, VAR_10.state)\n", "self.assertEquals(VAR_9.status_msg, VAR_10.status_msg)\n", "self.assertEquals(VAR_10.last_federation_update_ts, VAR_7)\n", "self.assertEquals(VAR_6.insert.call_count, 2)\n", "VAR_6.insert.assert_has_calls([call(VAR_7=now, obj=user_id, then=new_state.\n last_active_ts + IDLE_TIMER), call(VAR_7=now, obj=user_id, then=\n new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT)], any_order=True)\n", "def FUNC_4(self):...\n", "VAR_6 = Mock()\n", "VAR_3 = '@foo:bar'\n", "VAR_7 = 5000000\n", "VAR_8 = UserPresenceState.default(VAR_3)\n", "VAR_8 = VAR_8.copy_and_replace(VAR_10=PresenceState.ONLINE, last_active_ts=now)\n", "VAR_9 = VAR_8.copy_and_replace(VAR_10=PresenceState.ONLINE)\n", "VAR_10, VAR_11, VAR_12 = handle_update(VAR_8, VAR_9, is_mine=False, VAR_6=\n wheel_timer, VAR_7=now)\n", "self.assertFalse(VAR_11)\n", "self.assertFalse(VAR_12)\n", "self.assertFalse(VAR_10.currently_active)\n", "self.assertEquals(VAR_9.state, VAR_10.state)\n", "self.assertEquals(VAR_9.status_msg, VAR_10.status_msg)\n", "self.assertEquals(VAR_6.insert.call_count, 1)\n", "VAR_6.insert.assert_has_calls([call(VAR_7=now, obj=user_id, then=new_state.\n last_federation_update_ts + FEDERATION_TIMEOUT)], any_order=True)\n", "def FUNC_5(self):...\n", "VAR_6 = Mock()\n", "VAR_3 = '@foo:bar'\n", "VAR_7 = 5000000\n", "VAR_8 = UserPresenceState.default(VAR_3)\n", "VAR_8 = VAR_8.copy_and_replace(VAR_10=PresenceState.ONLINE, last_active_ts=\n now, currently_active=True)\n", "VAR_9 = VAR_8.copy_and_replace(VAR_10=PresenceState.OFFLINE)\n", "VAR_10, VAR_11, VAR_12 = handle_update(VAR_8, VAR_9, is_mine=True, VAR_6=\n wheel_timer, VAR_7=now)\n", "self.assertTrue(VAR_11)\n", "self.assertEquals(VAR_9.state, VAR_10.state)\n", "self.assertEquals(VAR_10.last_federation_update_ts, VAR_7)\n", "self.assertEquals(VAR_6.insert.call_count, 0)\n", "def FUNC_6(self):...\n", "VAR_6 = Mock()\n", "VAR_3 = '@foo:bar'\n", "VAR_7 = 5000000\n", "VAR_8 = UserPresenceState.default(VAR_3)\n", "VAR_8 = VAR_8.copy_and_replace(VAR_10=PresenceState.ONLINE, last_active_ts=\n now, currently_active=True)\n", "VAR_9 = VAR_8.copy_and_replace(VAR_10=PresenceState.UNAVAILABLE)\n", "VAR_10, VAR_11, VAR_12 = handle_update(VAR_8, VAR_9, is_mine=True, VAR_6=\n wheel_timer, VAR_7=now)\n", "self.assertTrue(VAR_11)\n", "self.assertEquals(VAR_9.state, VAR_10.state)\n", "self.assertEquals(VAR_10.last_federation_update_ts, VAR_7)\n", "self.assertEquals(VAR_9.state, VAR_10.state)\n", "self.assertEquals(VAR_9.status_msg, VAR_10.status_msg)\n", "self.assertEquals(VAR_6.insert.call_count, 1)\n", "VAR_6.insert.assert_has_calls([call(VAR_7=now, obj=user_id, then=new_state.\n last_user_sync_ts + SYNC_ONLINE_TIMEOUT)], any_order=True)\n", "def FUNC_7(self):...\n", "VAR_3 = 
'@foo:bar'\n", "VAR_7 = 5000000\n", "VAR_10 = UserPresenceState.default(VAR_3)\n", "VAR_10 = VAR_10.copy_and_replace(VAR_10=PresenceState.ONLINE,\n last_active_ts=now - IDLE_TIMER - 1, last_user_sync_ts=now)\n", "VAR_9 = handle_timeout(VAR_10, is_mine=True, syncing_user_ids=set(), VAR_7=now)\n", "self.assertIsNotNone(VAR_9)\n", "self.assertEquals(VAR_9.state, PresenceState.UNAVAILABLE)\n", "def FUNC_8(self):...\n", "VAR_3 = '@foo:bar'\n", "VAR_7 = 5000000\n", "VAR_10 = UserPresenceState.default(VAR_3)\n", "VAR_10 = VAR_10.copy_and_replace(VAR_10=PresenceState.ONLINE,\n last_active_ts=0, last_user_sync_ts=now - SYNC_ONLINE_TIMEOUT - 1)\n", "VAR_9 = handle_timeout(VAR_10, is_mine=True, syncing_user_ids=set(), VAR_7=now)\n", "self.assertIsNotNone(VAR_9)\n", "self.assertEquals(VAR_9.state, PresenceState.OFFLINE)\n", "def FUNC_9(self):...\n", "VAR_3 = '@foo:bar'\n", "VAR_7 = 5000000\n", "VAR_10 = UserPresenceState.default(VAR_3)\n", "VAR_10 = VAR_10.copy_and_replace(VAR_10=PresenceState.ONLINE,\n last_active_ts=now - SYNC_ONLINE_TIMEOUT - 1, last_user_sync_ts=now -\n SYNC_ONLINE_TIMEOUT - 1)\n", "VAR_9 = handle_timeout(VAR_10, is_mine=True, syncing_user_ids={user_id},\n VAR_7=now)\n", "self.assertIsNotNone(VAR_9)\n", "self.assertEquals(VAR_9.state, PresenceState.ONLINE)\n", "def FUNC_10(self):...\n", "VAR_3 = '@foo:bar'\n", "VAR_7 = 5000000\n", "VAR_10 = UserPresenceState.default(VAR_3)\n", "VAR_10 = VAR_10.copy_and_replace(VAR_10=PresenceState.ONLINE,\n last_active_ts=now, last_user_sync_ts=now, last_federation_update_ts=\n now - FEDERATION_PING_INTERVAL - 1)\n", "VAR_9 = handle_timeout(VAR_10, is_mine=True, syncing_user_ids=set(), VAR_7=now)\n", "self.assertIsNotNone(VAR_9)\n", "self.assertEquals(VAR_9, VAR_9)\n", "def FUNC_11(self):...\n", "VAR_3 = '@foo:bar'\n", "VAR_7 = 5000000\n", "VAR_10 = UserPresenceState.default(VAR_3)\n", "VAR_10 = VAR_10.copy_and_replace(VAR_10=PresenceState.ONLINE,\n last_active_ts=now, last_user_sync_ts=now, last_federation_update_ts=now)\n", "VAR_9 = handle_timeout(VAR_10, is_mine=True, syncing_user_ids=set(), VAR_7=now)\n", "self.assertIsNone(VAR_9)\n", "def FUNC_12(self):...\n", "VAR_3 = '@foo:bar'\n", "VAR_7 = 5000000\n", "VAR_10 = UserPresenceState.default(VAR_3)\n", "VAR_10 = VAR_10.copy_and_replace(VAR_10=PresenceState.ONLINE,\n last_active_ts=now, last_user_sync_ts=now, last_federation_update_ts=\n now - FEDERATION_TIMEOUT - 1)\n", "VAR_9 = handle_timeout(VAR_10, is_mine=False, syncing_user_ids=set(), VAR_7=now\n )\n", "self.assertIsNotNone(VAR_9)\n", "self.assertEquals(VAR_9.state, PresenceState.OFFLINE)\n", "def FUNC_13(self):...\n", "VAR_3 = '@foo:bar'\n", "VAR_7 = 5000000\n", "VAR_10 = UserPresenceState.default(VAR_3)\n", "VAR_10 = VAR_10.copy_and_replace(VAR_10=PresenceState.ONLINE,\n last_active_ts=now - LAST_ACTIVE_GRANULARITY - 1, last_user_sync_ts=now,\n last_federation_update_ts=now)\n", "VAR_9 = handle_timeout(VAR_10, is_mine=True, syncing_user_ids=set(), VAR_7=now)\n", "self.assertIsNotNone(VAR_9)\n", "self.assertEquals(VAR_10, VAR_9)\n", "def FUNC_14(self, VAR_0, VAR_1, VAR_2):...\n", "self.presence_handler = VAR_2.get_presence_handler()\n", "self.clock = VAR_2.get_clock()\n", "def FUNC_15(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_13 = 1\n", "VAR_3 = '@test:server'\n", "self.get_success(self.presence_handler.update_external_syncs_row(VAR_13,\n VAR_3, True, self.clock.time_msec()))\n", "self.reactor.advance(EXTERNAL_PROCESS_EXPIRY / 2)\n", "VAR_10 = self.get_success(self.presence_handler.get_state(UserID.\n from_string(VAR_3)))\n", 
"self.assertEqual(VAR_10.state, PresenceState.ONLINE)\n", "self.reactor.advance(EXTERNAL_PROCESS_EXPIRY)\n", "VAR_10 = self.get_success(self.presence_handler.get_state(UserID.\n from_string(VAR_3)))\n", "self.assertEqual(VAR_10.state, PresenceState.OFFLINE)\n", "\"\"\"string\"\"\"\n", "VAR_3 = '@test:server'\n", "VAR_4 = [room.register_servlets]\n", "def FUNC_16(self, VAR_0, VAR_1):...\n", "VAR_2 = self.setup_test_homeserver('server', http_client=None,\n federation_sender=Mock())\n", "return VAR_2\n" ]
[ "from mock import Mock, call\n", "from signedjson.key import generate_signing_key\n", "from synapse.api.constants import EventTypes, Membership, PresenceState\n", "from synapse.api.presence import UserPresenceState\n", "from synapse.api.room_versions import KNOWN_ROOM_VERSIONS\n", "from synapse.events.builder import EventBuilder\n", "from synapse.handlers.presence import EXTERNAL_PROCESS_EXPIRY, FEDERATION_PING_INTERVAL, FEDERATION_TIMEOUT, IDLE_TIMER, LAST_ACTIVE_GRANULARITY, SYNC_ONLINE_TIMEOUT, handle_timeout, handle_update\n", "from synapse.rest.client.v1 import room\n", "from synapse.types import UserID, get_domain_from_id\n", "from tests import unittest\n", "def test_offline_to_online(self):...\n", "wheel_timer = Mock()\n", "user_id = '@foo:bar'\n", "now = 5000000\n", "prev_state = UserPresenceState.default(user_id)\n", "new_state = prev_state.copy_and_replace(state=PresenceState.ONLINE,\n last_active_ts=now)\n", "state, persist_and_notify, federation_ping = handle_update(prev_state,\n new_state, is_mine=True, wheel_timer=wheel_timer, now=now)\n", "self.assertTrue(persist_and_notify)\n", "self.assertTrue(state.currently_active)\n", "self.assertEquals(new_state.state, state.state)\n", "self.assertEquals(new_state.status_msg, state.status_msg)\n", "self.assertEquals(state.last_federation_update_ts, now)\n", "self.assertEquals(wheel_timer.insert.call_count, 3)\n", "wheel_timer.insert.assert_has_calls([call(now=now, obj=user_id, then=\n new_state.last_active_ts + IDLE_TIMER), call(now=now, obj=user_id, then\n =new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT), call(now=now, obj=\n user_id, then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY)],\n any_order=True)\n", "def test_online_to_online(self):...\n", "wheel_timer = Mock()\n", "user_id = '@foo:bar'\n", "now = 5000000\n", "prev_state = UserPresenceState.default(user_id)\n", "prev_state = prev_state.copy_and_replace(state=PresenceState.ONLINE,\n last_active_ts=now, currently_active=True)\n", "new_state = prev_state.copy_and_replace(state=PresenceState.ONLINE,\n last_active_ts=now)\n", "state, persist_and_notify, federation_ping = handle_update(prev_state,\n new_state, is_mine=True, wheel_timer=wheel_timer, now=now)\n", "self.assertFalse(persist_and_notify)\n", "self.assertTrue(federation_ping)\n", "self.assertTrue(state.currently_active)\n", "self.assertEquals(new_state.state, state.state)\n", "self.assertEquals(new_state.status_msg, state.status_msg)\n", "self.assertEquals(state.last_federation_update_ts, now)\n", "self.assertEquals(wheel_timer.insert.call_count, 3)\n", "wheel_timer.insert.assert_has_calls([call(now=now, obj=user_id, then=\n new_state.last_active_ts + IDLE_TIMER), call(now=now, obj=user_id, then\n =new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT), call(now=now, obj=\n user_id, then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY)],\n any_order=True)\n", "def test_online_to_online_last_active_noop(self):...\n", "wheel_timer = Mock()\n", "user_id = '@foo:bar'\n", "now = 5000000\n", "prev_state = UserPresenceState.default(user_id)\n", "prev_state = prev_state.copy_and_replace(state=PresenceState.ONLINE,\n last_active_ts=now - LAST_ACTIVE_GRANULARITY - 10, currently_active=True)\n", "new_state = prev_state.copy_and_replace(state=PresenceState.ONLINE,\n last_active_ts=now)\n", "state, persist_and_notify, federation_ping = handle_update(prev_state,\n new_state, is_mine=True, wheel_timer=wheel_timer, now=now)\n", "self.assertFalse(persist_and_notify)\n", "self.assertTrue(federation_ping)\n", 
"self.assertTrue(state.currently_active)\n", "self.assertEquals(new_state.state, state.state)\n", "self.assertEquals(new_state.status_msg, state.status_msg)\n", "self.assertEquals(state.last_federation_update_ts, now)\n", "self.assertEquals(wheel_timer.insert.call_count, 3)\n", "wheel_timer.insert.assert_has_calls([call(now=now, obj=user_id, then=\n new_state.last_active_ts + IDLE_TIMER), call(now=now, obj=user_id, then\n =new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT), call(now=now, obj=\n user_id, then=new_state.last_active_ts + LAST_ACTIVE_GRANULARITY)],\n any_order=True)\n", "def test_online_to_online_last_active(self):...\n", "wheel_timer = Mock()\n", "user_id = '@foo:bar'\n", "now = 5000000\n", "prev_state = UserPresenceState.default(user_id)\n", "prev_state = prev_state.copy_and_replace(state=PresenceState.ONLINE,\n last_active_ts=now - LAST_ACTIVE_GRANULARITY - 1, currently_active=True)\n", "new_state = prev_state.copy_and_replace(state=PresenceState.ONLINE)\n", "state, persist_and_notify, federation_ping = handle_update(prev_state,\n new_state, is_mine=True, wheel_timer=wheel_timer, now=now)\n", "self.assertTrue(persist_and_notify)\n", "self.assertFalse(state.currently_active)\n", "self.assertEquals(new_state.state, state.state)\n", "self.assertEquals(new_state.status_msg, state.status_msg)\n", "self.assertEquals(state.last_federation_update_ts, now)\n", "self.assertEquals(wheel_timer.insert.call_count, 2)\n", "wheel_timer.insert.assert_has_calls([call(now=now, obj=user_id, then=\n new_state.last_active_ts + IDLE_TIMER), call(now=now, obj=user_id, then\n =new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT)], any_order=True)\n", "def test_remote_ping_timer(self):...\n", "wheel_timer = Mock()\n", "user_id = '@foo:bar'\n", "now = 5000000\n", "prev_state = UserPresenceState.default(user_id)\n", "prev_state = prev_state.copy_and_replace(state=PresenceState.ONLINE,\n last_active_ts=now)\n", "new_state = prev_state.copy_and_replace(state=PresenceState.ONLINE)\n", "state, persist_and_notify, federation_ping = handle_update(prev_state,\n new_state, is_mine=False, wheel_timer=wheel_timer, now=now)\n", "self.assertFalse(persist_and_notify)\n", "self.assertFalse(federation_ping)\n", "self.assertFalse(state.currently_active)\n", "self.assertEquals(new_state.state, state.state)\n", "self.assertEquals(new_state.status_msg, state.status_msg)\n", "self.assertEquals(wheel_timer.insert.call_count, 1)\n", "wheel_timer.insert.assert_has_calls([call(now=now, obj=user_id, then=\n new_state.last_federation_update_ts + FEDERATION_TIMEOUT)], any_order=True)\n", "def test_online_to_offline(self):...\n", "wheel_timer = Mock()\n", "user_id = '@foo:bar'\n", "now = 5000000\n", "prev_state = UserPresenceState.default(user_id)\n", "prev_state = prev_state.copy_and_replace(state=PresenceState.ONLINE,\n last_active_ts=now, currently_active=True)\n", "new_state = prev_state.copy_and_replace(state=PresenceState.OFFLINE)\n", "state, persist_and_notify, federation_ping = handle_update(prev_state,\n new_state, is_mine=True, wheel_timer=wheel_timer, now=now)\n", "self.assertTrue(persist_and_notify)\n", "self.assertEquals(new_state.state, state.state)\n", "self.assertEquals(state.last_federation_update_ts, now)\n", "self.assertEquals(wheel_timer.insert.call_count, 0)\n", "def test_online_to_idle(self):...\n", "wheel_timer = Mock()\n", "user_id = '@foo:bar'\n", "now = 5000000\n", "prev_state = UserPresenceState.default(user_id)\n", "prev_state = prev_state.copy_and_replace(state=PresenceState.ONLINE,\n 
last_active_ts=now, currently_active=True)\n", "new_state = prev_state.copy_and_replace(state=PresenceState.UNAVAILABLE)\n", "state, persist_and_notify, federation_ping = handle_update(prev_state,\n new_state, is_mine=True, wheel_timer=wheel_timer, now=now)\n", "self.assertTrue(persist_and_notify)\n", "self.assertEquals(new_state.state, state.state)\n", "self.assertEquals(state.last_federation_update_ts, now)\n", "self.assertEquals(new_state.state, state.state)\n", "self.assertEquals(new_state.status_msg, state.status_msg)\n", "self.assertEquals(wheel_timer.insert.call_count, 1)\n", "wheel_timer.insert.assert_has_calls([call(now=now, obj=user_id, then=\n new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT)], any_order=True)\n", "def test_idle_timer(self):...\n", "user_id = '@foo:bar'\n", "now = 5000000\n", "state = UserPresenceState.default(user_id)\n", "state = state.copy_and_replace(state=PresenceState.ONLINE, last_active_ts=\n now - IDLE_TIMER - 1, last_user_sync_ts=now)\n", "new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now\n )\n", "self.assertIsNotNone(new_state)\n", "self.assertEquals(new_state.state, PresenceState.UNAVAILABLE)\n", "def test_sync_timeout(self):...\n", "user_id = '@foo:bar'\n", "now = 5000000\n", "state = UserPresenceState.default(user_id)\n", "state = state.copy_and_replace(state=PresenceState.ONLINE, last_active_ts=0,\n last_user_sync_ts=now - SYNC_ONLINE_TIMEOUT - 1)\n", "new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now\n )\n", "self.assertIsNotNone(new_state)\n", "self.assertEquals(new_state.state, PresenceState.OFFLINE)\n", "def test_sync_online(self):...\n", "user_id = '@foo:bar'\n", "now = 5000000\n", "state = UserPresenceState.default(user_id)\n", "state = state.copy_and_replace(state=PresenceState.ONLINE, last_active_ts=\n now - SYNC_ONLINE_TIMEOUT - 1, last_user_sync_ts=now -\n SYNC_ONLINE_TIMEOUT - 1)\n", "new_state = handle_timeout(state, is_mine=True, syncing_user_ids={user_id},\n now=now)\n", "self.assertIsNotNone(new_state)\n", "self.assertEquals(new_state.state, PresenceState.ONLINE)\n", "def test_federation_ping(self):...\n", "user_id = '@foo:bar'\n", "now = 5000000\n", "state = UserPresenceState.default(user_id)\n", "state = state.copy_and_replace(state=PresenceState.ONLINE, last_active_ts=\n now, last_user_sync_ts=now, last_federation_update_ts=now -\n FEDERATION_PING_INTERVAL - 1)\n", "new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now\n )\n", "self.assertIsNotNone(new_state)\n", "self.assertEquals(new_state, new_state)\n", "def test_no_timeout(self):...\n", "user_id = '@foo:bar'\n", "now = 5000000\n", "state = UserPresenceState.default(user_id)\n", "state = state.copy_and_replace(state=PresenceState.ONLINE, last_active_ts=\n now, last_user_sync_ts=now, last_federation_update_ts=now)\n", "new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now\n )\n", "self.assertIsNone(new_state)\n", "def test_federation_timeout(self):...\n", "user_id = '@foo:bar'\n", "now = 5000000\n", "state = UserPresenceState.default(user_id)\n", "state = state.copy_and_replace(state=PresenceState.ONLINE, last_active_ts=\n now, last_user_sync_ts=now, last_federation_update_ts=now -\n FEDERATION_TIMEOUT - 1)\n", "new_state = handle_timeout(state, is_mine=False, syncing_user_ids=set(),\n now=now)\n", "self.assertIsNotNone(new_state)\n", "self.assertEquals(new_state.state, PresenceState.OFFLINE)\n", "def test_last_active(self):...\n", "user_id = '@foo:bar'\n", "now = 
5000000\n", "state = UserPresenceState.default(user_id)\n", "state = state.copy_and_replace(state=PresenceState.ONLINE, last_active_ts=\n now - LAST_ACTIVE_GRANULARITY - 1, last_user_sync_ts=now,\n last_federation_update_ts=now)\n", "new_state = handle_timeout(state, is_mine=True, syncing_user_ids=set(), now=now\n )\n", "self.assertIsNotNone(new_state)\n", "self.assertEquals(state, new_state)\n", "def prepare(self, reactor, clock, hs):...\n", "self.presence_handler = hs.get_presence_handler()\n", "self.clock = hs.get_clock()\n", "def test_external_process_timeout(self):...\n", "\"\"\"docstring\"\"\"\n", "process_id = 1\n", "user_id = '@test:server'\n", "self.get_success(self.presence_handler.update_external_syncs_row(process_id,\n user_id, True, self.clock.time_msec()))\n", "self.reactor.advance(EXTERNAL_PROCESS_EXPIRY / 2)\n", "state = self.get_success(self.presence_handler.get_state(UserID.from_string\n (user_id)))\n", "self.assertEqual(state.state, PresenceState.ONLINE)\n", "self.reactor.advance(EXTERNAL_PROCESS_EXPIRY)\n", "state = self.get_success(self.presence_handler.get_state(UserID.from_string\n (user_id)))\n", "self.assertEqual(state.state, PresenceState.OFFLINE)\n", "\"\"\"Tests remote servers get told about presence of users in the room when\n they join and when new local users join.\n \"\"\"\n", "user_id = '@test:server'\n", "servlets = [room.register_servlets]\n", "def make_homeserver(self, reactor, clock):...\n", "hs = self.setup_test_homeserver('server', http_client=None,\n federation_sender=Mock())\n", "return hs\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_67(VAR_10):...\n", "VAR_213 = {}\n", "for VAR_30, VAR_108 in (['x', VAR_10.getPosX()], ['y', VAR_10.getPosY()]):\n", "if VAR_108 is not None:\n", "return VAR_213\n", "VAR_213[VAR_30] = {'value': VAR_108.getValue(), 'unit': str(VAR_108.getUnit())}\n" ]
[ "def marshal_pos(w):...\n", "d = {}\n", "for x, p in (['x', w.getPosX()], ['y', w.getPosY()]):\n", "if p is not None:\n", "return d\n", "d[x] = {'value': p.getValue(), 'unit': str(p.getUnit())}\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Condition", "Return'", "Assign'" ]
[ "def FUNC_0(VAR_3, VAR_4=None, VAR_5=None):...\n", "\"\"\"docstring\"\"\"\n", "from frappe.translate import get_full_dict\n", "from frappe.utils import strip_html_tags, is_html\n", "if not hasattr(VAR_1, 'lang'):\n", "VAR_1.lang = VAR_4 or 'en'\n", "if not VAR_4:\n", "VAR_4 = VAR_1.lang\n", "VAR_175 = VAR_3\n", "if is_html(VAR_3):\n", "VAR_3 = strip_html_tags(VAR_3)\n", "VAR_3 = FUNC_1(VAR_3).strip()\n", "VAR_176 = ''\n", "if VAR_5:\n", "VAR_206 = '{msg}:{context}'.format(VAR_3=msg, VAR_5=context)\n", "if not VAR_176:\n", "VAR_176 = get_full_dict(VAR_4).get(VAR_206)\n", "VAR_176 = get_full_dict(VAR_4).get(VAR_3)\n", "return VAR_176 or VAR_175\n" ]
[ "def _(msg, lang=None, context=None):...\n", "\"\"\"docstring\"\"\"\n", "from frappe.translate import get_full_dict\n", "from frappe.utils import strip_html_tags, is_html\n", "if not hasattr(local, 'lang'):\n", "local.lang = lang or 'en'\n", "if not lang:\n", "lang = local.lang\n", "non_translated_string = msg\n", "if is_html(msg):\n", "msg = strip_html_tags(msg)\n", "msg = as_unicode(msg).strip()\n", "translated_string = ''\n", "if context:\n", "string_key = '{msg}:{context}'.format(msg=msg, context=context)\n", "if not translated_string:\n", "translated_string = get_full_dict(lang).get(string_key)\n", "translated_string = get_full_dict(lang).get(msg)\n", "return translated_string or non_translated_string\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "ImportFrom'", "ImportFrom'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Return'" ]
[ "@FUNC_0...\n", "return MessageHandler(self)\n" ]
[ "@cache_in_self...\n", "return MessageHandler(self)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_14(VAR_24):...\n", "\"\"\"docstring\"\"\"\n", "return '\"%s\"' % VAR_24.replace('\\\\', '\\\\\\\\').replace('\"', '\\\\\"')\n" ]
[ "def quote_etag(etag):...\n", "\"\"\"docstring\"\"\"\n", "return '\"%s\"' % etag.replace('\\\\', '\\\\\\\\').replace('\"', '\\\\\"')\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_11(self, VAR_20):...\n", "VAR_43 = self.evaluate(VAR_20)\n", "if VAR_43 is self.getDefault():\n", "return VAR_43\n", "return bool(VAR_43)\n" ]
[ "def evaluateBoolean(self, expr):...\n", "value = self.evaluate(expr)\n", "if value is self.getDefault():\n", "return value\n", "return bool(value)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Return'" ]
[ "@event.listens_for(VAR_83, 'before_flush')...\n", "for change in itertools.chain(VAR_1.new, VAR_1.dirty):\n", "if isinstance(change, (CLASS_8, CLASS_13, CLASS_12)):\n", "for change in itertools.chain(VAR_1.new, VAR_1.deleted):\n", "if change.kobo_reading_state:\n", "if isinstance(change, CLASS_6):\n", "change.kobo_reading_state.last_modified = datetime.datetime.utcnow()\n", "change.ub_shelf.last_modified = datetime.datetime.utcnow()\n" ]
[ "@event.listens_for(Session, 'before_flush')...\n", "for change in itertools.chain(session.new, session.dirty):\n", "if isinstance(change, (ReadBook, KoboStatistics, KoboBookmark)):\n", "for change in itertools.chain(session.new, session.deleted):\n", "if change.kobo_reading_state:\n", "if isinstance(change, BookShelf):\n", "change.kobo_reading_state.last_modified = datetime.datetime.utcnow()\n", "change.ub_shelf.last_modified = datetime.datetime.utcnow()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "For", "For", "Condition", "For", "Condition", "Condition", "Assign'", "Assign'" ]
[ "def FUNC_40():...\n", "VAR_90 = util.flask.check_lastmodified(VAR_103)\n", "VAR_89 = util.flask.check_etag(VAR_104)\n", "return VAR_90 and VAR_89\n" ]
[ "def check_etag_and_lastmodified():...\n", "lastmodified_ok = util.flask.check_lastmodified(current_lastmodified)\n", "etag_ok = util.flask.check_etag(current_etag)\n", "return lastmodified_ok and etag_ok\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_12(self):...\n", "assert url_for('/foobar') == '/foobar'\n" ]
[ "def test_it_works_with_already_formed_path(self):...\n", "assert url_for('/foobar') == '/foobar'\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assert'" ]
[ "def FUNC_2() ->None:...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = [FavaLedger(filepath) for filepath in VAR_1.config['BEANCOUNT_FILES']]\n", "FUNC_1(VAR_6)\n" ]
[ "def _load_file() ->None:...\n", "\"\"\"docstring\"\"\"\n", "ledgers = [FavaLedger(filepath) for filepath in app.config['BEANCOUNT_FILES']]\n", "update_ledger_slugs(ledgers)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'" ]
[ "def __init__(self, VAR_42: bool=True, VAR_41: bool=True, VAR_48: bool=True,...\n", "\"\"\"docstring\"\"\"\n", "self.nlu = VAR_42\n", "self.core = VAR_41\n", "self.nlg = VAR_48\n", "self.force_training = VAR_39\n" ]
[ "def __init__(self, nlu: bool=True, core: bool=True, nlg: bool=True,...\n", "\"\"\"docstring\"\"\"\n", "self.nlu = nlu\n", "self.core = core\n", "self.nlg = nlg\n", "self.force_training = force_training\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_18(VAR_141, VAR_142):...\n", "if VAR_142 and VAR_142 not in VAR_140 and VAR_141 in VAR_62:\n", "VAR_165, VAR_166 = VAR_62[VAR_141]\n", "send_templated_mail(VAR_165, VAR_166, VAR_142, VAR_5=self.queue.\n from_address, **kwargs)\n", "VAR_140.add(VAR_142)\n" ]
[ "def send(role, recipient):...\n", "if recipient and recipient not in recipients and role in roles:\n", "template, context = roles[role]\n", "send_templated_mail(template, context, recipient, sender=self.queue.\n from_address, **kwargs)\n", "recipients.add(recipient)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_0():...\n", "if 'google' not in VAR_1.remote_apps:\n", "VAR_1.remote_app('google', base_url='https://www.google.com/accounts/',\n authorize_url=\n 'https://accounts.google.com/o/oauth2/auth?prompt=select_account+consent',\n request_token_url=None, request_token_params={'scope':\n 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile'\n }, access_token_url='https://accounts.google.com/o/oauth2/token',\n access_token_method='POST', consumer_key=settings.GOOGLE_CLIENT_ID,\n consumer_secret=settings.GOOGLE_CLIENT_SECRET)\n", "return VAR_1.google\n" ]
[ "def google_remote_app():...\n", "if 'google' not in oauth.remote_apps:\n", "oauth.remote_app('google', base_url='https://www.google.com/accounts/',\n authorize_url=\n 'https://accounts.google.com/o/oauth2/auth?prompt=select_account+consent',\n request_token_url=None, request_token_params={'scope':\n 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile'\n }, access_token_url='https://accounts.google.com/o/oauth2/token',\n access_token_method='POST', consumer_key=settings.GOOGLE_CLIENT_ID,\n consumer_secret=settings.GOOGLE_CLIENT_SECRET)\n", "return oauth.google\n" ]
[ 0, 4, 4, 4 ]
[ "FunctionDef'", "Condition", "Expr'", "Return'" ]
[ "@VAR_0.filter...\n", "\"\"\"docstring\"\"\"\n", "VAR_46 = '%s > %s'\n", "VAR_47 = VAR_19.links\n", "if VAR_19.data:\n", "VAR_56 = VAR_19.data.items()\n", "if VAR_20 is not None:\n", "for sub_section_key, sub_section in VAR_56:\n", "VAR_57 = OrderedDict()\n", "return VAR_47\n", "VAR_57 = FUNC_17(sub_section, VAR_20=sub_section_key)\n", "for link_key, VAR_6 in VAR_47.items():\n", "VAR_47.update(VAR_57)\n", "VAR_58 = VAR_46 % (VAR_20, link_key)\n", "return VAR_57\n", "VAR_57.update({VAR_58: VAR_6})\n" ]
[ "@register.filter...\n", "\"\"\"docstring\"\"\"\n", "NESTED_FORMAT = '%s > %s'\n", "links = section.links\n", "if section.data:\n", "data = section.data.items()\n", "if sec_key is not None:\n", "for sub_section_key, sub_section in data:\n", "new_links = OrderedDict()\n", "return links\n", "new_links = schema_links(sub_section, sec_key=sub_section_key)\n", "for link_key, link in links.items():\n", "links.update(new_links)\n", "new_key = NESTED_FORMAT % (sec_key, link_key)\n", "return new_links\n", "new_links.update({new_key: link})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "For", "Assign'", "Return'", "Assign'", "For", "Expr'", "Assign'", "Return'", "Expr'" ]
[ "@VAR_0.route('/config', methods=['GET', 'POST'])...\n", "\"\"\"docstring\"\"\"\n", "VAR_47 = []\n", "VAR_48 = []\n", "VAR_18 = None\n", "if request.method == 'POST':\n", "VAR_18 = request.form.get('section')\n", "VAR_47 = FUNC_58('/internal/config', 'get')\n", "flash(str(err), 'danger')\n", "return render_template('config.html', title='Config', VAR_47=\n full_config_info, VAR_48=config_info)\n", "VAR_18 = None if VAR_18 in ['', None] else VAR_18\n", "VAR_48 = VAR_47 if VAR_18 is None else [s for s in VAR_47 if s['name'] ==\n VAR_18]\n" ]
[ "@gui.route('/config', methods=['GET', 'POST'])...\n", "\"\"\"docstring\"\"\"\n", "full_config_info = []\n", "config_info = []\n", "section = None\n", "if request.method == 'POST':\n", "section = request.form.get('section')\n", "full_config_info = query_internal_api('/internal/config', 'get')\n", "flash(str(err), 'danger')\n", "return render_template('config.html', title='Config', full_config_info=\n full_config_info, config_info=config_info)\n", "section = None if section in ['', None] else section\n", "config_info = full_config_info if section is None else [s for s in\n full_config_info if s['name'] == section]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Return'", "Assign'", "Assign'" ]
[ "def FUNC_32(self, VAR_8, VAR_7):...\n", "if 'data' in VAR_7 and 'create_discovery' in VAR_8.session:\n", "VAR_141 = []\n", "self.clean_instance(VAR_7['initial'])\n", "VAR_141 = []\n", "VAR_8.session['create_discovery'] = VAR_141\n", "for i, VAR_10 in enumerate(VAR_8.session['create_discovery']):\n", "VAR_141 = self.discover()\n", "VAR_8.session['create_discovery_meta'] = [x.meta for x in VAR_141]\n", "VAR_144 = DiscoveryResult(VAR_10)\n", "return VAR_141\n", "if not VAR_141:\n", "return VAR_141\n", "VAR_144.meta = VAR_8.session['create_discovery_meta'][i]\n", "VAR_141 = self.discover(VAR_80=True)\n", "VAR_141.append(VAR_144)\n" ]
[ "def perform_discovery(self, request, kwargs):...\n", "if 'data' in kwargs and 'create_discovery' in request.session:\n", "discovered = []\n", "self.clean_instance(kwargs['initial'])\n", "discovered = []\n", "request.session['create_discovery'] = discovered\n", "for i, data in enumerate(request.session['create_discovery']):\n", "discovered = self.discover()\n", "request.session['create_discovery_meta'] = [x.meta for x in discovered]\n", "item = DiscoveryResult(data)\n", "return discovered\n", "if not discovered:\n", "return discovered\n", "item.meta = request.session['create_discovery_meta'][i]\n", "discovered = self.discover(eager=True)\n", "discovered.append(item)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Return'", "Condition", "Return'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_171(VAR_14):...\n", "VAR_415 = 'padding:0 0.2em;line-height:%.2fem;font-size:%.2fem'\n", "VAR_416 = 1.5 * (VAR_14 - VAR_13) / max(VAR_9 - VAR_13, 1) + 1.3\n", "return VAR_415 % (1.3, VAR_416)\n" ]
[ "def style(c):...\n", "STYLE = 'padding:0 0.2em;line-height:%.2fem;font-size:%.2fem'\n", "size = 1.5 * (c - b) / max(a - b, 1) + 1.3\n", "return STYLE % (1.3, size)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_3(VAR_10, VAR_11=None):...\n", "\"\"\"docstring\"\"\"\n", "from frappe.translate import get_user_lang\n", "VAR_1.lang = get_user_lang(VAR_10)\n" ]
[ "def set_user_lang(user, user_language=None):...\n", "\"\"\"docstring\"\"\"\n", "from frappe.translate import get_user_lang\n", "local.lang = get_user_lang(user)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "ImportFrom'", "Assign'" ]
[ "def FUNC_0(self, VAR_4, VAR_5):...\n", "VAR_6 = self.setup_test_homeserver('red', http_client=None,\n federation_client=Mock())\n", "self.event_source = VAR_6.get_event_sources().sources['typing']\n", "VAR_6.get_federation_handler = Mock()\n", "async def FUNC_5(VAR_7=None, VAR_8=False):...\n", "return {'user': UserID.from_string(self.auth_user_id), 'token_id': 1,\n 'is_guest': False}\n" ]
[ "def make_homeserver(self, reactor, clock):...\n", "hs = self.setup_test_homeserver('red', http_client=None, federation_client=\n Mock())\n", "self.event_source = hs.get_event_sources().sources['typing']\n", "hs.get_federation_handler = Mock()\n", "async def get_user_by_access_token(token=None, allow_guest=False):...\n", "return {'user': UserID.from_string(self.auth_user_id), 'token_id': 1,\n 'is_guest': False}\n" ]
[ 0, 4, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "AsyncFunctionDef'", "Return'" ]
[ "def FUNC_5(VAR_2: Directive, VAR_9: str, VAR_10: str) ->str:...\n", "\"\"\"docstring\"\"\"\n", "VAR_8 = file.readlines()\n", "VAR_22 = VAR_2.meta['lineno'] - 1\n", "VAR_20 = FUNC_3(VAR_8, VAR_22)\n", "VAR_21 = ''.join(VAR_20).rstrip('\\n')\n", "if FUNC_0(VAR_21) != VAR_10:\n", "VAR_8 = VAR_8[:VAR_22] + [VAR_9 + '\\n'] + VAR_8[VAR_22 + len(VAR_20):]\n", "file.writelines(VAR_8)\n", "return FUNC_0(VAR_9)\n" ]
[ "def save_entry_slice(entry: Directive, source_slice: str, sha256sum: str...\n", "\"\"\"docstring\"\"\"\n", "lines = file.readlines()\n", "first_entry_line = entry.meta['lineno'] - 1\n", "entry_lines = find_entry_lines(lines, first_entry_line)\n", "entry_source = ''.join(entry_lines).rstrip('\\n')\n", "if sha256_str(entry_source) != sha256sum:\n", "lines = lines[:first_entry_line] + [source_slice + '\\n'] + lines[\n first_entry_line + len(entry_lines):]\n", "file.writelines(lines)\n", "return sha256_str(source_slice)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_34(self, VAR_44):...\n", "VAR_13 = FUNC_1(VAR_44.group(1))\n", "if VAR_13 not in self.links:\n", "return None\n", "VAR_84 = self.links[VAR_13]\n", "return self._process_link(VAR_44, VAR_84['link'], VAR_84['title'])\n" ]
[ "def output_nolink(self, m):...\n", "key = _keyify(m.group(1))\n", "if key not in self.links:\n", "return None\n", "ret = self.links[key]\n", "return self._process_link(m, ret['link'], ret['title'])\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Assign'", "Return'" ]
[ "@staticmethod...\n", "if not VAR_1.match(VAR_13):\n" ]
[ "@staticmethod...\n", "if not SHA1_PATTERN.match(sha1):\n" ]
[ 0, 0 ]
[ "Condition", "Condition" ]
[ "def FUNC_3(self, VAR_3, VAR_4, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "for basket_command_middleware in get_provide_objects(\n", "if not issubclass(basket_command_middleware, BaseBasketCommandMiddleware):\n", "return VAR_5\n", "VAR_5 = dict(basket_command_middleware().postprocess_response(VAR_2=self.\n basket, VAR_1=self.request, VAR_3=command, VAR_4=kwargs, VAR_5=response))\n" ]
[ "def postprocess_response(self, command, kwargs, response):...\n", "\"\"\"docstring\"\"\"\n", "for basket_command_middleware in get_provide_objects(\n", "if not issubclass(basket_command_middleware, BaseBasketCommandMiddleware):\n", "return response\n", "response = dict(basket_command_middleware().postprocess_response(basket=\n self.basket, request=self.request, command=command, kwargs=kwargs,\n response=response))\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "For", "Condition", "Return'", "Assign'" ]
[ "def FUNC_20(VAR_66: DecoratedCallable) ->DecoratedCallable:...\n", "self.add_api_websocket_route(VAR_26, VAR_66, VAR_28=name)\n", "return VAR_66\n" ]
[ "def decorator(func: DecoratedCallable) ->DecoratedCallable:...\n", "self.add_api_websocket_route(path, func, name=name)\n", "return func\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Return'" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "VAR_18: Dict[str, str] = {}\n", "for i, value in enumerate(VAR_6):\n", "if value[0].isalpha():\n", "return VAR_18\n", "VAR_19 = value.upper()\n", "VAR_19 = f'VALUE_{i}'\n", "if VAR_19 in VAR_18:\n", "VAR_18[VAR_19] = value\n" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "output: Dict[str, str] = {}\n", "for i, value in enumerate(values):\n", "if value[0].isalpha():\n", "return output\n", "key = value.upper()\n", "key = f'VALUE_{i}'\n", "if key in output:\n", "output[key] = value\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 5 ]
[ "Condition", "Docstring", "AnnAssign'", "For", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Assign'" ]
[ "@wraps(VAR_28)...\n", "VAR_1.debug('jsonp')\n", "VAR_19 = VAR_9.get('server_id', None)\n", "VAR_315 = 500\n", "if VAR_19 is None and VAR_2.session.get('connector'):\n", "if isinstance(ex, omero.SecurityViolation):\n", "VAR_19 = VAR_2.session['connector'].server_id\n", "VAR_9['server_id'] = VAR_19\n", "VAR_315 = 403\n", "if isinstance(ex, omero.ApiUsageException):\n", "VAR_54 = VAR_28(VAR_2, *VAR_116, **kwargs)\n", "VAR_316 = traceback.format_exc()\n", "VAR_315 = 400\n", "if VAR_9.get('_raw', False):\n", "VAR_1.debug(VAR_316)\n", "return VAR_54\n", "if isinstance(VAR_54, HttpResponse):\n", "if VAR_9.get('_raw', False) or VAR_9.get('_internal', False):\n", "return VAR_54\n", "VAR_203 = VAR_2.GET.get('callback', None)\n", "return JsonResponse({'message': str(ex), 'stacktrace': VAR_316}, VAR_315=status\n )\n", "if VAR_203 is not None and not VAR_9.get('_internal', False):\n", "VAR_54 = json.dumps(VAR_54)\n", "if VAR_9.get('_internal', False):\n", "VAR_54 = '%s(%s)' % (VAR_203, VAR_54)\n", "return VAR_54\n", "VAR_281 = type(VAR_54) is dict\n", "return HttpJavascriptResponse(VAR_54)\n", "return JsonResponse(VAR_54, VAR_281=safe)\n" ]
[ "@wraps(f)...\n", "logger.debug('jsonp')\n", "server_id = kwargs.get('server_id', None)\n", "status = 500\n", "if server_id is None and request.session.get('connector'):\n", "if isinstance(ex, omero.SecurityViolation):\n", "server_id = request.session['connector'].server_id\n", "kwargs['server_id'] = server_id\n", "status = 403\n", "if isinstance(ex, omero.ApiUsageException):\n", "rv = f(request, *args, **kwargs)\n", "trace = traceback.format_exc()\n", "status = 400\n", "if kwargs.get('_raw', False):\n", "logger.debug(trace)\n", "return rv\n", "if isinstance(rv, HttpResponse):\n", "if kwargs.get('_raw', False) or kwargs.get('_internal', False):\n", "return rv\n", "c = request.GET.get('callback', None)\n", "return JsonResponse({'message': str(ex), 'stacktrace': trace}, status=status)\n", "if c is not None and not kwargs.get('_internal', False):\n", "rv = json.dumps(rv)\n", "if kwargs.get('_internal', False):\n", "rv = '%s(%s)' % (c, rv)\n", "return rv\n", "safe = type(rv) is dict\n", "return HttpJavascriptResponse(rv)\n", "return JsonResponse(rv, safe=safe)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Return'", "Condition", "Condition", "Return'", "Assign'", "Return'", "Condition", "Assign'", "Condition", "Assign'", "Return'", "Assign'", "Return'", "Return'" ]
[ "def FUNC_42(VAR_2, VAR_5=None):...\n", "\"\"\"docstring\"\"\"\n", "VAR_71 = VAR_2.GET or VAR_2.POST\n", "VAR_106 = len(VAR_71.getlist('image')) > 0 and list(VAR_5.getObjects(\n 'Image', VAR_71.getlist('image'))) or list()\n", "VAR_107 = len(VAR_71.getlist('dataset')) > 0 and list(VAR_5.getObjects(\n 'Dataset', VAR_71.getlist('dataset'))) or list()\n", "VAR_108 = len(VAR_71.getlist('project')) > 0 and list(VAR_5.getObjects(\n 'Project', VAR_71.getlist('project'))) or list()\n", "VAR_109 = len(VAR_71.getlist('screen')) > 0 and list(VAR_5.getObjects(\n 'Screen', VAR_71.getlist('screen'))) or list()\n", "VAR_110 = len(VAR_71.getlist('plate')) > 0 and list(VAR_5.getObjects(\n 'Plate', VAR_71.getlist('plate'))) or list()\n", "VAR_111 = len(VAR_71.getlist('acquisition')) > 0 and list(VAR_5.getObjects(\n 'PlateAcquisition', VAR_71.getlist('acquisition'))) or list()\n", "VAR_112 = len(VAR_71.getlist('share')) > 0 and [VAR_5.getShare(VAR_71.\n getlist('share')[0])] or list()\n", "VAR_113 = len(VAR_71.getlist('well')) > 0 and list(VAR_5.getObjects('Well',\n VAR_71.getlist('well'))) or list()\n", "return {'image': VAR_106, 'dataset': VAR_107, 'project': VAR_108, 'screen':\n VAR_109, 'plate': VAR_110, 'acquisition': VAR_111, 'well': VAR_113,\n 'share': VAR_112}\n" ]
[ "def getObjects(request, conn=None):...\n", "\"\"\"docstring\"\"\"\n", "r = request.GET or request.POST\n", "images = len(r.getlist('image')) > 0 and list(conn.getObjects('Image', r.\n getlist('image'))) or list()\n", "datasets = len(r.getlist('dataset')) > 0 and list(conn.getObjects('Dataset',\n r.getlist('dataset'))) or list()\n", "projects = len(r.getlist('project')) > 0 and list(conn.getObjects('Project',\n r.getlist('project'))) or list()\n", "screens = len(r.getlist('screen')) > 0 and list(conn.getObjects('Screen', r\n .getlist('screen'))) or list()\n", "plates = len(r.getlist('plate')) > 0 and list(conn.getObjects('Plate', r.\n getlist('plate'))) or list()\n", "acquisitions = len(r.getlist('acquisition')) > 0 and list(conn.getObjects(\n 'PlateAcquisition', r.getlist('acquisition'))) or list()\n", "shares = len(r.getlist('share')) > 0 and [conn.getShare(r.getlist('share')[0])\n ] or list()\n", "wells = len(r.getlist('well')) > 0 and list(conn.getObjects('Well', r.\n getlist('well'))) or list()\n", "return {'image': images, 'dataset': datasets, 'project': projects, 'screen':\n screens, 'plate': plates, 'acquisition': acquisitions, 'well': wells,\n 'share': shares}\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_13(VAR_2, VAR_6, VAR_19=None, VAR_8=None, VAR_20=False, VAR_21=True):...\n", "\"\"\"docstring\"\"\"\n", "VAR_53 = VAR_2.GET\n", "VAR_1.debug('Preparing Image:%r saveDefs=%r retry=%r request=%r conn=%s' %\n (VAR_6, VAR_20, VAR_21, VAR_53, str(VAR_8)))\n", "VAR_92 = VAR_8.getObject('Image', VAR_6)\n", "if VAR_92 is None:\n", "return\n", "VAR_107 = None\n", "if 'maps' in VAR_53:\n", "VAR_197 = FUNC_12(VAR_53, 'reverse', VAR_92.getSizeC())\n", "if 'c' in VAR_53:\n", "VAR_107 = FUNC_12(VAR_53, 'inverted', VAR_92.getSizeC())\n", "VAR_1.debug('c=' + VAR_53['c'])\n", "if VAR_53.get('m', None) == 'g':\n", "if VAR_197 is not None and VAR_107 is not None:\n", "VAR_198, VAR_51, VAR_52 = FUNC_2(VAR_53['c'])\n", "VAR_92.setGreyscaleRenderingModel()\n", "if VAR_53.get('m', None) == 'c':\n", "VAR_107 = [(VAR_22[0] if VAR_22[0] is not None else VAR_22[1]) for VAR_22 in\n zip(VAR_107, VAR_197)]\n", "VAR_268 = [VAR_282.get('quantization') for VAR_282 in json.loads(VAR_53[\n 'maps'])]\n", "VAR_1.debug('Failed to set quantization maps')\n", "VAR_199 = range(1, VAR_92.getSizeC() + 1)\n", "VAR_108 = VAR_53.get('p', None)\n", "VAR_92.setColorRenderingModel()\n", "VAR_92.setQuantizationMaps(VAR_268)\n", "if VAR_20 and not VAR_92.setActiveChannels(VAR_199, VAR_51, VAR_52, VAR_107):\n", "VAR_109, VAR_110 = None, None\n", "VAR_1.debug('Something bad happened while setting the active channels...')\n", "if not VAR_92.setActiveChannels(VAR_198, VAR_51, VAR_52, VAR_107):\n", "if VAR_108 is not None and len(VAR_108.split('|')) > 1:\n", "VAR_1.debug('Something bad happened while setting the active channels...')\n", "VAR_108, VAR_200 = VAR_108.split('|', 1)\n", "VAR_92.setProjection(VAR_108)\n", "VAR_109, VAR_110 = [int(VAR_3) for VAR_3 in VAR_200.split(':')]\n", "VAR_92.setProjectionRange(VAR_109, VAR_110)\n", "VAR_92.setInvertedAxis(bool(VAR_53.get('ia', '0') == '1'))\n", "VAR_16 = VAR_53.get('q', None)\n", "if VAR_20:\n", "'z' in VAR_53 and VAR_92.setDefaultZ(VAR_178(VAR_53['z']) - 1)\n", "return VAR_92, VAR_16\n", "'t' in VAR_53 and VAR_92.setDefaultT(VAR_178(VAR_53['t']) - 1)\n", "VAR_92.saveDefaults()\n" ]
[ "def _get_prepared_image(request, iid, server_id=None, conn=None, saveDefs=...\n", "\"\"\"docstring\"\"\"\n", "r = request.GET\n", "logger.debug('Preparing Image:%r saveDefs=%r retry=%r request=%r conn=%s' %\n (iid, saveDefs, retry, r, str(conn)))\n", "img = conn.getObject('Image', iid)\n", "if img is None:\n", "return\n", "invert_flags = None\n", "if 'maps' in r:\n", "reverses = _get_maps_enabled(r, 'reverse', img.getSizeC())\n", "if 'c' in r:\n", "invert_flags = _get_maps_enabled(r, 'inverted', img.getSizeC())\n", "logger.debug('c=' + r['c'])\n", "if r.get('m', None) == 'g':\n", "if reverses is not None and invert_flags is not None:\n", "activechannels, windows, colors = _split_channel_info(r['c'])\n", "img.setGreyscaleRenderingModel()\n", "if r.get('m', None) == 'c':\n", "invert_flags = [(z[0] if z[0] is not None else z[1]) for z in zip(\n invert_flags, reverses)]\n", "qm = [m.get('quantization') for m in json.loads(r['maps'])]\n", "logger.debug('Failed to set quantization maps')\n", "allchannels = range(1, img.getSizeC() + 1)\n", "p = r.get('p', None)\n", "img.setColorRenderingModel()\n", "img.setQuantizationMaps(qm)\n", "if saveDefs and not img.setActiveChannels(allchannels, windows, colors,\n", "pStart, pEnd = None, None\n", "logger.debug('Something bad happened while setting the active channels...')\n", "if not img.setActiveChannels(activechannels, windows, colors, invert_flags):\n", "if p is not None and len(p.split('|')) > 1:\n", "logger.debug('Something bad happened while setting the active channels...')\n", "p, startEnd = p.split('|', 1)\n", "img.setProjection(p)\n", "pStart, pEnd = [int(s) for s in startEnd.split(':')]\n", "img.setProjectionRange(pStart, pEnd)\n", "img.setInvertedAxis(bool(r.get('ia', '0') == '1'))\n", "compress_quality = r.get('q', None)\n", "if saveDefs:\n", "'z' in r and img.setDefaultZ(long(r['z']) - 1)\n", "return img, compress_quality\n", "'t' in r and img.setDefaultT(long(r['t']) - 1)\n", "img.saveDefaults()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Condition", "Assign'", "Expr'", "Condition", "Condition", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Condition", "Expr'", "Return'", "Expr'", "Expr'" ]
[ "def FUNC_13(self):...\n", "VAR_5 = self.get_counts('json', period='30days')\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertJSONEqual(VAR_5.content.decode(), VAR_0)\n" ]
[ "def test_counts_view_30days(self):...\n", "response = self.get_counts('json', period='30days')\n", "self.assertEqual(response.status_code, 200)\n", "self.assertJSONEqual(response.content.decode(), COUNTS_DATA)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'" ]
[ "def __init__(self, *VAR_3, **VAR_4):...\n", "super().__init__(**kwargs)\n", "self.model_id = VAR_4.get('model_id')\n" ]
[ "def __init__(self, *args, **kwargs):...\n", "super().__init__(**kwargs)\n", "self.model_id = kwargs.get('model_id')\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'" ]
[ "def FUNC_26(VAR_21):...\n", "\"\"\"docstring\"\"\"\n", "VAR_54 = 'string'\n", "VAR_55 = VAR_21.add_parser('convert', description=convert_msg,\n formatter_class=argparse.RawTextHelpFormatter)\n", "VAR_55.add_argument('--dir', type=str, required=True, help=\n 'directory containing the SavedModel to convert')\n", "VAR_55.add_argument('--output_dir', type=str, required=True, help=\n 'output directory for the converted SavedModel')\n", "VAR_55.add_argument('--tag_set', type=str, required=True, help=\n \"tag-set of graph in SavedModel to convert, separated by ','\")\n", "VAR_56 = VAR_55.add_subparsers(title='conversion methods', description=\n 'valid conversion methods', help=\n 'the conversion to run with the SavedModel')\n", "VAR_57 = VAR_56.add_parser('tensorrt', description=\n 'Convert the SavedModel with Tensorflow-TensorRT integration',\n formatter_class=argparse.RawTextHelpFormatter)\n", "VAR_57.add_argument('--max_workspace_size_bytes', type=int, default=2 << 20,\n help=\n 'the maximum GPU temporary memory which the TRT engine can use at execution time'\n )\n", "VAR_57.add_argument('--precision_mode', type=str, default='FP32', help=\n 'one of FP32, FP16 and INT8')\n", "VAR_57.add_argument('--minimum_segment_size', type=int, default=3, help=\n 'the minimum number of nodes required for a subgraph to be replacedin a TensorRT node'\n )\n", "VAR_57.add_argument('--convert_tf1_model', type=bool, default=False, help=\n 'support TRT conversion for TF1 models')\n", "VAR_57.set_defaults(func=convert_with_tensorrt)\n" ]
[ "def add_convert_subparser(subparsers):...\n", "\"\"\"docstring\"\"\"\n", "convert_msg = \"\"\"Usage example:\nTo convert the SavedModel to one that have TensorRT ops:\n$saved_model_cli convert \\\\\n --dir /tmp/saved_model \\\\\n --tag_set serve \\\\\n --output_dir /tmp/saved_model_trt \\\\\n tensorrt \n\"\"\"\n", "parser_convert = subparsers.add_parser('convert', description=convert_msg,\n formatter_class=argparse.RawTextHelpFormatter)\n", "parser_convert.add_argument('--dir', type=str, required=True, help=\n 'directory containing the SavedModel to convert')\n", "parser_convert.add_argument('--output_dir', type=str, required=True, help=\n 'output directory for the converted SavedModel')\n", "parser_convert.add_argument('--tag_set', type=str, required=True, help=\n \"tag-set of graph in SavedModel to convert, separated by ','\")\n", "convert_subparsers = parser_convert.add_subparsers(title=\n 'conversion methods', description='valid conversion methods', help=\n 'the conversion to run with the SavedModel')\n", "parser_convert_with_tensorrt = convert_subparsers.add_parser('tensorrt',\n description=\n 'Convert the SavedModel with Tensorflow-TensorRT integration',\n formatter_class=argparse.RawTextHelpFormatter)\n", "parser_convert_with_tensorrt.add_argument('--max_workspace_size_bytes',\n type=int, default=2 << 20, help=\n 'the maximum GPU temporary memory which the TRT engine can use at execution time'\n )\n", "parser_convert_with_tensorrt.add_argument('--precision_mode', type=str,\n default='FP32', help='one of FP32, FP16 and INT8')\n", "parser_convert_with_tensorrt.add_argument('--minimum_segment_size', type=\n int, default=3, help=\n 'the minimum number of nodes required for a subgraph to be replacedin a TensorRT node'\n )\n", "parser_convert_with_tensorrt.add_argument('--convert_tf1_model', type=bool,\n default=False, help='support TRT conversion for TF1 models')\n", "parser_convert_with_tensorrt.set_defaults(func=convert_with_tensorrt)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_8(self, VAR_5):...\n", "VAR_8 = self.service.index(VAR_5, sort_dir='asc')\n", "return self._translate_uuids_to_ids(VAR_5, VAR_8)\n" ]
[ "def index(self, context):...\n", "images = self.service.index(context, sort_dir='asc')\n", "return self._translate_uuids_to_ids(context, images)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_4(self, VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = self.op_mapper[VAR_3.op.__class__]\n", "self.ops.append([])\n", "for VAR_10 in VAR_3.values:\n", "self.visit(VAR_10)\n", "VAR_7 = self.ops.pop()\n", "if self.ops:\n", "self.ops[-1].append({VAR_6: VAR_7})\n", "self.mongo_query[VAR_6] = VAR_7\n" ]
[ "def visit_BoolOp(self, node):...\n", "\"\"\"docstring\"\"\"\n", "op = self.op_mapper[node.op.__class__]\n", "self.ops.append([])\n", "for value in node.values:\n", "self.visit(value)\n", "c = self.ops.pop()\n", "if self.ops:\n", "self.ops[-1].append({op: c})\n", "self.mongo_query[op] = c\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "For", "Expr'", "Assign'", "Condition", "Expr'", "Assign'" ]
[ "def FUNC_168(*VAR_9, **VAR_13):...\n", "VAR_27 = None\n", "VAR_27 = VAR_10(*VAR_9, **b)\n", "thread.start_new_thread(VAR_26, (VAR_27,))\n", "return VAR_27\n" ]
[ "def __completion(*a, **b):...\n", "d = None\n", "d = f(*a, **b)\n", "thread.start_new_thread(callback, (d,))\n", "return d\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_12(self, VAR_5, VAR_6):...\n", "VAR_7 = self.get_image_uuid(VAR_5, VAR_6)\n", "return self.get(self, VAR_5, VAR_7)\n" ]
[ "def get(self, context, image_id):...\n", "image_uuid = self.get_image_uuid(context, image_id)\n", "return self.get(self, context, image_uuid)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def __init__(self):...\n", "self.id = 178\n" ]
[ "def __init__(self):...\n", "self.id = 178\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def FUNC_47(VAR_136, VAR_38):...\n", "" ]
[ "def f(x, k):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_82(self):...\n", "VAR_14 = self.token['text']\n", "VAR_82 = self.token['tag']\n", "if self._parse_block_html and VAR_82 not in VAR_7:\n", "VAR_14 = self.inline(VAR_14, VAR_43=self.inline.inline_html_rules)\n", "VAR_93 = self.token.get('extra') or ''\n", "VAR_64 = '<%s%s>%s</%s>' % (VAR_82, VAR_93, VAR_14, VAR_82)\n", "return self.renderer.block_html(VAR_64)\n" ]
[ "def output_open_html(self):...\n", "text = self.token['text']\n", "tag = self.token['tag']\n", "if self._parse_block_html and tag not in _pre_tags:\n", "text = self.inline(text, rules=self.inline.inline_html_rules)\n", "extra = self.token.get('extra') or ''\n", "html = '<%s%s>%s</%s>' % (tag, extra, text, tag)\n", "return self.renderer.block_html(html)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "@defer.inlineCallbacks...\n", "VAR_11 = self.mock_handler.get_displayname\n", "VAR_11.return_value = defer.succeed('Bob')\n", "VAR_12, VAR_13 = yield self.mock_resource.trigger('GET', \n '/profile/%s/displayname' % '@opaque:elsewhere', None)\n", "self.assertEquals(200, VAR_12)\n", "self.assertEquals({'displayname': 'Bob'}, VAR_13)\n" ]
[ "@defer.inlineCallbacks...\n", "mocked_get = self.mock_handler.get_displayname\n", "mocked_get.return_value = defer.succeed('Bob')\n", "code, response = yield self.mock_resource.trigger('GET', \n '/profile/%s/displayname' % '@opaque:elsewhere', None)\n", "self.assertEquals(200, code)\n", "self.assertEquals({'displayname': 'Bob'}, response)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "@wraps(VAR_4)...\n", "if VAR_87.role_viewer():\n", "return VAR_4(*VAR_49, **kwargs)\n", "abort(403)\n" ]
[ "@wraps(f)...\n", "if current_user.role_viewer():\n", "return f(*args, **kwargs)\n", "abort(403)\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Expr'" ]
[ "def FUNC_40(self):...\n", "VAR_14 = self._makeOne()\n", "VAR_15 = VAR_14.createErrorInfo(AttributeError('nonesuch'), (12, 3))\n", "self.assertTrue(VAR_15.type is AttributeError)\n", "self.assertEqual(VAR_15.__allow_access_to_unprotected_subobjects__, 1)\n" ]
[ "def test_createErrorInfo_returns_unrestricted_object(self):...\n", "context = self._makeOne()\n", "info = context.createErrorInfo(AttributeError('nonesuch'), (12, 3))\n", "self.assertTrue(info.type is AttributeError)\n", "self.assertEqual(info.__allow_access_to_unprotected_subobjects__, 1)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_38(VAR_16, VAR_17, VAR_18=VAR_119):...\n", "return config.set_from_dictionary(VAR_16, VAR_17, VAR_18)\n" ]
[ "def _config_int(to_save, x, func=int):...\n", "return config.set_from_dictionary(to_save, x, func)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_24(self, VAR_22):...\n", "if not VAR_22.strip():\n", "VAR_102 = '[]'\n", "VAR_114 = self.model.search(VAR_22.strip())\n", "return VAR_102\n", "VAR_102 = [entry.to_dict() for entry in VAR_114]\n" ]
[ "def api_search(self, searchstring):...\n", "if not searchstring.strip():\n", "jsonresults = '[]'\n", "searchresults = self.model.search(searchstring.strip())\n", "return jsonresults\n", "jsonresults = [entry.to_dict() for entry in searchresults]\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Assign'", "Return'", "Assign'" ]
[ "def FUNC_152(VAR_213):...\n", "if VAR_43:\n", "VAR_213 = FUNC_151(VAR_213)\n", "return VAR_213\n" ]
[ "def encoded_or_raw(text):...\n", "if raw:\n", "text = encode_header(text)\n", "return text\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_38(self, VAR_33='created', VAR_34=''):...\n", "VAR_77 = self.playlistdb.showPlaylists(self.getUserId(), VAR_34)\n", "VAR_78 = int(time.time())\n", "VAR_79 = False\n", "for VAR_110 in VAR_77:\n", "VAR_110['username'] = self.userdb.getNameById(VAR_110['userid'])\n", "if VAR_33[0] == '-':\n", "VAR_110['type'] = 'playlist'\n", "VAR_79 = True\n", "if not VAR_33 in ('username', 'age', 'title', 'default'):\n", "VAR_110['age'] = VAR_78 - VAR_110['created']\n", "VAR_33 = VAR_33[1:]\n", "VAR_33 = 'created'\n", "if VAR_33 == 'default':\n", "VAR_33 = 'age'\n", "VAR_77 = sorted(VAR_77, key=lambda x: x[sortby], reverse=is_reverse)\n", "VAR_79 = False\n", "return VAR_77\n" ]
[ "def api_showplaylists(self, sortby='created', filterby=''):...\n", "playlists = self.playlistdb.showPlaylists(self.getUserId(), filterby)\n", "curr_time = int(time.time())\n", "is_reverse = False\n", "for pl in playlists:\n", "pl['username'] = self.userdb.getNameById(pl['userid'])\n", "if sortby[0] == '-':\n", "pl['type'] = 'playlist'\n", "is_reverse = True\n", "if not sortby in ('username', 'age', 'title', 'default'):\n", "pl['age'] = curr_time - pl['created']\n", "sortby = sortby[1:]\n", "sortby = 'created'\n", "if sortby == 'default':\n", "sortby = 'age'\n", "playlists = sorted(playlists, key=lambda x: x[sortby], reverse=is_reverse)\n", "is_reverse = False\n", "return playlists\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def __call__(self):...\n", "VAR_11 = VAR_263.request.args\n", "if len(VAR_11) < 1:\n", "if VAR_11[0] == 'tables':\n", "VAR_153 = self.db[VAR_11(1)]\n", "return self.tables()\n", "if len(VAR_11) > 1 and not VAR_11(1) in self.db.tables:\n", "if VAR_11[0] == 'create':\n", "return self.create(VAR_153)\n", "if VAR_11[0] == 'select':\n", "return self.select(VAR_153, linkto=self.url(args='read'))\n", "if VAR_11[0] == 'search':\n", "VAR_7, VAR_289 = self.search(VAR_153, linkto=self.url(args='read'))\n", "if VAR_11[0] == 'read':\n", "return DIV(VAR_7, SQLTABLE(VAR_289))\n", "return self.read(VAR_153, VAR_11(2))\n", "if VAR_11[0] == 'update':\n", "return self.update(VAR_153, VAR_11(2))\n", "if VAR_11[0] == 'delete':\n", "return self.delete(VAR_153, VAR_11(2))\n" ]
[ "def __call__(self):...\n", "args = current.request.args\n", "if len(args) < 1:\n", "if args[0] == 'tables':\n", "table = self.db[args(1)]\n", "return self.tables()\n", "if len(args) > 1 and not args(1) in self.db.tables:\n", "if args[0] == 'create':\n", "return self.create(table)\n", "if args[0] == 'select':\n", "return self.select(table, linkto=self.url(args='read'))\n", "if args[0] == 'search':\n", "form, rows = self.search(table, linkto=self.url(args='read'))\n", "if args[0] == 'read':\n", "return DIV(form, SQLTABLE(rows))\n", "return self.read(table, args(2))\n", "if args[0] == 'update':\n", "return self.update(table, args(2))\n", "if args[0] == 'delete':\n", "return self.delete(table, args(2))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Condition", "Assign'", "Return'", "Condition", "Condition", "Return'", "Condition", "Return'", "Condition", "Assign'", "Condition", "Return'", "Return'", "Condition", "Return'", "Condition", "Return'" ]
[ "def FUNC_158(VAR_7, VAR_135=VAR_135):...\n", "if VAR_135 is not VAR_3:\n", "VAR_135(VAR_7)\n", "return FUNC_157(VAR_262=True)\n" ]
[ "def cas_onaccept(form, onaccept=onaccept):...\n", "if onaccept is not DEFAULT:\n", "onaccept(form)\n", "return allow_access(interactivelogin=True)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Return'" ]
[ "def FUNC_27(self, VAR_90=None):...\n", "if not self.jwt_handler:\n", "return self.jwt_handler.allows_jwt(VAR_90=otherwise)\n" ]
[ "def allows_jwt(self, otherwise=None):...\n", "if not self.jwt_handler:\n", "return self.jwt_handler.allows_jwt(otherwise=otherwise)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'" ]
[ "def FUNC_25(self):...\n", "assert _validate_redirect_url(None) is False\n", "assert _validate_redirect_url(' ') is False\n" ]
[ "def test_it_fails_on_garbage(self):...\n", "assert _validate_redirect_url(None) is False\n", "assert _validate_redirect_url(' ') is False\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assert'", "Assert'" ]
[ "@log_function...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = FUNC_1('/groups/%s/rooms', VAR_30)\n", "return self.client.get_json(VAR_5=destination, VAR_2=path, VAR_3={\n 'requester_user_id': requester_user_id}, VAR_15=True)\n" ]
[ "@log_function...\n", "\"\"\"docstring\"\"\"\n", "path = _create_v1_path('/groups/%s/rooms', group_id)\n", "return self.client.get_json(destination=destination, path=path, args={\n 'requester_user_id': requester_user_id}, ignore_backoff=True)\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_14(VAR_0, VAR_1: FlaskClient):...\n", "\"\"\"docstring\"\"\"\n", "VAR_13 = 'new_halcyon'\n", "VAR_14 = 'password2'\n", "VAR_8 = VAR_1.post('/user/edit', data={'username': new_user, 'password':\n new_pass}, follow_redirects=True)\n", "assert request.path == '/'\n", "VAR_1.delete('/logout')\n", "VAR_8 = VAR_1.post('/login', data={'username': new_user, 'password':\n new_pass}, follow_redirects=True)\n", "assert VAR_8.status_code == 200\n", "assert request.path == '/'\n" ]
[ "def test_edit_user(test_app, client: FlaskClient):...\n", "\"\"\"docstring\"\"\"\n", "new_user = 'new_halcyon'\n", "new_pass = 'password2'\n", "resp = client.post('/user/edit', data={'username': new_user, 'password':\n new_pass}, follow_redirects=True)\n", "assert request.path == '/'\n", "client.delete('/logout')\n", "resp = client.post('/login', data={'username': new_user, 'password':\n new_pass}, follow_redirects=True)\n", "assert resp.status_code == 200\n", "assert request.path == '/'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assert'", "Expr'", "Assign'", "Assert'", "Assert'" ]
[ "def __init__(self, VAR_3=None):...\n", "VAR_28 = {'class': 'vTextField'}\n", "if VAR_3 is not None:\n", "VAR_28.update(VAR_3)\n", "super(CLASS_12, self).__init__(VAR_3=final_attrs)\n" ]
[ "def __init__(self, attrs=None):...\n", "final_attrs = {'class': 'vTextField'}\n", "if attrs is not None:\n", "final_attrs.update(attrs)\n", "super(AdminEmailInputWidget, self).__init__(attrs=final_attrs)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_80(VAR_9, VAR_44, VAR_45):...\n", "\"\"\"docstring\"\"\"\n", "if len(CertificateInvalidation.get_certificate_invalidations(VAR_44.\n", "if not VAR_44.is_valid():\n", "VAR_46, VAR_72 = CertificateInvalidation.objects.update_or_create(VAR_44=\n generated_certificate, defaults={'invalidated_by': request.user,\n 'notes': certificate_invalidation_data.get('notes', ''), 'active': True})\n", "VAR_44.invalidate()\n", "return {'id': VAR_46.id, 'user': VAR_46.generated_certificate.user.username,\n 'invalidated_by': VAR_46.invalidated_by.username, 'created': VAR_46.\n created.strftime('%B %d, %Y'), 'notes': VAR_46.notes}\n" ]
[ "def invalidate_certificate(request, generated_certificate,...\n", "\"\"\"docstring\"\"\"\n", "if len(CertificateInvalidation.get_certificate_invalidations(\n", "if not generated_certificate.is_valid():\n", "certificate_invalidation, __ = (CertificateInvalidation.objects.\n update_or_create(generated_certificate=generated_certificate, defaults=\n {'invalidated_by': request.user, 'notes': certificate_invalidation_data\n .get('notes', ''), 'active': True}))\n", "generated_certificate.invalidate()\n", "return {'id': certificate_invalidation.id, 'user': certificate_invalidation\n .generated_certificate.user.username, 'invalidated_by':\n certificate_invalidation.invalidated_by.username, 'created':\n certificate_invalidation.created.strftime('%B %d, %Y'), 'notes':\n certificate_invalidation.notes}\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Condition", "Assign'", "Expr'", "Return'" ]
[ "async def FUNC_10(self, VAR_16, VAR_20):...\n", "\"\"\"docstring\"\"\"\n", "VAR_37 = VAR_20.server_name\n", "VAR_0.info('Requesting keys %s from notary server %s', VAR_16.items(), VAR_37)\n", "VAR_50 = await self.client.post_json(destination=perspective_name, path=\n '/_matrix/key/v2/query', data={'server_keys': {server_name: {key_id: {\n 'minimum_valid_until_ts': min_valid_ts} for key_id, min_valid_ts in\n server_keys.items()} for server_name, server_keys in keys_to_fetch.\n items()}})\n", "VAR_31 = {}\n", "VAR_38 = []\n", "VAR_39 = self.clock.time_msec()\n", "for VAR_21 in VAR_50['server_keys']:\n", "VAR_2 = VAR_21.get('server_name')\n", "await self.store.store_server_verify_keys(VAR_37, VAR_39, VAR_38)\n", "if not isinstance(VAR_2, str):\n", "return VAR_31\n", "self._validate_perspectives_response(VAR_20, VAR_21)\n", "VAR_0.warning(\n 'Error processing response from key notary server %s for origin server %s: %s'\n , VAR_37, VAR_2, e)\n", "VAR_38.extend((VAR_2, VAR_43, VAR_49) for VAR_43, VAR_49 in VAR_59.items())\n", "VAR_59 = await self.process_v2_response(VAR_37, VAR_21, VAR_19=time_now_ms)\n", "VAR_31.setdefault(VAR_2, {}).update(VAR_59)\n" ]
[ "async def get_server_verify_key_v2_indirect(self, keys_to_fetch, key_server):...\n", "\"\"\"docstring\"\"\"\n", "perspective_name = key_server.server_name\n", "logger.info('Requesting keys %s from notary server %s', keys_to_fetch.items\n (), perspective_name)\n", "query_response = await self.client.post_json(destination=perspective_name,\n path='/_matrix/key/v2/query', data={'server_keys': {server_name: {\n key_id: {'minimum_valid_until_ts': min_valid_ts} for key_id,\n min_valid_ts in server_keys.items()} for server_name, server_keys in\n keys_to_fetch.items()}})\n", "keys = {}\n", "added_keys = []\n", "time_now_ms = self.clock.time_msec()\n", "for response in query_response['server_keys']:\n", "server_name = response.get('server_name')\n", "await self.store.store_server_verify_keys(perspective_name, time_now_ms,\n added_keys)\n", "if not isinstance(server_name, str):\n", "return keys\n", "self._validate_perspectives_response(key_server, response)\n", "logger.warning(\n 'Error processing response from key notary server %s for origin server %s: %s'\n , perspective_name, server_name, e)\n", "added_keys.extend((server_name, key_id, key) for key_id, key in\n processed_response.items())\n", "processed_response = await self.process_v2_response(perspective_name,\n response, time_added_ms=time_now_ms)\n", "keys.setdefault(server_name, {}).update(processed_response)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Docstring", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Expr'", "Condition", "Return'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_6(VAR_0, VAR_1: FlaskClient, VAR_2):...\n", "VAR_5 = VAR_1.get('/dataobj/1')\n", "assert VAR_5.status_code == 200\n" ]
[ "def test_get_dataobj(test_app, client: FlaskClient, note_fixture):...\n", "response = client.get('/dataobj/1')\n", "assert response.status_code == 200\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assert'" ]
[ "def FUNC_9(self, *VAR_4, **VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "VAR_54 = VAR_4[0] if VAR_4 else ''\n", "if not VAR_54 in self.handlers:\n", "return \"Error: no such action. '%s'\" % VAR_54\n", "VAR_55 = self.handlers[VAR_54]\n", "VAR_56 = not ('noauth' in dir(VAR_55) and VAR_55.noauth)\n", "if VAR_56 and not self.isAuthorized():\n", "VAR_57 = {}\n", "if 'data' in VAR_5:\n", "VAR_57 = json.loads(VAR_5['data'])\n", "VAR_58 = 'binary' in dir(VAR_55) and VAR_55.binary\n", "if VAR_58:\n", "return VAR_55(**handler_args)\n", "return json.dumps({'data': VAR_55(**handler_args)})\n" ]
[ "def api(self, *args, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "action = args[0] if args else ''\n", "if not action in self.handlers:\n", "return \"Error: no such action. '%s'\" % action\n", "handler = self.handlers[action]\n", "needsAuth = not ('noauth' in dir(handler) and handler.noauth)\n", "if needsAuth and not self.isAuthorized():\n", "handler_args = {}\n", "if 'data' in kwargs:\n", "handler_args = json.loads(kwargs['data'])\n", "is_binary = 'binary' in dir(handler) and handler.binary\n", "if is_binary:\n", "return handler(**handler_args)\n", "return json.dumps({'data': handler(**handler_args)})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_26(self):...\n", "self.assert_expected(self.folder.t, 'BooleanAttributesAndDefault.html')\n" ]
[ "def testBooleanAttributesAndDefault(self):...\n", "self.assert_expected(self.folder.t, 'BooleanAttributesAndDefault.html')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_18(self):...\n", "VAR_5 = self.get_counts('rst')\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertContains(VAR_5, '[email protected]')\n" ]
[ "def test_counts_view_rst(self):...\n", "response = self.get_counts('rst')\n", "self.assertEqual(response.status_code, 200)\n", "self.assertContains(response, '[email protected]')\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'" ]
[ "def __str__(self):...\n", "return '%s' % self.name\n" ]
[ "def __str__(self):...\n", "return '%s' % self.name\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@handle_dashboard_error...\n", "\"\"\"docstring\"\"\"\n", "VAR_65 = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(\n VAR_10))\n", "VAR_40 = require_student_from_identifier(VAR_9.GET.get('student'))\n", "VAR_36 = find_unit(VAR_65, VAR_9.GET.get('url'))\n", "set_due_date_extension(VAR_65, VAR_36, VAR_40, None)\n", "if not getattr(VAR_36, 'due', None):\n", "return JsonResponse(_(\n 'Successfully removed invalid due date extension (unit has no due date).'))\n", "VAR_145 = VAR_36.due.strftime('%Y-%m-%d %H:%M')\n", "return JsonResponse(_(\n 'Successfully reset due date for student {0} for {1} to {2}').format(\n VAR_40.profile.name, FUNC_59(VAR_36), VAR_145))\n" ]
[ "@handle_dashboard_error...\n", "\"\"\"docstring\"\"\"\n", "course = get_course_by_id(SlashSeparatedCourseKey.from_deprecated_string(\n course_id))\n", "student = require_student_from_identifier(request.GET.get('student'))\n", "unit = find_unit(course, request.GET.get('url'))\n", "set_due_date_extension(course, unit, student, None)\n", "if not getattr(unit, 'due', None):\n", "return JsonResponse(_(\n 'Successfully removed invalid due date extension (unit has no due date).'))\n", "original_due_date_str = unit.due.strftime('%Y-%m-%d %H:%M')\n", "return JsonResponse(_(\n 'Successfully reset due date for student {0} for {1} to {2}').format(\n student.profile.name, _display_unit(unit), original_due_date_str))\n" ]
[ 0, 0, 0, 3, 3, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Expr'", "Condition", "Return'", "Assign'", "Return'" ]
[ "def FUNC_23(self):...\n", "self.client.login(username='super', password='secret')\n" ]
[ "def setUp(self):...\n", "self.client.login(username='super', password='secret')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "@abstractmethod...\n", "" ]
[ "@abstractmethod...\n", "" ]
[ 0, 0 ]
[ "Condition", "Condition" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "VAR_55 = VAR_2.session.get('server_settings', {}).get('browser', {})\n", "VAR_56 = VAR_55.get('thumb_default_size', 96)\n", "if VAR_10 is None:\n", "VAR_10 = VAR_56\n", "VAR_126 = get_longs(VAR_2, 'id')\n", "VAR_126 = list(set(VAR_126))\n", "if len(VAR_126) == 1:\n", "VAR_6 = VAR_126[0]\n", "VAR_1.debug('Image ids: %r' % VAR_126)\n", "VAR_162 = FUNC_5(VAR_2, VAR_6, VAR_10=w, VAR_8=conn)\n", "return {VAR_6: None}\n", "if len(VAR_126) > settings.THUMBNAILS_BATCH:\n", "return {VAR_6: 'data:image/jpeg;base64,%s' % base64.b64encode(VAR_162).\n decode('utf-8')}\n", "return HttpJavascriptResponseServerError('Max %s thumbnails at a time.' %\n settings.THUMBNAILS_BATCH)\n", "VAR_127 = VAR_8.getThumbnailSet([rlong(VAR_212) for VAR_212 in VAR_126], VAR_10\n )\n", "VAR_54 = dict()\n", "for VAR_212 in VAR_126:\n", "VAR_54[VAR_212] = None\n", "return VAR_54\n", "VAR_23 = VAR_127[VAR_212]\n", "VAR_1.error('Thumbnail not available. (img id: %d)' % VAR_212)\n", "if len(VAR_23) > 0:\n", "VAR_1.error(traceback.format_exc())\n", "VAR_54[VAR_212] = 'data:image/jpeg;base64,%s' % base64.b64encode(VAR_23\n ).decode('utf-8')\n" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "server_settings = request.session.get('server_settings', {}).get('browser', {})\n", "defaultSize = server_settings.get('thumb_default_size', 96)\n", "if w is None:\n", "w = defaultSize\n", "image_ids = get_longs(request, 'id')\n", "image_ids = list(set(image_ids))\n", "if len(image_ids) == 1:\n", "iid = image_ids[0]\n", "logger.debug('Image ids: %r' % image_ids)\n", "data = _render_thumbnail(request, iid, w=w, conn=conn)\n", "return {iid: None}\n", "if len(image_ids) > settings.THUMBNAILS_BATCH:\n", "return {iid: 'data:image/jpeg;base64,%s' % base64.b64encode(data).decode(\n 'utf-8')}\n", "return HttpJavascriptResponseServerError('Max %s thumbnails at a time.' %\n settings.THUMBNAILS_BATCH)\n", "thumbnails = conn.getThumbnailSet([rlong(i) for i in image_ids], w)\n", "rv = dict()\n", "for i in image_ids:\n", "rv[i] = None\n", "return rv\n", "t = thumbnails[i]\n", "logger.error('Thumbnail not available. (img id: %d)' % i)\n", "if len(t) > 0:\n", "logger.error(traceback.format_exc())\n", "rv[i] = 'data:image/jpeg;base64,%s' % base64.b64encode(t).decode('utf-8')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Return'", "Condition", "Return'", "Return'", "Assign'", "Assign'", "For", "Assign'", "Return'", "Assign'", "Expr'", "Condition", "Expr'", "Assign'" ]
[ "def FUNC_4(self, VAR_24, VAR_25=None):...\n", "self.encoding = VAR_25\n", "VAR_27 = False\n", "self.errors = []\n", "if VAR_25 and self.inject_meta_charset:\n", "from ..filters.inject_meta_charset import Filter\n", "if self.strip_whitespace:\n", "VAR_24 = Filter(VAR_24, VAR_25)\n", "from ..filters.whitespace import Filter\n", "if self.sanitize:\n", "VAR_24 = Filter(VAR_24)\n", "from ..filters.sanitizer import Filter\n", "if self.omit_optional_tags:\n", "VAR_24 = Filter(VAR_24)\n", "from ..filters.optionaltags import Filter\n", "if self.alphabetical_attributes:\n", "VAR_24 = Filter(VAR_24)\n", "from ..filters.alphabeticalattributes import Filter\n", "for token in VAR_24:\n", "VAR_24 = Filter(VAR_24)\n", "VAR_31 = token['type']\n", "if VAR_31 == 'Doctype':\n", "VAR_35 = '<!DOCTYPE %s' % token['name']\n", "if VAR_31 in ('Characters', 'SpaceCharacters'):\n", "if token['publicId']:\n", "if VAR_31 == 'SpaceCharacters' or VAR_27:\n", "if VAR_31 in ('StartTag', 'EmptyTag'):\n", "VAR_35 += ' PUBLIC \"%s\"' % token['publicId']\n", "if token['systemId']:\n", "if VAR_27 and token['data'].find('</') >= 0:\n", "yield self.encode(escape(token['data']))\n", "VAR_37 = token['name']\n", "if VAR_31 == 'EndTag':\n", "if token['systemId']:\n", "VAR_35 += ' SYSTEM'\n", "self.serializeError('Unexpected </ in CDATA')\n", "yield self.encode(token['data'])\n", "yield self.encodeStrict('<%s' % VAR_37)\n", "VAR_37 = token['name']\n", "if VAR_31 == 'Comment':\n", "if token['systemId'].find('\"') >= 0:\n", "VAR_35 += '>'\n", "if VAR_37 in rcdataElements and not self.escape_rcdata:\n", "if VAR_37 in rcdataElements:\n", "VAR_26 = token['data']\n", "if VAR_31 == 'Entity':\n", "if token['systemId'].find(\"'\") >= 0:\n", "VAR_8 = '\"'\n", "yield self.encodeStrict(VAR_35)\n", "VAR_27 = True\n", "if VAR_27:\n", "VAR_27 = False\n", "if VAR_27:\n", "if VAR_26.find('--') >= 0:\n", "VAR_37 = token['name']\n", "self.serializeError(token['data'])\n", "self.serializeError(\n 'System identifer contains both single and double quote characters')\n", "VAR_8 = \"'\"\n", "VAR_35 += ' %s%s%s' % (VAR_8, token['systemId'], VAR_8)\n", "for (attr_namespace, attr_name), attr_value in token['data'].items():\n", "self.serializeError('Unexpected child element of a CDATA element')\n", "yield self.encodeStrict('</%s>' % VAR_37)\n", "self.serializeError('Unexpected child element of a CDATA element')\n", "self.serializeError('Comment contains --')\n", "yield self.encodeStrict('<!--%s-->' % token['data'])\n", "VAR_40 = VAR_37 + ';'\n", "VAR_38 = attr_name\n", "if VAR_37 in voidElements and self.use_trailing_solidus:\n", "if VAR_40 not in entities:\n", "VAR_32 = attr_value\n", "if self.space_before_trailing_solidus:\n", "yield self.encode('>')\n", "self.serializeError('Entity %s not recognized' % VAR_37)\n", "if self.resolve_entities and VAR_40 not in xmlEntities:\n", "yield self.encodeStrict(' ')\n", "yield self.encodeStrict(' /')\n", "yield self.encodeStrict('/')\n", "VAR_26 = entities[VAR_40]\n", "VAR_26 = '&%s;' % VAR_37\n", "yield self.encodeStrict(VAR_38)\n", "yield self.encodeStrict(VAR_26)\n", "if not self.minimize_boolean_attributes or VAR_38 not in booleanAttributes.get(\n", "yield self.encodeStrict('=')\n", "if self.quote_attr_values:\n", "VAR_39 = True\n", "VAR_39 = len(VAR_32) == 0 or VAR_1.search(VAR_32)\n", "VAR_32 = VAR_32.replace('&', '&amp;')\n", "if self.escape_lt_in_attrs:\n", "VAR_32 = VAR_32.replace('<', '&lt;')\n", "if VAR_39:\n", "VAR_8 = self.quote_char\n", "yield self.encode(VAR_32)\n", "if self.use_best_quote_char:\n", "if 
\"'\" in VAR_32 and '\"' not in VAR_32:\n", "if VAR_8 == \"'\":\n", "VAR_8 = '\"'\n", "if '\"' in VAR_32 and \"'\" not in VAR_32:\n", "VAR_32 = VAR_32.replace(\"'\", '&#39;')\n", "VAR_32 = VAR_32.replace('\"', '&quot;')\n", "VAR_8 = \"'\"\n", "yield self.encodeStrict(VAR_8)\n", "yield self.encode(VAR_32)\n", "yield self.encodeStrict(VAR_8)\n" ]
[ "def serialize(self, treewalker, encoding=None):...\n", "self.encoding = encoding\n", "in_cdata = False\n", "self.errors = []\n", "if encoding and self.inject_meta_charset:\n", "from ..filters.inject_meta_charset import Filter\n", "if self.strip_whitespace:\n", "treewalker = Filter(treewalker, encoding)\n", "from ..filters.whitespace import Filter\n", "if self.sanitize:\n", "treewalker = Filter(treewalker)\n", "from ..filters.sanitizer import Filter\n", "if self.omit_optional_tags:\n", "treewalker = Filter(treewalker)\n", "from ..filters.optionaltags import Filter\n", "if self.alphabetical_attributes:\n", "treewalker = Filter(treewalker)\n", "from ..filters.alphabeticalattributes import Filter\n", "for token in treewalker:\n", "treewalker = Filter(treewalker)\n", "type = token['type']\n", "if type == 'Doctype':\n", "doctype = '<!DOCTYPE %s' % token['name']\n", "if type in ('Characters', 'SpaceCharacters'):\n", "if token['publicId']:\n", "if type == 'SpaceCharacters' or in_cdata:\n", "if type in ('StartTag', 'EmptyTag'):\n", "doctype += ' PUBLIC \"%s\"' % token['publicId']\n", "if token['systemId']:\n", "if in_cdata and token['data'].find('</') >= 0:\n", "yield self.encode(escape(token['data']))\n", "name = token['name']\n", "if type == 'EndTag':\n", "if token['systemId']:\n", "doctype += ' SYSTEM'\n", "self.serializeError('Unexpected </ in CDATA')\n", "yield self.encode(token['data'])\n", "yield self.encodeStrict('<%s' % name)\n", "name = token['name']\n", "if type == 'Comment':\n", "if token['systemId'].find('\"') >= 0:\n", "doctype += '>'\n", "if name in rcdataElements and not self.escape_rcdata:\n", "if name in rcdataElements:\n", "data = token['data']\n", "if type == 'Entity':\n", "if token['systemId'].find(\"'\") >= 0:\n", "quote_char = '\"'\n", "yield self.encodeStrict(doctype)\n", "in_cdata = True\n", "if in_cdata:\n", "in_cdata = False\n", "if in_cdata:\n", "if data.find('--') >= 0:\n", "name = token['name']\n", "self.serializeError(token['data'])\n", "self.serializeError(\n 'System identifer contains both single and double quote characters')\n", "quote_char = \"'\"\n", "doctype += ' %s%s%s' % (quote_char, token['systemId'], quote_char)\n", "for (attr_namespace, attr_name), attr_value in token['data'].items():\n", "self.serializeError('Unexpected child element of a CDATA element')\n", "yield self.encodeStrict('</%s>' % name)\n", "self.serializeError('Unexpected child element of a CDATA element')\n", "self.serializeError('Comment contains --')\n", "yield self.encodeStrict('<!--%s-->' % token['data'])\n", "key = name + ';'\n", "k = attr_name\n", "if name in voidElements and self.use_trailing_solidus:\n", "if key not in entities:\n", "v = attr_value\n", "if self.space_before_trailing_solidus:\n", "yield self.encode('>')\n", "self.serializeError('Entity %s not recognized' % name)\n", "if self.resolve_entities and key not in xmlEntities:\n", "yield self.encodeStrict(' ')\n", "yield self.encodeStrict(' /')\n", "yield self.encodeStrict('/')\n", "data = entities[key]\n", "data = '&%s;' % name\n", "yield self.encodeStrict(k)\n", "yield self.encodeStrict(data)\n", "if not self.minimize_boolean_attributes or k not in booleanAttributes.get(name,\n", "yield self.encodeStrict('=')\n", "if self.quote_attr_values:\n", "quote_attr = True\n", "quote_attr = len(v) == 0 or quoteAttributeSpec.search(v)\n", "v = v.replace('&', '&amp;')\n", "if self.escape_lt_in_attrs:\n", "v = v.replace('<', '&lt;')\n", "if quote_attr:\n", "quote_char = self.quote_char\n", "yield self.encode(v)\n", "if 
self.use_best_quote_char:\n", "if \"'\" in v and '\"' not in v:\n", "if quote_char == \"'\":\n", "quote_char = '\"'\n", "if '\"' in v and \"'\" not in v:\n", "v = v.replace(\"'\", '&#39;')\n", "v = v.replace('\"', '&quot;')\n", "quote_char = \"'\"\n", "yield self.encodeStrict(quote_char)\n", "yield self.encode(v)\n", "yield self.encodeStrict(quote_char)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "ImportFrom'", "Condition", "Assign'", "ImportFrom'", "Condition", "Assign'", "ImportFrom'", "Condition", "Assign'", "ImportFrom'", "Condition", "Assign'", "ImportFrom'", "For", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Condition", "Condition", "AugAssign'", "Condition", "Condition", "Expr'", "Assign'", "Condition", "Condition", "AugAssign'", "Expr'", "Expr'", "Expr'", "Assign'", "Condition", "Condition", "AugAssign'", "Condition", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Expr'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Expr'", "Expr'", "Assign'", "AugAssign'", "For", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "For", "Expr'", "Expr'", "Condition", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Condition", "Expr'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_10(self):...\n", "VAR_5 = {'not_types': ['m.*', 'org.*']}\n", "VAR_6 = FUNC_0(sender='@foo:bar', type='com.nom.nom.nom', room_id='!foo:bar')\n", "self.assertTrue(Filter(VAR_5).check(VAR_6))\n" ]
[ "def test_definition_not_types_works_with_unknowns(self):...\n", "definition = {'not_types': ['m.*', 'org.*']}\n", "event = MockEvent(sender='@foo:bar', type='com.nom.nom.nom', room_id='!foo:bar'\n )\n", "self.assertTrue(Filter(definition).check(event))\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_0(VAR_2, VAR_3, VAR_4, **VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "VAR_10 = []\n", "VAR_11 = Change.objects.content()\n", "if VAR_2:\n", "VAR_11 = VAR_11.filter(author=user)\n", "for language in Language.objects.filter(**kwargs).distinct().iterator():\n", "VAR_17 = VAR_11.filter(language=language, **kwargs).authors_list((VAR_3, VAR_4)\n )\n", "return VAR_10\n", "if not VAR_17:\n", "VAR_10.append({language.name: sorted(VAR_17, key=lambda item: item[2])})\n" ]
[ "def generate_credits(user, start_date, end_date, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "result = []\n", "base = Change.objects.content()\n", "if user:\n", "base = base.filter(author=user)\n", "for language in Language.objects.filter(**kwargs).distinct().iterator():\n", "authors = base.filter(language=language, **kwargs).authors_list((start_date,\n end_date))\n", "return result\n", "if not authors:\n", "result.append({language.name: sorted(authors, key=lambda item: item[2])})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Condition", "Assign'", "For", "Assign'", "Return'", "Condition", "Expr'" ]
[ "def FUNC_35(self, VAR_44, VAR_49, VAR_62=None):...\n", "VAR_85 = VAR_44.group(0)\n", "VAR_14 = VAR_44.group(1)\n", "if VAR_85[0] == '!':\n", "return self.renderer.image(VAR_49, VAR_62, VAR_14)\n", "self._in_link = True\n", "VAR_14 = self.output(VAR_14)\n", "self._in_link = False\n", "return self.renderer.link(VAR_49, VAR_62, VAR_14)\n" ]
[ "def _process_link(self, m, link, title=None):...\n", "line = m.group(0)\n", "text = m.group(1)\n", "if line[0] == '!':\n", "return self.renderer.image(link, title, text)\n", "self._in_link = True\n", "text = self.output(text)\n", "self._in_link = False\n", "return self.renderer.link(link, title, text)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "@VAR_0.route('/last_pid/<pid_id>/<input_period>')...\n", "\"\"\"docstring\"\"\"\n", "if not str_is_float(VAR_31):\n", "return '', 204\n", "VAR_70 = PID.query.filter(PID.unique_id == VAR_30).first()\n", "VAR_1.debug('No Data returned form influxdb')\n", "if len(VAR_70.measurement.split(',')) == 2:\n", "return '', 204\n", "VAR_19 = VAR_70.measurement.split(',')[0]\n", "VAR_19 = None\n", "VAR_13 = VAR_70.measurement.split(',')[1]\n", "VAR_13 = None\n", "VAR_96 = DeviceMeasurements.query.filter(DeviceMeasurements.unique_id == VAR_13\n ).first()\n", "if VAR_96:\n", "VAR_109 = Conversion.query.filter(Conversion.unique_id == VAR_96.conversion_id\n ).first()\n", "VAR_109 = None\n", "VAR_97, VAR_98, VAR_96 = return_measurement_info(VAR_96, VAR_109)\n", "VAR_99 = None\n", "if VAR_70 and ',' in VAR_70.measurement:\n", "VAR_110 = VAR_70.measurement.split(',')[1]\n", "VAR_100 = FUNC_23(VAR_30, 'pid_value', VAR_31, VAR_29='pid_p_value')\n", "VAR_111 = DeviceMeasurements.query.filter(DeviceMeasurements.unique_id ==\n VAR_110).first()\n", "VAR_101 = FUNC_23(VAR_30, 'pid_value', VAR_31, VAR_29='pid_i_value')\n", "if VAR_111:\n", "VAR_102 = FUNC_23(VAR_30, 'pid_value', VAR_31, VAR_29='pid_d_value')\n", "VAR_80 = Conversion.query.filter(Conversion.unique_id == VAR_111.conversion_id\n ).first()\n", "if None not in (VAR_100[1], VAR_101[1], VAR_102[1]):\n", "VAR_37, VAR_99, VAR_37 = return_measurement_info(VAR_111, VAR_80)\n", "VAR_112 = [VAR_100[0], '{:.3f}'.format(float(VAR_100[1]) + float(VAR_101[1]\n ) + float(VAR_102[1]))]\n", "VAR_112 = None\n", "VAR_103 = None\n", "if VAR_70.band:\n", "VAR_104 = {'activated': VAR_70.is_activated, 'paused': VAR_70.is_paused,\n 'held': VAR_70.is_held, 'setpoint': FUNC_23(VAR_30, VAR_99, VAR_31,\n VAR_22=0), 'setpoint_band': VAR_103, 'pid_p_value': VAR_100,\n 'pid_i_value': VAR_101, 'pid_d_value': VAR_102, 'pid_pid_value':\n VAR_112, 'duration_time': FUNC_23(VAR_30, 's', VAR_31, VAR_29=\n 'duration_time'), 'duty_cycle': FUNC_23(VAR_30, 'percent', VAR_31,\n VAR_29='duty_cycle'), 'actual': FUNC_23(VAR_19, VAR_98, VAR_31, VAR_29=\n actual_measurement, VAR_22=actual_channel)}\n", "VAR_69 = DaemonControl()\n", "VAR_1.debug(\"Couldn't get setpoint\")\n", "return jsonify(VAR_104)\n", "VAR_103 = VAR_69.pid_get(VAR_70.unique_id, 'setpoint_band')\n" ]
[ "@blueprint.route('/last_pid/<pid_id>/<input_period>')...\n", "\"\"\"docstring\"\"\"\n", "if not str_is_float(input_period):\n", "return '', 204\n", "pid = PID.query.filter(PID.unique_id == pid_id).first()\n", "logger.debug('No Data returned form influxdb')\n", "if len(pid.measurement.split(',')) == 2:\n", "return '', 204\n", "device_id = pid.measurement.split(',')[0]\n", "device_id = None\n", "measurement_id = pid.measurement.split(',')[1]\n", "measurement_id = None\n", "actual_measurement = DeviceMeasurements.query.filter(DeviceMeasurements.\n unique_id == measurement_id).first()\n", "if actual_measurement:\n", "actual_conversion = Conversion.query.filter(Conversion.unique_id ==\n actual_measurement.conversion_id).first()\n", "actual_conversion = None\n", "actual_channel, actual_unit, actual_measurement = return_measurement_info(\n actual_measurement, actual_conversion)\n", "setpoint_unit = None\n", "if pid and ',' in pid.measurement:\n", "pid_measurement = pid.measurement.split(',')[1]\n", "p_value = return_point_timestamp(pid_id, 'pid_value', input_period,\n measurement='pid_p_value')\n", "setpoint_measurement = DeviceMeasurements.query.filter(DeviceMeasurements.\n unique_id == pid_measurement).first()\n", "i_value = return_point_timestamp(pid_id, 'pid_value', input_period,\n measurement='pid_i_value')\n", "if setpoint_measurement:\n", "d_value = return_point_timestamp(pid_id, 'pid_value', input_period,\n measurement='pid_d_value')\n", "conversion = Conversion.query.filter(Conversion.unique_id ==\n setpoint_measurement.conversion_id).first()\n", "if None not in (p_value[1], i_value[1], d_value[1]):\n", "_, setpoint_unit, _ = return_measurement_info(setpoint_measurement, conversion)\n", "pid_value = [p_value[0], '{:.3f}'.format(float(p_value[1]) + float(i_value[\n 1]) + float(d_value[1]))]\n", "pid_value = None\n", "setpoint_band = None\n", "if pid.band:\n", "live_data = {'activated': pid.is_activated, 'paused': pid.is_paused, 'held':\n pid.is_held, 'setpoint': return_point_timestamp(pid_id, setpoint_unit,\n input_period, channel=0), 'setpoint_band': setpoint_band, 'pid_p_value':\n p_value, 'pid_i_value': i_value, 'pid_d_value': d_value,\n 'pid_pid_value': pid_value, 'duration_time': return_point_timestamp(\n pid_id, 's', input_period, measurement='duration_time'), 'duty_cycle':\n return_point_timestamp(pid_id, 'percent', input_period, measurement=\n 'duty_cycle'), 'actual': return_point_timestamp(device_id, actual_unit,\n input_period, measurement=actual_measurement, channel=actual_channel)}\n", "daemon = DaemonControl()\n", "logger.debug(\"Couldn't get setpoint\")\n", "return jsonify(live_data)\n", "setpoint_band = daemon.pid_get(pid.unique_id, 'setpoint_band')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Return'", "Assign'", "Expr'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Return'", "Assign'" ]
[ "def FUNC_63(VAR_79):...\n", "" ]
[ "def getConfigValue(key):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_169(VAR_212, VAR_215, VAR_101=VAR_101):...\n", "for VAR_440 in (VAR_212.tags or []):\n", "VAR_440 = VAR_440.strip().lower()\n", "if VAR_440:\n", "VAR_101.wiki_tag.insert(VAR_148=tag, wiki_page=id)\n" ]
[ "def update_tags_insert(page, id, db=db):...\n", "for tag in (page.tags or []):\n", "tag = tag.strip().lower()\n", "if tag:\n", "db.wiki_tag.insert(name=tag, wiki_page=id)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Assign'", "Condition", "Expr'" ]
[ "def FUNC_8(self, VAR_3, VAR_4):...\n", "VAR_17 = self.default_config()\n", "VAR_17['require_auth_for_profile_requests'] = True\n", "VAR_17['limit_profile_requests_to_users_who_share_rooms'] = True\n", "self.hs = self.setup_test_homeserver(VAR_17=config)\n", "return self.hs\n" ]
[ "def make_homeserver(self, reactor, clock):...\n", "config = self.default_config()\n", "config['require_auth_for_profile_requests'] = True\n", "config['limit_profile_requests_to_users_who_share_rooms'] = True\n", "self.hs = self.setup_test_homeserver(config=config)\n", "return self.hs\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_1(self):...\n", "\"\"\"docstring\"\"\"\n", "self.hs.config.use_presence = True\n", "VAR_7 = {'presence': 'here', 'status_msg': 'beep boop'}\n", "VAR_8, VAR_9 = self.make_request('PUT', '/presence/%s/status' % (self.\n user_id,), VAR_7)\n", "self.assertEqual(VAR_9.code, 200)\n", "self.assertEqual(self.hs.get_presence_handler().set_state.call_count, 1)\n" ]
[ "def test_put_presence(self):...\n", "\"\"\"docstring\"\"\"\n", "self.hs.config.use_presence = True\n", "body = {'presence': 'here', 'status_msg': 'beep boop'}\n", "request, channel = self.make_request('PUT', '/presence/%s/status' % (self.\n user_id,), body)\n", "self.assertEqual(channel.code, 200)\n", "self.assertEqual(self.hs.get_presence_handler().set_state.call_count, 1)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "VAR_5.SERVICE_OPTS.setOmeroGroup(int(VAR_20))\n", "VAR_103 = getIntOrDefault(VAR_2, 'owner', None)\n", "VAR_104 = BaseContainer(VAR_5)\n", "VAR_104.listContainerHierarchy(VAR_103)\n", "VAR_54 = 'webclient/data/chgrp_target_tree.html'\n", "VAR_53 = {'manager': VAR_104, 'target_type': VAR_21, 'template': VAR_54}\n", "return VAR_53\n" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "conn.SERVICE_OPTS.setOmeroGroup(int(group_id))\n", "owner = getIntOrDefault(request, 'owner', None)\n", "manager = BaseContainer(conn)\n", "manager.listContainerHierarchy(owner)\n", "template = 'webclient/data/chgrp_target_tree.html'\n", "context = {'manager': manager, 'target_type': target_type, 'template': template\n }\n", "return context\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Return'" ]
[ "async def FUNC_43(self, VAR_1: EventBase, VAR_49: RoomStreamToken) ->None:...\n", "\"\"\"docstring\"\"\"\n", "VAR_152 = []\n", "if VAR_1.type == VAR_188.Member:\n", "VAR_43 = VAR_1.state_key\n", "if VAR_1.internal_metadata.is_outlier():\n", "if VAR_1.internal_metadata.is_outlier():\n", "return\n", "assert VAR_1.internal_metadata.stream_ordering\n", "if VAR_1.membership != Membership.INVITE:\n", "VAR_185 = UserID.from_string(VAR_43)\n", "VAR_153 = PersistedEventPosition(self._instance_name, VAR_1.\n internal_metadata.stream_ordering)\n", "if not self.is_mine_id(VAR_43):\n", "VAR_152.append(VAR_185)\n", "self.notifier.on_new_room_event(VAR_1, VAR_153, VAR_49, VAR_152=extra_users)\n", "return\n" ]
[ "async def _notify_persisted_event(self, event: EventBase, max_stream_token:...\n", "\"\"\"docstring\"\"\"\n", "extra_users = []\n", "if event.type == EventTypes.Member:\n", "target_user_id = event.state_key\n", "if event.internal_metadata.is_outlier():\n", "if event.internal_metadata.is_outlier():\n", "return\n", "assert event.internal_metadata.stream_ordering\n", "if event.membership != Membership.INVITE:\n", "target_user = UserID.from_string(target_user_id)\n", "event_pos = PersistedEventPosition(self._instance_name, event.\n internal_metadata.stream_ordering)\n", "if not self.is_mine_id(target_user_id):\n", "extra_users.append(target_user)\n", "self.notifier.on_new_room_event(event, event_pos, max_stream_token,\n extra_users=extra_users)\n", "return\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Return'", "Assert'", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_76(VAR_43):...\n", "\"\"\"docstring\"\"\"\n", "from frappe.core.doctype.data_import.data_import import import_doc\n", "FUNC_76(VAR_43)\n" ]
[ "def import_doc(path):...\n", "\"\"\"docstring\"\"\"\n", "from frappe.core.doctype.data_import.data_import import import_doc\n", "import_doc(path)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "ImportFrom'", "Expr'" ]
[ "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "VAR_42 = argparse.ArgumentParser(VAR_39=description)\n", "VAR_42.add_argument('-c', '--config-path', action='append', metavar=\n 'CONFIG_FILE', help=\n 'Specify config file. Can be given multiple times and may specify directories containing *.yaml files.'\n )\n", "VAR_68 = VAR_42.add_argument_group('Config generation')\n", "VAR_68.add_argument('--generate-config', action='store_true', help=\n 'Generate a config file, then exit.')\n", "VAR_68.add_argument('--generate-missing-configs', '--generate-keys', action\n ='store_true', help=\n 'Generate any missing additional config files, then exit.')\n", "VAR_68.add_argument('-H', '--server-name', help=\n 'The server name to generate a config file for.')\n", "VAR_68.add_argument('--report-stats', action='store', help=\n 'Whether the generated config reports anonymized usage statistics.',\n choices=['yes', 'no'])\n", "VAR_68.add_argument('--config-directory', '--keys-directory', metavar=\n 'DIRECTORY', help=\n 'Specify where additional config files such as signing keys and log config should be stored. Defaults to the same directory as the last config file.'\n )\n", "VAR_68.add_argument('--data-directory', metavar='DIRECTORY', help=\n 'Specify where data such as the media store and database file should be stored. Defaults to the current working directory.'\n )\n", "VAR_68.add_argument('--open-private-ports', action='store_true', help=\n 'Leave private ports (such as the non-TLS HTTP listener) open to the internet. Do not use this unless you know what you are doing.'\n )\n", "VAR_15.invoke_all_static('add_arguments', VAR_42)\n", "VAR_67 = VAR_42.parse_args(VAR_40)\n", "VAR_8 = FUNC_4(VAR_9=config_args.config_path)\n", "if not VAR_8:\n", "VAR_42.error('string')\n", "if VAR_67.config_directory:\n", "VAR_29 = VAR_67.config_directory\n", "VAR_29 = os.path.dirname(VAR_8[-1])\n", "VAR_29 = os.path.abspath(VAR_29)\n", "VAR_30 = os.getcwd()\n", "VAR_69 = VAR_67.generate_missing_configs\n", "VAR_65 = VAR_15()\n", "if VAR_67.generate_config:\n", "if VAR_67.report_stats is None:\n", "VAR_43 = FUNC_3(VAR_8)\n", "VAR_42.error(\n 'Please specify either --report-stats=yes or --report-stats=no\\n\\n' + VAR_1\n )\n", "VAR_75, = VAR_8\n", "if VAR_69:\n", "if not FUNC_0(VAR_75):\n", "VAR_65.generate_missing_files(VAR_43, VAR_29)\n", "VAR_65.parse_config_dict(VAR_43, VAR_29=config_dir_path, VAR_30=data_dir_path)\n", "print('Generating config file %s' % (VAR_75,))\n", "print('Config file %r already exists. Generating any missing config files.' %\n (VAR_75,))\n", "return None\n", "VAR_65.invoke_all('read_arguments', VAR_67)\n", "if VAR_67.data_directory:\n", "VAR_69 = True\n", "return VAR_65\n", "VAR_30 = VAR_67.data_directory\n", "VAR_30 = os.getcwd()\n", "VAR_30 = os.path.abspath(VAR_30)\n", "VAR_31 = VAR_67.server_name\n", "if not VAR_31:\n", "VAR_80 = VAR_65.generate_config(VAR_29=config_dir_path, VAR_30=\n data_dir_path, VAR_31=server_name, VAR_33=config_args.report_stats ==\n 'yes', VAR_32=True, VAR_34=config_args.open_private_ports)\n", "if not FUNC_0(VAR_29):\n", "os.makedirs(VAR_29)\n", "config_file.write(VAR_80)\n", "config_file.write('\\n\\n# vim:ft=yaml')\n", "VAR_43 = yaml.safe_load(VAR_80)\n", "VAR_65.generate_missing_files(VAR_43, VAR_29)\n", "print('string' % (VAR_75, VAR_31))\n", "return\n" ]
[ "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "parser = argparse.ArgumentParser(description=description)\n", "parser.add_argument('-c', '--config-path', action='append', metavar=\n 'CONFIG_FILE', help=\n 'Specify config file. Can be given multiple times and may specify directories containing *.yaml files.'\n )\n", "generate_group = parser.add_argument_group('Config generation')\n", "generate_group.add_argument('--generate-config', action='store_true', help=\n 'Generate a config file, then exit.')\n", "generate_group.add_argument('--generate-missing-configs', '--generate-keys',\n action='store_true', help=\n 'Generate any missing additional config files, then exit.')\n", "generate_group.add_argument('-H', '--server-name', help=\n 'The server name to generate a config file for.')\n", "generate_group.add_argument('--report-stats', action='store', help=\n 'Whether the generated config reports anonymized usage statistics.',\n choices=['yes', 'no'])\n", "generate_group.add_argument('--config-directory', '--keys-directory',\n metavar='DIRECTORY', help=\n 'Specify where additional config files such as signing keys and log config should be stored. Defaults to the same directory as the last config file.'\n )\n", "generate_group.add_argument('--data-directory', metavar='DIRECTORY', help=\n 'Specify where data such as the media store and database file should be stored. Defaults to the current working directory.'\n )\n", "generate_group.add_argument('--open-private-ports', action='store_true',\n help=\n 'Leave private ports (such as the non-TLS HTTP listener) open to the internet. Do not use this unless you know what you are doing.'\n )\n", "cls.invoke_all_static('add_arguments', parser)\n", "config_args = parser.parse_args(argv)\n", "config_files = find_config_files(search_paths=config_args.config_path)\n", "if not config_files:\n", "parser.error(\n \"\"\"Must supply a config file.\nA config file can be automatically generated using \"--generate-config -H SERVER_NAME -c CONFIG-FILE\\\"\"\"\"\n )\n", "if config_args.config_directory:\n", "config_dir_path = config_args.config_directory\n", "config_dir_path = os.path.dirname(config_files[-1])\n", "config_dir_path = os.path.abspath(config_dir_path)\n", "data_dir_path = os.getcwd()\n", "generate_missing_configs = config_args.generate_missing_configs\n", "obj = cls()\n", "if config_args.generate_config:\n", "if config_args.report_stats is None:\n", "config_dict = read_config_files(config_files)\n", "parser.error(\n 'Please specify either --report-stats=yes or --report-stats=no\\n\\n' +\n MISSING_REPORT_STATS_SPIEL)\n", "config_path, = config_files\n", "if generate_missing_configs:\n", "if not path_exists(config_path):\n", "obj.generate_missing_files(config_dict, config_dir_path)\n", "obj.parse_config_dict(config_dict, config_dir_path=config_dir_path,\n data_dir_path=data_dir_path)\n", "print('Generating config file %s' % (config_path,))\n", "print('Config file %r already exists. Generating any missing config files.' 
%\n (config_path,))\n", "return None\n", "obj.invoke_all('read_arguments', config_args)\n", "if config_args.data_directory:\n", "generate_missing_configs = True\n", "return obj\n", "data_dir_path = config_args.data_directory\n", "data_dir_path = os.getcwd()\n", "data_dir_path = os.path.abspath(data_dir_path)\n", "server_name = config_args.server_name\n", "if not server_name:\n", "config_str = obj.generate_config(config_dir_path=config_dir_path,\n data_dir_path=data_dir_path, server_name=server_name, report_stats=\n config_args.report_stats == 'yes', generate_secrets=True,\n open_private_ports=config_args.open_private_ports)\n", "if not path_exists(config_dir_path):\n", "os.makedirs(config_dir_path)\n", "config_file.write(config_str)\n", "config_file.write('\\n\\n# vim:ft=yaml')\n", "config_dict = yaml.safe_load(config_str)\n", "obj.generate_missing_files(config_dict, config_dir_path)\n", "print(\n 'A config file has been generated in %r for server name %r. Please review this file and customise it to your needs.'\n % (config_path, server_name))\n", "return\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Expr'", "Assign'", "Condition", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Return'", "Expr'", "Condition", "Assign'", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_8(self):...\n", "VAR_4 = keyring.Keyring(self.hs)\n", "VAR_20 = signedjson.key.generate_signing_key(1)\n", "VAR_27 = self.hs.get_datastore().store_server_verify_keys('server9', time.\n time() * 1000, [('server9', FUNC_0(VAR_20), FetchKeyResult(\n get_verify_key(VAR_20), 1000))])\n", "self.get_success(VAR_27)\n", "VAR_21 = {}\n", "signedjson.sign.sign_json(VAR_21, 'server9', VAR_20)\n", "VAR_28 = FUNC_2(VAR_4, 'server9', {}, 0, 'test unsigned')\n", "self.get_failure(VAR_28, SynapseError)\n", "VAR_28 = FUNC_2(VAR_4, 'server9', VAR_21, 500, 'test signed')\n", "self.get_success(VAR_28)\n" ]
[ "def test_verify_json_for_server(self):...\n", "kr = keyring.Keyring(self.hs)\n", "key1 = signedjson.key.generate_signing_key(1)\n", "r = self.hs.get_datastore().store_server_verify_keys('server9', time.time() *\n 1000, [('server9', get_key_id(key1), FetchKeyResult(get_verify_key(key1\n ), 1000))])\n", "self.get_success(r)\n", "json1 = {}\n", "signedjson.sign.sign_json(json1, 'server9', key1)\n", "d = _verify_json_for_server(kr, 'server9', {}, 0, 'test unsigned')\n", "self.get_failure(d, SynapseError)\n", "d = _verify_json_for_server(kr, 'server9', json1, 500, 'test signed')\n", "self.get_success(d)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_3(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_1 = self.register_user('user', 'pass')\n", "VAR_10 = self.login('user', 'pass')\n", "VAR_11 = self.register_user('otheruser', 'pass')\n", "VAR_12 = self.login('otheruser', 'pass')\n", "VAR_18 = self.register_user('yetanotheruser', 'pass')\n", "VAR_19 = self.login('yetanotheruser', 'pass')\n", "VAR_15 = self.helper.create_room_as(VAR_1, tok=access_token)\n", "self.helper.join(VAR_15=room, user=other_user_id, tok=other_access_token)\n", "VAR_13 = self.get_success(self.hs.get_datastore().get_user_by_access_token(\n VAR_10))\n", "VAR_14 = VAR_13.token_id\n", "self.get_success(self.hs.get_pusherpool().add_pusher(VAR_1=user_id, VAR_10=\n token_id, kind='http', app_id='m.http', app_display_name=\n 'HTTP Push Notifications', device_display_name='pushy push', pushkey=\n '[email protected]', lang=None, data={'url': 'example.com'}))\n", "self.helper.send(VAR_15, VAR_7='Hi!', tok=other_access_token)\n", "self.pump()\n", "self.push_attempts[0][0].callback({})\n", "self.pump()\n", "self.assertEqual(len(self.push_attempts), 1)\n", "self.assertEqual(self.push_attempts[0][1], 'example.com')\n", "self.assertEqual(self.push_attempts[0][2]['notification']['prio'], 'high')\n", "self.helper.join(VAR_15=room, user=yet_another_user_id, tok=\n yet_another_access_token)\n", "self.pump()\n", "self.assertEqual(len(self.push_attempts), 1)\n", "self.helper.send(VAR_15, VAR_7='Welcome!', tok=other_access_token)\n", "self.pump()\n", "self.assertEqual(len(self.push_attempts), 2)\n", "self.assertEqual(self.push_attempts[1][1], 'example.com')\n", "self.assertEqual(self.push_attempts[1][2]['notification']['prio'], 'low')\n" ]
[ "def test_sends_high_priority_for_one_to_one_only(self):...\n", "\"\"\"docstring\"\"\"\n", "user_id = self.register_user('user', 'pass')\n", "access_token = self.login('user', 'pass')\n", "other_user_id = self.register_user('otheruser', 'pass')\n", "other_access_token = self.login('otheruser', 'pass')\n", "yet_another_user_id = self.register_user('yetanotheruser', 'pass')\n", "yet_another_access_token = self.login('yetanotheruser', 'pass')\n", "room = self.helper.create_room_as(user_id, tok=access_token)\n", "self.helper.join(room=room, user=other_user_id, tok=other_access_token)\n", "user_tuple = self.get_success(self.hs.get_datastore().\n get_user_by_access_token(access_token))\n", "token_id = user_tuple.token_id\n", "self.get_success(self.hs.get_pusherpool().add_pusher(user_id=user_id,\n access_token=token_id, kind='http', app_id='m.http', app_display_name=\n 'HTTP Push Notifications', device_display_name='pushy push', pushkey=\n '[email protected]', lang=None, data={'url': 'example.com'}))\n", "self.helper.send(room, body='Hi!', tok=other_access_token)\n", "self.pump()\n", "self.push_attempts[0][0].callback({})\n", "self.pump()\n", "self.assertEqual(len(self.push_attempts), 1)\n", "self.assertEqual(self.push_attempts[0][1], 'example.com')\n", "self.assertEqual(self.push_attempts[0][2]['notification']['prio'], 'high')\n", "self.helper.join(room=room, user=yet_another_user_id, tok=\n yet_another_access_token)\n", "self.pump()\n", "self.assertEqual(len(self.push_attempts), 1)\n", "self.helper.send(room, body='Welcome!', tok=other_access_token)\n", "self.pump()\n", "self.assertEqual(len(self.push_attempts), 2)\n", "self.assertEqual(self.push_attempts[1][1], 'example.com')\n", "self.assertEqual(self.push_attempts[1][2]['notification']['prio'], 'low')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_8(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_14 = signedjson.key.generate_signing_key(random_string(5))\n", "self.expect_outgoing_key_request(self.hs.hostname, VAR_14)\n", "VAR_21 = 'ed25519:%s' % (VAR_14.version,)\n", "VAR_22 = PerspectivesKeyFetcher(self.hs2)\n", "VAR_23 = VAR_22.get_keys({self.hs.hostname: {VAR_21: 1000}})\n", "VAR_24 = self.get_success(VAR_23)\n", "self.assertIn(self.hs.hostname, VAR_24)\n", "VAR_25 = VAR_24[self.hs.hostname][VAR_21]\n", "assert isinstance(VAR_25, FetchKeyResult)\n", "self.assertEqual(signedjson.key.encode_verify_key_base64(VAR_25.verify_key),\n signedjson.key.encode_verify_key_base64(VAR_14.verify_key))\n" ]
[ "def test_get_notary_key(self):...\n", "\"\"\"docstring\"\"\"\n", "testkey = signedjson.key.generate_signing_key(random_string(5))\n", "self.expect_outgoing_key_request(self.hs.hostname, testkey)\n", "keyid = 'ed25519:%s' % (testkey.version,)\n", "fetcher = PerspectivesKeyFetcher(self.hs2)\n", "d = fetcher.get_keys({self.hs.hostname: {keyid: 1000}})\n", "res = self.get_success(d)\n", "self.assertIn(self.hs.hostname, res)\n", "keyres = res[self.hs.hostname][keyid]\n", "assert isinstance(keyres, FetchKeyResult)\n", "self.assertEqual(signedjson.key.encode_verify_key_base64(keyres.verify_key),\n signedjson.key.encode_verify_key_base64(testkey.verify_key))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assert'", "Expr'" ]
[ "@staticmethod...\n", "VAR_7 = deepcopy(VAR_7)\n", "for code, response_data in VAR_5.items():\n", "VAR_24 = response_from_data(status_code=int(code), VAR_5=response_data)\n", "return VAR_7\n", "if isinstance(VAR_24, ParseError):\n", "return ParseError(detail=\n f'cannot parse response of endpoint {endpoint.name}', VAR_5=response.data)\n", "if isinstance(VAR_24, (RefResponse, ListRefResponse)):\n", "VAR_7.relative_imports.add(FUNC_0(VAR_24.reference, VAR_1='..models'))\n", "VAR_7.responses.append(VAR_24)\n" ]
[ "@staticmethod...\n", "endpoint = deepcopy(endpoint)\n", "for code, response_data in data.items():\n", "response = response_from_data(status_code=int(code), data=response_data)\n", "return endpoint\n", "if isinstance(response, ParseError):\n", "return ParseError(detail=\n f'cannot parse response of endpoint {endpoint.name}', data=response.data)\n", "if isinstance(response, (RefResponse, ListRefResponse)):\n", "endpoint.relative_imports.add(import_string_from_reference(response.\n reference, prefix='..models'))\n", "endpoint.responses.append(response)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "For", "Assign'", "Return'", "Condition", "Return'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_5(VAR_4, VAR_13, VAR_3):...\n", "if '%(key)s' in VAR_4:\n", "VAR_4 = VAR_4.replace('%(key)s', VAR_13)\n", "if '%s' in VAR_4:\n", "VAR_4 = VAR_4.replace('%s', (VAR_3 or '') + '%')\n", "return VAR_4\n" ]
[ "def scrub_custom_query(query, key, txt):...\n", "if '%(key)s' in query:\n", "query = query.replace('%(key)s', key)\n", "if '%s' in query:\n", "query = query.replace('%s', (txt or '') + '%')\n", "return query\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Assign'", "Return'" ]
[ "@login_required(ignore_login_fail=True)...\n", "\"\"\"docstring\"\"\"\n", "return HttpResponse('OK')\n" ]
[ "@login_required(ignore_login_fail=True)...\n", "\"\"\"docstring\"\"\"\n", "return HttpResponse('OK')\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]