lines: sequencelengths 1 to 444
raw_lines: sequencelengths 1 to 444
label: sequencelengths 1 to 444
type: sequencelengths 1 to 444
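Each record below consists of these four parallel per-line arrays. A minimal sketch of how one record could be traversed, assuming plain Python dicts shaped like the pairs that follow (the record literal is copied verbatim from the first pair below; nothing else here is from the source):

# Minimal sketch: walk one record's parallel arrays side by side.
# The dict literal is copied from the first record in this dump;
# the field names come from the schema header above.
record = {
    "lines": ["def FUNC_20(self, VAR_12, VAR_2):...\n",
              "return self.make_req_msg(VAR_12[0], VAR_12[1], VAR_12[2], VAR_2, VAR_12[3])\n"],
    "raw_lines": ["def req_from_data(self, d, fun):...\n",
                  "return self.make_req_msg(d[0], d[1], d[2], fun, d[3])\n"],
    "label": [0, 0],
    "type": ["FunctionDef'", "Return'"],
}
for obf, raw, lab, node in zip(record["lines"], record["raw_lines"],
                               record["label"], record["type"]):
    print(lab, node, raw.rstrip())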
[ "def FUNC_20(self, VAR_12, VAR_2):...\n", "return self.make_req_msg(VAR_12[0], VAR_12[1], VAR_12[2], VAR_2, VAR_12[3])\n" ]
[ "def req_from_data(self, d, fun):...\n", "return self.make_req_msg(d[0], d[1], d[2], fun, d[3])\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@gallery.route('/gallery/benwa/<int:post_id>')...\n", "VAR_5 = Post.query.paginate(VAR_1, 1, False)\n", "if VAR_5.items:\n", "return render_template('show.html', VAR_5=post, VAR_6=CommentForm())\n", "flash(\"That Benwa doesn't exist yet\")\n", "return redirect(url_for('gallery.show_posts'))\n" ]
[ "@gallery.route('/gallery/benwa/<int:post_id>')...\n", "post = Post.query.paginate(post_id, 1, False)\n", "if post.items:\n", "return render_template('show.html', post=post, form=CommentForm())\n", "flash(\"That Benwa doesn't exist yet\")\n", "return redirect(url_for('gallery.show_posts'))\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Return'", "Expr'", "Return'" ]
[ "import sqlite3, os\n", "VAR_0 = os.path.dirname(__file__)\n", "VAR_1 = 'database/main.db'\n", "VAR_2 = sqlite3.connect(os.path.join(VAR_0, VAR_1), timeout=1)\n", "VAR_2.row_factory = sqlite3.Row\n", "VAR_3 = VAR_2.cursor()\n", "def FUNC_0(VAR_4):...\n", "VAR_3.execute(\"SELECT * FROM players WHERE Name = '%s' COLLATE NOCASE\" % VAR_4)\n", "VAR_14 = dict(VAR_3.fetchone())\n", "return VAR_14\n" ]
[ "import sqlite3, os\n", "script_dir = os.path.dirname(__file__)\n", "rel_path = 'database/main.db'\n", "database = sqlite3.connect(os.path.join(script_dir, rel_path), timeout=1)\n", "database.row_factory = sqlite3.Row\n", "db = database.cursor()\n", "def getPlayer(player):...\n", "db.execute(\"SELECT * FROM players WHERE Name = '%s' COLLATE NOCASE\" % player)\n", "playerstats = dict(db.fetchone())\n", "return playerstats\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 4, 0, 0 ]
[ "Import'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_6(self, VAR_12=None, VAR_13=None, VAR_14=None, VAR_15=None, VAR_16...\n", "VAR_29 = self.search_exercises(VAR_12=number, VAR_13=category_id, VAR_14=\n module_id, VAR_15=exercise_id, VAR_16=filter_for_assistant)\n", "VAR_30 = []\n", "if VAR_17:\n", "for VAR_27 in VAR_29:\n", "for VAR_27 in VAR_29:\n", "VAR_38 = VAR_27.get('best_submission', None)\n", "return VAR_30\n", "VAR_30.extend(s['id'] for s in VAR_27.get('submissions', []))\n", "if not VAR_38 is None:\n", "VAR_30.append(VAR_38)\n" ]
[ "def submission_ids(self, number=None, category_id=None, module_id=None,...\n", "exercises = self.search_exercises(number=number, category_id=category_id,\n module_id=module_id, exercise_id=exercise_id, filter_for_assistant=\n filter_for_assistant)\n", "submissions = []\n", "if best:\n", "for entry in exercises:\n", "for entry in exercises:\n", "sid = entry.get('best_submission', None)\n", "return submissions\n", "submissions.extend(s['id'] for s in entry.get('submissions', []))\n", "if not sid is None:\n", "submissions.append(sid)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Condition", "For", "For", "Assign'", "Return'", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_1(self):...\n", "VAR_4 = self.redis_client.pubsub()\n", "VAR_4.subscribe('__keyspace@0__:Exports')\n", "VAR_5 = 0\n", "VAR_9 = self.redis_client.lrange('Exports', 0, -1)\n", "for VAR_2 in VAR_9:\n", "VAR_5 += 1\n", "for msg in VAR_4.listen():\n", "self._process_key(VAR_2)\n", "if msg['type'] == 'subscribe':\n", "assert msg['data'] == b'rpush'\n", "VAR_12 = self.redis_client.llen('Exports')\n", "assert VAR_12 >= VAR_5\n", "for i in range(VAR_5, VAR_12):\n", "VAR_5 += 1\n", "VAR_2 = self.redis_client.lindex('Exports', i)\n", "self._process_key(VAR_2)\n" ]
[ "def _run(self):...\n", "import_pubsub_client = self.redis_client.pubsub()\n", "import_pubsub_client.subscribe('__keyspace@0__:Exports')\n", "num_imported = 0\n", "export_keys = self.redis_client.lrange('Exports', 0, -1)\n", "for key in export_keys:\n", "num_imported += 1\n", "for msg in import_pubsub_client.listen():\n", "self._process_key(key)\n", "if msg['type'] == 'subscribe':\n", "assert msg['data'] == b'rpush'\n", "num_imports = self.redis_client.llen('Exports')\n", "assert num_imports >= num_imported\n", "for i in range(num_imported, num_imports):\n", "num_imported += 1\n", "key = self.redis_client.lindex('Exports', i)\n", "self._process_key(key)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Assign'", "For", "AugAssign'", "For", "Expr'", "Condition", "Assert'", "Assign'", "Assert'", "For", "AugAssign'", "Assign'", "Expr'" ]
[ "def FUNC_24(self):...\n", "VAR_0 = [SleepCheck(0.5) for i in range(5)]\n", "VAR_7 = len(VAR_0) - 2\n", "self.set_max_jobs(VAR_7)\n", "self.runall(VAR_0)\n", "self.assertEqual(len(VAR_0), self.runner.stats.num_cases())\n", "self.assertEqual(0, len(self.runner.stats.failures()))\n", "self.assertEqual(VAR_7, max(self.monitor.num_tasks))\n", "self.assertEqual(VAR_7, self.monitor.num_tasks[VAR_7])\n", "self.read_timestamps(self.monitor.tasks)\n", "VAR_20 = (b > e for b, e in zip(self.begin_stamps[VAR_7:], self.end_stamps[\n :-VAR_7]))\n", "self.assertTrue(all(VAR_20))\n", "if self.begin_stamps[VAR_7 - 1] > self.end_stamps[0]:\n", "self.skipTest('the system seems too loaded.')\n" ]
[ "def test_concurrency_limited(self):...\n", "checks = [SleepCheck(0.5) for i in range(5)]\n", "max_jobs = len(checks) - 2\n", "self.set_max_jobs(max_jobs)\n", "self.runall(checks)\n", "self.assertEqual(len(checks), self.runner.stats.num_cases())\n", "self.assertEqual(0, len(self.runner.stats.failures()))\n", "self.assertEqual(max_jobs, max(self.monitor.num_tasks))\n", "self.assertEqual(max_jobs, self.monitor.num_tasks[max_jobs])\n", "self.read_timestamps(self.monitor.tasks)\n", "begin_after_end = (b > e for b, e in zip(self.begin_stamps[max_jobs:], self\n .end_stamps[:-max_jobs]))\n", "self.assertTrue(all(begin_after_end))\n", "if self.begin_stamps[max_jobs - 1] > self.end_stamps[0]:\n", "self.skipTest('the system seems too loaded.')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_5(self):...\n", "return self.result\n" ]
[ "def get_result(self):...\n", "return self.result\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_5(VAR_7, VAR_12):...\n", "VAR_3.execute('UPDATE games set GamesPlayed = %i WHERE ID = %i' % (VAR_7,\n VAR_12))\n", "VAR_2.commit()\n" ]
[ "def GameNewPlayed(Played, ID):...\n", "db.execute('UPDATE games set GamesPlayed = %i WHERE ID = %i' % (Played, ID))\n", "database.commit()\n" ]
[ 0, 4, 0 ]
[ "FunctionDef'", "Expr'", "Expr'" ]
[ "def FUNC_7(self):...\n", "VAR_5 = [{'cloudProvider': 'gce', 'asgName': self.__server_group_name,\n 'serverGroupName': self.__server_group_name, 'region': self.TEST_REGION,\n 'zone': self.TEST_ZONE, 'type': 'enableServerGroup', 'regions': [self.\n TEST_REGION], 'zones': [self.TEST_ZONE], 'credentials': self.bindings[\n 'GCE_CREDENTIALS'], 'user': 'integration-tests'}]\n", "VAR_6 = gcp.GceContractBuilder(self.gce_observer)\n", "VAR_6.new_clause_builder('Server Group Enabled', retryable_for_secs=90\n ).list_resources('managed-instance-groups').contains_pred_list([jc.\n PathContainsPredicate('baseInstanceName', self.__server_group_name), jc\n .PathContainsPredicate('targetPools', 'https')])\n", "VAR_7 = self.agent.make_json_payload_from_kwargs(VAR_5=job, description=\n 'Server Group Test - enable server group', application=self.TEST_APP)\n", "return st.OperationContract(self.new_post_operation(title=\n 'enable_server_group', data=payload, path=self.__path), contract=\n builder.build())\n" ]
[ "def enable_server_group(self):...\n", "job = [{'cloudProvider': 'gce', 'asgName': self.__server_group_name,\n 'serverGroupName': self.__server_group_name, 'region': self.TEST_REGION,\n 'zone': self.TEST_ZONE, 'type': 'enableServerGroup', 'regions': [self.\n TEST_REGION], 'zones': [self.TEST_ZONE], 'credentials': self.bindings[\n 'GCE_CREDENTIALS'], 'user': 'integration-tests'}]\n", "builder = gcp.GceContractBuilder(self.gce_observer)\n", "builder.new_clause_builder('Server Group Enabled', retryable_for_secs=90\n ).list_resources('managed-instance-groups').contains_pred_list([jc.\n PathContainsPredicate('baseInstanceName', self.__server_group_name), jc\n .PathContainsPredicate('targetPools', 'https')])\n", "payload = self.agent.make_json_payload_from_kwargs(job=job, description=\n 'Server Group Test - enable server group', application=self.TEST_APP)\n", "return st.OperationContract(self.new_post_operation(title=\n 'enable_server_group', data=payload, path=self.__path), contract=\n builder.build())\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_6(self, VAR_72):...\n", "VAR_92 = set(self.separator.findall(VAR_72) if VAR_72 else [])\n", "VAR_93 = set(VAR_17 for VAR_17 in VAR_92 if not self.email_re.match(VAR_17))\n", "VAR_92 = VAR_92 - VAR_93\n", "if self.num > 0 and len(VAR_92) + len(VAR_93) > self.num:\n", "if self.num == 1:\n", "if VAR_93:\n", "VAR_101.errors.add(errors.BAD_EMAILS, {'emails': '\"%s\"' % VAR_72})\n", "VAR_101.errors.add(errors.TOO_MANY_EMAILS, {'num': self.num})\n", "VAR_101.errors.add(errors.BAD_EMAILS, {'emails': ', '.join(VAR_93)})\n", "if not VAR_92:\n", "VAR_101.errors.add(errors.NO_EMAILS)\n", "return list(VAR_92)[0] if self.num == 1 else VAR_92\n" ]
[ "def run(self, emails0):...\n", "emails = set(self.separator.findall(emails0) if emails0 else [])\n", "failures = set(e for e in emails if not self.email_re.match(e))\n", "emails = emails - failures\n", "if self.num > 0 and len(emails) + len(failures) > self.num:\n", "if self.num == 1:\n", "if failures:\n", "c.errors.add(errors.BAD_EMAILS, {'emails': '\"%s\"' % emails0})\n", "c.errors.add(errors.TOO_MANY_EMAILS, {'num': self.num})\n", "c.errors.add(errors.BAD_EMAILS, {'emails': ', '.join(failures)})\n", "if not emails:\n", "c.errors.add(errors.NO_EMAILS)\n", "return list(emails)[0] if self.num == 1 else emails\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Condition", "Expr'", "Expr'", "Expr'", "Condition", "Expr'", "Return'" ]
[ "def FUNC_18(self, VAR_8):...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = \"SELECT meta FROM {0} WHERE path='{1}';\".format(VAR_2, VAR_8)\n", "VAR_4 = self._run_command(VAR_7)\n", "VAR_4 = VAR_4[0][0]\n", "VAR_4 = None\n", "return VAR_4\n" ]
[ "def getCaption(self, pth):...\n", "\"\"\"docstring\"\"\"\n", "command = \"SELECT meta FROM {0} WHERE path='{1}';\".format(TABLE_NAME, pth)\n", "data = self._run_command(command)\n", "data = data[0][0]\n", "data = None\n", "return data\n" ]
[ 0, 0, 4, 4, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "@login_required...\n", "VAR_27 = {'title': VAR_19}\n", "VAR_8 = None\n", "if VAR_3 is not None:\n", "VAR_8 = get_object_or_404(VAR_5, **{group_slug_field: group_slug})\n", "VAR_46 = True\n", "VAR_27.update({'content_type': get_ct(VAR_8), 'object_id': VAR_8.id})\n", "if not VAR_46:\n", "VAR_46 = FUNC_3(VAR_14.user, VAR_8, VAR_12, VAR_13)\n", "return HttpResponseForbidden()\n", "VAR_28 = get_object_or_404(VAR_2, **article_args)\n", "if VAR_41.is_observing(VAR_28, VAR_14.user):\n", "VAR_41.stop_observing(VAR_28, VAR_14.user)\n", "return redirect(VAR_28)\n" ]
[ "@login_required...\n", "article_args = {'title': title}\n", "group = None\n", "if group_slug is not None:\n", "group = get_object_or_404(group_qs, **{group_slug_field: group_slug})\n", "allow_read = True\n", "article_args.update({'content_type': get_ct(group), 'object_id': group.id})\n", "if not allow_read:\n", "allow_read = has_read_perm(request.user, group, is_member, is_private)\n", "return HttpResponseForbidden()\n", "article = get_object_or_404(article_qs, **article_args)\n", "if notification.is_observing(article, request.user):\n", "notification.stop_observing(article, request.user)\n", "return redirect(article)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Condition", "Assign'", "Return'", "Assign'", "Condition", "Expr'", "Return'" ]
[ "def FUNC_4(VAR_15, VAR_16):...\n", "VAR_33 = {}\n", "for in_file, VAR_45 in VAR_15:\n", "VAR_33[VAR_45] = FUNC_5(in_file, VAR_16)\n", "return VAR_33\n" ]
[ "def build_tree(templates, config):...\n", "res = {}\n", "for in_file, out_file in templates:\n", "res[out_file] = render_template(in_file, config)\n", "return res\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Assign'", "Return'" ]
[ "def FUNC_3(VAR_6):...\n", "\"\"\"docstring\"\"\"\n", "VAR_14 = {}\n", "VAR_14['userInfo'] = VAR_6.to_json()\n", "VAR_9 = VAR_14['userInfo'].pop('tasks')\n", "return VAR_9\n" ]
[ "def make_todo_list(verified_user):...\n", "\"\"\"docstring\"\"\"\n", "todo_list = {}\n", "todo_list['userInfo'] = verified_user.to_json()\n", "all_tasks = todo_list['userInfo'].pop('tasks')\n", "return all_tasks\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_3(self, VAR_2):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_2[0] == '0':\n", "return self.cursor.execute('select * from item')\n", "return self.cursor.execute('string' % VAR_2[0])\n" ]
[ "def find_all_items(self, langid):...\n", "\"\"\"docstring\"\"\"\n", "if langid[0] == '0':\n", "return self.cursor.execute('select * from item')\n", "return self.cursor.execute(\n 'select item.id, item.shoppinglistid, item.name, itemtranslation.id, itemtranslation.itemid, itemtranslation.itemlanguageid, itemtranslation.translation from item left join itemtranslation on itemtranslation.itemlanguageid = \"%s\" and itemtranslation.itemid = item.id'\n % langid[0])\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Return'" ]
[ "\"\"\"\nCreated on 6 Feb 2018\n\n@author: Teodor Gherasim Nistor\n\"\"\"\n", "import yaml\n", "import subprocess\n", "import os\n", "import re\n", "from beamr.debug import warn, err\n", "VAR_0 = os.path.expanduser('~/.beamrrc')\n", "VAR_1 = 'string'\n", "VAR_2 = {'docclass': 'beamer', 'packages': ['utf8,inputenc', 'T1,fontenc',\n 'pdfpages', 'upquote', 'normalem,ulem'], 'graphicspath': [], 'theme':\n 'Copenhagen', 'scheme': 'beaver', 'imgexts': ['', '.png', '.pdf',\n '.jpg', '.mps', '.jpeg', '.jbig2', '.jb2', '.PNG', '.PDF', '.JPG',\n '.JPEG', '.JBIG2', '.JB2'], 'safe': True, 'pdflatex': 'pdflatex',\n 'verbatim': 'listings', 'vbtmCmds': {'packageNames': ['listings',\n 'minted'], 'once': {'listings': 'string', 'minted': ''}, 'foreach': {\n 'minted':\n \"\"\"\\\\defverbatim[colored]%s{\n \\\\begin{minted}[xleftmargin=20pt,linenos]{%s}\n%s\n \\\\end{minted}\n}\n\"\"\"\n , 'listings':\n \"\"\"\\\\defverbatim[colored]%s{\n \\\\begin{lstlisting}[language=%s,style=defostyle]\n%s\n \\\\end{lstlisting}\n}\n\"\"\"\n }, 'foreachNoLang': {'minted':\n \"\"\"\\\\defverbatim[colored]%s{\n \\\\begin{minted}[xleftmargin=20pt,linenos]{text}\n%s\n \\\\end{minted}\n}\n\"\"\"\n , 'listings':\n \"\"\"\\\\defverbatim[colored]%s{\n \\\\begin{lstlisting}[style=defostyle]\n%s\n \\\\end{lstlisting}\n}\n\"\"\"\n }, 'insertion': '\\\\codeSnippet%s '}, 'emph': {'*': '\\\\textbf{%s}', '_':\n '\\\\textit{%s}', '~': '\\\\sout{%s}', '**': '\\\\alert{%s}', '__':\n '\\\\underline{%s}'}, 'stretch': {'<>': lambda s: \n '\\\\centering\\\\noindent\\\\resizebox{0.9\\\\textwidth}{!}{%s}' % s, '><': lambda\n s: \"\"\"\\\\begin{center}\n%s\n\\\\end{center}\"\"\" % s, '<<': lambda s: \n \"\"\"\\\\begin{flushleft}\n%s\n\\\\end{flushleft}\"\"\" % s, '>>': lambda s: \n \"\"\"\\\\begin{flushright}\n%s\n\\\\end{flushright}\"\"\" % s, '+': lambda s:\n '\\\\pause ', '>': lambda s: '\\\\hfill ', '^^': lambda s: '\\\\vspace{-%s}' %\n s, 'vv': lambda s: '\\\\vspace{%s}' % s, '__': lambda s: \n '{\\\\footnotesize %s}' % s, ':': lambda s: ''}, 'bib': None}\n", "VAR_3 = []\n", "VAR_4 = {}\n", "def __init__(self, VAR_5):...\n", "self.parsedConfig = yaml.load_all(VAR_5)\n", "self.__class__.docConfig.append(self)\n", "@classmethod...\n", "VAR_14 = [VAR_6.cmdlineConfig]\n", "while len(VAR_6.docConfig):\n", "for stub in VAR_6.docConfig.pop(0).parsedConfig:\n", "for stub in yaml.load_all(re.sub('(^|\\\\n\\\\.\\\\.\\\\.)[\\\\s\\\\S]*?($|\\\\n---)',\n", "for c in reversed(VAR_14):\n", "VAR_14.append(stub)\n", "if stub:\n", "VAR_6.recursiveUpdate(VAR_6.effectiveConfig, c)\n", "@classmethod...\n", "VAR_14.append(stub)\n", "VAR_6.recursiveUpdate(VAR_6.cmdlineConfig, yaml.load(VAR_7))\n", "VAR_6.recursiveUpdate(VAR_6.cmdlineConfig, VAR_8)\n", "@classmethod...\n", "VAR_15 = VAR_6.effectiveConfig\n", "warn('Could not get raw configuration for', VAR_9, 'due to', repr(e))\n", "@classmethod...\n", "for i in range(len(VAR_9)):\n", "return None\n", "VAR_15 = VAR_6.effectiveConfig\n", "warn('Could not get configuration for', VAR_9, 'due to', repr(e))\n", "@classmethod...\n", "VAR_15 = VAR_15[VAR_9[i]]\n", "return VAR_15\n", "for i in range(len(VAR_9)):\n", "return VAR_10['default'] if 'default' in VAR_10 else lambda s: s\n", "if not os.path.isfile(VAR_6.userConfigPath):\n", "VAR_15 = VAR_15[VAR_9[i]]\n", "if callable(VAR_15):\n", "if VAR_11:\n", "if not VAR_11:\n", "return VAR_15\n", "return lambda s: VAR_15 % s\n", "cf.write(VAR_6.userConfigTemplate % VAR_11)\n", "err('Editor not given. 
Cannot edit.')\n", "for VAR_15 in yaml.load_all(cf):\n", "if not VAR_11:\n", "subprocess.call([VAR_11, VAR_6.userConfigPath])\n", "return 2\n", "if 'editor' in VAR_15:\n", "err('Editor not given. Cannot edit.')\n", "return 0\n", "VAR_11 = VAR_15['editor']\n", "return 3\n" ]
[ "\"\"\"\nCreated on 6 Feb 2018\n\n@author: Teodor Gherasim Nistor\n\"\"\"\n", "import yaml\n", "import subprocess\n", "import os\n", "import re\n", "from beamr.debug import warn, err\n", "userConfigPath = os.path.expanduser('~/.beamrrc')\n", "userConfigTemplate = \"\"\"---\n# Beam configuration file. Please include user settings between the 3 dashes and the 3 dots.\neditor: %s\n\n...\n\"\"\"\n", "effectiveConfig = {'docclass': 'beamer', 'packages': ['utf8,inputenc',\n 'T1,fontenc', 'pdfpages', 'upquote', 'normalem,ulem'], 'graphicspath':\n [], 'theme': 'Copenhagen', 'scheme': 'beaver', 'imgexts': ['', '.png',\n '.pdf', '.jpg', '.mps', '.jpeg', '.jbig2', '.jb2', '.PNG', '.PDF',\n '.JPG', '.JPEG', '.JBIG2', '.JB2'], 'safe': True, 'pdflatex':\n 'pdflatex', 'verbatim': 'listings', 'vbtmCmds': {'packageNames': [\n 'listings', 'minted'], 'once': {'listings':\n '\\\\definecolor{codegreen}{rgb}{0.1,0.4,0.1}\\\\definecolor{codegray}{rgb}{0.5,0.5,0.5}\\\\definecolor{codepurple}{rgb}{0.4,0,0.7}\\\\lstdefinestyle{defostyle}{commentstyle=\\\\color{codegreen},keywordstyle=\\\\color{blue},numberstyle=\\\\tiny\\\\color{codegray},stringstyle=\\\\color{codepurple},basicstyle=\\\\footnotesize\\\\ttfamily,breakatwhitespace=false,breaklines=true,captionpos=b,keepspaces=true,numbers=left,numbersep=5pt,showspaces=false,showstringspaces=false,showtabs=false,tabsize=3}'\n , 'minted': ''}, 'foreach': {'minted':\n \"\"\"\\\\defverbatim[colored]%s{\n \\\\begin{minted}[xleftmargin=20pt,linenos]{%s}\n%s\n \\\\end{minted}\n}\n\"\"\"\n , 'listings':\n \"\"\"\\\\defverbatim[colored]%s{\n \\\\begin{lstlisting}[language=%s,style=defostyle]\n%s\n \\\\end{lstlisting}\n}\n\"\"\"\n }, 'foreachNoLang': {'minted':\n \"\"\"\\\\defverbatim[colored]%s{\n \\\\begin{minted}[xleftmargin=20pt,linenos]{text}\n%s\n \\\\end{minted}\n}\n\"\"\"\n , 'listings':\n \"\"\"\\\\defverbatim[colored]%s{\n \\\\begin{lstlisting}[style=defostyle]\n%s\n \\\\end{lstlisting}\n}\n\"\"\"\n }, 'insertion': '\\\\codeSnippet%s '}, 'emph': {'*': '\\\\textbf{%s}', '_':\n '\\\\textit{%s}', '~': '\\\\sout{%s}', '**': '\\\\alert{%s}', '__':\n '\\\\underline{%s}'}, 'stretch': {'<>': lambda s: \n '\\\\centering\\\\noindent\\\\resizebox{0.9\\\\textwidth}{!}{%s}' % s, '><': lambda\n s: \"\"\"\\\\begin{center}\n%s\n\\\\end{center}\"\"\" % s, '<<': lambda s: \n \"\"\"\\\\begin{flushleft}\n%s\n\\\\end{flushleft}\"\"\" % s, '>>': lambda s: \n \"\"\"\\\\begin{flushright}\n%s\n\\\\end{flushright}\"\"\" % s, '+': lambda s:\n '\\\\pause ', '>': lambda s: '\\\\hfill ', '^^': lambda s: '\\\\vspace{-%s}' %\n s, 'vv': lambda s: '\\\\vspace{%s}' % s, '__': lambda s: \n '{\\\\footnotesize %s}' % s, ':': lambda s: ''}, 'bib': None}\n", "docConfig = []\n", "cmdlineConfig = {}\n", "def __init__(self, txt):...\n", "self.parsedConfig = yaml.load_all(txt)\n", "self.__class__.docConfig.append(self)\n", "@classmethod...\n", "configStubs = [cls.cmdlineConfig]\n", "while len(cls.docConfig):\n", "for stub in cls.docConfig.pop(0).parsedConfig:\n", "for stub in yaml.load_all(re.sub('(^|\\\\n\\\\.\\\\.\\\\.)[\\\\s\\\\S]*?($|\\\\n---)',\n", "for c in reversed(configStubs):\n", "configStubs.append(stub)\n", "if stub:\n", "cls.recursiveUpdate(cls.effectiveConfig, c)\n", "@classmethod...\n", "configStubs.append(stub)\n", "cls.recursiveUpdate(cls.cmdlineConfig, yaml.load(general))\n", "cls.recursiveUpdate(cls.cmdlineConfig, special)\n", "@classmethod...\n", "d = cls.effectiveConfig\n", "warn('Could not get raw configuration for', arg, 'due to', repr(e))\n", "@classmethod...\n", "for i in 
range(len(arg)):\n", "return None\n", "d = cls.effectiveConfig\n", "warn('Could not get configuration for', arg, 'due to', repr(e))\n", "@classmethod...\n", "d = d[arg[i]]\n", "return d\n", "for i in range(len(arg)):\n", "return kw['default'] if 'default' in kw else lambda s: s\n", "if not os.path.isfile(cls.userConfigPath):\n", "d = d[arg[i]]\n", "if callable(d):\n", "if editor:\n", "if not editor:\n", "return d\n", "return lambda s: d % s\n", "cf.write(cls.userConfigTemplate % editor)\n", "err('Editor not given. Cannot edit.')\n", "for d in yaml.load_all(cf):\n", "if not editor:\n", "subprocess.call([editor, cls.userConfigPath])\n", "return 2\n", "if 'editor' in d:\n", "err('Editor not given. Cannot edit.')\n", "return 0\n", "editor = d['editor']\n", "return 3\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Expr'", "Condition", "Assign'", "Condition", "For", "For", "For", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Expr'", "Condition", "Assign'", "Expr'", "Condition", "For", "Return'", "Assign'", "Expr'", "Condition", "Assign'", "Return'", "For", "Return'", "Condition", "Assign'", "Condition", "Condition", "Condition", "Return'", "Return'", "Expr'", "Expr'", "For", "Condition", "Expr'", "Return'", "Condition", "Expr'", "Return'", "Assign'", "Return'" ]
[ "def FUNC_26(self, VAR_14):...\n", "" ]
[ "def get_max_length(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_0(self):...\n", "if not GenericRequest.test_success(self):\n", "return False\n", "self.get_submission_info()\n", "return False\n", "return True\n" ]
[ "def test_success(self):...\n", "if not GenericRequest.test_success(self):\n", "return False\n", "self.get_submission_info()\n", "return False\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Expr'", "Return'", "Return'" ]
[ "def __call__(self, *VAR_12, **VAR_9):...\n", "return CLASS_1(self.key_name, *VAR_12, **kwargs)\n" ]
[ "def __call__(self, *args, **kwargs):...\n", "return KeyTransform(self.key_name, *args, **kwargs)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_8():...\n", "\"\"\"docstring\"\"\"\n", "def FUNC_10(VAR_9, VAR_10, VAR_11=None):...\n", "\"\"\"docstring\"\"\"\n", "VAR_15 = [iter(VAR_9)] * VAR_10\n", "return izip_longest(*VAR_15, VAR_11=fillvalue)\n" ]
[ "def swissPairings():...\n", "\"\"\"docstring\"\"\"\n", "def grouper(iterable, n, fillvalue=None):...\n", "\"\"\"docstring\"\"\"\n", "args = [iter(iterable)] * n\n", "return izip_longest(*args, fillvalue=fillvalue)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "from crimemap.dbhelper import DBHelper\n", "from flask import Flask, render_template, request\n", "VAR_0 = Flask(__name__)\n", "VAR_1 = DBHelper()\n", "@VAR_0.route('/')...\n", "VAR_2 = VAR_1.get_all_inputs()\n", "print(e)\n", "return render_template('home.html', VAR_2=data)\n", "VAR_2 = None\n" ]
[ "from crimemap.dbhelper import DBHelper\n", "from flask import Flask, render_template, request\n", "app = Flask(__name__)\n", "DB = DBHelper()\n", "@app.route('/')...\n", "data = DB.get_all_inputs()\n", "print(e)\n", "return render_template('home.html', data=data)\n", "data = None\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Return'", "Assign'" ]
[ "def FUNC_6(self):...\n", "self.login()\n", "while True:\n", "VAR_30 = random.choice(self.tasks)\n", "self.do_step(SubmitRandomRequest(self.browser, VAR_30, VAR_10=self.base_url,\n VAR_11=self.submissions_path))\n" ]
[ "def act(self):...\n", "self.login()\n", "while True:\n", "task = random.choice(self.tasks)\n", "self.do_step(SubmitRandomRequest(self.browser, task, base_url=self.base_url,\n submissions_path=self.submissions_path))\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Condition", "Assign'", "Expr'" ]
[ "def FUNC_17(self, VAR_14):...\n", "" ]
[ "def is_relation(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_2(VAR_7=None, VAR_8=0, VAR_9=0):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_7 is None:\n", "return True\n", "return False\n", "VAR_21 = re.match('^([^;]+)(; length=([0-9]+))?$', VAR_7, re.IGNORECASE)\n", "VAR_22 = parse_http_date(VAR_21.group(1))\n", "VAR_23 = VAR_21.group(3)\n", "if VAR_23 and int(VAR_23) != VAR_9:\n", "if int(VAR_8) > VAR_22:\n" ]
[ "def was_modified_since(header=None, mtime=0, size=0):...\n", "\"\"\"docstring\"\"\"\n", "if header is None:\n", "return True\n", "return False\n", "matches = re.match('^([^;]+)(; length=([0-9]+))?$', header, re.IGNORECASE)\n", "header_mtime = parse_http_date(matches.group(1))\n", "header_len = matches.group(3)\n", "if header_len and int(header_len) != size:\n", "if int(mtime) > header_mtime:\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Return'", "Assign'", "Assign'", "Assign'", "Condition", "Condition" ]
[ "from sup.net import NetError\n", "from wzworkers import WorkerInterrupt\n", "from wipeskel import WipeSkel, WipeState, cstate\n", "from beon import exc, regexp\n", "import re\n", "def __init__(self, VAR_0, VAR_1, VAR_2, VAR_3, *VAR_4, **VAR_5):...\n", "self.sbjfun = VAR_2\n", "self.msgfun = VAR_3\n", "self.forums = VAR_0\n", "self.targets = type(VAR_1) == str and [('', VAR_1)] or type(VAR_1\n ) == tuple and list(VAR_1) or VAR_1\n", "super().__init__(*VAR_4, **kvargs)\n", "def FUNC_0(self, VAR_6):...\n", "if not self.logined:\n", "self._capdata = 0, 0\n", "self.log.warning('Caprate limit reached, calling dologin() for now')\n", "return\n", "self.dologin()\n", "def FUNC_1(self):...\n", "for VAR_7 in self.targets:\n", "self.schedule(self.add_comment, (VAR_7, self.msgfun()))\n", "if len(self.targets) == 0:\n", "self.schedule(self.scan_targets_loop)\n", "self.schedule(self.comment_loop)\n", "def FUNC_2(self, VAR_7, VAR_8):...\n", "if True:\n", "def FUNC_3(self):...\n", "self.postmsg(VAR_7[1], VAR_8, VAR_7[0])\n", "self.counters['comments'] += 1\n", "for f in self.forums:\n", "self.w.sleep(self.comment_successtimeout)\n", "self.counter_tick()\n", "def FUNC_4(self):...\n", "self.w.sleep(self.comment_successtimeout)\n", "self.addtopic(self.msgfun(), self.sbjfun(), f)\n", "self.counters['topics'] += 1\n", "VAR_9 = 0\n", "self.schedule(self.add_comment, (VAR_7, VAR_8))\n", "self.w.sleep(self.topic_successtimeout)\n", "for user, forum in self.forums:\n", "self.targets.remove(VAR_7)\n", "self.w.sleep(self.comment_successtimeout)\n", "self.topic_successtimeout = self.topic_successtimeout + 0.1\n", "VAR_1 = []\n", "return VAR_9\n", "self.log.error('Too many wrong answers to CAPTCHA')\n", "self.log.info('Wait5Min exc caught, topic_successtimeout + 0.1, cur: %f',\n self.topic_successtimeout)\n", "self.log.debug('Scanning first page of the forum %s:%s', user, forum)\n", "self.schedule(self.add_comment, (VAR_7, VAR_8))\n", "self.w.sleep(self.topic_successtimeout)\n", "VAR_10 = self.site.get_page('1', forum, user)\n", "self.log.warn('%s: %s', e, e.answer)\n", "self.log.error('Too many wrong answers to CAPTCHA')\n", "VAR_11 = re.compile(regexp.f_sub_id.format(user, self.site.domain, forum))\n", "self.schedule(self.add_comment, (VAR_7, VAR_8))\n", "self.long_sleep(10)\n", "VAR_12 = set(map(lambda x: (user, x[0] + x[1]), VAR_11.findall(VAR_10)))\n", "self.schedule(self.add_comment, (VAR_7, VAR_8))\n", "self.log.warning('%s: %s', e, e.answer)\n", "for VAR_7 in VAR_12:\n", "self.schedule_first(self.switch_user)\n", "self.w.sleep(self.errortimeout)\n", "if VAR_7 in self.pc.sets['closed'] or VAR_7 in self.pc.sets['bumplimit'\n", "VAR_13 = len(VAR_1)\n", "self.log.info('Removing %s from targets', VAR_7)\n", "self.log.error(e)\n", "VAR_1.append(VAR_7)\n", "VAR_9 += VAR_13\n", "self.targets.remove(VAR_7)\n", "self.w.sleep(self.errortimeout)\n", "self.w.sleep(self.errortimeout)\n", "if VAR_13 > 0:\n", "self.schedule(self.add_comment, (VAR_7, VAR_8))\n", "self.log.warn(e)\n", "self.log.info('Found %d new targets in forum %s:%s', VAR_13, user, forum)\n", "self.log.debug('Found no new targets in forum %s:%s', user, forum)\n", "self.w.sleep(self.errortimeout)\n", "self.w.sleep(self.errortimeout)\n", "self.targets.extend(VAR_1)\n", "self.targets.remove(VAR_7)\n", "self.w.sleep(self.errortimeout)\n", "self.log.exception(e)\n", "self.w.sleep(self.errortimeout)\n" ]
[ "from sup.net import NetError\n", "from wzworkers import WorkerInterrupt\n", "from wipeskel import WipeSkel, WipeState, cstate\n", "from beon import exc, regexp\n", "import re\n", "def __init__(self, forums, targets, sbjfun, msgfun, *args, **kvargs):...\n", "self.sbjfun = sbjfun\n", "self.msgfun = msgfun\n", "self.forums = forums\n", "self.targets = type(targets) == str and [('', targets)] or type(targets\n ) == tuple and list(targets) or targets\n", "super().__init__(*args, **kvargs)\n", "def on_caprate_limit(self, rate):...\n", "if not self.logined:\n", "self._capdata = 0, 0\n", "self.log.warning('Caprate limit reached, calling dologin() for now')\n", "return\n", "self.dologin()\n", "def comment_loop(self):...\n", "for t in self.targets:\n", "self.schedule(self.add_comment, (t, self.msgfun()))\n", "if len(self.targets) == 0:\n", "self.schedule(self.scan_targets_loop)\n", "self.schedule(self.comment_loop)\n", "def add_comment(self, t, msg):...\n", "if True:\n", "def forumwipe_loop(self):...\n", "self.postmsg(t[1], msg, t[0])\n", "self.counters['comments'] += 1\n", "for f in self.forums:\n", "self.w.sleep(self.comment_successtimeout)\n", "self.counter_tick()\n", "def get_targets(self):...\n", "self.w.sleep(self.comment_successtimeout)\n", "self.addtopic(self.msgfun(), self.sbjfun(), f)\n", "self.counters['topics'] += 1\n", "found_count = 0\n", "self.schedule(self.add_comment, (t, msg))\n", "self.w.sleep(self.topic_successtimeout)\n", "for user, forum in self.forums:\n", "self.targets.remove(t)\n", "self.w.sleep(self.comment_successtimeout)\n", "self.topic_successtimeout = self.topic_successtimeout + 0.1\n", "targets = []\n", "return found_count\n", "self.log.error('Too many wrong answers to CAPTCHA')\n", "self.log.info('Wait5Min exc caught, topic_successtimeout + 0.1, cur: %f',\n self.topic_successtimeout)\n", "self.log.debug('Scanning first page of the forum %s:%s', user, forum)\n", "self.schedule(self.add_comment, (t, msg))\n", "self.w.sleep(self.topic_successtimeout)\n", "page = self.site.get_page('1', forum, user)\n", "self.log.warn('%s: %s', e, e.answer)\n", "self.log.error('Too many wrong answers to CAPTCHA')\n", "rxp = re.compile(regexp.f_sub_id.format(user, self.site.domain, forum))\n", "self.schedule(self.add_comment, (t, msg))\n", "self.long_sleep(10)\n", "found = set(map(lambda x: (user, x[0] + x[1]), rxp.findall(page)))\n", "self.schedule(self.add_comment, (t, msg))\n", "self.log.warning('%s: %s', e, e.answer)\n", "for t in found:\n", "self.schedule_first(self.switch_user)\n", "self.w.sleep(self.errortimeout)\n", "if t in self.pc.sets['closed'] or t in self.pc.sets['bumplimit'\n", "lt = len(targets)\n", "self.log.info('Removing %s from targets', t)\n", "self.log.error(e)\n", "targets.append(t)\n", "found_count += lt\n", "self.targets.remove(t)\n", "self.w.sleep(self.errortimeout)\n", "self.w.sleep(self.errortimeout)\n", "if lt > 0:\n", "self.schedule(self.add_comment, (t, msg))\n", "self.log.warn(e)\n", "self.log.info('Found %d new targets in forum %s:%s', lt, user, forum)\n", "self.log.debug('Found no new targets in forum %s:%s', user, forum)\n", "self.w.sleep(self.errortimeout)\n", "self.w.sleep(self.errortimeout)\n", "self.targets.extend(targets)\n", "self.targets.remove(t)\n", "self.w.sleep(self.errortimeout)\n", "self.log.exception(e)\n", "self.w.sleep(self.errortimeout)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Condition", "Assign'", "Expr'", "Return'", "Expr'", "FunctionDef'", "For", "Expr'", "Condition", "Expr'", "Expr'", "FunctionDef'", "Condition", "FunctionDef'", "Expr'", "AugAssign'", "For", "Expr'", "Expr'", "FunctionDef'", "Expr'", "Expr'", "AugAssign'", "Assign'", "Expr'", "Expr'", "For", "Expr'", "Expr'", "Assign'", "Assign'", "Return'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "For", "Expr'", "Expr'", "Condition", "Assign'", "Expr'", "Expr'", "Expr'", "AugAssign'", "Expr'", "Expr'", "Expr'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_40(self):...\n", "\"\"\"docstring\"\"\"\n", "return list(self.list_properties.keys())\n" ]
[ "def get_columns_list(self):...\n", "\"\"\"docstring\"\"\"\n", "return list(self.list_properties.keys())\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_6(self, VAR_38):...\n", "if not VAR_38:\n", "if self.emp_error is not None:\n", "if len(VAR_38) > self.length:\n", "VAR_101.errors.add(self.emp_error)\n", "VAR_101.errors.add(self.len_error)\n", "return VAR_38\n" ]
[ "def run(self, title):...\n", "if not title:\n", "if self.emp_error is not None:\n", "if len(title) > self.length:\n", "c.errors.add(self.emp_error)\n", "c.errors.add(self.len_error)\n", "return title\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Condition", "Condition", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_1(self):...\n", "VAR_2 = self.connect()\n", "VAR_3 = 'SELECT description FROM crimes;'\n", "VAR_2.close()\n", "cursor.execute(VAR_3)\n", "return cursor.fetchall()\n" ]
[ "def get_all_inputs(self):...\n", "connection = self.connect()\n", "query = 'SELECT description FROM crimes;'\n", "connection.close()\n", "cursor.execute(query)\n", "return cursor.fetchall()\n" ]
[ 0, 0, 0, 0, 4, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "@property...\n", "VAR_65 = '.' if self._workdir is None else self._workdir\n", "if not os.path.isabs(VAR_65):\n", "return os.path.abspath(os.path.join(self.workflow.basedir, VAR_65))\n", "return VAR_65\n" ]
[ "@property...\n", "workdir = '.' if self._workdir is None else self._workdir\n", "if not os.path.isabs(workdir):\n", "return os.path.abspath(os.path.join(self.workflow.basedir, workdir))\n", "return workdir\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_6(self, VAR_48):...\n", "VAR_80 = CLASS_5.run(self, VAR_48)\n", "if VAR_80 and not (VAR_101.user_is_loggedin and VAR_80.can_submit(VAR_101.user)\n", "abort(403, 'forbidden')\n", "return VAR_80\n" ]
[ "def run(self, link_name):...\n", "link = VLink.run(self, link_name)\n", "if link and not (c.user_is_loggedin and link.can_submit(c.user)):\n", "abort(403, 'forbidden')\n", "return link\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Expr'", "Return'" ]
[ "def FUNC_11(self, VAR_7):...\n", "VAR_22 = ''\n", "if VAR_7._where:\n", "VAR_27 = 0\n", "VAR_7 = 'DELETE FROM {0} {1};'.format(VAR_7.model_class._meta.table_name,\n VAR_22)\n", "if isinstance(VAR_7._where, VAR_39):\n", "return VAR_7\n", "VAR_22 = 'WHERE {0}'.format(VAR_7._where)\n", "for value in VAR_7._where:\n", "if VAR_27 == 0:\n", "VAR_40 = 'WHERE '\n", "VAR_40 = ' AND '\n", "VAR_22 += \"%s %s.%s %s '%s'\" % (VAR_40, value.lhs.model_class._meta.\n table_name, value.lhs.name, value.op, value.rhs)\n", "VAR_27 += 1\n" ]
[ "def generate_delete(self, query):...\n", "where = ''\n", "if query._where:\n", "i = 0\n", "query = 'DELETE FROM {0} {1};'.format(query.model_class._meta.table_name, where\n )\n", "if isinstance(query._where, str):\n", "return query\n", "where = 'WHERE {0}'.format(query._where)\n", "for value in query._where:\n", "if i == 0:\n", "con = 'WHERE '\n", "con = ' AND '\n", "where += \"%s %s.%s %s '%s'\" % (con, value.lhs.model_class._meta.table_name,\n value.lhs.name, value.op, value.rhs)\n", "i += 1\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "For", "Condition", "Assign'", "Assign'", "AugAssign'", "AugAssign'" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self.config.get(self.section, 'password_reset_link')\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self.config.get(self.section, 'password_reset_link')\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self.curated_chemical.sid\n", "return False\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self.curated_chemical.sid\n", "return False\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Return'", "Return'" ]
[ "import json\n", "from django.conf import settings\n", "from django.test import TestCase, Client\n", "from django.urls import reverse\n", "from experiences.models import ORMExperience\n", "from people.models import ORMPerson\n", "from profiles.models import ORMProfile\n", "def FUNC_0(self):...\n", "CLASS_0.ScenarioMaker().when_call_get_email_confirmation(\n ).then_response_should_be_a_redirect_to_app_deeplink_with_params()\n", "def FUNC_4(self):...\n", "VAR_12 = Client()\n", "self.response = VAR_12.get('{}?{}'.format(reverse(\n 'email-confirmation-redirect'), 'token=ABXZ'))\n", "return self\n" ]
[ "import json\n", "from django.conf import settings\n", "from django.test import TestCase, Client\n", "from django.urls import reverse\n", "from experiences.models import ORMExperience\n", "from people.models import ORMPerson\n", "from profiles.models import ORMProfile\n", "def test_when_called_redirect_view_redirects_to_apps_url(self):...\n", "RedirectConfirmEmailTestCase.ScenarioMaker().when_call_get_email_confirmation(\n ).then_response_should_be_a_redirect_to_app_deeplink_with_params()\n", "def when_call_get_email_confirmation(self):...\n", "client = Client()\n", "self.response = client.get('{}?{}'.format(reverse(\n 'email-confirmation-redirect'), 'token=ABXZ'))\n", "return self\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_5(self, VAR_3, **VAR_6):...\n", "logging.warn('url_read_json(%s, %s)', VAR_3[:500], str(VAR_6)[:500])\n", "if not self._requests:\n", "return None\n", "VAR_6.pop('stream', None)\n", "for i, n in enumerate(self._requests):\n", "if n[0] == VAR_3:\n", "self.fail('Unknown request %s' % VAR_3)\n", "VAR_9 = self._requests.pop(i)\n", "if len(VAR_9) != 3:\n", "self.fail('Expected json request, got normal data; %s' % VAR_3)\n", "VAR_10, VAR_11, VAR_12 = VAR_9\n", "if callable(VAR_11):\n", "VAR_11(VAR_6)\n", "self.assertEqual(VAR_11, VAR_6)\n", "if VAR_12 is not None:\n", "return VAR_12\n", "return None\n" ]
[ "def _url_read_json(self, url, **kwargs):...\n", "logging.warn('url_read_json(%s, %s)', url[:500], str(kwargs)[:500])\n", "if not self._requests:\n", "return None\n", "kwargs.pop('stream', None)\n", "for i, n in enumerate(self._requests):\n", "if n[0] == url:\n", "self.fail('Unknown request %s' % url)\n", "data = self._requests.pop(i)\n", "if len(data) != 3:\n", "self.fail('Expected json request, got normal data; %s' % url)\n", "_, expected_kwargs, result = data\n", "if callable(expected_kwargs):\n", "expected_kwargs(kwargs)\n", "self.assertEqual(expected_kwargs, kwargs)\n", "if result is not None:\n", "return result\n", "return None\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Condition", "Return'", "Expr'", "For", "Condition", "Expr'", "Assign'", "Condition", "Expr'", "Assign'", "Condition", "Expr'", "Expr'", "Condition", "Return'", "Return'" ]
[ "def FUNC_5(self, VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "self.lasttok = VAR_3.type\n", "return VAR_3\n" ]
[ "def t_LPAR(self, tok):...\n", "\"\"\"docstring\"\"\"\n", "self.lasttok = tok.type\n", "return tok\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_2(self, VAR_8, VAR_9, VAR_10=None, VAR_11=None, VAR_12=(...\n", "get_and_check_project(VAR_9, VAR_11, VAR_12)\n", "VAR_28 = self.queryset.get(VAR_10=pk, VAR_1=project_pk)\n", "VAR_28.pending_action = VAR_8\n", "VAR_28.last_error = None\n", "VAR_28.save()\n", "scheduler.process_pending_tasks(background=True)\n", "return Response({'success': True})\n" ]
[ "def set_pending_action(self, pending_action, request, pk=None, project_pk=...\n", "get_and_check_project(request, project_pk, perms)\n", "task = self.queryset.get(pk=pk, project=project_pk)\n", "task.pending_action = pending_action\n", "task.last_error = None\n", "task.save()\n", "scheduler.process_pending_tasks(background=True)\n", "return Response({'success': True})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "@utils.synchronized('3par', external=True)...\n", "self.common.client_login()\n", "VAR_19 = self.common.get_volume_stats(VAR_5)\n", "VAR_19['storage_protocol'] = 'iSCSI'\n", "VAR_20 = self.configuration.safe_get('volume_backend_name')\n", "VAR_19['volume_backend_name'] = VAR_20 or self.__class__.__name__\n", "self.common.client_logout()\n", "return VAR_19\n" ]
[ "@utils.synchronized('3par', external=True)...\n", "self.common.client_login()\n", "stats = self.common.get_volume_stats(refresh)\n", "stats['storage_protocol'] = 'iSCSI'\n", "backend_name = self.configuration.safe_get('volume_backend_name')\n", "stats['volume_backend_name'] = backend_name or self.__class__.__name__\n", "self.common.client_logout()\n", "return stats\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_8(VAR_10):...\n", "\"\"\"docstring\"\"\"\n", "VAR_25 = []\n", "VAR_26 = []\n", "VAR_27 = CommerceConfiguration.current()\n", "VAR_28 = {'username': VAR_10.username}\n", "VAR_29 = VAR_27.is_cache_enabled\n", "VAR_30 = VAR_27.CACHE_KEY + '.' + str(VAR_10.id) if VAR_29 else None\n", "VAR_31 = ecommerce_api_client(VAR_10)\n", "VAR_32 = get_edx_api_data(VAR_27, 'orders', VAR_31=api, querystring=\n user_query, VAR_30=cache_key)\n", "for order in VAR_32:\n", "if order['status'].lower() == 'complete':\n", "return VAR_26\n", "VAR_42 = datetime.strptime(order['date_placed'], '%Y-%m-%dT%H:%M:%SZ')\n", "VAR_43 = {'number': order['number'], 'price': order['total_excl_tax'],\n 'order_date': strftime_localized(VAR_42, 'SHORT_DATE'), 'receipt_url':\n EcommerceService().get_receipt_page_url(order['number']), 'lines':\n order['lines']}\n", "VAR_26.append(VAR_43)\n" ]
[ "def get_user_orders(user):...\n", "\"\"\"docstring\"\"\"\n", "no_data = []\n", "user_orders = []\n", "commerce_configuration = CommerceConfiguration.current()\n", "user_query = {'username': user.username}\n", "use_cache = commerce_configuration.is_cache_enabled\n", "cache_key = commerce_configuration.CACHE_KEY + '.' + str(user.id\n ) if use_cache else None\n", "api = ecommerce_api_client(user)\n", "commerce_user_orders = get_edx_api_data(commerce_configuration, 'orders',\n api=api, querystring=user_query, cache_key=cache_key)\n", "for order in commerce_user_orders:\n", "if order['status'].lower() == 'complete':\n", "return user_orders\n", "date_placed = datetime.strptime(order['date_placed'], '%Y-%m-%dT%H:%M:%SZ')\n", "order_data = {'number': order['number'], 'price': order['total_excl_tax'],\n 'order_date': strftime_localized(date_placed, 'SHORT_DATE'),\n 'receipt_url': EcommerceService().get_receipt_page_url(order['number']),\n 'lines': order['lines']}\n", "user_orders.append(order_data)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Condition", "Return'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_0(self, VAR_1, VAR_2):...\n", "if VAR_2:\n", "VAR_15 = self.DOMAIN + '/wiki/' + VAR_1\n", "VAR_15 = self.DOMAIN + VAR_1\n", "return VAR_15\n" ]
[ "def build_url(self, wiki_topic, add_wiki_text):...\n", "if add_wiki_text:\n", "url = self.DOMAIN + '/wiki/' + wiki_topic\n", "url = self.DOMAIN + wiki_topic\n", "return url\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_5(self):...\n", "assert self.response.status_code == 302\n", "assert self.response['Location'] == '{}{}?token=ABXZ'.format(settings.\n APP_DEEPLINK_DOMAIN, '/people/me/login')\n", "return self\n" ]
[ "def then_response_should_be_a_redirect_to_app_deeplink_with_params(self):...\n", "assert self.response.status_code == 302\n", "assert self.response['Location'] == '{}{}?token=ABXZ'.format(settings.\n APP_DEEPLINK_DOMAIN, '/people/me/login')\n", "return self\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assert'", "Assert'", "Return'" ]
[ "def FUNC_3(self):...\n", "CLASS_5.ScenarioMaker().given_an_apple_appid('ASDF.com.myapp.ios'\n ).when_call_aasa().then_response_should_be_json(\n '{\"applinks\": {\"apps\": [], \"details\": [{\"appID\": \"ASDF.com.myapp.ios\", \"paths\": [\"*\"]}]}}'\n )\n" ]
[ "def test_aasa_returns_json_with_appid(self):...\n", "AASATestCase.ScenarioMaker().given_an_apple_appid('ASDF.com.myapp.ios'\n ).when_call_aasa().then_response_should_be_json(\n '{\"applinks\": {\"apps\": [], \"details\": [{\"appID\": \"ASDF.com.myapp.ios\", \"paths\": [\"*\"]}]}}'\n )\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_3(self):...\n", "VAR_8 = 'test_foo.py'\n", "VAR_7 = BokChoyTestSuite('', test_spec=spec)\n", "VAR_1 = 'tests/{}'.format(VAR_8)\n", "self.assertEqual(VAR_7.cmd, self._expected_command(VAR_1=name))\n" ]
[ "def test_suite_spec(self):...\n", "spec = 'test_foo.py'\n", "suite = BokChoyTestSuite('', test_spec=spec)\n", "name = 'tests/{}'.format(spec)\n", "self.assertEqual(suite.cmd, self._expected_command(name=name))\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_4(self):...\n", "\"\"\"docstring\"\"\"\n", "self._check_flags()\n" ]
[ "def check_for_setup_error(self):...\n", "\"\"\"docstring\"\"\"\n", "self._check_flags()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'" ]
[ "def FUNC_2(self):...\n", "\"\"\"docstring\"\"\"\n", "self.use_instance_names = ['katotest%sa' % self.test_id, 'katotest%sb' %\n self.test_id, 'katotest%sc' % self.test_id]\n", "self.use_instance_zones = [self.bindings['TEST_GCE_ZONE'], 'us-central1-b',\n self.bindings['TEST_GCE_ZONE']]\n", "if self.use_instance_zones[0] == self.use_instance_zones[1]:\n", "self.use_instance_zones[1] = 'us-central1-c'\n", "VAR_17 = [self.bindings['TEST_GCE_IMAGE_NAME'], 'debian-7-wheezy-v20150818',\n self.bindings['TEST_GCE_IMAGE_NAME']]\n", "if VAR_17[0] == VAR_17[1]:\n", "VAR_17[1] = 'ubuntu-1404-trusty-v20150805'\n", "VAR_18 = ['f1-micro', 'g1-small', 'f1-micro']\n", "VAR_19 = []\n", "VAR_20 = gcp.GceContractBuilder(self.gce_observer)\n", "for i in range(3):\n", "VAR_19.append({'createGoogleInstanceDescription': {'instanceName': self.\n use_instance_names[i], 'image': VAR_17[i], 'instanceType': VAR_18[i],\n 'zone': self.use_instance_zones[i], 'credentials': self.bindings[\n 'GCE_CREDENTIALS']}})\n", "VAR_21 = self.agent.make_json_payload_from_object(VAR_19)\n", "VAR_20.new_clause_builder('Instance %d Created' % i, retryable_for_secs=90\n ).list_resources('instances').contains_path_value('name', self.\n use_instance_names[i])\n", "return st.OperationContract(self.new_post_operation(title=\n 'create_instances', data=payload, VAR_29='ops'), contract=builder.build())\n", "if i < 2:\n", "VAR_20.new_clause_builder('Instance %d Details' % i).inspect_resource(\n 'instances', self.use_instance_names[i], VAR_35=['--zone', self.\n use_instance_zones[i]]).contains_path_value('machineType', VAR_18[i])\n", "VAR_20.new_clause_builder('Instance %d Is Running' % i, retryable_for_secs=90\n ).inspect_resource('instances', name=self.use_instance_names[i], VAR_35\n =['--zone', self.use_instance_zones[i]]).contains_path_eq('status',\n 'RUNNING')\n" ]
[ "def create_instances(self):...\n", "\"\"\"docstring\"\"\"\n", "self.use_instance_names = ['katotest%sa' % self.test_id, 'katotest%sb' %\n self.test_id, 'katotest%sc' % self.test_id]\n", "self.use_instance_zones = [self.bindings['TEST_GCE_ZONE'], 'us-central1-b',\n self.bindings['TEST_GCE_ZONE']]\n", "if self.use_instance_zones[0] == self.use_instance_zones[1]:\n", "self.use_instance_zones[1] = 'us-central1-c'\n", "image_name = [self.bindings['TEST_GCE_IMAGE_NAME'],\n 'debian-7-wheezy-v20150818', self.bindings['TEST_GCE_IMAGE_NAME']]\n", "if image_name[0] == image_name[1]:\n", "image_name[1] = 'ubuntu-1404-trusty-v20150805'\n", "machine_type = ['f1-micro', 'g1-small', 'f1-micro']\n", "instance_spec = []\n", "builder = gcp.GceContractBuilder(self.gce_observer)\n", "for i in range(3):\n", "instance_spec.append({'createGoogleInstanceDescription': {'instanceName':\n self.use_instance_names[i], 'image': image_name[i], 'instanceType':\n machine_type[i], 'zone': self.use_instance_zones[i], 'credentials':\n self.bindings['GCE_CREDENTIALS']}})\n", "payload = self.agent.make_json_payload_from_object(instance_spec)\n", "builder.new_clause_builder('Instance %d Created' % i, retryable_for_secs=90\n ).list_resources('instances').contains_path_value('name', self.\n use_instance_names[i])\n", "return st.OperationContract(self.new_post_operation(title=\n 'create_instances', data=payload, path='ops'), contract=builder.build())\n", "if i < 2:\n", "builder.new_clause_builder('Instance %d Details' % i).inspect_resource(\n 'instances', self.use_instance_names[i], extra_args=['--zone', self.\n use_instance_zones[i]]).contains_path_value('machineType', machine_type[i])\n", "builder.new_clause_builder('Instance %d Is Running' % i, retryable_for_secs=90\n ).inspect_resource('instances', name=self.use_instance_names[i],\n extra_args=['--zone', self.use_instance_zones[i]]).contains_path_eq(\n 'status', 'RUNNING')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Expr'", "Assign'", "Expr'", "Return'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_17(self, VAR_18):...\n", "\"\"\"docstring\"\"\"\n", "self._eql_execute('volume', 'select', VAR_18['volume_name'], 'snapshot',\n 'delete', VAR_18['name'])\n", "VAR_0.error(_('Failed to delete snapshot %(snap)s of volume %(vol)s'), {\n 'snap': VAR_18['name'], 'vol': VAR_18['volume_name']})\n" ]
[ "def delete_snapshot(self, snapshot):...\n", "\"\"\"docstring\"\"\"\n", "self._eql_execute('volume', 'select', snapshot['volume_name'], 'snapshot',\n 'delete', snapshot['name'])\n", "LOG.error(_('Failed to delete snapshot %(snap)s of volume %(vol)s'), {\n 'snap': snapshot['name'], 'vol': snapshot['volume_name']})\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Expr'" ]
[ "def FUNC_0(self, VAR_5=''):...\n", "VAR_8 = get_surveys_by_email(self.current_user, 10)\n", "self.render('index.html', message=msg, VAR_8=surveys)\n" ]
[ "def get(self, msg=''):...\n", "surveys = get_surveys_by_email(self.current_user, 10)\n", "self.render('index.html', message=msg, surveys=surveys)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_8(self, VAR_4, VAR_5):...\n", "if VAR_4 == 'ansible_group_priority':\n", "self.set_priority(int(VAR_5))\n", "self.vars[VAR_4] = VAR_5\n" ]
[ "def set_variable(self, key, value):...\n", "if key == 'ansible_group_priority':\n", "self.set_priority(int(value))\n", "self.vars[key] = value\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Assign'" ]
[ "def FUNC_4(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = jc.Contract()\n", "return st.OperationContract(self.agent.make_delete_app_operation(VAR_1=self\n .bindings, application=self.TEST_APP), VAR_6=contract)\n" ]
[ "def delete_app(self):...\n", "\"\"\"docstring\"\"\"\n", "contract = jc.Contract()\n", "return st.OperationContract(self.agent.make_delete_app_operation(bindings=\n self.bindings, application=self.TEST_APP), contract=contract)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_23(self):...\n", "return True\n" ]
[ "def is_active(self):...\n", "return True\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __init__(self, VAR_1, VAR_7, VAR_4=None, VAR_12=None):...\n", "GenericRequest.__init__(self, VAR_1, VAR_4)\n", "self.url = '%stasks/%s/submit' % (self.base_url, VAR_7[1])\n", "self.task = VAR_7\n", "self.submissions_path = VAR_12\n", "self.data = {}\n" ]
[ "def __init__(self, browser, task, base_url=None, submissions_path=None):...\n", "GenericRequest.__init__(self, browser, base_url)\n", "self.url = '%stasks/%s/submit' % (self.base_url, task[1])\n", "self.task = task\n", "self.submissions_path = submissions_path\n", "self.data = {}\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "@classmethod...\n", "if not VAR_15 or VAR_15 == '*':\n", "return []\n", "VAR_32 = raw_sql(\n 'SELECT rowid FROM FtsIndex WHERE FtsIndex MATCH $query ORDER BY bm25(FtsIndex) LIMIT $lim'\n )\n", "return VAR_14.select(lambda g: g.rowid in VAR_32)\n" ]
[ "@classmethod...\n", "if not query or query == '*':\n", "return []\n", "fts_ids = raw_sql(\n 'SELECT rowid FROM FtsIndex WHERE FtsIndex MATCH $query ORDER BY bm25(FtsIndex) LIMIT $lim'\n )\n", "return cls.select(lambda g: g.rowid in fts_ids)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Assign'", "Return'" ]
[ "def FUNC_4(self):...\n", "VAR_4 = self.live_server_url + '/datagroups/'\n", "self.browser.get(VAR_4)\n", "self.browser.find_element_by_xpath('//*[@title=\"edit\"]').click()\n", "VAR_6 = self.browser.find_element_by_name('cancel')\n", "self.assertEqual(VAR_6.get_attribute('href'), VAR_4,\n 'User should go back to list view when clicking cancel')\n", "VAR_10 = DataGroup.objects.first()\n", "VAR_11 = f'{self.live_server_url}/datasource/{VAR_10.data_source.pk}'\n", "self.browser.get(VAR_11)\n", "self.browser.find_elements_by_xpath('//*[@title=\"edit\"]')[1].click()\n", "VAR_6 = self.browser.find_element_by_name('cancel')\n", "self.assertEqual(VAR_6.get_attribute('href'), VAR_11,\n 'User should go back to detail view when clicking cancel')\n", "VAR_12 = f'{self.live_server_url}/datagroup/{VAR_10.pk}/'\n", "self.browser.get(VAR_12)\n", "self.browser.find_element_by_xpath('//*[@title=\"edit\"]').click()\n", "VAR_6 = self.browser.find_element_by_name('cancel')\n", "self.assertEqual(VAR_6.get_attribute('href'), VAR_12,\n 'User should go back to detail view when clicking cancel')\n", "VAR_13 = f'{self.live_server_url}/datagroup/edit/{VAR_10.pk}/'\n", "self.browser.get(VAR_13)\n", "self.browser.find_element_by_name('cancel').click()\n", "self.assertIn('/datagroups/', self.browser.current_url,\n 'User should always return to detail page after submit')\n" ]
[ "def test_datagroup(self):...\n", "list_url = self.live_server_url + '/datagroups/'\n", "self.browser.get(list_url)\n", "self.browser.find_element_by_xpath('//*[@title=\"edit\"]').click()\n", "btn = self.browser.find_element_by_name('cancel')\n", "self.assertEqual(btn.get_attribute('href'), list_url,\n 'User should go back to list view when clicking cancel')\n", "dg = DataGroup.objects.first()\n", "ds_detail_url = f'{self.live_server_url}/datasource/{dg.data_source.pk}'\n", "self.browser.get(ds_detail_url)\n", "self.browser.find_elements_by_xpath('//*[@title=\"edit\"]')[1].click()\n", "btn = self.browser.find_element_by_name('cancel')\n", "self.assertEqual(btn.get_attribute('href'), ds_detail_url,\n 'User should go back to detail view when clicking cancel')\n", "dg_detail_url = f'{self.live_server_url}/datagroup/{dg.pk}/'\n", "self.browser.get(dg_detail_url)\n", "self.browser.find_element_by_xpath('//*[@title=\"edit\"]').click()\n", "btn = self.browser.find_element_by_name('cancel')\n", "self.assertEqual(btn.get_attribute('href'), dg_detail_url,\n 'User should go back to detail view when clicking cancel')\n", "edit_url = f'{self.live_server_url}/datagroup/edit/{dg.pk}/'\n", "self.browser.get(edit_url)\n", "self.browser.find_element_by_name('cancel').click()\n", "self.assertIn('/datagroups/', self.browser.current_url,\n 'User should always return to detail page after submit')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_16(self, VAR_25, VAR_26='', VAR_21=True):...\n", "VAR_20 = request_mock(VAR_25)\n", "VAR_20.args = VAR_26\n", "return self._render(VAR_20, VAR_21)\n" ]
[ "def get(self, path, get_args='', as_json=True):...\n", "request = request_mock(path)\n", "request.args = get_args\n", "return self._render(request, as_json)\n" ]
[ 0, 5, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_32(self):...\n", "VAR_44 = VAR_54.get_doc(self.doctype, self.name).as_dict()\n", "for VAR_7 in self.as_dict():\n", "VAR_25 = self.meta.get_field(VAR_7)\n", "VAR_60 = VAR_44.get(VAR_7)\n", "if VAR_25 and not VAR_25.allow_on_submit and (self.get(VAR_7) or VAR_60):\n", "if VAR_25.fieldtype == 'Table':\n", "VAR_72 = len(self.get(VAR_7))\n", "VAR_72 = self.get_value(VAR_7)\n", "VAR_60 = len(VAR_60)\n", "if VAR_72 != VAR_60:\n", "VAR_54.throw(_('Not allowed to change {0} after submission').format(VAR_25.\n label), VAR_54.UpdateAfterSubmitError)\n" ]
[ "def _validate_update_after_submit(self):...\n", "db_values = frappe.get_doc(self.doctype, self.name).as_dict()\n", "for key in self.as_dict():\n", "df = self.meta.get_field(key)\n", "db_value = db_values.get(key)\n", "if df and not df.allow_on_submit and (self.get(key) or db_value):\n", "if df.fieldtype == 'Table':\n", "self_value = len(self.get(key))\n", "self_value = self.get_value(key)\n", "db_value = len(db_value)\n", "if self_value != db_value:\n", "frappe.throw(_('Not allowed to change {0} after submission').format(df.\n label), frappe.UpdateAfterSubmitError)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Expr'" ]
[ "def FUNC_1(self, VAR_2, **VAR_1):...\n", "if VAR_2.GET.get('keyword'):\n", "VAR_4 = VAR_2.GET.get('keyword')\n", "VAR_3 = self.get_context_data()\n", "return HttpResponseRedirect(VAR_4)\n", "return self.render_to_response(VAR_3)\n" ]
[ "def get(self, request, **kwargs):...\n", "if request.GET.get('keyword'):\n", "domain = request.GET.get('keyword')\n", "context = self.get_context_data()\n", "return HttpResponseRedirect(domain)\n", "return self.render_to_response(context)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Assign'", "Return'", "Return'" ]
[ "def FUNC_6(self, VAR_8):...\n", "\"\"\"docstring\"\"\"\n", "return dump_cookie(self.app.config.get('SESSION_COOKIE_NAME'), VAR_8,\n max_age=float(self.app.config.get('PERMANENT_SESSION_LIFETIME')), path=\n self.app.config.get('SESSION_COOKIE_PATH'), domain=self.app.config.get(\n 'SESSION_COOKIE_DOMAIN'))\n" ]
[ "def dump_session_cookie(self, session_id):...\n", "\"\"\"docstring\"\"\"\n", "return dump_cookie(self.app.config.get('SESSION_COOKIE_NAME'), session_id,\n max_age=float(self.app.config.get('PERMANENT_SESSION_LIFETIME')), path=\n self.app.config.get('SESSION_COOKIE_PATH'), domain=self.app.config.get(\n 'SESSION_COOKIE_DOMAIN'))\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_17(self, VAR_9=True):...\n", "\"\"\"docstring\"\"\"\n", "self._kill_process_type(VAR_13.PROCESS_TYPE_PLASMA_STORE, VAR_9=check_alive)\n" ]
[ "def kill_plasma_store(self, check_alive=True):...\n", "\"\"\"docstring\"\"\"\n", "self._kill_process_type(ray_constants.PROCESS_TYPE_PLASMA_STORE,\n check_alive=check_alive)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'" ]
[ "def FUNC_1(self, VAR_4):...\n", "VAR_4 = u\"'{0}'\".format(VAR_4)\n", "return VAR_4\n" ]
[ "def insert_format(self, value):...\n", "value = u\"'{0}'\".format(value)\n", "return value\n" ]
[ 0, 4, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_4(self):...\n", "VAR_15 = self.path\n", "if VAR_15.startswith('/chromium/src/+/master'):\n", "VAR_15 = VAR_15[len('/chromium/src/+/master'):]\n", "VAR_16 = os.path.realpath(os.path.join(self.server.top_level, VAR_15[1:]))\n", "if not VAR_16.startswith(self.server.top_level):\n", "self._DoUnknown()\n", "if VAR_15 in ('/base.css', '/doc.css', '/prettify.css'):\n", "self._DoCSS(VAR_15[1:])\n", "if not os.path.exists(VAR_16):\n", "self._DoNotFound()\n", "if VAR_15.lower().endswith('.md'):\n", "self._DoMD(VAR_15)\n", "if os.path.exists(VAR_16 + '/README.md'):\n", "self._DoMD(VAR_15 + '/README.md')\n", "if VAR_15.lower().endswith('.png'):\n", "self._DoImage(VAR_16, 'image/png')\n", "if VAR_15.lower().endswith('.jpg'):\n", "self._DoImage(VAR_16, 'image/jpeg')\n", "if os.path.isdir(VAR_16):\n", "self._DoDirListing(VAR_16)\n", "if os.path.exists(VAR_16):\n", "self._DoRawSourceFile(VAR_16)\n", "self._DoUnknown()\n" ]
[ "def do_GET(self):...\n", "path = self.path\n", "if path.startswith('/chromium/src/+/master'):\n", "path = path[len('/chromium/src/+/master'):]\n", "full_path = os.path.realpath(os.path.join(self.server.top_level, path[1:]))\n", "if not full_path.startswith(self.server.top_level):\n", "self._DoUnknown()\n", "if path in ('/base.css', '/doc.css', '/prettify.css'):\n", "self._DoCSS(path[1:])\n", "if not os.path.exists(full_path):\n", "self._DoNotFound()\n", "if path.lower().endswith('.md'):\n", "self._DoMD(path)\n", "if os.path.exists(full_path + '/README.md'):\n", "self._DoMD(path + '/README.md')\n", "if path.lower().endswith('.png'):\n", "self._DoImage(full_path, 'image/png')\n", "if path.lower().endswith('.jpg'):\n", "self._DoImage(full_path, 'image/jpeg')\n", "if os.path.isdir(full_path):\n", "self._DoDirListing(full_path)\n", "if os.path.exists(full_path):\n", "self._DoRawSourceFile(full_path)\n", "self._DoUnknown()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Expr'" ]
[ "@functools.wraps(VAR_3)...\n", "VAR_23 = VAR_5.pop('timeout', None)\n", "VAR_24 = eventlet.spawn(VAR_3, self, *VAR_4, **kwargs)\n", "if VAR_23 is None:\n", "return VAR_24.wait()\n", "VAR_36 = eventlet.spawn_after(VAR_23, VAR_24.kill)\n", "VAR_48 = VAR_24.wait()\n", "VAR_36.cancel()\n", "return VAR_48\n" ]
[ "@functools.wraps(f)...\n", "timeout = kwargs.pop('timeout', None)\n", "gt = eventlet.spawn(f, self, *args, **kwargs)\n", "if timeout is None:\n", "return gt.wait()\n", "kill_thread = eventlet.spawn_after(timeout, gt.kill)\n", "res = gt.wait()\n", "kill_thread.cancel()\n", "return res\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "\"\"\"\nThis module contains ...\n\"\"\"\n", "from __future__ import division, absolute_import\n", "import cPickle as pickle\n", "import pickle\n", "import os\n", "import time\n", "import fnmatch\n", "import hashlib\n", "import re\n", "import stat\n", "import errno\n", "from twisted.python import log\n", "from cowrie.core.config import CONFIG\n", "VAR_0 = pickle.load(FUNC_14(CONFIG.get('honeypot', 'filesystem_file'), 'rb'))\n", "VAR_1, VAR_2, VAR_3, VAR_4, VAR_5, VAR_6, VAR_7, VAR_8, VAR_9, VAR_10 = list(\n range(0, 10))\n", "VAR_11, VAR_12, VAR_13, VAR_14, VAR_15, VAR_16, VAR_17 = list(range(0, 7))\n", "\"\"\"string\"\"\"\n", "\"\"\"\n raise OSError(errno.ENOENT, os.strerror(errno.ENOENT))\n \"\"\"\n", "\"\"\"\n \"\"\"\n", "def __init__(self, VAR_18, VAR_19):...\n", "self.fs = VAR_18\n", "self.cfg = VAR_19\n", "self.tempfiles = {}\n", "self.filenames = {}\n", "self.newcount = 0\n", "self.init_honeyfs(self.cfg.get('honeypot', 'contents_path'))\n", "def FUNC_0(self, VAR_20):...\n", "\"\"\"docstring\"\"\"\n", "for VAR_21, directories, filenames in os.walk(VAR_20):\n", "for VAR_32 in filenames:\n", "def FUNC_1(self, VAR_21, VAR_22):...\n", "VAR_72 = os.path.join(VAR_21, VAR_32)\n", "\"\"\"docstring\"\"\"\n", "VAR_73 = '/' + os.path.relpath(VAR_72, VAR_20)\n", "VAR_55 = VAR_21.rstrip('/').split('/')\n", "VAR_24 = self.getfile(VAR_73, VAR_23=False)\n", "if VAR_21[0] == '/':\n", "if VAR_24 and VAR_24[VAR_2] == VAR_13:\n", "VAR_22 = []\n", "VAR_22 = [x for x in VAR_22.split('/') if len(x) and x is not None]\n", "self.update_realfile(VAR_24, VAR_72)\n", "while 1:\n", "if not len(VAR_55):\n", "VAR_66 = VAR_55.pop(0)\n", "return '/%s' % ('/'.join(VAR_22),)\n", "if VAR_66 == '..':\n", "if len(VAR_22):\n", "if VAR_66 in ('.', ''):\n", "VAR_22.pop()\n", "VAR_22.append(VAR_66)\n" ]
[ "\"\"\"\nThis module contains ...\n\"\"\"\n", "from __future__ import division, absolute_import\n", "import cPickle as pickle\n", "import pickle\n", "import os\n", "import time\n", "import fnmatch\n", "import hashlib\n", "import re\n", "import stat\n", "import errno\n", "from twisted.python import log\n", "from cowrie.core.config import CONFIG\n", "PICKLE = pickle.load(open(CONFIG.get('honeypot', 'filesystem_file'), 'rb'))\n", "(A_NAME, A_TYPE, A_UID, A_GID, A_SIZE, A_MODE, A_CTIME, A_CONTENTS,\n A_TARGET, A_REALFILE) = list(range(0, 10))\n", "T_LINK, T_DIR, T_FILE, T_BLK, T_CHR, T_SOCK, T_FIFO = list(range(0, 7))\n", "\"\"\"\n 62 ELOOP Too many levels of symbolic links. A path name lookup involved more than 8 symbolic links.\n raise OSError(errno.ELOOP, os.strerror(errno.ENOENT))\n \"\"\"\n", "\"\"\"\n raise OSError(errno.ENOENT, os.strerror(errno.ENOENT))\n \"\"\"\n", "\"\"\"\n \"\"\"\n", "def __init__(self, fs, cfg):...\n", "self.fs = fs\n", "self.cfg = cfg\n", "self.tempfiles = {}\n", "self.filenames = {}\n", "self.newcount = 0\n", "self.init_honeyfs(self.cfg.get('honeypot', 'contents_path'))\n", "def init_honeyfs(self, honeyfs_path):...\n", "\"\"\"docstring\"\"\"\n", "for path, directories, filenames in os.walk(honeyfs_path):\n", "for filename in filenames:\n", "def resolve_path(self, path, cwd):...\n", "realfile_path = os.path.join(path, filename)\n", "\"\"\"docstring\"\"\"\n", "virtual_path = '/' + os.path.relpath(realfile_path, honeyfs_path)\n", "pieces = path.rstrip('/').split('/')\n", "f = self.getfile(virtual_path, follow_symlinks=False)\n", "if path[0] == '/':\n", "if f and f[A_TYPE] == T_FILE:\n", "cwd = []\n", "cwd = [x for x in cwd.split('/') if len(x) and x is not None]\n", "self.update_realfile(f, realfile_path)\n", "while 1:\n", "if not len(pieces):\n", "piece = pieces.pop(0)\n", "return '/%s' % ('/'.join(cwd),)\n", "if piece == '..':\n", "if len(cwd):\n", "if piece in ('.', ''):\n", "cwd.pop()\n", "cwd.append(piece)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Docstring", "For", "For", "FunctionDef'", "Assign'", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Expr'", "Condition", "Condition", "Assign'", "Return'", "Condition", "Condition", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_11(VAR_1, **VAR_6):...\n", "import sql\n", "VAR_43 = 'install_haproxy.sh'\n", "VAR_44 = sql.get_setting('tmp_config_path')\n", "VAR_45 = sql.get_setting('haproxy_sock_port')\n", "VAR_46 = sql.get_setting('stats_port')\n", "VAR_47 = sql.get_setting('server_state_file')\n", "VAR_48 = sql.get_setting('stats_user')\n", "VAR_49 = sql.get_setting('stats_password')\n", "VAR_24 = sql.get_setting('proxy')\n", "os.system('cp scripts/%s .' % VAR_43)\n", "VAR_50 = VAR_24 if VAR_24 is not None else ''\n", "VAR_13 = ['sudo chmod +x ' + VAR_44 + VAR_43 + ' && ' + VAR_44 + '/' +\n VAR_43 + ' PROXY=' + VAR_50 + ' SOCK_PORT=' + VAR_45 + ' STAT_PORT=' +\n VAR_46 + ' STAT_FILE=' + VAR_47 + ' STATS_USER=' + VAR_48 +\n ' STATS_PASS=' + VAR_49]\n", "VAR_51 = str(FUNC_15(VAR_1, VAR_44, VAR_43))\n", "if VAR_51:\n", "print('error: ' + VAR_51)\n", "os.system('rm -f %s' % VAR_43)\n", "FUNC_22(VAR_1, VAR_13, print_out='1')\n", "if VAR_6.get('syn_flood') == '1':\n", "FUNC_12(VAR_1)\n" ]
[ "def install_haproxy(serv, **kwargs):...\n", "import sql\n", "script = 'install_haproxy.sh'\n", "tmp_config_path = sql.get_setting('tmp_config_path')\n", "haproxy_sock_port = sql.get_setting('haproxy_sock_port')\n", "stats_port = sql.get_setting('stats_port')\n", "server_state_file = sql.get_setting('server_state_file')\n", "stats_user = sql.get_setting('stats_user')\n", "stats_password = sql.get_setting('stats_password')\n", "proxy = sql.get_setting('proxy')\n", "os.system('cp scripts/%s .' % script)\n", "proxy_serv = proxy if proxy is not None else ''\n", "commands = ['sudo chmod +x ' + tmp_config_path + script + ' && ' +\n tmp_config_path + '/' + script + ' PROXY=' + proxy_serv + ' SOCK_PORT=' +\n haproxy_sock_port + ' STAT_PORT=' + stats_port + ' STAT_FILE=' +\n server_state_file + ' STATS_USER=' + stats_user + ' STATS_PASS=' +\n stats_password]\n", "error = str(upload(serv, tmp_config_path, script))\n", "if error:\n", "print('error: ' + error)\n", "os.system('rm -f %s' % script)\n", "ssh_command(serv, commands, print_out='1')\n", "if kwargs.get('syn_flood') == '1':\n", "syn_flood_protect(serv)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Import'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Expr'", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_13(self):...\n", "VAR_4 = {'r': u'not ascii £ һ'}\n", "VAR_1 = 'http://my.url.com'\n", "VAR_2 = 'True'\n", "url_helper.urllib2.urlopen(mox.StrContains(VAR_1), mox.IgnoreArg(), timeout\n =mox.IgnoreArg()).AndReturn(StringIO.StringIO(VAR_2))\n", "self._mox.ReplayAll()\n", "self.assertEqual(url_helper.UrlOpen(VAR_1, VAR_4=data), VAR_2)\n", "self._mox.VerifyAll()\n" ]
[ "def testNonAcsiiData(self):...\n", "data = {'r': u'not ascii £ һ'}\n", "url = 'http://my.url.com'\n", "response = 'True'\n", "url_helper.urllib2.urlopen(mox.StrContains(url), mox.IgnoreArg(), timeout=\n mox.IgnoreArg()).AndReturn(StringIO.StringIO(response))\n", "self._mox.ReplayAll()\n", "self.assertEqual(url_helper.UrlOpen(url, data=data), response)\n", "self._mox.VerifyAll()\n" ]
[ 0, 0, 0, 0, 5, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_6(VAR_16, VAR_18, VAR_17):...\n", "\"\"\"docstring\"\"\"\n", "VAR_47 = {}\n", "VAR_46 = reader.KeywordToken\n", "for VAR_76, v in keyworder.get_author_keywords(VAR_16, VAR_18, VAR_17).items():\n", "VAR_47[VAR_46(VAR_76, type='author-kw')] = v\n", "return VAR_47\n" ]
[ "def extract_author_keywords(skw_db, ckw_db, fulltext):...\n", "\"\"\"docstring\"\"\"\n", "akw = {}\n", "K = reader.KeywordToken\n", "for k, v in keyworder.get_author_keywords(skw_db, ckw_db, fulltext).items():\n", "akw[K(k, type='author-kw')] = v\n", "return akw\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "For", "Assign'", "Return'" ]
[ "def FUNC_31(self, VAR_35, VAR_32):...\n", "\"\"\"docstring\"\"\"\n", "VAR_60 = {p for val in VAR_35 for p in val.split(',')}\n", "if not VAR_60:\n", "return {}\n", "VAR_61 = {}\n", "VAR_62 = get_buildroot()\n", "VAR_63 = self.context.products.get_data('runtime_classpath')\n", "for VAR_36 in VAR_32:\n", "VAR_78 = self._maybe_get_plugin_name(VAR_36)\n", "VAR_64 = VAR_60 - set(VAR_61.keys())\n", "if VAR_78 in VAR_60:\n", "VAR_80 = self._plugin_targets('scalac').get(VAR_78, [])\n", "VAR_81 = [os.path.relpath(cpe, VAR_62) for cpe in ClasspathUtil.\n internal_classpath(VAR_80, VAR_63, self._confs)]\n", "VAR_81 = VAR_81 or [VAR_36]\n", "if VAR_61.get(VAR_78, VAR_81) != VAR_81:\n", "VAR_61[VAR_78] = VAR_81\n", "if len(VAR_61) == len(VAR_60):\n", "return VAR_61\n" ]
[ "def _find_scalac_plugins(self, scalac_plugins, classpath):...\n", "\"\"\"docstring\"\"\"\n", "plugin_names = {p for val in scalac_plugins for p in val.split(',')}\n", "if not plugin_names:\n", "return {}\n", "active_plugins = {}\n", "buildroot = get_buildroot()\n", "cp_product = self.context.products.get_data('runtime_classpath')\n", "for classpath_element in classpath:\n", "name = self._maybe_get_plugin_name(classpath_element)\n", "unresolved_plugins = plugin_names - set(active_plugins.keys())\n", "if name in plugin_names:\n", "plugin_target_closure = self._plugin_targets('scalac').get(name, [])\n", "rel_classpath_elements = [os.path.relpath(cpe, buildroot) for cpe in\n ClasspathUtil.internal_classpath(plugin_target_closure, cp_product,\n self._confs)]\n", "rel_classpath_elements = rel_classpath_elements or [classpath_element]\n", "if active_plugins.get(name, rel_classpath_elements) != rel_classpath_elements:\n", "active_plugins[name] = rel_classpath_elements\n", "if len(active_plugins) == len(plugin_names):\n", "return active_plugins\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Return'" ]
[ "def FUNC_25(self, VAR_32):...\n", "VAR_25 = self.marks[0][0]\n", "VAR_71 = self.finish()\n", "VAR_32.timing(VAR_25, VAR_71['duration_ms'])\n", "for FUNC_22, duration in six.iteritems(VAR_71['marks_ms']):\n", "VAR_32.timing('{}.{}'.format(VAR_25, FUNC_22), duration)\n" ]
[ "def send_metrics_to(self, metrics):...\n", "name = self.marks[0][0]\n", "final = self.finish()\n", "metrics.timing(name, final['duration_ms'])\n", "for mark, duration in six.iteritems(final['marks_ms']):\n", "metrics.timing('{}.{}'.format(name, mark), duration)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "For", "Expr'" ]
[ "def FUNC_4(self, VAR_4):...\n", "\"\"\"docstring\"\"\"\n", "if not VAR_4.id is None:\n", "if VAR_4.id in self.__id_map:\n", "if not self.__allow_none_id:\n", "self.__id_map[VAR_4.id] = VAR_4\n", "if hasattr(VAR_4, 'slug') and not VAR_4.slug is None:\n", "if VAR_4.slug in self.__slug_map:\n", "self.__entities.append(VAR_4)\n", "self.__slug_map[VAR_4.slug] = VAR_4\n" ]
[ "def add(self, entity):...\n", "\"\"\"docstring\"\"\"\n", "if not entity.id is None:\n", "if entity.id in self.__id_map:\n", "if not self.__allow_none_id:\n", "self.__id_map[entity.id] = entity\n", "if hasattr(entity, 'slug') and not entity.slug is None:\n", "if entity.slug in self.__slug_map:\n", "self.__entities.append(entity)\n", "self.__slug_map[entity.slug] = entity\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Condition", "Condition", "Assign'", "Condition", "Condition", "Expr'", "Assign'" ]
[ "def FUNC_2(self, VAR_5):...\n", "if self.table_view.verticalScrollBar().value(\n", "self.load_channels()\n" ]
[ "def _on_list_scroll(self, event):...\n", "if self.table_view.verticalScrollBar().value(\n", "self.load_channels()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'" ]
[ "def FUNC_6(self):...\n", "self.uut.gives_corrected = True\n", "VAR_1 = tuple(self.uut.process_output(['a', 'b'], 'filename', ['a', 'b']))\n", "self.assertEqual((), VAR_1)\n", "VAR_1 = tuple(self.uut.process_output(['a', 'b'], 'filename', ['a']))\n", "self.assertEqual(len(VAR_1), 1)\n" ]
[ "def test_gives_corrected(self):...\n", "self.uut.gives_corrected = True\n", "out = tuple(self.uut.process_output(['a', 'b'], 'filename', ['a', 'b']))\n", "self.assertEqual((), out)\n", "out = tuple(self.uut.process_output(['a', 'b'], 'filename', ['a']))\n", "self.assertEqual(len(out), 1)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_7(self):...\n", "VAR_14 = ['http://8.9.10.11:8080', 'https://9.10.11.12:4433']\n", "VAR_15 = self.new_mocked_cluster(VAR_14, FUNC_0)\n", "self._assert_providers(VAR_15, [(urlparse.urlparse(p).netloc, p) for p in\n VAR_14])\n" ]
[ "def test_conf_providers_with_scheme(self):...\n", "conf_managers = ['http://8.9.10.11:8080', 'https://9.10.11.12:4433']\n", "api = self.new_mocked_cluster(conf_managers, _validate_conn_up)\n", "self._assert_providers(api, [(urlparse.urlparse(p).netloc, p) for p in\n conf_managers])\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'" ]
[ "def __init__(self, VAR_17, VAR_18):...\n", "self.name = VAR_18\n", "self.path = VAR_17 + '/scripts/' + VAR_18\n", "self.exists = FUNC_6(self.path)\n", "if not self.exists:\n", "return\n", "self.lines = list(self.read_file())\n" ]
[ "def __init__(self, app_path, name):...\n", "self.name = name\n", "self.path = app_path + '/scripts/' + name\n", "self.exists = file_exists(self.path)\n", "if not self.exists:\n", "return\n", "self.lines = list(self.read_file())\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Return'", "Assign'" ]
[ "def FUNC_13(self, VAR_2, VAR_3, VAR_4, *VAR_5):...\n", "self.write_data({'type': 'market_ask', 'event': VAR_5[0]})\n" ]
[ "def on_market_ask(self, subject, changetype, objectID, *args):...\n", "self.write_data({'type': 'market_ask', 'event': args[0]})\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_3(self, VAR_8, VAR_9):...\n", "for line in VAR_8:\n", "if line.startswith(VAR_9):\n", "return\n", "return line[len(VAR_9):]\n" ]
[ "def _get_prefixed_value(self, lines, prefix):...\n", "for line in lines:\n", "if line.startswith(prefix):\n", "return\n", "return line[len(prefix):]\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Condition", "Return'", "Return'" ]
[ "def FUNC_0(VAR_1):...\n", "for key in VAR_6.__all__:\n", "if not hasattr(VAR_1, key):\n", "setattr(VAR_1, key, getattr(VAR_6, key))\n" ]
[ "def _include_filters(obj):...\n", "for key in filters.__all__:\n", "if not hasattr(obj, key):\n", "setattr(obj, key, getattr(filters, key))\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Condition", "Expr'" ]
[ "def FUNC_5(self) ->'Report':...\n", "\"\"\"docstring\"\"\"\n", "self.body = []\n", "self.data = SharedCache()\n", "self.files = SharedCache()\n", "self._last_update_time = time.time()\n", "return self\n" ]
[ "def clear(self) ->'Report':...\n", "\"\"\"docstring\"\"\"\n", "self.body = []\n", "self.data = SharedCache()\n", "self.files = SharedCache()\n", "self._last_update_time = time.time()\n", "return self\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_2(self, VAR_9, VAR_10):...\n", "\"\"\"docstring\"\"\"\n" ]
[ "def ensure_export(self, context, volume):...\n", "\"\"\"docstring\"\"\"\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Docstring" ]
[ "def FUNC_0(VAR_2):...\n", "if VAR_2[2] == '':\n", "VAR_8 = VAR_2[0] + ' is ' + VAR_2[1] + '.'\n", "if VAR_2[0][2] != '' and VAR_2[2]:\n", "return VAR_8\n", "VAR_8 = VAR_2[0] + ' brewed in ' + VAR_2[2] + ' is ' + VAR_2[1] + '.'\n" ]
[ "def formatReply(result):...\n", "if result[2] == '':\n", "reply = result[0] + ' is ' + result[1] + '.'\n", "if result[0][2] != '' and result[2]:\n", "return reply\n", "reply = result[0] + ' brewed in ' + result[2] + ' is ' + result[1] + '.'\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Return'", "Assign'" ]
[ "def FUNC_0(self):...\n", "VAR_1, VAR_2 = 'firstuser', 'password'\n", "VAR_3 = '[email protected]'\n", "VAR_4 = {'username': VAR_1, 'password': VAR_2, 'email': VAR_3}\n", "VAR_5 = '/api/auth/register'\n", "VAR_6 = self.client.post(VAR_5, json.dumps(VAR_4), content_type=\n 'application/json')\n", "self.assertEqual(VAR_6.status_code, 201)\n", "self.assertTrue(VAR_6.data['is_superuser'])\n", "VAR_1, VAR_2 = 'seconduser', 'password'\n", "VAR_3 = '[email protected]'\n", "VAR_4 = {'username': VAR_1, 'password': VAR_2, 'email': VAR_3}\n", "VAR_5 = '/api/auth/register'\n", "VAR_6 = self.client.post(VAR_5, json.dumps(VAR_4), content_type=\n 'application/json')\n", "self.assertEqual(VAR_6.status_code, 201)\n", "self.assertFalse(VAR_6.data['is_superuser'])\n" ]
[ "def test_first_signup(self):...\n", "username, password = 'firstuser', 'password'\n", "email = '[email protected]'\n", "submit = {'username': username, 'password': password, 'email': email}\n", "url = '/api/auth/register'\n", "response = self.client.post(url, json.dumps(submit), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "self.assertTrue(response.data['is_superuser'])\n", "username, password = 'seconduser', 'password'\n", "email = '[email protected]'\n", "submit = {'username': username, 'password': password, 'email': email}\n", "url = '/api/auth/register'\n", "response = self.client.post(url, json.dumps(submit), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "self.assertFalse(response.data['is_superuser'])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_5(self, VAR_11):...\n", "return VAR_11.name\n" ]
[ "def get_object_id(self, keypair):...\n", "return keypair.name\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __init__(self, VAR_7=3, VAR_8='peek', VAR_9=256, **VAR_1):...\n", "super().__init__(VAR_8, **kwargs)\n", "self.timeout = VAR_7\n", "self.maxlen = VAR_9\n", "self.argparse.add_argument('-u', '--udp', action='store_true', dest='udp',\n default=False, help='Use UDP instead of TCP')\n", "self.argparse.add_argument('-6', '--ipv6', action='store_true', dest='ipv6',\n default=False, help='Use IPv6 sockets to connect to target')\n", "self.argparse.add_argument('host', help='Host or IP to connect to')\n", "self.argparse.add_argument('port', type=int, help='TCP/UDP port to connect to')\n" ]
[ "def __init__(self, timeout=3, command_name='peek', maxlen=256, **kwargs):...\n", "super().__init__(command_name, **kwargs)\n", "self.timeout = timeout\n", "self.maxlen = maxlen\n", "self.argparse.add_argument('-u', '--udp', action='store_true', dest='udp',\n default=False, help='Use UDP instead of TCP')\n", "self.argparse.add_argument('-6', '--ipv6', action='store_true', dest='ipv6',\n default=False, help='Use IPv6 sockets to connect to target')\n", "self.argparse.add_argument('host', help='Host or IP to connect to')\n", "self.argparse.add_argument('port', type=int, help='TCP/UDP port to connect to')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_6(self, VAR_42, VAR_43):...\n", "if not VAR_101.user_is_loggedin or VAR_101.user.needs_captcha():\n", "if not captcha.valid_solution(VAR_42, VAR_43):\n", "VAR_101.errors.add(errors.BAD_CAPTCHA)\n" ]
[ "def run(self, iden, solution):...\n", "if not c.user_is_loggedin or c.user.needs_captcha():\n", "if not captcha.valid_solution(iden, solution):\n", "c.errors.add(errors.BAD_CAPTCHA)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Condition", "Expr'" ]
[ "def FUNC_20(VAR_8):...\n", "\"\"\"docstring\"\"\"\n", "VAR_8 = os.path.normpath(VAR_8)\n", "VAR_33 = re.search('{[^{]', VAR_8)\n", "VAR_34 = os.path.dirname(VAR_8[:VAR_33.start()]\n ) if VAR_33 else os.path.dirname(VAR_8)\n", "if not VAR_34:\n", "VAR_34 = '.'\n", "VAR_35 = [VAR_29.group('name') for VAR_29 in VAR_5.finditer(VAR_8)]\n", "VAR_36 = namedtuple('Wildcards', VAR_35)\n", "VAR_11 = VAR_36(*[list() for VAR_40 in VAR_35])\n", "VAR_8 = re.compile(FUNC_8(VAR_8))\n", "for dirpath, dirnames, filenames in os.walk(VAR_34):\n", "for VAR_0 in chain(filenames, dirnames):\n", "return VAR_11\n", "if dirpath != '.':\n", "VAR_0 = os.path.join(dirpath, VAR_0)\n", "VAR_29 = re.match(VAR_8, VAR_0)\n", "if VAR_29:\n", "for VAR_40, VAR_16 in VAR_29.groupdict().items():\n", "getattr(VAR_11, VAR_40).append(VAR_16)\n" ]
[ "def glob_wildcards(pattern):...\n", "\"\"\"docstring\"\"\"\n", "pattern = os.path.normpath(pattern)\n", "first_wildcard = re.search('{[^{]', pattern)\n", "dirname = os.path.dirname(pattern[:first_wildcard.start()]\n ) if first_wildcard else os.path.dirname(pattern)\n", "if not dirname:\n", "dirname = '.'\n", "names = [match.group('name') for match in _wildcard_regex.finditer(pattern)]\n", "Wildcards = namedtuple('Wildcards', names)\n", "wildcards = Wildcards(*[list() for name in names])\n", "pattern = re.compile(regex(pattern))\n", "for dirpath, dirnames, filenames in os.walk(dirname):\n", "for f in chain(filenames, dirnames):\n", "return wildcards\n", "if dirpath != '.':\n", "f = os.path.join(dirpath, f)\n", "match = re.match(pattern, f)\n", "if match:\n", "for name, value in match.groupdict().items():\n", "getattr(wildcards, name).append(value)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "For", "Return'", "Condition", "Assign'", "Assign'", "Condition", "For", "Expr'" ]
[ "def FUNC_2(self, VAR_8, VAR_9, VAR_10=None, VAR_11=None, VAR_12=(...\n", "get_and_check_project(VAR_9, VAR_11, VAR_12)\n", "VAR_28 = self.queryset.get(VAR_10=pk, VAR_1=project_pk)\n", "VAR_28.pending_action = VAR_8\n", "VAR_28.last_error = None\n", "VAR_28.save()\n", "scheduler.process_pending_tasks(background=True)\n", "return Response({'success': True})\n" ]
[ "def set_pending_action(self, pending_action, request, pk=None, project_pk=...\n", "get_and_check_project(request, project_pk, perms)\n", "task = self.queryset.get(pk=pk, project=project_pk)\n", "task.pending_action = pending_action\n", "task.last_error = None\n", "task.save()\n", "scheduler.process_pending_tasks(background=True)\n", "return Response({'success': True})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_5(self, VAR_0, VAR_1):...\n", "VAR_1 = self.kwargs['pk']\n", "VAR_15 = Event.objects.filter(Q(id__in=Hunt(id=pk).events.all())).order_by(\n '-publish_timestamp')\n", "return VAR_15\n" ]
[ "def get_queryset(self, request, pk):...\n", "pk = self.kwargs['pk']\n", "query = Event.objects.filter(Q(id__in=Hunt(id=pk).events.all())).order_by(\n '-publish_timestamp')\n", "return query\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "@BaseHandler.check_is_group_user('Announcement Manager')...\n", "if VAR_6:\n", "VAR_15 = Announce.by_id(VAR_6, self.sql_session).scalar()\n", "self._['user_groups'] = GroupList.get_user_groups(self.current_user.key,\n self.sql_session)\n", "if not VAR_15:\n", "self.page_render(self._)\n", "self._['ann_id'] = VAR_6\n", "self._['title'] = VAR_15.title\n", "self._['content'] = VAR_15.content\n", "self._['is_private'] = VAR_15.is_private\n", "VAR_16 = AttachmentList.by_ann_id(VAR_6, self.sql_session).all()\n", "self._['tags'] = AnnTag.get_ann_tags(VAR_6, self.sql_session)\n", "self._['atts'] = [att.to_dict() for att in VAR_16]\n", "if self.is_group_user(VAR_15.author_group_name):\n", "self._['group'] = VAR_15.author_group_name\n" ]
[ "@BaseHandler.check_is_group_user('Announcement Manager')...\n", "if ann_id:\n", "ann = Announce.by_id(ann_id, self.sql_session).scalar()\n", "self._['user_groups'] = GroupList.get_user_groups(self.current_user.key,\n self.sql_session)\n", "if not ann:\n", "self.page_render(self._)\n", "self._['ann_id'] = ann_id\n", "self._['title'] = ann.title\n", "self._['content'] = ann.content\n", "self._['is_private'] = ann.is_private\n", "atts = AttachmentList.by_ann_id(ann_id, self.sql_session).all()\n", "self._['tags'] = AnnTag.get_ann_tags(ann_id, self.sql_session)\n", "self._['atts'] = [att.to_dict() for att in atts]\n", "if self.is_group_user(ann.author_group_name):\n", "self._['group'] = ann.author_group_name\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'" ]
[ "def FUNC_5(self):...\n", "VAR_14 = 'select comment from comments where userid=%d order by date;'\n", "VAR_15 = sql.queryDB(self.conn, VAR_14)\n", "return VAR_15\n" ]
[ "def getAllComments(self):...\n", "sqlText = 'select comment from comments where userid=%d order by date;'\n", "allposts = sql.queryDB(self.conn, sqlText)\n", "return allposts\n" ]
[ 0, 4, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_7(self, *VAR_7, **VAR_8):...\n", "\"\"\"docstring\"\"\"\n", "for VAR_9 in VAR_7:\n", "self._set_inoutput_item(VAR_9, VAR_7=True)\n", "for VAR_10, VAR_9 in VAR_8.items():\n", "self._set_inoutput_item(VAR_9, VAR_7=True, VAR_10=name)\n", "for VAR_9 in self.output:\n", "if self.dynamic_output and VAR_9 not in self.dynamic_output:\n", "VAR_3 = VAR_9.get_wildcard_names()\n", "if self.wildcard_names:\n", "if self.wildcard_names != VAR_3:\n", "self.wildcard_names = VAR_3\n" ]
[ "def set_output(self, *output, **kwoutput):...\n", "\"\"\"docstring\"\"\"\n", "for item in output:\n", "self._set_inoutput_item(item, output=True)\n", "for name, item in kwoutput.items():\n", "self._set_inoutput_item(item, output=True, name=name)\n", "for item in self.output:\n", "if self.dynamic_output and item not in self.dynamic_output:\n", "wildcards = item.get_wildcard_names()\n", "if self.wildcard_names:\n", "if self.wildcard_names != wildcards:\n", "self.wildcard_names = wildcards\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "For", "Expr'", "For", "Expr'", "For", "Condition", "Assign'", "Condition", "Condition", "Assign'" ]
[ "def FUNC_6(self, VAR_4, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_4 is None:\n", "VAR_4 = self.login_data.get('username')\n", "if '+86' not in VAR_4:\n", "if not VAR_4:\n", "VAR_4 = '+86' + VAR_4\n", "if VAR_5 is None:\n", "VAR_4 = input('请输入手机号:')\n", "VAR_5 = self.login_data.get('password')\n", "return VAR_4, VAR_5\n", "if not VAR_5:\n", "VAR_5 = input('请输入密码:')\n" ]
[ "def _check_user_pass(self, username, password):...\n", "\"\"\"docstring\"\"\"\n", "if username is None:\n", "username = self.login_data.get('username')\n", "if '+86' not in username:\n", "if not username:\n", "username = '+86' + username\n", "if password is None:\n", "username = input('请输入手机号:')\n", "password = self.login_data.get('password')\n", "return username, password\n", "if not password:\n", "password = input('请输入密码:')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Return'", "Condition", "Assign'" ]
[ "def FUNC_5(self, VAR_1, VAR_2, VAR_8=None, VAR_9='form', VAR_4=None, VAR_14...\n", "VAR_29 = super(CLASS_0, self).fields_view_get(VAR_1, VAR_2, VAR_8, VAR_9,\n VAR_4, VAR_14=toolbar, VAR_15=submenu)\n", "if VAR_4 is None:\n", "VAR_4 = {}\n", "if 'location' in VAR_4 and VAR_4['location']:\n", "VAR_51 = self.pool.get('stock.location').browse(VAR_1, VAR_2, VAR_4['location']\n )\n", "return VAR_29\n", "VAR_52 = VAR_29.get('fields', {})\n", "if VAR_52:\n", "if VAR_51.usage == 'supplier':\n", "if VAR_52.get('virtual_available'):\n", "if VAR_51.usage == 'internal':\n", "VAR_29['fields']['virtual_available']['string'] = _('Future Receptions')\n", "if VAR_52.get('qty_available'):\n", "if VAR_52.get('virtual_available'):\n", "if VAR_51.usage == 'customer':\n", "VAR_29['fields']['qty_available']['string'] = _('Received Qty')\n", "VAR_29['fields']['virtual_available']['string'] = _('Future Stock')\n", "if VAR_52.get('virtual_available'):\n", "if VAR_51.usage == 'inventory':\n", "VAR_29['fields']['virtual_available']['string'] = _('Future Deliveries')\n", "if VAR_52.get('qty_available'):\n", "if VAR_52.get('virtual_available'):\n", "if VAR_51.usage == 'procurement':\n", "VAR_29['fields']['qty_available']['string'] = _('Delivered Qty')\n", "VAR_29['fields']['virtual_available']['string'] = _('Future P&L')\n", "if VAR_52.get('qty_available'):\n", "if VAR_52.get('virtual_available'):\n", "if VAR_51.usage == 'production':\n", "VAR_29['fields']['qty_available']['string'] = _('P&L Qty')\n", "VAR_29['fields']['virtual_available']['string'] = _('Future Qty')\n", "if VAR_52.get('qty_available'):\n", "if VAR_52.get('virtual_available'):\n", "VAR_29['fields']['qty_available']['string'] = _('Unplanned Qty')\n", "VAR_29['fields']['virtual_available']['string'] = _('Future Productions')\n", "if VAR_52.get('qty_available'):\n", "VAR_29['fields']['qty_available']['string'] = _('Produced Qty')\n" ]
[ "def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=...\n", "res = super(product_product, self).fields_view_get(cr, uid, view_id,\n view_type, context, toolbar=toolbar, submenu=submenu)\n", "if context is None:\n", "context = {}\n", "if 'location' in context and context['location']:\n", "location_info = self.pool.get('stock.location').browse(cr, uid, context[\n 'location'])\n", "return res\n", "fields = res.get('fields', {})\n", "if fields:\n", "if location_info.usage == 'supplier':\n", "if fields.get('virtual_available'):\n", "if location_info.usage == 'internal':\n", "res['fields']['virtual_available']['string'] = _('Future Receptions')\n", "if fields.get('qty_available'):\n", "if fields.get('virtual_available'):\n", "if location_info.usage == 'customer':\n", "res['fields']['qty_available']['string'] = _('Received Qty')\n", "res['fields']['virtual_available']['string'] = _('Future Stock')\n", "if fields.get('virtual_available'):\n", "if location_info.usage == 'inventory':\n", "res['fields']['virtual_available']['string'] = _('Future Deliveries')\n", "if fields.get('qty_available'):\n", "if fields.get('virtual_available'):\n", "if location_info.usage == 'procurement':\n", "res['fields']['qty_available']['string'] = _('Delivered Qty')\n", "res['fields']['virtual_available']['string'] = _('Future P&L')\n", "if fields.get('qty_available'):\n", "if fields.get('virtual_available'):\n", "if location_info.usage == 'production':\n", "res['fields']['qty_available']['string'] = _('P&L Qty')\n", "res['fields']['virtual_available']['string'] = _('Future Qty')\n", "if fields.get('qty_available'):\n", "if fields.get('virtual_available'):\n", "res['fields']['qty_available']['string'] = _('Unplanned Qty')\n", "res['fields']['virtual_available']['string'] = _('Future Productions')\n", "if fields.get('qty_available'):\n", "res['fields']['qty_available']['string'] = _('Produced Qty')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "For", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Return'", "Assign'", "Condition", "Condition", "Condition", "Condition", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Assign'" ]
[ "def FUNC_6(self, VAR_41):...\n", "if VAR_41:\n", "return self.error()\n", "return Thing._by_fullname(VAR_41, False, data=True)\n" ]
[ "def run(self, fullname):...\n", "if fullname:\n", "return self.error()\n", "return Thing._by_fullname(fullname, False, data=True)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "@VAR_2.route('/<string:filename>')...\n", "VAR_16 = os.path.join(VAR_1, VAR_16)\n", "if os.path.exists(VAR_16):\n", "VAR_86 = f.read()\n", "abort(404)\n", "return VAR_86\n" ]
[ "@app.route('/<string:filename>')...\n", "filename = os.path.join(static_path, filename)\n", "if os.path.exists(filename):\n", "response_body = f.read()\n", "abort(404)\n", "return response_body\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_0(self, VAR_2, VAR_3, VAR_4):...\n", "\"\"\"docstring\"\"\"\n" ]
[ "def filter_queryset(self, request, queryset, view):...\n", "\"\"\"docstring\"\"\"\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Docstring" ]
[ "def FUNC_16(VAR_16):...\n", "\"\"\"docstring\"\"\"\n", "VAR_30 = VAR_19(VAR_16, 'dynamic')\n", "VAR_31 = [VAR_30] if FUNC_10(VAR_30) else VAR_30\n", "for VAR_3 in VAR_31:\n", "VAR_52 = list(VAR_5.finditer(VAR_3))\n", "return VAR_30\n", "for VAR_29 in VAR_52:\n", "if VAR_29.group('constraint'):\n" ]
[ "def dynamic(value):...\n", "\"\"\"docstring\"\"\"\n", "annotated = flag(value, 'dynamic')\n", "tocheck = [annotated] if not_iterable(annotated) else annotated\n", "for file in tocheck:\n", "matches = list(_wildcard_regex.finditer(file))\n", "return annotated\n", "for match in matches:\n", "if match.group('constraint'):\n" ]
[ 0, 0, 7, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "For", "Assign'", "Return'", "For", "Condition" ]
[ "def FUNC_15(VAR_28, VAR_29):...\n", "VAR_29.setFormatter(logging.Formatter(VAR_26, datefmt=DATE_FORMAT))\n", "VAR_28.addHandler(VAR_29)\n" ]
[ "def add_handler(logger, handler):...\n", "handler.setFormatter(logging.Formatter(LOG_FORMAT, datefmt=DATE_FORMAT))\n", "logger.addHandler(handler)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'" ]
[ "def FUNC_0():...\n", "VAR_1 = psycopg2.connect('dbname=forum')\n", "VAR_2 = VAR_1.cursor()\n", "\"\"\"Return all posts from the 'database', most recent first.\"\"\"\n", "VAR_2.execute('SELECT time, content FROM posts order by time DESC')\n", "VAR_3 = ({'content': str(row[1]), 'time': str(row[0])} for row in VAR_2.\n fetchall())\n", "VAR_1.close()\n", "return VAR_3\n" ]
[ "def get_posts():...\n", "db = psycopg2.connect('dbname=forum')\n", "c = db.cursor()\n", "\"\"\"Return all posts from the 'database', most recent first.\"\"\"\n", "c.execute('SELECT time, content FROM posts order by time DESC')\n", "posts = ({'content': str(row[1]), 'time': str(row[0])} for row in c.fetchall())\n", "db.close()\n", "return posts\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_11(VAR_7, VAR_8=None):...\n", "if isinstance(VAR_7, UserExerciseSummary):\n", "VAR_3 = VAR_7.exercise\n", "if isinstance(VAR_7, Submission):\n", "VAR_26 = {'points': VAR_7.get_points(), 'max': VAR_3.max_points,\n 'difficulty': VAR_3.difficulty, 'required': VAR_3.points_to_pass,\n 'confirm_the_level': VAR_3.category.confirm_the_level, 'missing_points':\n VAR_7.is_missing_points(), 'passed': VAR_7.is_passed(), 'full_score':\n VAR_7.is_full_points(), 'submitted': VAR_7.is_submitted(), 'graded':\n VAR_7.is_graded(), 'official': not VAR_7.is_unofficial(),\n 'exercise_page': True}\n", "VAR_3 = VAR_7.exercise\n", "VAR_16 = VAR_7.get('points', 0)\n", "VAR_19 = 0\n", "VAR_26 = {'points': VAR_7.grade, 'max': VAR_3.max_points, 'difficulty':\n VAR_3.difficulty, 'required': VAR_3.points_to_pass, 'confirm_the_level':\n VAR_3.category.confirm_the_level, 'missing_points': VAR_7.grade < VAR_3\n .points_to_pass, 'passed': VAR_7.grade >= VAR_3.points_to_pass,\n 'full_score': VAR_7.grade >= VAR_3.max_points, 'submitted': True,\n 'graded': VAR_7.is_graded, 'official': VAR_7.status != Submission.\n STATUS.UNOFFICIAL}\n", "VAR_30 = VAR_7.get('max_points', 0)\n", "VAR_20 = None\n", "if not VAR_7.is_graded and (not VAR_3.category.confirm_the_level or VAR_7.\n", "VAR_31 = VAR_7.get('points_to_pass', 0)\n", "if VAR_26['max'] > 0:\n", "VAR_26['status'] = VAR_7.status\n", "VAR_26 = {'points': VAR_16, 'max': VAR_30, 'difficulty': VAR_7.get(\n 'difficulty', ''), 'required': VAR_31, 'confirm_the_level': VAR_7.get(\n 'confirm_the_level', False), 'missing_points': VAR_16 < VAR_31,\n 'passed': VAR_7.get('passed', True), 'full_score': VAR_16 >= VAR_30,\n 'submitted': VAR_7.get('submission_count', 0) > 0, 'graded': VAR_7.get(\n 'graded', True), 'status': VAR_7.get('submission_status', False),\n 'unconfirmed': VAR_7.get('unconfirmed', False), 'official': not VAR_7.\n get('unofficial', False), 'confirmable_points': VAR_7.get(\n 'confirmable_points', False)}\n", "VAR_19 = int(round(100.0 * VAR_26['points'] / VAR_26['max']))\n", "VAR_26.update({'classes': VAR_8, 'percentage': VAR_19,\n 'required_percentage': VAR_20})\n", "if VAR_26['required']:\n", "return VAR_26\n", "VAR_20 = int(round(100.0 * VAR_26['required'] / VAR_26['max']))\n" ]
[ "def _points_data(obj, classes=None):...\n", "if isinstance(obj, UserExerciseSummary):\n", "exercise = obj.exercise\n", "if isinstance(obj, Submission):\n", "data = {'points': obj.get_points(), 'max': exercise.max_points,\n 'difficulty': exercise.difficulty, 'required': exercise.points_to_pass,\n 'confirm_the_level': exercise.category.confirm_the_level,\n 'missing_points': obj.is_missing_points(), 'passed': obj.is_passed(),\n 'full_score': obj.is_full_points(), 'submitted': obj.is_submitted(),\n 'graded': obj.is_graded(), 'official': not obj.is_unofficial(),\n 'exercise_page': True}\n", "exercise = obj.exercise\n", "points = obj.get('points', 0)\n", "percentage = 0\n", "data = {'points': obj.grade, 'max': exercise.max_points, 'difficulty':\n exercise.difficulty, 'required': exercise.points_to_pass,\n 'confirm_the_level': exercise.category.confirm_the_level,\n 'missing_points': obj.grade < exercise.points_to_pass, 'passed': obj.\n grade >= exercise.points_to_pass, 'full_score': obj.grade >= exercise.\n max_points, 'submitted': True, 'graded': obj.is_graded, 'official': obj\n .status != Submission.STATUS.UNOFFICIAL}\n", "max_points = obj.get('max_points', 0)\n", "required_percentage = None\n", "if not obj.is_graded and (not exercise.category.confirm_the_level or obj.\n", "required = obj.get('points_to_pass', 0)\n", "if data['max'] > 0:\n", "data['status'] = obj.status\n", "data = {'points': points, 'max': max_points, 'difficulty': obj.get(\n 'difficulty', ''), 'required': required, 'confirm_the_level': obj.get(\n 'confirm_the_level', False), 'missing_points': points < required,\n 'passed': obj.get('passed', True), 'full_score': points >= max_points,\n 'submitted': obj.get('submission_count', 0) > 0, 'graded': obj.get(\n 'graded', True), 'status': obj.get('submission_status', False),\n 'unconfirmed': obj.get('unconfirmed', False), 'official': not obj.get(\n 'unofficial', False), 'confirmable_points': obj.get(\n 'confirmable_points', False)}\n", "percentage = int(round(100.0 * data['points'] / data['max']))\n", "data.update({'classes': classes, 'percentage': percentage,\n 'required_percentage': required_percentage})\n", "if data['required']:\n", "return data\n", "required_percentage = int(round(100.0 * data['required'] / data['max']))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Condition", "Return'", "Assign'" ]
[ "def FUNC_41(self, VAR_27):...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = 'chmod o+r %s' % VAR_27\n", "VAR_33 = self.run_command(VAR_6, VAR_17=10, VAR_19=True)\n", "if VAR_33['status'] == 0:\n", "return True\n", "VAR_4 = 'Exception while making %s readable. Return code was %s'\n", "self.log_error(VAR_4 % (VAR_27, VAR_33['status']))\n" ]
[ "def make_archive_readable(self, filepath):...\n", "\"\"\"docstring\"\"\"\n", "cmd = 'chmod o+r %s' % filepath\n", "res = self.run_command(cmd, timeout=10, need_root=True)\n", "if res['status'] == 0:\n", "return True\n", "msg = 'Exception while making %s readable. Return code was %s'\n", "self.log_error(msg % (filepath, res['status']))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Expr'" ]
[ "from database_writer import get_db\n", "from process_data import processData\n", "from threading import Thread\n", "import logger\n", "import bracket_utils\n", "import constants\n", "import time\n", "from tweet import tweet\n", "VAR_0 = False\n", "VAR_1 = True\n", "VAR_2 = True\n", "VAR_3 = logger.logger(__name__)\n", "def __init__(self, VAR_4, VAR_5=False, VAR_6='smash'):...\n", "self.start_time = time.time()\n", "self.testing = VAR_5\n", "self.scenes = VAR_4\n", "VAR_6 = 'smash_test' if VAR_5 else VAR_6\n", "self.db = get_db(db=db_name)\n", "VAR_11 = 'SELECT count(*) FROM matches'\n", "VAR_12 = self.db.exec(VAR_11)\n", "if VAR_12[0][0] == 0:\n", "VAR_2 = True\n", "self.data_processor = processData(self.db)\n", "VAR_3.info('validURL being created')\n", "def FUNC_0(self):...\n", "if not self.testing:\n", "while True:\n", "self.create_analysis_threads()\n", "VAR_3.info('About to create analyziz threads')\n", "def FUNC_1(self):...\n", "self.create_analysis_threads()\n", "self.start_time = time.time()\n", "VAR_3.info('just finished with analysis threads')\n", "VAR_13 = []\n", "time.sleep(constants.SLEEP_TIME)\n", "VAR_14 = 3\n", "VAR_3.info('Just finished sleeping')\n", "VAR_15 = len(self.scenes)\n", "for i in range(VAR_14):\n", "VAR_18 = int(VAR_15 / VAR_14 * i)\n", "for VAR_20 in VAR_13:\n", "VAR_19 = int(VAR_15 / VAR_14 * (i + 1))\n", "VAR_3.info('abouto call join for the analysis thread {}'.format(VAR_20.name))\n", "VAR_3.info('we have joined all threads. Should tweet after this')\n", "VAR_9 = self.scenes[VAR_18:VAR_19]\n", "VAR_20.join()\n", "if not VAR_0 and VAR_2:\n", "VAR_8 = [VAR_10.get_name() for VAR_10 in VAR_9]\n", "VAR_21 = time.time() - self.start_time\n", "VAR_0 = True\n", "def FUNC_2(self, VAR_7, VAR_8):...\n", "VAR_20 = Thread(target=self.analyze_scenes, VAR_8=str(name), args=(chunk,))\n", "VAR_22 = VAR_21 / 60\n", "VAR_21 = time.time() - self.start_time\n", "VAR_3.info('we are about to analyze scene {} with {} brackets'.format(VAR_8,\n len(VAR_7)))\n", "VAR_3.info('Trying to start the analysis thread for scenes {}'.format(\n VAR_20.name))\n", "VAR_3.info('joining for the analysis thread {} in {} minutes'.format(VAR_20\n .name, VAR_22))\n", "VAR_22 = VAR_21 / 60\n", "for url in VAR_7:\n", "VAR_20.start()\n", "if not VAR_0 and VAR_2:\n", "VAR_3.info(\n 'Just finished analyzing scenes for the first time. It took {} minutes. About to tweet'\n .format(VAR_22))\n", "VAR_11 = \"SELECT * FROM analyzed where base_url='{}'\".format(url)\n", "def FUNC_3(self, VAR_9):...\n", "VAR_13.append(VAR_20)\n", "tweet('joining for the analysis thread {} in {} minutes'.format(VAR_20.\n name, VAR_22))\n", "tweet('Done loading scene data. 
Took {} minutes'.format(VAR_22))\n", "VAR_12 = self.db.exec(VAR_11)\n", "for VAR_10 in VAR_9:\n", "if len(VAR_12) == 0:\n", "self.analyze_scene(VAR_10)\n", "def FUNC_4(self, VAR_10):...\n", "VAR_26 = bracket_utils.get_display_base(url)\n", "VAR_3.info('Skpping pro bracket because it has already been analyzed: {}'.\n format(url))\n", "VAR_16 = VAR_10.get_base_urls()\n", "if 'doubles' in VAR_26.lower() or 'dubs' in VAR_26.lower():\n", "VAR_17 = VAR_10.get_users()\n", "VAR_3.info(\n 'We are skipping the tournament {} because it is a doubles tournament'.\n format(VAR_26))\n", "VAR_3.info('About to process pro bracket {}'.format(url))\n", "VAR_8 = VAR_10.get_name()\n", "self.data_processor.process(url, VAR_8, VAR_26)\n", "VAR_3.info('found the following users for scene {}: {}'.format(VAR_8, VAR_17))\n", "for user in VAR_17:\n", "VAR_11 = \"SELECT * FROM user_analyzed WHERE user='{}';\".format(user)\n", "for base_url in VAR_16:\n", "VAR_23 = self.db.exec(VAR_11)\n", "VAR_3.info('About to start this analysis thread for scene {}'.format(VAR_10\n .get_name()))\n", "if not VAR_0 and VAR_2:\n", "if len(VAR_23) > 0:\n", "VAR_11 = \"SELECT first,last FROM valids WHERE base_url = '\" + str(base_url\n ) + \"';\"\n", "tweet('About to start ranking for scene {}'.format(VAR_8))\n", "self.data_processor.check_and_update_ranks(VAR_8)\n", "VAR_27 = bracket_utils.get_brackets_from_user(user, pages=1)\n", "VAR_28 = bracket_utils.get_brackets_from_user(user)\n", "VAR_24 = self.db.exec(VAR_11)\n", "for VAR_32 in VAR_27:\n", "for url in VAR_28:\n", "VAR_25 = len(VAR_24) > 0\n", "VAR_3.info('here are the brackets from the most recent page of user {}: {}'\n .format(user, VAR_27))\n", "VAR_3.info('found this url from a user: {} {}'.format(url, user))\n", "VAR_3.info('done with user {}'.format(user))\n", "if VAR_25:\n", "VAR_11 = \"SELECT * FROM user_analyzed WHERE url='{}' AND user='{}';\".format(\n VAR_32, user)\n", "VAR_26 = bracket_utils.get_display_base(url)\n", "VAR_3.info('validURLs found values in the database' + str(VAR_24))\n", "VAR_29 = bracket_utils._get_first_valid_url(base_url)\n", "VAR_23 = self.db.exec(VAR_11)\n", "if 'doubles' in VAR_26.lower() or 'dubs' in VAR_26.lower():\n", "VAR_29 = VAR_24[0][0]\n", "VAR_30 = bracket_utils._get_last_valid_url(base_url, VAR_29)\n", "if len(VAR_23) == 0:\n", "VAR_3.info(\n 'We are skipping the tournament {} because it is a doubles tournament'.\n format(VAR_26))\n", "self.data_processor.process(url, VAR_8, VAR_26)\n", "VAR_30 = VAR_24[0][1]\n", "VAR_11 = 'INSERT INTO valids (base_url, first, last, scene) VALUES ('\n", "VAR_3.info('found this url from a user: {} {}'.format(VAR_32, user))\n", "VAR_3.info('url {} is not new for user {}'.format(VAR_32, user))\n", "VAR_11 = (\n \"INSERT INTO user_analyzed (url, user, scene) VALUES ('{}', '{}', '{}');\"\n .format(url, user, VAR_8))\n", "VAR_31 = bracket_utils._get_last_valid_url(base_url, VAR_30 - 1)\n", "VAR_11 += \"'\" + str(base_url) + \"', \" + str(VAR_29) + ', ' + str(VAR_30\n ) + \", '\" + str(VAR_8) + \"');\"\n", "VAR_26 = bracket_utils.get_display_base(VAR_32)\n", "self.db.exec(VAR_11)\n", "if not VAR_31 == VAR_30:\n", "self.db.exec(VAR_11)\n", "if 'doubles' in VAR_26.lower() or 'dubs' in VAR_26.lower():\n", "if VAR_31 - VAR_30 > 5:\n", "for i in range(VAR_29, VAR_30 + 1):\n", "VAR_3.info(\n 'We are skipping the tournament {} because it is a doubles tournament'.\n format(VAR_26))\n", "self.data_processor.process(VAR_32, VAR_8, VAR_26)\n", "f.write(\n '[validURLs.py:55]: found a SHIT TON of new tournaments for 
bracket: {}'\n .format(base_url))\n", "VAR_32 = base_url.replace('###', str(VAR_31))\n", "VAR_32 = base_url.replace('###', str(i))\n", "VAR_11 = (\n \"INSERT INTO user_analyzed (url, user, scene) VALUES ('{}', '{}', '{}');\"\n .format(VAR_32, user, VAR_8))\n", "VAR_11 = 'UPDATE valids SET last=' + str(VAR_31) + \" where base_url = '\" + str(\n base_url) + \"';\"\n", "VAR_3.info('Found new bracket: {}'.format(VAR_32))\n", "VAR_26 = bracket_utils.get_display_base(VAR_32, counter=i)\n", "self.db.exec(VAR_11)\n", "self.db.exec(VAR_11)\n", "VAR_33 = 'Found new bracket: {}'.format(VAR_32)\n", "if 'doubles' in VAR_26.lower() or 'dubs' in VAR_26.lower():\n", "VAR_33 = 'Found new {} bracket: {}'.format(VAR_8, VAR_32)\n", "for i in range(VAR_30 + 1, VAR_31 + 1):\n", "tweet(VAR_33)\n", "VAR_3.info(\n 'We are skipping the tournament {} because it is a doubles tournament'.\n format(VAR_26))\n", "self.data_processor.process(VAR_32, VAR_8, VAR_26)\n", "tweet(VAR_33)\n", "VAR_32 = base_url.replace('###', str(i))\n", "VAR_26 = bracket_utils.get_display_base(VAR_32, counter=i)\n", "if 'doubles' in VAR_26.lower() or 'dubs' in VAR_26.lower():\n", "VAR_3.info(\n 'We are skipping the tournament {} because it is a doubles tournament'.\n format(VAR_26))\n", "self.data_processor.process(VAR_32, VAR_8, VAR_26, new_bracket=True)\n" ]
[ "from database_writer import get_db\n", "from process_data import processData\n", "from threading import Thread\n", "import logger\n", "import bracket_utils\n", "import constants\n", "import time\n", "from tweet import tweet\n", "analyzed_scenes = False\n", "run_pros = True\n", "should_tweet = True\n", "LOG = logger.logger(__name__)\n", "def __init__(self, scenes, testing=False, db_name='smash'):...\n", "self.start_time = time.time()\n", "self.testing = testing\n", "self.scenes = scenes\n", "db_name = 'smash_test' if testing else db_name\n", "self.db = get_db(db=db_name)\n", "sql = 'SELECT count(*) FROM matches'\n", "res = self.db.exec(sql)\n", "if res[0][0] == 0:\n", "should_tweet = True\n", "self.data_processor = processData(self.db)\n", "LOG.info('validURL being created')\n", "def init(self):...\n", "if not self.testing:\n", "while True:\n", "self.create_analysis_threads()\n", "LOG.info('About to create analyziz threads')\n", "def create_analysis_threads(self):...\n", "self.create_analysis_threads()\n", "self.start_time = time.time()\n", "LOG.info('just finished with analysis threads')\n", "threads = []\n", "time.sleep(constants.SLEEP_TIME)\n", "num_threads = 3\n", "LOG.info('Just finished sleeping')\n", "length = len(self.scenes)\n", "for i in range(num_threads):\n", "i1 = int(length / num_threads * i)\n", "for t in threads:\n", "i2 = int(length / num_threads * (i + 1))\n", "LOG.info('abouto call join for the analysis thread {}'.format(t.name))\n", "LOG.info('we have joined all threads. Should tweet after this')\n", "chunk = self.scenes[i1:i2]\n", "t.join()\n", "if not analyzed_scenes and should_tweet:\n", "name = [scene.get_name() for scene in chunk]\n", "seconds_to_analyze = time.time() - self.start_time\n", "analyzed_scenes = True\n", "def analyze_smashgg(self, urls, name):...\n", "t = Thread(target=self.analyze_scenes, name=str(name), args=(chunk,))\n", "minutes = seconds_to_analyze / 60\n", "seconds_to_analyze = time.time() - self.start_time\n", "LOG.info('we are about to analyze scene {} with {} brackets'.format(name,\n len(urls)))\n", "LOG.info('Trying to start the analysis thread for scenes {}'.format(t.name))\n", "LOG.info('joining for the analysis thread {} in {} minutes'.format(t.name,\n minutes))\n", "minutes = seconds_to_analyze / 60\n", "for url in urls:\n", "t.start()\n", "if not analyzed_scenes and should_tweet:\n", "LOG.info(\n 'Just finished analyzing scenes for the first time. It took {} minutes. About to tweet'\n .format(minutes))\n", "sql = \"SELECT * FROM analyzed where base_url='{}'\".format(url)\n", "def analyze_scenes(self, chunk):...\n", "threads.append(t)\n", "tweet('joining for the analysis thread {} in {} minutes'.format(t.name,\n minutes))\n", "tweet('Done loading scene data. 
Took {} minutes'.format(minutes))\n", "res = self.db.exec(sql)\n", "for scene in chunk:\n", "if len(res) == 0:\n", "self.analyze_scene(scene)\n", "def analyze_scene(self, scene):...\n", "display_name = bracket_utils.get_display_base(url)\n", "LOG.info('Skpping pro bracket because it has already been analyzed: {}'.\n format(url))\n", "base_urls = scene.get_base_urls()\n", "if 'doubles' in display_name.lower() or 'dubs' in display_name.lower():\n", "users = scene.get_users()\n", "LOG.info('We are skipping the tournament {} because it is a doubles tournament'\n .format(display_name))\n", "LOG.info('About to process pro bracket {}'.format(url))\n", "name = scene.get_name()\n", "self.data_processor.process(url, name, display_name)\n", "LOG.info('found the following users for scene {}: {}'.format(name, users))\n", "for user in users:\n", "sql = \"SELECT * FROM user_analyzed WHERE user='{}';\".format(user)\n", "for base_url in base_urls:\n", "results = self.db.exec(sql)\n", "LOG.info('About to start this analysis thread for scene {}'.format(scene.\n get_name()))\n", "if not analyzed_scenes and should_tweet:\n", "if len(results) > 0:\n", "sql = \"SELECT first,last FROM valids WHERE base_url = '\" + str(base_url) + \"';\"\n", "tweet('About to start ranking for scene {}'.format(name))\n", "self.data_processor.check_and_update_ranks(name)\n", "most_recent_page = bracket_utils.get_brackets_from_user(user, pages=1)\n", "user_urls = bracket_utils.get_brackets_from_user(user)\n", "result = self.db.exec(sql)\n", "for bracket in most_recent_page:\n", "for url in user_urls:\n", "has_results = len(result) > 0\n", "LOG.info('here are the brackets from the most recent page of user {}: {}'.\n format(user, most_recent_page))\n", "LOG.info('found this url from a user: {} {}'.format(url, user))\n", "LOG.info('done with user {}'.format(user))\n", "if has_results:\n", "sql = \"SELECT * FROM user_analyzed WHERE url='{}' AND user='{}';\".format(\n bracket, user)\n", "display_name = bracket_utils.get_display_base(url)\n", "LOG.info('validURLs found values in the database' + str(result))\n", "first = bracket_utils._get_first_valid_url(base_url)\n", "results = self.db.exec(sql)\n", "if 'doubles' in display_name.lower() or 'dubs' in display_name.lower():\n", "first = result[0][0]\n", "last = bracket_utils._get_last_valid_url(base_url, first)\n", "if len(results) == 0:\n", "LOG.info('We are skipping the tournament {} because it is a doubles tournament'\n .format(display_name))\n", "self.data_processor.process(url, name, display_name)\n", "last = result[0][1]\n", "sql = 'INSERT INTO valids (base_url, first, last, scene) VALUES ('\n", "LOG.info('found this url from a user: {} {}'.format(bracket, user))\n", "LOG.info('url {} is not new for user {}'.format(bracket, user))\n", "sql = (\n \"INSERT INTO user_analyzed (url, user, scene) VALUES ('{}', '{}', '{}');\"\n .format(url, user, name))\n", "new_last = bracket_utils._get_last_valid_url(base_url, last - 1)\n", "sql += \"'\" + str(base_url) + \"', \" + str(first) + ', ' + str(last\n ) + \", '\" + str(name) + \"');\"\n", "display_name = bracket_utils.get_display_base(bracket)\n", "self.db.exec(sql)\n", "if not new_last == last:\n", "self.db.exec(sql)\n", "if 'doubles' in display_name.lower() or 'dubs' in display_name.lower():\n", "if new_last - last > 5:\n", "for i in range(first, last + 1):\n", "LOG.info('We are skipping the tournament {} because it is a doubles tournament'\n .format(display_name))\n", "self.data_processor.process(bracket, name, display_name)\n", "f.write(\n 
'[validURLs.py:55]: found a SHIT TON of new tournaments for bracket: {}'\n .format(base_url))\n", "bracket = base_url.replace('###', str(new_last))\n", "bracket = base_url.replace('###', str(i))\n", "sql = (\n \"INSERT INTO user_analyzed (url, user, scene) VALUES ('{}', '{}', '{}');\"\n .format(bracket, user, name))\n", "sql = 'UPDATE valids SET last=' + str(new_last) + \" where base_url = '\" + str(\n base_url) + \"';\"\n", "LOG.info('Found new bracket: {}'.format(bracket))\n", "display_name = bracket_utils.get_display_base(bracket, counter=i)\n", "self.db.exec(sql)\n", "self.db.exec(sql)\n", "msg = 'Found new bracket: {}'.format(bracket)\n", "if 'doubles' in display_name.lower() or 'dubs' in display_name.lower():\n", "msg = 'Found new {} bracket: {}'.format(name, bracket)\n", "for i in range(last + 1, new_last + 1):\n", "tweet(msg)\n", "LOG.info('We are skipping the tournament {} because it is a doubles tournament'\n .format(display_name))\n", "self.data_processor.process(bracket, name, display_name)\n", "tweet(msg)\n", "bracket = base_url.replace('###', str(i))\n", "display_name = bracket_utils.get_display_base(bracket, counter=i)\n", "if 'doubles' in display_name.lower() or 'dubs' in display_name.lower():\n", "LOG.info('We are skipping the tournament {} because it is a doubles tournament'\n .format(display_name))\n", "self.data_processor.process(bracket, name, display_name, new_bracket=True)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 4, 0, 0, 0, 4, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 4, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Condition", "Condition", "Expr'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "For", "Assign'", "For", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Condition", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "For", "Expr'", "Condition", "Expr'", "Assign'", "FunctionDef'", "Expr'", "Expr'", "Expr'", "Assign'", "For", "Condition", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Assign'", "Condition", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "For", "Assign'", "For", "Assign'", "Expr'", "Condition", "Condition", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "For", "For", "Assign'", "Expr'", "Expr'", "Expr'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "AugAssign'", "Assign'", "Expr'", "Condition", "Expr'", "Condition", "Condition", "For", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Condition", "Assign'", "For", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_27(self):...\n", "VAR_4 = 'hubba-bubba'\n", "VAR_3 = '/afterlogin/'\n", "VAR_8 = self.add_outstanding_query(VAR_3)\n", "VAR_10 = self.dump_session_cookie(VAR_8)\n", "VAR_11 = auth_response(VAR_8, VAR_4)\n", "VAR_31 = self.app.dispatch_request()\n", "VAR_31 = self.app.dispatch_request()\n", "self.assertEqual(VAR_31.status, '302 FOUND')\n", "self.assertIn(\n 'https://idp.example.com/simplesaml/saml2/idp/SingleLogoutService.php?SAMLResponse='\n , VAR_31.location)\n" ]
[ "def test_logout_service_startingIDP(self):...\n", "eppn = 'hubba-bubba'\n", "came_from = '/afterlogin/'\n", "session_id = self.add_outstanding_query(came_from)\n", "cookie = self.dump_session_cookie(session_id)\n", "saml_response = auth_response(session_id, eppn)\n", "response = self.app.dispatch_request()\n", "response = self.app.dispatch_request()\n", "self.assertEqual(response.status, '302 FOUND')\n", "self.assertIn(\n 'https://idp.example.com/simplesaml/saml2/idp/SingleLogoutService.php?SAMLResponse='\n , response.location)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'" ]