lines (sequence, lengths 1–444) | raw_lines (sequence, lengths 1–444) | label (sequence, lengths 1–444) | type (sequence, lengths 1–444) |
---|---|---|---|
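Each row below pairs obfuscated source lines (`lines`, with `VAR_n`/`FUNC_n`/`CLASS_n` placeholders) with their original form (`raw_lines`), plus a per-line integer label (`label`) and AST node type (`type`); the four sequences in a row are index-aligned. A minimal sketch of iterating one export of this data, assuming a JSON Lines file with one record per row and these four field names (the `records.jsonl` path is an assumption, and the meaning of non-zero labels is not specified in this dump):

```python
import json

# Assumed export: one JSON object per line, with the four list-valued
# fields named in the header above.
with open("records.jsonl") as fh:
    for line in fh:
        record = json.loads(line)
        # The four sequences are index-aligned per source line.
        for obf, raw, label, node_type in zip(
            record["lines"], record["raw_lines"], record["label"], record["type"]
        ):
            # Non-zero labels mark flagged lines; their exact semantics
            # are not given in this dump.
            if label != 0:
                print(f"{node_type:12} label={label}  {raw.rstrip()}")
```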
[
"def FUNC_1(self):...\n",
"super(CLASS_1, self).setUp()\n",
"VAR_12 = patch('pavelib.utils.test.suites.bokchoy_suite.sh')\n",
"self._mock_sh = VAR_12.start()\n",
"self.addCleanup(VAR_12.stop)\n"
] | [
"def setUp(self):...\n",
"super(TestPaverPa11yCrawlerCmd, self).setUp()\n",
"mock_sh = patch('pavelib.utils.test.suites.bokchoy_suite.sh')\n",
"self._mock_sh = mock_sh.start()\n",
"self.addCleanup(mock_sh.stop)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_34, *VAR_35, **VAR_36):...\n",
"super().__init__(*VAR_35, **kvargs)\n",
"self.newproxyfile = 'newproxies.txt'\n",
"self.proxylist = set()\n",
"self.c = VAR_34\n",
"self.threads = []\n",
"self.processes = []\n",
"self.th_sa = 'inproc://wm-wth.sock'\n",
"self.th_ba = 'inproc://wm-back.sock'\n",
"self.pr_sa = 'ipc://wm-wpr.sock'\n",
"self.pr_ba = 'ipc://wm-back.sock'\n",
"self.userqueues = {}\n",
"self.usersfile = 'wm_users.pickle'\n",
"self.targetsfile = 'wm_targets.pickle'\n",
"self.bumplimitfile = 'wm_bumplimit.pickle'\n"
] | [
"def __init__(self, config, *args, **kvargs):...\n",
"super().__init__(*args, **kvargs)\n",
"self.newproxyfile = 'newproxies.txt'\n",
"self.proxylist = set()\n",
"self.c = config\n",
"self.threads = []\n",
"self.processes = []\n",
"self.th_sa = 'inproc://wm-wth.sock'\n",
"self.th_ba = 'inproc://wm-back.sock'\n",
"self.pr_sa = 'ipc://wm-wpr.sock'\n",
"self.pr_ba = 'ipc://wm-back.sock'\n",
"self.userqueues = {}\n",
"self.usersfile = 'wm_users.pickle'\n",
"self.targetsfile = 'wm_targets.pickle'\n",
"self.bumplimitfile = 'wm_bumplimit.pickle'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def __init__(self, VAR_46):...\n",
"super(CLASS_2, self).__init__(VAR_46)\n"
] | [
"def __init__(self, conf):...\n",
"super(AnsiblePlugin, self).__init__(conf)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"@VAR_0.route('/users/<id>/edit', methods=['GET', 'POST'])...\n",
"VAR_5 = VAR_1\n",
"VAR_2 = connectToMySQL('users_db')\n",
"if request.method == 'POST':\n",
"VAR_6 = 'string' + VAR_5 + ';'\n",
"VAR_4 = VAR_2.query_db('SELECT * FROM users WHERE id = ' + VAR_5 + ';')\n",
"VAR_8 = {'fn': request.form['first_name'], 'ln': request.form['last_name'],\n 'e': request.form['email'], 'd': request.form['description']}\n",
"return render_template('edit.html', VAR_4=user[0])\n",
"VAR_2.query_db(VAR_6, VAR_8)\n",
"return redirect('/users/{}'.format(VAR_5))\n"
] | [
"@app.route('/users/<id>/edit', methods=['GET', 'POST'])...\n",
"user_id = id\n",
"mysql = connectToMySQL('users_db')\n",
"if request.method == 'POST':\n",
"query = (\n 'UPDATE users SET first_name = %(fn)s, last_name = %(ln)s, email= %(e)s, description = %(d)s, updated_at = now() WHERE id = '\n + user_id + ';')\n",
"user = mysql.query_db('SELECT * FROM users WHERE id = ' + user_id + ';')\n",
"data = {'fn': request.form['first_name'], 'ln': request.form['last_name'],\n 'e': request.form['email'], 'd': request.form['description']}\n",
"return render_template('edit.html', user=user[0])\n",
"mysql.query_db(query, data)\n",
"return redirect('/users/{}'.format(user_id))\n"
] | [
0,
4,
0,
0,
4,
4,
0,
0,
0,
4
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Expr'",
"Return'"
] |
[
"@VAR_3.message_handler(func=lambda m: True)...\n",
"VAR_26 = VAR_6.text\n",
"VAR_10 = VAR_6.from_user.id\n",
"VAR_1[VAR_10].new_directory(VAR_26)\n",
"VAR_7 = VAR_1[VAR_10].get_directory_content()\n",
"VAR_27 = FUNC_2(VAR_7, len(VAR_1[VAR_10].path) > 1)\n",
"FUNC_6(VAR_10, VAR_3)\n",
"VAR_28 = VAR_3.send_message(VAR_10, VAR_1[VAR_10].get_path_string(),\n reply_markup=keyboard)\n",
"VAR_1[VAR_10].last_action_message_ids.append(VAR_28.message_id)\n"
] | [
"@bot.message_handler(func=lambda m: True)...\n",
"new_directory_name = message.text\n",
"telegram_id = message.from_user.id\n",
"explorers[telegram_id].new_directory(new_directory_name)\n",
"content = explorers[telegram_id].get_directory_content()\n",
"keyboard = content_builder(content, len(explorers[telegram_id].path) > 1)\n",
"remove_messages(telegram_id, bot)\n",
"message_sent = bot.send_message(telegram_id, explorers[telegram_id].\n get_path_string(), reply_markup=keyboard)\n",
"explorers[telegram_id].last_action_message_ids.append(message_sent.message_id)\n"
] | [
0,
0,
0,
4,
4,
4,
0,
4,
4
] | [
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_12(self):...\n",
"self.run_test_case(self.scenario.delete_server_group(), max_retries=5)\n"
] | [
"def test_x_delete_server_group(self):...\n",
"self.run_test_case(self.scenario.delete_server_group(), max_retries=5)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"from . import pathexpr\n",
"from . import ds\n",
"from . import fs\n",
"def __init__(self, VAR_0, VAR_1):...\n",
"self._doc = VAR_0\n",
"self._root = VAR_1\n",
"def FUNC_0(self):...\n",
"return self._doc['rules'].keys()\n"
] | [
"from . import pathexpr\n",
"from . import ds\n",
"from . import fs\n",
"def __init__(self, compileddoc, startingpath):...\n",
"self._doc = compileddoc\n",
"self._root = startingpath\n",
"def get_rule_names(self):...\n",
"return self._doc['rules'].keys()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Assign'",
"Assign'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_11(self, VAR_7, VAR_16):...\n",
"VAR_15 = self.find_product_by_id(VAR_7)\n",
"self._purchase_by_product(VAR_15, VAR_16)\n"
] | [
"def purchase_by_product_id(self, product_id, quantity):...\n",
"product = self.find_product_by_id(product_id)\n",
"self._purchase_by_product(product, quantity)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_11(VAR_16, VAR_17, VAR_15):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_15 is not None:\n",
"VAR_17 = str(VAR_17)\n",
"if VAR_15.alt_number is None:\n",
"print(\n '[VCF-Zarr] Determining maximum number of ALT alleles by scaling all variants in the VCF file.'\n )\n",
"print('[VCF-Zarr] Using alt number provided in configuration.')\n",
"VAR_42 = allel.read_vcf(VAR_16, fields=['numalt'], log=sys.stdout)\n",
"VAR_44 = VAR_15.alt_number\n",
"VAR_43 = VAR_42['variants/numalt']\n",
"print('[VCF-Zarr] Alt number: {}'.format(VAR_44))\n",
"VAR_44 = np.max(VAR_43)\n",
"VAR_39 = allel.vcf_read.DEFAULT_CHUNK_LENGTH\n",
"if VAR_15.chunk_length is not None:\n",
"VAR_39 = VAR_15.chunk_length\n",
"print('[VCF-Zarr] Chunk length: {}'.format(VAR_39))\n",
"VAR_40 = allel.vcf_read.DEFAULT_CHUNK_WIDTH\n",
"if VAR_15.chunk_width is not None:\n",
"VAR_40 = VAR_15.chunk_width\n",
"print('[VCF-Zarr] Chunk width: {}'.format(VAR_40))\n",
"if VAR_15.compressor == 'Blosc':\n",
"VAR_45 = Blosc(cname=conversion_config.blosc_compression_algorithm, clevel=\n conversion_config.blosc_compression_level, shuffle=conversion_config.\n blosc_shuffle_mode)\n",
"print('[VCF-Zarr] Using {} compressor.'.format(VAR_15.compressor))\n",
"print('[VCF-Zarr] Performing VCF to Zarr conversion...')\n",
"allel.vcf_to_zarr(VAR_16, VAR_17, VAR_44=alt_number, overwrite=True, log=\n sys.stdout, VAR_45=compressor, VAR_39=chunk_length, VAR_40=chunk_width)\n",
"print('[VCF-Zarr] Done.')\n"
] | [
"def convert_to_zarr(input_vcf_path, output_zarr_path, conversion_config):...\n",
"\"\"\"docstring\"\"\"\n",
"if conversion_config is not None:\n",
"output_zarr_path = str(output_zarr_path)\n",
"if conversion_config.alt_number is None:\n",
"print(\n '[VCF-Zarr] Determining maximum number of ALT alleles by scaling all variants in the VCF file.'\n )\n",
"print('[VCF-Zarr] Using alt number provided in configuration.')\n",
"callset = allel.read_vcf(input_vcf_path, fields=['numalt'], log=sys.stdout)\n",
"alt_number = conversion_config.alt_number\n",
"numalt = callset['variants/numalt']\n",
"print('[VCF-Zarr] Alt number: {}'.format(alt_number))\n",
"alt_number = np.max(numalt)\n",
"chunk_length = allel.vcf_read.DEFAULT_CHUNK_LENGTH\n",
"if conversion_config.chunk_length is not None:\n",
"chunk_length = conversion_config.chunk_length\n",
"print('[VCF-Zarr] Chunk length: {}'.format(chunk_length))\n",
"chunk_width = allel.vcf_read.DEFAULT_CHUNK_WIDTH\n",
"if conversion_config.chunk_width is not None:\n",
"chunk_width = conversion_config.chunk_width\n",
"print('[VCF-Zarr] Chunk width: {}'.format(chunk_width))\n",
"if conversion_config.compressor == 'Blosc':\n",
"compressor = Blosc(cname=conversion_config.blosc_compression_algorithm,\n clevel=conversion_config.blosc_compression_level, shuffle=\n conversion_config.blosc_shuffle_mode)\n",
"print('[VCF-Zarr] Using {} compressor.'.format(conversion_config.compressor))\n",
"print('[VCF-Zarr] Performing VCF to Zarr conversion...')\n",
"allel.vcf_to_zarr(input_vcf_path, output_zarr_path, alt_number=alt_number,\n overwrite=True, log=sys.stdout, compressor=compressor, chunk_length=\n chunk_length, chunk_width=chunk_width)\n",
"print('[VCF-Zarr] Done.')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
7,
7,
7,
7
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_13(self, VAR_12, VAR_13, VAR_14=None):...\n",
"\"\"\"docstring\"\"\"\n",
"assert VAR_14 is None or (VAR_13 is None or VAR_14 >= VAR_23(VAR_13))\n",
"if VAR_14:\n",
"VAR_38 = 'create table t1(s %s(%s))' % (VAR_12, VAR_14)\n",
"VAR_38 = 'create table t1(s %s)' % VAR_12\n",
"self.cursor.execute(VAR_38)\n",
"self.cursor.execute('insert into t1 values(?)', VAR_13)\n",
"VAR_4 = self.cursor.execute('select * from t1').fetchone()[0]\n",
"self.assertEqual(type(VAR_4), type(VAR_13))\n",
"if VAR_13 is not None:\n",
"self.assertEqual(VAR_23(VAR_4), VAR_23(VAR_13))\n",
"self.assertEqual(VAR_4, VAR_13)\n",
"self.cursor.execute('select * from t1 where s=?', VAR_13)\n"
] | [
"def _test_strtype(self, sqltype, value, colsize=None):...\n",
"\"\"\"docstring\"\"\"\n",
"assert colsize is None or (value is None or colsize >= len(value))\n",
"if colsize:\n",
"sql = 'create table t1(s %s(%s))' % (sqltype, colsize)\n",
"sql = 'create table t1(s %s)' % sqltype\n",
"self.cursor.execute(sql)\n",
"self.cursor.execute('insert into t1 values(?)', value)\n",
"v = self.cursor.execute('select * from t1').fetchone()[0]\n",
"self.assertEqual(type(v), type(value))\n",
"if value is not None:\n",
"self.assertEqual(len(v), len(value))\n",
"self.assertEqual(v, value)\n",
"self.cursor.execute('select * from t1 where s=?', value)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assert'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_2(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = VAR_0[:]\n",
"for user in self.participants:\n",
"VAR_11.append((Allow, user.login, ('view_activity', 'view_workshop',\n 'view.file')))\n",
"return VAR_11\n"
] | [
"def get_event_acl(self):...\n",
"\"\"\"docstring\"\"\"\n",
"acl = DEFAULT_PERM[:]\n",
"for user in self.participants:\n",
"acl.append((Allow, user.login, ('view_activity', 'view_workshop', 'view.file'))\n )\n",
"return acl\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Expr'",
"Return'"
] |
[
"@jwt_required...\n",
"\"\"\"docstring\"\"\"\n",
"return database_utilities.execute_query(\n f\"delete from admins where email = '{VAR_1}'\")\n"
] | [
"@jwt_required...\n",
"\"\"\"docstring\"\"\"\n",
"return database_utilities.execute_query(\n f\"delete from admins where email = '{email}'\")\n"
] | [
0,
0,
4
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_19(self):...\n",
"VAR_7, VAR_12 = self.make_xsrf_handling_app()\n",
"self.mock_get_current_identity(model.Identity(model.IDENTITY_USER,\n '[email protected]'))\n",
"VAR_17 = VAR_7.get('/request').body\n",
"self.mock_get_current_identity(model.Identity(model.IDENTITY_USER,\n '[email protected]'))\n",
"VAR_15 = VAR_7.post('/request', expect_errors=True)\n",
"self.assertEqual(403, VAR_15.status_int)\n",
"self.assertFalse(VAR_12)\n"
] | [
"def test_xsrf_token_identity_matters(self):...\n",
"app, calls = self.make_xsrf_handling_app()\n",
"self.mock_get_current_identity(model.Identity(model.IDENTITY_USER,\n '[email protected]'))\n",
"token = app.get('/request').body\n",
"self.mock_get_current_identity(model.Identity(model.IDENTITY_USER,\n '[email protected]'))\n",
"response = app.post('/request', expect_errors=True)\n",
"self.assertEqual(403, response.status_int)\n",
"self.assertFalse(calls)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_4():...\n",
"VAR_33 = FUNC_3(VAR_21=parent, VAR_22=child, VAR_23=\n ExtractedChemicalFormSet, VAR_24=ExtractedChemicalForm)\n",
"return CLASS_16, VAR_33\n"
] | [
"def one():...\n",
"ChemicalFormSet = make_formset(parent_model=parent, model=child, formset=\n ExtractedChemicalFormSet, form=ExtractedChemicalForm)\n",
"return ExtractedTextForm, ChemicalFormSet\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_5(self, VAR_1, VAR_2, VAR_8=None, VAR_9='form', VAR_4=None, VAR_14...\n",
"VAR_29 = super(CLASS_0, self).fields_view_get(VAR_1, VAR_2, VAR_8, VAR_9,\n VAR_4, VAR_14=toolbar, VAR_15=submenu)\n",
"if VAR_4 is None:\n",
"VAR_4 = {}\n",
"if 'location' in VAR_4 and VAR_4['location']:\n",
"VAR_51 = self.pool.get('stock.location').browse(VAR_1, VAR_2, VAR_4['location']\n )\n",
"return VAR_29\n",
"VAR_52 = VAR_29.get('fields', {})\n",
"if VAR_52:\n",
"if VAR_51.usage == 'supplier':\n",
"if VAR_52.get('virtual_available'):\n",
"if VAR_51.usage == 'internal':\n",
"VAR_29['fields']['virtual_available']['string'] = _('Future Receptions')\n",
"if VAR_52.get('qty_available'):\n",
"if VAR_52.get('virtual_available'):\n",
"if VAR_51.usage == 'customer':\n",
"VAR_29['fields']['qty_available']['string'] = _('Received Qty')\n",
"VAR_29['fields']['virtual_available']['string'] = _('Future Stock')\n",
"if VAR_52.get('virtual_available'):\n",
"if VAR_51.usage == 'inventory':\n",
"VAR_29['fields']['virtual_available']['string'] = _('Future Deliveries')\n",
"if VAR_52.get('qty_available'):\n",
"if VAR_52.get('virtual_available'):\n",
"if VAR_51.usage == 'procurement':\n",
"VAR_29['fields']['qty_available']['string'] = _('Delivered Qty')\n",
"VAR_29['fields']['virtual_available']['string'] = _('Future P&L')\n",
"if VAR_52.get('qty_available'):\n",
"if VAR_52.get('virtual_available'):\n",
"if VAR_51.usage == 'production':\n",
"VAR_29['fields']['qty_available']['string'] = _('P&L Qty')\n",
"VAR_29['fields']['virtual_available']['string'] = _('Future Qty')\n",
"if VAR_52.get('qty_available'):\n",
"if VAR_52.get('virtual_available'):\n",
"VAR_29['fields']['qty_available']['string'] = _('Unplanned Qty')\n",
"VAR_29['fields']['virtual_available']['string'] = _('Future Productions')\n",
"if VAR_52.get('qty_available'):\n",
"VAR_29['fields']['qty_available']['string'] = _('Produced Qty')\n"
] | [
"def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=...\n",
"res = super(product_product, self).fields_view_get(cr, uid, view_id,\n view_type, context, toolbar=toolbar, submenu=submenu)\n",
"if context is None:\n",
"context = {}\n",
"if 'location' in context and context['location']:\n",
"location_info = self.pool.get('stock.location').browse(cr, uid, context[\n 'location'])\n",
"return res\n",
"fields = res.get('fields', {})\n",
"if fields:\n",
"if location_info.usage == 'supplier':\n",
"if fields.get('virtual_available'):\n",
"if location_info.usage == 'internal':\n",
"res['fields']['virtual_available']['string'] = _('Future Receptions')\n",
"if fields.get('qty_available'):\n",
"if fields.get('virtual_available'):\n",
"if location_info.usage == 'customer':\n",
"res['fields']['qty_available']['string'] = _('Received Qty')\n",
"res['fields']['virtual_available']['string'] = _('Future Stock')\n",
"if fields.get('virtual_available'):\n",
"if location_info.usage == 'inventory':\n",
"res['fields']['virtual_available']['string'] = _('Future Deliveries')\n",
"if fields.get('qty_available'):\n",
"if fields.get('virtual_available'):\n",
"if location_info.usage == 'procurement':\n",
"res['fields']['qty_available']['string'] = _('Delivered Qty')\n",
"res['fields']['virtual_available']['string'] = _('Future P&L')\n",
"if fields.get('qty_available'):\n",
"if fields.get('virtual_available'):\n",
"if location_info.usage == 'production':\n",
"res['fields']['qty_available']['string'] = _('P&L Qty')\n",
"res['fields']['virtual_available']['string'] = _('Future Qty')\n",
"if fields.get('qty_available'):\n",
"if fields.get('virtual_available'):\n",
"res['fields']['qty_available']['string'] = _('Unplanned Qty')\n",
"res['fields']['virtual_available']['string'] = _('Future Productions')\n",
"if fields.get('qty_available'):\n",
"res['fields']['qty_available']['string'] = _('Produced Qty')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"For",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Condition",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'"
] |
[
"def FUNC_14(VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1.value = beamr.interpreters.Box(VAR_1.value)\n",
"return VAR_1\n"
] | [
"def t_BOX(t):...\n",
"\"\"\"docstring\"\"\"\n",
"t.value = beamr.interpreters.Box(t.value)\n",
"return t\n"
] | [
0,
0,
2,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_3(self):...\n",
"\"\"\"docstring\"\"\"\n",
"GenericRequest._prepare(self)\n",
"VAR_17 = os.path.join(self.submissions_path, self.task[1])\n",
"VAR_18 = os.listdir(VAR_17)\n",
"VAR_19 = random.choice(VAR_18)\n",
"self.source_path = os.path.join(VAR_17, VAR_19)\n",
"self.files = []\n",
"if os.path.isdir(self.source_path):\n",
"VAR_22 = os.listdir(self.source_path)\n",
"VAR_8 = os.path.splitext(VAR_19)[0]\n",
"self.files = [('%s.%%l' % os.path.splitext(sf)[0], os.path.join(self.\n source_path, sf)) for sf in VAR_22]\n",
"self.files = [('%s.%%l' % VAR_8, self.source_path)]\n"
] | [
"def _prepare(self):...\n",
"\"\"\"docstring\"\"\"\n",
"GenericRequest._prepare(self)\n",
"task_path = os.path.join(self.submissions_path, self.task[1])\n",
"sources = os.listdir(task_path)\n",
"source = random.choice(sources)\n",
"self.source_path = os.path.join(task_path, source)\n",
"self.files = []\n",
"if os.path.isdir(self.source_path):\n",
"submission_formats = os.listdir(self.source_path)\n",
"submission_format = os.path.splitext(source)[0]\n",
"self.files = [('%s.%%l' % os.path.splitext(sf)[0], os.path.join(self.\n source_path, sf)) for sf in submission_formats]\n",
"self.files = [('%s.%%l' % submission_format, self.source_path)]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_57():...\n",
"self.cursor.execute('create table t1 (word varchar (100))')\n",
"VAR_38 = set(['a'])\n",
"self.cursor.executemany('insert into t1 (word) values (?)', [VAR_38])\n"
] | [
"def f():...\n",
"self.cursor.execute('create table t1 (word varchar (100))')\n",
"words = set(['a'])\n",
"self.cursor.executemany('insert into t1 (word) values (?)', [words])\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_0(self, VAR_2, VAR_3, VAR_4, VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = []\n",
"VAR_7 = self.pool.get('ir.model.data')\n",
"VAR_8 = self.pool.get('res.users').browse(VAR_2, VAR_3, VAR_3).company_id.id\n",
"VAR_9 = self.pool.get('account.bank.statement')\n",
"VAR_10 = self.pool.get('ir.sequence')\n",
"VAR_11 = self.pool.get('account.journal')\n",
"VAR_2.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % VAR_3)\n",
"VAR_12 = map(lambda x1: x1[0], VAR_2.fetchall())\n",
"VAR_2.execute('string' % ','.join(map(lambda x: \"'\" + str(x) + \"'\", VAR_12)))\n",
"VAR_13 = map(lambda x1: x1[0], VAR_2.fetchall())\n",
"for journal in VAR_11.browse(VAR_2, VAR_3, VAR_13):\n",
"VAR_4 = VAR_9.search(VAR_2, VAR_3, [('state', '!=', 'confirm'), ('user_id',\n '=', VAR_3), ('journal_id', '=', journal.id)])\n",
"VAR_14 = self.pool.get('ir.model.data')\n",
"if len(VAR_4):\n",
"VAR_15 = VAR_14._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_tree')\n",
"VAR_17 = ''\n",
"VAR_16 = VAR_14._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_form2')\n",
"if journal.sequence_id:\n",
"if VAR_15:\n",
"VAR_17 = VAR_10.get_id(VAR_2, VAR_3, journal.sequence_id.id)\n",
"VAR_17 = VAR_10.get(VAR_2, VAR_3, 'account.bank.statement')\n",
"VAR_15 = VAR_14.browse(VAR_2, VAR_3, VAR_15, VAR_5=context).res_id\n",
"if VAR_16:\n",
"VAR_18 = VAR_9.create(VAR_2, VAR_3, {'journal_id': journal.id, 'company_id':\n VAR_8, 'user_id': VAR_3, 'state': 'open', 'name': VAR_17,\n 'starting_details_ids': VAR_9._get_cash_close_box_lines(VAR_2, VAR_3, [])})\n",
"VAR_16 = VAR_14.browse(VAR_2, VAR_3, VAR_16, VAR_5=context).res_id\n",
"return {'domain': \"[('state','=','open')]\", 'name': 'Open Statement',\n 'view_type': 'form', 'view_mode': 'tree,form', 'res_model':\n 'account.bank.statement', 'views': [(VAR_15, 'tree'), (VAR_16, 'form')],\n 'type': 'ir.actions.act_window'}\n",
"VAR_9.button_open(VAR_2, VAR_3, [VAR_18], VAR_5)\n"
] | [
"def open_statement(self, cr, uid, ids, context):...\n",
"\"\"\"docstring\"\"\"\n",
"list_statement = []\n",
"mod_obj = self.pool.get('ir.model.data')\n",
"company_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.id\n",
"statement_obj = self.pool.get('account.bank.statement')\n",
"sequence_obj = self.pool.get('ir.sequence')\n",
"journal_obj = self.pool.get('account.journal')\n",
"cr.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % uid)\n",
"j_ids = map(lambda x1: x1[0], cr.fetchall())\n",
"cr.execute(\n \"\"\" select id from account_journal\n where auto_cash='True' and type='cash'\n and id in (%s)\"\"\"\n % ','.join(map(lambda x: \"'\" + str(x) + \"'\", j_ids)))\n",
"journal_ids = map(lambda x1: x1[0], cr.fetchall())\n",
"for journal in journal_obj.browse(cr, uid, journal_ids):\n",
"ids = statement_obj.search(cr, uid, [('state', '!=', 'confirm'), ('user_id',\n '=', uid), ('journal_id', '=', journal.id)])\n",
"data_obj = self.pool.get('ir.model.data')\n",
"if len(ids):\n",
"id2 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_tree')\n",
"number = ''\n",
"id3 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_form2')\n",
"if journal.sequence_id:\n",
"if id2:\n",
"number = sequence_obj.get_id(cr, uid, journal.sequence_id.id)\n",
"number = sequence_obj.get(cr, uid, 'account.bank.statement')\n",
"id2 = data_obj.browse(cr, uid, id2, context=context).res_id\n",
"if id3:\n",
"statement_id = statement_obj.create(cr, uid, {'journal_id': journal.id,\n 'company_id': company_id, 'user_id': uid, 'state': 'open', 'name':\n number, 'starting_details_ids': statement_obj._get_cash_close_box_lines\n (cr, uid, [])})\n",
"id3 = data_obj.browse(cr, uid, id3, context=context).res_id\n",
"return {'domain': \"[('state','=','open')]\", 'name': 'Open Statement',\n 'view_type': 'form', 'view_mode': 'tree,form', 'res_model':\n 'account.bank.statement', 'views': [(id2, 'tree'), (id3, 'form')],\n 'type': 'ir.actions.act_window'}\n",
"statement_obj.button_open(cr, uid, [statement_id], context)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Expr'"
] |
[
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return sum(map(len, VAR_3.values()))\n"
] | [
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return sum(map(len, wildcards.values()))\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"VAR_18 = self.common.create_volume(VAR_6)\n",
"self.common.client_logout()\n",
"return {'metadata': VAR_18}\n"
] | [
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"metadata = self.common.create_volume(volume)\n",
"self.common.client_logout()\n",
"return {'metadata': metadata}\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_1(VAR_2):...\n",
""
] | [
"def try_get_template(name):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_2(self, VAR_2):...\n",
"VAR_4 = 'select userid from post_like where postid=%d' % VAR_2\n",
"VAR_5 = sql.queryDB(self.conn, VAR_4)\n",
"return VAR_5\n"
] | [
"def getPostLike(self, postid):...\n",
"sqlText = 'select userid from post_like where postid=%d' % postid\n",
"result = sql.queryDB(self.conn, sqlText)\n",
"return result\n"
] | [
0,
4,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._redis_address\n"
] | [
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._redis_address\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_0(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_10 = VAR_0['sensors']\n",
"if 'column_fmt' in VAR_0:\n",
"VAR_21 = VAR_0['column_fmt']\n",
"VAR_3 = ('TIMESTAMP',) + tuple([str(sen).strip() for sen in VAR_10])\n",
"VAR_3 = ('TIMESTAMP',) + tuple([VAR_21.replace('{sensor}', str(sen).strip()\n ) for sen in VAR_10])\n",
"return VAR_3\n"
] | [
"def get_columns(settings):...\n",
"\"\"\"docstring\"\"\"\n",
"sensors = settings['sensors']\n",
"if 'column_fmt' in settings:\n",
"column_fmt = settings['column_fmt']\n",
"columns = ('TIMESTAMP',) + tuple([str(sen).strip() for sen in sensors])\n",
"columns = ('TIMESTAMP',) + tuple([column_fmt.replace('{sensor}', str(sen).\n strip()) for sen in sensors])\n",
"return columns\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_7(self, VAR_13, VAR_11, VAR_7, VAR_14, VAR_10):...\n",
"VAR_22 = VAR_7.do_existing_paths()\n",
"VAR_23 = []\n",
"if VAR_22:\n",
"for ictx in VAR_14:\n",
"VAR_26 = []\n",
"VAR_38 = glob.glob(os.path.join(ictx.path, '*'))\n",
"return VAR_23\n",
"if 'key' in VAR_11:\n",
"VAR_38 = (x for x in VAR_38 if os.path.isdir(x))\n",
"VAR_39 = VAR_7.get_parameters(VAR_11['key'], VAR_13, VAR_14)\n",
"if 'collection' in VAR_11:\n",
"if 'collection' in VAR_11:\n",
"if VAR_39:\n",
"VAR_40 = VAR_10.get_collection(VAR_11['collection'])\n",
"for VAR_9, value in itertools.product(VAR_14, VAR_26):\n",
"VAR_40 = VAR_10.get_collection(VAR_11['collection'])\n",
"VAR_23.extend((ictx, x) for x in VAR_38)\n",
"VAR_26.extend(x for x in VAR_39 if x)\n",
"VAR_41 = [x for x in VAR_26 if x not in VAR_40]\n",
"VAR_23.append((VAR_9, os.path.join(VAR_9.path, value)))\n",
"VAR_38 = (x for x in VAR_38 if os.path.split(x)[-1] in VAR_40)\n",
"if VAR_41:\n"
] | [
"def get_directories(self, levelctx, levelfields, searcher, ctxlist, client):...\n",
"doexisting = searcher.do_existing_paths()\n",
"dirlist = []\n",
"if doexisting:\n",
"for ictx in ctxlist:\n",
"values = []\n",
"ctxdirs = glob.glob(os.path.join(ictx.path, '*'))\n",
"return dirlist\n",
"if 'key' in levelfields:\n",
"ctxdirs = (x for x in ctxdirs if os.path.isdir(x))\n",
"search_param = searcher.get_parameters(levelfields['key'], levelctx, ctxlist)\n",
"if 'collection' in levelfields:\n",
"if 'collection' in levelfields:\n",
"if search_param:\n",
"coll = client.get_collection(levelfields['collection'])\n",
"for ctx, value in itertools.product(ctxlist, values):\n",
"coll = client.get_collection(levelfields['collection'])\n",
"dirlist.extend((ictx, x) for x in ctxdirs)\n",
"values.extend(x for x in search_param if x)\n",
"bad_values = [x for x in values if x not in coll]\n",
"dirlist.append((ctx, os.path.join(ctx.path, value)))\n",
"ctxdirs = (x for x in ctxdirs if os.path.split(x)[-1] in coll)\n",
"if bad_values:\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"For",
"Assign'",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"For",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Condition"
] |
[
"def FUNC_37(self, VAR_14, VAR_18):...\n",
"VAR_31 = self.get_related_model(VAR_14)\n",
"return self.session.query(VAR_31).get(VAR_18)\n"
] | [
"def get_related_obj(self, col_name, value):...\n",
"rel_model = self.get_related_model(col_name)\n",
"return self.session.query(rel_model).get(value)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_23(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_17 = []\n",
"for VAR_7, VAR_12 in self.all_processes.items():\n",
"for VAR_16 in VAR_12:\n",
"return VAR_17\n",
"if VAR_16.process.poll() is None:\n",
"VAR_17.append((VAR_7, VAR_16.process))\n"
] | [
"def live_processes(self):...\n",
"\"\"\"docstring\"\"\"\n",
"result = []\n",
"for process_type, process_infos in self.all_processes.items():\n",
"for process_info in process_infos:\n",
"return result\n",
"if process_info.process.poll() is None:\n",
"result.append((process_type, process_info.process))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"For",
"Return'",
"Condition",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_21 = config.ensure_configured()\n",
"VAR_40 = api.reinitialize_request_cache()\n",
"self.response.headers['Content-Security-Policy'] = 'string'\n",
"self.response.headers['Strict-Transport-Security'\n ] = 'max-age=31536000; includeSubDomains; preload'\n",
"if self.frame_options:\n",
"self.response.headers['X-Frame-Options'] = self.frame_options\n",
"VAR_41 = None\n",
"for VAR_49 in self.get_auth_methods(VAR_21):\n",
"self.auth_method = VAR_49\n",
"VAR_41 = VAR_49(self.request)\n",
"self.authentication_error(err)\n",
"VAR_41 = VAR_41 or model.Anonymous\n",
"if VAR_41:\n",
"return\n",
"VAR_42 = VAR_49 in (FUNC_4, FUNC_5)\n",
"VAR_43 = self.request.headers.get(host_token.HTTP_HEADER)\n",
"if VAR_43:\n",
"VAR_50 = host_token.validate_host_token(VAR_43)\n",
"assert self.request.remote_addr\n",
"if VAR_50:\n",
"VAR_44 = ipaddr.ip_from_string(self.request.remote_addr)\n",
"VAR_40.peer_host = VAR_50\n",
"VAR_40.peer_ip = VAR_44\n",
"VAR_40.peer_identity = api.verify_ip_whitelisted(VAR_41, VAR_44, self.\n request.headers)\n",
"self.authorization_error(err)\n",
"VAR_45 = self.request.headers.get(delegation.HTTP_HEADER)\n",
"return\n",
"if VAR_45:\n",
"VAR_40.current_identity = VAR_40.peer_identity\n",
"VAR_40.current_identity = delegation.check_delegation_token(VAR_45, VAR_40.\n peer_identity)\n",
"self.authorization_error(api.AuthorizationError('Bad delegation token: %s' %\n exc))\n",
"VAR_51 = not VAR_42 and self.request.method in self.xsrf_token_enforce_on\n",
"self.authorization_error(err)\n",
"VAR_54 = \"\"\"Transient error while validating delegation token.\n%s\"\"\" % exc\n",
"if VAR_51 and self.xsrf_token is None:\n",
"logging.error(VAR_54)\n",
"self.xsrf_token_data = {}\n",
"self.abort(500, detail=msg)\n",
"if self.xsrf_token is not None:\n",
"self.xsrf_token_data = self.verify_xsrf_token()\n",
"super(CLASS_2, self).dispatch()\n"
] | [
"def dispatch(self):...\n",
"\"\"\"docstring\"\"\"\n",
"conf = config.ensure_configured()\n",
"auth_context = api.reinitialize_request_cache()\n",
"self.response.headers['Content-Security-Policy'] = (\n \"default-src https: 'self' 'unsafe-inline' https://www.google.com https://www.google-analytics.com 'unsafe-eval'\"\n )\n",
"self.response.headers['Strict-Transport-Security'\n ] = 'max-age=31536000; includeSubDomains; preload'\n",
"if self.frame_options:\n",
"self.response.headers['X-Frame-Options'] = self.frame_options\n",
"identity = None\n",
"for method_func in self.get_auth_methods(conf):\n",
"self.auth_method = method_func\n",
"identity = method_func(self.request)\n",
"self.authentication_error(err)\n",
"identity = identity or model.Anonymous\n",
"if identity:\n",
"return\n",
"using_headers_auth = method_func in (oauth_authentication,\n service_to_service_authentication)\n",
"host_tok = self.request.headers.get(host_token.HTTP_HEADER)\n",
"if host_tok:\n",
"validated_host = host_token.validate_host_token(host_tok)\n",
"assert self.request.remote_addr\n",
"if validated_host:\n",
"ip = ipaddr.ip_from_string(self.request.remote_addr)\n",
"auth_context.peer_host = validated_host\n",
"auth_context.peer_ip = ip\n",
"auth_context.peer_identity = api.verify_ip_whitelisted(identity, ip, self.\n request.headers)\n",
"self.authorization_error(err)\n",
"delegation_tok = self.request.headers.get(delegation.HTTP_HEADER)\n",
"return\n",
"if delegation_tok:\n",
"auth_context.current_identity = auth_context.peer_identity\n",
"auth_context.current_identity = delegation.check_delegation_token(\n delegation_tok, auth_context.peer_identity)\n",
"self.authorization_error(api.AuthorizationError('Bad delegation token: %s' %\n exc))\n",
"need_xsrf_token = (not using_headers_auth and self.request.method in self.\n xsrf_token_enforce_on)\n",
"self.authorization_error(err)\n",
"msg = \"\"\"Transient error while validating delegation token.\n%s\"\"\" % exc\n",
"if need_xsrf_token and self.xsrf_token is None:\n",
"logging.error(msg)\n",
"self.xsrf_token_data = {}\n",
"self.abort(500, detail=msg)\n",
"if self.xsrf_token is not None:\n",
"self.xsrf_token_data = self.verify_xsrf_token()\n",
"super(AuthenticatingHandler, self).dispatch()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assert'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Expr'"
] |
[
"def FUNC_6(self, VAR_27):...\n",
"if VAR_27:\n",
"VAR_27 = int(VAR_27, 36)\n",
"return Comment._byID(VAR_27, True)\n"
] | [
"def run(self, cid):...\n",
"if cid:\n",
"cid = int(cid, 36)\n",
"return Comment._byID(cid, True)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_7(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_4 = self.parse_body()\n",
"VAR_9 = VAR_4.get('version', None)\n",
"VAR_24 = VAR_4.get('dimensions', {})\n",
"VAR_25 = VAR_4.get('state', {})\n",
"VAR_14 = None\n",
"if VAR_24.get('id'):\n",
"VAR_52 = VAR_24['id']\n",
"if bool(VAR_24.get('quarantined')) or bool(VAR_25.get('quarantined')):\n",
"if isinstance(VAR_52, list) and len(VAR_52) == 1 and isinstance(VAR_52[0],\n",
"return VAR_4, VAR_14, VAR_9, VAR_25, VAR_24, 'Bot self-quarantined'\n",
"VAR_26 = None\n",
"VAR_14 = VAR_24['id'][0]\n",
"for _ in [0]:\n",
"VAR_26 = FUNC_1(self.EXPECTED_KEYS, VAR_4, 'keys')\n",
"if VAR_26:\n",
"if VAR_26:\n",
"VAR_50 = \"\"\"Quarantined Bot\nhttps://%s/restricted/bot/%s\n%s\"\"\" % (app_identity\n .get_default_version_hostname(), VAR_14, VAR_26)\n",
"VAR_27 = bot_management.get_settings_key(VAR_14).get()\n",
"VAR_26 = FUNC_4(self.REQUIRED_STATE_KEYS, VAR_25, 'state')\n",
"ereporter2.log_request(self.request, VAR_5='bot', VAR_8=line)\n",
"if bool(VAR_27 and VAR_27.quarantined):\n",
"if VAR_26:\n",
"return VAR_4, VAR_14, VAR_9, VAR_25, VAR_24, VAR_26\n",
"return VAR_4, VAR_14, VAR_9, VAR_25, VAR_24, 'Quarantined by admin'\n",
"return VAR_4, VAR_14, VAR_9, VAR_25, VAR_24, None\n",
"if not VAR_14:\n",
"VAR_26 = 'Missing bot id'\n",
"if not all(isinstance(key, unicode) and isinstance(values, list) and all(\n",
"VAR_26 = \"\"\"Invalid dimensions type:\n%s\"\"\" % json.dumps(VAR_24, sort_keys=\n True, indent=2, separators=(',', ': '))\n",
"VAR_53 = task_to_run.dimensions_powerset_count(VAR_24)\n",
"if VAR_53 > task_to_run.MAX_DIMENSIONS:\n",
"VAR_26 = 'Dimensions product %d is too high' % VAR_53\n",
"if not isinstance(VAR_25.get('lease_expiration_ts'), (None.__class__, int)):\n",
"VAR_26 = 'lease_expiration_ts (%r) must be int or None' % VAR_25[\n 'lease_expiration_ts']\n"
] | [
"def _process(self):...\n",
"\"\"\"docstring\"\"\"\n",
"request = self.parse_body()\n",
"version = request.get('version', None)\n",
"dimensions = request.get('dimensions', {})\n",
"state = request.get('state', {})\n",
"bot_id = None\n",
"if dimensions.get('id'):\n",
"dimension_id = dimensions['id']\n",
"if bool(dimensions.get('quarantined')) or bool(state.get('quarantined')):\n",
"if isinstance(dimension_id, list) and len(dimension_id) == 1 and isinstance(\n",
"return request, bot_id, version, state, dimensions, 'Bot self-quarantined'\n",
"quarantined_msg = None\n",
"bot_id = dimensions['id'][0]\n",
"for _ in [0]:\n",
"quarantined_msg = has_unexpected_keys(self.EXPECTED_KEYS, request, 'keys')\n",
"if quarantined_msg:\n",
"if quarantined_msg:\n",
"line = \"\"\"Quarantined Bot\nhttps://%s/restricted/bot/%s\n%s\"\"\" % (app_identity\n .get_default_version_hostname(), bot_id, quarantined_msg)\n",
"bot_settings = bot_management.get_settings_key(bot_id).get()\n",
"quarantined_msg = has_missing_keys(self.REQUIRED_STATE_KEYS, state, 'state')\n",
"ereporter2.log_request(self.request, source='bot', message=line)\n",
"if bool(bot_settings and bot_settings.quarantined):\n",
"if quarantined_msg:\n",
"return request, bot_id, version, state, dimensions, quarantined_msg\n",
"return request, bot_id, version, state, dimensions, 'Quarantined by admin'\n",
"return request, bot_id, version, state, dimensions, None\n",
"if not bot_id:\n",
"quarantined_msg = 'Missing bot id'\n",
"if not all(isinstance(key, unicode) and isinstance(values, list) and all(\n",
"quarantined_msg = \"\"\"Invalid dimensions type:\n%s\"\"\" % json.dumps(dimensions,\n sort_keys=True, indent=2, separators=(',', ': '))\n",
"dimensions_count = task_to_run.dimensions_powerset_count(dimensions)\n",
"if dimensions_count > task_to_run.MAX_DIMENSIONS:\n",
"quarantined_msg = 'Dimensions product %d is too high' % dimensions_count\n",
"if not isinstance(state.get('lease_expiration_ts'), (None.__class__, int)):\n",
"quarantined_msg = 'lease_expiration_ts (%r) must be int or None' % state[\n 'lease_expiration_ts']\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Return'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Return'",
"Return'",
"Return'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'"
] |
[
"def FUNC_0(self, VAR_1, VAR_6, VAR_7, VAR_3=None):...\n",
"\"\"\"docstring\"\"\"\n",
"return self.text_input(VAR_6, VAR_7)\n"
] | [
"def make_input(self, version, name, value, attribute=None):...\n",
"\"\"\"docstring\"\"\"\n",
"return self.text_input(name, value)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_14(self, VAR_9):...\n",
"if not self.logged_in():\n",
"print('shrub: unauthenticated; use \"login [username] to log in first')\n",
"VAR_20 = self.send_cmd('create_issue{} {}'.format(self.insecure_mode, VAR_9))\n",
"return\n",
"print(VAR_20)\n"
] | [
"def do_create_issue(self, line):...\n",
"if not self.logged_in():\n",
"print('shrub: unauthenticated; use \"login [username] to log in first')\n",
"response = self.send_cmd('create_issue{} {}'.format(self.insecure_mode, line))\n",
"return\n",
"print(response)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Expr'"
] |
[
"@staticmethod...\n",
"if tuple(VAR_17) != tuple(VAR_12):\n",
"VAR_31 = Diff.from_string_arrays(VAR_12, VAR_17)\n",
"for diff in VAR_31.split_diff():\n",
"yield diff\n"
] | [
"@staticmethod...\n",
"if tuple(new_file) != tuple(file):\n",
"wholediff = Diff.from_string_arrays(file, new_file)\n",
"for diff in wholediff.split_diff():\n",
"yield diff\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Assign'",
"For",
"Expr'"
] |
[
"def FUNC_13(self):...\n",
"VAR_11 = {}\n",
"for VAR_12 in self.parent_groups:\n",
"VAR_11[VAR_12.name] = VAR_12\n",
"return VAR_11\n",
"VAR_11.update(VAR_12._get_ancestors())\n"
] | [
"def _get_ancestors(self):...\n",
"results = {}\n",
"for g in self.parent_groups:\n",
"results[g.name] = g\n",
"return results\n",
"results.update(g._get_ancestors())\n"
] | [
0,
1,
0,
1,
1,
1
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Return'",
"Expr'"
] |
[
"def FUNC_1(self):...\n",
"VAR_0 = {'groupby': ['tags_key', 'tags_value']}\n",
"assert column_expr('tags_key', VAR_0\n ) == '(((arrayJoin(arrayMap((x,y) -> [x,y], tags.key, tags.value)) AS all_tags))[1] AS tags_key)'\n",
"assert column_expr('tags_key', VAR_0) == 'tags_key'\n",
"assert column_expr('tags_value', VAR_0) == '((all_tags)[2] AS tags_value)'\n"
] | [
"def test_alias_in_alias(self):...\n",
"body = {'groupby': ['tags_key', 'tags_value']}\n",
"assert column_expr('tags_key', body\n ) == '(((arrayJoin(arrayMap((x,y) -> [x,y], tags.key, tags.value)) AS all_tags))[1] AS tags_key)'\n",
"assert column_expr('tags_key', body) == 'tags_key'\n",
"assert column_expr('tags_value', body) == '((all_tags)[2] AS tags_value)'\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assert'",
"Assert'",
"Assert'"
] |
[
"@wait_for(timeout=5.0)...\n",
"return VAR_0.start_client(VAR_1=mode)\n"
] | [
"@wait_for(timeout=5.0)...\n",
"return client.start_client(mode=mode)\n"
] | [
0,
0
] | [
"For",
"Return'"
] |
[
"def FUNC_0(self):...\n",
"VAR_7 = []\n",
"for parent in self.parent_groups:\n",
"VAR_7.append(parent.serialize())\n",
"self._hosts = None\n",
"VAR_8 = dict(VAR_0=self.name, vars=self.vars.copy(), VAR_7=parent_groups,\n depth=self.depth, VAR_9=self.hosts)\n",
"return VAR_8\n"
] | [
"def serialize(self):...\n",
"parent_groups = []\n",
"for parent in self.parent_groups:\n",
"parent_groups.append(parent.serialize())\n",
"self._hosts = None\n",
"result = dict(name=self.name, vars=self.vars.copy(), parent_groups=\n parent_groups, depth=self.depth, hosts=self.hosts)\n",
"return result\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Expr'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def __init__(self, VAR_8=None, VAR_9=None, VAR_2=None, **VAR_10):...\n",
"if VAR_2 and not callable(VAR_2):\n",
"self.encoder = VAR_2\n",
"super().__init__(VAR_8, VAR_9, **kwargs)\n"
] | [
"def __init__(self, verbose_name=None, name=None, encoder=None, **kwargs):...\n",
"if encoder and not callable(encoder):\n",
"self.encoder = encoder\n",
"super().__init__(verbose_name, name, **kwargs)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Expr'"
] |
[
"def FUNC_13(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = 2\n",
"VAR_10 = 'string'.format(repo_dir=REPO_DIR, shard_str='/shard_' + self.\n shard if self.shard else '', procs=process_count)\n",
"VAR_7 = BokChoyTestSuite('', num_processes=process_count)\n",
"self.assertEqual(BokChoyTestSuite.verbosity_processes_string(VAR_7), VAR_10)\n"
] | [
"def test_verbosity_settings_2_processes(self):...\n",
"\"\"\"docstring\"\"\"\n",
"process_count = 2\n",
"expected_verbosity_string = (\n '--with-xunitmp --xunitmp-file={repo_dir}/reports/bok_choy{shard_str}/xunit.xml --processes={procs} --no-color --process-timeout=1200'\n .format(repo_dir=REPO_DIR, shard_str='/shard_' + self.shard if self.\n shard else '', procs=process_count))\n",
"suite = BokChoyTestSuite('', num_processes=process_count)\n",
"self.assertEqual(BokChoyTestSuite.verbosity_processes_string(suite),\n expected_verbosity_string)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_2(VAR_2):...\n",
"VAR_2.call_to_terminate_proxy()\n"
] | [
"def after_all(context):...\n",
"context.call_to_terminate_proxy()\n"
] | [
0,
5
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_4(VAR_14, VAR_15):...\n",
"return FUNC_6(VAR_14, [[VAR_16 for line in VAR_15 for VAR_16 in line]], True)\n"
] | [
"def hStrip(dims, files):...\n",
"return smartGrid(dims, [[file for line in files for file in line]], True)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_2(self, VAR_21, VAR_22):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_55 = VAR_21.rstrip('/').split('/')\n",
"if len(VAR_55[0]):\n",
"VAR_22 = [x for x in VAR_22.split('/') if len(x) and x is not None]\n",
"VAR_22, VAR_55 = [], VAR_55[1:]\n",
"VAR_21 = VAR_21[1:]\n",
"VAR_56 = []\n",
"def FUNC_33(VAR_57, VAR_22):...\n",
"if not len(VAR_57):\n",
"VAR_56.append('/%s' % ('/'.join(VAR_22),))\n",
"if VAR_57[0] == '.':\n",
"FUNC_33(VAR_55, VAR_22)\n",
"FUNC_33(VAR_57[1:], VAR_22)\n",
"if VAR_57[0] == '..':\n",
"return VAR_56\n",
"FUNC_33(VAR_57[1:], VAR_22[:-1])\n",
"VAR_65 = [x[VAR_1] for x in self.get_path('/'.join(VAR_22))]\n",
"VAR_74 = [x for x in VAR_65 if fnmatch.fnmatchcase(x, VAR_57[0])]\n",
"for match in VAR_74:\n",
"FUNC_33(VAR_57[1:], VAR_22 + [match])\n"
] | [
"def resolve_path_wc(self, path, cwd):...\n",
"\"\"\"docstring\"\"\"\n",
"pieces = path.rstrip('/').split('/')\n",
"if len(pieces[0]):\n",
"cwd = [x for x in cwd.split('/') if len(x) and x is not None]\n",
"cwd, pieces = [], pieces[1:]\n",
"path = path[1:]\n",
"found = []\n",
"def foo(p, cwd):...\n",
"if not len(p):\n",
"found.append('/%s' % ('/'.join(cwd),))\n",
"if p[0] == '.':\n",
"foo(pieces, cwd)\n",
"foo(p[1:], cwd)\n",
"if p[0] == '..':\n",
"return found\n",
"foo(p[1:], cwd[:-1])\n",
"names = [x[A_NAME] for x in self.get_path('/'.join(cwd))]\n",
"matches = [x for x in names if fnmatch.fnmatchcase(x, p[0])]\n",
"for match in matches:\n",
"foo(p[1:], cwd + [match])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Return'",
"Expr'",
"Assign'",
"Assign'",
"For",
"Expr'"
] |
[
"def FUNC_39(VAR_101):...\n",
"VAR_101.priority = VAR_77\n",
"return VAR_101\n"
] | [
"def decorate(ruleinfo):...\n",
"ruleinfo.priority = priority\n",
"return ruleinfo\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_2(self):...\n",
"return self.name\n"
] | [
"def get_name(self):...\n",
"return self.name\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_3(self, VAR_2, VAR_1, VAR_3):...\n",
""
] | [
"def support_project(self, user_id, project_id, money):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def __init__(self, VAR_1):...\n",
"self.file = VAR_1\n",
"self.hosts = list()\n",
"self.parse()\n"
] | [
"def __init__(self, file):...\n",
"self.file = file\n",
"self.hosts = list()\n",
"self.parse()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"from pylons import c, request, g\n",
"from pylons.i18n import _\n",
"from pylons.controllers.util import abort\n",
"from r2.lib import utils, captcha\n",
"from r2.lib.filters import unkeep_space, websafe, _force_utf8, _force_ascii\n",
"from r2.lib.db.operators import asc, desc\n",
"from r2.config import cache\n",
"from r2.lib.template_helpers import add_sr\n",
"from r2.lib.jsonresponse import json_respond\n",
"from r2.models import *\n",
"from r2.controllers.errors import errors, UserRequiredException\n",
"from copy import copy\n",
"from datetime import datetime, timedelta\n",
"import re\n",
"VAR_6 = None\n",
"def __init__(self, VAR_7=None, VAR_8=None, VAR_9=True, VAR_10=True, VAR_11=True...\n",
"if VAR_7:\n",
"self.param = VAR_7\n",
"self.param = self.default_param\n",
"self.default = VAR_8\n",
"self.post, self.get, self.url = VAR_9, VAR_10, VAR_11\n",
"def __call__(self, VAR_11):...\n",
"VAR_15 = []\n",
"if self.param:\n",
"for p in utils.tup(self.param):\n",
"return self.run(*VAR_15)\n",
"if self.post and VAR_102.post.get(p):\n",
"VAR_33 = VAR_102.post[p]\n",
"if self.get and VAR_102.get.get(p):\n",
"VAR_15.append(VAR_33)\n",
"VAR_33 = VAR_102.get[p]\n",
"if self.url and VAR_11.get(p):\n",
"VAR_33 = VAR_11[p]\n",
"VAR_33 = self.default\n"
] | [
"from pylons import c, request, g\n",
"from pylons.i18n import _\n",
"from pylons.controllers.util import abort\n",
"from r2.lib import utils, captcha\n",
"from r2.lib.filters import unkeep_space, websafe, _force_utf8, _force_ascii\n",
"from r2.lib.db.operators import asc, desc\n",
"from r2.config import cache\n",
"from r2.lib.template_helpers import add_sr\n",
"from r2.lib.jsonresponse import json_respond\n",
"from r2.models import *\n",
"from r2.controllers.errors import errors, UserRequiredException\n",
"from copy import copy\n",
"from datetime import datetime, timedelta\n",
"import re\n",
"default_param = None\n",
"def __init__(self, param=None, default=None, post=True, get=True, url=True):...\n",
"if param:\n",
"self.param = param\n",
"self.param = self.default_param\n",
"self.default = default\n",
"self.post, self.get, self.url = post, get, url\n",
"def __call__(self, url):...\n",
"a = []\n",
"if self.param:\n",
"for p in utils.tup(self.param):\n",
"return self.run(*a)\n",
"if self.post and request.post.get(p):\n",
"val = request.post[p]\n",
"if self.get and request.get.get(p):\n",
"a.append(val)\n",
"val = request.get[p]\n",
"if self.url and url.get(p):\n",
"val = url[p]\n",
"val = self.default\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Condition",
"For",
"Return'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_1(VAR_0, VAR_1, VAR_2=False):...\n",
"\"\"\"docstring\"\"\"\n",
"if 'device_class' in VAR_0:\n",
"VAR_22 = VAR_0['device_class']\n",
"if VAR_1 in ['simulate', 'fake']:\n",
"VAR_11, VAR_12 = VAR_22.split('.')\n",
"VAR_22 = 'fake.Fake'\n",
"VAR_22 = 'generic.Generic'\n",
"VAR_13 = import_module('..devices.' + VAR_11, __name__)\n",
"return getattr(VAR_13, VAR_12)(VAR_0, VAR_2=debug)\n"
] | [
"def get_device(settings, instrum, debug=False):...\n",
"\"\"\"docstring\"\"\"\n",
"if 'device_class' in settings:\n",
"device_class = settings['device_class']\n",
"if instrum in ['simulate', 'fake']:\n",
"mod, obj = device_class.split('.')\n",
"device_class = 'fake.Fake'\n",
"device_class = 'generic.Generic'\n",
"module = import_module('..devices.' + mod, __name__)\n",
"return getattr(module, obj)(settings, debug=debug)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_6():...\n",
"if not request.method in VAR_0:\n",
"return True\n",
"if not request.environ.get('AUTH_TYPE') == 'cookie':\n",
"return True\n",
"if config.get('skip_authentication'):\n",
"return True\n",
"if request.environ.get('HTTP_REFERER'):\n",
"VAR_7 = urlparse(request.environ.get('HTTP_REFERER'))\n",
"if request.method == 'GET' and FUNC_4():\n",
"VAR_8 = VAR_7.hostname\n",
"return True\n",
"return False\n",
"if VAR_7.port:\n",
"VAR_8 += ':' + str(VAR_7.port)\n",
"if VAR_8.endswith(request.environ['adhocracy.domain']):\n",
"if request.method != 'GET':\n",
"return True\n"
] | [
"def check():...\n",
"if not request.method in methods:\n",
"return True\n",
"if not request.environ.get('AUTH_TYPE') == 'cookie':\n",
"return True\n",
"if config.get('skip_authentication'):\n",
"return True\n",
"if request.environ.get('HTTP_REFERER'):\n",
"ref_url = urlparse(request.environ.get('HTTP_REFERER'))\n",
"if request.method == 'GET' and has_token():\n",
"ref_host = ref_url.hostname\n",
"return True\n",
"return False\n",
"if ref_url.port:\n",
"ref_host += ':' + str(ref_url.port)\n",
"if ref_host.endswith(request.environ['adhocracy.domain']):\n",
"if request.method != 'GET':\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
5,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Return'",
"Condition",
"AugAssign'",
"Condition",
"Condition",
"Return'"
] |
[
"@click.command(help='List accounts')...\n",
"VAR_37 = newrelic.get_accounts()\n",
"if VAR_11:\n",
"for VAR_20 in VAR_37:\n",
"VAR_24 = [['ID', 'Name']]\n",
"print(VAR_20[0])\n",
"for VAR_20 in VAR_37:\n",
"VAR_24.append([VAR_20['id'], VAR_20['name']])\n",
"VAR_25 = SingleTable(VAR_24)\n",
"VAR_25.title = click.style('Accounts', fg='black')\n",
"print(VAR_25.table)\n"
] | [
"@click.command(help='List accounts')...\n",
"accounts = newrelic.get_accounts()\n",
"if raw:\n",
"for account in accounts:\n",
"data = [['ID', 'Name']]\n",
"print(account[0])\n",
"for account in accounts:\n",
"data.append([account['id'], account['name']])\n",
"table = SingleTable(data)\n",
"table.title = click.style('Accounts', fg='black')\n",
"print(table.table)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"For",
"Assign'",
"Expr'",
"For",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_26(self, VAR_74):...\n",
"def FUNC_39(VAR_101):...\n",
"VAR_101.benchmark = VAR_74\n",
"return VAR_101\n"
] | [
"def benchmark(self, benchmark):...\n",
"def decorate(ruleinfo):...\n",
"ruleinfo.benchmark = benchmark\n",
"return ruleinfo\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"import logging\n",
"import traceback\n",
"from fuel_health.common.utils.data_utils import rand_name\n",
"from fuel_health import nmanager\n",
"VAR_0 = logging.getLogger(__name__)\n",
"\"\"\"string\"\"\"\n",
"@classmethod...\n",
"super(CLASS_0, VAR_1).setUpClass()\n",
"if VAR_1.manager.clients_initialized:\n",
"VAR_1.tenant_id = VAR_1.manager._get_identity_client(VAR_1.config.identity.\n admin_username, VAR_1.config.identity.admin_password, VAR_1.config.\n identity.admin_tenant_name).tenant_id\n",
"def FUNC_1(self):...\n",
"VAR_1.keypairs = {}\n",
"super(CLASS_0, self).setUp()\n",
"VAR_1.security_groups = {}\n",
"self.check_clients_state()\n",
"VAR_1.network = []\n",
"if not self.config.compute.compute_nodes:\n",
"VAR_1.servers = []\n",
"self.skipTest('There are no compute nodes')\n",
"def FUNC_2(self):...\n",
"VAR_1.floating_ips = []\n",
"super(CLASS_0, self).tearDown()\n",
"if self.manager.clients_initialized:\n",
"if self.servers:\n",
"def FUNC_3(self):...\n",
"for VAR_7 in self.servers:\n",
"\"\"\"docstring\"\"\"\n",
"self._delete_server(VAR_7)\n",
"VAR_0.debug(traceback.format_exc())\n",
"self.keypairs[self.tenant_id] = self.verify(30, self._create_keypair, 1,\n 'Keypair can not be created.', 'keypair creation', self.compute_client)\n",
"self.servers.remove(VAR_7)\n",
"VAR_0.debug('Server was already deleted.')\n",
"def FUNC_4(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.security_groups[self.tenant_id] = self.verify(25, self.\n _create_security_group, 1, 'Security group can not be created.',\n 'security group creation', self.compute_client)\n",
"def FUNC_5(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = self.verify(50, self._list_networks, 1,\n 'List of networks is not available.', 'listing networks')\n",
"VAR_3, VAR_4 = zip(*((n.label, n.id) for n in VAR_2))\n",
"for mynet in self.network:\n",
"self.verify_response_body(VAR_3, mynet.label,\n 'Network can not be created.properly', failed_step=2)\n",
"def FUNC_6(self):...\n",
"self.verify_response_body(VAR_4, mynet.id,\n 'Network can not be created. properly ', failed_step=3)\n",
"\"\"\"docstring\"\"\"\n",
"self.check_image_exists()\n",
"if not self.security_groups:\n",
"self.security_groups[self.tenant_id] = self.verify(25, self.\n _create_security_group, 1, 'Security group can not be created.',\n 'security group creation', self.compute_client)\n",
"VAR_5 = rand_name('ost1_test-server-smoke-')\n",
"VAR_6 = [self.security_groups[self.tenant_id].name]\n",
"VAR_7 = self.verify(200, self._create_server, 2,\n 'Creating instance using the new security group has failed.',\n 'image creation', self.compute_client, VAR_5, VAR_6)\n",
"self.verify(30, self._delete_server, 3, 'Server can not be deleted.',\n 'server deletion', VAR_7)\n",
"def FUNC_7(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.check_image_exists()\n",
"if not self.security_groups:\n",
"self.security_groups[self.tenant_id] = self.verify(25, self.\n _create_security_group, 1, 'Security group can not be created.',\n 'security group creation', self.compute_client)\n",
"VAR_5 = rand_name('ost1_test-server-smoke-')\n",
"VAR_6 = [self.security_groups[self.tenant_id].name]\n",
"VAR_7 = self.verify(250, self._create_server, 2,\n 'Server can not be created.', 'server creation', self.compute_client,\n VAR_5, VAR_6)\n",
"VAR_8 = self.verify(20, self._create_floating_ip, 3,\n 'Floating IP can not be created.', 'floating IP creation')\n",
"self.verify(20, self._assign_floating_ip_to_instance, 4,\n 'Floating IP can not be assigned.', 'floating IP assignment', self.\n compute_client, VAR_7, VAR_8)\n",
"self.floating_ips.append(VAR_8)\n",
"VAR_9 = VAR_8.ip\n",
"VAR_0.info('is address is {0}'.format(VAR_9))\n",
"VAR_0.debug(VAR_9)\n",
"self.verify(600, self._check_vm_connectivity, 5,\n 'VM connectivity doesn`t function properly.',\n 'VM connectivity checking', VAR_9, 30, (9, 60))\n",
"self.verify(600, self._check_connectivity_from_vm, 6,\n 'Connectivity to 8.8.8.8 from the VM doesn`t function properly.',\n 'public connectivity checking from VM', VAR_9, 30, (9, 60))\n",
"self.verify(20, self.compute_client.servers.remove_floating_ip, 7,\n 'Floating IP cannot be removed.', 'removing floating IP', VAR_7, VAR_8)\n",
"self.verify(20, self.compute_client.floating_ips.delete, 8,\n 'Floating IP cannot be deleted.', 'floating IP deletion', VAR_8)\n",
"if self.floating_ips:\n",
"self.floating_ips.remove(VAR_8)\n",
"self.verify(30, self._delete_server, 9, 'Server can not be deleted. ',\n 'server deletion', VAR_7)\n",
"def FUNC_8(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.check_image_exists()\n",
"if not self.security_groups:\n",
"self.security_groups[self.tenant_id] = self.verify(25, self.\n _create_security_group, 1, 'Security group can not be created.',\n 'security group creation', self.compute_client)\n",
"VAR_5 = rand_name('ost1_test-server-smoke-')\n",
"VAR_6 = [self.security_groups[self.tenant_id].name]\n",
"VAR_7 = self.verify(250, self._create_server, 2,\n 'Server can not be created.', 'server creation', self.compute_client,\n VAR_5, VAR_6)\n",
"for addr in VAR_7.addresses:\n",
"VAR_0.debug(traceback.format_exc())\n",
"self.verify(600, self._check_connectivity_from_vm, 3,\n 'Connectivity to 8.8.8.8 from the VM doesn`t function properly.',\n 'public connectivity checking from VM', VAR_12, 30, (9, 30), VAR_11)\n",
"if addr.startswith('novanetwork'):\n",
"if not self.config.compute.use_vcenter:\n",
"self.fail(\n 'Step 3 failed: cannot get instance details. Please refer to OpenStack logs for more details.'\n )\n",
"self.verify(30, self._delete_server, 4, 'Server can not be deleted. ',\n 'server deletion', VAR_7)\n",
"VAR_12 = VAR_7.addresses[addr][0]['addr']\n",
"VAR_11 = getattr(VAR_7, 'OS-EXT-SRV-ATTR:host')\n",
"VAR_11 = None\n",
"def FUNC_9(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.check_image_exists()\n",
"if not self.security_groups:\n",
"self.security_groups[self.tenant_id] = self.verify(25, self.\n _create_security_group, 1, 'Security group can not be created.',\n 'security group creation', self.compute_client)\n",
"VAR_5 = rand_name('ost1_test-server-smoke-file_inj-')\n",
"VAR_6 = [self.security_groups[self.tenant_id].name]\n",
"VAR_10 = {'/home/cirros/server.txt': self._load_file('server.txt')}\n",
"VAR_7 = self.verify(300, self._create_server, 2,\n 'Creating instance using the new security group has failed.',\n 'instance creation', self.compute_client, VAR_5, VAR_6, VAR_10=data_file)\n",
"VAR_8 = self.verify(20, self._create_floating_ip, 3,\n 'Floating IP can not be created.', 'floating IP creation')\n",
"self.verify(20, self._assign_floating_ip_to_instance, 3,\n 'Floating IP can not be assigned.', 'floating IP assignment', self.\n compute_client, VAR_7, VAR_8)\n",
"self.floating_ips.append(VAR_8)\n",
"VAR_9 = VAR_8.ip\n",
"self.verify(600, self._run_command_from_vm, 4,\n 'Can not find injected file on instance.',\n 'check if injected file exists', VAR_9, 30, (9, 60),\n '[ -f /home/cirros/server.txt ] && echo \"True\" || echo \"False\"')\n",
"self.verify(20, self.compute_client.servers.remove_floating_ip, 5,\n 'Floating IP cannot be removed.', 'removing floating IP', VAR_7, VAR_8)\n",
"self.verify(20, self.compute_client.floating_ips.delete, 5,\n 'Floating IP cannot be deleted.', 'floating IP deletion', VAR_8)\n",
"if self.floating_ips:\n",
"self.floating_ips.remove(VAR_8)\n",
"self.verify(30, self._delete_server, 6, 'Server can not be deleted. ',\n 'server deletion', VAR_7)\n"
] | [
"import logging\n",
"import traceback\n",
"from fuel_health.common.utils.data_utils import rand_name\n",
"from fuel_health import nmanager\n",
"LOG = logging.getLogger(__name__)\n",
"\"\"\"Test suit verifies:\n - keypairs creation\n - security groups creation\n - Network creation\n - Instance creation\n - Floating ip creation\n - Instance connectivity by floating IP\n \"\"\"\n",
"@classmethod...\n",
"super(TestNovaNetwork, cls).setUpClass()\n",
"if cls.manager.clients_initialized:\n",
"cls.tenant_id = cls.manager._get_identity_client(cls.config.identity.\n admin_username, cls.config.identity.admin_password, cls.config.identity\n .admin_tenant_name).tenant_id\n",
"def setUp(self):...\n",
"cls.keypairs = {}\n",
"super(TestNovaNetwork, self).setUp()\n",
"cls.security_groups = {}\n",
"self.check_clients_state()\n",
"cls.network = []\n",
"if not self.config.compute.compute_nodes:\n",
"cls.servers = []\n",
"self.skipTest('There are no compute nodes')\n",
"def tearDown(self):...\n",
"cls.floating_ips = []\n",
"super(TestNovaNetwork, self).tearDown()\n",
"if self.manager.clients_initialized:\n",
"if self.servers:\n",
"def test_001_create_keypairs(self):...\n",
"for server in self.servers:\n",
"\"\"\"docstring\"\"\"\n",
"self._delete_server(server)\n",
"LOG.debug(traceback.format_exc())\n",
"self.keypairs[self.tenant_id] = self.verify(30, self._create_keypair, 1,\n 'Keypair can not be created.', 'keypair creation', self.compute_client)\n",
"self.servers.remove(server)\n",
"LOG.debug('Server was already deleted.')\n",
"def test_002_create_security_groups(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.security_groups[self.tenant_id] = self.verify(25, self.\n _create_security_group, 1, 'Security group can not be created.',\n 'security group creation', self.compute_client)\n",
"def test_003_check_networks(self):...\n",
"\"\"\"docstring\"\"\"\n",
"seen_nets = self.verify(50, self._list_networks, 1,\n 'List of networks is not available.', 'listing networks')\n",
"seen_labels, seen_ids = zip(*((n.label, n.id) for n in seen_nets))\n",
"for mynet in self.network:\n",
"self.verify_response_body(seen_labels, mynet.label,\n 'Network can not be created.properly', failed_step=2)\n",
"def test_004_create_servers(self):...\n",
"self.verify_response_body(seen_ids, mynet.id,\n 'Network can not be created. properly ', failed_step=3)\n",
"\"\"\"docstring\"\"\"\n",
"self.check_image_exists()\n",
"if not self.security_groups:\n",
"self.security_groups[self.tenant_id] = self.verify(25, self.\n _create_security_group, 1, 'Security group can not be created.',\n 'security group creation', self.compute_client)\n",
"name = rand_name('ost1_test-server-smoke-')\n",
"security_groups = [self.security_groups[self.tenant_id].name]\n",
"server = self.verify(200, self._create_server, 2,\n 'Creating instance using the new security group has failed.',\n 'image creation', self.compute_client, name, security_groups)\n",
"self.verify(30, self._delete_server, 3, 'Server can not be deleted.',\n 'server deletion', server)\n",
"def test_008_check_public_instance_connectivity_from_instance(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.check_image_exists()\n",
"if not self.security_groups:\n",
"self.security_groups[self.tenant_id] = self.verify(25, self.\n _create_security_group, 1, 'Security group can not be created.',\n 'security group creation', self.compute_client)\n",
"name = rand_name('ost1_test-server-smoke-')\n",
"security_groups = [self.security_groups[self.tenant_id].name]\n",
"server = self.verify(250, self._create_server, 2,\n 'Server can not be created.', 'server creation', self.compute_client,\n name, security_groups)\n",
"floating_ip = self.verify(20, self._create_floating_ip, 3,\n 'Floating IP can not be created.', 'floating IP creation')\n",
"self.verify(20, self._assign_floating_ip_to_instance, 4,\n 'Floating IP can not be assigned.', 'floating IP assignment', self.\n compute_client, server, floating_ip)\n",
"self.floating_ips.append(floating_ip)\n",
"ip_address = floating_ip.ip\n",
"LOG.info('is address is {0}'.format(ip_address))\n",
"LOG.debug(ip_address)\n",
"self.verify(600, self._check_vm_connectivity, 5,\n 'VM connectivity doesn`t function properly.',\n 'VM connectivity checking', ip_address, 30, (9, 60))\n",
"self.verify(600, self._check_connectivity_from_vm, 6,\n 'Connectivity to 8.8.8.8 from the VM doesn`t function properly.',\n 'public connectivity checking from VM', ip_address, 30, (9, 60))\n",
"self.verify(20, self.compute_client.servers.remove_floating_ip, 7,\n 'Floating IP cannot be removed.', 'removing floating IP', server,\n floating_ip)\n",
"self.verify(20, self.compute_client.floating_ips.delete, 8,\n 'Floating IP cannot be deleted.', 'floating IP deletion', floating_ip)\n",
"if self.floating_ips:\n",
"self.floating_ips.remove(floating_ip)\n",
"self.verify(30, self._delete_server, 9, 'Server can not be deleted. ',\n 'server deletion', server)\n",
"def test_006_check_internet_connectivity_instance_without_floatingIP(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.check_image_exists()\n",
"if not self.security_groups:\n",
"self.security_groups[self.tenant_id] = self.verify(25, self.\n _create_security_group, 1, 'Security group can not be created.',\n 'security group creation', self.compute_client)\n",
"name = rand_name('ost1_test-server-smoke-')\n",
"security_groups = [self.security_groups[self.tenant_id].name]\n",
"server = self.verify(250, self._create_server, 2,\n 'Server can not be created.', 'server creation', self.compute_client,\n name, security_groups)\n",
"for addr in server.addresses:\n",
"LOG.debug(traceback.format_exc())\n",
"self.verify(600, self._check_connectivity_from_vm, 3,\n 'Connectivity to 8.8.8.8 from the VM doesn`t function properly.',\n 'public connectivity checking from VM', instance_ip, 30, (9, 30), compute)\n",
"if addr.startswith('novanetwork'):\n",
"if not self.config.compute.use_vcenter:\n",
"self.fail(\n 'Step 3 failed: cannot get instance details. Please refer to OpenStack logs for more details.'\n )\n",
"self.verify(30, self._delete_server, 4, 'Server can not be deleted. ',\n 'server deletion', server)\n",
"instance_ip = server.addresses[addr][0]['addr']\n",
"compute = getattr(server, 'OS-EXT-SRV-ATTR:host')\n",
"compute = None\n",
"def test_009_create_server_with_file(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.check_image_exists()\n",
"if not self.security_groups:\n",
"self.security_groups[self.tenant_id] = self.verify(25, self.\n _create_security_group, 1, 'Security group can not be created.',\n 'security group creation', self.compute_client)\n",
"name = rand_name('ost1_test-server-smoke-file_inj-')\n",
"security_groups = [self.security_groups[self.tenant_id].name]\n",
"data_file = {'/home/cirros/server.txt': self._load_file('server.txt')}\n",
"server = self.verify(300, self._create_server, 2,\n 'Creating instance using the new security group has failed.',\n 'instance creation', self.compute_client, name, security_groups,\n data_file=data_file)\n",
"floating_ip = self.verify(20, self._create_floating_ip, 3,\n 'Floating IP can not be created.', 'floating IP creation')\n",
"self.verify(20, self._assign_floating_ip_to_instance, 3,\n 'Floating IP can not be assigned.', 'floating IP assignment', self.\n compute_client, server, floating_ip)\n",
"self.floating_ips.append(floating_ip)\n",
"ip_address = floating_ip.ip\n",
"self.verify(600, self._run_command_from_vm, 4,\n 'Can not find injected file on instance.',\n 'check if injected file exists', ip_address, 30, (9, 60),\n '[ -f /home/cirros/server.txt ] && echo \"True\" || echo \"False\"')\n",
"self.verify(20, self.compute_client.servers.remove_floating_ip, 5,\n 'Floating IP cannot be removed.', 'removing floating IP', server,\n floating_ip)\n",
"self.verify(20, self.compute_client.floating_ips.delete, 5,\n 'Floating IP cannot be deleted.', 'floating IP deletion', floating_ip)\n",
"if self.floating_ips:\n",
"self.floating_ips.remove(floating_ip)\n",
"self.verify(30, self._delete_server, 6, 'Server can not be deleted. ',\n 'server deletion', server)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Expr'",
"Condition",
"Condition",
"FunctionDef'",
"For",
"Docstring",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"FunctionDef'",
"Docstring",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"For",
"Expr'",
"FunctionDef'",
"Expr'",
"Docstring",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Docstring",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"FunctionDef'",
"Docstring",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Expr'",
"Condition",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_6(self, VAR_47):...\n",
"if VAR_101.user_is_admin:\n",
"return True\n",
"if VAR_101.user_is_loggedin:\n",
"VAR_18 = Thing._by_fullname(VAR_47, data=True)\n",
"abort(403, 'forbidden')\n",
"VAR_109 = VAR_18.subreddit_slow\n",
"if VAR_109.can_ban(VAR_101.user):\n",
"return True\n"
] | [
"def run(self, thing_name):...\n",
"if c.user_is_admin:\n",
"return True\n",
"if c.user_is_loggedin:\n",
"item = Thing._by_fullname(thing_name, data=True)\n",
"abort(403, 'forbidden')\n",
"subreddit = item.subreddit_slow\n",
"if subreddit.can_ban(c.user):\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_4(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._check_flags()\n"
] | [
"def check_for_setup_error(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._check_flags()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"def FUNC_1(VAR_0, VAR_1=None):...\n",
"VAR_5 = FUNC_0()\n",
"VAR_6 = VAR_5.cursor()\n",
"if not VAR_1:\n",
"VAR_6.execute(VAR_0)\n",
"VAR_6.execute(VAR_0, VAR_1)\n",
"VAR_5.commit()\n",
"VAR_5.close()\n"
] | [
"def execute(query, params=None):...\n",
"conn = connect()\n",
"c = conn.cursor()\n",
"if not params:\n",
"c.execute(query)\n",
"c.execute(query, params)\n",
"conn.commit()\n",
"conn.close()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_11(self, VAR_9):...\n",
"return [self.actors[VAR_6] for VAR_6 in self.actors.keys() if VAR_9 in self\n .actors[VAR_6].name]\n"
] | [
"def allActorsbyName(self, name):...\n",
"return [self.actors[ID] for ID in self.actors.keys() if name in self.actors\n [ID].name]\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_16(self, VAR_7, VAR_15):...\n",
"VAR_26 = []\n",
"if VAR_7._joins:\n",
"VAR_9 = [None] * len(VAR_7._joins) + [None]\n",
"for res in VAR_15:\n",
"VAR_30 = {}\n",
"VAR_4 = dict(zip(VAR_7.model_class._meta.sorted_fields_names, res))\n",
"return VAR_26\n",
"VAR_31 = []\n",
"VAR_26.append(VAR_7.model_class(**kwargs))\n",
"VAR_32 = {}\n",
"for res in VAR_15:\n",
"VAR_34 = len(VAR_7.model_class._meta.sorted_fields_names)\n",
"VAR_35 = res[:VAR_34]\n",
"VAR_27 = 0\n",
"if not VAR_30:\n",
"VAR_32[VAR_7.model_class] = VAR_27\n",
"if not VAR_39(VAR_35) in VAR_30:\n",
"VAR_31.append(None)\n",
"VAR_4 = dict(zip(VAR_7.model_class._meta.sorted_fields_names, VAR_35))\n",
"VAR_9[VAR_27] = VAR_30[VAR_39(VAR_35)]\n",
"VAR_38 = VAR_7.model_class(**kwargs)\n",
"for VAR_41 in VAR_7._joins:\n",
"VAR_30[VAR_39(VAR_35)] = {'model': VAR_38}\n",
"VAR_35 = res[VAR_34:VAR_34 + len(VAR_41.dest._meta.sorted_fields_names)]\n",
"VAR_26.append(VAR_38)\n",
"VAR_34 += len(VAR_41.dest._meta.sorted_fields_names)\n",
"VAR_27 += 1\n",
"if len(VAR_31) == VAR_27:\n",
"VAR_32[VAR_41.dest] = VAR_27\n",
"if VAR_35 == [None] * len(VAR_41.dest._meta.sorted_fields_names):\n",
"VAR_31.append(VAR_32[VAR_41.src])\n",
"VAR_9[VAR_27] = {'model': None}\n",
"if not VAR_27 in VAR_9[VAR_31[VAR_27]] or not VAR_39(VAR_35) in VAR_9[\n",
"if not VAR_27 in VAR_9[VAR_31[VAR_27]]:\n",
"VAR_4 = dict(zip(VAR_41.dest._meta.sorted_fields_names, VAR_35))\n",
"VAR_9[VAR_27] = VAR_9[VAR_31[VAR_27]][VAR_27][VAR_39(VAR_35)]\n",
"VAR_9[VAR_31[VAR_27]][VAR_27] = {}\n",
"VAR_9[VAR_31[VAR_27]][VAR_27]['None'] = VAR_9[VAR_27]\n",
"VAR_42 = VAR_41.dest(**kwargs)\n",
"VAR_9[VAR_27] = {'model': VAR_42}\n",
"if not VAR_27 in VAR_9[VAR_31[VAR_27]]:\n",
"VAR_9[VAR_31[VAR_27]][VAR_27] = {}\n",
"VAR_9[VAR_31[VAR_27]][VAR_27][VAR_39(VAR_35)] = VAR_9[VAR_27]\n",
"VAR_9[VAR_27] = VAR_9[VAR_31[VAR_27]][VAR_27][VAR_39(VAR_35)]\n",
"VAR_42 = VAR_9[VAR_27]['model']\n",
"if VAR_41.src in VAR_32:\n",
"if VAR_41.src._meta.many_to_many:\n",
"VAR_43 = VAR_31[VAR_27]\n",
"if not VAR_41.dest._meta.many_to_many:\n",
"VAR_44 = VAR_32[VAR_9[VAR_31[VAR_27]]['model'].__class__]\n",
"if VAR_9[VAR_31[VAR_27]]['model'].isForeignKey(VAR_9[VAR_31[VAR_27]][\n",
"VAR_45 = getattr(VAR_42, VAR_9[VAR_31[VAR_27]]['model']._meta.rel_class[\n VAR_41.dest].related_name)\n",
"VAR_45 = getattr(VAR_42, VAR_9[VAR_31[VAR_27]]['model']._meta.rel_class[\n VAR_41.dest].related_name)\n",
"if VAR_9[VAR_31[VAR_27]]['model'].isReferenceField(VAR_9[VAR_31[VAR_27]][\n",
"VAR_45.append(VAR_9[VAR_31[VAR_44]]['model'])\n",
"VAR_45.append(VAR_9[VAR_31[VAR_27]]['model'])\n",
"VAR_45 = getattr(VAR_9[VAR_31[VAR_27]]['model'], VAR_9[VAR_31[VAR_27]][\n 'model']._meta.rel_class[VAR_41.dest].name)\n",
"VAR_45 = getattr(VAR_9[VAR_31[VAR_44]]['model'], VAR_9[VAR_31[VAR_27]][\n 'model']._meta.rel_class[VAR_41.dest].name)\n",
"setattr(VAR_9[VAR_31[VAR_27]]['model'], VAR_9[VAR_31[VAR_27]]['model'].\n _meta.rel_class[VAR_41.dest].name, VAR_42)\n",
"VAR_45.append(VAR_42)\n",
"VAR_45.append(VAR_42)\n",
"setattr(VAR_42, VAR_9[VAR_31[VAR_27]]['model']._meta.rel_class[VAR_41.dest]\n .related_name, VAR_9[VAR_31[VAR_27]]['model'])\n"
] | [
"def parse_select(self, query, result):...\n",
"class_list = []\n",
"if query._joins:\n",
"current = [None] * len(query._joins) + [None]\n",
"for res in result:\n",
"models_class = {}\n",
"kwargs = dict(zip(query.model_class._meta.sorted_fields_names, res))\n",
"return class_list\n",
"rel = []\n",
"class_list.append(query.model_class(**kwargs))\n",
"pos = {}\n",
"for res in result:\n",
"start = len(query.model_class._meta.sorted_fields_names)\n",
"curr_list = res[:start]\n",
"i = 0\n",
"if not models_class:\n",
"pos[query.model_class] = i\n",
"if not str(curr_list) in models_class:\n",
"rel.append(None)\n",
"kwargs = dict(zip(query.model_class._meta.sorted_fields_names, curr_list))\n",
"current[i] = models_class[str(curr_list)]\n",
"last_model = query.model_class(**kwargs)\n",
"for join in query._joins:\n",
"models_class[str(curr_list)] = {'model': last_model}\n",
"curr_list = res[start:start + len(join.dest._meta.sorted_fields_names)]\n",
"class_list.append(last_model)\n",
"start += len(join.dest._meta.sorted_fields_names)\n",
"i += 1\n",
"if len(rel) == i:\n",
"pos[join.dest] = i\n",
"if curr_list == [None] * len(join.dest._meta.sorted_fields_names):\n",
"rel.append(pos[join.src])\n",
"current[i] = {'model': None}\n",
"if not i in current[rel[i]] or not str(curr_list) in current[rel[i]][i]:\n",
"if not i in current[rel[i]]:\n",
"kwargs = dict(zip(join.dest._meta.sorted_fields_names, curr_list))\n",
"current[i] = current[rel[i]][i][str(curr_list)]\n",
"current[rel[i]][i] = {}\n",
"current[rel[i]][i]['None'] = current[i]\n",
"new_model = join.dest(**kwargs)\n",
"current[i] = {'model': new_model}\n",
"if not i in current[rel[i]]:\n",
"current[rel[i]][i] = {}\n",
"current[rel[i]][i][str(curr_list)] = current[i]\n",
"current[i] = current[rel[i]][i][str(curr_list)]\n",
"new_model = current[i]['model']\n",
"if join.src in pos:\n",
"if join.src._meta.many_to_many:\n",
"middle_table_index = rel[i]\n",
"if not join.dest._meta.many_to_many:\n",
"index = pos[current[rel[i]]['model'].__class__]\n",
"if current[rel[i]]['model'].isForeignKey(current[rel[i]]['model']._meta.\n",
"x = getattr(new_model, current[rel[i]]['model']._meta.rel_class[join.dest].\n related_name)\n",
"x = getattr(new_model, current[rel[i]]['model']._meta.rel_class[join.dest].\n related_name)\n",
"if current[rel[i]]['model'].isReferenceField(current[rel[i]]['model']._meta\n",
"x.append(current[rel[index]]['model'])\n",
"x.append(current[rel[i]]['model'])\n",
"x = getattr(current[rel[i]]['model'], current[rel[i]]['model']._meta.\n rel_class[join.dest].name)\n",
"x = getattr(current[rel[index]]['model'], current[rel[i]]['model']._meta.\n rel_class[join.dest].name)\n",
"setattr(current[rel[i]]['model'], current[rel[i]]['model']._meta.rel_class[\n join.dest].name, new_model)\n",
"x.append(new_model)\n",
"x.append(new_model)\n",
"setattr(new_model, current[rel[i]]['model']._meta.rel_class[join.dest].\n related_name, current[rel[i]]['model'])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"For",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Expr'",
"AugAssign'",
"AugAssign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_23(self, VAR_29):...\n",
""
] | [
"def is_localhost(self, hostname):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_19(VAR_16):...\n",
"VAR_37 = os.path.join(VAR_16, 'README.md')\n",
"if os.path.isfile(VAR_37):\n",
"return 'LICENSE' in open(VAR_37).read()\n",
"return False\n"
] | [
"def license_mentionned_in_readme(path):...\n",
"readme_path = os.path.join(path, 'README.md')\n",
"if os.path.isfile(readme_path):\n",
"return 'LICENSE' in open(readme_path).read()\n",
"return False\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def __getstate__(self):...\n",
"return self.serialize()\n"
] | [
"def __getstate__(self):...\n",
"return self.serialize()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@property...\n",
"return self.settings['db']\n"
] | [
"@property...\n",
"return self.settings['db']\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_19(self, VAR_16, VAR_20, VAR_21=False, **VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"self._eql_execute('volume', 'select', VAR_16['name'], 'access', 'delete', '1')\n",
"VAR_0.error(_('Failed to terminate connection to volume %s'), VAR_16['name'])\n"
] | [
"def terminate_connection(self, volume, connector, force=False, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"self._eql_execute('volume', 'select', volume['name'], 'access', 'delete', '1')\n",
"LOG.error(_('Failed to terminate connection to volume %s'), volume['name'])\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'"
] |
[
"@eqlx.with_timeout...\n",
"return 'no timeout'\n"
] | [
"@eqlx.with_timeout...\n",
"return 'no timeout'\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_30(VAR_13):...\n",
"self.fail('authenticate should not be called')\n"
] | [
"def skipped(request):...\n",
"self.fail('authenticate should not be called')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_9(self):...\n",
"self.assertTrue(os.path.exists(fp.name),\n 'Temporary file should exist within the context.')\n",
"self.assertTrue(os.path.exists(fp.name),\n 'Temporary file should exist outside of context if cleanup=False.')\n",
"os.unlink(fp.name)\n"
] | [
"def test_temporary_file_without_cleanup(self):...\n",
"self.assertTrue(os.path.exists(fp.name),\n 'Temporary file should exist within the context.')\n",
"self.assertTrue(os.path.exists(fp.name),\n 'Temporary file should exist outside of context if cleanup=False.')\n",
"os.unlink(fp.name)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_17(self):...\n",
"def FUNC_23(VAR_11):...\n",
"return '%s> ' % self.configuration.eqlx_group_name\n"
] | [
"def test_get_output(self):...\n",
"def _fake_recv(ignore_arg):...\n",
"return '%s> ' % self.configuration.eqlx_group_name\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_25(VAR_32, VAR_34=0):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_67 = list(VAR_32.items())\n",
"VAR_67.sort(FUNC_23)\n",
"return VAR_34 and VAR_67[:VAR_34] or VAR_67\n"
] | [
"def _sort_kw_matches(skw_matches, limit=0):...\n",
"\"\"\"docstring\"\"\"\n",
"sorted_keywords = list(skw_matches.items())\n",
"sorted_keywords.sort(_skw_matches_comparator)\n",
"return limit and sorted_keywords[:limit] or sorted_keywords\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_4(VAR_6):...\n",
"VAR_8 = VAR_6.submission.exercise if VAR_6.submission else None\n",
"return {'id': VAR_6.id, 'submission_id': VAR_6.submission.id if VAR_6.\n submission else 0, 'name': '{} {}, {}'.format(VAR_6.course_instance.\n course.code, str(VAR_8.parent) if VAR_8 and VAR_8.parent else VAR_6.\n course_instance.instance_name, str(VAR_8) if VAR_8 else VAR_6.subject),\n 'link': VAR_6.get_display_url()}\n"
] | [
"def notification_entry(n):...\n",
"exercise = n.submission.exercise if n.submission else None\n",
"return {'id': n.id, 'submission_id': n.submission.id if n.submission else 0,\n 'name': '{} {}, {}'.format(n.course_instance.course.code, str(exercise.\n parent) if exercise and exercise.parent else n.course_instance.\n instance_name, str(exercise) if exercise else n.subject), 'link': n.\n get_display_url()}\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"from osv import fields, osv\n",
"from tools.translate import _\n",
"VAR_0 = 'product.product'\n",
"def FUNC_0(self, VAR_1, VAR_2, VAR_3, VAR_4=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_4 is None:\n",
"VAR_4 = {}\n",
"VAR_17 = self.pool.get('product.product').browse(VAR_1, VAR_2, VAR_3, VAR_4\n =context)\n",
"VAR_18 = (VAR_17.property_stock_account_input and VAR_17.\n property_stock_account_input.id or False)\n",
"if not VAR_18:\n",
"VAR_18 = (VAR_17.categ_id.property_stock_account_input_categ and VAR_17.\n categ_id.property_stock_account_input_categ.id or False)\n",
"VAR_19 = (VAR_17.property_stock_account_output and VAR_17.\n property_stock_account_output.id or False)\n",
"if not VAR_19:\n",
"VAR_19 = (VAR_17.categ_id.property_stock_account_output_categ and VAR_17.\n categ_id.property_stock_account_output_categ.id or False)\n",
"VAR_20 = (VAR_17.categ_id.property_stock_journal and VAR_17.categ_id.\n property_stock_journal.id or False)\n",
"VAR_21 = (VAR_17.categ_id.property_stock_variation and VAR_17.categ_id.\n property_stock_variation.id or False)\n",
"return {'stock_account_input': VAR_18, 'stock_account_output': VAR_19,\n 'stock_journal': VAR_20, 'property_stock_variation': VAR_21}\n"
] | [
"from osv import fields, osv\n",
"from tools.translate import _\n",
"_inherit = 'product.product'\n",
"def get_product_accounts(self, cr, uid, product_id, context=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if context is None:\n",
"context = {}\n",
"product_obj = self.pool.get('product.product').browse(cr, uid, product_id,\n context=context)\n",
"stock_input_acc = (product_obj.property_stock_account_input and product_obj\n .property_stock_account_input.id or False)\n",
"if not stock_input_acc:\n",
"stock_input_acc = (product_obj.categ_id.property_stock_account_input_categ and\n product_obj.categ_id.property_stock_account_input_categ.id or False)\n",
"stock_output_acc = (product_obj.property_stock_account_output and\n product_obj.property_stock_account_output.id or False)\n",
"if not stock_output_acc:\n",
"stock_output_acc = (product_obj.categ_id.\n property_stock_account_output_categ and product_obj.categ_id.\n property_stock_account_output_categ.id or False)\n",
"journal_id = (product_obj.categ_id.property_stock_journal and product_obj.\n categ_id.property_stock_journal.id or False)\n",
"account_variation = (product_obj.categ_id.property_stock_variation and\n product_obj.categ_id.property_stock_variation.id or False)\n",
"return {'stock_account_input': stock_input_acc, 'stock_account_output':\n stock_output_acc, 'stock_journal': journal_id,\n 'property_stock_variation': account_variation}\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"Assign'",
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_16(self, VAR_15):...\n",
"\"\"\"docstring\"\"\"\n",
"for o in self.products:\n",
"if o.match(VAR_15):\n",
"return False\n",
"return True\n"
] | [
"def is_producer(self, requested_output):...\n",
"\"\"\"docstring\"\"\"\n",
"for o in self.products:\n",
"if o.match(requested_output):\n",
"return False\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"For",
"Condition",
"Return'",
"Return'"
] |
[
"def __init__(self, VAR_2, VAR_10, VAR_11):...\n",
"self.window = VAR_2\n",
"self.parent_widget = VAR_10\n",
"self.database_filepath = VAR_11\n",
"self.unlock_database()\n"
] | [
"def __init__(self, window, widget, filepath):...\n",
"self.window = window\n",
"self.parent_widget = widget\n",
"self.database_filepath = filepath\n",
"self.unlock_database()\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"@VAR_2.patch('core.common.utils.c_onboarding_status')...\n",
"VAR_5 = VAR_2.return_value\n",
"VAR_5.find_one.return_value = VAR_0\n",
"self.assertEqual(get_onboarding_percentage(1), VAR_1)\n"
] | [
"@mock.patch('core.common.utils.c_onboarding_status')...\n",
"_mock = mock.return_value\n",
"_mock.find_one.return_value = steps\n",
"self.assertEqual(get_onboarding_percentage(1), result)\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_8(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_14, VAR_15 = new_webui_log_file()\n",
"self._webui_url, VAR_16 = ray.services.start_ui(self._redis_address, VAR_14\n =stdout_file, VAR_15=stderr_file)\n",
"assert VAR_13.PROCESS_TYPE_WEB_UI not in self.all_processes\n",
"if VAR_16 is not None:\n",
"self.all_processes[VAR_13.PROCESS_TYPE_WEB_UI] = [VAR_16]\n"
] | [
"def start_ui(self):...\n",
"\"\"\"docstring\"\"\"\n",
"stdout_file, stderr_file = new_webui_log_file()\n",
"self._webui_url, process_info = ray.services.start_ui(self._redis_address,\n stdout_file=stdout_file, stderr_file=stderr_file)\n",
"assert ray_constants.PROCESS_TYPE_WEB_UI not in self.all_processes\n",
"if process_info is not None:\n",
"self.all_processes[ray_constants.PROCESS_TYPE_WEB_UI] = [process_info]\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assert'",
"Condition",
"Assign'"
] |
[
"def FUNC_2(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_18 = self.data_document.data_group.group_type.code\n",
"if VAR_18 in ['CP', 'HH']:\n",
"return reverse('qa_chemicalpresence_index')\n",
"return reverse('qa_extractionscript_index')\n"
] | [
"def get_qa_index_path(self):...\n",
"\"\"\"docstring\"\"\"\n",
"group_type_code = self.data_document.data_group.group_type.code\n",
"if group_type_code in ['CP', 'HH']:\n",
"return reverse('qa_chemicalpresence_index')\n",
"return reverse('qa_extractionscript_index')\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_26(self, VAR_41, VAR_42):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
"def symlink(self, targetPath, linkPath):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
0,
0
] | [
"FunctionDef'",
"Docstring"
] |
[
"def FUNC_5(self, VAR_13, VAR_14=1):...\n",
"utils.check_ssh_injection(VAR_13)\n",
"VAR_11 = ' '.join(VAR_13)\n",
"if not self.sshpool:\n",
"VAR_39 = self.configuration.san_password\n",
"VAR_43 = VAR_14\n",
"VAR_0.error(_('Error running SSH command: %s') % VAR_11)\n",
"VAR_40 = self.configuration.san_private_key\n",
"while VAR_14 > 0:\n",
"VAR_41 = self.configuration.ssh_min_pool_conn\n",
"VAR_14 -= 1\n",
"VAR_49 = _(\n \"SSH Command failed after '%(total_attempts)r' attempts : '%(command)s'\"\n ) % {'total_attempts': VAR_43, 'command': VAR_11}\n",
"VAR_42 = self.configuration.ssh_max_pool_conn\n",
"VAR_0.info(_('EQL-driver: executing \"%s\"') % VAR_11)\n",
"VAR_0.exception(e)\n",
"self.sshpool = utils.SSHPool(self.configuration.san_ip, self.configuration.\n san_ssh_port, self.configuration.ssh_conn_timeout, self.configuration.\n san_login, VAR_39=password, VAR_40=privatekey, VAR_41=min_size, VAR_42=\n max_size)\n",
"return self._ssh_execute(VAR_10, VAR_11, VAR_23=self.configuration.\n eqlx_cli_timeout)\n",
"greenthread.sleep(random.randint(20, 500) / 100.0)\n"
] | [
"def _run_ssh(self, cmd_list, attempts=1):...\n",
"utils.check_ssh_injection(cmd_list)\n",
"command = ' '.join(cmd_list)\n",
"if not self.sshpool:\n",
"password = self.configuration.san_password\n",
"total_attempts = attempts\n",
"LOG.error(_('Error running SSH command: %s') % command)\n",
"privatekey = self.configuration.san_private_key\n",
"while attempts > 0:\n",
"min_size = self.configuration.ssh_min_pool_conn\n",
"attempts -= 1\n",
"msg = _(\n \"SSH Command failed after '%(total_attempts)r' attempts : '%(command)s'\"\n ) % {'total_attempts': total_attempts, 'command': command}\n",
"max_size = self.configuration.ssh_max_pool_conn\n",
"LOG.info(_('EQL-driver: executing \"%s\"') % command)\n",
"LOG.exception(e)\n",
"self.sshpool = utils.SSHPool(self.configuration.san_ip, self.configuration.\n san_ssh_port, self.configuration.ssh_conn_timeout, self.configuration.\n san_login, password=password, privatekey=privatekey, min_size=min_size,\n max_size=max_size)\n",
"return self._ssh_execute(ssh, command, timeout=self.configuration.\n eqlx_cli_timeout)\n",
"greenthread.sleep(random.randint(20, 500) / 100.0)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"AugAssign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Return'",
"Expr'"
] |
[
"def FUNC_6():...\n",
"VAR_13.throw(_('Invalid Search Field'), VAR_13.DataError)\n"
] | [
"def _raise_exception():...\n",
"frappe.throw(_('Invalid Search Field'), frappe.DataError)\n"
] | [
0,
4
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_25(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = {'SEVERITY': {'LOW': 8}, 'CONFIDENCE': {'MEDIUM': 8}}\n",
"self.check_example('os-exec.py', VAR_2)\n"
] | [
"def test_os_exec(self):...\n",
"\"\"\"docstring\"\"\"\n",
"expect = {'SEVERITY': {'LOW': 8}, 'CONFIDENCE': {'MEDIUM': 8}}\n",
"self.check_example('os-exec.py', expect)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"from reddit_base import RedditController\n",
"from r2.lib.pages import BoringPage, ShowMeetup, NewMeetup, EditMeetup, PaneStack, CommentListing, LinkInfoPage, CommentReplyBox, NotEnoughKarmaToPost\n",
"from validator import validate, VUser, VRequired, VMeetup, VEditMeetup, VFloat, ValueOrBlank, ValidIP, VMenu, VCreateMeetup\n",
"from errors import errors\n",
"from r2.lib.jsonresponse import Json\n",
"from routes.util import url_for\n",
"from r2.models import Meetup, Link, Subreddit, CommentBuilder\n",
"from r2.models.listing import NestedListing\n",
"from r2.lib.menus import CommentSortMenu, NumCommentsMenu\n",
"from r2.lib.filters import python_websafe\n",
"from mako.template import Template\n",
"from pylons.i18n import _\n",
"from pylons import c, g, request\n",
"import json\n",
"def FUNC_0(VAR_0):...\n",
"VAR_1 = Template(filename='r2/templates/showmeetup.html', output_encoding=\n 'utf-8', encoding_errors='replace')\n",
"VAR_2 = VAR_1.get_def('meetup_info').render_unicode(VAR_0=meetup)\n",
"VAR_3 = url_for(controller='meetups', action='show', id=meetup._id36)\n",
"VAR_4 = python_websafe(VAR_0.title)\n",
"VAR_5 = u\"<h2>Discussion article for the meetup : <a href='%s'>%s</a></h2>\" % (\n VAR_3, VAR_4)\n",
"return VAR_5 + VAR_2 + VAR_5\n"
] | [
"from reddit_base import RedditController\n",
"from r2.lib.pages import BoringPage, ShowMeetup, NewMeetup, EditMeetup, PaneStack, CommentListing, LinkInfoPage, CommentReplyBox, NotEnoughKarmaToPost\n",
"from validator import validate, VUser, VRequired, VMeetup, VEditMeetup, VFloat, ValueOrBlank, ValidIP, VMenu, VCreateMeetup\n",
"from errors import errors\n",
"from r2.lib.jsonresponse import Json\n",
"from routes.util import url_for\n",
"from r2.models import Meetup, Link, Subreddit, CommentBuilder\n",
"from r2.models.listing import NestedListing\n",
"from r2.lib.menus import CommentSortMenu, NumCommentsMenu\n",
"from r2.lib.filters import python_websafe\n",
"from mako.template import Template\n",
"from pylons.i18n import _\n",
"from pylons import c, g, request\n",
"import json\n",
"def meetup_article_text(meetup):...\n",
"t = Template(filename='r2/templates/showmeetup.html', output_encoding=\n 'utf-8', encoding_errors='replace')\n",
"res = t.get_def('meetup_info').render_unicode(meetup=meetup)\n",
"url = url_for(controller='meetups', action='show', id=meetup._id36)\n",
"title = python_websafe(meetup.title)\n",
"hdr = u\"<h2>Discussion article for the meetup : <a href='%s'>%s</a></h2>\" % (\n url, title)\n",
"return hdr + res + hdr\n"
] | [
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_9(self, VAR_9, VAR_11=None):...\n",
"VAR_1 = get_and_check_project(VAR_9, VAR_11, ('change_project',))\n",
"VAR_27 = [VAR_35 for filesList in map(lambda key: VAR_9.FILES.getlist(key),\n [keys for keys in VAR_9.FILES]) for VAR_35 in filesList]\n",
"VAR_28 = models.Task.create_from_images(VAR_27, VAR_1)\n",
"if VAR_28 is not None:\n",
"return Response({'id': VAR_28.id}, status=status.HTTP_201_CREATED)\n"
] | [
"def create(self, request, project_pk=None):...\n",
"project = get_and_check_project(request, project_pk, ('change_project',))\n",
"files = [file for filesList in map(lambda key: request.FILES.getlist(key),\n [keys for keys in request.FILES]) for file in filesList]\n",
"task = models.Task.create_from_images(files, project)\n",
"if task is not None:\n",
"return Response({'id': task.id}, status=status.HTTP_201_CREATED)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_26(self, VAR_41, VAR_42):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
"def symlink(self, targetPath, linkPath):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
0,
0
] | [
"FunctionDef'",
"Docstring"
] |
[
"\"\"\"string\"\"\"\n",
"from flask import Flask, jsonify, request\n",
"from flask_restful import Resource, Api\n",
"from flask_cors import CORS\n",
"import db_interac\n",
"import utils\n",
"VAR_0 = Flask(__name__)\n",
"VAR_1 = Api(VAR_0)\n",
"CORS(VAR_0)\n",
"def FUNC_0(self):...\n",
"\"\"\"docstring\"\"\"\n",
"\"\"\"string\"\"\"\n",
"VAR_2 = request.form.get('userId')\n",
"if VAR_2 is None:\n",
"VAR_28 = db_interac.get_user_profiles()\n",
"VAR_3 = db_interac.get_user_profile(VAR_2)\n",
"if not VAR_28:\n",
"VAR_4 = {}\n",
"return {}, 500\n",
"VAR_29 = []\n",
"if VAR_3[0] == False:\n",
"for user in VAR_28:\n",
"VAR_4['error'] = 'error adding profile'\n",
"VAR_4['username'] = VAR_3[1]\n",
"VAR_29.append({'id': user[0], 'username': user[1], 'firstName': user[2],\n 'lastName': user[3], 'bio': user[4]})\n",
"return VAR_29, 200\n",
"return VAR_4, 200\n",
"VAR_4['firstName'] = VAR_3[2]\n",
"VAR_4['lastName'] = VAR_3[3]\n",
"VAR_4['bio'] = VAR_3[4]\n",
"VAR_4['messages'] = VAR_3[5]\n"
] | [
"\"\"\"A basic Flask app which we can build off of.\n\nHandles basic http requests in a simple way.\nOnly throws 404 errors for now.\n\nNote: Pretty much everything should be changed.\n Also see about adding arguments to the requests.\n Should also change the names of the classes.\n\n The classes may not need all of these methods.\n\"\"\"\n",
"from flask import Flask, jsonify, request\n",
"from flask_restful import Resource, Api\n",
"from flask_cors import CORS\n",
"import db_interac\n",
"import utils\n",
"app = Flask(__name__)\n",
"api = Api(app)\n",
"CORS(app)\n",
"def get(self):...\n",
"\"\"\"docstring\"\"\"\n",
"\"\"\"\n # Check the request comes from appropriate location.\n if not utils.validate_ip(request.remote_addr)\n return {}, 403\n \"\"\"\n",
"user_id = request.form.get('userId')\n",
"if user_id is None:\n",
"users = db_interac.get_user_profiles()\n",
"user_profile = db_interac.get_user_profile(user_id)\n",
"if not users:\n",
"return_obj = {}\n",
"return {}, 500\n",
"response_obj = []\n",
"if user_profile[0] == False:\n",
"for user in users:\n",
"return_obj['error'] = 'error adding profile'\n",
"return_obj['username'] = user_profile[1]\n",
"response_obj.append({'id': user[0], 'username': user[1], 'firstName': user[\n 2], 'lastName': user[3], 'bio': user[4]})\n",
"return response_obj, 200\n",
"return return_obj, 200\n",
"return_obj['firstName'] = user_profile[2]\n",
"return_obj['lastName'] = user_profile[3]\n",
"return_obj['bio'] = user_profile[4]\n",
"return_obj['messages'] = user_profile[5]\n"
] | [
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Condition",
"For",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_34(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = {'SEVERITY': {'LOW': 5}, 'CONFIDENCE': {'HIGH': 5}}\n",
"self.check_example('skip.py', VAR_2)\n"
] | [
"def test_skip(self):...\n",
"\"\"\"docstring\"\"\"\n",
"expect = {'SEVERITY': {'LOW': 5}, 'CONFIDENCE': {'HIGH': 5}}\n",
"self.check_example('skip.py', expect)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"def FUNC_8(self, VAR_3):...\n",
"VAR_8 = '%s-%s' % (self.__cluster_name, VAR_3)\n",
"VAR_5 = [{'cloudProvider': 'gce', 'asgName': VAR_8, 'serverGroupName':\n VAR_8, 'region': self.TEST_REGION, 'zone': self.TEST_ZONE, 'type':\n 'destroyServerGroup', 'regions': [self.TEST_REGION], 'zones': [self.\n TEST_ZONE], 'credentials': self.bindings['GCE_CREDENTIALS'], 'user':\n 'integration-tests'}]\n",
"VAR_6 = gcp.GceContractBuilder(self.gce_observer)\n",
"VAR_6.new_clause_builder('Server Group Destroyed', retryable_for_secs=90\n ).list_resources('managed-instance-groups').excludes_path_value(\n 'baseInstanceName', VAR_8)\n",
"VAR_7 = self.agent.make_json_payload_from_kwargs(VAR_5=job, description=\n 'Server Group Test - destroy server group', application=self.TEST_APP)\n",
"return st.OperationContract(self.new_post_operation(title=\n 'destroy_server_group', data=payload, path=self.__path), contract=\n builder.build())\n"
] | [
"def destroy_server_group(self, version):...\n",
"serverGroupName = '%s-%s' % (self.__cluster_name, version)\n",
"job = [{'cloudProvider': 'gce', 'asgName': serverGroupName,\n 'serverGroupName': serverGroupName, 'region': self.TEST_REGION, 'zone':\n self.TEST_ZONE, 'type': 'destroyServerGroup', 'regions': [self.\n TEST_REGION], 'zones': [self.TEST_ZONE], 'credentials': self.bindings[\n 'GCE_CREDENTIALS'], 'user': 'integration-tests'}]\n",
"builder = gcp.GceContractBuilder(self.gce_observer)\n",
"builder.new_clause_builder('Server Group Destroyed', retryable_for_secs=90\n ).list_resources('managed-instance-groups').excludes_path_value(\n 'baseInstanceName', serverGroupName)\n",
"payload = self.agent.make_json_payload_from_kwargs(job=job, description=\n 'Server Group Test - destroy server group', application=self.TEST_APP)\n",
"return st.OperationContract(self.new_post_operation(title=\n 'destroy_server_group', data=payload, path=self.__path), contract=\n builder.build())\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Return'"
] |
[
"@auth.route('/reset/<token>', methods=['GET', 'POST'])...\n",
"if not current_user.is_anonymous:\n",
"return redirect(url_for('main.index'))\n",
"VAR_1 = PasswordResetForm()\n",
"if VAR_1.validate_on_submit():\n",
"VAR_2 = User.query.filter_by(email=form.email.data).first()\n",
"return render_template('auth/reset_password.html', VAR_1=form)\n",
"if VAR_2 is None:\n",
"return redirect(url_for('main.index'))\n",
"if VAR_2.reset_password(VAR_0, VAR_1.password.data):\n",
"flash('Your password has been updated.')\n",
"return redirect(url_for('main.index'))\n",
"return redirect(url_for('auth.login'))\n"
] | [
"@auth.route('/reset/<token>', methods=['GET', 'POST'])...\n",
"if not current_user.is_anonymous:\n",
"return redirect(url_for('main.index'))\n",
"form = PasswordResetForm()\n",
"if form.validate_on_submit():\n",
"user = User.query.filter_by(email=form.email.data).first()\n",
"return render_template('auth/reset_password.html', form=form)\n",
"if user is None:\n",
"return redirect(url_for('main.index'))\n",
"if user.reset_password(token, form.password.data):\n",
"flash('Your password has been updated.')\n",
"return redirect(url_for('main.index'))\n",
"return redirect(url_for('auth.login'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Condition",
"Return'",
"Condition",
"Expr'",
"Return'",
"Return'"
] |
[
"def FUNC_30(self, VAR_78):...\n",
"def FUNC_39(VAR_101):...\n",
"VAR_101.version = VAR_78\n",
"return VAR_101\n"
] | [
"def version(self, version):...\n",
"def decorate(ruleinfo):...\n",
"ruleinfo.version = version\n",
"return ruleinfo\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_17(self, VAR_4, VAR_12=True, VAR_11=None):...\n",
"\"\"\"docstring\"\"\"\n"
] | [
"def make_target_proxy(self, accessor, manage_back_references=True, options=None...\n",
"\"\"\"docstring\"\"\"\n"
] | [
0,
0
] | [
"FunctionDef'",
"Docstring"
] |
[
"def FUNC_4(self, VAR_4):...\n",
"if self.log_dir is None:\n",
"return\n",
"VAR_19 = '%s_%s.log' % (VAR_4.start_time, VAR_4.__class__.__name__)\n",
"VAR_20 = os.path.join(self.log_dir, VAR_19)\n",
"VAR_21 = os.path.join(self.log_dir, VAR_4.__class__.__name__)\n",
"VAR_4.store_to_file(fd)\n",
"os.remove(VAR_21)\n",
"os.symlink(VAR_19, VAR_21)\n"
] | [
"def store_to_file(self, request):...\n",
"if self.log_dir is None:\n",
"return\n",
"filename = '%s_%s.log' % (request.start_time, request.__class__.__name__)\n",
"filepath = os.path.join(self.log_dir, filename)\n",
"linkpath = os.path.join(self.log_dir, request.__class__.__name__)\n",
"request.store_to_file(fd)\n",
"os.remove(linkpath)\n",
"os.symlink(filename, linkpath)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_18(VAR_32, VAR_33, VAR_5=False):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_60 = {}\n",
"VAR_52 = {}\n",
"for VAR_93, _ in VAR_32:\n",
"for VAR_83 in VAR_93.fieldcodes:\n",
"for VAR_94, _ in VAR_33:\n",
"VAR_60.setdefault(VAR_83, set()).add(VAR_93.output(VAR_5))\n",
"if len(VAR_94.fieldcodes):\n",
"for VAR_83, VAR_36 in VAR_60.items():\n",
"for VAR_83 in VAR_94.fieldcodes:\n",
"for VAR_62 in VAR_94.getComponents():\n",
"VAR_52[VAR_83] = ', '.join(VAR_36)\n",
"return VAR_52\n",
"VAR_60.setdefault(VAR_83, set()).add(VAR_94.output(VAR_5))\n",
"for VAR_83 in VAR_62.fieldcodes:\n",
"VAR_60.setdefault(VAR_83, set()).add('%s*' % VAR_94.output(VAR_5))\n",
"VAR_60.setdefault('*', set()).add(VAR_62.output(VAR_5))\n"
] | [
"def _get_fieldcodes(skw_matches, ckw_matches, spires=False):...\n",
"\"\"\"docstring\"\"\"\n",
"fieldcodes = {}\n",
"output = {}\n",
"for skw, _ in skw_matches:\n",
"for fieldcode in skw.fieldcodes:\n",
"for ckw, _ in ckw_matches:\n",
"fieldcodes.setdefault(fieldcode, set()).add(skw.output(spires))\n",
"if len(ckw.fieldcodes):\n",
"for fieldcode, keywords in fieldcodes.items():\n",
"for fieldcode in ckw.fieldcodes:\n",
"for kw in ckw.getComponents():\n",
"output[fieldcode] = ', '.join(keywords)\n",
"return output\n",
"fieldcodes.setdefault(fieldcode, set()).add(ckw.output(spires))\n",
"for fieldcode in kw.fieldcodes:\n",
"fieldcodes.setdefault(fieldcode, set()).add('%s*' % ckw.output(spires))\n",
"fieldcodes.setdefault('*', set()).add(kw.output(spires))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"For",
"For",
"For",
"Expr'",
"Condition",
"For",
"For",
"For",
"Assign'",
"Return'",
"Expr'",
"For",
"Expr'",
"Expr'"
] |
[
"def FUNC_6(self):...\n",
"VAR_14 = ['8.9.10.11', '9.10.11.12:4433']\n",
"VAR_15 = self.new_mocked_cluster(VAR_14, FUNC_0)\n",
"self._assert_providers(VAR_15, [(p, 'https://%s' % p) for p in VAR_14])\n"
] | [
"def test_conf_providers_no_scheme(self):...\n",
"conf_managers = ['8.9.10.11', '9.10.11.12:4433']\n",
"api = self.new_mocked_cluster(conf_managers, _validate_conn_up)\n",
"self._assert_providers(api, [(p, 'https://%s' % p) for p in conf_managers])\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_11(self):...\n",
"VAR_14 = []\n",
"for VAR_16 in self.FORM:\n",
"if VAR_16 in self.meta.iptc_keys:\n",
"return VAR_14\n",
"VAR_14.append((VAR_16, self.get_safe_value(self.meta, VAR_16)))\n",
"VAR_14.append((VAR_16, ''))\n"
] | [
"def get_form_fields(self):...\n",
"ret = []\n",
"for field in self.FORM:\n",
"if field in self.meta.iptc_keys:\n",
"return ret\n",
"ret.append((field, self.get_safe_value(self.meta, field)))\n",
"ret.append((field, ''))\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Condition",
"Return'",
"Expr'",
"Expr'"
] |
[
"def FUNC_3(self, VAR_4):...\n",
"if not VAR_4:\n",
"return None\n",
"VAR_30 = VAR_4\n",
"if isinstance(VAR_30, str):\n",
"VAR_30 = bytes(VAR_30, encoding='utf-8')\n",
"return pickle.loads(VAR_30)\n",
"return super().to_python(VAR_4)\n",
"VAR_30 = base64.b64decode(VAR_30)\n"
] | [
"def to_python(self, value):...\n",
"if not value:\n",
"return None\n",
"_value = value\n",
"if isinstance(_value, str):\n",
"_value = bytes(_value, encoding='utf-8')\n",
"return pickle.loads(_value)\n",
"return super().to_python(value)\n",
"_value = base64.b64decode(_value)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Return'",
"Assign'"
] |
[
"def FUNC_13(self, *VAR_13, **VAR_14):...\n",
"for VAR_9 in VAR_13:\n",
"self._set_log_item(VAR_9)\n",
"for VAR_10, VAR_9 in VAR_14.items():\n",
"self._set_log_item(VAR_9, VAR_10=name)\n"
] | [
"def set_log(self, *logs, **kwlogs):...\n",
"for item in logs:\n",
"self._set_log_item(item)\n",
"for name, item in kwlogs.items():\n",
"self._set_log_item(item, name=name)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Expr'",
"For",
"Expr'"
] |
[
"def __repr__(self):...\n",
"return super().__repr__() + 'Backend: ' + self.backend + '\\n'\n"
] | [
"def __repr__(self):...\n",
"return super().__repr__() + 'Backend: ' + self.backend + '\\n'\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@VAR_0.route('/countries/<country_id>', methods=['GET'])...\n",
"return get_by_id('countries', VAR_1)\n"
] | [
"@locations.route('/countries/<country_id>', methods=['GET'])...\n",
"return get_by_id('countries', country_id)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"import json\n",
"import functools\n",
"import zope.security.interfaces\n",
"from twisted.internet import defer\n",
"from twisted.python import log, failure\n",
"from twisted.web import resource\n",
"from twisted.web.server import NOT_DONE_YET\n",
"from zope.component import queryAdapter, getUtility\n",
"from opennode.oms.config import get_config\n",
"from opennode.oms.endpoint.httprest.base import IHttpRestView, IHttpRestSubViewFactory\n",
"from opennode.oms.model.traversal import traverse_path\n",
"from opennode.oms.security.checker import proxy_factory\n",
"from opennode.oms.security.interaction import new_interaction\n",
"from opennode.oms.util import blocking_yield\n",
"from opennode.oms.zodb import db\n",
"def __init__(self, VAR_2=None, *VAR_3, **VAR_4):...\n",
"super(CLASS_1, self).__init__(*VAR_3, **kwargs)\n",
"self.body = VAR_2\n",
"@property...\n",
"@property...\n",
"VAR_5 = {}\n",
"VAR_6 = 404\n",
"VAR_7 = 'Not Found'\n",
"VAR_6 = 501\n",
"VAR_7 = 'Not Implemented'\n",
"def __init__(self, VAR_8, *VAR_3, **VAR_4):...\n",
"super(CLASS_4, self).__init__(*VAR_3, **kwargs)\n",
"self.url = VAR_8\n",
"@property...\n",
"return {'Location': self.url}\n"
] | [
"import json\n",
"import functools\n",
"import zope.security.interfaces\n",
"from twisted.internet import defer\n",
"from twisted.python import log, failure\n",
"from twisted.web import resource\n",
"from twisted.web.server import NOT_DONE_YET\n",
"from zope.component import queryAdapter, getUtility\n",
"from opennode.oms.config import get_config\n",
"from opennode.oms.endpoint.httprest.base import IHttpRestView, IHttpRestSubViewFactory\n",
"from opennode.oms.model.traversal import traverse_path\n",
"from opennode.oms.security.checker import proxy_factory\n",
"from opennode.oms.security.interaction import new_interaction\n",
"from opennode.oms.util import blocking_yield\n",
"from opennode.oms.zodb import db\n",
"def __init__(self, body=None, *args, **kwargs):...\n",
"super(HttpStatus, self).__init__(*args, **kwargs)\n",
"self.body = body\n",
"@property...\n",
"@property...\n",
"headers = {}\n",
"status_code = 404\n",
"status_description = 'Not Found'\n",
"status_code = 501\n",
"status_description = 'Not Implemented'\n",
"def __init__(self, url, *args, **kwargs):...\n",
"super(AbstractRedirect, self).__init__(*args, **kwargs)\n",
"self.url = url\n",
"@property...\n",
"return {'Location': self.url}\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Expr'",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_4(self, VAR_5):...\n",
"for oldg in VAR_5.get_ancestors():\n",
"if oldg not in self.groups:\n",
"if VAR_5 not in self.groups:\n",
"self.add_group(oldg)\n",
"self.groups.append(VAR_5)\n"
] | [
"def add_group(self, group):...\n",
"for oldg in group.get_ancestors():\n",
"if oldg not in self.groups:\n",
"if group not in self.groups:\n",
"self.add_group(oldg)\n",
"self.groups.append(group)\n"
] | [
0,
0,
0,
0,
1,
0
] | [
"FunctionDef'",
"For",
"Condition",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_5(self):...\n",
"return \"datetime('now')\"\n"
] | [
"def sqlNowCall(self):...\n",
"return \"datetime('now')\"\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_4(self):...\n",
"VAR_11 = ExtractedChemicalIndex(meta={'id': self.id}, chem_name=self.\n raw_chem_name, raw_cas=self.raw_cas, raw_chem_name=self.raw_chem_name,\n facet_model_name='Extracted Chemical')\n",
"VAR_11.save()\n",
"return VAR_11.to_dict(include_meta=True)\n"
] | [
"def indexing(self):...\n",
"obj = ExtractedChemicalIndex(meta={'id': self.id}, chem_name=self.\n raw_chem_name, raw_cas=self.raw_cas, raw_chem_name=self.raw_chem_name,\n facet_model_name='Extracted Chemical')\n",
"obj.save()\n",
"return obj.to_dict(include_meta=True)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Return'"
]