lines (sequence, lengths 1–444) | raw_lines (sequence, lengths 1–444) | label (sequence, lengths 1–444) | type (sequence, lengths 1–444)
---|---|---|---|
[
"def FUNC_1(self, VAR_2):...\n",
"VAR_7 = self.rides[VAR_2 * 5:min(VAR_2 * 5 + 5, len(self.rides))]\n",
"for ride in VAR_7:\n",
"print(str(ride[0]) + '.', end='')\n",
"if VAR_2 * 5 + 5 < len(self.rides):\n",
"print(ride)\n",
"VAR_10 = input('string')\n",
"VAR_10 = input(\n \"To book a member on a ride, please enter 'b'. To exit, press 'e': \")\n",
"if VAR_10 == 'y':\n",
"if VAR_10 == 'b':\n",
"self.display_rides(VAR_2 + 1)\n",
"self.book_ride()\n"
] | [
"def display_rides(self, page_num):...\n",
"page = self.rides[page_num * 5:min(page_num * 5 + 5, len(self.rides))]\n",
"for ride in page:\n",
"print(str(ride[0]) + '.', end='')\n",
"if page_num * 5 + 5 < len(self.rides):\n",
"print(ride)\n",
"user_input = input(\n \"To book a member on a ride, please enter 'b'. To see more rides, please enter 'y'. To exit, press 'e': \"\n )\n",
"user_input = input(\n \"To book a member on a ride, please enter 'b'. To exit, press 'e': \")\n",
"if user_input == 'y':\n",
"if user_input == 'b':\n",
"self.display_rides(page_num + 1)\n",
"self.book_ride()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_8(self, VAR_10):...\n",
"self.model.reset()\n",
"self.load_torrents(1, 50)\n"
] | [
"def _on_filter_input_change(self, _):...\n",
"self.model.reset()\n",
"self.load_torrents(1, 50)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"def FUNC_3(self, VAR_3):...\n",
"self.total += VAR_3.total\n",
"self.success += VAR_3.success\n",
"self.failure += VAR_3.failure\n",
"self.error += VAR_3.error\n",
"self.undecided += VAR_3.undecided\n",
"self.total_time += VAR_3.total_time\n",
"self.max_time = max(self.max_time, VAR_3.max_time)\n"
] | [
"def merge(self, log2):...\n",
"self.total += log2.total\n",
"self.success += log2.success\n",
"self.failure += log2.failure\n",
"self.error += log2.error\n",
"self.undecided += log2.undecided\n",
"self.total_time += log2.total_time\n",
"self.max_time = max(self.max_time, log2.max_time)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"AugAssign'",
"Assign'"
] |
[
"def FUNC_2(self, VAR_6):...\n",
"for VAR_7 in self.G.edges():\n",
"self.calcTPVM(VAR_7, VAR_6)\n"
] | [
"def calcAllTPVM(self, gamma):...\n",
"for edge in self.G.edges():\n",
"self.calcTPVM(edge, gamma)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"For",
"Expr'"
] |
[
"@odin.map_field(from_field=('from_field_c1', 'from_field_c2',...\n",
"return '-'.join(VAR_46)\n"
] | [
"@odin.map_field(from_field=('from_field_c1', 'from_field_c2',...\n",
"return '-'.join(values)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_11(self, VAR_14=None, VAR_15=False, VAR_16=False, VAR_17=1, VAR_18...\n",
"self.global_resources = dict() if VAR_52 is None else VAR_52\n",
"self.global_resources['_cores'] = VAR_17\n",
"self.global_resources['_nodes'] = VAR_18\n",
"def FUNC_2(VAR_87):...\n",
"return map(self._rules.__getitem__, filter(self.is_rule, VAR_87))\n"
] | [
"def execute(self, targets=None, dryrun=False, touch=False, cores=1, nodes=1,...\n",
"self.global_resources = dict() if resources is None else resources\n",
"self.global_resources['_cores'] = cores\n",
"self.global_resources['_nodes'] = nodes\n",
"def rules(items):...\n",
"return map(self._rules.__getitem__, filter(self.is_rule, items))\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_18(self, VAR_36, VAR_37=None):...\n",
"VAR_37 = frappe.permissions.get_user_permission_doctypes(VAR_37, VAR_36)\n",
"VAR_1 = frappe.get_meta(self.doctype)\n",
"for doctypes in VAR_37:\n",
"VAR_64 = {}\n",
"VAR_58 = []\n",
"for VAR_6 in VAR_1.get_fields_to_check_permissions(doctypes):\n",
"VAR_68 = VAR_36.get(VAR_6.options, [])\n",
"if VAR_58:\n",
"VAR_69 = 'ifnull(`tab{doctype}`.`{fieldname}`, \"\")=\"\"'.format(VAR_0=self.\n doctype, fieldname=df.fieldname)\n",
"self.match_conditions.append(' and '.join(VAR_58))\n",
"if VAR_64:\n",
"if VAR_68:\n",
"self.match_filters.append(VAR_64)\n",
"if not cint(frappe.get_system_settings('apply_strict_user_permissions')):\n",
"VAR_62 = VAR_69\n",
"VAR_62 = VAR_69 + ' or '\n",
"VAR_62 = ''\n",
"VAR_58.append('({condition})'.format(VAR_62=condition))\n",
"VAR_62 += '`tab{doctype}`.`{fieldname}` in ({values})'.format(VAR_0=self.\n doctype, fieldname=df.fieldname, VAR_60=', '.join([('\"' + frappe.db.\n escape(v, percent=False) + '\"') for v in user_permission_values]))\n",
"VAR_64[VAR_6.options] = VAR_68\n"
] | [
"def add_user_permissions(self, user_permissions, user_permission_doctypes=None...\n",
"user_permission_doctypes = frappe.permissions.get_user_permission_doctypes(\n user_permission_doctypes, user_permissions)\n",
"meta = frappe.get_meta(self.doctype)\n",
"for doctypes in user_permission_doctypes:\n",
"match_filters = {}\n",
"match_conditions = []\n",
"for df in meta.get_fields_to_check_permissions(doctypes):\n",
"user_permission_values = user_permissions.get(df.options, [])\n",
"if match_conditions:\n",
"cond = 'ifnull(`tab{doctype}`.`{fieldname}`, \"\")=\"\"'.format(doctype=self.\n doctype, fieldname=df.fieldname)\n",
"self.match_conditions.append(' and '.join(match_conditions))\n",
"if match_filters:\n",
"if user_permission_values:\n",
"self.match_filters.append(match_filters)\n",
"if not cint(frappe.get_system_settings('apply_strict_user_permissions')):\n",
"condition = cond\n",
"condition = cond + ' or '\n",
"condition = ''\n",
"match_conditions.append('({condition})'.format(condition=condition))\n",
"condition += '`tab{doctype}`.`{fieldname}` in ({values})'.format(doctype=\n self.doctype, fieldname=df.fieldname, values=', '.join([('\"' + frappe.\n db.escape(v, percent=False) + '\"') for v in user_permission_values]))\n",
"match_filters[df.options] = user_permission_values\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"For",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"AugAssign'",
"Assign'"
] |
[
"def FUNC_49(self):...\n",
"for VAR_14 in self.list_columns.keys():\n",
"if self.is_pk(VAR_14):\n",
"return VAR_14\n"
] | [
"def get_pk_name(self):...\n",
"for col_name in self.list_columns.keys():\n",
"if self.is_pk(col_name):\n",
"return col_name\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"For",
"Condition",
"Return'"
] |
[
"from django.http import HttpResponse, HttpResponseRedirect\n",
"from django.shortcuts import get_object_or_404, render\n",
"from django.views.generic import TemplateView, DetailView\n",
"from .forms import SearchForm\n",
"from lib.geoip import GeoIP\n",
"from lib.vt import VT\n",
"from lib.threatminer import ThreatMiner\n",
"import socket\n",
"from django.db.models import Q\n",
"from apps.threat.models import Event, Attribute\n",
"from apps.reputation.models import blacklist\n",
"from apps.twitter.models import tweet\n",
"from apps.exploit.models import Exploit\n",
"VAR_0 = 'ip/index.html'\n",
"def FUNC_0(self, **VAR_1):...\n",
"VAR_3 = super().get_context_data(**kwargs)\n",
"VAR_3['search_form'] = SearchForm()\n",
"return VAR_3\n"
] | [
"from django.http import HttpResponse, HttpResponseRedirect\n",
"from django.shortcuts import get_object_or_404, render\n",
"from django.views.generic import TemplateView, DetailView\n",
"from .forms import SearchForm\n",
"from lib.geoip import GeoIP\n",
"from lib.vt import VT\n",
"from lib.threatminer import ThreatMiner\n",
"import socket\n",
"from django.db.models import Q\n",
"from apps.threat.models import Event, Attribute\n",
"from apps.reputation.models import blacklist\n",
"from apps.twitter.models import tweet\n",
"from apps.exploit.models import Exploit\n",
"template_name = 'ip/index.html'\n",
"def get_context_data(self, **kwargs):...\n",
"context = super().get_context_data(**kwargs)\n",
"context['search_form'] = SearchForm()\n",
"return context\n"
] | [
6,
6,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@property...\n",
"if not self._is_function:\n",
"return self._file\n"
] | [
"@property...\n",
"if not self._is_function:\n",
"return self._file\n"
] | [
0,
0,
0
] | [
"Condition",
"Condition",
"Return'"
] |
[
"def __repr__(self):...\n",
"return u'Array({})'.format(repr(self.inner_type))\n"
] | [
"def __repr__(self):...\n",
"return u'Array({})'.format(repr(self.inner_type))\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_6(self, VAR_16):...\n",
"if isinstance(VAR_16, CLASS_3) and (self.url, self.rev) == (VAR_16.url,\n",
"return True\n",
"return False\n"
] | [
"def same_checkout(self, other):...\n",
"if isinstance(other, SvnSubproject) and (self.url, self.rev) == (other.url,\n",
"return True\n",
"return False\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_2(self, VAR_10, VAR_11):...\n",
"VAR_18 = dict(scantree(VAR_10))\n",
"for VAR_17 in VAR_18.keys():\n",
"VAR_28 = VAR_17[len(VAR_10):]\n",
"VAR_29 = VAR_28.replace('\\\\', '/')\n",
"VAR_16 = VAR_11 + VAR_29\n",
"self.add_file_to_dictionary(VAR_16, VAR_17, VAR_18=stat_cache)\n"
] | [
"def update_files_dictionary(self, root, prefix):...\n",
"stat_cache = dict(scantree(root))\n",
"for path in stat_cache.keys():\n",
"relative_path = path[len(root):]\n",
"relative_url = relative_path.replace('\\\\', '/')\n",
"url = prefix + relative_url\n",
"self.add_file_to_dictionary(url, path, stat_cache=stat_cache)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_8(VAR_12):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_14 = pyramid.request.Request({'HTTP_COOKIE': '{0}={1}'.format(VAR_5.\n registry.settings['session.cookie_name'], VAR_12)})\n",
"VAR_20 = VAR_8(VAR_14)\n",
"return VAR_20 and VAR_20.get('admin')\n"
] | [
"def authenticator(key):...\n",
"\"\"\"docstring\"\"\"\n",
"request = pyramid.request.Request({'HTTP_COOKIE': '{0}={1}'.format(config.\n registry.settings['session.cookie_name'], key)})\n",
"session_data = session_factory(request)\n",
"return session_data and session_data.get('admin')\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Return'"
] |
[
"async def FUNC_9(VAR_6):...\n",
"VAR_10 = {'Authorization': f'Bearer {VAR_1}'}\n",
"VAR_8 = await VAR_6.get('/results?page=0&count=2', VAR_10=headers)\n",
"VAR_9 = await VAR_8.json()\n",
"assert VAR_8.status == 200\n",
"assert len(VAR_9) == 2\n",
"VAR_8 = await VAR_6.get('/results?page=1&count=1', VAR_10=headers)\n",
"VAR_9 = await VAR_8.json()\n",
"assert VAR_8.status == 200\n",
"assert len(VAR_9) == 1\n"
] | [
"async def test_positive_get_paging(test_cli):...\n",
"headers = {'Authorization': f'Bearer {access_token}'}\n",
"resp = await test_cli.get('/results?page=0&count=2', headers=headers)\n",
"resp_json = await resp.json()\n",
"assert resp.status == 200\n",
"assert len(resp_json) == 2\n",
"resp = await test_cli.get('/results?page=1&count=1', headers=headers)\n",
"resp_json = await resp.json()\n",
"assert resp.status == 200\n",
"assert len(resp_json) == 1\n"
] | [
0,
0,
4,
0,
0,
0,
4,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assert'",
"Assert'",
"Assign'",
"Assign'",
"Assert'",
"Assert'"
] |
[
"import db\n",
"import main\n",
"if __name__ == '__main__':\n",
"VAR_0 = 'SELECT track_id,username,chat_id FROM job WHERE done=0'\n",
"VAR_1 = db.select(VAR_0)\n",
"for i in VAR_1:\n",
"main.cron(i[0], i[1], i[2])\n"
] | [
"import db\n",
"import main\n",
"if __name__ == '__main__':\n",
"sql_cmd = 'SELECT track_id,username,chat_id FROM job WHERE done=0'\n",
"s = db.select(sql_cmd)\n",
"for i in s:\n",
"main.cron(i[0], i[1], i[2])\n"
] | [
0,
0,
0,
4,
4,
0,
0
] | [
"Import'",
"Import'",
"Condition",
"Assign'",
"Assign'",
"For",
"Expr'"
] |
[
"def FUNC_24(self, VAR_14):...\n",
""
] | [
"def is_pk(self, col_name):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def __init__(self, VAR_38, VAR_36, VAR_39):...\n",
"self._user_id = VAR_38\n",
"self._email = VAR_36\n",
"self._picture = VAR_39\n"
] | [
"def __init__(self, user_id, email, picture):...\n",
"self._user_id = user_id\n",
"self._email = email\n",
"self._picture = picture\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_4(VAR_2):...\n",
"VAR_10 = sqlite3.connect(VAR_1)\n",
"VAR_11 = VAR_10.cursor()\n",
"VAR_11.execute('SELECT * FROM UserData WHERE username = ?', [VAR_2])\n",
"VAR_15 = VAR_11.fetchone()\n",
"VAR_10.close()\n",
"return VAR_15\n"
] | [
"def is_username_taken(username):...\n",
"connection = sqlite3.connect(DATABASE_PATH)\n",
"cursor = connection.cursor()\n",
"cursor.execute('SELECT * FROM UserData WHERE username = ?', [username])\n",
"records = cursor.fetchone()\n",
"connection.close()\n",
"return records\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_9(self):...\n",
"print('Unsupported external %s' % self.directory)\n"
] | [
"def status(self):...\n",
"print('Unsupported external %s' % self.directory)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"vimsupport.PostVimMessage('Restarting ycmd server...')\n",
"self._user_notified_about_crash = False\n",
"self._ServerCleanup()\n",
"self._SetupServer()\n"
] | [
"def RestartServer(self):...\n",
"vimsupport.PostVimMessage('Restarting ycmd server...')\n",
"self._user_notified_about_crash = False\n",
"self._ServerCleanup()\n",
"self._SetupServer()\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_3(self):...\n",
"self.objects.extext.qa_edited = True\n",
"VAR_1 = QANotes.objects.create(extracted_text=self.objects.extext)\n",
"self.assertEqual(VAR_1.qa_notes, None)\n",
"VAR_1.qa_notes = 'A short QA note'\n",
"VAR_1.clean()\n",
"VAR_3 = \"\"\"An exception of type {0} occurred. Arguments:\n{1!r}\"\"\"\n",
"VAR_2 = 'A long QA note' * 200\n",
"VAR_4 = VAR_3.format(type(ex).__name__, ex.args)\n",
"VAR_1.qa_notes = VAR_2\n",
"VAR_1.clean()\n",
"VAR_3 = \"\"\"An exception of type {0} occurred. Arguments:\n{1!r}\"\"\"\n",
"VAR_4 = VAR_3.format(type(ex).__name__, ex.args)\n"
] | [
"def test_long_qa_notes(self):...\n",
"self.objects.extext.qa_edited = True\n",
"note = QANotes.objects.create(extracted_text=self.objects.extext)\n",
"self.assertEqual(note.qa_notes, None)\n",
"note.qa_notes = 'A short QA note'\n",
"note.clean()\n",
"template = \"\"\"An exception of type {0} occurred. Arguments:\n{1!r}\"\"\"\n",
"long_note = 'A long QA note' * 200\n",
"message = template.format(type(ex).__name__, ex.args)\n",
"note.qa_notes = long_note\n",
"note.clean()\n",
"template = \"\"\"An exception of type {0} occurred. Arguments:\n{1!r}\"\"\"\n",
"message = template.format(type(ex).__name__, ex.args)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'"
] |
[
"def FUNC_5(VAR_2, VAR_6):...\n",
"VAR_2.cursor.execute('string', (VAR_6,))\n",
"VAR_22 = VAR_2.cursor.fetchall()\n",
"return [VAR_0(*VAR_27) for VAR_27 in VAR_22]\n"
] | [
"def get_for_student(db, student_id):...\n",
"db.cursor.execute(\n \"\"\"SELECT id, sheet_id, student_id, time, files_path, deleted\n FROM submission WHERE student_id = ?\"\"\"\n , (student_id,))\n",
"rows = db.cursor.fetchall()\n",
"return [Submission(*row) for row in rows]\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_0(self):...\n",
"super().get_common_objects()\n",
"self.get_summary_submissions()\n"
] | [
"def get_common_objects(self):...\n",
"super().get_common_objects()\n",
"self.get_summary_submissions()\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"def FUNC_11(VAR_35, VAR_27):...\n",
"return {'complete_output': VAR_35, 'categories': VAR_27}\n"
] | [
"def _output_dict(complete_output, categories):...\n",
"return {'complete_output': complete_output, 'categories': categories}\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def __init__(self, VAR_18, VAR_19):...\n",
"self.fs = VAR_18\n",
"self.cfg = VAR_19\n",
"self.tempfiles = {}\n",
"self.filenames = {}\n",
"self.newcount = 0\n",
"self.init_honeyfs(self.cfg.get('honeypot', 'contents_path'))\n"
] | [
"def __init__(self, fs, cfg):...\n",
"self.fs = fs\n",
"self.cfg = cfg\n",
"self.tempfiles = {}\n",
"self.filenames = {}\n",
"self.newcount = 0\n",
"self.init_honeyfs(self.cfg.get('honeypot', 'contents_path'))\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_18(self, VAR_14):...\n",
""
] | [
"def is_relation_many_to_one(self, col_name):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_3(VAR_3, VAR_4=None):...\n",
"VAR_0 = CLASS_0(VAR_3)\n",
"VAR_0.rule = VAR_4\n",
"return VAR_0\n"
] | [
"def IOFile(file, rule=None):...\n",
"f = _IOFile(file)\n",
"f.rule = rule\n",
"return f\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_23 = User()\n",
"VAR_23.id = VAR_6\n",
"VAR_23.username = VAR_4\n",
"VAR_24 = current_app.config['MAPPER_LEVEL_INTERMEDIATE']\n",
"VAR_25 = current_app.config['MAPPER_LEVEL_ADVANCED']\n",
"if VAR_7 > VAR_25:\n",
"VAR_23.mapping_level = MappingLevel.ADVANCED.value\n",
"if VAR_24 < VAR_7 < VAR_25:\n",
"VAR_23.create()\n",
"VAR_23.mapping_level = MappingLevel.INTERMEDIATE.value\n",
"VAR_23.mapping_level = MappingLevel.BEGINNER.value\n",
"return VAR_23\n"
] | [
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"new_user = User()\n",
"new_user.id = osm_id\n",
"new_user.username = username\n",
"intermediate_level = current_app.config['MAPPER_LEVEL_INTERMEDIATE']\n",
"advanced_level = current_app.config['MAPPER_LEVEL_ADVANCED']\n",
"if changeset_count > advanced_level:\n",
"new_user.mapping_level = MappingLevel.ADVANCED.value\n",
"if intermediate_level < changeset_count < advanced_level:\n",
"new_user.create()\n",
"new_user.mapping_level = MappingLevel.INTERMEDIATE.value\n",
"new_user.mapping_level = MappingLevel.BEGINNER.value\n",
"return new_user\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_31():...\n",
"VAR_24 = vim.eval('tagfiles()')\n",
"VAR_25 = VAR_0.getcwd()\n",
"return [VAR_0.path.join(VAR_25, x) for x in VAR_24]\n"
] | [
"def GetTagFiles():...\n",
"tag_files = vim.eval('tagfiles()')\n",
"current_working_directory = os.getcwd()\n",
"return [os.path.join(current_working_directory, x) for x in tag_files]\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_7(self, VAR_15, VAR_16):...\n",
"self.write_line('')\n",
"self.write_line('define %s {' % VAR_15)\n",
"VAR_27 = VAR_16.keys()\n",
"VAR_27.sort()\n",
"for key in VAR_27:\n",
"VAR_17 = VAR_16[key]\n",
"self.write_line('}')\n",
"self.icinga_lines.append('%s%-45s%s' % (self.indent, key, self.\n value_to_icinga(VAR_17)))\n"
] | [
"def write_section(self, section_name, section_data):...\n",
"self.write_line('')\n",
"self.write_line('define %s {' % section_name)\n",
"sorted_keys = section_data.keys()\n",
"sorted_keys.sort()\n",
"for key in sorted_keys:\n",
"value = section_data[key]\n",
"self.write_line('}')\n",
"self.icinga_lines.append('%s%-45s%s' % (self.indent, key, self.\n value_to_icinga(value)))\n"
] | [
0,
0,
0,
0,
0,
0,
7,
0,
7
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"For",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_15(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = BokChoyTestSuite('', num_processes=2, verbosity=3)\n",
"BokChoyTestSuite.verbosity_processes_string(VAR_7)\n"
] | [
"def test_invalid_verbosity_and_processes(self):...\n",
"\"\"\"docstring\"\"\"\n",
"suite = BokChoyTestSuite('', num_processes=2, verbosity=3)\n",
"BokChoyTestSuite.verbosity_processes_string(suite)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"import httplib\n",
"import types\n",
"import collections\n",
"import json\n",
"import re\n",
"import sys\n",
"import logging\n",
"from StringIO import StringIO\n",
"from cgi import parse_header\n",
"from urllib import unquote\n",
"from twisted.python.failure import Failure\n",
"from twisted.internet.defer import inlineCallbacks\n",
"from twisted.internet import fdesc\n",
"from cyclone.web import RequestHandler, HTTPError, HTTPAuthenticationRequired, StaticFileHandler, RedirectHandler\n",
"from cyclone.httpserver import HTTPConnection, HTTPRequest, _BadRequestException\n",
"from cyclone import escape, httputil\n",
"from cyclone.escape import native_str, parse_qs_bytes\n",
"from globaleaks.jobs.statistics_sched import alarm_level\n",
"from globaleaks.utils.utility import log, log_remove_escapes, log_encode_html, datetime_now\n",
"from globaleaks.utils.mailutils import mail_exception\n",
"from globaleaks.settings import GLSetting\n",
"from globaleaks.rest import errors\n",
"from globaleaks.security import GLSecureTemporaryFile, security_sleep\n",
"def FUNC_0(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"if len(VAR_0) == 22 and VAR_0[16:22] == '.onion':\n",
"return True\n",
"VAR_2 = str(VAR_0).split(':')\n",
"if len(VAR_2) == 2:\n",
"VAR_0 = VAR_2[0]\n",
"if VAR_0 in GLSetting.accepted_hosts:\n",
"return True\n",
"log.debug('Error in host requested: %s not accepted between: %s ' % (VAR_0,\n GLSetting.accepted_hosts))\n",
"return False\n"
] | [
"import httplib\n",
"import types\n",
"import collections\n",
"import json\n",
"import re\n",
"import sys\n",
"import logging\n",
"from StringIO import StringIO\n",
"from cgi import parse_header\n",
"from urllib import unquote\n",
"from twisted.python.failure import Failure\n",
"from twisted.internet.defer import inlineCallbacks\n",
"from twisted.internet import fdesc\n",
"from cyclone.web import RequestHandler, HTTPError, HTTPAuthenticationRequired, StaticFileHandler, RedirectHandler\n",
"from cyclone.httpserver import HTTPConnection, HTTPRequest, _BadRequestException\n",
"from cyclone import escape, httputil\n",
"from cyclone.escape import native_str, parse_qs_bytes\n",
"from globaleaks.jobs.statistics_sched import alarm_level\n",
"from globaleaks.utils.utility import log, log_remove_escapes, log_encode_html, datetime_now\n",
"from globaleaks.utils.mailutils import mail_exception\n",
"from globaleaks.settings import GLSetting\n",
"from globaleaks.rest import errors\n",
"from globaleaks.security import GLSecureTemporaryFile, security_sleep\n",
"def validate_host(host_key):...\n",
"\"\"\"docstring\"\"\"\n",
"if len(host_key) == 22 and host_key[16:22] == '.onion':\n",
"return True\n",
"hostchunk = str(host_key).split(':')\n",
"if len(hostchunk) == 2:\n",
"host_key = hostchunk[0]\n",
"if host_key in GLSetting.accepted_hosts:\n",
"return True\n",
"log.debug('Error in host requested: %s not accepted between: %s ' % (\n host_key, GLSetting.accepted_hosts))\n",
"return False\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Return'",
"Expr'",
"Return'"
] |
[
"def FUNC_18(VAR_11, VAR_1, VAR_18, VAR_3):...\n",
""
] | [
"def handleMemoryOps(item, stack, memory, symbols):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"\"\"\"\nCreated on 1 Feb 2018\n\n@author: Teodor Gherasim Nistor\n\"\"\"\n",
"from beamr.debug import warn\n",
"def FUNC_0(VAR_0):...\n",
"warn('Skip lexing error..', VAR_0)\n",
"VAR_0.lexer.skip(1)\n"
] | [
"\"\"\"\nCreated on 1 Feb 2018\n\n@author: Teodor Gherasim Nistor\n\"\"\"\n",
"from beamr.debug import warn\n",
"def t_error(t):...\n",
"warn('Skip lexing error..', t)\n",
"t.lexer.skip(1)\n"
] | [
0,
0,
0,
2,
0
] | [
"Expr'",
"ImportFrom'",
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"@VAR_0.route('/')...\n",
"return render_template('index.html')\n"
] | [
"@app.route('/')...\n",
"return render_template('index.html')\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_23(VAR_48):...\n",
"if VAR_48.name in VAR_47 or VAR_48.exclude_from_cmake:\n",
"return\n",
"VAR_47.add(VAR_48.name)\n",
"for c in sorted(VAR_48.children, VAR_29=lambda x: x.name):\n",
"FUNC_23(c)\n",
"FUNC_22(VAR_48)\n",
"if VAR_48 is not VAR_22 and exists(join(VAR_48.directory, 'CMakeLists.txt')):\n",
"VAR_46.append('add_subdirectory(%s)\\n' % VAR_48.directory)\n"
] | [
"def process_module(module):...\n",
"if module.name in processed or module.exclude_from_cmake:\n",
"return\n",
"processed.add(module.name)\n",
"for c in sorted(module.children, key=lambda x: x.name):\n",
"process_module(c)\n",
"dump_options(module)\n",
"if module is not root and exists(join(module.directory, 'CMakeLists.txt')):\n",
"cmakelists_rows.append('add_subdirectory(%s)\\n' % module.directory)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Expr'",
"For",
"Expr'",
"Expr'",
"Condition",
"Expr'"
] |
[
"\"\"\"\n\"\"\"\n",
"import sqlite3, pytz\n",
"from datetime import datetime, timedelta\n",
"def __init__(self, VAR_0):...\n",
"self.config = VAR_0\n",
"self.db = sqlite3.connect(self.config.get_database_path(),\n check_same_thread=False)\n",
"self.c = self.db.cursor()\n",
"def FUNC_0(self):...\n",
"VAR_16 = self.config.get_connection_interfaces()\n",
"for source in VAR_16:\n",
"if source['type'] == 'inverter':\n",
"def FUNC_1(self, VAR_1, VAR_2):...\n",
"VAR_20 = 'string' % (source['serial_id'], 0, source['prev_etotal'])\n",
"for VAR_3 in VAR_2:\n",
"self.c.execute(VAR_20)\n",
"VAR_28 = VAR_3['source']['type']\n",
"def FUNC_2(self, VAR_1, VAR_3):...\n",
"VAR_20 = 'string' % (source['name'], source['inverter_type'],\n 's0-bridge v0', 'OK', int(datetime.now().timestamp()), source['serial_id'])\n",
"if VAR_28 == 'inverter':\n",
"VAR_17 = VAR_3['source']['serial_id']\n",
"self.c.execute(VAR_20)\n",
"self.add_inverter_data(VAR_1, VAR_3)\n",
"if VAR_28 == 'consumption':\n",
"VAR_18, VAR_19, VAR_4 = self.get_previous_yields(VAR_17)\n",
"self.db.commit()\n",
"self.add_consumption_data_row(VAR_1, VAR_3['energy'], VAR_3['power'])\n",
"VAR_6 = 'OK'\n",
"self.add_day_data_row(VAR_1, VAR_3, VAR_4)\n",
"if self.is_timestamps_from_same_day(VAR_18, VAR_1):\n",
"self.update_inverter(VAR_17, VAR_1, VAR_6, VAR_19 + VAR_3['energy'], VAR_4 +\n VAR_3['energy'])\n",
"self.update_inverter(VAR_17, VAR_1, VAR_6, VAR_3['energy'], VAR_4 + VAR_3[\n 'energy'])\n",
"self.db.commit()\n",
"self.add_month_data_row(VAR_17, VAR_1, VAR_19, VAR_4)\n",
"def FUNC_3(self, VAR_1, VAR_3, VAR_4):...\n",
"if VAR_3['power'] > 0:\n",
"VAR_17 = VAR_3['source']['serial_id']\n",
"def FUNC_4(self, VAR_5):...\n",
"VAR_20 = 'string' % (VAR_1, VAR_17, VAR_3['power'], VAR_4 + VAR_3['energy'])\n",
"VAR_20 = 'string' % VAR_5\n",
"self.c.execute(VAR_20)\n",
"self.c.execute(VAR_20)\n",
"VAR_3 = self.c.fetchone()\n",
"return VAR_3[0], VAR_3[1], VAR_3[2]\n"
] | [
"\"\"\"\n\"\"\"\n",
"import sqlite3, pytz\n",
"from datetime import datetime, timedelta\n",
"def __init__(self, config):...\n",
"self.config = config\n",
"self.db = sqlite3.connect(self.config.get_database_path(),\n check_same_thread=False)\n",
"self.c = self.db.cursor()\n",
"def add_inverters(self):...\n",
"interfaces = self.config.get_connection_interfaces()\n",
"for source in interfaces:\n",
"if source['type'] == 'inverter':\n",
"def add_data(self, ts, data_points):...\n",
"query = (\n \"\"\"\n INSERT OR IGNORE INTO Inverters (\n Serial,\n EToday,\n ETotal\n ) VALUES (\n %s,\n %s,\n %s\n );\n \"\"\"\n % (source['serial_id'], 0, source['prev_etotal']))\n",
"for data in data_points:\n",
"self.c.execute(query)\n",
"data_type = data['source']['type']\n",
"def add_inverter_data(self, ts, data):...\n",
"query = (\n \"\"\"\n UPDATE Inverters\n SET \n Name='%s', \n Type='%s', \n SW_Version='%s', \n Status='%s',\n TimeStamp='%s'\n WHERE Serial='%s';\n \"\"\"\n % (source['name'], source['inverter_type'], 's0-bridge v0', 'OK', int(\n datetime.now().timestamp()), source['serial_id']))\n",
"if data_type == 'inverter':\n",
"inv_serial = data['source']['serial_id']\n",
"self.c.execute(query)\n",
"self.add_inverter_data(ts, data)\n",
"if data_type == 'consumption':\n",
"prev_ts, prev_etoday, prev_etotal = self.get_previous_yields(inv_serial)\n",
"self.db.commit()\n",
"self.add_consumption_data_row(ts, data['energy'], data['power'])\n",
"status = 'OK'\n",
"self.add_day_data_row(ts, data, prev_etotal)\n",
"if self.is_timestamps_from_same_day(prev_ts, ts):\n",
"self.update_inverter(inv_serial, ts, status, prev_etoday + data['energy'], \n prev_etotal + data['energy'])\n",
"self.update_inverter(inv_serial, ts, status, data['energy'], prev_etotal +\n data['energy'])\n",
"self.db.commit()\n",
"self.add_month_data_row(inv_serial, ts, prev_etoday, prev_etotal)\n",
"def add_day_data_row(self, ts, data, prev_etotal):...\n",
"if data['power'] > 0:\n",
"inv_serial = data['source']['serial_id']\n",
"def get_previous_yields(self, inverter_serial):...\n",
"query = (\n \"\"\"\n INSERT INTO DayData (\n TimeStamp,\n Serial,\n Power,\n TotalYield\n ) VALUES (\n %s,\n %s,\n %s,\n %s\n );\n \"\"\"\n % (ts, inv_serial, data['power'], prev_etotal + data['energy']))\n",
"query = (\n \"\"\"\n SELECT TimeStamp, EToday, ETotal\n FROM Inverters\n WHERE Serial = '%s'\n \"\"\"\n % inverter_serial)\n",
"self.c.execute(query)\n",
"self.c.execute(query)\n",
"data = self.c.fetchone()\n",
"return data[0], data[1], data[2]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
4,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
4,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"ImportFrom'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"For",
"Condition",
"FunctionDef'",
"Assign'",
"For",
"Expr'",
"Assign'",
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Condition",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Return'"
] |
[
"def FUNC_22(self):...\n",
"self.compilation_ok('string')\n"
] | [
"def test_vacationext_with_multiline(self):...\n",
"self.compilation_ok(\n \"\"\"\nrequire \"vacation\";\nvacation :mime text:\nContent-Type: multipart/alternative; boundary=foo\n\n--foo\n\nI'm at the beach relaxing. Mmmm, surf...\n\n--foo\nContent-Type: text/html; charset=us-ascii\n\n<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\"\n \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n<HTML><HEAD><TITLE>How to relax</TITLE>\n<BASE HREF=\"http://home.example.com/pictures/\"></HEAD>\n<BODY><P>I'm at the <A HREF=\"beach.gif\">beach</A> relaxing.\nMmmm, <A HREF=\"ocean.gif\">surf</A>...\n</BODY></HTML>\n\n--foo--\n.\n;\n\"\"\"\n )\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_33(VAR_39):...\n",
"return True\n"
] | [
"def acquire(self2):...\n",
"return True\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_0(VAR_0):...\n",
"if VAR_0 == '':\n",
"return True\n",
"if len(VAR_0.split()) > 1:\n",
"return True\n",
"VAR_2 = requests.get('http://codeforces.com/submissions/' + VAR_0)\n",
"VAR_3 = BeautifulSoup(VAR_2.text, 'lxml')\n",
"if VAR_3.find(attrs={'class': 'verdict'}) == None:\n",
"return True\n",
"return False\n"
] | [
"def check_username(username):...\n",
"if username == '':\n",
"return True\n",
"if len(username.split()) > 1:\n",
"return True\n",
"r = requests.get('http://codeforces.com/submissions/' + username)\n",
"soup = BeautifulSoup(r.text, 'lxml')\n",
"if soup.find(attrs={'class': 'verdict'}) == None:\n",
"return True\n",
"return False\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"@api.require(lambda : False)...\n",
"VAR_6.fail('Handler code should not be called')\n"
] | [
"@api.require(lambda : False)...\n",
"test.fail('Handler code should not be called')\n"
] | [
0,
0
] | [
"Condition",
"Expr'"
] |
[
"def FUNC_1(self, VAR_3, VAR_2):...\n",
"VAR_6 = 'string'.format(VAR_3, VAR_2.get_client_id())\n",
"VAR_7 = self.__conn.cursor()\n",
"VAR_7.execute(VAR_6)\n",
"self.__conn.commit()\n"
] | [
"def change_pass(self, new_pass, logged_user):...\n",
"update_sql = (\n \"\"\"\n UPDATE Clients\n SET password = '{}'\n WHERE client_id = '{}'\n \"\"\"\n .format(new_pass, logged_user.get_client_id()))\n",
"cursor = self.__conn.cursor()\n",
"cursor.execute(update_sql)\n",
"self.__conn.commit()\n"
] | [
0,
4,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_16(self, VAR_49, **VAR_18):...\n",
"VAR_50 = VAR_49.method.lower()\n",
"VAR_56 = copy.copy(VAR_18)\n",
"if 'allow_redirects' in VAR_56:\n",
"for VAR_62 in ['url', 'body']:\n",
"VAR_56[VAR_62] = getattr(VAR_49, VAR_62, None)\n",
"VAR_56['headers'] = copy.copy(VAR_49.headers)\n",
"for header in ['Accept-Encoding', 'User-Agent', 'Connection',\n",
"if header in VAR_56['headers']:\n",
"VAR_56['headers'] = VAR_49.headers\n",
"VAR_33 = getattr(self._record, VAR_50)\n",
"VAR_33(**checked_kwargs)\n"
] | [
"def record_call(self, request, **kwargs):...\n",
"verb = request.method.lower()\n",
"checked_kwargs = copy.copy(kwargs)\n",
"if 'allow_redirects' in checked_kwargs:\n",
"for attr in ['url', 'body']:\n",
"checked_kwargs[attr] = getattr(request, attr, None)\n",
"checked_kwargs['headers'] = copy.copy(request.headers)\n",
"for header in ['Accept-Encoding', 'User-Agent', 'Connection',\n",
"if header in checked_kwargs['headers']:\n",
"checked_kwargs['headers'] = request.headers\n",
"method = getattr(self._record, verb)\n",
"method(**checked_kwargs)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"For",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"from twisted.internet.defer import inlineCallbacks, returnValue\n",
"from base import Query\n",
"\"\"\"\n Object representing an update query\n \"\"\"\n",
"def __init__(self, VAR_0, VAR_1):...\n",
"super(CLASS_0, self).__init__(VAR_0)\n",
"self.values = VAR_1\n",
"self.return_id = self.model_class._meta.primary_key\n",
"@inlineCallbacks...\n",
"VAR_2 = self.database.generate_update(self)\n",
"if self.return_id:\n",
"VAR_3 = yield self.database.runQuery(VAR_2)\n",
"yield self.database.runOperation(VAR_2)\n",
"if VAR_3 and self.model_class._meta.primary_key:\n",
"returnValue(None)\n",
"returnValue(VAR_3[0][0])\n"
] | [
"from twisted.internet.defer import inlineCallbacks, returnValue\n",
"from base import Query\n",
"\"\"\"\n Object representing an update query\n \"\"\"\n",
"def __init__(self, model_class, values):...\n",
"super(UpdateQuery, self).__init__(model_class)\n",
"self.values = values\n",
"self.return_id = self.model_class._meta.primary_key\n",
"@inlineCallbacks...\n",
"query = self.database.generate_update(self)\n",
"if self.return_id:\n",
"result = yield self.database.runQuery(query)\n",
"yield self.database.runOperation(query)\n",
"if result and self.model_class._meta.primary_key:\n",
"returnValue(None)\n",
"returnValue(result[0][0])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
4,
4,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"Expr'",
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_22(VAR_21):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_45 = FUNC_21(VAR_21)\n",
"if not isinstance(VAR_45, dict):\n",
"return VAR_45\n"
] | [
"def load_configfile(configpath):...\n",
"\"\"\"docstring\"\"\"\n",
"config = _load_configfile(configpath)\n",
"if not isinstance(config, dict):\n",
"return config\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_15(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"self.driver._eql_execute('pool', 'select', self.configuration.eqlx_pool, 'show'\n ).AndReturn(['TotalCapacity: 111GB', 'FreeSpace: 11GB'])\n",
"self.mox.ReplayAll()\n",
"VAR_10 = self.driver.get_volume_stats(refresh=True)\n",
"self.assertEqual(VAR_10['total_capacity_gb'], float('111.0'))\n",
"self.assertEqual(VAR_10['free_capacity_gb'], float('11.0'))\n",
"self.assertEqual(VAR_10['vendor_name'], 'Dell')\n"
] | [
"def test_get_volume_stats(self):...\n",
"self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n",
"self.driver._eql_execute('pool', 'select', self.configuration.eqlx_pool, 'show'\n ).AndReturn(['TotalCapacity: 111GB', 'FreeSpace: 11GB'])\n",
"self.mox.ReplayAll()\n",
"stats = self.driver.get_volume_stats(refresh=True)\n",
"self.assertEqual(stats['total_capacity_gb'], float('111.0'))\n",
"self.assertEqual(stats['free_capacity_gb'], float('11.0'))\n",
"self.assertEqual(stats['vendor_name'], 'Dell')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_1(VAR_0, VAR_4=1901, VAR_5=2100, VAR_1='_', VAR_3=-1):...\n",
"\"\"\"docstring\"\"\"\n",
"import pandas as pd\n",
"VAR_16 = [int(fn.split('.')[0].split(VAR_1)[VAR_3]) for fn in VAR_0]\n",
"VAR_17 = pd.DataFrame({'fn': VAR_0, 'year': VAR_16})\n",
"VAR_19 = VAR_17[(VAR_17.year >= VAR_4) & (VAR_17.year <= VAR_5)]\n",
"return VAR_19.fn.tolist()\n"
] | [
"def only_years(files, begin=1901, end=2100, split_on='_', elem_year=-1):...\n",
"\"\"\"docstring\"\"\"\n",
"import pandas as pd\n",
"years = [int(fn.split('.')[0].split(split_on)[elem_year]) for fn in files]\n",
"df = pd.DataFrame({'fn': files, 'year': years})\n",
"df_slice = df[(df.year >= begin) & (df.year <= end)]\n",
"return df_slice.fn.tolist()\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Import'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_6(self, VAR_49):...\n",
""
] | [
"def run(self, sr_name):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def __init__(self, VAR_1, VAR_4=None):...\n",
"\"\"\"docstring\"\"\"\n",
"super(CLASS_0, self).__init__(VAR_1, VAR_4)\n",
"VAR_1 = self.bindings\n",
"VAR_1['TEST_APP_COMPONENT_NAME'] = '{app}-{stack}-{detail}'.format(app=\n bindings['TEST_APP'], stack=bindings['TEST_STACK'], detail=bindings[\n 'TEST_COMPONENT_DETAIL'])\n",
"self.TEST_APP = VAR_1['TEST_APP']\n"
] | [
"def __init__(self, bindings, agent=None):...\n",
"\"\"\"docstring\"\"\"\n",
"super(GoogleSmokeTestScenario, self).__init__(bindings, agent)\n",
"bindings = self.bindings\n",
"bindings['TEST_APP_COMPONENT_NAME'] = '{app}-{stack}-{detail}'.format(app=\n bindings['TEST_APP'], stack=bindings['TEST_STACK'], detail=bindings[\n 'TEST_COMPONENT_DETAIL'])\n",
"self.TEST_APP = bindings['TEST_APP']\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_5(VAR_1, *VAR_2, **VAR_3):...\n",
"def FUNC_6():...\n",
"if not request.method in VAR_0:\n",
"return True\n",
"if not request.environ.get('AUTH_TYPE') == 'cookie':\n",
"return True\n",
"if config.get('skip_authentication'):\n",
"return True\n",
"if request.environ.get('HTTP_REFERER'):\n",
"VAR_7 = urlparse(request.environ.get('HTTP_REFERER'))\n",
"if request.method == 'GET' and FUNC_4():\n",
"VAR_8 = VAR_7.hostname\n",
"return True\n",
"return False\n",
"if VAR_7.port:\n",
"VAR_8 += ':' + str(VAR_7.port)\n",
"if VAR_8.endswith(request.environ['adhocracy.domain']):\n",
"if request.method != 'GET':\n",
"return True\n"
] | [
"def _decorate(f, *a, **kw):...\n",
"def check():...\n",
"if not request.method in methods:\n",
"return True\n",
"if not request.environ.get('AUTH_TYPE') == 'cookie':\n",
"return True\n",
"if config.get('skip_authentication'):\n",
"return True\n",
"if request.environ.get('HTTP_REFERER'):\n",
"ref_url = urlparse(request.environ.get('HTTP_REFERER'))\n",
"if request.method == 'GET' and has_token():\n",
"ref_host = ref_url.hostname\n",
"return True\n",
"return False\n",
"if ref_url.port:\n",
"ref_host += ':' + str(ref_url.port)\n",
"if ref_host.endswith(request.environ['adhocracy.domain']):\n",
"if request.method != 'GET':\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
5,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Return'",
"Condition",
"AugAssign'",
"Condition",
"Condition",
"Return'"
] |
[
"def FUNC_9(self, VAR_12=None, VAR_13=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if not VAR_12 and not VAR_13:\n",
"VAR_12, VAR_13 = self.model.rowCount() + 1, self.model.rowCount(\n ) + self.model.item_load_batch\n",
"if self.filter_input and self.filter_input.text().lower():\n",
"VAR_24 = self.filter_input.text().lower()\n",
"VAR_24 = ''\n",
"VAR_19, VAR_20 = self._get_sort_parameters()\n",
"self.request_mgr = TriblerRequestManager()\n",
"self.request_mgr.perform_request('metadata/channels', self.on_channels,\n VAR_23={'first': start, 'last': end, 'sort_by': sort_by, 'sort_asc':\n sort_asc, 'filter': to_fts_query(filter_text), 'hide_xxx': self.model.\n hide_xxx, 'subscribed': self.model.subscribed})\n"
] | [
"def load_channels(self, start=None, end=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if not start and not end:\n",
"start, end = self.model.rowCount() + 1, self.model.rowCount(\n ) + self.model.item_load_batch\n",
"if self.filter_input and self.filter_input.text().lower():\n",
"filter_text = self.filter_input.text().lower()\n",
"filter_text = ''\n",
"sort_by, sort_asc = self._get_sort_parameters()\n",
"self.request_mgr = TriblerRequestManager()\n",
"self.request_mgr.perform_request('metadata/channels', self.on_channels,\n url_params={'first': start, 'last': end, 'sort_by': sort_by, 'sort_asc':\n sort_asc, 'filter': to_fts_query(filter_text), 'hide_xxx': self.model.\n hide_xxx, 'subscribed': self.model.subscribed})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_46(self, VAR_14):...\n",
"return self.list_properties[VAR_14].columns[0]\n"
] | [
"def get_property_first_col(self, col_name):...\n",
"return self.list_properties[col_name].columns[0]\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_3(VAR_9='', VAR_10='GET', VAR_7='', VAR_4={}):...\n",
"VAR_19 = CLASS_2(VAR_9.split('/'))\n",
"for VAR_20, val in VAR_4.iteritems():\n",
"VAR_19.headers[VAR_20.lower()] = val\n",
"VAR_19.method = VAR_10\n",
"if isinstance(VAR_7, str):\n",
"VAR_19.content = io.BytesIO(VAR_7)\n",
"for VAR_12, val in VAR_7.items():\n",
"return VAR_19\n",
"VAR_19.addArg(VAR_12, val)\n"
] | [
"def request_mock(path='', method='GET', body='', headers={}):...\n",
"dummy = PixRequestMock(path.split('/'))\n",
"for name, val in headers.iteritems():\n",
"dummy.headers[name.lower()] = val\n",
"dummy.method = method\n",
"if isinstance(body, str):\n",
"dummy.content = io.BytesIO(body)\n",
"for key, val in body.items():\n",
"return dummy\n",
"dummy.addArg(key, val)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"For",
"Return'",
"Expr'"
] |
[
"def FUNC_0(VAR_1: Project, VAR_2: str, VAR_3: typing.Optional[int]=None...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_4 = FileSource.objects.filter(VAR_1=project, VAR_2=path)\n",
"if VAR_3:\n",
"VAR_4 = VAR_4.exclude(pk=existing_source_pk)\n",
"if len(VAR_4):\n"
] | [
"def validate_unique_project_path(project: Project, path: str,...\n",
"\"\"\"docstring\"\"\"\n",
"existing_sources = FileSource.objects.filter(project=project, path=path)\n",
"if existing_source_pk:\n",
"existing_sources = existing_sources.exclude(pk=existing_source_pk)\n",
"if len(existing_sources):\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Condition"
] |
[
"def __init__(self, VAR_17, **VAR_18):...\n",
"self.value = VAR_17\n",
"for k, v in VAR_18.items():\n",
"setattr(self, k, v)\n"
] | [
"def __init__(self, value, **props):...\n",
"self.value = value\n",
"for k, v in props.items():\n",
"setattr(self, k, v)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Expr'"
] |
[
"@api.public...\n",
"self.response.write(json.dumps({'peer_id': api.get_peer_identity().to_bytes\n (), 'cur_id': api.get_current_identity().to_bytes()}))\n"
] | [
"@api.public...\n",
"self.response.write(json.dumps({'peer_id': api.get_peer_identity().to_bytes\n (), 'cur_id': api.get_current_identity().to_bytes()}))\n"
] | [
0,
0
] | [
"Condition",
"Expr'"
] |
[
"@staticmethod...\n",
"return FUNC_1(CLASS_0.PostDataToHandlerAsync(VAR_9, VAR_7, VAR_10))\n"
] | [
"@staticmethod...\n",
"return JsonFromFuture(BaseRequest.PostDataToHandlerAsync(data, handler,\n timeout))\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"from GeneralEngine.constants import *\n",
"import random\n",
"def __init__(self):...\n",
"self.actors = {}\n",
"self.ids = 0\n",
"self.removed_actors = 0\n",
"def FUNC_0(self, VAR_0, VAR_1):...\n",
"self.max_x = VAR_0\n",
"self.max_y = VAR_1\n",
"def FUNC_1(self, VAR_2):...\n",
"for VAR_6 in self.actors.keys():\n",
"if not self.actors[VAR_6].noclip:\n",
"def FUNC_2(self, VAR_3):...\n",
"VAR_7, VAR_8 = self.actors[VAR_6].getPos()\n",
"VAR_10 = [self.actors[VAR_6] for VAR_6 in self.actors.keys() if \n 'User Controlled' in self.actors[VAR_6].name and self.actors[VAR_6].visible\n ]\n",
"assert VAR_2[VAR_7][VAR_8].isTraversable(), 'Actor not within traversable unit'\n",
"VAR_11 = [user.getPos() for user in VAR_10]\n",
"VAR_16 = self.actors[VAR_6].getColliding()\n",
"VAR_12 = []\n",
"if VECTOR_RIGHT in VAR_16:\n",
"VAR_13 = [self.actors[VAR_6] for VAR_6 in self.actors.keys() if 'AI Driven' in\n self.actors[VAR_6].name and self.actors[VAR_6].visible]\n",
"if not VAR_2[(VAR_7 + 1) % len(VAR_2)][VAR_8].isTraversable():\n",
"if VECTOR_LEFT in VAR_16:\n",
"for AI in VAR_13:\n",
"self.actors[VAR_6].resetSubx()\n",
"if not VAR_2[(VAR_7 - 1) % len(VAR_2)][VAR_8].isTraversable():\n",
"if VECTOR_UP in VAR_16:\n",
"if AI.isCentered():\n",
"return VAR_12\n",
"self.actors[VAR_6].resetSubx()\n",
"if not VAR_2[VAR_7][(VAR_8 + 1) % len(VAR_2[0])].isTraversable():\n",
"if VECTOR_DOWN in VAR_16:\n",
"if AI.mode == 'random':\n",
"if AI.cur_path != []:\n",
"self.actors[VAR_6].resetSuby()\n",
"if not VAR_2[VAR_7][(VAR_8 - 1) % len(VAR_2[0])].isTraversable():\n",
"VAR_18 = [UP, DOWN, LEFT, RIGHT]\n",
"if AI.mode == 'pathToUser':\n",
"VAR_12.append([[AI.getExactPos()] + AI.cur_path[0][1:], AI.cur_path[1]])\n",
"self.actors[VAR_6].resetSuby()\n",
"VAR_19 = [VAR_3.getUnit(*AI.getPos()).neighbours[index].isTraversable() for\n index in VAR_18]\n",
"VAR_20 = VAR_3.shortPath(AI.getPos(), VAR_11)\n",
"if sum(VAR_19) == 0:\n",
"if VAR_20 is None:\n",
"if sum(VAR_19) == 1:\n",
"AI.setDirection(getDirection(vectorSubtract(VAR_20[0][1], VAR_20[0][0])))\n",
"for vec in range(len(VAR_18)):\n",
"VAR_21 = []\n",
"VAR_12.append(VAR_20)\n",
"if VAR_19[vec]:\n",
"for index in range(len(VAR_18)):\n",
"AI.cur_path = VAR_20\n",
"AI.setDirection(VECTORS[VAR_18[vec]])\n",
"if VECTORS[VAR_18[index]] != [(-1 * VAR_7) for VAR_7 in AI.getDirection()\n",
"AI.setDirection(random.choice(VAR_21))\n",
"VAR_21.append(VECTORS[VAR_18[index]])\n"
] | [
"from GeneralEngine.constants import *\n",
"import random\n",
"def __init__(self):...\n",
"self.actors = {}\n",
"self.ids = 0\n",
"self.removed_actors = 0\n",
"def bound(self, max_x, max_y):...\n",
"self.max_x = max_x\n",
"self.max_y = max_y\n",
"def boundIDbyBoard(self, board):...\n",
"for ID in self.actors.keys():\n",
"if not self.actors[ID].noclip:\n",
"def computeAI(self, boardObj):...\n",
"x, y = self.actors[ID].getPos()\n",
"users = [self.actors[ID] for ID in self.actors.keys() if 'User Controlled' in\n self.actors[ID].name and self.actors[ID].visible]\n",
"assert board[x][y].isTraversable(), 'Actor not within traversable unit'\n",
"user_locations = [user.getPos() for user in users]\n",
"vecs = self.actors[ID].getColliding()\n",
"all_paths = []\n",
"if VECTOR_RIGHT in vecs:\n",
"AIs = [self.actors[ID] for ID in self.actors.keys() if 'AI Driven' in self.\n actors[ID].name and self.actors[ID].visible]\n",
"if not board[(x + 1) % len(board)][y].isTraversable():\n",
"if VECTOR_LEFT in vecs:\n",
"for AI in AIs:\n",
"self.actors[ID].resetSubx()\n",
"if not board[(x - 1) % len(board)][y].isTraversable():\n",
"if VECTOR_UP in vecs:\n",
"if AI.isCentered():\n",
"return all_paths\n",
"self.actors[ID].resetSubx()\n",
"if not board[x][(y + 1) % len(board[0])].isTraversable():\n",
"if VECTOR_DOWN in vecs:\n",
"if AI.mode == 'random':\n",
"if AI.cur_path != []:\n",
"self.actors[ID].resetSuby()\n",
"if not board[x][(y - 1) % len(board[0])].isTraversable():\n",
"dirs = [UP, DOWN, LEFT, RIGHT]\n",
"if AI.mode == 'pathToUser':\n",
"all_paths.append([[AI.getExactPos()] + AI.cur_path[0][1:], AI.cur_path[1]])\n",
"self.actors[ID].resetSuby()\n",
"available = [boardObj.getUnit(*AI.getPos()).neighbours[index].isTraversable\n () for index in dirs]\n",
"path = boardObj.shortPath(AI.getPos(), user_locations)\n",
"if sum(available) == 0:\n",
"if path is None:\n",
"if sum(available) == 1:\n",
"AI.setDirection(getDirection(vectorSubtract(path[0][1], path[0][0])))\n",
"for vec in range(len(dirs)):\n",
"possible = []\n",
"all_paths.append(path)\n",
"if available[vec]:\n",
"for index in range(len(dirs)):\n",
"AI.cur_path = path\n",
"AI.setDirection(VECTORS[dirs[vec]])\n",
"if VECTORS[dirs[index]] != [(-1 * x) for x in AI.getDirection()] and available[\n",
"AI.setDirection(random.choice(possible))\n",
"possible.append(VECTORS[dirs[index]])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"FunctionDef'",
"For",
"Condition",
"FunctionDef'",
"Assign'",
"Assign'",
"Assert'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"For",
"Expr'",
"Condition",
"Condition",
"Condition",
"Return'",
"Expr'",
"Condition",
"Condition",
"Condition",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Expr'",
"For",
"Assign'",
"Expr'",
"Condition",
"For",
"Assign'",
"Expr'",
"For",
"Expr'",
"Expr'"
] |
[
"async def FUNC_2(self, VAR_5):...\n",
"if isinstance(VAR_5, PageIdle):\n",
"self._idle = bool(VAR_5)\n",
"if self._idle:\n",
"self._idleSince = self._loop.time()\n"
] | [
"async def push(self, item):...\n",
"if isinstance(item, PageIdle):\n",
"self._idle = bool(item)\n",
"if self._idle:\n",
"self._idleSince = self._loop.time()\n"
] | [
0,
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Condition",
"Assign'",
"Condition",
"Assign'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"self.common.create_snapshot(VAR_9)\n",
"self.common.client_logout()\n"
] | [
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"self.common.create_snapshot(snapshot)\n",
"self.common.client_logout()\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_4(self, VAR_7, VAR_12):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_17 = []\n",
"VAR_2 = VAR_1.view_submit(VAR_7)\n",
"for entry in VAR_12['file_selection']:\n",
"VAR_10 = copy.deepcopy(VAR_12['global'])\n",
"return VAR_17\n",
"VAR_10.update(entry)\n",
"VAR_11 = copy.deepcopy(VAR_12['global']['options'])\n",
"VAR_11.update(entry.get('per_file_options', {}))\n",
"VAR_19 = {'package': VAR_10.get('package'), 'timeout': VAR_10.get('timeout',\n 120), 'priority': VAR_10.get('priority'), 'custom': VAR_10.get('custom'\n ), 'owner': VAR_10.get('owner'), 'tags': VAR_10.get('tags'), 'memory':\n VAR_10.get('memory'), 'enforce_timeout': VAR_11.get('enforce-timeout'),\n 'machine': VAR_10.get('machine'), 'platform': VAR_10.get('platform'),\n 'options': self.translate_options(VAR_10, VAR_11), 'submit_id': VAR_7}\n",
"if entry['type'] == 'url':\n",
"VAR_17.append(VAR_1.add_url(url=info['filename'], **kw))\n",
"VAR_20 = Folders.create_temp()\n",
"if not VAR_10['extrpath']:\n",
"VAR_24 = os.path.join(VAR_2.tmp_path, os.path.basename(VAR_10['filename']))\n",
"if len(VAR_10['extrpath']) == 1:\n",
"VAR_18 = Files.copy(VAR_24, VAR_20=path_dest)\n",
"VAR_25 = os.path.join(VAR_2.tmp_path, os.path.basename(VAR_10['arcname']))\n",
"VAR_25 = os.path.join(VAR_2.tmp_path, os.path.basename(VAR_10['arcname']))\n",
"VAR_17.append(VAR_1.add_path(file_path=filepath, **kw))\n",
"if not os.path.exists(VAR_25):\n",
"if not os.path.exists(VAR_25):\n",
"VAR_2.data['errors'].append('Unable to find parent archive file: %s' % os.\n path.basename(VAR_10['arcname']))\n",
"VAR_26 = sflock.zipify(sflock.unpack(VAR_10['arcname'], contents=open(\n arcpath, 'rb').read()))\n",
"VAR_2.data['errors'].append('Unable to find parent archive file: %s' % os.\n path.basename(VAR_10['arcname']))\n",
"VAR_27 = sflock.unpack(VAR_25).read(VAR_10['extrpath'][:-1])\n",
"VAR_25 = Files.temp_named_put(VAR_26, os.path.basename(VAR_10['arcname']))\n",
"VAR_28 = sflock.unpack(VAR_10['extrpath'][-2], contents=content)\n",
"VAR_17.append(VAR_1.add_archive(file_path=arcpath, VAR_22=info['filename'],\n **kw))\n",
"VAR_25 = Files.temp_named_put(sflock.zipify(VAR_28), os.path.basename(\n VAR_10['extrpath'][-2]))\n",
"VAR_17.append(VAR_1.add_archive(file_path=arcpath, VAR_22=info['filename'],\n **kw))\n"
] | [
"def submit(self, submit_id, config):...\n",
"\"\"\"docstring\"\"\"\n",
"ret = []\n",
"submit = db.view_submit(submit_id)\n",
"for entry in config['file_selection']:\n",
"info = copy.deepcopy(config['global'])\n",
"return ret\n",
"info.update(entry)\n",
"options = copy.deepcopy(config['global']['options'])\n",
"options.update(entry.get('per_file_options', {}))\n",
"kw = {'package': info.get('package'), 'timeout': info.get('timeout', 120),\n 'priority': info.get('priority'), 'custom': info.get('custom'), 'owner':\n info.get('owner'), 'tags': info.get('tags'), 'memory': info.get(\n 'memory'), 'enforce_timeout': options.get('enforce-timeout'), 'machine':\n info.get('machine'), 'platform': info.get('platform'), 'options': self.\n translate_options(info, options), 'submit_id': submit_id}\n",
"if entry['type'] == 'url':\n",
"ret.append(db.add_url(url=info['filename'], **kw))\n",
"path_dest = Folders.create_temp()\n",
"if not info['extrpath']:\n",
"path = os.path.join(submit.tmp_path, os.path.basename(info['filename']))\n",
"if len(info['extrpath']) == 1:\n",
"filepath = Files.copy(path, path_dest=path_dest)\n",
"arcpath = os.path.join(submit.tmp_path, os.path.basename(info['arcname']))\n",
"arcpath = os.path.join(submit.tmp_path, os.path.basename(info['arcname']))\n",
"ret.append(db.add_path(file_path=filepath, **kw))\n",
"if not os.path.exists(arcpath):\n",
"if not os.path.exists(arcpath):\n",
"submit.data['errors'].append('Unable to find parent archive file: %s' % os.\n path.basename(info['arcname']))\n",
"arc = sflock.zipify(sflock.unpack(info['arcname'], contents=open(arcpath,\n 'rb').read()))\n",
"submit.data['errors'].append('Unable to find parent archive file: %s' % os.\n path.basename(info['arcname']))\n",
"content = sflock.unpack(arcpath).read(info['extrpath'][:-1])\n",
"arcpath = Files.temp_named_put(arc, os.path.basename(info['arcname']))\n",
"subarc = sflock.unpack(info['extrpath'][-2], contents=content)\n",
"ret.append(db.add_archive(file_path=arcpath, filename=info['filename'], **kw))\n",
"arcpath = Files.temp_named_put(sflock.zipify(subarc), os.path.basename(info\n ['extrpath'][-2]))\n",
"ret.append(db.add_archive(file_path=arcpath, filename=info['filename'], **kw))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"For",
"Assign'",
"Return'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_1(self, VAR_10):...\n",
"VAR_11 = 0\n",
"VAR_12 = False\n",
"while not VAR_12:\n",
"if self.instrPtr == 2:\n",
"VAR_10.append(self.symbols)\n",
"VAR_13 = self.opcodes[self.instrPtr]\n",
"return []\n",
"VAR_14 = VAR_13[0]\n",
"if VAR_14 in oplists.terminalOps:\n",
"if VAR_14 in oplists.arithOps:\n",
"ophandlers.handleArithOps(VAR_13, self.stack, self.symbols, self.symId)\n",
"if VAR_14 in oplists.boolOps:\n",
"self.instrPtr = VAR_13[-1]\n",
"ophandlers.handleBoolOp(VAR_13, self.stack, self.symbols, self.symId)\n",
"if VAR_14 == 'SHA3':\n",
"VAR_12 = self.instrPtr >= len(self.opcodes)\n",
"if VAR_14 in oplists.envOps:\n",
"ophandlers.handleEnvOps(VAR_13, self.stack, self.memory, self.symbols, self\n .userIn, self.symId)\n",
"if VAR_14 in oplists.blockOps:\n",
"ophandlers.handleBlockOps(VAR_13, self.stack, self.symbols)\n",
"if VAR_14 in oplists.jumpOps:\n",
"VAR_15 = ophandlers.handleJumpOps(VAR_14, self.stack, self.opcodes, self.\n symbols, self.symId)\n",
"if VAR_14 in oplists.memOps:\n",
"if VAR_15[0] != -1 and VAR_15[1] != -1:\n",
"ophandlers.handleMemoryOps(VAR_13, self.stack, self.memory, self.symbols)\n",
"if VAR_14 in oplists.storOps:\n",
"self.instrPtr, VAR_12 = VAR_15\n",
"if VAR_15[1] == -1:\n",
"ophandlers.handleStorageOps(VAR_13, self.stack, self.storage, self.symbols,\n self.userIn)\n",
"if VAR_14 == 'JUMPDEST':\n",
"self.instrPtr = VAR_13[-1]\n",
"if VAR_14 == 'POP':\n",
"VAR_16 = CLASS_0(self.opcodes, self.functions, self.stack[:], self.memory[:\n ], deepcopy(self.storage), deepcopy(self.symbols), self.userIn[:], self\n .instrPtr)\n",
"self.stack.pop()\n",
"if VAR_14 == 'PC':\n",
"VAR_12 = VAR_16.takeJumpPath(VAR_15[0], self.symbols)\n",
"self.stack.append(i)\n",
"if VAR_14[:4] == 'PUSH':\n",
"if VAR_12:\n",
"self.stack.append(VAR_14[7:])\n",
"if VAR_14[:3] == 'DUP':\n",
"return [self]\n",
"print('splitting')\n",
"ophandlers.handleDupOp(VAR_14, self.symbols, self.stack, self.symId)\n",
"if VAR_14[:4] == 'SWAP':\n",
"return [self, VAR_16]\n",
"VAR_17 = int(VAR_14[4:])\n",
"if VAR_14[:3] == 'LOG':\n",
"VAR_18 = self.stack[-VAR_17]\n",
"self.stack[-VAR_17] = self.stack[-1]\n",
"self.stack[-1] = VAR_18\n"
] | [
"def traverse(self, pathSymbols):...\n",
"gasCost = 0\n",
"stop = False\n",
"while not stop:\n",
"if self.instrPtr == 2:\n",
"pathSymbols.append(self.symbols)\n",
"item = self.opcodes[self.instrPtr]\n",
"return []\n",
"op = item[0]\n",
"if op in oplists.terminalOps:\n",
"if op in oplists.arithOps:\n",
"ophandlers.handleArithOps(item, self.stack, self.symbols, self.symId)\n",
"if op in oplists.boolOps:\n",
"self.instrPtr = item[-1]\n",
"ophandlers.handleBoolOp(item, self.stack, self.symbols, self.symId)\n",
"if op == 'SHA3':\n",
"stop = self.instrPtr >= len(self.opcodes)\n",
"if op in oplists.envOps:\n",
"ophandlers.handleEnvOps(item, self.stack, self.memory, self.symbols, self.\n userIn, self.symId)\n",
"if op in oplists.blockOps:\n",
"ophandlers.handleBlockOps(item, self.stack, self.symbols)\n",
"if op in oplists.jumpOps:\n",
"result = ophandlers.handleJumpOps(op, self.stack, self.opcodes, self.\n symbols, self.symId)\n",
"if op in oplists.memOps:\n",
"if result[0] != -1 and result[1] != -1:\n",
"ophandlers.handleMemoryOps(item, self.stack, self.memory, self.symbols)\n",
"if op in oplists.storOps:\n",
"self.instrPtr, stop = result\n",
"if result[1] == -1:\n",
"ophandlers.handleStorageOps(item, self.stack, self.storage, self.symbols,\n self.userIn)\n",
"if op == 'JUMPDEST':\n",
"self.instrPtr = item[-1]\n",
"if op == 'POP':\n",
"ep1 = ExecutionPath(self.opcodes, self.functions, self.stack[:], self.\n memory[:], deepcopy(self.storage), deepcopy(self.symbols), self.userIn[\n :], self.instrPtr)\n",
"self.stack.pop()\n",
"if op == 'PC':\n",
"stop = ep1.takeJumpPath(result[0], self.symbols)\n",
"self.stack.append(i)\n",
"if op[:4] == 'PUSH':\n",
"if stop:\n",
"self.stack.append(op[7:])\n",
"if op[:3] == 'DUP':\n",
"return [self]\n",
"print('splitting')\n",
"ophandlers.handleDupOp(op, self.symbols, self.stack, self.symId)\n",
"if op[:4] == 'SWAP':\n",
"return [self, ep1]\n",
"num = int(op[4:])\n",
"if op[:3] == 'LOG':\n",
"tmp = self.stack[-num]\n",
"self.stack[-num] = self.stack[-1]\n",
"self.stack[-1] = tmp\n"
] | [
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
1,
0,
0,
1,
0,
0,
1,
0,
1,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Expr'",
"Condition",
"Return'",
"Expr'",
"Expr'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def __init__(self, VAR_6, VAR_7=None):...\n",
"self.name = VAR_6\n",
"self.display_name = VAR_7\n",
"self.type = 'text'\n",
"self.filter_string = None\n"
] | [
"def __init__(self, name, display_name=None):...\n",
"self.name = name\n",
"self.display_name = display_name\n",
"self.type = 'text'\n",
"self.filter_string = None\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"@VAR_1.route('/discover_new_cuisines/<int:cuisine_id>')...\n",
"VAR_0.info('GET discover_new_cuisines query')\n",
"if VAR_9 in VAR_4:\n",
"VAR_29, VAR_30 = VAR_4[VAR_9]\n",
"VAR_18 = VAR_3.discover_new_cuisines_from_cuisine(VAR_9)\n",
"if datetime.now() < VAR_29 + VAR_6:\n",
"if VAR_18 == -1:\n",
"return VAR_30\n",
"return None\n",
"VAR_4[VAR_9] = datetime.now(), VAR_18\n",
"return VAR_18\n"
] | [
"@app.route('/discover_new_cuisines/<int:cuisine_id>')...\n",
"logger.info('GET discover_new_cuisines query')\n",
"if cuisine_id in cuisine_discovery_cache:\n",
"insert_time, data = cuisine_discovery_cache[cuisine_id]\n",
"query_res = database.discover_new_cuisines_from_cuisine(cuisine_id)\n",
"if datetime.now() < insert_time + cache_persistence_time:\n",
"if query_res == -1:\n",
"return data\n",
"return None\n",
"cuisine_discovery_cache[cuisine_id] = datetime.now(), query_res\n",
"return query_res\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Return'",
"Return'",
"Assign'",
"Return'"
] |
[
"import logging\n",
"import tornado.httpserver\n",
"import tornado.ioloop\n",
"import tornado.web\n",
"from tornado.options import define, options\n",
"import application\n",
"import handlers\n",
"import settings\n",
"import beacon\n",
"define('port', default=4000, help='run on the given port', type=int)\n",
"define('develop', default=False, help='Run in develop environment', type=bool)\n",
"VAR_0 = VAR_1.redirect_uri\n",
"VAR_1 = {'debug': False, 'cookie_secret': VAR_1.cookie_secret, 'login_url':\n '/login', 'google_oauth': {'key': VAR_1.google_key, 'secret': VAR_1.\n google_secret}, 'contact_person': '[email protected]',\n 'redirect_uri': VAR_0, 'template_path': 'templates/'}\n",
"def __init__(self, VAR_1):...\n",
"self.declared_handlers = [('/static/(.*)', tornado.web.StaticFileHandler, {\n 'path': 'static/'}), ('/(favicon.ico)', tornado.web.StaticFileHandler,\n {'path': 'static/img/'}), ('/release/(?P<dataset>[^\\\\/]+)/(?P<file>.*)',\n handlers.AuthorizedStaticNginxFileHanlder, {'path': '/release-files/'}),\n ('/login', handlers.LoginHandler), ('/logout', handlers.LogoutHandler),\n ('/api/countries', VAR_2.CountryList), ('/api/users/me', VAR_2.GetUser),\n ('/api/datasets', VAR_2.ListDatasets), (\n '/api/datasets/(?P<dataset>[^\\\\/]+)', VAR_2.GetDataset), (\n '/api/datasets/(?P<dataset>[^\\\\/]+)/log/(?P<event>[^\\\\/]+)', VAR_2.\n LogEvent), ('/api/datasets/(?P<dataset>[^\\\\/]+)/logo', VAR_2.ServeLogo),\n ('/api/datasets/(?P<dataset>[^\\\\/]+)/files', VAR_2.DatasetFiles), (\n '/api/datasets/(?P<dataset>[^\\\\/]+)/sample_set', VAR_2.SampleSet), (\n '/api/datasets/(?P<dataset>[^\\\\/]+)/users', VAR_2.DatasetUsers), (\n '/api/datasets/(?P<dataset>[^\\\\/]+)/users/(?P<email>[^\\\\/]+)/request',\n VAR_2.RequestAccess), (\n '/api/datasets/(?P<dataset>[^\\\\/]+)/users/(?P<email>[^\\\\/]+)/approve',\n VAR_2.ApproveUser), (\n '/api/datasets/(?P<dataset>[^\\\\/]+)/users/(?P<email>[^\\\\/]+)/revoke',\n VAR_2.RevokeUser), ('/api/query', beacon.Query), ('/api/info', beacon.\n Info), ('/query', beacon.Query), ('/info', tornado.web.RedirectHandler,\n {'url': '/api/info'}), ('.*', VAR_2.Home)]\n",
"self.oauth_key = VAR_1['google_oauth']['key']\n",
"tornado.web.Application.__init__(self, self.declared_handlers, **settings)\n",
"if __name__ == '__main__':\n",
"tornado.log.enable_pretty_logging()\n",
"tornado.options.parse_command_line()\n",
"if options.develop:\n",
"VAR_1['debug'] = True\n",
"VAR_2 = CLASS_0(VAR_1)\n",
"VAR_1['develop'] = True\n",
"VAR_2.listen(options.port)\n",
"logging.getLogger().setLevel(logging.DEBUG)\n",
"VAR_3 = tornado.httpserver.HTTPServer(VAR_2)\n",
"VAR_4 = tornado.ioloop.IOLoop.instance()\n",
"VAR_4.start()\n"
] | [
"import logging\n",
"import tornado.httpserver\n",
"import tornado.ioloop\n",
"import tornado.web\n",
"from tornado.options import define, options\n",
"import application\n",
"import handlers\n",
"import settings\n",
"import beacon\n",
"define('port', default=4000, help='run on the given port', type=int)\n",
"define('develop', default=False, help='Run in develop environment', type=bool)\n",
"redirect_uri = settings.redirect_uri\n",
"settings = {'debug': False, 'cookie_secret': settings.cookie_secret,\n 'login_url': '/login', 'google_oauth': {'key': settings.google_key,\n 'secret': settings.google_secret}, 'contact_person':\n '[email protected]', 'redirect_uri': redirect_uri,\n 'template_path': 'templates/'}\n",
"def __init__(self, settings):...\n",
"self.declared_handlers = [('/static/(.*)', tornado.web.StaticFileHandler, {\n 'path': 'static/'}), ('/(favicon.ico)', tornado.web.StaticFileHandler,\n {'path': 'static/img/'}), ('/release/(?P<dataset>[^\\\\/]+)/(?P<file>.*)',\n handlers.AuthorizedStaticNginxFileHanlder, {'path': '/release-files/'}),\n ('/login', handlers.LoginHandler), ('/logout', handlers.LogoutHandler),\n ('/api/countries', application.CountryList), ('/api/users/me',\n application.GetUser), ('/api/datasets', application.ListDatasets), (\n '/api/datasets/(?P<dataset>[^\\\\/]+)', application.GetDataset), (\n '/api/datasets/(?P<dataset>[^\\\\/]+)/log/(?P<event>[^\\\\/]+)',\n application.LogEvent), ('/api/datasets/(?P<dataset>[^\\\\/]+)/logo',\n application.ServeLogo), ('/api/datasets/(?P<dataset>[^\\\\/]+)/files',\n application.DatasetFiles), (\n '/api/datasets/(?P<dataset>[^\\\\/]+)/sample_set', application.SampleSet),\n ('/api/datasets/(?P<dataset>[^\\\\/]+)/users', application.DatasetUsers),\n ('/api/datasets/(?P<dataset>[^\\\\/]+)/users/(?P<email>[^\\\\/]+)/request',\n application.RequestAccess), (\n '/api/datasets/(?P<dataset>[^\\\\/]+)/users/(?P<email>[^\\\\/]+)/approve',\n application.ApproveUser), (\n '/api/datasets/(?P<dataset>[^\\\\/]+)/users/(?P<email>[^\\\\/]+)/revoke',\n application.RevokeUser), ('/api/query', beacon.Query), ('/api/info',\n beacon.Info), ('/query', beacon.Query), ('/info', tornado.web.\n RedirectHandler, {'url': '/api/info'}), ('.*', application.Home)]\n",
"self.oauth_key = settings['google_oauth']['key']\n",
"tornado.web.Application.__init__(self, self.declared_handlers, **settings)\n",
"if __name__ == '__main__':\n",
"tornado.log.enable_pretty_logging()\n",
"tornado.options.parse_command_line()\n",
"if options.develop:\n",
"settings['debug'] = True\n",
"application = Application(settings)\n",
"settings['develop'] = True\n",
"application.listen(options.port)\n",
"logging.getLogger().setLevel(logging.DEBUG)\n",
"http_server = tornado.httpserver.HTTPServer(application)\n",
"ioloop = tornado.ioloop.IOLoop.instance()\n",
"ioloop.start()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"Import'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_6(VAR_6, VAR_7):...\n",
"VAR_12 = f\"\"\"\n DELETE FROM {VAR_1} \n WHERE user_id={VAR_6} && product_id={VAR_7}\n \"\"\"\n",
"VAR_14 = create_connection()\n",
"VAR_14.close()\n",
"VAR_15 = VAR_14.cursor()\n",
"VAR_15.execute(VAR_12)\n",
"VAR_14.commit()\n",
"return 'Ok'\n"
] | [
"def delete_product_from_cart(userId, productId):...\n",
"sql_query = f\"\"\"\n DELETE FROM {CARTS_TABLE} \n WHERE user_id={userId} && product_id={productId}\n \"\"\"\n",
"connection = create_connection()\n",
"connection.close()\n",
"cursor = connection.cursor()\n",
"cursor.execute(sql_query)\n",
"connection.commit()\n",
"return 'Ok'\n"
] | [
0,
4,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_46(self, VAR_16):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_56 = self.regex.search(VAR_16)\n",
"if VAR_56 is not None:\n",
"return VAR_56.group('value')\n"
] | [
"def is_periodic(self, value):...\n",
"\"\"\"docstring\"\"\"\n",
"m = self.regex.search(value)\n",
"if m is not None:\n",
"return m.group('value')\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_1(VAR_0, VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"if not hasattr(VAR_1, 'queries'):\n",
"VAR_18 = fd.read()\n",
"return VAR_1.queries\n",
"VAR_14 = VAR_18.split(';')\n",
"VAR_1.queries = {}\n",
"for VAR_19 in VAR_14:\n",
"VAR_19 = re.sub('\\\\s*--\\\\s*|\\\\s*\\\\n\\\\s*', ' ', VAR_19)\n",
"VAR_2 = VAR_19.split(':')\n",
"VAR_1.queries[VAR_2[0]] = VAR_2[1]\n"
] | [
"def get_queries(app, context):...\n",
"\"\"\"docstring\"\"\"\n",
"if not hasattr(context, 'queries'):\n",
"sqlFile = fd.read()\n",
"return context.queries\n",
"sqlCommands = sqlFile.split(';')\n",
"context.queries = {}\n",
"for command in sqlCommands:\n",
"command = re.sub('\\\\s*--\\\\s*|\\\\s*\\\\n\\\\s*', ' ', command)\n",
"query = command.split(':')\n",
"context.queries[query[0]] = query[1]\n"
] | [
0,
0,
4,
4,
4,
4,
4,
4,
4,
4,
4
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Assign'"
] |
[
"@login_required()...\n",
"VAR_3 = get_object_or_404(DataDocument, VAR_1=pk)\n",
"VAR_4 = Script.objects.get(title='Manual (dummy)', script_type='EX')\n",
"VAR_5, VAR_6 = ExtractedText.objects.get_or_create(data_document=doc,\n extraction_script=script)\n",
"if VAR_6:\n",
"VAR_5.doc_date = 'please add...'\n",
"VAR_7, VAR_8 = create_detail_formset(VAR_3)\n",
"VAR_9 = VAR_7(VAR_0.POST or None, instance=extext)\n",
"VAR_10 = VAR_8(VAR_0.POST or None, instance=extext, prefix='habits')\n",
"if VAR_0.method == 'POST' and 'save' in VAR_0.POST:\n",
"if VAR_10.is_valid() and VAR_9.is_valid():\n",
"VAR_11 = {'doc': VAR_3, 'ext_form': VAR_9, 'hp_formset': VAR_10}\n",
"if not VAR_3.extracted:\n",
"return render(VAR_0, VAR_2, VAR_11)\n",
"VAR_3.extracted = True\n",
"VAR_10.save()\n",
"VAR_3.save()\n",
"VAR_9.save()\n",
"return HttpResponseRedirect(f'/habitsandpractices/{VAR_3.pk}')\n"
] | [
"@login_required()...\n",
"doc = get_object_or_404(DataDocument, pk=pk)\n",
"script = Script.objects.get(title='Manual (dummy)', script_type='EX')\n",
"extext, created = ExtractedText.objects.get_or_create(data_document=doc,\n extraction_script=script)\n",
"if created:\n",
"extext.doc_date = 'please add...'\n",
"ExtractedTextForm, HnPFormSet = create_detail_formset(doc)\n",
"ext_form = ExtractedTextForm(request.POST or None, instance=extext)\n",
"hp_formset = HnPFormSet(request.POST or None, instance=extext, prefix='habits')\n",
"if request.method == 'POST' and 'save' in request.POST:\n",
"if hp_formset.is_valid() and ext_form.is_valid():\n",
"context = {'doc': doc, 'ext_form': ext_form, 'hp_formset': hp_formset}\n",
"if not doc.extracted:\n",
"return render(request, template_name, context)\n",
"doc.extracted = True\n",
"hp_formset.save()\n",
"doc.save()\n",
"ext_form.save()\n",
"return HttpResponseRedirect(f'/habitsandpractices/{doc.pk}')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_6(self, VAR_46, VAR_47):...\n",
"return True\n"
] | [
"def run(self, vh, thing_name):...\n",
"return True\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"VAR_8 = Tag.objects.order_by('id')\n",
"return VAR_8\n"
] | [
"def get_queryset(self):...\n",
"query = Tag.objects.order_by('id')\n",
"return query\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"@VAR_0.route('/search', methods=['POST'])...\n",
"if request.method == 'POST':\n",
"VAR_6 = request.form['search']\n",
"VAR_22 = VAR_18.infer_vector(VAR_6.split())\n",
"VAR_23 = VAR_18.docvecs.most_similar(positive=[q_vec], topn=100)\n",
"VAR_23 = [int(r[0]) for r in VAR_23]\n",
"VAR_23 = FUNC_1(VAR_23)\n",
"return render_template('search.html', VAR_19=results)\n"
] | [
"@application.route('/search', methods=['POST'])...\n",
"if request.method == 'POST':\n",
"query = request.form['search']\n",
"q_vec = model.infer_vector(query.split())\n",
"results = model.docvecs.most_similar(positive=[q_vec], topn=100)\n",
"results = [int(r[0]) for r in results]\n",
"results = get_articles(results)\n",
"return render_template('search.html', articles=results)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@integration_synonym_api...\n",
"FUNC_2(VAR_2)\n",
"FUNC_3(VAR_2, 'JASMINE', VAR_7='1')\n",
"FUNC_5(VAR_4, VAR_5, VAR_11='OSMOND', VAR_10=[{'name': '----OSMOND'}])\n"
] | [
"@integration_synonym_api...\n",
"clean_database(solr)\n",
"seed_database_with(solr, 'JASMINE', id='1')\n",
"verify_results(client, jwt, query='OSMOND', expected=[{'name': '----OSMOND'}])\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_1(self):...\n",
"VAR_5 = dict()\n",
"VAR_6 = 0\n",
"VAR_7 = 0\n",
"VAR_8 = 0\n",
"VAR_5['inverters'] = dict()\n",
"VAR_9 = self.get_inverters()\n",
"for VAR_28 in VAR_9:\n",
"VAR_27 = 0\n",
"VAR_5['dayTotal'] = VAR_6\n",
"if VAR_28['etotal'] is not None:\n",
"VAR_5['total'] = VAR_7\n",
"VAR_27 = round(VAR_28['etotal'] / 1000 * self.co2_mult)\n",
"VAR_5['inverters'][VAR_28['serial']] = {'serial': VAR_28['serial'], 'name':\n VAR_28['name'], 'lastUpdated': VAR_28['ts'], 'dayTotal': VAR_28[\n 'etoday'], 'total': VAR_28['etotal'], 'status': VAR_28['status'], 'co2':\n VAR_27}\n",
"VAR_5['co2'] = VAR_8\n",
"if VAR_28['etoday'] is not None:\n",
"return VAR_5\n",
"VAR_6 += VAR_28['etoday']\n",
"if VAR_28['etotal'] is not None:\n",
"VAR_7 += VAR_28['etotal']\n",
"VAR_8 += VAR_27\n"
] | [
"def get_today(self):...\n",
"data = dict()\n",
"total_day = 0\n",
"total = 0\n",
"co2 = 0\n",
"data['inverters'] = dict()\n",
"inverters = self.get_inverters()\n",
"for inv in inverters:\n",
"inv_co2 = 0\n",
"data['dayTotal'] = total_day\n",
"if inv['etotal'] is not None:\n",
"data['total'] = total\n",
"inv_co2 = round(inv['etotal'] / 1000 * self.co2_mult)\n",
"data['inverters'][inv['serial']] = {'serial': inv['serial'], 'name': inv[\n 'name'], 'lastUpdated': inv['ts'], 'dayTotal': inv['etoday'], 'total':\n inv['etotal'], 'status': inv['status'], 'co2': inv_co2}\n",
"data['co2'] = co2\n",
"if inv['etoday'] is not None:\n",
"return data\n",
"total_day += inv['etoday']\n",
"if inv['etotal'] is not None:\n",
"total += inv['etotal']\n",
"co2 += inv_co2\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"AugAssign'",
"Condition",
"AugAssign'",
"AugAssign'"
] |
[
"def FUNC_0(self):...\n",
"VAR_5 = database.users.get_current_user()\n",
"if VAR_5:\n",
"VAR_1 = db.GqlQuery(\n 'SELECT * FROM Item WHERE created_by_id = :1 ORDER BY created_at DESC',\n VAR_5.user_id())\n",
"self.redirect('/')\n",
"database.render_template(self, 'items/my_items.html', {'items': VAR_1})\n"
] | [
"def get(self):...\n",
"user = database.users.get_current_user()\n",
"if user:\n",
"items = db.GqlQuery(\n 'SELECT * FROM Item WHERE created_by_id = :1 ORDER BY created_at DESC',\n user.user_id())\n",
"self.redirect('/')\n",
"database.render_template(self, 'items/my_items.html', {'items': items})\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_9(self):...\n",
"self._tab_after('ls comp')\n",
"eq_(self.terminal.method_calls, [('write', ('utes ',), {})])\n"
] | [
"def test_spaces_between_arg(self):...\n",
"self._tab_after('ls comp')\n",
"eq_(self.terminal.method_calls, [('write', ('utes ',), {})])\n"
] | [
0,
0,
1
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"def FUNC_0(VAR_0, VAR_1):...\n",
"VAR_4 = 'curl ' + start_url + VAR_0 + '?'\n",
"VAR_5 = VAR_1.keys()\n",
"VAR_6 = VAR_1.values()\n",
"VAR_7 = [(VAR_5[i] + '=' + VAR_6[i]) for i in range(len(VAR_5))]\n",
"VAR_8 = '&'.join(VAR_7)\n",
"VAR_4 += VAR_8\n",
"return VAR_4\n"
] | [
"def createGetScript(endpoint, params):...\n",
"script = 'curl ' + start_url + endpoint + '?'\n",
"keys = params.keys()\n",
"values = params.values()\n",
"pair = [(keys[i] + '=' + values[i]) for i in range(len(keys))]\n",
"evil_param = '&'.join(pair)\n",
"script += evil_param\n",
"return script\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"AugAssign'",
"Return'"
] |
[
"def FUNC_6(self, VAR_9):...\n",
"self.command(VAR_9, VAR_11='Error continuing DUT', VAR_12=False)\n",
"db.log_event('Information', 'Debugger', 'Continue DUT')\n"
] | [
"def continue_dut(self, continue_command):...\n",
"self.command(continue_command, error_message='Error continuing DUT',\n log_event=False)\n",
"db.log_event('Information', 'Debugger', 'Continue DUT')\n"
] | [
0,
0,
2
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"def __init__(self, VAR_1, VAR_5, VAR_6, VAR_4=None):...\n",
"GenericRequest.__init__(self, VAR_1, VAR_4)\n",
"self.url = '%stasks/%s/statements/%s' % (self.base_url, VAR_5, VAR_6)\n",
"self.task_id = VAR_5\n"
] | [
"def __init__(self, browser, task_id, language_code, base_url=None):...\n",
"GenericRequest.__init__(self, browser, base_url)\n",
"self.url = '%stasks/%s/statements/%s' % (self.base_url, task_id, language_code)\n",
"self.task_id = task_id\n"
] | [
0,
0,
5,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'"
] |
[
"@defer.inlineCallbacks...\n",
"VAR_14 = VAR_13.user_auth.uuid\n",
"if not self._services_factory.is_logged_in(VAR_14):\n",
"yield self._services_factory.create_services_from(VAR_13)\n",
"self._init_http_session(VAR_5, VAR_14)\n"
] | [
"@defer.inlineCallbacks...\n",
"user_id = leap_session.user_auth.uuid\n",
"if not self._services_factory.is_logged_in(user_id):\n",
"yield self._services_factory.create_services_from(leap_session)\n",
"self._init_http_session(request, user_id)\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Expr'",
"Expr'"
] |
[
"@scenario('../features/dns_resolution.feature', 'check DNS')...\n",
""
] | [
"@scenario('../features/dns_resolution.feature', 'check DNS')...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"def FUNC_7(self):...\n",
"return not self.is_missing_points()\n"
] | [
"def is_passed(self):...\n",
"return not self.is_missing_points()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def __hash__(self):...\n",
"return hash(self.name)\n"
] | [
"def __hash__(self):...\n",
"return hash(self.name)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_2():...\n",
"VAR_1 = int(input('Enter query option number: '))\n",
"if VAR_1 == 1:\n",
"etc_open_beds()\n",
"if VAR_1 == 2:\n",
"FUNC_1()\n",
"age_edu_sex_country()\n",
"if VAR_1 == 3:\n",
"FUNC_1()\n",
"count_sex_educ()\n",
"if VAR_1 == 4:\n",
"FUNC_1()\n",
"partner_lat_long()\n",
"if VAR_1 == 5:\n",
"FUNC_1()\n",
"org_ETC_codes()\n",
"if VAR_1 == 6:\n",
"FUNC_1()\n",
"distinct_org_types()\n",
"if VAR_1 == 7:\n",
"FUNC_1()\n",
"respondent_country_info()\n",
"if VAR_1 == 8:\n",
"FUNC_1()\n",
"non_closed_ETC_partner()\n",
"if VAR_1 == 9:\n",
"FUNC_1()\n",
"country_gdp()\n",
"if VAR_1 == 10:\n",
"FUNC_1()\n",
"count_organized()\n",
"if VAR_1 == 11:\n",
"FUNC_1()\n",
"surveyresp_country_byAge()\n",
"if VAR_1 == 12:\n",
"FUNC_1()\n",
"etc_limited_byName()\n",
"if VAR_1 == 13:\n",
"FUNC_1()\n",
"partner_org_limited_byCountry()\n",
"if VAR_1 == 14:\n",
"FUNC_1()\n",
"avg_age_resp()\n",
"if VAR_1 == 15:\n",
"FUNC_1()\n",
"avg_edu_resp()\n",
"print('Sorry, that is not an option.')\n",
"FUNC_1()\n",
"FUNC_1()\n"
] | [
"def run_query_case():...\n",
"case = int(input('Enter query option number: '))\n",
"if case == 1:\n",
"etc_open_beds()\n",
"if case == 2:\n",
"run_another()\n",
"age_edu_sex_country()\n",
"if case == 3:\n",
"run_another()\n",
"count_sex_educ()\n",
"if case == 4:\n",
"run_another()\n",
"partner_lat_long()\n",
"if case == 5:\n",
"run_another()\n",
"org_ETC_codes()\n",
"if case == 6:\n",
"run_another()\n",
"distinct_org_types()\n",
"if case == 7:\n",
"run_another()\n",
"respondent_country_info()\n",
"if case == 8:\n",
"run_another()\n",
"non_closed_ETC_partner()\n",
"if case == 9:\n",
"run_another()\n",
"country_gdp()\n",
"if case == 10:\n",
"run_another()\n",
"count_organized()\n",
"if case == 11:\n",
"run_another()\n",
"surveyresp_country_byAge()\n",
"if case == 12:\n",
"run_another()\n",
"etc_limited_byName()\n",
"if case == 13:\n",
"run_another()\n",
"partner_org_limited_byCountry()\n",
"if case == 14:\n",
"run_another()\n",
"avg_age_resp()\n",
"if case == 15:\n",
"run_another()\n",
"avg_edu_resp()\n",
"print('Sorry, that is not an option.')\n",
"run_another()\n",
"run_another()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_0(self, VAR_0='crimemap'):...\n",
"return pymysql.connect(host='localhost', user=dbconfig.db_user, passwd=\n dbconfig.db_password, db=database)\n"
] | [
"def connect(self, database='crimemap'):...\n",
"return pymysql.connect(host='localhost', user=dbconfig.db_user, passwd=\n dbconfig.db_password, db=database)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@utils.add_cmd...\n",
"\"\"\"docstring\"\"\"\n",
"permissions.checkPermissions(VAR_0, VAR_1, ['networks.reloadproto'])\n",
"VAR_13 = VAR_2[0]\n",
"VAR_0.error('Not enough arguments (needs 1: protocol module name)')\n",
"VAR_10 = utils.getProtocolModule(VAR_13)\n",
"return\n",
"importlib.reload(VAR_10)\n",
"VAR_0.reply('string' % VAR_13)\n"
] | [
"@utils.add_cmd...\n",
"\"\"\"docstring\"\"\"\n",
"permissions.checkPermissions(irc, source, ['networks.reloadproto'])\n",
"name = args[0]\n",
"irc.error('Not enough arguments (needs 1: protocol module name)')\n",
"proto = utils.getProtocolModule(name)\n",
"return\n",
"importlib.reload(proto)\n",
"irc.reply(\n 'Done. You will have to manually disconnect and reconnect any network using the %r module for changes to apply.'\n % name)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Return'",
"Expr'",
"Expr'"
] |
[
"def FUNC_1(self, VAR_6, VAR_7, VAR_8):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_6.get('readonly_global_domain'):\n",
"VAR_8 = VAR_8 + [VAR_6.get('readonly_global_domain')]\n",
"if VAR_6.tag == 'field':\n",
"VAR_10 = VAR_6.get('name')\n",
"for child_node in VAR_6:\n",
"VAR_11 = safe_eval(VAR_6.get('attrs', '{}'))\n",
"self._process_field(child_node, VAR_7, VAR_8)\n",
"VAR_12 = VAR_11.get('readonly') or VAR_6.get('readonly')\n",
"if isinstance(VAR_12, str):\n",
"VAR_12 = safe_eval(VAR_6.get('readonly', '{}'))\n",
"if not isinstance(VAR_12, (list, tuple)) and VAR_12:\n",
"return\n",
"if VAR_12 is None and VAR_7[VAR_10]['readonly']:\n",
"return\n",
"VAR_0 = expression.OR([safe_eval(domain, {'field_name': VAR_10}) for domain in\n VAR_8])\n",
"if VAR_12:\n",
"VAR_0 = expression.OR([VAR_12, VAR_0])\n",
"VAR_11['readonly'] = VAR_0\n",
"VAR_6.set('attrs', str(VAR_11))\n"
] | [
"def _process_field(self, node, readonly_fields, lst_domain):...\n",
"\"\"\"docstring\"\"\"\n",
"if node.get('readonly_global_domain'):\n",
"lst_domain = lst_domain + [node.get('readonly_global_domain')]\n",
"if node.tag == 'field':\n",
"field_name = node.get('name')\n",
"for child_node in node:\n",
"attrs = safe_eval(node.get('attrs', '{}'))\n",
"self._process_field(child_node, readonly_fields, lst_domain)\n",
"readonly = attrs.get('readonly') or node.get('readonly')\n",
"if isinstance(readonly, str):\n",
"readonly = safe_eval(node.get('readonly', '{}'))\n",
"if not isinstance(readonly, (list, tuple)) and readonly:\n",
"return\n",
"if readonly is None and readonly_fields[field_name]['readonly']:\n",
"return\n",
"_readonly_domain = expression.OR([safe_eval(domain, {'field_name':\n field_name}) for domain in lst_domain])\n",
"if readonly:\n",
"_readonly_domain = expression.OR([readonly, _readonly_domain])\n",
"attrs['readonly'] = _readonly_domain\n",
"node.set('attrs', str(attrs))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Condition",
"Assign'",
"For",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_7(VAR_17, *VAR_18, **VAR_19):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_20 = VAR_17.POST\n",
"VAR_21 = VAR_20.get('new_status')\n",
"if not VAR_21:\n",
"if not VAR_8 or not callable(VAR_8):\n",
"if not 'fields' in VAR_20:\n",
"return error_handler.logErrorAndReturnOK(\n 'No valid status can be set by the manageModelStatus.')\n",
"error_handler.logErrorAndReturnOK(\n 'No fields to filter on found for manageModelStatus.')\n",
"VAR_22 = pickle.loads(str(VAR_20['fields']))\n",
"VAR_23 = VAR_7.getForFields(VAR_22, limit=BATCH_SIZE)\n",
"for VAR_9 in VAR_23:\n",
"if VAR_21:\n",
"db.put(VAR_23)\n",
"VAR_25 = VAR_21\n",
"VAR_25 = VAR_8(VAR_9)\n",
"if len(VAR_23) == VAR_4:\n",
"VAR_9.status = VAR_25\n",
"VAR_24 = VAR_20.copy()\n",
"return responses.terminateTask()\n",
"return responses.startTask(VAR_17.path, VAR_24=context)\n"
] | [
"def manageModelsStatus(request, *args, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"post_dict = request.POST\n",
"new_status = post_dict.get('new_status')\n",
"if not new_status:\n",
"if not status_retriever or not callable(status_retriever):\n",
"if not 'fields' in post_dict:\n",
"return error_handler.logErrorAndReturnOK(\n 'No valid status can be set by the manageModelStatus.')\n",
"error_handler.logErrorAndReturnOK(\n 'No fields to filter on found for manageModelStatus.')\n",
"fields = pickle.loads(str(post_dict['fields']))\n",
"entities = entity_logic.getForFields(fields, limit=BATCH_SIZE)\n",
"for entity in entities:\n",
"if new_status:\n",
"db.put(entities)\n",
"status = new_status\n",
"status = status_retriever(entity)\n",
"if len(entities) == BATCH_SIZE:\n",
"entity.status = status\n",
"context = post_dict.copy()\n",
"return responses.terminateTask()\n",
"return responses.startTask(request.path, context=context)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
5
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Return'",
"Expr'",
"Assign'",
"Assign'",
"For",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Return'"
] |
[
"def FUNC_2(self, VAR_8, VAR_9):...\n",
"VAR_14 = self.__cache_map[VAR_8]\n",
"return VAR_14.get_by_slug(VAR_9)\n"
] | [
"def get_by_slug(self, entity_class, slug):...\n",
"cache = self.__cache_map[entity_class]\n",
"return cache.get_by_slug(slug)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"\"\"\"string\"\"\"\n",
"from lib.core.agent import agent\n",
"from lib.core.data import conf\n",
"from lib.core.data import kb\n",
"from lib.core.data import logger\n",
"from lib.core.data import queries\n",
"from lib.core.session import setUnion\n",
"from lib.request.connect import Connect as Request\n",
"def FUNC_0(VAR_0, VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = {}\n",
"for count in range(0, 50):\n",
"if kb.dbms == 'Oracle' and VAR_0.endswith(' FROM DUAL'):\n",
"return None\n",
"VAR_0 = VAR_0[:-len(' FROM DUAL')]\n",
"if count:\n",
"VAR_0 += ', NULL'\n",
"if kb.dbms == 'Oracle':\n",
"VAR_0 += ' FROM DUAL'\n",
"VAR_5 = agent.postfixQuery(VAR_0, VAR_1)\n",
"VAR_6 = agent.payload(newValue=commentedQuery)\n",
"VAR_7 = Request.queryPage(VAR_6)\n",
"if not VAR_7 in VAR_2.keys():\n",
"VAR_2[VAR_7] = 1, VAR_5\n",
"VAR_2[VAR_7] = VAR_2[VAR_7][0] + 1, VAR_5\n",
"if count:\n",
"for element in VAR_2.values():\n",
"if element[0] == 1:\n",
"if kb.injPlace == 'GET':\n",
"VAR_4 = '%s?%s' % (conf.url, VAR_6)\n",
"if kb.injPlace == 'POST':\n",
"return VAR_4\n",
"VAR_4 = \"URL:\\t'%s'\" % conf.url\n",
"if kb.injPlace == 'Cookie':\n",
"VAR_4 += \"\\nPOST:\\t'%s'\\n\" % VAR_6\n",
"VAR_4 = \"URL:\\t'%s'\" % conf.url\n",
"if kb.injPlace == 'User-Agent':\n",
"VAR_4 += \"\\nCookie:\\t'%s'\\n\" % VAR_6\n",
"VAR_4 = \"URL:\\t\\t'%s'\" % conf.url\n",
"VAR_4 += \"\"\"\nUser-Agent:\t'%s'\n\"\"\" % VAR_6\n"
] | [
"\"\"\"\n$Id$\n\nThis file is part of the sqlmap project, http://sqlmap.sourceforge.net.\n\nCopyright (c) 2006-2008 Bernardo Damele A. G. <[email protected]>\n and Daniele Bellucci <[email protected]>\n\nsqlmap is free software; you can redistribute it and/or modify it under\nthe terms of the GNU General Public License as published by the Free\nSoftware Foundation version 2 of the License.\n\nsqlmap is distributed in the hope that it will be useful, but WITHOUT ANY\nWARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS\nFOR A PARTICULAR PURPOSE. See the GNU General Public License for more\ndetails.\n\nYou should have received a copy of the GNU General Public License along\nwith sqlmap; if not, write to the Free Software Foundation, Inc., 51\nFranklin St, Fifth Floor, Boston, MA 02110-1301 USA\n\"\"\"\n",
"from lib.core.agent import agent\n",
"from lib.core.data import conf\n",
"from lib.core.data import kb\n",
"from lib.core.data import logger\n",
"from lib.core.data import queries\n",
"from lib.core.session import setUnion\n",
"from lib.request.connect import Connect as Request\n",
"def __effectiveUnionTest(query, comment):...\n",
"\"\"\"docstring\"\"\"\n",
"resultDict = {}\n",
"for count in range(0, 50):\n",
"if kb.dbms == 'Oracle' and query.endswith(' FROM DUAL'):\n",
"return None\n",
"query = query[:-len(' FROM DUAL')]\n",
"if count:\n",
"query += ', NULL'\n",
"if kb.dbms == 'Oracle':\n",
"query += ' FROM DUAL'\n",
"commentedQuery = agent.postfixQuery(query, comment)\n",
"payload = agent.payload(newValue=commentedQuery)\n",
"newResult = Request.queryPage(payload)\n",
"if not newResult in resultDict.keys():\n",
"resultDict[newResult] = 1, commentedQuery\n",
"resultDict[newResult] = resultDict[newResult][0] + 1, commentedQuery\n",
"if count:\n",
"for element in resultDict.values():\n",
"if element[0] == 1:\n",
"if kb.injPlace == 'GET':\n",
"value = '%s?%s' % (conf.url, payload)\n",
"if kb.injPlace == 'POST':\n",
"return value\n",
"value = \"URL:\\t'%s'\" % conf.url\n",
"if kb.injPlace == 'Cookie':\n",
"value += \"\\nPOST:\\t'%s'\\n\" % payload\n",
"value = \"URL:\\t'%s'\" % conf.url\n",
"if kb.injPlace == 'User-Agent':\n",
"value += \"\\nCookie:\\t'%s'\\n\" % payload\n",
"value = \"URL:\\t\\t'%s'\" % conf.url\n",
"value += \"\"\"\nUser-Agent:\t'%s'\n\"\"\" % payload\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
0,
0,
0,
0,
0,
2,
0,
0,
0,
0,
2,
2,
0,
2,
0,
0,
0,
0,
2,
0,
0,
2,
0,
2
] | [
"Expr'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Condition",
"Return'",
"Assign'",
"Condition",
"AugAssign'",
"Condition",
"AugAssign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"For",
"Condition",
"Condition",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Condition",
"AugAssign'",
"Assign'",
"Condition",
"AugAssign'",
"Assign'",
"AugAssign'"
] |
[
"def FUNC_1(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._out = self.execute_queued()\n"
] | [
"def execute(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._out = self.execute_queued()\n"
] | [
0,
0,
7
] | [
"FunctionDef'",
"Docstring",
"Assign'"
] |
[
"def FUNC_17(self, VAR_18):...\n",
"\"\"\"docstring\"\"\"\n",
"self._eql_execute('volume', 'select', VAR_18['volume_name'], 'snapshot',\n 'delete', VAR_18['name'])\n",
"VAR_0.error(_('Failed to delete snapshot %(snap)s of volume %(vol)s'), {\n 'snap': VAR_18['name'], 'vol': VAR_18['volume_name']})\n"
] | [
"def delete_snapshot(self, snapshot):...\n",
"\"\"\"docstring\"\"\"\n",
"self._eql_execute('volume', 'select', snapshot['volume_name'], 'snapshot',\n 'delete', snapshot['name'])\n",
"LOG.error(_('Failed to delete snapshot %(snap)s of volume %(vol)s'), {\n 'snap': snapshot['name'], 'vol': snapshot['volume_name']})\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'"
] |
[
"def FUNC_12(self, VAR_1=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1 = FUNC_0(VAR_1)\n",
"VAR_57 = self.execute(\n \"SELECT path FROM history WHERE name LIKE ? AND status = 'Failed'\", (\n VAR_1,))\n",
"if VAR_57:\n",
"return [VAR_11.get('path') for VAR_11 in self.c.fetchall()]\n",
"return []\n"
] | [
"def get_failed_paths(self, search=None):...\n",
"\"\"\"docstring\"\"\"\n",
"search = convert_search(search)\n",
"fetch_ok = self.execute(\n \"SELECT path FROM history WHERE name LIKE ? AND status = 'Failed'\", (\n search,))\n",
"if fetch_ok:\n",
"return [item.get('path') for item in self.c.fetchall()]\n",
"return []\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_0(*VAR_0, **VAR_1):...\n",
"def FUNC_5(VAR_12):...\n",
"def FUNC_9(self, *VAR_15, **VAR_75):...\n",
"for validator in VAR_0:\n",
"if VAR_102.method == 'POST' and hasattr(self, 'ajax_login_redirect'):\n",
"return FUNC_9\n",
"validator(VAR_75)\n",
"VAR_16 = self.build_arg_list(VAR_12, VAR_75)\n",
"return self.ajax_login_redirect('/')\n",
"return self.intermediate_redirect('/login')\n",
"for VAR_110, validator in VAR_1.iteritems():\n",
"VAR_16[VAR_110] = validator(VAR_75)\n",
"return VAR_12(self, *VAR_15, **kw)\n"
] | [
"def validate(*simple_vals, **param_vals):...\n",
"def val(fn):...\n",
"def newfn(self, *a, **env):...\n",
"for validator in simple_vals:\n",
"if request.method == 'POST' and hasattr(self, 'ajax_login_redirect'):\n",
"return newfn\n",
"validator(env)\n",
"kw = self.build_arg_list(fn, env)\n",
"return self.ajax_login_redirect('/')\n",
"return self.intermediate_redirect('/login')\n",
"for var, validator in param_vals.iteritems():\n",
"kw[var] = validator(env)\n",
"return fn(self, *a, **kw)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"FunctionDef'",
"For",
"Condition",
"Return'",
"Expr'",
"Assign'",
"Return'",
"Return'",
"For",
"Assign'",
"Return'"
] |
[
"def FUNC_29(VAR_6):...\n",
"return FUNC_8(VAR_6, 'private')\n"
] | [
"def validate_private(page):...\n",
"return validate_cache_control_header(page, 'private')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_38(self, VAR_40, VAR_41, VAR_42=None):...\n",
"\"\"\"docstring\"\"\"\n",
"self._names[VAR_40] = VAR_41, VAR_42\n",
"if VAR_42 is None:\n",
"setattr(self, VAR_40, self[VAR_41])\n",
"setattr(self, VAR_40, CLASS_2(VAR_37=self[index:end]))\n"
] | [
"def set_name(self, name, index, end=None):...\n",
"\"\"\"docstring\"\"\"\n",
"self._names[name] = index, end\n",
"if end is None:\n",
"setattr(self, name, self[index])\n",
"setattr(self, name, Namedlist(toclone=self[index:end]))\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_36(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = {'SEVERITY': {'MEDIUM': 12}, 'CONFIDENCE': {'LOW': 7, 'MEDIUM': 5}}\n",
"self.check_example('sql_statements.py', VAR_2)\n"
] | [
"def test_sql_statements(self):...\n",
"\"\"\"docstring\"\"\"\n",
"expect = {'SEVERITY': {'MEDIUM': 12}, 'CONFIDENCE': {'LOW': 7, 'MEDIUM': 5}}\n",
"self.check_example('sql_statements.py', expect)\n"
] | [
0,
0,
4,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"def FUNC_4(VAR_14, VAR_15):...\n",
"return FUNC_6(VAR_14, [[VAR_16 for line in VAR_15 for VAR_16 in line]], True)\n"
] | [
"def hStrip(dims, files):...\n",
"return smartGrid(dims, [[file for line in files for file in line]], True)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def __init__(self, VAR_1, VAR_3, VAR_4, VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"self.pid = VAR_1\n",
"self.hostname = VAR_3\n",
"self.host_status = VAR_4\n",
"self.host_lock = VAR_5\n"
] | [
"def __init__(self, pid, hostname, host_status, host_lock):...\n",
"\"\"\"docstring\"\"\"\n",
"self.pid = pid\n",
"self.hostname = hostname\n",
"self.host_status = host_status\n",
"self.host_lock = host_lock\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |