lines (sequence, lengths [1, 444]) | raw_lines (sequence, lengths [1, 444]) | label (sequence, lengths [1, 444]) | type (sequence, lengths [1, 444])
---|---|---|---|
[
"def FUNC_5(self, **VAR_15):...\n",
"VAR_19 = self.executable + ' ' + self.arguments\n",
"for VAR_30 in ('filename', 'config_file'):\n",
"VAR_15[VAR_30] = escape_path_argument(VAR_15.get(VAR_30, '') or '')\n",
"return VAR_19.format(**kwargs)\n"
] | [
"def _create_command(self, **kwargs):...\n",
"command = self.executable + ' ' + self.arguments\n",
"for key in ('filename', 'config_file'):\n",
"kwargs[key] = escape_path_argument(kwargs.get(key, '') or '')\n",
"return command.format(**kwargs)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Return'"
] |
[
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._remote\n"
] | [
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return self._remote\n"
] | [
5,
0,
5
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_10(self):...\n",
"self.env_var_override.set('VERIFY_XSS', 'True')\n",
"VAR_7 = BokChoyTestSuite('')\n",
"VAR_1 = 'tests'\n",
"self.assertEqual(VAR_7.cmd, self._expected_command(VAR_1=name, VAR_3=True))\n"
] | [
"def test_verify_xss_env_var(self):...\n",
"self.env_var_override.set('VERIFY_XSS', 'True')\n",
"suite = BokChoyTestSuite('')\n",
"name = 'tests'\n",
"self.assertEqual(suite.cmd, self._expected_command(name=name, verify_xss=True))\n"
] | [
0,
3,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"@detail_route(methods=['post'])...\n",
"return self.set_pending_action(pending_actions.CANCEL, *VAR_13, **kwargs)\n"
] | [
"@detail_route(methods=['post'])...\n",
"return self.set_pending_action(pending_actions.CANCEL, *args, **kwargs)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"import logging\n",
"from django.contrib import admin\n",
"from django.contrib import messages\n",
"from checkcve.forms import CheckCVEForm, CheckCVEChangeForm\n",
"from checkcve.models import Checkcve, Software, WhiteList, Cve\n",
"from checkcve.utils import create_check_cve_task\n",
"VAR_0 = logging.getLogger(__name__)\n",
"VAR_1 = CheckCVEForm\n",
"def FUNC_0(self, VAR_2, VAR_3):...\n",
"VAR_7 = list()\n",
"VAR_8 = True\n",
"for probe in VAR_3:\n",
"if VAR_8:\n",
"probe.check_cve()\n",
"VAR_8 = False\n",
"messages.add_message(VAR_2, messages.SUCCESS, 'Check CVE OK')\n",
"messages.add_message(VAR_2, messages.ERROR, 'Check CVE failed ! ' + str(VAR_7))\n",
"VAR_0.exception('Error in check_cve ' + str(self.actions))\n",
"VAR_4 = [FUNC_0]\n",
"VAR_7.append(str(e))\n",
"def FUNC_1(self, VAR_2, VAR_3, VAR_1, VAR_5):...\n",
"create_check_cve_task(VAR_3)\n",
"super().save_model(VAR_2, VAR_3, VAR_1, VAR_5)\n",
"def FUNC_2(self, VAR_2, VAR_3=None, **VAR_6):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_3 is None:\n",
"return super(CLASS_0, self).get_form(VAR_2, VAR_3, **kwargs)\n",
"return CheckCVEChangeForm\n"
] | [
"import logging\n",
"from django.contrib import admin\n",
"from django.contrib import messages\n",
"from checkcve.forms import CheckCVEForm, CheckCVEChangeForm\n",
"from checkcve.models import Checkcve, Software, WhiteList, Cve\n",
"from checkcve.utils import create_check_cve_task\n",
"logger = logging.getLogger(__name__)\n",
"form = CheckCVEForm\n",
"def check_cve(self, request, obj):...\n",
"errors = list()\n",
"test = True\n",
"for probe in obj:\n",
"if test:\n",
"probe.check_cve()\n",
"test = False\n",
"messages.add_message(request, messages.SUCCESS, 'Check CVE OK')\n",
"messages.add_message(request, messages.ERROR, 'Check CVE failed ! ' + str(\n errors))\n",
"logger.exception('Error in check_cve ' + str(self.actions))\n",
"actions = [check_cve]\n",
"errors.append(str(e))\n",
"def save_model(self, request, obj, form, change):...\n",
"create_check_cve_task(obj)\n",
"super().save_model(request, obj, form, change)\n",
"def get_form(self, request, obj=None, **kwargs):...\n",
"\"\"\"docstring\"\"\"\n",
"if obj is None:\n",
"return super(CheckCVEAdmin, self).get_form(request, obj, **kwargs)\n",
"return CheckCVEChangeForm\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"FunctionDef'",
"Expr'",
"Expr'",
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_5():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3 = FUNC_0()\n",
"VAR_4 = VAR_3.cursor()\n",
"VAR_5 = 'players'\n",
"VAR_4.execute('string' % (VAR_5,))\n",
"VAR_7 = VAR_4.fetchall()\n",
"VAR_3.commit()\n",
"VAR_3.close()\n",
"return VAR_7\n"
] | [
"def playerStandings():...\n",
"\"\"\"docstring\"\"\"\n",
"conn = connect()\n",
"c = conn.cursor()\n",
"table = 'players'\n",
"c.execute(\n \"\"\"SELECT playerID, \n playerName, \n wins, \n matchesPlayed FROM %s ORDER BY wins DESC;\"\"\"\n % (table,))\n",
"result = c.fetchall()\n",
"conn.commit()\n",
"conn.close()\n",
"return result\n"
] | [
0,
0,
0,
0,
0,
4,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_6(VAR_8):...\n",
"\"\"\"docstring\"\"\"\n",
"return hasattr(VAR_8, '__self__') and VAR_8.__self__ is not None\n"
] | [
"def is_class_method(f):...\n",
"\"\"\"docstring\"\"\"\n",
"return hasattr(f, '__self__') and f.__self__ is not None\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_4(VAR_11):...\n",
"VAR_17 = []\n",
"for r in VAR_11:\n",
"VAR_18 = {'value': r[0], 'description': ', '.join(unique(cstr(d) for d in r if\n d)[1:])}\n",
"return VAR_17\n",
"VAR_17.append(VAR_18)\n"
] | [
"def build_for_autosuggest(res):...\n",
"results = []\n",
"for r in res:\n",
"out = {'value': r[0], 'description': ', '.join(unique(cstr(d) for d in r if\n d)[1:])}\n",
"return results\n",
"results.append(out)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Return'",
"Expr'"
] |
[
"def FUNC_13(self, VAR_10, VAR_11, VAR_12, VAR_13):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_24 = self.common._cli_run('createhost -persona %s -domain %s %s %s' % (\n VAR_13, VAR_12, VAR_10, ' '.join(VAR_11)), None)\n",
"if VAR_24 and len(VAR_24) > 1:\n",
"return self.common.parse_create_host_error(VAR_10, VAR_24)\n",
"return VAR_10\n"
] | [
"def _create_3par_fibrechan_host(self, hostname, wwn, domain, persona_id):...\n",
"\"\"\"docstring\"\"\"\n",
"out = self.common._cli_run('createhost -persona %s -domain %s %s %s' % (\n persona_id, domain, hostname, ' '.join(wwn)), None)\n",
"if out and len(out) > 1:\n",
"return self.common.parse_create_host_error(hostname, out)\n",
"return hostname\n"
] | [
0,
0,
2,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"@login_required()...\n",
"VAR_24 = get_object_or_404(DataDocument, VAR_2=pk)\n",
"VAR_25 = Script.objects.get(title='Manual (dummy)', script_type='EX')\n",
"VAR_26, VAR_27 = ExtractedText.objects.get_or_create(data_document=doc,\n extraction_script=script)\n",
"if VAR_27:\n",
"VAR_26.doc_date = 'please add...'\n",
"VAR_28, VAR_29 = create_detail_formset(VAR_24)\n",
"VAR_30 = VAR_28(VAR_0.POST or None, instance=extext)\n",
"VAR_31 = VAR_29(VAR_0.POST or None, instance=extext, prefix='habits')\n",
"VAR_12 = {'doc': VAR_24, 'ext_form': VAR_30, 'hp_formset': VAR_31}\n",
"if VAR_0.method == 'POST' and 'save' in VAR_0.POST:\n",
"if VAR_31.is_valid():\n",
"return render(VAR_0, VAR_1, VAR_12)\n",
"VAR_31.save()\n",
"if VAR_30.is_valid():\n",
"VAR_30.save()\n",
"VAR_24.extracted = True\n",
"VAR_24.save()\n",
"VAR_12 = {'doc': VAR_24, 'ext_form': VAR_30, 'hp_formset': VAR_31}\n"
] | [
"@login_required()...\n",
"doc = get_object_or_404(DataDocument, pk=pk)\n",
"script = Script.objects.get(title='Manual (dummy)', script_type='EX')\n",
"extext, created = ExtractedText.objects.get_or_create(data_document=doc,\n extraction_script=script)\n",
"if created:\n",
"extext.doc_date = 'please add...'\n",
"ExtractedTextForm, HPFormSet = create_detail_formset(doc)\n",
"ext_form = ExtractedTextForm(request.POST or None, instance=extext)\n",
"hp_formset = HPFormSet(request.POST or None, instance=extext, prefix='habits')\n",
"context = {'doc': doc, 'ext_form': ext_form, 'hp_formset': hp_formset}\n",
"if request.method == 'POST' and 'save' in request.POST:\n",
"if hp_formset.is_valid():\n",
"return render(request, template_name, context)\n",
"hp_formset.save()\n",
"if ext_form.is_valid():\n",
"ext_form.save()\n",
"doc.extracted = True\n",
"doc.save()\n",
"context = {'doc': doc, 'ext_form': ext_form, 'hp_formset': hp_formset}\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Return'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Assign'"
] |
[
"def FUNC_25(self):...\n",
"if not self._IsServerAlive():\n",
"return\n",
"VAR_23 = BaseRequest.PostDataToHandler(BuildRequestData(),\n 'detailed_diagnostic')\n",
"vimsupport.PostVimMessage(str(e))\n",
"if 'message' in VAR_23:\n",
"vimsupport.EchoText(VAR_23['message'])\n"
] | [
"def ShowDetailedDiagnostic(self):...\n",
"if not self._IsServerAlive():\n",
"return\n",
"debug_info = BaseRequest.PostDataToHandler(BuildRequestData(),\n 'detailed_diagnostic')\n",
"vimsupport.PostVimMessage(str(e))\n",
"if 'message' in debug_info:\n",
"vimsupport.EchoText(debug_info['message'])\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_17(self, VAR_18):...\n",
"\"\"\"docstring\"\"\"\n",
"self._eql_execute('volume', 'select', VAR_18['volume_name'], 'snapshot',\n 'delete', VAR_18['name'])\n",
"VAR_0.error(_('Failed to delete snapshot %(snap)s of volume %(vol)s'), {\n 'snap': VAR_18['name'], 'vol': VAR_18['volume_name']})\n"
] | [
"def delete_snapshot(self, snapshot):...\n",
"\"\"\"docstring\"\"\"\n",
"self._eql_execute('volume', 'select', snapshot['volume_name'], 'snapshot',\n 'delete', snapshot['name'])\n",
"LOG.error(_('Failed to delete snapshot %(snap)s of volume %(vol)s'), {\n 'snap': snapshot['name'], 'vol': snapshot['volume_name']})\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Expr'"
] |
[
"@property...\n",
"return self.dag.priority(self)\n"
] | [
"@property...\n",
"return self.dag.priority(self)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_9(self, VAR_47, VAR_49):...\n",
"\"\"\"docstring\"\"\"\n",
"assert isinstance(VAR_47, list)\n",
"assert isinstance(VAR_49, dict)\n",
"for cond in self.extract_key_from_nested_dict(VAR_49, self.WHERE_CONDITION):\n",
"assert isinstance(cond, dict), 'where condition needs to be dict'\n",
"return True\n",
"assert 'aggregate_lhs' in cond or cond.get('field'\n ) in VAR_47, 'Use of non aggregate value or non grouped field: {}'.format(\n cond)\n"
] | [
"def validate_group_by_data(self, group_by_fields, having):...\n",
"\"\"\"docstring\"\"\"\n",
"assert isinstance(group_by_fields, list)\n",
"assert isinstance(having, dict)\n",
"for cond in self.extract_key_from_nested_dict(having, self.WHERE_CONDITION):\n",
"assert isinstance(cond, dict), 'where condition needs to be dict'\n",
"return True\n",
"assert 'aggregate_lhs' in cond or cond.get('field'\n ) in group_by_fields, 'Use of non aggregate value or non grouped field: {}'.format(\n cond)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assert'",
"Assert'",
"For",
"Assert'",
"Return'",
"Assert'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"self.common.create_snapshot(VAR_8)\n",
"self.common.client_logout()\n"
] | [
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"self.common.create_snapshot(snapshot)\n",
"self.common.client_logout()\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_15(self):...\n",
"self.assertTrue(zf._allowZip64)\n"
] | [
"def test_open_zipDefault(self):...\n",
"self.assertTrue(zf._allowZip64)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"return self.data['courses']\n"
] | [
"def courses(self):...\n",
"return self.data['courses']\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"return 'submit source %s for task %s (ID %d) %s' % (self.source_path, self.\n task[1], self.task[0], self.url)\n"
] | [
"def describe(self):...\n",
"return 'submit source %s for task %s (ID %d) %s' % (self.source_path, self.\n task[1], self.task[0], self.url)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"\"\"\"string\"\"\"\n",
"import os\n",
"import unittest\n",
"from mock import patch, call\n",
"from test.test_support import EnvironmentVarGuard\n",
"from paver.easy import BuildFailure\n",
"from pavelib.utils.test.suites import BokChoyTestSuite, Pa11yCrawler\n",
"VAR_0 = os.getcwd()\n",
"\"\"\"\n Paver Bok Choy Command test cases\n \"\"\"\n",
"def FUNC_0(self, VAR_1, VAR_2=None, VAR_3=False):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = 'string'.format(default_store=store, repo_dir=REPO_DIR, shard_str=\n '/shard_' + self.shard if self.shard else '', exp_text=name,\n a11y_custom_file=\n 'node_modules/edx-custom-a11y-rules/lib/custom_a11y_rules.js', VAR_3=\n verify_xss)\n",
"return VAR_6\n"
] | [
"\"\"\"\nTests for the bok-choy paver commands themselves.\nRun just this test with: paver test_lib -t pavelib/paver_tests/test_paver_bok_choy_cmds.py\n\"\"\"\n",
"import os\n",
"import unittest\n",
"from mock import patch, call\n",
"from test.test_support import EnvironmentVarGuard\n",
"from paver.easy import BuildFailure\n",
"from pavelib.utils.test.suites import BokChoyTestSuite, Pa11yCrawler\n",
"REPO_DIR = os.getcwd()\n",
"\"\"\"\n Paver Bok Choy Command test cases\n \"\"\"\n",
"def _expected_command(self, name, store=None, verify_xss=False):...\n",
"\"\"\"docstring\"\"\"\n",
"expected_statement = (\n \"DEFAULT_STORE={default_store} SCREENSHOT_DIR='{repo_dir}/test_root/log{shard_str}' BOK_CHOY_HAR_DIR='{repo_dir}/test_root/log{shard_str}/hars' BOKCHOY_A11Y_CUSTOM_RULES_FILE='{repo_dir}/{a11y_custom_file}' SELENIUM_DRIVER_LOG_DIR='{repo_dir}/test_root/log{shard_str}' VERIFY_XSS='{verify_xss}' nosetests {repo_dir}/common/test/acceptance/{exp_text} --with-xunit --xunit-file={repo_dir}/reports/bok_choy{shard_str}/xunit.xml --verbosity=2 \"\n .format(default_store=store, repo_dir=REPO_DIR, shard_str='/shard_' +\n self.shard if self.shard else '', exp_text=name, a11y_custom_file=\n 'node_modules/edx-custom-a11y-rules/lib/custom_a11y_rules.js',\n verify_xss=verify_xss))\n",
"return expected_statement\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Expr'",
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_1(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_4() in VAR_0:\n",
"return True\n",
"return False\n"
] | [
"def has_meta(contents):...\n",
"\"\"\"docstring\"\"\"\n",
"if meta_separator() in contents:\n",
"return True\n",
"return False\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_12(self):...\n",
"self.object = self.report\n",
"self.template_name = self.access_template_name\n",
"VAR_24 = self.get_context_data(VAR_20=self._get_access_form())\n",
"return self.render_to_response(VAR_24)\n"
] | [
"def _render_access_form(self):...\n",
"self.object = self.report\n",
"self.template_name = self.access_template_name\n",
"context = self.get_context_data(form=self._get_access_form())\n",
"return self.render_to_response(context)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@property...\n",
"return filter(lambda VAR_28: VAR_28.exists, self.expanded_output)\n"
] | [
"@property...\n",
"return filter(lambda f: f.exists, self.expanded_output)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_11(self, VAR_9, *VAR_13, **VAR_14):...\n",
"VAR_14['partial'] = True\n",
"return self.update(VAR_9, *VAR_13, **kwargs)\n"
] | [
"def partial_update(self, request, *args, **kwargs):...\n",
"kwargs['partial'] = True\n",
"return self.update(request, *args, **kwargs)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_22(self, VAR_6=1.0):...\n",
"if self._event is None:\n",
"return\n",
"if not self._event.wait(VAR_6):\n",
"VAR_44 = 'Timeout waiting for '\n",
"if isinstance(self, CLASS_4):\n",
"VAR_44 += 'Event {}'.format(self.name)\n",
"VAR_44 += 'Response {}'.format(self.name)\n"
] | [
"def wait(self, timeout=1.0):...\n",
"if self._event is None:\n",
"return\n",
"if not self._event.wait(timeout):\n",
"message = 'Timeout waiting for '\n",
"if isinstance(self, AwaitableEvent):\n",
"message += 'Event {}'.format(self.name)\n",
"message += 'Response {}'.format(self.name)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Condition",
"AugAssign'",
"AugAssign'"
] |
[
"def FUNC_5(self, VAR_7):...\n",
"return VAR_54.db.get_value(self.doctype, self.name, VAR_7)\n"
] | [
"def get_db_value(self, key):...\n",
"return frappe.db.get_value(self.doctype, self.name, key)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_37(self):...\n",
"VAR_81 = Priority.objects.filter(pk=self.new_value).exists()\n",
"if not VAR_81:\n",
"self.get_update_targets().update(**{str(self.target_field): self.new_value})\n"
] | [
"def _update_priority(self):...\n",
"exists = Priority.objects.filter(pk=self.new_value).exists()\n",
"if not exists:\n",
"self.get_update_targets().update(**{str(self.target_field): self.new_value})\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Expr'"
] |
[
"def FUNC_7(VAR_11):...\n",
"return True\n"
] | [
"def directory_filter(_path):...\n",
"return True\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"return 'Running ssh host check for %s with pid %s' % (self.hostname, self.pid)\n"
] | [
"def info(self):...\n",
"return 'Running ssh host check for %s with pid %s' % (self.hostname, self.pid)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@api.public...\n",
"FUNC_31(self, 'POST')\n"
] | [
"@api.public...\n",
"record(self, 'POST')\n"
] | [
0,
0
] | [
"Condition",
"Expr'"
] |
[
"def FUNC_0(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"if not VAR_83.included_stack:\n",
"return None\n",
"return os.path.join(os.path.dirname(VAR_83.included_stack[-1]), VAR_0)\n"
] | [
"def srcdir(path):...\n",
"\"\"\"docstring\"\"\"\n",
"if not workflow.included_stack:\n",
"return None\n",
"return os.path.join(os.path.dirname(workflow.included_stack[-1]), path)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_25(VAR_3, VAR_6):...\n",
"VAR_38 = FUNC_1(VAR_3)\n",
"VAR_45, VAR_41 = VAR_38.accept()\n",
"return Connection(VAR_45, VAR_38)\n"
] | [
"def connect(addr, timeout):...\n",
"server = create_server(addr)\n",
"client, _ = server.accept()\n",
"return Connection(client, server)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"\"\"\"\nUnit tests for the Deis api app.\n\nRun the tests with \"./manage.py test api\\\"\n\"\"\"\n",
"from __future__ import unicode_literals\n",
"import json\n",
"import mock\n",
"import requests\n",
"from django.test import TransactionTestCase\n",
"from api.models import Build\n",
"def FUNC_0(*VAR_0, **VAR_1):...\n",
"VAR_2 = requests.Response()\n",
"VAR_2.status_code = 200\n",
"VAR_2._content_consumed = True\n",
"return VAR_2\n"
] | [
"\"\"\"\nUnit tests for the Deis api app.\n\nRun the tests with \"./manage.py test api\\\"\n\"\"\"\n",
"from __future__ import unicode_literals\n",
"import json\n",
"import mock\n",
"import requests\n",
"from django.test import TransactionTestCase\n",
"from api.models import Build\n",
"def mock_import_repository_task(*args, **kwargs):...\n",
"resp = requests.Response()\n",
"resp.status_code = 200\n",
"resp._content_consumed = True\n",
"return resp\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"from __future__ import absolute_import\n",
"import logging\n",
"import time\n",
"from PyQt5.QtCore import QTimer, QUrl, pyqtSignal\n",
"from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkReply, QNetworkRequest\n",
"import Tribler.Core.Utilities.json_util as json\n",
"VAR_0 = []\n",
"\"\"\"string\"\"\"\n",
"VAR_1 = pyqtSignal(object)\n",
"VAR_2 = pyqtSignal(object)\n",
"VAR_3 = pyqtSignal(object)\n",
"VAR_4 = pyqtSignal()\n",
"VAR_5 = pyqtSignal(str)\n",
"VAR_6 = pyqtSignal()\n",
"VAR_7 = pyqtSignal()\n",
"VAR_8 = pyqtSignal(str)\n",
"VAR_9 = pyqtSignal(object)\n",
"VAR_10 = pyqtSignal(object)\n",
"VAR_11 = pyqtSignal(object)\n",
"VAR_12 = pyqtSignal(object)\n",
"VAR_13 = pyqtSignal(object)\n",
"VAR_14 = pyqtSignal(object)\n",
"VAR_15 = pyqtSignal(object)\n",
"VAR_16 = pyqtSignal(object)\n",
"VAR_17 = pyqtSignal(object)\n",
"VAR_18 = pyqtSignal(object)\n",
"VAR_19 = pyqtSignal(object)\n",
"VAR_20 = pyqtSignal(object)\n",
"VAR_21 = pyqtSignal(object)\n",
"VAR_22 = pyqtSignal(object)\n",
"VAR_23 = pyqtSignal(str)\n",
"def __init__(self, VAR_24):...\n",
"QNetworkAccessManager.__init__(self)\n",
"VAR_27 = QUrl('http://localhost:%d/events' % VAR_24)\n",
"self.request = QNetworkRequest(VAR_27)\n",
"self.failed_attempts = 0\n",
"self.connect_timer = QTimer()\n",
"self.current_event_string = ''\n",
"self.tribler_version = 'Unknown'\n",
"self.reply = None\n",
"self.emitted_tribler_started = False\n",
"self.shutting_down = False\n",
"self._logger = logging.getLogger('TriblerGUI')\n",
"def FUNC_0(self, VAR_25, VAR_26):...\n",
"self._logger.info('Got Tribler core error: %s' % VAR_25)\n",
"if VAR_25 == QNetworkReply.ConnectionRefusedError:\n",
"if self.failed_attempts == 40:\n",
"def FUNC_1(self):...\n",
"self.failed_attempts += 1\n",
"if self.receivers(self.finished) == 0:\n",
"if VAR_26:\n",
"self.finished.connect(lambda reply: self.on_finished())\n",
"self.connect_timer.stop()\n",
"self.connect_timer = QTimer()\n",
"VAR_28 = self.reply.readAll()\n",
"self.connect_timer.setSingleShot(True)\n",
"self.current_event_string += VAR_28\n",
"self.connect_timer.timeout.connect(self.connect)\n",
"if len(self.current_event_string) > 0 and self.current_event_string[-1\n",
"self.connect_timer.start(500)\n",
"for event in self.current_event_string.split('\\n'):\n",
"def FUNC_2(self):...\n",
"if len(event) == 0:\n",
"self.current_event_string = ''\n",
"\"\"\"docstring\"\"\"\n",
"VAR_29 = json.loads(str(event))\n",
"if self.shutting_down:\n",
"VAR_0.insert(0, (VAR_29, time.time()))\n",
"return\n",
"self._logger.warning('Events connection dropped, attempting to reconnect')\n",
"if len(VAR_0) > 100:\n",
"self.failed_attempts = 0\n",
"VAR_0.pop()\n",
"if VAR_29['type'] == 'torrent_info_updated':\n",
"self.connect_timer = QTimer()\n",
"self.torrent_info_updated.emit(VAR_29['event'])\n",
"if VAR_29['type'] == 'tribler_started' and not self.emitted_tribler_started:\n",
"self.connect_timer.setSingleShot(True)\n",
"self.tribler_started.emit()\n",
"if VAR_29['type'] == 'new_version_available':\n",
"self.connect_timer.timeout.connect(self.connect)\n",
"self.emitted_tribler_started = True\n",
"self.new_version_available.emit(VAR_29['event']['version'])\n",
"if VAR_29['type'] == 'upgrader_started':\n",
"self.connect_timer.start(500)\n",
"self.upgrader_started.emit()\n",
"if VAR_29['type'] == 'upgrader_finished':\n",
"def FUNC_3(self, VAR_26=True):...\n",
"self.upgrader_finished.emit()\n",
"if VAR_29['type'] == 'upgrader_tick':\n",
"self._logger.info('Will connect to events endpoint')\n",
"self.upgrader_tick.emit(VAR_29['event']['text'])\n",
"if VAR_29['type'] == 'channel_discovered':\n",
"self.reply = self.get(self.request)\n",
"self.discovered_channel.emit(VAR_29['event'])\n",
"if VAR_29['type'] == 'torrent_discovered':\n",
"self.reply.readyRead.connect(self.on_read_data)\n",
"self.discovered_torrent.emit(VAR_29['event'])\n",
"if VAR_29['type'] == 'events_start':\n",
"self.reply.error.connect(lambda VAR_25: self.on_error(VAR_25, VAR_26=\n reschedule_on_err))\n",
"self.events_started.emit(VAR_29['event'])\n",
"if VAR_29['type'] == 'torrent_finished':\n",
"self.tribler_version = VAR_29['event']['version']\n",
"self.torrent_finished.emit(VAR_29['event'])\n",
"if VAR_29['type'] == 'market_ask':\n",
"if VAR_29['event']['tribler_started'] and not self.emitted_tribler_started:\n",
"self.received_market_ask.emit(VAR_29['event'])\n",
"if VAR_29['type'] == 'market_bid':\n",
"self.tribler_started.emit()\n",
"self.received_market_bid.emit(VAR_29['event'])\n",
"if VAR_29['type'] == 'market_ask_timeout':\n",
"self.emitted_tribler_started = True\n",
"self.expired_market_ask.emit(VAR_29['event'])\n",
"if VAR_29['type'] == 'market_bid_timeout':\n",
"self.expired_market_bid.emit(VAR_29['event'])\n",
"if VAR_29['type'] == 'market_transaction_complete':\n",
"self.market_transaction_complete.emit(VAR_29['event'])\n",
"if VAR_29['type'] == 'market_payment_received':\n",
"self.market_payment_received.emit(VAR_29['event'])\n",
"if VAR_29['type'] == 'market_payment_sent':\n",
"self.market_payment_sent.emit(VAR_29['event'])\n",
"if VAR_29['type'] == 'market_iom_input_required':\n",
"self.market_iom_input_required.emit(VAR_29['event'])\n",
"if VAR_29['type'] == 'signal_low_space':\n",
"self.low_storage_signal.emit(VAR_29['event'])\n",
"if VAR_29['type'] == 'credit_mining_error':\n",
"self.credit_mining_signal.emit(VAR_29['event'])\n",
"if VAR_29['type'] == 'shutdown':\n",
"self.tribler_shutdown_signal.emit(VAR_29['event'])\n",
"if VAR_29['type'] == 'tribler_exception':\n"
] | [
"from __future__ import absolute_import\n",
"import logging\n",
"import time\n",
"from PyQt5.QtCore import QTimer, QUrl, pyqtSignal\n",
"from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkReply, QNetworkRequest\n",
"import Tribler.Core.Utilities.json_util as json\n",
"received_events = []\n",
"\"\"\"\n The EventRequestManager class handles the events connection over which important events in Tribler are pushed.\n \"\"\"\n",
"torrent_info_updated = pyqtSignal(object)\n",
"received_search_result_channel = pyqtSignal(object)\n",
"received_search_result_torrent = pyqtSignal(object)\n",
"tribler_started = pyqtSignal()\n",
"upgrader_tick = pyqtSignal(str)\n",
"upgrader_started = pyqtSignal()\n",
"upgrader_finished = pyqtSignal()\n",
"new_version_available = pyqtSignal(str)\n",
"discovered_channel = pyqtSignal(object)\n",
"discovered_torrent = pyqtSignal(object)\n",
"torrent_finished = pyqtSignal(object)\n",
"received_market_ask = pyqtSignal(object)\n",
"received_market_bid = pyqtSignal(object)\n",
"expired_market_ask = pyqtSignal(object)\n",
"expired_market_bid = pyqtSignal(object)\n",
"market_transaction_complete = pyqtSignal(object)\n",
"market_payment_received = pyqtSignal(object)\n",
"market_payment_sent = pyqtSignal(object)\n",
"market_iom_input_required = pyqtSignal(object)\n",
"events_started = pyqtSignal(object)\n",
"low_storage_signal = pyqtSignal(object)\n",
"credit_mining_signal = pyqtSignal(object)\n",
"tribler_shutdown_signal = pyqtSignal(str)\n",
"def __init__(self, api_port):...\n",
"QNetworkAccessManager.__init__(self)\n",
"url = QUrl('http://localhost:%d/events' % api_port)\n",
"self.request = QNetworkRequest(url)\n",
"self.failed_attempts = 0\n",
"self.connect_timer = QTimer()\n",
"self.current_event_string = ''\n",
"self.tribler_version = 'Unknown'\n",
"self.reply = None\n",
"self.emitted_tribler_started = False\n",
"self.shutting_down = False\n",
"self._logger = logging.getLogger('TriblerGUI')\n",
"def on_error(self, error, reschedule_on_err):...\n",
"self._logger.info('Got Tribler core error: %s' % error)\n",
"if error == QNetworkReply.ConnectionRefusedError:\n",
"if self.failed_attempts == 40:\n",
"def on_read_data(self):...\n",
"self.failed_attempts += 1\n",
"if self.receivers(self.finished) == 0:\n",
"if reschedule_on_err:\n",
"self.finished.connect(lambda reply: self.on_finished())\n",
"self.connect_timer.stop()\n",
"self.connect_timer = QTimer()\n",
"data = self.reply.readAll()\n",
"self.connect_timer.setSingleShot(True)\n",
"self.current_event_string += data\n",
"self.connect_timer.timeout.connect(self.connect)\n",
"if len(self.current_event_string) > 0 and self.current_event_string[-1\n",
"self.connect_timer.start(500)\n",
"for event in self.current_event_string.split('\\n'):\n",
"def on_finished(self):...\n",
"if len(event) == 0:\n",
"self.current_event_string = ''\n",
"\"\"\"docstring\"\"\"\n",
"json_dict = json.loads(str(event))\n",
"if self.shutting_down:\n",
"received_events.insert(0, (json_dict, time.time()))\n",
"return\n",
"self._logger.warning('Events connection dropped, attempting to reconnect')\n",
"if len(received_events) > 100:\n",
"self.failed_attempts = 0\n",
"received_events.pop()\n",
"if json_dict['type'] == 'torrent_info_updated':\n",
"self.connect_timer = QTimer()\n",
"self.torrent_info_updated.emit(json_dict['event'])\n",
"if json_dict['type'] == 'tribler_started' and not self.emitted_tribler_started:\n",
"self.connect_timer.setSingleShot(True)\n",
"self.tribler_started.emit()\n",
"if json_dict['type'] == 'new_version_available':\n",
"self.connect_timer.timeout.connect(self.connect)\n",
"self.emitted_tribler_started = True\n",
"self.new_version_available.emit(json_dict['event']['version'])\n",
"if json_dict['type'] == 'upgrader_started':\n",
"self.connect_timer.start(500)\n",
"self.upgrader_started.emit()\n",
"if json_dict['type'] == 'upgrader_finished':\n",
"def connect(self, reschedule_on_err=True):...\n",
"self.upgrader_finished.emit()\n",
"if json_dict['type'] == 'upgrader_tick':\n",
"self._logger.info('Will connect to events endpoint')\n",
"self.upgrader_tick.emit(json_dict['event']['text'])\n",
"if json_dict['type'] == 'channel_discovered':\n",
"self.reply = self.get(self.request)\n",
"self.discovered_channel.emit(json_dict['event'])\n",
"if json_dict['type'] == 'torrent_discovered':\n",
"self.reply.readyRead.connect(self.on_read_data)\n",
"self.discovered_torrent.emit(json_dict['event'])\n",
"if json_dict['type'] == 'events_start':\n",
"self.reply.error.connect(lambda error: self.on_error(error,\n reschedule_on_err=reschedule_on_err))\n",
"self.events_started.emit(json_dict['event'])\n",
"if json_dict['type'] == 'torrent_finished':\n",
"self.tribler_version = json_dict['event']['version']\n",
"self.torrent_finished.emit(json_dict['event'])\n",
"if json_dict['type'] == 'market_ask':\n",
"if json_dict['event']['tribler_started'] and not self.emitted_tribler_started:\n",
"self.received_market_ask.emit(json_dict['event'])\n",
"if json_dict['type'] == 'market_bid':\n",
"self.tribler_started.emit()\n",
"self.received_market_bid.emit(json_dict['event'])\n",
"if json_dict['type'] == 'market_ask_timeout':\n",
"self.emitted_tribler_started = True\n",
"self.expired_market_ask.emit(json_dict['event'])\n",
"if json_dict['type'] == 'market_bid_timeout':\n",
"self.expired_market_bid.emit(json_dict['event'])\n",
"if json_dict['type'] == 'market_transaction_complete':\n",
"self.market_transaction_complete.emit(json_dict['event'])\n",
"if json_dict['type'] == 'market_payment_received':\n",
"self.market_payment_received.emit(json_dict['event'])\n",
"if json_dict['type'] == 'market_payment_sent':\n",
"self.market_payment_sent.emit(json_dict['event'])\n",
"if json_dict['type'] == 'market_iom_input_required':\n",
"self.market_iom_input_required.emit(json_dict['event'])\n",
"if json_dict['type'] == 'signal_low_space':\n",
"self.low_storage_signal.emit(json_dict['event'])\n",
"if json_dict['type'] == 'credit_mining_error':\n",
"self.credit_mining_signal.emit(json_dict['event'])\n",
"if json_dict['type'] == 'shutdown':\n",
"self.tribler_shutdown_signal.emit(json_dict['event'])\n",
"if json_dict['type'] == 'tribler_exception':\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Expr'",
"Condition",
"Condition",
"FunctionDef'",
"AugAssign'",
"Condition",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"AugAssign'",
"Expr'",
"Condition",
"Expr'",
"For",
"FunctionDef'",
"Condition",
"Assign'",
"Docstring",
"Assign'",
"Condition",
"Expr'",
"Return'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"FunctionDef'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition"
] |
[
"def FUNC_3(VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = {'new': FUNC_8, 'tasks': FUNC_12, 'view': FUNC_11, 'complete':\n FUNC_6, 'analyze': FUNC_9}\n",
"return VAR_11[VAR_5]()\n",
"click.echo(chalk.red('Command does not exist!'))\n",
"click.echo('Try \"yoda goals --help\" for more info')\n"
] | [
"def check_sub_command(c):...\n",
"\"\"\"docstring\"\"\"\n",
"sub_commands = {'new': new_goal, 'tasks': view_related_tasks, 'view':\n list_goals, 'complete': complete_goal, 'analyze': goals_analysis}\n",
"return sub_commands[c]()\n",
"click.echo(chalk.red('Command does not exist!'))\n",
"click.echo('Try \"yoda goals --help\" for more info')\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'",
"Expr'",
"Expr'"
] |
[
"def __eq__(self, VAR_35):...\n",
"return self.__class__ == VAR_35.__class__ and self._flattened == VAR_35._flattened\n"
] | [
"def __eq__(self, other):...\n",
"return self.__class__ == other.__class__ and self._flattened == other._flattened\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_16(self):...\n",
"VAR_6 = None\n",
"VAR_6 = tempfile.NamedTemporaryFile(delete=False)\n",
"if VAR_6:\n",
"VAR_6.close()\n",
"os.remove(VAR_6.name)\n",
"os.chmod(VAR_6.name, stat.S_IREAD)\n",
"self._mox.StubOutWithMock(url_helper, 'UrlOpen')\n",
"url_helper.UrlOpen(mox.IgnoreArg(), method='GET').AndReturn('data')\n",
"self._mox.ReplayAll()\n",
"self.assertFalse(url_helper.DownloadFile(VAR_6.name, 'http://www.fakeurl.com'))\n",
"self._mox.VerifyAll()\n"
] | [
"def testDownloadFileSavingErrors(self):...\n",
"file_readonly = None\n",
"file_readonly = tempfile.NamedTemporaryFile(delete=False)\n",
"if file_readonly:\n",
"file_readonly.close()\n",
"os.remove(file_readonly.name)\n",
"os.chmod(file_readonly.name, stat.S_IREAD)\n",
"self._mox.StubOutWithMock(url_helper, 'UrlOpen')\n",
"url_helper.UrlOpen(mox.IgnoreArg(), method='GET').AndReturn('data')\n",
"self._mox.ReplayAll()\n",
"self.assertFalse(url_helper.DownloadFile(file_readonly.name,\n 'http://www.fakeurl.com'))\n",
"self._mox.VerifyAll()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_5(VAR_5):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = {'message': {'opened': {'header': '', 'footer': ''}, 'updated': {\n 'header': '', 'footer': ''}}, 'scanner': {'diff_only': False},\n 'pycodestyle': {'ignore': [], 'max-line-length': 79, 'count': False,\n 'first': False, 'show-pep8': False, 'filename': [], 'exclude': [],\n 'select': [], 'show-source': False, 'statistics': False, 'hang-closing':\n False}, 'no_blank_comment': True, 'only_mention_files_with_errors': True}\n",
"VAR_8 = {'Authorization': 'token ' + os.environ['GITHUB_TOKEN']}\n",
"VAR_9 = os.environ['BOT_USERNAME'], os.environ['BOT_PASSWORD']\n",
"VAR_10 = 'https://raw.githubusercontent.com/{}/{}/.pep8speaks.yml'\n",
"VAR_10 = VAR_10.format(VAR_5['repository'], VAR_5['after_commit_hash'])\n",
"VAR_11 = requests.get(VAR_10, VAR_8=headers, VAR_9=auth)\n",
"if VAR_11.status_code == 200:\n",
"VAR_14 = []\n",
"VAR_59 = yaml.load(VAR_11.text)\n",
"VAR_15 = VAR_6['pycodestyle']\n",
"VAR_6 = FUNC_2(VAR_6, VAR_59)\n",
"for VAR_61, value in VAR_15.items():\n",
"if value:\n",
"VAR_6['pycodestyle_cmd_config'] = ' {arguments}'.format(VAR_14=' '.join(\n arguments))\n",
"if isinstance(value, int):\n",
"VAR_6['pycodestyle']['ignore'] = [e.upper() for e in list(VAR_6[\n 'pycodestyle']['ignore'])]\n",
"if isinstance(value, bool):\n",
"if isinstance(value, list):\n",
"return VAR_6\n",
"VAR_14.append('--{}'.format(VAR_61))\n",
"VAR_14.append('--{}={}'.format(VAR_61, value))\n",
"VAR_14.append('--{}={}'.format(VAR_61, ','.join(value)))\n"
] | [
"def get_config(data):...\n",
"\"\"\"docstring\"\"\"\n",
"config = {'message': {'opened': {'header': '', 'footer': ''}, 'updated': {\n 'header': '', 'footer': ''}}, 'scanner': {'diff_only': False},\n 'pycodestyle': {'ignore': [], 'max-line-length': 79, 'count': False,\n 'first': False, 'show-pep8': False, 'filename': [], 'exclude': [],\n 'select': [], 'show-source': False, 'statistics': False, 'hang-closing':\n False}, 'no_blank_comment': True, 'only_mention_files_with_errors': True}\n",
"headers = {'Authorization': 'token ' + os.environ['GITHUB_TOKEN']}\n",
"auth = os.environ['BOT_USERNAME'], os.environ['BOT_PASSWORD']\n",
"url = 'https://raw.githubusercontent.com/{}/{}/.pep8speaks.yml'\n",
"url = url.format(data['repository'], data['after_commit_hash'])\n",
"r = requests.get(url, headers=headers, auth=auth)\n",
"if r.status_code == 200:\n",
"arguments = []\n",
"new_config = yaml.load(r.text)\n",
"confs = config['pycodestyle']\n",
"config = update_dict(config, new_config)\n",
"for key, value in confs.items():\n",
"if value:\n",
"config['pycodestyle_cmd_config'] = ' {arguments}'.format(arguments=' '.join\n (arguments))\n",
"if isinstance(value, int):\n",
"config['pycodestyle']['ignore'] = [e.upper() for e in list(config[\n 'pycodestyle']['ignore'])]\n",
"if isinstance(value, bool):\n",
"if isinstance(value, list):\n",
"return config\n",
"arguments.append('--{}'.format(key))\n",
"arguments.append('--{}={}'.format(key, value))\n",
"arguments.append('--{}={}'.format(key, ','.join(value)))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Condition",
"Return'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_3(self, VAR_8, VAR_9):...\n",
"api.keystone.mapping_delete(VAR_8, VAR_9)\n"
] | [
"def delete(self, request, obj_id):...\n",
"api.keystone.mapping_delete(request, obj_id)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_3(self):...\n",
"self._check_key(VAR_1)\n"
] | [
"def test_ecdsa_key(self):...\n",
"self._check_key(ECDSA_PUBKEY)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"@VAR_0.route('/email/verify/', defaults={'id_hash': None})...\n",
""
] | [
"@app.route('/email/verify/', defaults={'id_hash': None})...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"def FUNC_3():...\n",
"def FUNC_12():...\n",
"VAR_18 = requests.get(FUNC_2('healthy'))\n",
"VAR_18.raise_for_status()\n",
"return VAR_18.json()\n"
] | [
"def _CheckServerIsHealthyWithCache():...\n",
"def _ServerIsHealthy():...\n",
"response = requests.get(_BuildUri('healthy'))\n",
"response.raise_for_status()\n",
"return response.json()\n"
] | [
0,
0,
7,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_1(self, VAR_0):...\n",
"self.__init__(VAR_4=False)\n",
"self.name = VAR_0.get('name')\n",
"self.vars = VAR_0.get('vars', dict())\n",
"self.address = VAR_0.get('address', '')\n",
"self._uuid = VAR_0.get('uuid', None)\n",
"self.implicit = VAR_0.get('implicit', False)\n",
"VAR_8 = VAR_0.get('groups', [])\n",
"for group_data in VAR_8:\n",
"VAR_10 = Group()\n",
"VAR_10.deserialize(group_data)\n",
"self.groups.append(VAR_10)\n"
] | [
"def deserialize(self, data):...\n",
"self.__init__(gen_uuid=False)\n",
"self.name = data.get('name')\n",
"self.vars = data.get('vars', dict())\n",
"self.address = data.get('address', '')\n",
"self._uuid = data.get('uuid', None)\n",
"self.implicit = data.get('implicit', False)\n",
"groups = data.get('groups', [])\n",
"for group_data in groups:\n",
"g = Group()\n",
"g.deserialize(group_data)\n",
"self.groups.append(g)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_30(self):...\n",
"self.log.info('Initializing Evaluator')\n",
"from evproxy import EvaluatorProxy\n",
"def FUNC_39():...\n",
"from lib.evaluators.PyQt4Evaluator import Evaluator\n",
"return Evaluator()\n"
] | [
"def spawn_evaluators(self):...\n",
"self.log.info('Initializing Evaluator')\n",
"from evproxy import EvaluatorProxy\n",
"def ev_init():...\n",
"from lib.evaluators.PyQt4Evaluator import Evaluator\n",
"return Evaluator()\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"ImportFrom'",
"FunctionDef'",
"ImportFrom'",
"Return'"
] |
[
"def FUNC_6(self, VAR_28):...\n",
"if VAR_28 is None:\n",
"VAR_28 = 0\n",
"return VAR_55(int(VAR_28), 0)\n"
] | [
"def run(self, count):...\n",
"if count is None:\n",
"count = 0\n",
"return max(int(count), 0)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_11(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_15 = 'sos-collector'\n",
"if self.config['label']:\n",
"VAR_15 += '-%s' % self.config['label']\n",
"if self.config['case_id']:\n",
"VAR_15 += '-%s' % self.config['case_id']\n",
"VAR_16 = datetime.strftime(datetime.now(), '%Y-%m-%d')\n",
"string.lowercase = string.ascii_lowercase\n",
"VAR_17 = ''.join(random.choice(string.lowercase) for x in range(5))\n",
"return '%s-%s-%s' % (VAR_15, VAR_16, VAR_17)\n"
] | [
"def _get_archive_name(self):...\n",
"\"\"\"docstring\"\"\"\n",
"nstr = 'sos-collector'\n",
"if self.config['label']:\n",
"nstr += '-%s' % self.config['label']\n",
"if self.config['case_id']:\n",
"nstr += '-%s' % self.config['case_id']\n",
"dt = datetime.strftime(datetime.now(), '%Y-%m-%d')\n",
"string.lowercase = string.ascii_lowercase\n",
"rand = ''.join(random.choice(string.lowercase) for x in range(5))\n",
"return '%s-%s-%s' % (nstr, dt, rand)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"AugAssign'",
"Condition",
"AugAssign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"VAR_1 = bot.Bot(None, {}, 'https://localhost:1/',\n '1234-1a2b3c4-tainted-joe', 'base_dir', None)\n",
"VAR_2 = threading.Event()\n",
"VAR_1.call_later(0.001, VAR_2.set)\n",
"self.assertTrue(VAR_2.wait(1))\n"
] | [
"def test_bot_call_later(self):...\n",
"obj = bot.Bot(None, {}, 'https://localhost:1/', '1234-1a2b3c4-tainted-joe',\n 'base_dir', None)\n",
"ev = threading.Event()\n",
"obj.call_later(0.001, ev.set)\n",
"self.assertTrue(ev.wait(1))\n"
] | [
0,
5,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"import copy\n",
"import logging\n",
"import os\n",
"import sflock\n",
"from cuckoo.common.config import emit_options\n",
"from cuckoo.common.exceptions import CuckooOperationalError\n",
"from cuckoo.common.files import Folders, Files, Storage\n",
"from cuckoo.common.utils import validate_url, validate_hash\n",
"from cuckoo.common.virustotal import VirusTotalAPI\n",
"from cuckoo.core.database import Database\n",
"VAR_0 = logging.getLogger(__name__)\n",
"VAR_1 = Database()\n",
"def FUNC_0(self, VAR_2, VAR_3, VAR_4):...\n",
"if not VAR_4:\n",
"return\n",
"if validate_hash(VAR_4):\n",
"if validate_url(VAR_4):\n",
"VAR_21 = VirusTotalAPI().hash_fetch(VAR_4)\n",
"VAR_2['errors'].append('Error retrieving file hash: %s' % e)\n",
"VAR_18 = Files.create(VAR_3, VAR_4, VAR_21)\n",
"VAR_2['data'].append({'type': 'url', 'data': VAR_4})\n",
"VAR_2['errors'].append(\"'%s' was neither a valid hash or url\" % VAR_4)\n",
"return\n",
"VAR_2['data'].append({'type': 'file', 'data': VAR_18})\n",
"return\n",
"def FUNC_1(self, VAR_5, VAR_6):...\n",
"return\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_5 not in ('strings', 'files'):\n",
"VAR_0.error(\"Bad parameter '%s' for submit_type\", VAR_5)\n",
"VAR_13 = Folders.create_temp()\n",
"return False\n",
"VAR_14 = {'data': [], 'errors': []}\n",
"if VAR_5 == 'strings':\n",
"for VAR_4 in VAR_6:\n",
"if VAR_5 == 'files':\n",
"self._handle_string(VAR_14, VAR_13, VAR_4)\n",
"for entry in VAR_6:\n",
"return Database().add_submit(VAR_13, VAR_5, VAR_14)\n",
"VAR_22 = Storage.get_filename_from_path(entry['name'])\n",
"VAR_18 = Files.create(VAR_13, VAR_22, entry['data'])\n",
"VAR_14['data'].append({'type': 'file', 'data': VAR_18})\n"
] | [
"import copy\n",
"import logging\n",
"import os\n",
"import sflock\n",
"from cuckoo.common.config import emit_options\n",
"from cuckoo.common.exceptions import CuckooOperationalError\n",
"from cuckoo.common.files import Folders, Files, Storage\n",
"from cuckoo.common.utils import validate_url, validate_hash\n",
"from cuckoo.common.virustotal import VirusTotalAPI\n",
"from cuckoo.core.database import Database\n",
"log = logging.getLogger(__name__)\n",
"db = Database()\n",
"def _handle_string(self, submit, tmppath, line):...\n",
"if not line:\n",
"return\n",
"if validate_hash(line):\n",
"if validate_url(line):\n",
"filedata = VirusTotalAPI().hash_fetch(line)\n",
"submit['errors'].append('Error retrieving file hash: %s' % e)\n",
"filepath = Files.create(tmppath, line, filedata)\n",
"submit['data'].append({'type': 'url', 'data': line})\n",
"submit['errors'].append(\"'%s' was neither a valid hash or url\" % line)\n",
"return\n",
"submit['data'].append({'type': 'file', 'data': filepath})\n",
"return\n",
"def pre(self, submit_type, data):...\n",
"return\n",
"\"\"\"docstring\"\"\"\n",
"if submit_type not in ('strings', 'files'):\n",
"log.error(\"Bad parameter '%s' for submit_type\", submit_type)\n",
"path_tmp = Folders.create_temp()\n",
"return False\n",
"submit_data = {'data': [], 'errors': []}\n",
"if submit_type == 'strings':\n",
"for line in data:\n",
"if submit_type == 'files':\n",
"self._handle_string(submit_data, path_tmp, line)\n",
"for entry in data:\n",
"return Database().add_submit(path_tmp, submit_type, submit_data)\n",
"filename = Storage.get_filename_from_path(entry['name'])\n",
"filepath = Files.create(path_tmp, filename, entry['data'])\n",
"submit_data['data'].append({'type': 'file', 'data': filepath})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Return'",
"Expr'",
"Return'",
"FunctionDef'",
"Return'",
"Docstring",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Assign'",
"Condition",
"For",
"Condition",
"Expr'",
"For",
"Return'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_2 = {'SEVERITY': {'MEDIUM': 1}, 'CONFIDENCE': {'HIGH': 1}}\n",
"self.check_example('cipher-modes.py', VAR_2)\n"
] | [
"def test_cipher_modes(self):...\n",
"\"\"\"docstring\"\"\"\n",
"expect = {'SEVERITY': {'MEDIUM': 1}, 'CONFIDENCE': {'HIGH': 1}}\n",
"self.check_example('cipher-modes.py', expect)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |
[
"def FUNC_10(self, VAR_11):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_27 = io.StringIO()\n",
"VAR_31 = connection.cursor()\n",
"VAR_31.execute('select * from wins_{};'.format(VAR_11))\n",
"VAR_30 = csv.writer(VAR_27)\n",
"VAR_32 = [i[0] for i in VAR_31.description]\n",
"VAR_30.writerow(VAR_32)\n",
"VAR_30.writerows(VAR_31)\n",
"return VAR_27.getvalue()\n"
] | [
"def _make_plain_csv(self, table):...\n",
"\"\"\"docstring\"\"\"\n",
"stringio = io.StringIO()\n",
"cursor = connection.cursor()\n",
"cursor.execute('select * from wins_{};'.format(table))\n",
"csv_writer = csv.writer(stringio)\n",
"header = [i[0] for i in cursor.description]\n",
"csv_writer.writerow(header)\n",
"csv_writer.writerows(cursor)\n",
"return stringio.getvalue()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_15(self, VAR_2, VAR_3, VAR_4, VAR_5, VAR_6, VAR_7={}):...\n",
"VAR_40 = {}\n",
"for VAR_23 in self.browse(VAR_2, VAR_3, VAR_4, VAR_7=context):\n",
"VAR_40[VAR_23.id] = self._str_get(VAR_23, VAR_25='===')\n",
"return VAR_40\n",
"VAR_54 = VAR_23.parent_id\n",
"VAR_24 = 0\n",
"while VAR_54:\n",
"VAR_24 -= 1\n",
"VAR_55 = map(lambda x: (x, 1), VAR_23.child_ids)\n",
"VAR_40[VAR_23.id] = self._str_get(VAR_54, VAR_24) + VAR_40[VAR_23.id]\n",
"while VAR_55:\n",
"VAR_54 = VAR_54.parent_id\n",
"VAR_54 = VAR_55.pop(0)\n",
"VAR_40[VAR_23.id] = VAR_40[VAR_23.id] + self._str_get(VAR_54[0], VAR_54[1])\n",
"VAR_55 += map(lambda x: (x, VAR_54[1] + 1), VAR_54[0].child_ids)\n"
] | [
"def _history_get(self, cr, uid, ids, name, args, context={}):...\n",
"result = {}\n",
"for task in self.browse(cr, uid, ids, context=context):\n",
"result[task.id] = self._str_get(task, border='===')\n",
"return result\n",
"t2 = task.parent_id\n",
"level = 0\n",
"while t2:\n",
"level -= 1\n",
"t3 = map(lambda x: (x, 1), task.child_ids)\n",
"result[task.id] = self._str_get(t2, level) + result[task.id]\n",
"while t3:\n",
"t2 = t2.parent_id\n",
"t2 = t3.pop(0)\n",
"result[task.id] = result[task.id] + self._str_get(t2[0], t2[1])\n",
"t3 += map(lambda x: (x, t2[1] + 1), t2[0].child_ids)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Condition",
"AugAssign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"AugAssign'"
] |
[
"def FUNC_2(self):...\n",
"VAR_5 = set(bot_main.get_dimensions(None))\n",
"VAR_5.discard('hidpi')\n",
"VAR_5.discard('zone')\n",
"VAR_6 = {'cores', 'cpu', 'gpu', 'id', 'machine_type', 'os', 'pool'}\n",
"self.assertEqual(VAR_6, VAR_5)\n"
] | [
"def test_get_dimensions(self):...\n",
"dimensions = set(bot_main.get_dimensions(None))\n",
"dimensions.discard('hidpi')\n",
"dimensions.discard('zone')\n",
"expected = {'cores', 'cpu', 'gpu', 'id', 'machine_type', 'os', 'pool'}\n",
"self.assertEqual(expected, dimensions)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_3(self, *VAR_4, **VAR_5):...\n",
"VAR_11 = self.REQUEST\n",
"VAR_14 = ['zmi']\n",
"VAR_14.append(VAR_11['lang'])\n",
"VAR_14.extend(map(lambda x: VAR_5[x], VAR_5.keys()))\n",
"VAR_14.append(self.meta_id)\n",
"VAR_15 = self.attr('internal_dict')\n",
"if isinstance(VAR_15, dict) and VAR_15.get('css_classes', None):\n",
"VAR_14.extend(VAR_15['css_classes'])\n",
"VAR_14.extend(VAR_11['AUTHENTICATED_USER'].getRolesInContext(self))\n",
"return ' '.join(VAR_14)\n"
] | [
"def zmi_body_class(self, *args, **kwargs):...\n",
"request = self.REQUEST\n",
"l = ['zmi']\n",
"l.append(request['lang'])\n",
"l.extend(map(lambda x: kwargs[x], kwargs.keys()))\n",
"l.append(self.meta_id)\n",
"internal_dict = self.attr('internal_dict')\n",
"if isinstance(internal_dict, dict) and internal_dict.get('css_classes', None):\n",
"l.extend(internal_dict['css_classes'])\n",
"l.extend(request['AUTHENTICATED_USER'].getRolesInContext(self))\n",
"return ' '.join(l)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_6(VAR_1, VAR_5=''):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_18 = VAR_17(VAR_1.name)\n",
"return VAR_16.pop(VAR_18, None)\n"
] | [
"def token_delete(remote, token=''):...\n",
"\"\"\"docstring\"\"\"\n",
"session_key = token_session_key(remote.name)\n",
"return session.pop(session_key, None)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_1(self, VAR_16, VAR_17=None, VAR_18=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if re.match('^https?://', VAR_16):\n",
"return VAR_16\n",
"return resolve_path(VAR_7=self.project, VAR_17=language, VAR_18=\n version_slug, VAR_16=filename)\n"
] | [
"def get_full_path(self, filename, language=None, version_slug=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if re.match('^https?://', filename):\n",
"return filename\n",
"return resolve_path(project=self.project, language=language, version_slug=\n version_slug, filename=filename)\n"
] | [
0,
0,
6,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_8(self):...\n",
"\"\"\"docstring\"\"\"\n",
"Log.add('Calculating distance matrix in higher-order network (k = ' + str(\n self.order) + ') ...', Severity.INFO)\n",
"VAR_16 = _co.defaultdict(lambda : _co.defaultdict(lambda : _np.inf))\n",
"for VAR_25 in self.nodes:\n",
"VAR_16[VAR_25][VAR_25] = 0\n",
"for VAR_45 in self.edges:\n",
"VAR_16[VAR_45[0]][VAR_45[1]] = 1\n",
"for VAR_1 in self.nodes:\n",
"for VAR_25 in self.nodes:\n",
"Log.add('finished.', Severity.INFO)\n",
"for VAR_34 in self.nodes:\n",
"return VAR_16\n",
"if VAR_16[VAR_25][VAR_34] > VAR_16[VAR_25][VAR_1] + VAR_16[VAR_1][VAR_34]:\n",
"VAR_16[VAR_25][VAR_34] = VAR_16[VAR_25][VAR_1] + VAR_16[VAR_1][VAR_34]\n"
] | [
"def getDistanceMatrix(self):...\n",
"\"\"\"docstring\"\"\"\n",
"Log.add('Calculating distance matrix in higher-order network (k = ' + str(\n self.order) + ') ...', Severity.INFO)\n",
"dist = _co.defaultdict(lambda : _co.defaultdict(lambda : _np.inf))\n",
"for v in self.nodes:\n",
"dist[v][v] = 0\n",
"for e in self.edges:\n",
"dist[e[0]][e[1]] = 1\n",
"for k in self.nodes:\n",
"for v in self.nodes:\n",
"Log.add('finished.', Severity.INFO)\n",
"for w in self.nodes:\n",
"return dist\n",
"if dist[v][w] > dist[v][k] + dist[k][w]:\n",
"dist[v][w] = dist[v][k] + dist[k][w]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"For",
"Assign'",
"For",
"Assign'",
"For",
"For",
"Expr'",
"For",
"Return'",
"Condition",
"Assign'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"VAR_19 = self.common.get_volume_stats(VAR_5)\n",
"VAR_19['storage_protocol'] = 'iSCSI'\n",
"VAR_20 = self.configuration.safe_get('volume_backend_name')\n",
"VAR_19['volume_backend_name'] = VAR_20 or self.__class__.__name__\n",
"self.common.client_logout()\n",
"return VAR_19\n"
] | [
"@utils.synchronized('3par', external=True)...\n",
"self.common.client_login()\n",
"stats = self.common.get_volume_stats(refresh)\n",
"stats['storage_protocol'] = 'iSCSI'\n",
"backend_name = self.configuration.safe_get('volume_backend_name')\n",
"stats['volume_backend_name'] = backend_name or self.__class__.__name__\n",
"self.common.client_logout()\n",
"return stats\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_15(self, VAR_6):...\n",
""
] | [
"def set_priority(self, priority):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_18(self, VAR_36, VAR_37=None):...\n",
"VAR_37 = frappe.permissions.get_user_permission_doctypes(VAR_37, VAR_36)\n",
"VAR_1 = frappe.get_meta(self.doctype)\n",
"for doctypes in VAR_37:\n",
"VAR_65 = {}\n",
"VAR_59 = []\n",
"for VAR_6 in VAR_1.get_fields_to_check_permissions(doctypes):\n",
"VAR_69 = VAR_36.get(VAR_6.options, [])\n",
"if VAR_59:\n",
"VAR_70 = 'ifnull(`tab{doctype}`.`{fieldname}`, \"\")=\"\"'.format(VAR_0=self.\n doctype, fieldname=df.fieldname)\n",
"self.match_conditions.append(' and '.join(VAR_59))\n",
"if VAR_65:\n",
"if VAR_69:\n",
"self.match_filters.append(VAR_65)\n",
"if not cint(frappe.get_system_settings('apply_strict_user_permissions')):\n",
"VAR_63 = VAR_70\n",
"VAR_63 = VAR_70 + ' or '\n",
"VAR_63 = ''\n",
"VAR_59.append('({condition})'.format(VAR_63=condition))\n",
"VAR_63 += '`tab{doctype}`.`{fieldname}` in ({values})'.format(VAR_0=self.\n doctype, fieldname=df.fieldname, VAR_61=', '.join([('\"' + frappe.db.\n escape(v, percent=False) + '\"') for v in user_permission_values]))\n",
"VAR_65[VAR_6.options] = VAR_69\n"
] | [
"def add_user_permissions(self, user_permissions, user_permission_doctypes=None...\n",
"user_permission_doctypes = frappe.permissions.get_user_permission_doctypes(\n user_permission_doctypes, user_permissions)\n",
"meta = frappe.get_meta(self.doctype)\n",
"for doctypes in user_permission_doctypes:\n",
"match_filters = {}\n",
"match_conditions = []\n",
"for df in meta.get_fields_to_check_permissions(doctypes):\n",
"user_permission_values = user_permissions.get(df.options, [])\n",
"if match_conditions:\n",
"cond = 'ifnull(`tab{doctype}`.`{fieldname}`, \"\")=\"\"'.format(doctype=self.\n doctype, fieldname=df.fieldname)\n",
"self.match_conditions.append(' and '.join(match_conditions))\n",
"if match_filters:\n",
"if user_permission_values:\n",
"self.match_filters.append(match_filters)\n",
"if not cint(frappe.get_system_settings('apply_strict_user_permissions')):\n",
"condition = cond\n",
"condition = cond + ' or '\n",
"condition = ''\n",
"match_conditions.append('({condition})'.format(condition=condition))\n",
"condition += '`tab{doctype}`.`{fieldname}` in ({values})'.format(doctype=\n self.doctype, fieldname=df.fieldname, values=', '.join([('\"' + frappe.\n db.escape(v, percent=False) + '\"') for v in user_permission_values]))\n",
"match_filters[df.options] = user_permission_values\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Assign'",
"Assign'",
"For",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"AugAssign'",
"Assign'"
] |
[
"def FUNC_16():...\n",
"VAR_19.join(timeout=self._connecttimeout)\n",
"if VAR_19.is_alive():\n",
"warnings.warn('timed out waiting for connection')\n",
"if self._session is None:\n",
"VAR_22 = 'unable to connect after {} secs'.format(self._connecttimeout)\n",
"if self._run_server_ex is None:\n",
"VAR_22 = VAR_22 + os.linesep + self._run_server_ex\n"
] | [
"def wait():...\n",
"t.join(timeout=self._connecttimeout)\n",
"if t.is_alive():\n",
"warnings.warn('timed out waiting for connection')\n",
"if self._session is None:\n",
"message = 'unable to connect after {} secs'.format(self._connecttimeout)\n",
"if self._run_server_ex is None:\n",
"message = message + os.linesep + self._run_server_ex\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Assign'"
] |
[
"def FUNC_2(self, VAR_4):...\n",
"if VAR_4:\n",
"VAR_23 = os.path.dirname(os.path.abspath(__file__))\n",
"VAR_24 = os.path.join(VAR_23, '..', '..', '..', VAR_4)\n",
"self.loader = XMLFile(FilePath(VAR_24))\n"
] | [
"def _set_loader(self, banner):...\n",
"if banner:\n",
"current_path = os.path.dirname(os.path.abspath(__file__))\n",
"banner_file_path = os.path.join(current_path, '..', '..', '..', banner)\n",
"self.loader = XMLFile(FilePath(banner_file_path))\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def __init__(self, VAR_4, VAR_5):...\n",
"def FUNC_16():...\n",
"VAR_38 = socket(AF_INET, SOCK_STREAM)\n",
"VAR_38.bind(('', 0))\n",
"VAR_39 = VAR_38.getsockname()[1]\n",
"VAR_38.close()\n",
"return VAR_39\n"
] | [
"def __init__(self, database, options):...\n",
"def find_open_port():...\n",
"sock = socket(AF_INET, SOCK_STREAM)\n",
"sock.bind(('', 0))\n",
"port = sock.getsockname()[1]\n",
"sock.close()\n",
"return port\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_9(self, VAR_9, VAR_10):...\n",
"if self._IsServerAlive():\n",
"return FUNC_9(VAR_9, VAR_10)\n"
] | [
"def SendCommandRequest(self, arguments, completer):...\n",
"if self._IsServerAlive():\n",
"return SendCommandRequest(arguments, completer)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'"
] |
[
"def FUNC_0(VAR_5):...\n",
"while not VAR_5.ok:\n",
"VAR_2.pop(-1)\n",
"print(f\"There are {VAR_2.count('NULL')} columns\")\n",
"VAR_2.extend([',', 'NULL', '--'])\n",
"return VAR_2\n",
"VAR_3['category'] = f\"Lifestyle{' '.join(VAR_2)}\"\n",
"VAR_5 = VAR_4.get(VAR_0, VAR_1=sqli)\n"
] | [
"def sqli_union_1_lab(response):...\n",
"while not response.ok:\n",
"null.pop(-1)\n",
"print(f\"There are {null.count('NULL')} columns\")\n",
"null.extend([',', 'NULL', '--'])\n",
"return null\n",
"sqli['category'] = f\"Lifestyle{' '.join(null)}\"\n",
"response = api_session.get(url, params=sqli)\n"
] | [
0,
4,
4,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Return'",
"Assign'",
"Assign'"
] |
[
"def FUNC_17(self, VAR_2, VAR_3, VAR_4, VAR_27, VAR_28=0.0):...\n",
"return {'value': {'remaining_hours': VAR_27 - VAR_28}}\n"
] | [
"def onchange_planned(self, cr, uid, ids, planned, effective=0.0):...\n",
"return {'value': {'remaining_hours': planned - effective}}\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_3(self, VAR_4, VAR_6):...\n",
"VAR_5 = 'select * from {} limit 10000'.format(VAR_6)\n",
"return self.execute_query(VAR_4, VAR_5)\n"
] | [
"def get_table_summary(self, connection_url, table_name):...\n",
"query = 'select * from {} limit 10000'.format(table_name)\n",
"return self.execute_query(connection_url, query)\n"
] | [
0,
4,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"@VAR_47.middleware('response')...\n",
"if VAR_51.status == 404 and VAR_6.path.endswith('/'):\n",
"VAR_61 = VAR_6.path.rstrip('/')\n",
"if VAR_6.query_string:\n",
"VAR_61 = '{}?{}'.format(VAR_61, VAR_6.query_string)\n",
"return response.redirect(VAR_61)\n"
] | [
"@app.middleware('response')...\n",
"if original_response.status == 404 and request.path.endswith('/'):\n",
"path = request.path.rstrip('/')\n",
"if request.query_string:\n",
"path = '{}?{}'.format(path, request.query_string)\n",
"return response.redirect(path)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_5(VAR_4):...\n",
"VAR_9 = FUNC_0()\n",
"VAR_10 = VAR_9.cursor()\n",
"VAR_10.execute('string'.format(VAR_4))\n",
"VAR_2.error('Execution failed with error: {}'.format(e))\n",
"VAR_17 = VAR_10.fetchone()[0] + 1\n",
"VAR_2.debug('Rank of {} found for name {}'.format(VAR_17, VAR_4))\n",
"VAR_9.close()\n",
"return VAR_17\n"
] | [
"def karma_rank(name):...\n",
"db = db_connect()\n",
"cursor = db.cursor()\n",
"cursor.execute(\n \"\"\"\n SELECT (SELECT COUNT(*) FROM people AS t2 WHERE t2.karma > t1.karma)\n AS row_Num FROM people AS t1 WHERE name='{}'\n \"\"\"\n .format(name))\n",
"logger.error('Execution failed with error: {}'.format(e))\n",
"rank = cursor.fetchone()[0] + 1\n",
"logger.debug('Rank of {} found for name {}'.format(rank, name))\n",
"db.close()\n",
"return rank\n"
] | [
0,
0,
0,
4,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_22(VAR_30, VAR_3):...\n",
"if not isinstance(VAR_30, _IOFile):\n",
"return IOFile(VAR_30, VAR_16=self)\n",
"return VAR_30.apply_wildcards(VAR_3, fill_missing=f in self.dynamic_input,\n fail_dynamic=self.dynamic_output)\n"
] | [
"def concretize_iofile(f, wildcards):...\n",
"if not isinstance(f, _IOFile):\n",
"return IOFile(f, rule=self)\n",
"return f.apply_wildcards(wildcards, fill_missing=f in self.dynamic_input,\n fail_dynamic=self.dynamic_output)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_7(self, VAR_19, VAR_17=None, VAR_18=None):...\n",
"if VAR_19.endswith('.html'):\n",
"VAR_0.debug('Redirecting %s', self)\n",
"VAR_19 = VAR_19[1:]\n",
"VAR_27 = re.sub('.html$', '/', VAR_19)\n",
"return self.get_full_path(VAR_16=to, VAR_17=language, VAR_18=version_slug)\n"
] | [
"def redirect_sphinx_htmldir(self, path, language=None, version_slug=None):...\n",
"if path.endswith('.html'):\n",
"log.debug('Redirecting %s', self)\n",
"path = path[1:]\n",
"to = re.sub('.html$', '/', path)\n",
"return self.get_full_path(filename=to, language=language, version_slug=\n version_slug)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_7(self, VAR_3):...\n",
"if VAR_3.name in self.host_names:\n",
"self.hosts.remove(VAR_3)\n",
"self._hosts.remove(VAR_3.name)\n",
"VAR_3.remove_group(self)\n",
"self.clear_hosts_cache()\n"
] | [
"def remove_host(self, host):...\n",
"if host.name in self.host_names:\n",
"self.hosts.remove(host)\n",
"self._hosts.remove(host.name)\n",
"host.remove_group(self)\n",
"self.clear_hosts_cache()\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_7(self, VAR_10=None):...\n",
"VAR_25 = VAR_20.path.join(self.parlai_home, 'downloads')\n",
"VAR_26 = self.add_argument_group('Main ParlAI Arguments')\n",
"VAR_26.add_argument('-t', '--task', help=\n 'ParlAI task(s), e.g. \"babi:Task1\" or \"babi,cbt\"')\n",
"VAR_26.add_argument('--download-path', default=default_downloads_path, help\n =\n 'path for non-data dependencies to store any needed files.defaults to {parlai_dir}/downloads'\n )\n",
"VAR_26.add_argument('-dt', '--datatype', default='train', choices=['train',\n 'train:stream', 'train:ordered', 'train:ordered:stream',\n 'train:stream:ordered', 'valid', 'valid:stream', 'test', 'test:stream'],\n help=\n 'choose from: train, train:ordered, valid, test. to stream data add \":stream\" to any option (e.g., train:stream). by default: train is random with replacement, valid is ordered, test is ordered.'\n )\n",
"VAR_26.add_argument('-im', '--image-mode', default='raw', type=str, help=\n 'image preprocessor to use. default is \"raw\". set to \"none\" to skip image loading.'\n )\n",
"VAR_26.add_argument('-nt', '--numthreads', default=1, type=int, help=\n 'number of threads. If batchsize set to 1, used for hogwild; otherwise, used for number of threads in threadpool loading, e.g. in vqa'\n )\n",
"VAR_26.add_argument('--hide-labels', default=False, type='bool', help=\n 'default (False) moves labels in valid and test sets to the eval_labels field. If True, they are hidden completely.'\n )\n",
"VAR_27 = self.add_argument_group('Batching Arguments')\n",
"VAR_27.add_argument('-bs', '--batchsize', default=1, type=int, help=\n 'batch size for minibatch training schemes')\n",
"VAR_27.add_argument('-bsrt', '--batch-sort', default=True, type='bool',\n help=\n 'If enabled (default True), create batches by flattening all episodes to have exactly one utterance exchange and then sorting all the examples according to their length. This dramatically reduces the amount of padding present after examples have been parsed, speeding up training.'\n )\n",
"VAR_27.add_argument('-clen', '--context-length', default=-1, type=int, help\n =\n 'Number of past utterances to remember when building flattened batches of data in multi-example episodes.'\n )\n",
"VAR_27.add_argument('-incl', '--include-labels', default=True, type='bool',\n help=\n 'Specifies whether or not to include labels as past utterances when building flattened batches of data in multi-example episodes.'\n )\n",
"self.add_parlai_data_path(VAR_26)\n"
] | [
"def add_parlai_args(self, args=None):...\n",
"default_downloads_path = os.path.join(self.parlai_home, 'downloads')\n",
"parlai = self.add_argument_group('Main ParlAI Arguments')\n",
"parlai.add_argument('-t', '--task', help=\n 'ParlAI task(s), e.g. \"babi:Task1\" or \"babi,cbt\"')\n",
"parlai.add_argument('--download-path', default=default_downloads_path, help\n =\n 'path for non-data dependencies to store any needed files.defaults to {parlai_dir}/downloads'\n )\n",
"parlai.add_argument('-dt', '--datatype', default='train', choices=['train',\n 'train:stream', 'train:ordered', 'train:ordered:stream',\n 'train:stream:ordered', 'valid', 'valid:stream', 'test', 'test:stream'],\n help=\n 'choose from: train, train:ordered, valid, test. to stream data add \":stream\" to any option (e.g., train:stream). by default: train is random with replacement, valid is ordered, test is ordered.'\n )\n",
"parlai.add_argument('-im', '--image-mode', default='raw', type=str, help=\n 'image preprocessor to use. default is \"raw\". set to \"none\" to skip image loading.'\n )\n",
"parlai.add_argument('-nt', '--numthreads', default=1, type=int, help=\n 'number of threads. If batchsize set to 1, used for hogwild; otherwise, used for number of threads in threadpool loading, e.g. in vqa'\n )\n",
"parlai.add_argument('--hide-labels', default=False, type='bool', help=\n 'default (False) moves labels in valid and test sets to the eval_labels field. If True, they are hidden completely.'\n )\n",
"batch = self.add_argument_group('Batching Arguments')\n",
"batch.add_argument('-bs', '--batchsize', default=1, type=int, help=\n 'batch size for minibatch training schemes')\n",
"batch.add_argument('-bsrt', '--batch-sort', default=True, type='bool', help\n =\n 'If enabled (default True), create batches by flattening all episodes to have exactly one utterance exchange and then sorting all the examples according to their length. This dramatically reduces the amount of padding present after examples have been parsed, speeding up training.'\n )\n",
"batch.add_argument('-clen', '--context-length', default=-1, type=int, help=\n 'Number of past utterances to remember when building flattened batches of data in multi-example episodes.'\n )\n",
"batch.add_argument('-incl', '--include-labels', default=True, type='bool',\n help=\n 'Specifies whether or not to include labels as past utterances when building flattened batches of data in multi-example episodes.'\n )\n",
"self.add_parlai_data_path(parlai)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_13(self):...\n",
"VAR_19 = list(filter(lambda VAR_28: VAR_28.protected, self.expanded_output))\n",
"if VAR_19:\n"
] | [
"def check_protected_output(self):...\n",
"protected = list(filter(lambda f: f.protected, self.expanded_output))\n",
"if protected:\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition"
] |
[
"def FUNC_6(self, VAR_24, VAR_25):...\n",
"\"\"\"docstring\"\"\"\n",
"if not VAR_24[VAR_10] and os.path.exists(VAR_25) and not os.path.islink(VAR_25\n",
"VAR_24[VAR_10] = VAR_25\n"
] | [
"def update_realfile(self, f, realfile):...\n",
"\"\"\"docstring\"\"\"\n",
"if not f[A_REALFILE] and os.path.exists(realfile) and not os.path.islink(\n",
"f[A_REALFILE] = realfile\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'"
] |
[
"def FUNC_38(self):...\n",
"\"\"\"docstring\"\"\"\n",
"pyodbc.lowercase = True\n",
"self.cursor = self.cnxn.cursor()\n",
"self.cursor.execute('create table t1(Abc int, dEf int)')\n",
"self.cursor.execute('select * from t1')\n",
"VAR_27 = [VAR_32[0] for VAR_32 in self.cursor.description]\n",
"VAR_27.sort()\n",
"self.assertEqual(VAR_27, ['abc', 'def'])\n",
"pyodbc.lowercase = False\n"
] | [
"def test_lower_case(self):...\n",
"\"\"\"docstring\"\"\"\n",
"pyodbc.lowercase = True\n",
"self.cursor = self.cnxn.cursor()\n",
"self.cursor.execute('create table t1(Abc int, dEf int)')\n",
"self.cursor.execute('select * from t1')\n",
"names = [t[0] for t in self.cursor.description]\n",
"names.sort()\n",
"self.assertEqual(names, ['abc', 'def'])\n",
"pyodbc.lowercase = False\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'"
] |
[
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_15 = ''\n",
"for x in VAR_2.split(' '):\n",
"if x not in VAR_15:\n",
"return VAR_15.rstrip()\n",
"VAR_15 += x + ' '\n"
] | [
"@staticmethod...\n",
"\"\"\"docstring\"\"\"\n",
"deduped_string = ''\n",
"for x in string.split(' '):\n",
"if x not in deduped_string:\n",
"return deduped_string.rstrip()\n",
"deduped_string += x + ' '\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"For",
"Condition",
"Return'",
"AugAssign'"
] |
[
"def __init__(self, VAR_5):...\n",
"if isinstance(VAR_5, (list, tuple)):\n",
"self._resource_iters = [iter([(i, r) for i, r in enumerate(VAR_5)])]\n",
"self._resource_iters = [iter([(None, VAR_5)])]\n",
"self._field_iters = []\n",
"self._path = [(CLASS_0, CLASS_0, CLASS_0)]\n",
"self._resource_stack = [None]\n"
] | [
"def __init__(self, resource):...\n",
"if isinstance(resource, (list, tuple)):\n",
"self._resource_iters = [iter([(i, r) for i, r in enumerate(resource)])]\n",
"self._resource_iters = [iter([(None, resource)])]\n",
"self._field_iters = []\n",
"self._path = [(NotSupplied, NotSupplied, NotSupplied)]\n",
"self._resource_stack = [None]\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_3(self, VAR_4):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_9 = \"SELECT tid FROM tids WHERE rowid = '\" + str(VAR_4) + \"'\"\n",
"self.query(VAR_9)\n",
"return self.c.fetchone()[0]\n"
] | [
"def tid_num_to_tid(self, tid_num):...\n",
"\"\"\"docstring\"\"\"\n",
"q = \"SELECT tid FROM tids WHERE rowid = '\" + str(tid_num) + \"'\"\n",
"self.query(q)\n",
"return self.c.fetchone()[0]\n"
] | [
0,
0,
4,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_9(self):...\n",
"return combine_vars(self.vars, self.get_magic_vars())\n"
] | [
"def get_vars(self):...\n",
"return combine_vars(self.vars, self.get_magic_vars())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"from __future__ import absolute_import, division, print_function\n",
"__metaclass__ = type\n",
"from ansible.inventory.group import Group\n",
"from ansible.utils.vars import combine_vars, get_unique_id\n",
"__all__ = ['Host']\n",
"\"\"\" a single ansible host \"\"\"\n",
"def __getstate__(self):...\n",
"return self.serialize()\n"
] | [
"from __future__ import absolute_import, division, print_function\n",
"__metaclass__ = type\n",
"from ansible.inventory.group import Group\n",
"from ansible.utils.vars import combine_vars, get_unique_id\n",
"__all__ = ['Host']\n",
"\"\"\" a single ansible host \"\"\"\n",
"def __getstate__(self):...\n",
"return self.serialize()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"Assign'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Expr'",
"FunctionDef'",
"Return'"
] |
[
"def __init__(self, VAR_7, VAR_20=True, *VAR_15, **VAR_16):...\n",
"CLASS_0.__init__(self, VAR_7, *VAR_15, **kw)\n",
"self.redirect = VAR_20\n"
] | [
"def __init__(self, param, redirect=True, *a, **kw):...\n",
"Validator.__init__(self, param, *a, **kw)\n",
"self.redirect = redirect\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'"
] |
[
"def FUNC_4(self, VAR_7, VAR_12):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_17 = []\n",
"VAR_2 = VAR_1.view_submit(VAR_7)\n",
"for entry in VAR_12['file_selection']:\n",
"VAR_10 = copy.deepcopy(VAR_12['global'])\n",
"return VAR_17\n",
"VAR_10.update(entry)\n",
"VAR_11 = copy.deepcopy(VAR_12['global']['options'])\n",
"VAR_11.update(entry.get('per_file_options', {}))\n",
"VAR_19 = {'package': VAR_10.get('package'), 'timeout': VAR_10.get('timeout',\n 120), 'priority': VAR_10.get('priority'), 'custom': VAR_10.get('custom'\n ), 'owner': VAR_10.get('owner'), 'tags': VAR_10.get('tags'), 'memory':\n VAR_10.get('memory'), 'enforce_timeout': VAR_11.get('enforce-timeout'),\n 'machine': VAR_10.get('machine'), 'platform': VAR_10.get('platform'),\n 'options': self.translate_options(VAR_10, VAR_11), 'submit_id': VAR_7}\n",
"if entry['type'] == 'url':\n",
"VAR_17.append(VAR_1.add_url(url=info['filename'], **kw))\n",
"VAR_20 = Folders.create_temp()\n",
"if not VAR_10['extrpath']:\n",
"VAR_24 = os.path.join(VAR_2.tmp_path, os.path.basename(VAR_10['filename']))\n",
"if len(VAR_10['extrpath']) == 1:\n",
"VAR_18 = Files.copy(VAR_24, VAR_20=path_dest)\n",
"VAR_25 = os.path.join(VAR_2.tmp_path, os.path.basename(VAR_10['arcname']))\n",
"VAR_25 = os.path.join(VAR_2.tmp_path, os.path.basename(VAR_10['arcname']))\n",
"VAR_17.append(VAR_1.add_path(file_path=filepath, **kw))\n",
"if not os.path.exists(VAR_25):\n",
"if not os.path.exists(VAR_25):\n",
"VAR_2.data['errors'].append('Unable to find parent archive file: %s' % os.\n path.basename(VAR_10['arcname']))\n",
"VAR_26 = sflock.zipify(sflock.unpack(VAR_10['arcname'], contents=open(\n arcpath, 'rb').read()))\n",
"VAR_2.data['errors'].append('Unable to find parent archive file: %s' % os.\n path.basename(VAR_10['arcname']))\n",
"VAR_27 = sflock.unpack(VAR_25).read(VAR_10['extrpath'][:-1])\n",
"VAR_25 = Files.temp_named_put(VAR_26, os.path.basename(VAR_10['arcname']))\n",
"VAR_28 = sflock.unpack(VAR_10['extrpath'][-2], contents=content)\n",
"VAR_17.append(VAR_1.add_archive(file_path=arcpath, VAR_22=info['filename'],\n **kw))\n",
"VAR_25 = Files.temp_named_put(sflock.zipify(VAR_28), os.path.basename(\n VAR_10['extrpath'][-2]))\n",
"VAR_17.append(VAR_1.add_archive(file_path=arcpath, VAR_22=info['filename'],\n **kw))\n"
] | [
"def submit(self, submit_id, config):...\n",
"\"\"\"docstring\"\"\"\n",
"ret = []\n",
"submit = db.view_submit(submit_id)\n",
"for entry in config['file_selection']:\n",
"info = copy.deepcopy(config['global'])\n",
"return ret\n",
"info.update(entry)\n",
"options = copy.deepcopy(config['global']['options'])\n",
"options.update(entry.get('per_file_options', {}))\n",
"kw = {'package': info.get('package'), 'timeout': info.get('timeout', 120),\n 'priority': info.get('priority'), 'custom': info.get('custom'), 'owner':\n info.get('owner'), 'tags': info.get('tags'), 'memory': info.get(\n 'memory'), 'enforce_timeout': options.get('enforce-timeout'), 'machine':\n info.get('machine'), 'platform': info.get('platform'), 'options': self.\n translate_options(info, options), 'submit_id': submit_id}\n",
"if entry['type'] == 'url':\n",
"ret.append(db.add_url(url=info['filename'], **kw))\n",
"path_dest = Folders.create_temp()\n",
"if not info['extrpath']:\n",
"path = os.path.join(submit.tmp_path, os.path.basename(info['filename']))\n",
"if len(info['extrpath']) == 1:\n",
"filepath = Files.copy(path, path_dest=path_dest)\n",
"arcpath = os.path.join(submit.tmp_path, os.path.basename(info['arcname']))\n",
"arcpath = os.path.join(submit.tmp_path, os.path.basename(info['arcname']))\n",
"ret.append(db.add_path(file_path=filepath, **kw))\n",
"if not os.path.exists(arcpath):\n",
"if not os.path.exists(arcpath):\n",
"submit.data['errors'].append('Unable to find parent archive file: %s' % os.\n path.basename(info['arcname']))\n",
"arc = sflock.zipify(sflock.unpack(info['arcname'], contents=open(arcpath,\n 'rb').read()))\n",
"submit.data['errors'].append('Unable to find parent archive file: %s' % os.\n path.basename(info['arcname']))\n",
"content = sflock.unpack(arcpath).read(info['extrpath'][:-1])\n",
"arcpath = Files.temp_named_put(arc, os.path.basename(info['arcname']))\n",
"subarc = sflock.unpack(info['extrpath'][-2], contents=content)\n",
"ret.append(db.add_archive(file_path=arcpath, filename=info['filename'], **kw))\n",
"arcpath = Files.temp_named_put(sflock.zipify(subarc), os.path.basename(info\n ['extrpath'][-2]))\n",
"ret.append(db.add_archive(file_path=arcpath, filename=info['filename'], **kw))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"For",
"Assign'",
"Return'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_7, VAR_20=True, *VAR_15, **VAR_16):...\n",
"CLASS_6.__init__(self, VAR_7, *VAR_15, VAR_20=redirect, **kw)\n"
] | [
"def __init__(self, param, redirect=True, *a, **kw):...\n",
"VMeetup.__init__(self, param, *a, redirect=redirect, **kw)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_18(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_60 = self._get_smb_connection()\n",
"if VAR_60 and VAR_60.connect(CLASS_1.smb_ip, CLASS_1.smb_port):\n",
"VAR_65 = self.env['ir.config_parameter']\n",
"VAR_66 = VAR_65.get_param('partner_compassion.share_on_nas')\n",
"VAR_67 = VAR_65.get_param('partner_compassion.store_path')\n",
"VAR_68 = tempfile.NamedTemporaryFile()\n",
"VAR_69 = VAR_60.retrieveFile(VAR_66, VAR_67, VAR_68)\n",
"VAR_70 = VAR_69[1]\n",
"if VAR_70:\n",
"VAR_68.flush()\n",
"VAR_71 = tempfile.mkdtemp()\n",
"pyminizip.uncompress(VAR_68.name, CLASS_1.file_pw, VAR_71, 0)\n",
"VAR_72 = VAR_71 + '/partner_data.csv'\n",
"VAR_74 = csv.writer(csv_file)\n",
"VAR_74.writerow([str(self.id), self.ref, self.contact_address, fields.Date.\n today()])\n",
"VAR_73 = tempfile.NamedTemporaryFile()\n",
"pyminizip.compress(VAR_72, '', VAR_73.name, CLASS_1.file_pw, 5)\n",
"VAR_60.storeFile(VAR_66, VAR_67, VAR_73)\n",
"VAR_1.error('string' + VAR_73.name)\n"
] | [
"def _secure_save_data(self):...\n",
"\"\"\"docstring\"\"\"\n",
"smb_conn = self._get_smb_connection()\n",
"if smb_conn and smb_conn.connect(SmbConfig.smb_ip, SmbConfig.smb_port):\n",
"config_obj = self.env['ir.config_parameter']\n",
"share_nas = config_obj.get_param('partner_compassion.share_on_nas')\n",
"store_path = config_obj.get_param('partner_compassion.store_path')\n",
"src_zip_file = tempfile.NamedTemporaryFile()\n",
"attrs = smb_conn.retrieveFile(share_nas, store_path, src_zip_file)\n",
"file_size = attrs[1]\n",
"if file_size:\n",
"src_zip_file.flush()\n",
"zip_dir = tempfile.mkdtemp()\n",
"pyminizip.uncompress(src_zip_file.name, SmbConfig.file_pw, zip_dir, 0)\n",
"csv_path = zip_dir + '/partner_data.csv'\n",
"csv_writer = csv.writer(csv_file)\n",
"csv_writer.writerow([str(self.id), self.ref, self.contact_address, fields.\n Date.today()])\n",
"dst_zip_file = tempfile.NamedTemporaryFile()\n",
"pyminizip.compress(csv_path, '', dst_zip_file.name, SmbConfig.file_pw, 5)\n",
"smb_conn.storeFile(share_nas, store_path, dst_zip_file)\n",
"logger.error(\n \"Couldn't store secure partner data on NAS. Please do it manually by replicating the following file: \"\n + dst_zip_file.name)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_4(self):...\n",
"VAR_1 = ['DTXSID9022528', 'DTXSID1020273', 'DTXSID6026296', 'DTXSID2021781']\n",
"VAR_2 = stats_by_dtxsids(VAR_1)\n",
"for e in VAR_2:\n",
"if e['sid'] == 'DTXSID9022528':\n",
"self.assertEqual(0, VAR_3['products_n'],\n 'There should be 0 products associated with ethylparaben')\n",
"VAR_3 = e\n",
"self.client.login(username='Karyn', password='specialP@55word')\n",
"VAR_4 = DataDocument.objects.filter(pk__in=ExtractedChemical.objects.filter\n (dsstox__sid='DTXSID9022528').values('extracted_text__data_document'))\n",
"VAR_5 = VAR_4[0]\n",
"VAR_6 = VAR_5.data_group.data_source\n",
"VAR_7 = Product.objects.create(data_source=ds, title='Test Product', upc=\n 'Test UPC for ProductToPUC')\n",
"VAR_8 = ProductDocument.objects.create(document=dd, product=p)\n",
"VAR_8.save()\n",
"VAR_5.refresh_from_db()\n",
"VAR_2 = stats_by_dtxsids(VAR_1)\n",
"for e in VAR_2:\n",
"if e['sid'] == 'DTXSID9022528':\n",
"self.assertEqual(1, VAR_3['products_n'],\n 'There should now be 1 product associated with ethylparaben')\n",
"VAR_3 = e\n"
] | [
"def test_dtxsid_products_n(self):...\n",
"dtxs = ['DTXSID9022528', 'DTXSID1020273', 'DTXSID6026296', 'DTXSID2021781']\n",
"stats = stats_by_dtxsids(dtxs)\n",
"for e in stats:\n",
"if e['sid'] == 'DTXSID9022528':\n",
"self.assertEqual(0, ethylparaben_stats['products_n'],\n 'There should be 0 products associated with ethylparaben')\n",
"ethylparaben_stats = e\n",
"self.client.login(username='Karyn', password='specialP@55word')\n",
"dds = DataDocument.objects.filter(pk__in=ExtractedChemical.objects.filter(\n dsstox__sid='DTXSID9022528').values('extracted_text__data_document'))\n",
"dd = dds[0]\n",
"ds = dd.data_group.data_source\n",
"p = Product.objects.create(data_source=ds, title='Test Product', upc=\n 'Test UPC for ProductToPUC')\n",
"pd = ProductDocument.objects.create(document=dd, product=p)\n",
"pd.save()\n",
"dd.refresh_from_db()\n",
"stats = stats_by_dtxsids(dtxs)\n",
"for e in stats:\n",
"if e['sid'] == 'DTXSID9022528':\n",
"self.assertEqual(1, ethylparaben_stats['products_n'],\n 'There should now be 1 product associated with ethylparaben')\n",
"ethylparaben_stats = e\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"For",
"Condition",
"Expr'",
"Assign'"
] |
[
"def FUNC_6(self, VAR_66, VAR_23):...\n",
"if VAR_66:\n",
"VAR_103 = cache.get(VAR_100(self.cache_prefix + '_' + VAR_66))\n",
"VAR_101.errors.add(errors.EXPIRED)\n",
"VAR_15 = Account._byID(VAR_103, data=True)\n",
"return None\n",
"if VAR_23 and VAR_15.name.lower() != VAR_23.lower():\n",
"VAR_101.errors.add(errors.BAD_USERNAME)\n",
"if VAR_15:\n",
"return VAR_15\n"
] | [
"def run(self, key, name):...\n",
"if key:\n",
"uid = cache.get(str(self.cache_prefix + '_' + key))\n",
"c.errors.add(errors.EXPIRED)\n",
"a = Account._byID(uid, data=True)\n",
"return None\n",
"if name and a.name.lower() != name.lower():\n",
"c.errors.add(errors.BAD_USERNAME)\n",
"if a:\n",
"return a\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Return'",
"Condition",
"Expr'",
"Condition",
"Return'"
] |
[
"def FUNC_16(self):...\n",
"self.assertEqual(self.driver._get_space_in_gb('123.0GB'), 123.0)\n",
"self.assertEqual(self.driver._get_space_in_gb('123.0TB'), 123.0 * 1024)\n",
"self.assertEqual(self.driver._get_space_in_gb('1024.0MB'), 1.0)\n"
] | [
"def test_get_space_in_gb(self):...\n",
"self.assertEqual(self.driver._get_space_in_gb('123.0GB'), 123.0)\n",
"self.assertEqual(self.driver._get_space_in_gb('123.0TB'), 123.0 * 1024)\n",
"self.assertEqual(self.driver._get_space_in_gb('1024.0MB'), 1.0)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_5(self, VAR_10):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_5 = {}\n",
"VAR_5['clusterName'] = self.configuration.san_clustername\n",
"if self.configuration.san_thin_provision:\n",
"VAR_5['thinProvision'] = '1'\n",
"VAR_5['thinProvision'] = '0'\n",
"VAR_5['volumeName'] = VAR_10['name']\n",
"if int(VAR_10['size']) == 0:\n",
"VAR_5['size'] = '100MB'\n",
"VAR_5['size'] = '%sGB' % VAR_10['size']\n",
"self._cliq_run_xml('createVolume', VAR_5)\n",
"VAR_27 = self._cliq_get_volume_info(VAR_10['name'])\n",
"VAR_8 = VAR_27['volume.clusterName']\n",
"VAR_28 = VAR_27['volume.iscsiIqn']\n",
"VAR_29 = '1'\n",
"if not self.cluster_vip:\n",
"self.cluster_vip = self._cliq_get_cluster_vip(VAR_8)\n",
"VAR_30 = self.cluster_vip + ':3260,' + VAR_29\n",
"VAR_31 = {}\n",
"VAR_31['provider_location'] = '%s %s %s' % (VAR_30, VAR_28, 0)\n",
"return VAR_31\n"
] | [
"def create_volume(self, volume):...\n",
"\"\"\"docstring\"\"\"\n",
"cliq_args = {}\n",
"cliq_args['clusterName'] = self.configuration.san_clustername\n",
"if self.configuration.san_thin_provision:\n",
"cliq_args['thinProvision'] = '1'\n",
"cliq_args['thinProvision'] = '0'\n",
"cliq_args['volumeName'] = volume['name']\n",
"if int(volume['size']) == 0:\n",
"cliq_args['size'] = '100MB'\n",
"cliq_args['size'] = '%sGB' % volume['size']\n",
"self._cliq_run_xml('createVolume', cliq_args)\n",
"volume_info = self._cliq_get_volume_info(volume['name'])\n",
"cluster_name = volume_info['volume.clusterName']\n",
"iscsi_iqn = volume_info['volume.iscsiIqn']\n",
"cluster_interface = '1'\n",
"if not self.cluster_vip:\n",
"self.cluster_vip = self._cliq_get_cluster_vip(cluster_name)\n",
"iscsi_portal = self.cluster_vip + ':3260,' + cluster_interface\n",
"model_update = {}\n",
"model_update['provider_location'] = '%s %s %s' % (iscsi_portal, iscsi_iqn, 0)\n",
"return model_update\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_3(self, VAR_3=True):...\n",
"VAR_20 = self.result['dut_serial_port'] + ', ' + str(self.result['id']\n ) + ': ' + self.result['outcome_category'] + ' - ' + self.result['outcome']\n",
"if self.result['data_diff'] is not None and self.result['data_diff'] < 1.0:\n",
"VAR_20 += ' {0:.2f}%'.format(max(self.result['data_diff'] * 100, 99.99))\n",
"print(colored(VAR_20, 'blue'))\n",
"self.update('result')\n",
"if VAR_3:\n",
"self.__create_result()\n"
] | [
"def log_result(self, create_result=True):...\n",
"out = self.result['dut_serial_port'] + ', ' + str(self.result['id']\n ) + ': ' + self.result['outcome_category'] + ' - ' + self.result['outcome']\n",
"if self.result['data_diff'] is not None and self.result['data_diff'] < 1.0:\n",
"out += ' {0:.2f}%'.format(max(self.result['data_diff'] * 100, 99.99))\n",
"print(colored(out, 'blue'))\n",
"self.update('result')\n",
"if create_result:\n",
"self.__create_result()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"AugAssign'",
"Expr'",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_17(self):...\n",
""
] | [
"def touch(self):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_39(VAR_101):...\n",
"VAR_101.priority = VAR_77\n",
"return VAR_101\n"
] | [
"def decorate(ruleinfo):...\n",
"ruleinfo.priority = priority\n",
"return ruleinfo\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"@mock.patch('requests.post', FUNC_0)...\n",
"\"\"\"docstring\"\"\"\n",
"self.client.login(username='autotest2', password='password')\n",
"VAR_4 = '/api/apps'\n",
"VAR_5 = self.client.post(VAR_4)\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_6 = VAR_5.data['id']\n",
"self.client.login(username='autotest', password='password')\n",
"VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n",
"VAR_7 = {'image': 'autotest/example'}\n",
"VAR_5 = self.client.post(VAR_4, json.dumps(VAR_7), content_type=\n 'application/json')\n",
"self.assertEqual(VAR_5.status_code, 201)\n",
"VAR_13 = Build.objects.get(uuid=response.data['uuid'])\n",
"self.assertEqual(str(VAR_13), '{}-{}'.format(VAR_5.data['app'], VAR_5.data[\n 'uuid'][:7]))\n"
] | [
"@mock.patch('requests.post', mock_import_repository_task)...\n",
"\"\"\"docstring\"\"\"\n",
"self.client.login(username='autotest2', password='password')\n",
"url = '/api/apps'\n",
"response = self.client.post(url)\n",
"self.assertEqual(response.status_code, 201)\n",
"app_id = response.data['id']\n",
"self.client.login(username='autotest', password='password')\n",
"url = '/api/apps/{app_id}/builds'.format(**locals())\n",
"body = {'image': 'autotest/example'}\n",
"response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n",
"self.assertEqual(response.status_code, 201)\n",
"build = Build.objects.get(uuid=response.data['uuid'])\n",
"self.assertEqual(str(build), '{}-{}'.format(response.data['app'], response.\n data['uuid'][:7]))\n"
] | [
0,
0,
5,
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def __init__(self, VAR_0, VAR_1, VAR_2, VAR_3, VAR_4, VAR_5, VAR_6=False,...\n",
"self.map_pos = Point()\n",
"self.map_angle = 0\n",
"self._path = None\n",
"Localization.__init__(self, VAR_0, VAR_1, VAR_2, VAR_3, VAR_4, VAR_5)\n",
"Navigation.__init__(self, VAR_6=jerky, VAR_7=walking_speed)\n",
"self._logger = Logger('NavLoc')\n",
"VAR_15 = time()\n",
"while time() - VAR_15 < 0.5:\n"
] | [
"def __init__(self, point_ids, locations, neighbors, landmark_ids,...\n",
"self.map_pos = Point()\n",
"self.map_angle = 0\n",
"self._path = None\n",
"Localization.__init__(self, point_ids, locations, neighbors, landmark_ids,\n landmark_positions, landmark_angles)\n",
"Navigation.__init__(self, jerky=jerky, walking_speed=walking_speed)\n",
"self._logger = Logger('NavLoc')\n",
"timer = time()\n",
"while time() - timer < 0.5:\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Condition"
] |
[
"import json\n",
"import multiprocessing\n",
"from leap.mail.adaptors.soledad import SoledadMailAdaptor\n",
"from leap.srp_session import SRPSession\n",
"from mockito import mock\n",
"import os\n",
"import shutil\n",
"import time\n",
"import uuid\n",
"import random\n",
"from leap.mail.imap.account import IMAPAccount\n",
"from leap.soledad.client import Soledad\n",
"from mock import Mock\n",
"from twisted.internet import reactor, defer\n",
"from twisted.internet.defer import succeed\n",
"from twisted.web.resource import getChildForRequest\n",
"from zope.interface import implementer\n",
"from twisted.cred import checkers, credentials\n",
"from pixelated.adapter.mailstore.leap_attachment_store import LeapAttachmentStore\n",
"from pixelated.adapter.services.feedback_service import FeedbackService\n",
"from pixelated.application import ServicesFactory, UserAgentMode, SingleUserServicesFactory, set_up_protected_resources\n",
"from pixelated.bitmask_libraries.config import LeapConfig\n",
"from pixelated.bitmask_libraries.session import LeapSession\n",
"from pixelated.config.services import Services\n",
"from pixelated.config.site import PixelatedSite\n",
"from pixelated.adapter.mailstore import LeapMailStore\n",
"from pixelated.adapter.mailstore.searchable_mailstore import SearchableMailStore\n",
"from pixelated.adapter.search import SearchEngine\n",
"from pixelated.adapter.services.draft_service import DraftService\n",
"from pixelated.adapter.services.mail_service import MailService\n",
"from pixelated.resources.root_resource import RootResource\n",
"from test.support.integration.model import MailBuilder\n",
"from test.support.test_helper import request_mock\n",
"from test.support.integration.model import ResponseMail\n",
"from tempdir import TempDir\n",
"VAR_2 = (\n 'Þ3?\\x87ÿÙÓ\\x14ð§>\\x1f%C{\\x16.\\\\®\\x8c\\x13§û\\x04Ô]+\\x8d_íÑ\\x8d\\x0bI\\x8a\\x0e¤tm«¿´¥\\x99\\x00dÕw\\x9f\\x18¼\\x1dÔ_WÒ¶èH\\x83\\x1bØ\\x9d\\xad'\n )\n",
"def __init__(self, VAR_3, VAR_4):...\n",
"self._user_id = VAR_3\n",
"self._leap_home = VAR_4\n",
"self._uuid = str(VAR_1.uuid4())\n",
"self._mail_address = '%[email protected]' % VAR_3\n",
"self._soledad = None\n",
"self._services = None\n",
"@defer.inlineCallbacks...\n",
"VAR_37 = os.path.join(self._leap_home, self._uuid)\n",
"self.soledad = yield FUNC_0(VAR_0=soledad_test_folder, VAR_1=self._uuid)\n",
"self.search_engine = SearchEngine(self.INDEX_KEY, user_home=soledad_test_folder\n )\n",
"self.keymanager = mock()\n",
"self.mail_sender = self._create_mail_sender()\n",
"self.mail_store = SearchableMailStore(LeapMailStore(self.soledad), self.\n search_engine)\n",
"self.attachment_store = LeapAttachmentStore(self.soledad)\n",
"yield self._initialize_imap_account()\n",
"self.draft_service = DraftService(self.mail_store)\n",
"self.leap_session = mock()\n",
"self.feedback_service = FeedbackService(self.leap_session)\n",
"self.mail_service = self._create_mail_service(self.mail_sender, self.\n mail_store, self.search_engine, self.attachment_store)\n",
"VAR_58 = yield self.mail_service.all_mails()\n",
"if len(VAR_58) > 0:\n",
"self.search_engine.index_mails(VAR_58)\n",
"@property...\n",
"if self._services is None:\n",
"VAR_79 = mock(Services)\n",
"return self._services\n",
"VAR_79.keymanager = self.keymanager\n",
"VAR_79.mail_service = self.mail_service\n",
"VAR_79.draft_service = self.draft_service\n",
"VAR_79.search_engine = self.search_engine\n",
"VAR_79.feedback_service = self.feedback_service\n",
"VAR_79._leap_session = self.leap_session\n",
"self._services = VAR_79\n",
"self.leap_session.close = lambda : 'mocked'\n"
] | [
"import json\n",
"import multiprocessing\n",
"from leap.mail.adaptors.soledad import SoledadMailAdaptor\n",
"from leap.srp_session import SRPSession\n",
"from mockito import mock\n",
"import os\n",
"import shutil\n",
"import time\n",
"import uuid\n",
"import random\n",
"from leap.mail.imap.account import IMAPAccount\n",
"from leap.soledad.client import Soledad\n",
"from mock import Mock\n",
"from twisted.internet import reactor, defer\n",
"from twisted.internet.defer import succeed\n",
"from twisted.web.resource import getChildForRequest\n",
"from zope.interface import implementer\n",
"from twisted.cred import checkers, credentials\n",
"from pixelated.adapter.mailstore.leap_attachment_store import LeapAttachmentStore\n",
"from pixelated.adapter.services.feedback_service import FeedbackService\n",
"from pixelated.application import ServicesFactory, UserAgentMode, SingleUserServicesFactory, set_up_protected_resources\n",
"from pixelated.bitmask_libraries.config import LeapConfig\n",
"from pixelated.bitmask_libraries.session import LeapSession\n",
"from pixelated.config.services import Services\n",
"from pixelated.config.site import PixelatedSite\n",
"from pixelated.adapter.mailstore import LeapMailStore\n",
"from pixelated.adapter.mailstore.searchable_mailstore import SearchableMailStore\n",
"from pixelated.adapter.search import SearchEngine\n",
"from pixelated.adapter.services.draft_service import DraftService\n",
"from pixelated.adapter.services.mail_service import MailService\n",
"from pixelated.resources.root_resource import RootResource\n",
"from test.support.integration.model import MailBuilder\n",
"from test.support.test_helper import request_mock\n",
"from test.support.integration.model import ResponseMail\n",
"from tempdir import TempDir\n",
"INDEX_KEY = (\n 'Þ3?\\x87ÿÙÓ\\x14ð§>\\x1f%C{\\x16.\\\\®\\x8c\\x13§û\\x04Ô]+\\x8d_íÑ\\x8d\\x0bI\\x8a\\x0e¤tm«¿´¥\\x99\\x00dÕw\\x9f\\x18¼\\x1dÔ_WÒ¶èH\\x83\\x1bØ\\x9d\\xad'\n )\n",
"def __init__(self, user_id, leap_home):...\n",
"self._user_id = user_id\n",
"self._leap_home = leap_home\n",
"self._uuid = str(uuid.uuid4())\n",
"self._mail_address = '%[email protected]' % user_id\n",
"self._soledad = None\n",
"self._services = None\n",
"@defer.inlineCallbacks...\n",
"soledad_test_folder = os.path.join(self._leap_home, self._uuid)\n",
"self.soledad = yield initialize_soledad(tempdir=soledad_test_folder, uuid=\n self._uuid)\n",
"self.search_engine = SearchEngine(self.INDEX_KEY, user_home=soledad_test_folder\n )\n",
"self.keymanager = mock()\n",
"self.mail_sender = self._create_mail_sender()\n",
"self.mail_store = SearchableMailStore(LeapMailStore(self.soledad), self.\n search_engine)\n",
"self.attachment_store = LeapAttachmentStore(self.soledad)\n",
"yield self._initialize_imap_account()\n",
"self.draft_service = DraftService(self.mail_store)\n",
"self.leap_session = mock()\n",
"self.feedback_service = FeedbackService(self.leap_session)\n",
"self.mail_service = self._create_mail_service(self.mail_sender, self.\n mail_store, self.search_engine, self.attachment_store)\n",
"mails = yield self.mail_service.all_mails()\n",
"if len(mails) > 0:\n",
"self.search_engine.index_mails(mails)\n",
"@property...\n",
"if self._services is None:\n",
"services = mock(Services)\n",
"return self._services\n",
"services.keymanager = self.keymanager\n",
"services.mail_service = self.mail_service\n",
"services.draft_service = self.draft_service\n",
"services.search_engine = self.search_engine\n",
"services.feedback_service = self.feedback_service\n",
"services._leap_session = self.leap_session\n",
"self._services = services\n",
"self.leap_session.close = lambda : 'mocked'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Condition",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"@auth.require(acl.is_bot)...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_28, VAR_14, VAR_9, VAR_25, VAR_24, VAR_26 = self._process()\n",
"VAR_15 = VAR_25.get('sleep_streak', 0)\n",
"VAR_16 = bool(VAR_26)\n",
"VAR_30 = 'bot_inactive' if VAR_16 else 'bot_active'\n",
"stats.add_entry(VAR_30=action, VAR_14=bot_id, VAR_24=dimensions)\n",
"def FUNC_14(VAR_31, VAR_17=None, VAR_32=None):...\n",
"bot_management.bot_event(VAR_31=event_type, VAR_14=bot_id, external_ip=self\n .request.remote_addr, VAR_24=dimensions, VAR_25=state, VAR_9=version,\n VAR_16=quarantined, VAR_17=task_id, VAR_32=task_name, VAR_8=quarantined_msg\n )\n",
"VAR_18 = bot_code.get_bot_version(self.request.host_url)\n",
"if VAR_9 != VAR_18:\n",
"FUNC_14('request_update')\n",
"if VAR_16:\n",
"self._cmd_update(VAR_18)\n",
"FUNC_14('request_sleep')\n",
"VAR_33, VAR_34 = bot_management.should_restart_bot(VAR_14, VAR_25)\n",
"return\n",
"self._cmd_sleep(VAR_15, VAR_16)\n",
"if VAR_33:\n",
"return\n",
"FUNC_14('request_restart')\n",
"VAR_4, VAR_54 = task_scheduler.bot_reap_task(VAR_24, VAR_14, VAR_9, VAR_25.\n get('lease_expiration_ts'))\n",
"self.abort(500, 'Deadline')\n",
"self._cmd_restart(VAR_34)\n",
"if not VAR_4:\n",
"return\n",
"FUNC_14('request_sleep')\n",
"if VAR_4.properties.is_terminate:\n",
"logging.exception('Dang, exception after reaping')\n",
"self._cmd_sleep(VAR_15, VAR_16)\n",
"FUNC_14('bot_terminate', VAR_17=run_result.task_id)\n",
"FUNC_14('request_task', VAR_17=run_result.task_id, VAR_32=request.name)\n",
"return\n",
"self._cmd_terminate(VAR_54.task_id)\n",
"self._cmd_run(VAR_4, VAR_54.key, VAR_14)\n"
] | [
"@auth.require(acl.is_bot)...\n",
"\"\"\"docstring\"\"\"\n",
"_request, bot_id, version, state, dimensions, quarantined_msg = self._process()\n",
"sleep_streak = state.get('sleep_streak', 0)\n",
"quarantined = bool(quarantined_msg)\n",
"action = 'bot_inactive' if quarantined else 'bot_active'\n",
"stats.add_entry(action=action, bot_id=bot_id, dimensions=dimensions)\n",
"def bot_event(event_type, task_id=None, task_name=None):...\n",
"bot_management.bot_event(event_type=event_type, bot_id=bot_id, external_ip=\n self.request.remote_addr, dimensions=dimensions, state=state, version=\n version, quarantined=quarantined, task_id=task_id, task_name=task_name,\n message=quarantined_msg)\n",
"expected_version = bot_code.get_bot_version(self.request.host_url)\n",
"if version != expected_version:\n",
"bot_event('request_update')\n",
"if quarantined:\n",
"self._cmd_update(expected_version)\n",
"bot_event('request_sleep')\n",
"needs_restart, restart_message = bot_management.should_restart_bot(bot_id,\n state)\n",
"return\n",
"self._cmd_sleep(sleep_streak, quarantined)\n",
"if needs_restart:\n",
"return\n",
"bot_event('request_restart')\n",
"request, run_result = task_scheduler.bot_reap_task(dimensions, bot_id,\n version, state.get('lease_expiration_ts'))\n",
"self.abort(500, 'Deadline')\n",
"self._cmd_restart(restart_message)\n",
"if not request:\n",
"return\n",
"bot_event('request_sleep')\n",
"if request.properties.is_terminate:\n",
"logging.exception('Dang, exception after reaping')\n",
"self._cmd_sleep(sleep_streak, quarantined)\n",
"bot_event('bot_terminate', task_id=run_result.task_id)\n",
"bot_event('request_task', task_id=run_result.task_id, task_name=request.name)\n",
"return\n",
"self._cmd_terminate(run_result.task_id)\n",
"self._cmd_run(request, run_result.key, bot_id)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Assign'",
"Return'",
"Expr'",
"Condition",
"Return'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Return'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Return'",
"Expr'",
"Expr'"
] |
[
"def FUNC_2(VAR_4, VAR_10, VAR_11, VAR_12=None, VAR_9=None):...\n",
"VAR_16 = collect_config.collect_config(VAR_4, VAR_9)\n",
"VAR_23 = VAR_10.split('.')\n",
"for VAR_10 in VAR_23:\n",
"value_types.ensure_type(str(VAR_16), VAR_11)\n",
"VAR_16 = VAR_16[VAR_10]\n",
"if VAR_12 is not None:\n",
"print(str(VAR_16))\n",
"print(str(VAR_12))\n",
"return\n"
] | [
"def print_key(config_path, key, type_name, default=None, fallback_metadata=None...\n",
"config = collect_config.collect_config(config_path, fallback_metadata)\n",
"keys = key.split('.')\n",
"for key in keys:\n",
"value_types.ensure_type(str(config), type_name)\n",
"config = config[key]\n",
"if default is not None:\n",
"print(str(config))\n",
"print(str(default))\n",
"return\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_3(VAR_2, VAR_3):...\n",
"VAR_11 = crypt.crypt(VAR_3, '22')\n",
"os.system('useradd -G docker,wheel -p ' + VAR_11 + ' ' + VAR_2)\n"
] | [
"def add_user(username, password):...\n",
"encPass = crypt.crypt(password, '22')\n",
"os.system('useradd -G docker,wheel -p ' + encPass + ' ' + username)\n"
] | [
0,
0,
7
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_8(VAR_12, VAR_3, VAR_4=-1):...\n",
"VAR_7 = VAR_12[0]\n",
"VAR_15 = VAR_12[1]\n",
"VAR_25 = 1 << VAR_15 - 1\n",
"return (VAR_7 & VAR_25 - 1) - (VAR_7 & VAR_25)\n"
] | [
"def signExtend(params, symbols, symId=-1):...\n",
"x = params[0]\n",
"i = params[1]\n",
"sign_bit = 1 << i - 1\n",
"return (x & sign_bit - 1) - (x & sign_bit)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@staticmethod...\n",
"return openid.create_login_url(VAR_3, VAR_23)\n"
] | [
"@staticmethod...\n",
"return openid.create_login_url(request, dest_url)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_24(VAR_36):...\n",
"return type(VAR_36).__name__ == 'cython_function_or_method'\n"
] | [
"def check_cython(x):...\n",
"return type(x).__name__ == 'cython_function_or_method'\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def __gt__(self, VAR_11):...\n",
"return self.rule.__gt__(VAR_11.rule)\n"
] | [
"def __gt__(self, other):...\n",
"return self.rule.__gt__(other.rule)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
]