Fields (four parallel sequences per record; sequence lengths range 1–444):

  lines      – identifier-obfuscated code lines
  raw_lines  – the corresponding original code lines
  label      – per-line integer label
  type       – per-line AST node type (e.g. FunctionDef', Assign', Return')
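Each record below pairs the four sequences index-for-index: the i-th entry of
lines is the obfuscated form of the i-th entry of raw_lines, with label and
type giving that line's integer tag and AST node kind. A minimal sketch of how
such records could be walked, assuming they are available as a parsed JSON
list; the file name "records.json" and the treatment of non-zero labels as
lines of interest are illustrative assumptions, not part of the data:

import json

# Load the records; each one holds four parallel, equal-length sequences.
with open("records.json") as fh:
    records = json.load(fh)

for rec in records:
    for obf, raw, lab, node in zip(rec["lines"], rec["raw_lines"],
                                   rec["label"], rec["type"]):
        # Assumption: a non-zero label marks a line of interest.
        if lab != 0:
            print(f"{node:<14} label={lab}")
            print("  obfuscated:", obf.rstrip())
            print("  original:  ", raw.rstrip())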
[ "def FUNC_2(self):...\n", "VAR_7 = Pa11yCrawler('')\n", "self.assertEqual(VAR_7.cmd, self._expected_command(VAR_7.pa11y_report_dir,\n VAR_7.start_urls))\n" ]
[ "def test_default(self):...\n", "suite = Pa11yCrawler('')\n", "self.assertEqual(suite.cmd, self._expected_command(suite.pa11y_report_dir,\n suite.start_urls))\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_12(VAR_35):...\n", "return VAR_35.format(host=url.host, date=datetime.utcnow().isoformat(),\n VAR_30=seqnum)\n" ]
[ "def formatOutput(p):...\n", "return p.format(host=url.host, date=datetime.utcnow().isoformat(), seqnum=\n seqnum)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_0(self):...\n", "self.parser = Parser()\n" ]
[ "def setUp(self):...\n", "self.parser = Parser()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def FUNC_4(self, VAR_8):...\n", "VAR_0.info('Handling daily repeating event')\n", "VAR_16 = datetime.strptime(VAR_8.date_end, '%Y-%m-%d %H:%M:00')\n", "VAR_17 = datetime.now()\n", "if VAR_17 >= VAR_16:\n", "VAR_10 = datetime.strptime(VAR_8.date_begin, '%Y-%m-%d %H:%M:00'\n ) + relativedelta(days=+1)\n", "VAR_9 = VAR_16 + relativedelta(days=+1)\n", "if self._event_does_not_exist(VAR_8, VAR_9):\n", "self._create_new_event(VAR_8, VAR_10, VAR_9)\n" ]
[ "def _handle_daily_event_repetition(self, old_repeating_event):...\n", "_logger.info('Handling daily repeating event')\n", "end_date = datetime.strptime(old_repeating_event.date_end, '%Y-%m-%d %H:%M:00')\n", "present = datetime.now()\n", "if present >= end_date:\n", "new_start_date = datetime.strptime(old_repeating_event.date_begin,\n '%Y-%m-%d %H:%M:00') + relativedelta(days=+1)\n", "new_end_date = end_date + relativedelta(days=+1)\n", "if self._event_does_not_exist(old_repeating_event, new_end_date):\n", "self._create_new_event(old_repeating_event, new_start_date, new_end_date)\n" ]
[ 0, 0, 6, 0, 0, 6, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Expr'" ]
[ "def FUNC_18(self, VAR_28: str) ->UserDTO:...\n", "\"\"\"docstring\"\"\"\n", "VAR_19 = UserDTO()\n", "VAR_19.id = self.id\n", "VAR_19.username = self.username\n", "VAR_19.role = UserRole(self.role).name\n", "VAR_19.mapping_level = MappingLevel(self.mapping_level).name\n", "VAR_19.is_expert = self.is_expert or False\n", "VAR_19.date_registered = str(self.date_registered)\n", "VAR_19.projects_mapped = len(self.projects_mapped)\n", "VAR_19.projects_mapped = 0\n", "VAR_19.tasks_mapped = self.tasks_mapped\n", "VAR_19.tasks_validated = self.tasks_validated\n", "VAR_19.tasks_invalidated = self.tasks_invalidated\n", "VAR_19.twitter_id = self.twitter_id\n", "VAR_19.linkedin_id = self.linkedin_id\n", "VAR_19.facebook_id = self.facebook_id\n", "VAR_19.validation_message = self.validation_message\n", "VAR_19.total_time_spent = 0\n", "VAR_19.time_spent_mapping = 0\n", "VAR_19.time_spent_validating = 0\n", "VAR_32 = 'string'.format(self.id)\n", "VAR_36 = db.engine.execute(VAR_32)\n", "for row in VAR_36:\n", "VAR_36 = row[0]\n", "VAR_32 = 'string'.format(self.id)\n", "if VAR_36:\n", "VAR_37 = db.engine.execute(VAR_32)\n", "VAR_42 = VAR_36.total_seconds()\n", "for row in VAR_37:\n", "VAR_19.time_spent_validating = VAR_42\n", "VAR_37 = row[0]\n", "if self.username == VAR_28:\n", "VAR_19.total_time_spent += VAR_19.time_spent_validating\n", "if VAR_37:\n", "VAR_19.email_address = self.email_address\n", "return VAR_19\n", "VAR_43 = VAR_37.total_seconds()\n", "VAR_19.is_email_verified = self.is_email_verified\n", "VAR_19.time_spent_mapping = VAR_43\n", "VAR_19.total_time_spent += VAR_19.time_spent_mapping\n" ]
[ "def as_dto(self, logged_in_username: str) ->UserDTO:...\n", "\"\"\"docstring\"\"\"\n", "user_dto = UserDTO()\n", "user_dto.id = self.id\n", "user_dto.username = self.username\n", "user_dto.role = UserRole(self.role).name\n", "user_dto.mapping_level = MappingLevel(self.mapping_level).name\n", "user_dto.is_expert = self.is_expert or False\n", "user_dto.date_registered = str(self.date_registered)\n", "user_dto.projects_mapped = len(self.projects_mapped)\n", "user_dto.projects_mapped = 0\n", "user_dto.tasks_mapped = self.tasks_mapped\n", "user_dto.tasks_validated = self.tasks_validated\n", "user_dto.tasks_invalidated = self.tasks_invalidated\n", "user_dto.twitter_id = self.twitter_id\n", "user_dto.linkedin_id = self.linkedin_id\n", "user_dto.facebook_id = self.facebook_id\n", "user_dto.validation_message = self.validation_message\n", "user_dto.total_time_spent = 0\n", "user_dto.time_spent_mapping = 0\n", "user_dto.time_spent_validating = 0\n", "sql = (\n \"\"\"SELECT SUM(TO_TIMESTAMP(action_text, 'HH24:MI:SS')::TIME) FROM task_history\n WHERE action='LOCKED_FOR_VALIDATION'\n and user_id = {0};\"\"\"\n .format(self.id))\n", "total_validation_time = db.engine.execute(sql)\n", "for row in total_validation_time:\n", "total_validation_time = row[0]\n", "sql = (\n \"\"\"SELECT SUM(TO_TIMESTAMP(action_text, 'HH24:MI:SS')::TIME) FROM task_history\n WHERE action='LOCKED_FOR_MAPPING'\n and user_id = {0};\"\"\"\n .format(self.id))\n", "if total_validation_time:\n", "total_mapping_time = db.engine.execute(sql)\n", "total_validation_seconds = total_validation_time.total_seconds()\n", "for row in total_mapping_time:\n", "user_dto.time_spent_validating = total_validation_seconds\n", "total_mapping_time = row[0]\n", "if self.username == logged_in_username:\n", "user_dto.total_time_spent += user_dto.time_spent_validating\n", "if total_mapping_time:\n", "user_dto.email_address = self.email_address\n", "return user_dto\n", "total_mapping_seconds = total_mapping_time.total_seconds()\n", "user_dto.is_email_verified = self.is_email_verified\n", "user_dto.time_spent_mapping = total_mapping_seconds\n", "user_dto.total_time_spent += user_dto.time_spent_mapping\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Condition", "AugAssign'", "Condition", "Assign'", "Return'", "Assign'", "Assign'", "Assign'", "AugAssign'" ]
[ "\"\"\"Volume driver for Dell EqualLogic Storage.\"\"\"\n", "import functools\n", "import random\n", "import eventlet\n", "from eventlet import greenthread\n", "import greenlet\n", "from oslo.config import cfg\n", "from cinder import exception\n", "from cinder.openstack.common import excutils\n", "from cinder.openstack.common import log as logging\n", "from cinder.openstack.common import processutils\n", "from cinder import utils\n", "from cinder.volume.drivers.san import SanISCSIDriver\n", "VAR_0 = logging.getLogger(__name__)\n", "VAR_1 = [cfg.StrOpt('eqlx_group_name', default='group-0', help=\n 'Group name to use for creating volumes'), cfg.IntOpt(\n 'eqlx_cli_timeout', default=30, help=\n 'Timeout for the Group Manager cli command execution'), cfg.IntOpt(\n 'eqlx_cli_max_retries', default=5, help=\n 'Maximum retry count for reconnection'), cfg.BoolOpt('eqlx_use_chap',\n default=False, help='Use CHAP authentication for targets?'), cfg.StrOpt\n ('eqlx_chap_login', default='admin', help='Existing CHAP account name'),\n cfg.StrOpt('eqlx_chap_password', default='password', help=\n 'Password for specified CHAP account name', secret=True), cfg.StrOpt(\n 'eqlx_pool', default='default', help=\n 'Pool in which volumes will be created')]\n", "VAR_2 = cfg.CONF\n", "VAR_2.register_opts(VAR_1)\n", "def FUNC_0(VAR_3):...\n", "@functools.wraps(VAR_3)...\n", "VAR_23 = VAR_5.pop('timeout', None)\n", "VAR_24 = eventlet.spawn(VAR_3, self, *VAR_4, **kwargs)\n", "if VAR_23 is None:\n", "return VAR_24.wait()\n", "VAR_36 = eventlet.spawn_after(VAR_23, VAR_24.kill)\n", "VAR_48 = VAR_24.wait()\n", "VAR_36.cancel()\n", "return FUNC_1\n", "return VAR_48\n" ]
[ "\"\"\"Volume driver for Dell EqualLogic Storage.\"\"\"\n", "import functools\n", "import random\n", "import eventlet\n", "from eventlet import greenthread\n", "import greenlet\n", "from oslo.config import cfg\n", "from cinder import exception\n", "from cinder.openstack.common import excutils\n", "from cinder.openstack.common import log as logging\n", "from cinder.openstack.common import processutils\n", "from cinder import utils\n", "from cinder.volume.drivers.san import SanISCSIDriver\n", "LOG = logging.getLogger(__name__)\n", "eqlx_opts = [cfg.StrOpt('eqlx_group_name', default='group-0', help=\n 'Group name to use for creating volumes'), cfg.IntOpt(\n 'eqlx_cli_timeout', default=30, help=\n 'Timeout for the Group Manager cli command execution'), cfg.IntOpt(\n 'eqlx_cli_max_retries', default=5, help=\n 'Maximum retry count for reconnection'), cfg.BoolOpt('eqlx_use_chap',\n default=False, help='Use CHAP authentication for targets?'), cfg.StrOpt\n ('eqlx_chap_login', default='admin', help='Existing CHAP account name'),\n cfg.StrOpt('eqlx_chap_password', default='password', help=\n 'Password for specified CHAP account name', secret=True), cfg.StrOpt(\n 'eqlx_pool', default='default', help=\n 'Pool in which volumes will be created')]\n", "CONF = cfg.CONF\n", "CONF.register_opts(eqlx_opts)\n", "def with_timeout(f):...\n", "@functools.wraps(f)...\n", "timeout = kwargs.pop('timeout', None)\n", "gt = eventlet.spawn(f, self, *args, **kwargs)\n", "if timeout is None:\n", "return gt.wait()\n", "kill_thread = eventlet.spawn_after(timeout, gt.kill)\n", "res = gt.wait()\n", "kill_thread.cancel()\n", "return __inner\n", "return res\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Import'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Condition", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Expr'", "Return'", "Return'" ]
[ "def FUNC_19(self, VAR_18, VAR_24):...\n", "\"\"\"docstring\"\"\"\n", "if type(VAR_18) == str:\n", "VAR_18 = self.locate(VAR_18)\n", "def FUNC_26(VAR_51, VAR_52):...\n", "if not VAR_18:\n", "for sub_ in VAR_51.sub_items:\n", "return False\n", "FUNC_26(sub_, VAR_52)\n", "VAR_51.owner = VAR_52\n", "if VAR_51.is_dir:\n", "self._update_in_db(VAR_51)\n", "return\n" ]
[ "def chown(self, item, owner):...\n", "\"\"\"docstring\"\"\"\n", "if type(item) == str:\n", "item = self.locate(item)\n", "def _chown_recursive(item_, owner_):...\n", "if not item:\n", "for sub_ in item_.sub_items:\n", "return False\n", "_chown_recursive(sub_, owner_)\n", "item_.owner = owner_\n", "if item_.is_dir:\n", "self._update_in_db(item_)\n", "return\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "FunctionDef'", "Condition", "For", "Return'", "Expr'", "Assign'", "Condition", "Expr'", "Return'" ]
[ "def FUNC_13(self):...\n", "VAR_11 = {}\n", "for VAR_12 in self.parent_groups:\n", "VAR_11[VAR_12.name] = VAR_12\n", "return VAR_11\n", "VAR_11.update(VAR_12._get_ancestors())\n" ]
[ "def _get_ancestors(self):...\n", "results = {}\n", "for g in self.parent_groups:\n", "results[g.name] = g\n", "return results\n", "results.update(g._get_ancestors())\n" ]
[ 0, 1, 0, 1, 1, 1 ]
[ "FunctionDef'", "Assign'", "For", "Assign'", "Return'", "Expr'" ]
[ "from __future__ import absolute_import\n", "import os\n", "from pony.orm import db_session\n", "from six.moves import xrange\n", "from twisted.internet.defer import inlineCallbacks\n", "from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_node import NEW\n", "from Tribler.Core.Modules.MetadataStore.store import MetadataStore\n", "from Tribler.Core.Utilities.random_utils import random_infohash\n", "from Tribler.community.gigachannel.community import GigaChannelCommunity\n", "from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto\n", "from Tribler.pyipv8.ipv8.peer import Peer\n", "from Tribler.pyipv8.ipv8.test.base import TestBase\n", "\"\"\"\n Unit tests for the GigaChannel community which do not need a real Session.\n \"\"\"\n", "def FUNC_0(self):...\n", "super(CLASS_0, self).setUp()\n", "self.count = 0\n", "self.initialize(GigaChannelCommunity, 2)\n", "def FUNC_1(self, *VAR_0, **VAR_1):...\n", "VAR_3 = MetadataStore(os.path.join(self.temporary_directory(), '%d.db' %\n self.count), self.temporary_directory(), default_eccrypto.generate_key(\n u'curve25519'))\n", "VAR_1['metadata_store'] = VAR_3\n", "VAR_4 = super(CLASS_0, self).create_node(*VAR_0, **kwargs)\n", "self.count += 1\n", "return VAR_4\n" ]
[ "from __future__ import absolute_import\n", "import os\n", "from pony.orm import db_session\n", "from six.moves import xrange\n", "from twisted.internet.defer import inlineCallbacks\n", "from Tribler.Core.Modules.MetadataStore.OrmBindings.channel_node import NEW\n", "from Tribler.Core.Modules.MetadataStore.store import MetadataStore\n", "from Tribler.Core.Utilities.random_utils import random_infohash\n", "from Tribler.community.gigachannel.community import GigaChannelCommunity\n", "from Tribler.pyipv8.ipv8.keyvault.crypto import default_eccrypto\n", "from Tribler.pyipv8.ipv8.peer import Peer\n", "from Tribler.pyipv8.ipv8.test.base import TestBase\n", "\"\"\"\n Unit tests for the GigaChannel community which do not need a real Session.\n \"\"\"\n", "def setUp(self):...\n", "super(TestGigaChannelUnits, self).setUp()\n", "self.count = 0\n", "self.initialize(GigaChannelCommunity, 2)\n", "def create_node(self, *args, **kwargs):...\n", "metadata_store = MetadataStore(os.path.join(self.temporary_directory(), \n '%d.db' % self.count), self.temporary_directory(), default_eccrypto.\n generate_key(u'curve25519'))\n", "kwargs['metadata_store'] = metadata_store\n", "node = super(TestGigaChannelUnits, self).create_node(*args, **kwargs)\n", "self.count += 1\n", "return node\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "AugAssign'", "Return'" ]
[ "def FUNC_1(VAR_1):...\n", "VAR_8 = ['gif', 'png', 'jpg']\n", "if VAR_1[-3:].lower() in VAR_8:\n", "return VAR_1\n", "return 'http://ic.pics.livejournal.com/masio/8221809/287143/287143_original.gif'\n" ]
[ "def validate_image(image):...\n", "checklist = ['gif', 'png', 'jpg']\n", "if image[-3:].lower() in checklist:\n", "return image\n", "return 'http://ic.pics.livejournal.com/masio/8221809/287143/287143_original.gif'\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Return'" ]
[ "@user_passes_test(user_is_staff)...\n", "\"\"\"docstring\"\"\"\n", "VAR_5 = []\n", "VAR_6 = ''\n", "VAR_7 = VAR_0.GET.get('level', '')\n", "if VAR_7 == 'project':\n", "create_testcases_celery(VAR_1, call_from='OldTC')\n", "if VAR_7 == 'module':\n", "VAR_4 = context_testcases()\n", "VAR_18 = get_object_or_404(Module, pk=object_id)\n", "VAR_4['testcases'] = VAR_5\n", "VAR_6 = VAR_18.project.id\n", "VAR_4['link_id'] = VAR_6\n", "VAR_5 = create_routing_test_suite(modules=[module])\n", "return render(VAR_0, 'testcases/testcases.html', VAR_4)\n" ]
[ "@user_passes_test(user_is_staff)...\n", "\"\"\"docstring\"\"\"\n", "testcases = []\n", "link_id = ''\n", "level = request.GET.get('level', '')\n", "if level == 'project':\n", "create_testcases_celery(object_id, call_from='OldTC')\n", "if level == 'module':\n", "context = context_testcases()\n", "module = get_object_or_404(Module, pk=object_id)\n", "context['testcases'] = testcases\n", "link_id = module.project.id\n", "context['link_id'] = link_id\n", "testcases = create_routing_test_suite(modules=[module])\n", "return render(request, 'testcases/testcases.html', context)\n" ]
[ 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_16(self, VAR_20):...\n", "" ]
[ "def form_valid(self, form):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "@utils.synchronized('3par', external=True)...\n", "\"\"\"docstring\"\"\"\n", "self.common.client_login()\n", "self.common.terminate_connection(VAR_7, VAR_10['host'], VAR_10['initiator'])\n", "self.common.client_logout()\n" ]
[ "@utils.synchronized('3par', external=True)...\n", "\"\"\"docstring\"\"\"\n", "self.common.client_login()\n", "self.common.terminate_connection(volume, connector['host'], connector[\n 'initiator'])\n", "self.common.client_logout()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_8(self, VAR_4, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "VAR_16 = []\n", "for VAR_2 in VAR_4.__path__:\n", "if os.path.isdir(VAR_2):\n", "return VAR_16\n", "VAR_16.extend(self._find_modules_in_path(VAR_2, VAR_5))\n" ]
[ "def _load_modules(self, package, submod):...\n", "\"\"\"docstring\"\"\"\n", "modules = []\n", "for path in package.__path__:\n", "if os.path.isdir(path):\n", "return modules\n", "modules.extend(self._find_modules_in_path(path, submod))\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Condition", "Return'", "Expr'" ]
[ "def FUNC_13(self, VAR_1):...\n", "VAR_19 = 80\n", "VAR_20 = ''\n", "for line in VAR_1.splitlines():\n", "VAR_20 = VAR_20 + fill(line, VAR_19, replace_whitespace=False) + '\\n'\n", "return VAR_20\n" ]
[ "def _fmt_msg(self, msg):...\n", "width = 80\n", "_fmt = ''\n", "for line in msg.splitlines():\n", "_fmt = _fmt + fill(line, width, replace_whitespace=False) + '\\n'\n", "return _fmt\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Assign'", "Return'" ]
[ "def FUNC_13(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_7, VAR_12 = self.make_xsrf_handling_app()\n", "VAR_17 = VAR_7.get('/request').body\n", "VAR_7.post('/request', {'xsrf_token': VAR_17})\n", "self.assertEqual([('POST', True)], VAR_12)\n" ]
[ "def test_xsrf_token_post_param(self):...\n", "\"\"\"docstring\"\"\"\n", "app, calls = self.make_xsrf_handling_app()\n", "token = app.get('/request').body\n", "app.post('/request', {'xsrf_token': token})\n", "self.assertEqual([('POST', True)], calls)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "@app.route('/articles')...\n", "VAR_4 = FUNC_0('Article')\n", "return render_template('articles.html', allarticles=results)\n" ]
[ "@app.route('/articles')...\n", "results = get_table_dict('Article')\n", "return render_template('articles.html', allarticles=results)\n" ]
[ 0, 4, 0 ]
[ "Condition", "Assign'", "Return'" ]
[ "def FUNC_3(VAR_1):...\n", "return frappe.db.get_value('Blog Category', {'name': VAR_1}, 'title') or VAR_1\n" ]
[ "def get_blog_category(route):...\n", "return frappe.db.get_value('Blog Category', {'name': route}, 'title') or route\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_9(self):...\n", "self.run_test_case(self.scenario.create_app())\n" ]
[ "def test_a_create_app(self):...\n", "self.run_test_case(self.scenario.create_app())\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_17(self):...\n", "self.connections.remove(self)\n" ]
[ "def on_close(self):...\n", "self.connections.remove(self)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_18(self, VAR_14):...\n", "" ]
[ "def is_relation_many_to_one(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_16(self, **VAR_17):...\n", "\"\"\"docstring\"\"\"\n", "self.set_defaults(**kwargs)\n", "for VAR_42, VAR_3 in VAR_17.items():\n", "self.overridable[VAR_42] = VAR_3\n" ]
[ "def set_params(self, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "self.set_defaults(**kwargs)\n", "for k, v in kwargs.items():\n", "self.overridable[k] = v\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "For", "Assign'" ]
[ "def FUNC_1(self, VAR_10, VAR_11, VAR_12):...\n", "fd.write(VAR_12)\n", "fd.flush()\n", "fd.seek(0)\n", "VAR_35 = os.path.basename(fd.name)\n", "if self.shouldLog:\n", "self.logger.debug('Sending file %s with mime type : %s', VAR_35, VAR_11)\n", "VAR_30 = self.session.post(self.uploadUrl, files={self.inputName: (filename,\n fd, mime)}, data=self.postData)\n", "self.httpRequests += 1\n", "if self.shouldLog:\n", "if self.logger.verbosity > 1:\n", "return VAR_30, VAR_35\n", "printSimpleResponseObject(VAR_30)\n", "if self.logger.verbosity > 2:\n", "print('\\x1b[36m' + VAR_30.text + '\\x1b[m')\n" ]
[ "def uploadFile(self, suffix, mime, payload):...\n", "fd.write(payload)\n", "fd.flush()\n", "fd.seek(0)\n", "filename = os.path.basename(fd.name)\n", "if self.shouldLog:\n", "self.logger.debug('Sending file %s with mime type : %s', filename, mime)\n", "fu = self.session.post(self.uploadUrl, files={self.inputName: (filename, fd,\n mime)}, data=self.postData)\n", "self.httpRequests += 1\n", "if self.shouldLog:\n", "if self.logger.verbosity > 1:\n", "return fu, filename\n", "printSimpleResponseObject(fu)\n", "if self.logger.verbosity > 2:\n", "print('\\x1b[36m' + fu.text + '\\x1b[m')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Assign'", "Condition", "Expr'", "Assign'", "AugAssign'", "Condition", "Condition", "Return'", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_16(self, VAR_10=True):...\n", "VAR_25 = self.get_dvd_device()\n", "VAR_17 = shellutil.run('cdcontrol -f {0} eject'.format(VAR_25))\n", "if VAR_10 and VAR_17 != 0:\n" ]
[ "def eject_dvd(self, chk_err=True):...\n", "dvd = self.get_dvd_device()\n", "retcode = shellutil.run('cdcontrol -f {0} eject'.format(dvd))\n", "if chk_err and retcode != 0:\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition" ]
[ "from DateTime.DateTime import DateTime\n", "from Products.PageTemplates.PageTemplateFile import PageTemplateFile\n", "from Persistence import Persistent\n", "from Acquisition import Implicit\n", "import OFS.SimpleItem, OFS.ObjectManager\n", "import zope.interface\n", "import IZMSDaemon\n", "__doc__ = 'ZMS product module.'\n", "__version__ = '0.1'\n", "__authorPermissions__ = ('manage_page_header', 'manage_page_footer',\n 'manage_tabs', 'manage_main_iframe')\n", "__viewPermissions__ = 'manage_menu',\n", "__ac_permissions__ = ('ZMS Author', __authorPermissions__), ('View',\n __viewPermissions__)\n", "VAR_0 = PageTemplateFile('zpt/object/manage', globals())\n", "VAR_1 = PageTemplateFile('zpt/object/manage', globals())\n", "VAR_2 = PageTemplateFile('zpt/ZMSObject/manage_main', globals())\n", "VAR_3 = PageTemplateFile('zpt/ZMSObject/manage_main_iframe', globals())\n", "def FUNC_0(self, *VAR_4, **VAR_5):...\n", "VAR_11 = self.REQUEST\n", "VAR_12 = VAR_11.RESPONSE\n", "return self.getBodyContent(VAR_11)\n" ]
[ "from DateTime.DateTime import DateTime\n", "from Products.PageTemplates.PageTemplateFile import PageTemplateFile\n", "from Persistence import Persistent\n", "from Acquisition import Implicit\n", "import OFS.SimpleItem, OFS.ObjectManager\n", "import zope.interface\n", "import IZMSDaemon\n", "__doc__ = 'ZMS product module.'\n", "__version__ = '0.1'\n", "__authorPermissions__ = ('manage_page_header', 'manage_page_footer',\n 'manage_tabs', 'manage_main_iframe')\n", "__viewPermissions__ = 'manage_menu',\n", "__ac_permissions__ = ('ZMS Author', __authorPermissions__), ('View',\n __viewPermissions__)\n", "manage = PageTemplateFile('zpt/object/manage', globals())\n", "manage_workspace = PageTemplateFile('zpt/object/manage', globals())\n", "manage_main = PageTemplateFile('zpt/ZMSObject/manage_main', globals())\n", "manage_main_iframe = PageTemplateFile('zpt/ZMSObject/manage_main_iframe',\n globals())\n", "def zmi_body_content(self, *args, **kwargs):...\n", "request = self.REQUEST\n", "response = request.RESPONSE\n", "return self.getBodyContent(request)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "Import'", "Import'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_2(VAR_9, VAR_10):...\n", "\"\"\"docstring\"\"\"\n", "VAR_10 = urlencode(VAR_10)\n", "VAR_21 = requests.get('http://%s/solr/biblio/select?%s' % (VAR_9, VAR_10))\n", "return VAR_21.json()\n" ]
[ "def get_solr_result(index, params):...\n", "\"\"\"docstring\"\"\"\n", "params = urlencode(params)\n", "result = requests.get('http://%s/solr/biblio/select?%s' % (index, params))\n", "return result.json()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_2(VAR_3):...\n", "return VAR_3 if VAR_3 and VAR_4.match(VAR_3) else None\n" ]
[ "def chkpass(x):...\n", "return x if x and pass_rx.match(x) else None\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_1():...\n", "VAR_2.set('user:ids', '0')\n" ]
[ "def auth_init():...\n", "db.set('user:ids', '0')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "\"\"\"string\"\"\"\n", "from hp3parclient import exceptions as hpexceptions\n", "from oslo.config import cfg\n", "from cinder import exception\n", "from cinder.openstack.common import log as logging\n", "from cinder import utils\n", "import cinder.volume.driver\n", "from cinder.volume.drivers.san.hp import hp_3par_common as hpcommon\n", "from cinder.volume.drivers.san import san\n", "VAR_0 = 1.1\n", "VAR_1 = logging.getLogger(__name__)\n", "\"\"\"string\"\"\"\n", "def __init__(self, *VAR_2, **VAR_3):...\n", "super(CLASS_0, self).__init__(*VAR_2, **kwargs)\n", "self.common = None\n", "self.configuration.append_config_values(hpcommon.hp3par_opts)\n", "self.configuration.append_config_values(san.san_opts)\n", "def FUNC_0(self):...\n", "return hpcommon.HP3PARCommon(self.configuration)\n" ]
[ "\"\"\"\nVolume driver for HP 3PAR Storage array.\nThis driver requires 3.1.2 MU2 firmware on the 3PAR array.\n\nYou will need to install the python hp3parclient.\nsudo pip install hp3parclient\n\nSet the following in the cinder.conf file to enable the\n3PAR Fibre Channel Driver along with the required flags:\n\nvolume_driver=cinder.volume.drivers.san.hp.hp_3par_fc.HP3PARFCDriver\n\"\"\"\n", "from hp3parclient import exceptions as hpexceptions\n", "from oslo.config import cfg\n", "from cinder import exception\n", "from cinder.openstack.common import log as logging\n", "from cinder import utils\n", "import cinder.volume.driver\n", "from cinder.volume.drivers.san.hp import hp_3par_common as hpcommon\n", "from cinder.volume.drivers.san import san\n", "VERSION = 1.1\n", "LOG = logging.getLogger(__name__)\n", "\"\"\"OpenStack Fibre Channel driver to enable 3PAR storage array.\n\n Version history:\n 1.0 - Initial driver\n 1.1 - QoS, extend volume, multiple iscsi ports, remove domain,\n session changes, faster clone, requires 3.1.2 MU2 firmware,\n copy volume <--> Image.\n \"\"\"\n", "def __init__(self, *args, **kwargs):...\n", "super(HP3PARFCDriver, self).__init__(*args, **kwargs)\n", "self.common = None\n", "self.configuration.append_config_values(hpcommon.hp3par_opts)\n", "self.configuration.append_config_values(san.san_opts)\n", "def _init_common(self):...\n", "return hpcommon.HP3PARCommon(self.configuration)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Return'" ]
[ "@staticmethod...\n", "VAR_14 = mongo.db.analysis.find(VAR_4, sort=[('_id', pymongo.DESCENDING)])\n", "return [VAR_5 for VAR_5 in VAR_14]\n" ]
[ "@staticmethod...\n", "cursor = mongo.db.analysis.find(filters, sort=[('_id', pymongo.DESCENDING)])\n", "return [report for report in cursor]\n" ]
[ 0, 0, 0 ]
[ "Condition", "Assign'", "Return'" ]
[ "def __init__(self, VAR_8):...\n", "self.spdx = VAR_8\n", "self.lasttok = None\n", "self.lastid = None\n", "self.lexer = lex.lex(module=self, reflags=re.UNICODE)\n", "self.parser = yacc.yacc(module=self, write_tables=False, debug=False)\n", "self.lines_checked = 0\n", "self.checked = 0\n", "self.spdx_valid = 0\n", "self.spdx_errors = 0\n", "self.curline = 0\n", "self.deepest = 0\n" ]
[ "def __init__(self, spdx):...\n", "self.spdx = spdx\n", "self.lasttok = None\n", "self.lastid = None\n", "self.lexer = lex.lex(module=self, reflags=re.UNICODE)\n", "self.parser = yacc.yacc(module=self, write_tables=False, debug=False)\n", "self.lines_checked = 0\n", "self.checked = 0\n", "self.spdx_valid = 0\n", "self.spdx_errors = 0\n", "self.curline = 0\n", "self.deepest = 0\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_4(VAR_6):...\n", "VAR_7 = HttpResponse(VAR_6)\n", "VAR_7['Access-Control-Allow-Origin'] = '*'\n", "return VAR_7\n" ]
[ "def respond_text(text):...\n", "response = HttpResponse(text)\n", "response['Access-Control-Allow-Origin'] = '*'\n", "return response\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "__author__ = 'Johannes Köster'\n", "__copyright__ = 'Copyright 2015, Johannes Köster'\n", "__email__ = '[email protected]'\n", "__license__ = 'MIT'\n", "import re\n", "import os\n", "import sys\n", "import signal\n", "import json\n", "import urllib\n", "from collections import OrderedDict\n", "from itertools import filterfalse, chain\n", "from functools import partial\n", "from operator import attrgetter\n", "from snakemake.logging import logger, format_resources, format_resource_names\n", "from snakemake.rules import Rule, Ruleorder\n", "from snakemake.exceptions import RuleException, CreateRuleException, UnknownRuleException, NoRulesException, print_exception, WorkflowError\n", "from snakemake.shell import shell\n", "from snakemake.dag import DAG\n", "from snakemake.scheduler import JobScheduler\n", "from snakemake.parser import parse\n", "import snakemake.io\n", "from snakemake.io import protected, temp, temporary, expand, dynamic, glob_wildcards, flag, not_iterable, touch\n", "from snakemake.persistence import Persistence\n", "from snakemake.utils import update_config\n", "def __init__(self, VAR_1=None, VAR_2=None, VAR_3=None, VAR_4=None, VAR_5=...\n", "\"\"\"docstring\"\"\"\n", "self._rules = OrderedDict()\n", "self.first_rule = None\n", "self._workdir = None\n", "self.overwrite_workdir = VAR_6\n", "self.workdir_init = os.path.abspath(os.curdir)\n", "self._ruleorder = Ruleorder()\n", "self._localrules = set()\n", "self.linemaps = dict()\n", "self.rule_count = 0\n", "self.basedir = os.path.dirname(VAR_1)\n", "self.snakefile = os.path.abspath(VAR_1)\n", "self.snakemakepath = VAR_2\n", "self.included = []\n", "self.included_stack = []\n", "self.jobscript = VAR_3\n", "self.persistence = None\n", "self.global_resources = None\n", "self.globals = globals()\n", "self._subworkflows = dict()\n", "self.overwrite_shellcmd = VAR_4\n", "self.overwrite_config = VAR_5\n", "self.overwrite_configfile = VAR_7\n", "self.config_args = VAR_8\n", "self._onsuccess = lambda log: None\n", "self._onerror = lambda log: None\n", "self.debug = VAR_9\n", "VAR_85 = dict()\n", "VAR_85.update(self.overwrite_config)\n", "VAR_86 = CLASS_3()\n", "@property...\n", "return self._subworkflows.values()\n" ]
[ "__author__ = 'Johannes Köster'\n", "__copyright__ = 'Copyright 2015, Johannes Köster'\n", "__email__ = '[email protected]'\n", "__license__ = 'MIT'\n", "import re\n", "import os\n", "import sys\n", "import signal\n", "import json\n", "import urllib\n", "from collections import OrderedDict\n", "from itertools import filterfalse, chain\n", "from functools import partial\n", "from operator import attrgetter\n", "from snakemake.logging import logger, format_resources, format_resource_names\n", "from snakemake.rules import Rule, Ruleorder\n", "from snakemake.exceptions import RuleException, CreateRuleException, UnknownRuleException, NoRulesException, print_exception, WorkflowError\n", "from snakemake.shell import shell\n", "from snakemake.dag import DAG\n", "from snakemake.scheduler import JobScheduler\n", "from snakemake.parser import parse\n", "import snakemake.io\n", "from snakemake.io import protected, temp, temporary, expand, dynamic, glob_wildcards, flag, not_iterable, touch\n", "from snakemake.persistence import Persistence\n", "from snakemake.utils import update_config\n", "def __init__(self, snakefile=None, snakemakepath=None, jobscript=None,...\n", "\"\"\"docstring\"\"\"\n", "self._rules = OrderedDict()\n", "self.first_rule = None\n", "self._workdir = None\n", "self.overwrite_workdir = overwrite_workdir\n", "self.workdir_init = os.path.abspath(os.curdir)\n", "self._ruleorder = Ruleorder()\n", "self._localrules = set()\n", "self.linemaps = dict()\n", "self.rule_count = 0\n", "self.basedir = os.path.dirname(snakefile)\n", "self.snakefile = os.path.abspath(snakefile)\n", "self.snakemakepath = snakemakepath\n", "self.included = []\n", "self.included_stack = []\n", "self.jobscript = jobscript\n", "self.persistence = None\n", "self.global_resources = None\n", "self.globals = globals()\n", "self._subworkflows = dict()\n", "self.overwrite_shellcmd = overwrite_shellcmd\n", "self.overwrite_config = overwrite_config\n", "self.overwrite_configfile = overwrite_configfile\n", "self.config_args = config_args\n", "self._onsuccess = lambda log: None\n", "self._onerror = lambda log: None\n", "self.debug = debug\n", "config = dict()\n", "config.update(self.overwrite_config)\n", "rules = Rules()\n", "@property...\n", "return self._subworkflows.values()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Assign'", "Assign'", "Assign'", "Assign'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Return'" ]
[ "def FUNC_4(VAR_6, VAR_7, VAR_8):...\n", "VAR_12 = f\"\"\"\n INSERT INTO {VAR_1} (user_id, product_id, quantity)\n VALUES({VAR_6}, {VAR_7}, {VAR_8['quantity']})\n \"\"\"\n", "VAR_14 = create_connection()\n", "VAR_14.close()\n", "VAR_15 = VAR_14.cursor()\n", "VAR_15.execute(VAR_12)\n", "VAR_14.commit()\n", "return 'Ok'\n" ]
[ "def add_product_to_cart(userId, productId, cart):...\n", "sql_query = f\"\"\"\n INSERT INTO {CARTS_TABLE} (user_id, product_id, quantity)\n VALUES({userId}, {productId}, {cart['quantity']})\n \"\"\"\n", "connection = create_connection()\n", "connection.close()\n", "cursor = connection.cursor()\n", "cursor.execute(sql_query)\n", "connection.commit()\n", "return 'Ok'\n" ]
[ 0, 4, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_7(self):...\n", "self.log.debug('Clearing our auth records')\n", "def FUNC_23(VAR_33, VAR_13, VAR_20, VAR_21, VAR_12):...\n", "if VAR_21 == wzrpc.status.success:\n", "self.log.debug('Auth records on router were cleared')\n", "self.log.warn('Status %s, passing', wzrpc.name_status(VAR_21))\n", "return self.wz_wait_reply(FUNC_23, *self.wz.make_auth_clear_data())\n" ]
[ "def clear_auth(self):...\n", "self.log.debug('Clearing our auth records')\n", "def accept(that, reqid, seqnum, status, data):...\n", "if status == wzrpc.status.success:\n", "self.log.debug('Auth records on router were cleared')\n", "self.log.warn('Status %s, passing', wzrpc.name_status(status))\n", "return self.wz_wait_reply(accept, *self.wz.make_auth_clear_data())\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "FunctionDef'", "Condition", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_1(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = FUNC_0(VAR_0)\n", "VAR_3 = os.listdir(VAR_2)\n", "VAR_4 = []\n", "VAR_5 = re.compile('\\\\b\\\\d{14}\\\\b')\n", "for VAR_13 in VAR_3:\n", "return VAR_4\n", "VAR_18 = codecs.open(os.path.join(VAR_2, VAR_13), encoding='utf-8', errors=\n 'strict')\n", "print('Urtext Skipping %s, invalid utf-8' % VAR_13)\n", "for line in VAR_18:\n", "print('Urtext Skipping %s' % VAR_13)\n", "if VAR_5.search(VAR_13):\n", "VAR_4.append(VAR_13)\n" ]
[ "def get_all_files(window):...\n", "\"\"\"docstring\"\"\"\n", "path = get_path(window)\n", "files = os.listdir(path)\n", "urtext_files = []\n", "regexp = re.compile('\\\\b\\\\d{14}\\\\b')\n", "for file in files:\n", "return urtext_files\n", "f = codecs.open(os.path.join(path, file), encoding='utf-8', errors='strict')\n", "print('Urtext Skipping %s, invalid utf-8' % file)\n", "for line in f:\n", "print('Urtext Skipping %s' % file)\n", "if regexp.search(file):\n", "urtext_files.append(file)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Return'", "Assign'", "Expr'", "For", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_0(self, VAR_2, VAR_3, VAR_4, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = []\n", "VAR_7 = self.pool.get('ir.model.data')\n", "VAR_8 = self.pool.get('res.users').browse(VAR_2, VAR_3, VAR_3).company_id.id\n", "VAR_9 = self.pool.get('account.bank.statement')\n", "VAR_10 = self.pool.get('ir.sequence')\n", "VAR_11 = self.pool.get('account.journal')\n", "VAR_2.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % VAR_3)\n", "VAR_12 = map(lambda x1: x1[0], VAR_2.fetchall())\n", "VAR_2.execute('string' % ','.join(map(lambda x: \"'\" + str(x) + \"'\", VAR_12)))\n", "VAR_13 = map(lambda x1: x1[0], VAR_2.fetchall())\n", "for journal in VAR_11.browse(VAR_2, VAR_3, VAR_13):\n", "VAR_4 = VAR_9.search(VAR_2, VAR_3, [('state', '!=', 'confirm'), ('user_id',\n '=', VAR_3), ('journal_id', '=', journal.id)])\n", "VAR_14 = self.pool.get('ir.model.data')\n", "if len(VAR_4):\n", "VAR_15 = VAR_14._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_tree')\n", "VAR_17 = ''\n", "VAR_16 = VAR_14._get_id(VAR_2, VAR_3, 'account', 'view_bank_statement_form2')\n", "if journal.sequence_id:\n", "if VAR_15:\n", "VAR_17 = VAR_10.get_id(VAR_2, VAR_3, journal.sequence_id.id)\n", "VAR_17 = VAR_10.get(VAR_2, VAR_3, 'account.bank.statement')\n", "VAR_15 = VAR_14.browse(VAR_2, VAR_3, VAR_15, VAR_5=context).res_id\n", "if VAR_16:\n", "VAR_18 = VAR_9.create(VAR_2, VAR_3, {'journal_id': journal.id, 'company_id':\n VAR_8, 'user_id': VAR_3, 'state': 'open', 'name': VAR_17,\n 'starting_details_ids': VAR_9._get_cash_close_box_lines(VAR_2, VAR_3, [])})\n", "VAR_16 = VAR_14.browse(VAR_2, VAR_3, VAR_16, VAR_5=context).res_id\n", "return {'domain': \"[('state','=','open')]\", 'name': 'Open Statement',\n 'view_type': 'form', 'view_mode': 'tree,form', 'res_model':\n 'account.bank.statement', 'views': [(VAR_15, 'tree'), (VAR_16, 'form')],\n 'type': 'ir.actions.act_window'}\n", "VAR_9.button_open(VAR_2, VAR_3, [VAR_18], VAR_5)\n" ]
[ "def open_statement(self, cr, uid, ids, context):...\n", "\"\"\"docstring\"\"\"\n", "list_statement = []\n", "mod_obj = self.pool.get('ir.model.data')\n", "company_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.id\n", "statement_obj = self.pool.get('account.bank.statement')\n", "sequence_obj = self.pool.get('ir.sequence')\n", "journal_obj = self.pool.get('account.journal')\n", "cr.execute(\n 'select DISTINCT journal_id from pos_journal_users where user_id=%d order by journal_id'\n % uid)\n", "j_ids = map(lambda x1: x1[0], cr.fetchall())\n", "cr.execute(\n \"\"\" select id from account_journal\n where auto_cash='True' and type='cash'\n and id in (%s)\"\"\"\n % ','.join(map(lambda x: \"'\" + str(x) + \"'\", j_ids)))\n", "journal_ids = map(lambda x1: x1[0], cr.fetchall())\n", "for journal in journal_obj.browse(cr, uid, journal_ids):\n", "ids = statement_obj.search(cr, uid, [('state', '!=', 'confirm'), ('user_id',\n '=', uid), ('journal_id', '=', journal.id)])\n", "data_obj = self.pool.get('ir.model.data')\n", "if len(ids):\n", "id2 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_tree')\n", "number = ''\n", "id3 = data_obj._get_id(cr, uid, 'account', 'view_bank_statement_form2')\n", "if journal.sequence_id:\n", "if id2:\n", "number = sequence_obj.get_id(cr, uid, journal.sequence_id.id)\n", "number = sequence_obj.get(cr, uid, 'account.bank.statement')\n", "id2 = data_obj.browse(cr, uid, id2, context=context).res_id\n", "if id3:\n", "statement_id = statement_obj.create(cr, uid, {'journal_id': journal.id,\n 'company_id': company_id, 'user_id': uid, 'state': 'open', 'name':\n number, 'starting_details_ids': statement_obj._get_cash_close_box_lines\n (cr, uid, [])})\n", "id3 = data_obj.browse(cr, uid, id3, context=context).res_id\n", "return {'domain': \"[('state','=','open')]\", 'name': 'Open Statement',\n 'view_type': 'form', 'view_mode': 'tree,form', 'res_model':\n 'account.bank.statement', 'views': [(id2, 'tree'), (id3, 'form')],\n 'type': 'ir.actions.act_window'}\n", "statement_obj.button_open(cr, uid, [statement_id], context)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "For", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Return'", "Expr'" ]
[ "def FUNC_22(self, VAR_21, VAR_40):...\n", "\"\"\"docstring\"\"\"\n", "VAR_57 = self.getfile(VAR_21)\n", "if VAR_57 == False:\n", "VAR_57[VAR_6] = FUNC_30.S_IFMT(VAR_57[VAR_6]) | VAR_40\n" ]
[ "def chmod(self, path, perm):...\n", "\"\"\"docstring\"\"\"\n", "p = self.getfile(path)\n", "if p == False:\n", "p[A_MODE] = stat.S_IFMT(p[A_MODE]) | perm\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'" ]
[ "from django.conf.urls import url\n", "from .django_views import email_confirmation_redirect, login_redirect, experience_redirect, profile_redirect, root_redirect, aasa_redirect\n", "VAR_0 = [url('^redirects/people/me/email-confirmation$',\n email_confirmation_redirect, name='email-confirmation-redirect'), url(\n '^redirects/people/me/login$', login_redirect, name='login-redirect'),\n url('^e/(?P<experience_share_id>[a-zA-Z0-9]+)$', experience_redirect,\n name='experience-redirect'), url('^p/(?P<username>[a-zA-Z0-9._]+)$',\n profile_redirect, name='profile-redirect'), url(\n '^apple-app-site-association$', aasa_redirect, name='aasa'), url('^$',\n root_redirect, name='root-redirect')]\n" ]
[ "from django.conf.urls import url\n", "from .django_views import email_confirmation_redirect, login_redirect, experience_redirect, profile_redirect, root_redirect, aasa_redirect\n", "urlpatterns = [url('^redirects/people/me/email-confirmation$',\n email_confirmation_redirect, name='email-confirmation-redirect'), url(\n '^redirects/people/me/login$', login_redirect, name='login-redirect'),\n url('^e/(?P<experience_share_id>[a-zA-Z0-9]+)$', experience_redirect,\n name='experience-redirect'), url('^p/(?P<username>[a-zA-Z0-9._]+)$',\n profile_redirect, name='profile-redirect'), url(\n '^apple-app-site-association$', aasa_redirect, name='aasa'), url('^$',\n root_redirect, name='root-redirect')]\n" ]
[ 0, 6, 6 ]
[ "ImportFrom'", "ImportFrom'", "Assign'" ]
[ "def FUNC_6(self, VAR_33):...\n", "if not VAR_33:\n", "VAR_101.errors.add(self.error)\n", "VAR_33 = float(VAR_33)\n", "VAR_101.errors.add(self.error)\n", "return\n", "if self.min is not None and VAR_33 < self.min:\n", "VAR_33 = self.min\n", "if self.max is not None and VAR_33 > self.max:\n", "return VAR_33\n", "VAR_33 = self.max\n" ]
[ "def run(self, val):...\n", "if not val:\n", "c.errors.add(self.error)\n", "val = float(val)\n", "c.errors.add(self.error)\n", "return\n", "if self.min is not None and val < self.min:\n", "val = self.min\n", "if self.max is not None and val > self.max:\n", "return val\n", "val = self.max\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Assign'", "Expr'", "Return'", "Condition", "Assign'", "Condition", "Return'", "Assign'" ]
[ "def FUNC_1(self):...\n", "VAR_2 = self.get_secure_cookie('sid')\n", "if not VAR_2:\n", "return False\n", "VAR_3 = self.session.get('email')\n", "VAR_4 = dbapi.User()\n", "if VAR_3 and VAR_4.get_user(VAR_3) == 0:\n", "VAR_7 = VAR_4.get_user_all(VAR_3)\n", "self.clear_cookies()\n", "if VAR_7:\n", "return False\n", "self.id = VAR_7[0]\n", "self.time = VAR_7[4]\n", "self.email = VAR_3\n", "return True\n" ]
[ "def check(self):...\n", "sid = self.get_secure_cookie('sid')\n", "if not sid:\n", "return False\n", "email = self.session.get('email')\n", "user = dbapi.User()\n", "if email and user.get_user(email) == 0:\n", "profile = user.get_user_all(email)\n", "self.clear_cookies()\n", "if profile:\n", "return False\n", "self.id = profile[0]\n", "self.time = profile[4]\n", "self.email = email\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "@VAR_7.route('/view-calendar', methods=['GET', 'POST'])...\n", "VAR_10 = FUNC_0()\n", "if not VAR_10:\n", "return 'Unauthorized'\n", "VAR_13 = request.args.get('calendar')\n", "if VAR_13 is None:\n", "return 'Must provide a calendar id'\n", "VAR_8, VAR_3 = FUNC_1()\n", "if request.method == 'GET':\n", "VAR_18 = calendar.getCalendarDetails(VAR_13, VAR_6)\n", "VAR_19 = calendar.getAvailabilityForCalendar(VAR_13, VAR_6)\n", "VAR_20 = {'Morning Hours': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],\n 'Evening Hours': [13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]}\n", "if VAR_18 is None:\n", "return 'No calendar exists with that ID'\n", "return render_template('view-calendar.html', VAR_20=hours, VAR_19=\n availabilityDetails, VAR_17=id, VAR_18=calendarDetails)\n" ]
[ "@app.route('/view-calendar', methods=['GET', 'POST'])...\n", "isAuthorized = isUserAuthorized()\n", "if not isAuthorized:\n", "return 'Unauthorized'\n", "id = request.args.get('calendar')\n", "if id is None:\n", "return 'Must provide a calendar id'\n", "username, password = getUsernameAndPassword()\n", "if request.method == 'GET':\n", "calendarDetails = calendar.getCalendarDetails(id, mysql)\n", "availabilityDetails = calendar.getAvailabilityForCalendar(id, mysql)\n", "hours = {'Morning Hours': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],\n 'Evening Hours': [13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]}\n", "if calendarDetails is None:\n", "return 'No calendar exists with that ID'\n", "return render_template('view-calendar.html', hours=hours,\n availabilityDetails=availabilityDetails, calendarId=id, calendarDetails\n =calendarDetails)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Return'", "Return'" ]
[ "@functools.wraps(VAR_1)...\n", "if g.user is None:\n", "return redirect(url_for('auth.login'))\n", "if g.user['admin'] != 1:\n", "return redirect(url_for('blog.feedpage', VAR_3=0))\n", "return VAR_1(**kwargs)\n" ]
[ "@functools.wraps(view)...\n", "if g.user is None:\n", "return redirect(url_for('auth.login'))\n", "if g.user['admin'] != 1:\n", "return redirect(url_for('blog.feedpage', page=0))\n", "return view(**kwargs)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_11(self):...\n", "return True\n" ]
[ "def use_client_cert_auth(self):...\n", "return True\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@functools.wraps(VAR_3)...\n", "VAR_23 = VAR_5.pop('timeout', None)\n", "VAR_24 = eventlet.spawn(VAR_3, self, *VAR_4, **kwargs)\n", "if VAR_23 is None:\n", "return VAR_24.wait()\n", "VAR_36 = eventlet.spawn_after(VAR_23, VAR_24.kill)\n", "VAR_48 = VAR_24.wait()\n", "VAR_36.cancel()\n", "return VAR_48\n" ]
[ "@functools.wraps(f)...\n", "timeout = kwargs.pop('timeout', None)\n", "gt = eventlet.spawn(f, self, *args, **kwargs)\n", "if timeout is None:\n", "return gt.wait()\n", "kill_thread = eventlet.spawn_after(timeout, gt.kill)\n", "res = gt.wait()\n", "kill_thread.cancel()\n", "return res\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_22(self):...\n", "self.assertEqual('', sys.stdin.read())\n", "print('garbage', file=sys.stdout)\n", "print('garbage', file=sys.stderr)\n" ]
[ "def test_stdio_as_dev_null(self):...\n", "self.assertEqual('', sys.stdin.read())\n", "print('garbage', file=sys.stdout)\n", "print('garbage', file=sys.stderr)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'" ]
[ "@VAR_0.route('/')...\n", "" ]
[ "@app.route('/')...\n", "" ]
[ 0, 0 ]
[ "Condition", "Condition" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self.config.get(self.section, 'static_dir')\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self.config.get(self.section, 'static_dir')\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "def FUNC_6(self):...\n", "VAR_83 = []\n", "if self.rate_user and VAR_101.user_is_loggedin:\n", "VAR_83.append('user' + VAR_100(VAR_101.user._id36))\n", "if self.rate_ip:\n", "VAR_83.append('ip' + VAR_100(VAR_102.ip))\n", "VAR_84 = cache.get_multi(VAR_83, self.prefix)\n", "if VAR_84:\n", "VAR_87 = VAR_55(VAR_84.values())\n", "VAR_99 = utils.timeuntil(VAR_87)\n", "VAR_101.errors.add(errors.RATELIMIT, {'time': VAR_99})\n" ]
[ "def run(self):...\n", "to_check = []\n", "if self.rate_user and c.user_is_loggedin:\n", "to_check.append('user' + str(c.user._id36))\n", "if self.rate_ip:\n", "to_check.append('ip' + str(request.ip))\n", "r = cache.get_multi(to_check, self.prefix)\n", "if r:\n", "expire_time = max(r.values())\n", "time = utils.timeuntil(expire_time)\n", "c.errors.add(errors.RATELIMIT, {'time': time})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Expr'", "Condition", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'" ]
[ "def __init__(self, *VAR_20, **VAR_21):...\n", "FlaskForm.__init__(self, *VAR_20, **kwargs)\n", "self.user = None\n" ]
[ "def __init__(self, *args, **kwargs):...\n", "FlaskForm.__init__(self, *args, **kwargs)\n", "self.user = None\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'" ]
[ "def FUNC_12(self, VAR_16='svc'):...\n", "VAR_22 = {VAR_20: key for key, VAR_20 in self.modes.items()}\n", "VAR_32 = VAR_22[VAR_16]\n", "VAR_31 = int(self.get_register_value('cpsr', 'CPU', None), base=16)\n", "self.set_register_value('cpsr', 'CPU', None, hex(int(str(bin(VAR_31))[:-5] +\n VAR_32, base=2)))\n" ]
[ "def set_mode(self, mode='svc'):...\n", "modes = {value: key for key, value in self.modes.items()}\n", "mask = modes[mode]\n", "cpsr = int(self.get_register_value('cpsr', 'CPU', None), base=16)\n", "self.set_register_value('cpsr', 'CPU', None, hex(int(str(bin(cpsr))[:-5] +\n mask, base=2)))\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_1(self):...\n", "VAR_1 = []\n", "VAR_2 = sqlite3.connect('brewdie.db')\n", "print('Something went wrong')\n", "if VAR_2:\n", "return VAR_1\n", "VAR_3 = VAR_2.cursor()\n", "print(e)\n", "VAR_2.close()\n", "for row in VAR_3.execute('SELECT * FROM Recipes'):\n", "if VAR_2:\n", "VAR_0 = Recipe(row[0], row[1], row[2])\n", "VAR_2.rollback()\n", "return\n", "for VAR_7 in VAR_3.execute(\"SELECT * FROM Malts WHERE recipe_name='%s'\" %\n", "VAR_0.malts[VAR_7[1]] = VAR_7[2]\n", "for rest_row in VAR_3.execute(\n", "VAR_0.rests.append(Rest(rest_row[1], rest_row[2], rest_row[3]))\n", "for hop_dosage_row in VAR_3.execute(\n", "VAR_0.hop_dosages.append(HopDosage(hop_dosage_row[1], hop_dosage_row[3],\n hop_dosage_row[2]))\n", "VAR_1.append(VAR_0)\n" ]
[ "def load_recipes(self):...\n", "recipes = []\n", "connection = sqlite3.connect('brewdie.db')\n", "print('Something went wrong')\n", "if connection:\n", "return recipes\n", "cursor = connection.cursor()\n", "print(e)\n", "connection.close()\n", "for row in cursor.execute('SELECT * FROM Recipes'):\n", "if connection:\n", "recipe = Recipe(row[0], row[1], row[2])\n", "connection.rollback()\n", "return\n", "for malt_row in cursor.execute(\"SELECT * FROM Malts WHERE recipe_name='%s'\" %\n", "recipe.malts[malt_row[1]] = malt_row[2]\n", "for rest_row in cursor.execute(\n", "recipe.rests.append(Rest(rest_row[1], rest_row[2], rest_row[3]))\n", "for hop_dosage_row in cursor.execute(\n", "recipe.hop_dosages.append(HopDosage(hop_dosage_row[1], hop_dosage_row[3],\n hop_dosage_row[2]))\n", "recipes.append(recipe)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 4, 0, 4, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Condition", "Return'", "Assign'", "Expr'", "Expr'", "For", "Condition", "Assign'", "Expr'", "Return'", "For", "Assign'", "For", "Expr'", "For", "Expr'", "Expr'" ]
[ "def FUNC_23(self, *VAR_69, **VAR_70):...\n", "def FUNC_39(VAR_101):...\n", "VAR_101.output = VAR_69, VAR_70\n", "return VAR_101\n" ]
[ "def output(self, *paths, **kwpaths):...\n", "def decorate(ruleinfo):...\n", "ruleinfo.output = paths, kwpaths\n", "return ruleinfo\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Assign'", "Return'" ]
[ "@staticmethod...\n", "if VAR_5 == 'north':\n", "return 'west'\n", "if VAR_5 == 'west':\n", "return 'north'\n", "if VAR_5 == 'south':\n", "return 'east'\n", "if VAR_5 == 'east':\n", "return 'south'\n" ]
[ "@staticmethod...\n", "if direction == 'north':\n", "return 'west'\n", "if direction == 'west':\n", "return 'north'\n", "if direction == 'south':\n", "return 'east'\n", "if direction == 'east':\n", "return 'south'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'" ]
[ "from __future__ import absolute_import, division, print_function, with_statement\n", "import os\n", "import sys\n", "import argparse\n", "if __name__ == '__main__':\n", "VAR_0 = argparse.ArgumentParser(description='See README')\n", "VAR_0.add_argument('-c', '--count', default=3, type=int, help=\n 'with how many failure times it should be considered as an attack')\n", "VAR_1 = VAR_0.parse_args()\n", "VAR_2 = {}\n", "VAR_3 = set()\n", "for line in sys.stdin:\n", "if 'can not parse header when' in line:\n", "VAR_4 = line.split()[-1].split(':')[-2]\n", "if VAR_4 not in VAR_2:\n", "VAR_2[VAR_4] = 1\n", "VAR_2[VAR_4] += 1\n", "print(VAR_4)\n", "if VAR_4 not in VAR_3 and VAR_2[VAR_4] >= VAR_1.count:\n", "sys.stdout.flush()\n", "VAR_3.add(VAR_4)\n", "VAR_5 = 'iptables -A INPUT -s %s -j DROP' % VAR_4\n", "print(VAR_5, file=sys.stderr)\n", "sys.stderr.flush()\n", "os.system(VAR_5)\n" ]
[ "from __future__ import absolute_import, division, print_function, with_statement\n", "import os\n", "import sys\n", "import argparse\n", "if __name__ == '__main__':\n", "parser = argparse.ArgumentParser(description='See README')\n", "parser.add_argument('-c', '--count', default=3, type=int, help=\n 'with how many failure times it should be considered as an attack')\n", "config = parser.parse_args()\n", "ips = {}\n", "banned = set()\n", "for line in sys.stdin:\n", "if 'can not parse header when' in line:\n", "ip = line.split()[-1].split(':')[-2]\n", "if ip not in ips:\n", "ips[ip] = 1\n", "ips[ip] += 1\n", "print(ip)\n", "if ip not in banned and ips[ip] >= config.count:\n", "sys.stdout.flush()\n", "banned.add(ip)\n", "cmd = 'iptables -A INPUT -s %s -j DROP' % ip\n", "print(cmd, file=sys.stderr)\n", "sys.stderr.flush()\n", "os.system(cmd)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 0, 0, 0, 2, 0, 0, 0, 2, 2, 0, 2 ]
[ "ImportFrom'", "Import'", "Import'", "Import'", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "For", "Condition", "Assign'", "Condition", "Assign'", "AugAssign'", "Expr'", "Condition", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "@wraps(VAR_7)...\n", "if VAR_1.is_expired():\n", "return VAR_1.redirect_to_goodbye()\n", "if VAR_1.is_authenticated():\n", "return VAR_7(*VAR_9, **kwargs)\n", "return VAR_1.redirect_to_goodbye()\n" ]
[ "@wraps(f)...\n", "if user_mod.is_expired():\n", "return user_mod.redirect_to_goodbye()\n", "if user_mod.is_authenticated():\n", "return f(*args, **kwargs)\n", "return user_mod.redirect_to_goodbye()\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_11(self, VAR_7=None):...\n", "\"\"\"docstring\"\"\"\n", "VAR_18 = set()\n", "if self.benchmark and (VAR_7 is None or self.benchmark in VAR_7):\n", "if not self.benchmark.exists:\n", "for VAR_28, VAR_27 in zip(self.output, self.rule.output):\n", "VAR_18.add(self.benchmark)\n", "if VAR_7 is None or VAR_28 in VAR_7:\n", "return VAR_18\n", "if VAR_28 in self.dynamic_output:\n", "if not self.expand_dynamic(VAR_27, VAR_13=self.wildcards, VAR_14=_IOFile.\n", "if not VAR_28.exists:\n", "VAR_18.add('{} (dynamic)'.format(VAR_27))\n", "VAR_18.add(VAR_28)\n" ]
[ "def missing_output(self, requested=None):...\n", "\"\"\"docstring\"\"\"\n", "files = set()\n", "if self.benchmark and (requested is None or self.benchmark in requested):\n", "if not self.benchmark.exists:\n", "for f, f_ in zip(self.output, self.rule.output):\n", "files.add(self.benchmark)\n", "if requested is None or f in requested:\n", "return files\n", "if f in self.dynamic_output:\n", "if not self.expand_dynamic(f_, restriction=self.wildcards, omit_value=\n", "if not f.exists:\n", "files.add('{} (dynamic)'.format(f_))\n", "files.add(f)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Condition", "For", "Expr'", "Condition", "Return'", "Condition", "Condition", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_3(self, VAR_6):...\n", "if 'doctype' in VAR_6:\n", "self.set('doctype', VAR_6.get('doctype'))\n", "for VAR_7 in default_fields:\n", "if VAR_7 in VAR_6:\n", "for VAR_7, VAR_9 in iteritems(VAR_6):\n", "self.set(VAR_7, VAR_6.get(VAR_7))\n", "self.set(VAR_7, VAR_9)\n", "return self\n" ]
[ "def update(self, d):...\n", "if 'doctype' in d:\n", "self.set('doctype', d.get('doctype'))\n", "for key in default_fields:\n", "if key in d:\n", "for key, value in iteritems(d):\n", "self.set(key, d.get(key))\n", "self.set(key, value)\n", "return self\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "For", "Condition", "For", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_11(self, VAR_21):...\n", "\"\"\"docstring\"\"\"\n", "VAR_24 = self.getfile(VAR_21)\n", "return False\n", "return VAR_24[VAR_2] == VAR_13\n" ]
[ "def isfile(self, path):...\n", "\"\"\"docstring\"\"\"\n", "f = self.getfile(path)\n", "return False\n", "return f[A_TYPE] == T_FILE\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'", "Return'" ]
[ "from setuptools import find_packages, setup\n", "setup(name='jupyter-notebook-gist', version='0.4.0', description=\n 'Create a gist from the Jupyter Notebook UI', author=\n 'Mozilla Firefox Data Platform', author_email=\n '[email protected]', packages=find_packages(where='src'),\n package_dir={'': 'src'}, include_package_data=True, license='MPL2',\n install_requires=['ipython >= 4', 'notebook >= 4.2', 'jupyter',\n 'requests', 'six', 'widgetsnbextension'], url=\n 'https://github.com/mozilla/jupyter-notebook-gist', zip_safe=False)\n" ]
[ "from setuptools import find_packages, setup\n", "setup(name='jupyter-notebook-gist', version='0.4.0', description=\n 'Create a gist from the Jupyter Notebook UI', author=\n 'Mozilla Firefox Data Platform', author_email=\n '[email protected]', packages=find_packages(where='src'),\n package_dir={'': 'src'}, include_package_data=True, license='MPL2',\n install_requires=['ipython >= 4', 'notebook >= 4.2', 'jupyter',\n 'requests', 'six', 'widgetsnbextension'], url=\n 'https://github.com/mozilla/jupyter-notebook-gist', zip_safe=False)\n" ]
[ 0, 5 ]
[ "ImportFrom'", "Expr'" ]
[ "@staticmethod...\n", "return FUNC_1(CLASS_0.PostDataToHandlerAsync(VAR_9, VAR_7, VAR_10))\n" ]
[ "@staticmethod...\n", "return JsonFromFuture(BaseRequest.PostDataToHandlerAsync(data, handler,\n timeout))\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "from typing import Iterator, TypeVar\n", "from .interface import ISpy\n", "VAR_0 = TypeVar('T')\n", "def FUNC_0(VAR_1: ISpy, VAR_2: str) ->str:...\n", "return f\"SELECT {', '.join(FUNC_1(VAR_1))} FROM {VAR_2}\"\n" ]
[ "from typing import Iterator, TypeVar\n", "from .interface import ISpy\n", "T = TypeVar('T')\n", "def construct_select_statement(spy: ISpy, from_: str) ->str:...\n", "return f\"SELECT {', '.join(construct_selects(spy))} FROM {from_}\"\n" ]
[ 4, 4, 0, 0, 4 ]
[ "ImportFrom'", "ImportFrom'", "Assign'", "FunctionDef'", "Return'" ]
[ "def FUNC_11(self):...\n", "self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n", "VAR_1 = {'name': self.volume_name}\n", "self.stubs.Set(self.driver, '_get_iscsi_properties', self.\n _fake_get_iscsi_properties)\n", "self.driver._eql_execute('volume', 'select', VAR_1['name'], 'access',\n 'create', 'initiator', self.connector['initiator'], 'authmethod chap',\n 'username', self.configuration.eqlx_chap_login)\n", "self.mox.ReplayAll()\n", "VAR_8 = self.driver.initialize_connection(VAR_1, self.connector)\n", "self.assertEqual(VAR_8['data'], self._fake_get_iscsi_properties(VAR_1))\n" ]
[ "def test_initialize_connection(self):...\n", "self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n", "volume = {'name': self.volume_name}\n", "self.stubs.Set(self.driver, '_get_iscsi_properties', self.\n _fake_get_iscsi_properties)\n", "self.driver._eql_execute('volume', 'select', volume['name'], 'access',\n 'create', 'initiator', self.connector['initiator'], 'authmethod chap',\n 'username', self.configuration.eqlx_chap_login)\n", "self.mox.ReplayAll()\n", "iscsi_properties = self.driver.initialize_connection(volume, self.connector)\n", "self.assertEqual(iscsi_properties['data'], self._fake_get_iscsi_properties(\n volume))\n" ]
[ 0, 0, 0, 0, 2, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_0(self, VAR_0):...\n", "return VAR_0.id\n" ]
[ "def to_representation(self, obj):...\n", "return obj.id\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2(self, VAR_2, VAR_4):...\n", "return False\n" ]
[ "def has_permission(self, request, view):...\n", "return False\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "\"\"\"netgrph is the primary CLI query too for NetGrph\n    Also see ngreport\n\"\"\"\n", "import os\n", "import re\n", "import argparse\n", "import nglib\n", "import nglib.query\n", "VAR_0 = '/etc/netgrph.ini'\n", "VAR_1 = './docs/netgrph.ini'\n", "VAR_2 = os.path.dirname(os.path.realpath(__file__))\n", "if re.search('\\\\/dev$', VAR_2):\n", "VAR_0 = 'netgrphdev.ini'\n", "if re.search('\\\\/test$', VAR_2):\n", "VAR_3 = argparse.ArgumentParser()\n", "VAR_0 = 'netgrphdev.ini'\n", "VAR_3 = argparse.ArgumentParser(prog='netgrph', description=\n    'Query the NetGrph Database', epilog=\n    \"\"\"\n    Examples:\n    netgrph 10.1.1.1 (Free Search for IP),\n    netgrph -net 10.1.1.0/24 (Search for CIDR),\n    netgrph -group MDC (VLAN Database Search),\n    netgrph -fp 10.1.1.1 10.2.2.1 (Firewall Path Search)\n    \"\"\"\n    )\n", "VAR_3.add_argument('search', help=\n    'Search the NetGrph Database (Wildcard Default)', type=str)\n", "VAR_3.add_argument('-ip', help='Network Details for an IP', action='store_true'\n    )\n", "VAR_3.add_argument('-net', help=\n    'All networks within a CIDR (eg. 10.0.0.0/8)', action='store_true')\n", "VAR_3.add_argument('-nlist', help='Get all networks in an alert group',\n    action='store_true')\n", "VAR_3.add_argument('-nfilter', help=\n    'Get all networks on a filter (see netgrph.ini)', action='store_true')\n", "VAR_3.add_argument('-dev', help=\n    'Get the Details for a Device (Switch/Router/FW)', action='store_true')\n", "VAR_3.add_argument('-path', metavar='src', help=\n    'Full Path Between -p src dst (ip/cidr, requires NetDB)', type=str)\n", "VAR_3.add_argument('-fpath', metavar='src', help=\n    'Security Path between -fp src dst', type=str)\n", "VAR_3.add_argument('-rpath', metavar='src', help=\n    'Routed Path between -rp IP/CIDR1 IP/CIDR2 ', type=str)\n", "VAR_3.add_argument('-spath', metavar='src', help=\n    'Switched Path between -sp sw1 sw2 (Neo4j Regex)', type=str)\n", "VAR_3.add_argument('-group', help='Get VLANs for a Management Group',\n    action='store_true')\n", "VAR_3.add_argument('-vrange', metavar='1[-4096]', help=\n    'VLAN Range (default 1-1999)', type=str)\n", "VAR_3.add_argument('-vid', help='VLAN ID Search', action='store_true')\n", "VAR_3.add_argument('-vtree', help='Get the VLAN Tree for a VNAME', action=\n    'store_true')\n", "VAR_3.add_argument('-output', metavar='TREE', help=\n    'Return Format: TREE, TABLE, CSV, JSON, YAML', type=str)\n", "VAR_3.add_argument('--days', metavar='int', help=\n    'Days in Past (NetDB Specific)', type=int)\n", "VAR_3.add_argument('--conf', metavar='file', help='Alternate Config File',\n    type=str)\n", "VAR_3.add_argument('--debug', help='Set debugging level', type=int)\n", "VAR_3.add_argument('--verbose', help='Verbose Output', action='store_true')\n", "VAR_4 = VAR_3.parse_args()\n", "if VAR_4.conf:\n", "VAR_0 = VAR_4.conf\n", "if not os.path.exists(VAR_0):\n", "if not os.path.exists(VAR_1):\n", "VAR_5 = 0\n", "VAR_0 = VAR_1\n", "if VAR_4.verbose:\n", "VAR_5 = 1\n", "if VAR_4.debug:\n", "VAR_5 = VAR_4.debug\n", "if not VAR_4.days:\n", "VAR_4.days = 7\n", "if not VAR_4.vrange:\n", "VAR_4.vrange = '1-1999'\n", "if VAR_4.output:\n", "VAR_4.output = VAR_4.output.upper()\n", "nglib.verbose = VAR_5\n", "nglib.init_nglib(VAR_0)\n", "if VAR_4.fpath:\n", "nglib.query.path.get_fw_path(VAR_4.fpath, VAR_4.search)\n", "if VAR_4.spath:\n", "VAR_6 = 'TREE'\n", "if VAR_4.rpath:\n", "if VAR_4.output:\n", "VAR_6 = 'TREE'\n", "if VAR_4.path:\n", "VAR_6 = VAR_4.output\n", "nglib.query.path.get_switched_path(VAR_4.spath, VAR_4.search, VAR_6=rtype)\n", "if VAR_4.output:\n", "VAR_6 = 'TREE'\n", "if VAR_4.dev:\n", "VAR_6 = VAR_4.output\n", "nglib.query.path.get_routed_path(VAR_4.rpath, VAR_4.search, VAR_6=rtype)\n", "if VAR_4.output:\n", "VAR_6 = 'TREE'\n", "if VAR_4.ip:\n", "VAR_6 = VAR_4.output\n", "nglib.query.path.get_full_path(VAR_4.path, VAR_4.search, VAR_6=rtype)\n", "if VAR_4.output:\n", "VAR_6 = 'TREE'\n", "if VAR_4.net:\n", "VAR_6 = VAR_4.output\n", "nglib.query.dev.get_device(VAR_4.search, VAR_6=rtype, vrange=args.vrange)\n", "if VAR_4.output:\n", "VAR_6 = 'CSV'\n", "if VAR_4.nlist:\n", "VAR_6 = VAR_4.output\n", "nglib.query.net.get_net(VAR_4.search, VAR_6=rtype, days=args.days)\n", "if VAR_4.output:\n", "VAR_6 = 'CSV'\n", "if VAR_4.nfilter:\n", "VAR_6 = VAR_4.output\n", "nglib.query.net.get_networks_on_cidr(VAR_4.search, VAR_6=rtype)\n", "if VAR_4.output:\n", "VAR_6 = 'CSV'\n", "if VAR_4.group:\n", "VAR_6 = VAR_4.output\n", "nglib.query.net.get_networks_on_filter(VAR_4.search, VAR_6=rtype)\n", "if VAR_4.output:\n", "nglib.query.vlan.get_vlans_on_group(VAR_4.search, VAR_4.vrange)\n", "if VAR_4.vtree:\n", "VAR_6 = VAR_4.output\n", "nglib.query.net.get_networks_on_filter(nFilter=args.search, VAR_6=rtype)\n", "VAR_6 = 'TREE'\n", "if VAR_4.vid:\n", "if VAR_4.output:\n", "VAR_6 = 'TREE'\n", "if VAR_4.search:\n", "VAR_6 = VAR_4.output\n", "nglib.query.vlan.get_vtree(VAR_4.search, VAR_6=rtype)\n", "if VAR_4.output:\n", "VAR_7 = re.search('^(\\\\d+)$', VAR_4.search)\n", "VAR_3.print_help()\n", "VAR_6 = VAR_4.output\n", "nglib.query.vlan.search_vlan_id(VAR_4.search, VAR_6=rtype)\n", "VAR_8 = re.search('^(\\\\w+\\\\-\\\\d+)$', VAR_4.search)\n", "print()\n", "VAR_9 = re.search('^(\\\\d+\\\\.\\\\d+\\\\.\\\\d+\\\\.\\\\d+)$', VAR_4.search)\n", "VAR_10 = re.search('^(\\\\d+\\\\.\\\\d+\\\\.\\\\d+\\\\.\\\\d+\\\\/\\\\d+)$', VAR_4.search)\n", "VAR_11 = re.search('^(\\\\w+)$', VAR_4.search)\n", "if VAR_7:\n", "if VAR_8:\n", "if int(VAR_4.search) >= 0 and int(VAR_4.search) <= 4096:\n", "VAR_6 = 'TREE'\n", "if VAR_10:\n", "VAR_6 = 'TREE'\n", "if VAR_4.output:\n", "VAR_6 = 'CSV'\n", "if VAR_9:\n", "if VAR_4.output:\n", "VAR_6 = VAR_4.output\n", "nglib.query.vlan.get_vtree(VAR_4.search, VAR_6=rtype)\n", "if VAR_4.output:\n", "VAR_6 = 'TREE'\n", "if VAR_11:\n", "VAR_6 = VAR_4.output\n", "nglib.query.vlan.search_vlan_id(VAR_4.search, VAR_6=rtype)\n", "VAR_6 = VAR_4.output\n", "nglib.query.net.get_networks_on_cidr(VAR_4.search, VAR_6=rtype)\n", "if VAR_4.output:\n", "VAR_6 = 'TREE'\n", "print('Unknown Search:', VAR_4.search)\n", "VAR_6 = VAR_4.output\n", "nglib.query.net.get_net(VAR_4.search, VAR_6=rtype, days=args.days)\n", "if VAR_4.output:\n", "VAR_6 = VAR_4.output\n", "nglib.query.universal_text_search(VAR_4.search, VAR_4.vrange, VAR_6=rtype)\n" ]
[ "\"\"\"netgrph is the primary CLI query too for NetGrph\n    Also see ngreport\n\"\"\"\n", "import os\n", "import re\n", "import argparse\n", "import nglib\n", "import nglib.query\n", "config_file = '/etc/netgrph.ini'\n", "alt_config = './docs/netgrph.ini'\n", "dirname = os.path.dirname(os.path.realpath(__file__))\n", "if re.search('\\\\/dev$', dirname):\n", "config_file = 'netgrphdev.ini'\n", "if re.search('\\\\/test$', dirname):\n", "parser = argparse.ArgumentParser()\n", "config_file = 'netgrphdev.ini'\n", "parser = argparse.ArgumentParser(prog='netgrph', description=\n    'Query the NetGrph Database', epilog=\n    \"\"\"\n    Examples:\n    netgrph 10.1.1.1 (Free Search for IP),\n    netgrph -net 10.1.1.0/24 (Search for CIDR),\n    netgrph -group MDC (VLAN Database Search),\n    netgrph -fp 10.1.1.1 10.2.2.1 (Firewall Path Search)\n    \"\"\"\n    )\n", "parser.add_argument('search', help=\n    'Search the NetGrph Database (Wildcard Default)', type=str)\n", "parser.add_argument('-ip', help='Network Details for an IP', action=\n    'store_true')\n", "parser.add_argument('-net', help=\n    'All networks within a CIDR (eg. 10.0.0.0/8)', action='store_true')\n", "parser.add_argument('-nlist', help='Get all networks in an alert group',\n    action='store_true')\n", "parser.add_argument('-nfilter', help=\n    'Get all networks on a filter (see netgrph.ini)', action='store_true')\n", "parser.add_argument('-dev', help=\n    'Get the Details for a Device (Switch/Router/FW)', action='store_true')\n", "parser.add_argument('-path', metavar='src', help=\n    'Full Path Between -p src dst (ip/cidr, requires NetDB)', type=str)\n", "parser.add_argument('-fpath', metavar='src', help=\n    'Security Path between -fp src dst', type=str)\n", "parser.add_argument('-rpath', metavar='src', help=\n    'Routed Path between -rp IP/CIDR1 IP/CIDR2 ', type=str)\n", "parser.add_argument('-spath', metavar='src', help=\n    'Switched Path between -sp sw1 sw2 (Neo4j Regex)', type=str)\n", "parser.add_argument('-group', help='Get VLANs for a Management Group',\n    action='store_true')\n", "parser.add_argument('-vrange', metavar='1[-4096]', help=\n    'VLAN Range (default 1-1999)', type=str)\n", "parser.add_argument('-vid', help='VLAN ID Search', action='store_true')\n", "parser.add_argument('-vtree', help='Get the VLAN Tree for a VNAME', action=\n    'store_true')\n", "parser.add_argument('-output', metavar='TREE', help=\n    'Return Format: TREE, TABLE, CSV, JSON, YAML', type=str)\n", "parser.add_argument('--days', metavar='int', help=\n    'Days in Past (NetDB Specific)', type=int)\n", "parser.add_argument('--conf', metavar='file', help='Alternate Config File',\n    type=str)\n", "parser.add_argument('--debug', help='Set debugging level', type=int)\n", "parser.add_argument('--verbose', help='Verbose Output', action='store_true')\n", "args = parser.parse_args()\n", "if args.conf:\n", "config_file = args.conf\n", "if not os.path.exists(config_file):\n", "if not os.path.exists(alt_config):\n", "verbose = 0\n", "config_file = alt_config\n", "if args.verbose:\n", "verbose = 1\n", "if args.debug:\n", "verbose = args.debug\n", "if not args.days:\n", "args.days = 7\n", "if not args.vrange:\n", "args.vrange = '1-1999'\n", "if args.output:\n", "args.output = args.output.upper()\n", "nglib.verbose = verbose\n", "nglib.init_nglib(config_file)\n", "if args.fpath:\n", "nglib.query.path.get_fw_path(args.fpath, args.search)\n", "if args.spath:\n", "rtype = 'TREE'\n", "if args.rpath:\n", "if args.output:\n", "rtype = 'TREE'\n", "if args.path:\n", "rtype = args.output\n", "nglib.query.path.get_switched_path(args.spath, args.search, rtype=rtype)\n", "if args.output:\n", "rtype = 'TREE'\n", "if args.dev:\n", "rtype = args.output\n", "nglib.query.path.get_routed_path(args.rpath, args.search, rtype=rtype)\n", "if args.output:\n", "rtype = 'TREE'\n", "if args.ip:\n", "rtype = args.output\n", "nglib.query.path.get_full_path(args.path, args.search, rtype=rtype)\n", "if args.output:\n", "rtype = 'TREE'\n", "if args.net:\n", "rtype = args.output\n", "nglib.query.dev.get_device(args.search, rtype=rtype, vrange=args.vrange)\n", "if args.output:\n", "rtype = 'CSV'\n", "if args.nlist:\n", "rtype = args.output\n", "nglib.query.net.get_net(args.search, rtype=rtype, days=args.days)\n", "if args.output:\n", "rtype = 'CSV'\n", "if args.nfilter:\n", "rtype = args.output\n", "nglib.query.net.get_networks_on_cidr(args.search, rtype=rtype)\n", "if args.output:\n", "rtype = 'CSV'\n", "if args.group:\n", "rtype = args.output\n", "nglib.query.net.get_networks_on_filter(args.search, rtype=rtype)\n", "if args.output:\n", "nglib.query.vlan.get_vlans_on_group(args.search, args.vrange)\n", "if args.vtree:\n", "rtype = args.output\n", "nglib.query.net.get_networks_on_filter(nFilter=args.search, rtype=rtype)\n", "rtype = 'TREE'\n", "if args.vid:\n", "if args.output:\n", "rtype = 'TREE'\n", "if args.search:\n", "rtype = args.output\n", "nglib.query.vlan.get_vtree(args.search, rtype=rtype)\n", "if args.output:\n", "vid = re.search('^(\\\\d+)$', args.search)\n", "parser.print_help()\n", "rtype = args.output\n", "nglib.query.vlan.search_vlan_id(args.search, rtype=rtype)\n", "vname = re.search('^(\\\\w+\\\\-\\\\d+)$', args.search)\n", "print()\n", "ip = re.search('^(\\\\d+\\\\.\\\\d+\\\\.\\\\d+\\\\.\\\\d+)$', args.search)\n", "net = re.search('^(\\\\d+\\\\.\\\\d+\\\\.\\\\d+\\\\.\\\\d+\\\\/\\\\d+)$', args.search)\n", "text = re.search('^(\\\\w+)$', args.search)\n", "if vid:\n", "if vname:\n", "if int(args.search) >= 0 and int(args.search) <= 4096:\n", "rtype = 'TREE'\n", "if net:\n", "rtype = 'TREE'\n", "if args.output:\n", "rtype = 'CSV'\n", "if ip:\n", "if args.output:\n", "rtype = args.output\n", "nglib.query.vlan.get_vtree(args.search, rtype=rtype)\n", "if args.output:\n", "rtype = 'TREE'\n", "if text:\n", "rtype = args.output\n", "nglib.query.vlan.search_vlan_id(args.search, rtype=rtype)\n", "rtype = args.output\n", "nglib.query.net.get_networks_on_cidr(args.search, rtype=rtype)\n", "if args.output:\n", "rtype = 'TREE'\n", "print('Unknown Search:', args.search)\n", "rtype = args.output\n", "nglib.query.net.get_net(args.search, rtype=rtype, days=args.days)\n", "if args.output:\n", "rtype = args.output\n", "nglib.query.universal_text_search(args.search, args.vrange, rtype=rtype)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Import'", "Import'", "Import'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Condition", "Expr'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Expr'", "Condition", "Assign'", "Expr'", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Expr'", "Condition", "Assign'", "Expr'", "Assign'", "Expr'", "Condition", "Assign'", "Expr'" ]
[ "def __init__(self, VAR_13):...\n", "self.filters = VAR_13\n" ]
[ "def __init__(self, filters):...\n", "self.filters = filters\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def FUNC_21(VAR_16):...\n", "return (VAR_16.input, VAR_16.dynamic_input) if VAR_4 else (VAR_16.output,\n VAR_16.dynamic_output)\n" ]
[ "def get_io(rule):...\n", "return (rule.input, rule.dynamic_input) if input else (rule.output, rule.\n dynamic_output)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_6(self):...\n", "if self._ownsock:\n", "close(self._sock)\n" ]
[ "def _close(self):...\n", "if self._ownsock:\n", "close(self._sock)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'" ]
[ "def FUNC_6(self, VAR_7):...\n", "VAR_14 = [data for data in VAR_7 if len(data) > VAR_6]\n", "if VAR_14:\n", "return False, VAR_14\n", "return True, []\n" ]
[ "def validate_row(self, input_row):...\n", "length_exceeded = [data for data in input_row if len(data) > max_data_length]\n", "if length_exceeded:\n", "return False, length_exceeded\n", "return True, []\n" ]
[ 0, 4, 4, 4, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def __str__(self):...\n", "\"\"\"docstring\"\"\"\n", "return self.__repr__(\n ) + '\\nName: ' + self.name + '\\nAPI version: ' + self.api_version + \"\"\"\nPlugin version: \"\"\" + self.version + '\\nAuthor: ' + self.author + '\\n'\n" ]
[ "def __str__(self):...\n", "\"\"\"docstring\"\"\"\n", "return self.__repr__(\n ) + '\\nName: ' + self.name + '\\nAPI version: ' + self.api_version + \"\"\"\nPlugin version: \"\"\" + self.version + '\\nAuthor: ' + self.author + '\\n'\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_20(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_61 = super(CLASS_0, self)._get_active_sponsorships_domain()\n", "VAR_61.insert(0, '|')\n", "VAR_61.insert(3, ('partner_id', 'in', self.mapped('member_ids').ids))\n", "VAR_61.insert(4, '|')\n", "VAR_61.insert(6, ('correspondent_id', 'in', self.mapped('member_ids').ids))\n", "return VAR_61\n" ]
[ "def _get_active_sponsorships_domain(self):...\n", "\"\"\"docstring\"\"\"\n", "domain = super(ResPartner, self)._get_active_sponsorships_domain()\n", "domain.insert(0, '|')\n", "domain.insert(3, ('partner_id', 'in', self.mapped('member_ids').ids))\n", "domain.insert(4, '|')\n", "domain.insert(6, ('correspondent_id', 'in', self.mapped('member_ids').ids))\n", "return domain\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "def __init__(self, VAR_6):...\n", "self.path = VAR_6\n", "self.abspath = VAR_3.STORAGE_DIR + VAR_6\n", "self.meta = ImageMetadata(self.abspath)\n", "self.meta.read()\n" ]
[ "def __init__(self, path):...\n", "self.path = path\n", "self.abspath = settings.STORAGE_DIR + path\n", "self.meta = ImageMetadata(self.abspath)\n", "self.meta.read()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "import json\n", "import os\n", "import stat\n", "import tempfile\n", "from django.conf import settings\n", "def FUNC_0(VAR_0, VAR_1, VAR_2):...\n", "VAR_1['AWS_ACCESS_KEY_ID'] = VAR_0.get_input('username', default='')\n", "VAR_1['AWS_SECRET_ACCESS_KEY'] = VAR_0.get_input('password', default='')\n", "if VAR_0.has_input('security_token'):\n", "VAR_1['AWS_SECURITY_TOKEN'] = VAR_0.get_input('security_token', default='')\n", "def FUNC_1(VAR_0, VAR_1, VAR_2):...\n", "VAR_3 = VAR_0.get_input('project', default='')\n", "VAR_4 = VAR_0.get_input('username', default='')\n", "VAR_1['GCE_EMAIL'] = VAR_4\n", "VAR_1['GCE_PROJECT'] = VAR_3\n", "VAR_5 = {'type': 'service_account', 'private_key': VAR_0.get_input(\n 'ssh_key_data', default=''), 'client_email': VAR_4, 'project_id': VAR_3}\n", "VAR_6, VAR_7 = tempfile.mkstemp(dir=private_data_dir)\n", "VAR_8 = os.fdopen(VAR_6, 'w')\n", "json.dump(VAR_5, VAR_8)\n", "VAR_8.close()\n", "os.chmod(VAR_7, stat.S_IRUSR | stat.S_IWUSR)\n", "VAR_1['GCE_CREDENTIALS_FILE_PATH'] = VAR_7\n", "def FUNC_2(VAR_0, VAR_1, VAR_2):...\n", "VAR_9 = VAR_0.get_input('client', default='')\n", "VAR_10 = VAR_0.get_input('tenant', default='')\n", "if len(VAR_9) and len(VAR_10):\n", "VAR_1['AZURE_CLIENT_ID'] = VAR_9\n", "VAR_1['AZURE_SUBSCRIPTION_ID'] = VAR_0.get_input('subscription', default='')\n", "VAR_1['AZURE_TENANT'] = VAR_10\n", "VAR_1['AZURE_AD_USER'] = VAR_0.get_input('username', default='')\n", "VAR_1['AZURE_SECRET'] = VAR_0.get_input('secret', default='')\n", "VAR_1['AZURE_PASSWORD'] = VAR_0.get_input('password', default='')\n", "VAR_1['AZURE_SUBSCRIPTION_ID'] = VAR_0.get_input('subscription', default='')\n", "if VAR_0.has_input('cloud_environment'):\n", "VAR_1['AZURE_CLOUD_ENVIRONMENT'] = VAR_0.get_input('cloud_environment')\n", "def FUNC_3(VAR_0, VAR_1, VAR_2):...\n", "VAR_1['VMWARE_USER'] = VAR_0.get_input('username', default='')\n", "VAR_1['VMWARE_PASSWORD'] = VAR_0.get_input('password', default='')\n", "VAR_1['VMWARE_HOST'] = VAR_0.get_input('host', default='')\n", "VAR_1['VMWARE_VALIDATE_CERTS'] = str(settings.VMWARE_VALIDATE_CERTS)\n" ]
[ "import json\n", "import os\n", "import stat\n", "import tempfile\n", "from django.conf import settings\n", "def aws(cred, env, private_data_dir):...\n", "env['AWS_ACCESS_KEY_ID'] = cred.get_input('username', default='')\n", "env['AWS_SECRET_ACCESS_KEY'] = cred.get_input('password', default='')\n", "if cred.has_input('security_token'):\n", "env['AWS_SECURITY_TOKEN'] = cred.get_input('security_token', default='')\n", "def gce(cred, env, private_data_dir):...\n", "project = cred.get_input('project', default='')\n", "username = cred.get_input('username', default='')\n", "env['GCE_EMAIL'] = username\n", "env['GCE_PROJECT'] = project\n", "json_cred = {'type': 'service_account', 'private_key': cred.get_input(\n 'ssh_key_data', default=''), 'client_email': username, 'project_id':\n project}\n", "handle, path = tempfile.mkstemp(dir=private_data_dir)\n", "f = os.fdopen(handle, 'w')\n", "json.dump(json_cred, f)\n", "f.close()\n", "os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)\n", "env['GCE_CREDENTIALS_FILE_PATH'] = path\n", "def azure_rm(cred, env, private_data_dir):...\n", "client = cred.get_input('client', default='')\n", "tenant = cred.get_input('tenant', default='')\n", "if len(client) and len(tenant):\n", "env['AZURE_CLIENT_ID'] = client\n", "env['AZURE_SUBSCRIPTION_ID'] = cred.get_input('subscription', default='')\n", "env['AZURE_TENANT'] = tenant\n", "env['AZURE_AD_USER'] = cred.get_input('username', default='')\n", "env['AZURE_SECRET'] = cred.get_input('secret', default='')\n", "env['AZURE_PASSWORD'] = cred.get_input('password', default='')\n", "env['AZURE_SUBSCRIPTION_ID'] = cred.get_input('subscription', default='')\n", "if cred.has_input('cloud_environment'):\n", "env['AZURE_CLOUD_ENVIRONMENT'] = cred.get_input('cloud_environment')\n", "def vmware(cred, env, private_data_dir):...\n", "env['VMWARE_USER'] = cred.get_input('username', default='')\n", "env['VMWARE_PASSWORD'] = cred.get_input('password', default='')\n", "env['VMWARE_HOST'] = cred.get_input('host', default='')\n", "env['VMWARE_VALIDATE_CERTS'] = str(settings.VMWARE_VALIDATE_CERTS)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_21(VAR_49, **VAR_18):...\n", "VAR_63 = mock.Mock()\n", "VAR_63.history = None\n", "VAR_63.headers = {'location': ''}\n", "VAR_63.raw._original_response = {}\n", "VAR_20.record_call(VAR_49, **kwargs)\n", "return VAR_63\n" ]
[ "def _adapter_send(request, **kwargs):...\n", "mock_response = mock.Mock()\n", "mock_response.history = None\n", "mock_response.headers = {'location': ''}\n", "mock_response.raw._original_response = {}\n", "cluster_api.record_call(request, **kwargs)\n", "return mock_response\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_2(self, VAR_0, VAR_1, *VAR_2, **VAR_3):...\n", "VAR_12 = JustURL.objects.get(VAR_1=pk)\n", "VAR_11 = CounterCountingForm(VAR_0.POST or None)\n", "if VAR_11.is_valid():\n", "VAR_12.count += 1\n", "return redirect('home-view')\n", "VAR_22 = get_client_ip(VAR_0)\n", "VAR_23 = VAR_0.META['HTTP_USER_AGENT']\n", "VAR_24 = ClickTracking.objects.create(client_ip=ip, user_agent=client_agent)\n", "VAR_24.url.add(VAR_12)\n", "VAR_24.save()\n", "VAR_12.save()\n", "return FUNC_0(VAR_0, VAR_1)\n" ]
[ "def post(self, request, pk, *args, **kwargs):...\n", "object = JustURL.objects.get(pk=pk)\n", "form = CounterCountingForm(request.POST or None)\n", "if form.is_valid():\n", "object.count += 1\n", "return redirect('home-view')\n", "ip = get_client_ip(request)\n", "client_agent = request.META['HTTP_USER_AGENT']\n", "clicktracker = ClickTracking.objects.create(client_ip=ip, user_agent=\n client_agent)\n", "clicktracker.url.add(object)\n", "clicktracker.save()\n", "object.save()\n", "return link_redirect(request, pk)\n" ]
[ 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 6, 6 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "AugAssign'", "Return'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "VAR_26 = ''\n", "VAR_27 = ''\n", "VAR_28 = ''\n", "VAR_23, VAR_22 = shellutil.run_get_output('ifconfig -l ether', VAR_10=False)\n", "if VAR_23:\n", "VAR_29 = VAR_22.split()\n", "if not VAR_29:\n", "VAR_26 = VAR_29[0]\n", "VAR_23, VAR_22 = shellutil.run_get_output('ifconfig ' + VAR_26, VAR_10=False)\n", "if VAR_23:\n", "for line in VAR_22.split('\\n'):\n", "if line.find('inet ') != -1:\n", "logger.verbose('Interface info: ({0},{1},{2})', VAR_26, VAR_27, VAR_28)\n", "VAR_27 = line.split()[1]\n", "if line.find('ether ') != -1:\n", "return VAR_26, VAR_27, VAR_28\n", "VAR_28 = line.split()[1]\n" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "iface = ''\n", "inet = ''\n", "mac = ''\n", "err, output = shellutil.run_get_output('ifconfig -l ether', chk_err=False)\n", "if err:\n", "ifaces = output.split()\n", "if not ifaces:\n", "iface = ifaces[0]\n", "err, output = shellutil.run_get_output('ifconfig ' + iface, chk_err=False)\n", "if err:\n", "for line in output.split('\\n'):\n", "if line.find('inet ') != -1:\n", "logger.verbose('Interface info: ({0},{1},{2})', iface, inet, mac)\n", "inet = line.split()[1]\n", "if line.find('ether ') != -1:\n", "return iface, inet, mac\n", "mac = line.split()[1]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "For", "Condition", "Expr'", "Assign'", "Condition", "Return'", "Assign'" ]
[ "def FUNC_28(self):...\n", "VAR_54 = Digraph('Deps', strict=True)\n", "VAR_54.graph_attr.update(rankdir='BT')\n", "VAR_49 = self.nodes.get('master_node')\n", "self.logger.error(\n 'Detected circular dependency reference between %s and %s!' % (ex.node1,\n ex.node2))\n", "VAR_54.view()\n", "for current in VAR_49.depends_on:\n", "VAR_54.edge(ex.node1, ex.node2, 'circular error', color='red')\n", "VAR_54.node(current.comp_name)\n", "VAR_54.edge(ex.node2, ex.node1, color='red')\n", "VAR_50 = []\n", "VAR_51 = []\n", "dep_resolve(current, VAR_50, VAR_51)\n", "for VAR_49 in VAR_50:\n", "if 'depends' in VAR_49.component:\n", "for dep in VAR_49.component['depends']:\n", "if dep not in self.nodes:\n", "VAR_54.node(dep, color='red')\n", "if VAR_49.comp_name is not 'master_node':\n", "VAR_54.edge(VAR_49.comp_name, dep, 'missing', color='red')\n", "VAR_54.edge(VAR_49.comp_name, dep)\n" ]
[ "def draw_graph(self):...\n", "deps = Digraph('Deps', strict=True)\n", "deps.graph_attr.update(rankdir='BT')\n", "node = self.nodes.get('master_node')\n", "self.logger.error(\n 'Detected circular dependency reference between %s and %s!' % (ex.node1,\n ex.node2))\n", "deps.view()\n", "for current in node.depends_on:\n", "deps.edge(ex.node1, ex.node2, 'circular error', color='red')\n", "deps.node(current.comp_name)\n", "deps.edge(ex.node2, ex.node1, color='red')\n", "res = []\n", "unres = []\n", "dep_resolve(current, res, unres)\n", "for node in res:\n", "if 'depends' in node.component:\n", "for dep in node.component['depends']:\n", "if dep not in self.nodes:\n", "deps.node(dep, color='red')\n", "if node.comp_name is not 'master_node':\n", "deps.edge(node.comp_name, dep, 'missing', color='red')\n", "deps.edge(node.comp_name, dep)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "For", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "For", "Condition", "For", "Condition", "Expr'", "Condition", "Expr'", "Expr'" ]
[ "@property...\n", "if self._xsrf_token is None:\n", "self.clear_cookies()\n", "return self._xsrf_token\n", "VAR_25 = self.get(\n '_ah/login?email=georges%40example.com&admin=True&action=Login&continue=/_ah/admin/interactive'\n )\n", "self._xsrf_token = re.search('name=\"xsrf_token\" value=\"(.*?)\"/>', VAR_25\n ).group(1)\n", "self.clear_cookies()\n" ]
[ "@property...\n", "if self._xsrf_token is None:\n", "self.clear_cookies()\n", "return self._xsrf_token\n", "interactive = self.get(\n '_ah/login?email=georges%40example.com&admin=True&action=Login&continue=/_ah/admin/interactive'\n )\n", "self._xsrf_token = re.search('name=\"xsrf_token\" value=\"(.*?)\"/>', interactive\n ).group(1)\n", "self.clear_cookies()\n" ]
[ 0, 0, 0, 0, 5, 5, 0 ]
[ "Condition", "Condition", "Expr'", "Return'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_1():...\n", "VAR_0 = raw_input('Run another query? (y/n): ')\n", "if VAR_0 == 'y' or VAR_0 == 'Y':\n", "FUNC_0()\n", "print('Goodbye.')\n", "FUNC_2()\n" ]
[ "def run_another():...\n", "opt = raw_input('Run another query? (y/n): ')\n", "if opt == 'y' or opt == 'Y':\n", "print_menu()\n", "print('Goodbye.')\n", "run_query_case()\n" ]
[ 0, 0, 0, 0, 4, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Expr'", "Expr'", "Expr'" ]
[ "def __getattr__(self, VAR_8):...\n", "def FUNC_10(*VAR_11):...\n", "VAR_12 = ' '.join(str(elem) for elem in VAR_11)\n", "return VAR_12\n" ]
[ "def __getattr__(self, name):...\n", "def fastboot_call(*args):...\n", "arg_str = ' '.join(str(elem) for elem in args)\n", "return arg_str\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_2(VAR_1: ISpy, VAR_4: str) ->str:...\n", "VAR_6 = FUNC_3(FUNC_1(field_spy, field_name) for field_name, field_spy in\n VAR_1.selected_fields.items())\n", "return f\"(SELECT {', '.join(VAR_6)} FROM {VAR_4})\"\n" ]
[ "def construct_subquery(spy: ISpy, name: str) ->str:...\n", "select_fields = _flatten(construct_selects(field_spy, field_name) for \n field_name, field_spy in spy.selected_fields.items())\n", "return f\"(SELECT {', '.join(select_fields)} FROM {name})\"\n" ]
[ 0, 0, 4 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_1(VAR_0, VAR_1):...\n", "for VAR_32, v in VAR_1.items():\n", "if isinstance(v, dict):\n", "return VAR_0\n", "VAR_31 = VAR_0.setdefault(VAR_32, {})\n", "VAR_0[VAR_32] = v\n", "FUNC_1(VAR_31, v)\n" ]
[ "def merge_cfg(dest, source):...\n", "for k, v in source.items():\n", "if isinstance(v, dict):\n", "return dest\n", "subdest = dest.setdefault(k, {})\n", "dest[k] = v\n", "merge_cfg(subdest, v)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Condition", "Return'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_3(self, VAR_0, VAR_1):...\n", "if VAR_1 and self.db.check_date(VAR_1):\n", "return self.csv_generator.write_csv(VAR_1)\n", "if VAR_1:\n", "return 'Нет данных за этот день.'\n", "if VAR_0 == 'отчет' or VAR_0 == 'отчёт':\n", "return self.csv_generator.write_csv()\n", "return 'Ошибка! Неверный формат даты.'\n" ]
[ "def parse_report_message(self, string, date):...\n", "if date and self.db.check_date(date):\n", "return self.csv_generator.write_csv(date)\n", "if date:\n", "return 'Нет данных за этот день.'\n", "if string == 'отчет' or string == 'отчёт':\n", "return self.csv_generator.write_csv()\n", "return 'Ошибка! Неверный формат даты.'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "VAR_22 = CLASS_1.get_user_by_id(VAR_3)\n", "VAR_39 = MappingLevel(VAR_22.mapping_level)\n", "if VAR_39 == MappingLevel.ADVANCED:\n", "return\n", "VAR_24 = current_app.config['MAPPER_LEVEL_INTERMEDIATE']\n", "VAR_25 = current_app.config['MAPPER_LEVEL_ADVANCED']\n", "VAR_47 = OSMService.get_osm_details_for_user(VAR_3)\n", "current_app.logger.error('Error attempting to update mapper level')\n", "VAR_22.save()\n", "if VAR_47.changeset_count > VAR_25 and VAR_22.mapping_level != MappingLevel.ADVANCED.value:\n", "return\n", "return VAR_22\n", "VAR_22.mapping_level = MappingLevel.ADVANCED.value\n", "if VAR_24 < VAR_47.changeset_count < VAR_25 and VAR_22.mapping_level != MappingLevel.INTERMEDIATE.value:\n", "CLASS_1.notify_level_upgrade(VAR_3, VAR_22.username, 'ADVANCED')\n", "VAR_22.mapping_level = MappingLevel.INTERMEDIATE.value\n", "CLASS_1.notify_level_upgrade(VAR_3, VAR_22.username, 'INTERMEDIATE')\n" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "user = UserService.get_user_by_id(user_id)\n", "user_level = MappingLevel(user.mapping_level)\n", "if user_level == MappingLevel.ADVANCED:\n", "return\n", "intermediate_level = current_app.config['MAPPER_LEVEL_INTERMEDIATE']\n", "advanced_level = current_app.config['MAPPER_LEVEL_ADVANCED']\n", "osm_details = OSMService.get_osm_details_for_user(user_id)\n", "current_app.logger.error('Error attempting to update mapper level')\n", "user.save()\n", "if osm_details.changeset_count > advanced_level and user.mapping_level != MappingLevel.ADVANCED.value:\n", "return\n", "return user\n", "user.mapping_level = MappingLevel.ADVANCED.value\n", "if intermediate_level < osm_details.changeset_count < advanced_level and user.mapping_level != MappingLevel.INTERMEDIATE.value:\n", "UserService.notify_level_upgrade(user_id, user.username, 'ADVANCED')\n", "user.mapping_level = MappingLevel.INTERMEDIATE.value\n", "UserService.notify_level_upgrade(user_id, user.username, 'INTERMEDIATE')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Condition", "Return'", "Return'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_2(self):...\n", "if self.get_option('no-hypervisors'):\n", "return []\n", "VAR_6 = self.exec_master_cmd(self.dbcmd, need_root=True)\n", "if VAR_6['status'] == 0:\n", "VAR_12 = VAR_6['stdout'].splitlines()[2:-1]\n", "return [n.split('(')[0].strip() for n in VAR_12]\n" ]
[ "def get_nodes(self):...\n", "if self.get_option('no-hypervisors'):\n", "return []\n", "res = self.exec_master_cmd(self.dbcmd, need_root=True)\n", "if res['status'] == 0:\n", "nodes = res['stdout'].splitlines()[2:-1]\n", "return [n.split('(')[0].strip() for n in nodes]\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_1(self):...\n", "super(CLASS_1, self).setUp()\n", "VAR_12 = patch('pavelib.utils.test.suites.bokchoy_suite.sh')\n", "self._mock_sh = VAR_12.start()\n", "self.addCleanup(VAR_12.stop)\n" ]
[ "def setUp(self):...\n", "super(TestPaverPa11yCrawlerCmd, self).setUp()\n", "mock_sh = patch('pavelib.utils.test.suites.bokchoy_suite.sh')\n", "self._mock_sh = mock_sh.start()\n", "self.addCleanup(mock_sh.stop)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_4(self, VAR_14):...\n", "\"\"\"docstring\"\"\"\n", "VAR_28 = VAR_14.groupdict()\n", "if isinstance(self.severity_map, dict) and 'severity' in VAR_28 and VAR_28[\n", "VAR_28['severity'] = self.severity_map[VAR_28['severity']]\n", "return VAR_28\n" ]
[ "def _get_groupdict(self, match):...\n", "\"\"\"docstring\"\"\"\n", "groups = match.groupdict()\n", "if isinstance(self.severity_map, dict) and 'severity' in groups and groups[\n", "groups['severity'] = self.severity_map[groups['severity']]\n", "return groups\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_9(self, VAR_10):...\n", "\"\"\"docstring\"\"\"\n", "VAR_26 = 'string' % {'database': VAR_10}\n", "self.db.executescript(VAR_26)\n", "self.db.commit()\n" ]
[ "def drop_indexes(self, database):...\n", "\"\"\"docstring\"\"\"\n", "sqlstr = (\n \"\"\"\n DROP INDEX IF EXISTS %(database)s.phrases_index_p;\n DROP INDEX IF EXISTS %(database)s.phrases_index_i;\n VACUUM;\n \"\"\"\n % {'database': database})\n", "self.db.executescript(sqlstr)\n", "self.db.commit()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_6(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_9 = model.Identity(model.IDENTITY_USER, '[email protected]')\n", "VAR_10 = model.Identity(model.IDENTITY_USER, '[email protected]')\n", "model.bootstrap_ip_whitelist('whitelist', ['192.168.1.100/32'])\n", "model.bootstrap_ip_whitelist_assignment(VAR_9, 'whitelist')\n", "VAR_11 = [None]\n", "@classmethod...\n", "return [lambda _req: VAR_11[0]]\n" ]
[ "def test_ip_whitelist(self):...\n", "\"\"\"docstring\"\"\"\n", "ident1 = model.Identity(model.IDENTITY_USER, '[email protected]')\n", "ident2 = model.Identity(model.IDENTITY_USER, '[email protected]')\n", "model.bootstrap_ip_whitelist('whitelist', ['192.168.1.100/32'])\n", "model.bootstrap_ip_whitelist_assignment(ident1, 'whitelist')\n", "mocked_ident = [None]\n", "@classmethod...\n", "return [lambda _req: mocked_ident[0]]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Condition", "Return'" ]
[ "def __init__(self, VAR_18, *VAR_15, **VAR_16):...\n", "CLASS_3.__init__(self, VAR_18, errors.BAD_PASSWORD, *VAR_15, **kw)\n" ]
[ "def __init__(self, item, *a, **kw):...\n", "VRequired.__init__(self, item, errors.BAD_PASSWORD, *a, **kw)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "@VAR_7.context_processor...\n", "return dict(admin_base_template=admin.base_template, admin_view=admin.\n index_view, h=helpers, get_url=url_for)\n" ]
[ "@security_ctx.context_processor...\n", "return dict(admin_base_template=admin.base_template, admin_view=admin.\n index_view, h=helpers, get_url=url_for)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_16(VAR_1):...\n", "async def FUNC_15(VAR_3):...\n", "VAR_19 = await VAR_1(VAR_3, VAR_4)\n", "return VAR_19\n" ]
[ "def decorator(func):...\n", "async def ret(request):...\n", "out = await func(request, base)\n", "return out\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "AsyncFunctionDef'", "Assign'", "Return'" ]
[ "from lxml import html\n", "from importlib import import_module\n", "from django.test import Client\n", "from django.test import TestCase\n", "from dashboard.tests.loader import load_model_objects, fixtures_standard\n", "from dashboard.views.data_group import ExtractionScriptForm, DataGroupForm\n", "from django.core.files.uploadedfile import SimpleUploadedFile\n", "from django.contrib.auth.models import User\n", "from django.test import Client\n", "from importlib import import_module\n", "from django.db.models import Max\n", "from dashboard.forms import *\n", "from dashboard.models import *\n", "def FUNC_0(self):...\n", "self.objects = load_model_objects()\n", "self.client.login(username='Karyn', password='specialP@55word')\n", "def FUNC_1(self):...\n", "VAR_1 = self.objects.dg.pk\n", "VAR_2 = self.client.get(f'/datagroup/{VAR_1}/')\n", "self.assertFalse(self.objects.doc.matched,\n    'Document should start w/ matched False')\n", "self.assertFalse(self.objects.doc.extracted,\n    'Document should start w/ extracted False')\n", "self.assertFalse(VAR_2.context['datagroup'].all_matched(),\n    'UploadForm should be included in the page!')\n", "self.assertFalse(VAR_2.context['extract_form'],\n    'ExtractForm should not be included in the page!')\n", "self.objects.doc.matched = True\n", "self.objects.doc.save()\n", "VAR_2 = self.client.get(f'/datagroup/{VAR_1}/')\n", "self.assertTrue(VAR_2.context['datagroup'].all_matched(),\n    'UploadForm should not be included in the page!')\n", "self.assertIsInstance(VAR_2.context['extract_form'], ExtractionScriptForm,\n    'ExtractForm should be included in the page!')\n", "self.objects.doc.extracted = True\n", "self.objects.doc.save()\n", "VAR_2 = self.client.get(f'/datagroup/{VAR_1}/')\n", "self.assertTrue(VAR_2.context['datagroup'].all_matched(),\n    'UploadForm should not be included in the page!')\n", "self.assertFalse(VAR_2.context['extract_form'],\n    'ExtractForm should not be included in the page!')\n", "def FUNC_2(self):...\n", "VAR_1 = self.objects.dg.pk\n", "self.assertEqual(str(self.objects.dg.group_type), 'Composition',\n    'Type of DataGroup needs to be \"composition\" for this test.')\n", "VAR_2 = self.client.get(f'/datagroup/{VAR_1}/')\n", "self.assertEqual(VAR_2.context['extract_fields'], ['data_document_id',\n    'data_document_filename', 'prod_name', 'doc_date', 'rev_num',\n    'raw_category', 'raw_cas', 'raw_chem_name', 'report_funcuse',\n    'raw_min_comp', 'raw_max_comp', 'unit_type', 'ingredient_rank',\n    'raw_central_comp'], 'Fieldnames passed are incorrect!')\n", "self.objects.gt.title = 'Functional use'\n", "self.objects.gt.code = 'FU'\n", "self.objects.gt.save()\n", "self.assertEqual(str(self.objects.dg.group_type), 'Functional use',\n    'Type of DataGroup needs to be \"FU\" for this test.')\n", "VAR_2 = self.client.get(f'/datagroup/{VAR_1}/')\n", "self.assertEqual(VAR_2.context['extract_fields'], ['data_document_id',\n    'data_document_filename', 'prod_name', 'doc_date', 'rev_num',\n    'raw_category', 'raw_cas', 'raw_chem_name', 'report_funcuse'],\n    'Fieldnames passed are incorrect!')\n", "def FUNC_3(self):...\n", "VAR_1 = self.objects.dg.pk\n", "self.objects.doc.matched = True\n", "self.objects.doc.save()\n", "VAR_2 = self.client.get(f'/datagroup/{VAR_1}/')\n", "self.assertIsInstance(VAR_2.context['extract_form'], ExtractionScriptForm,\n    'ExtractForm should be included in the page!')\n", "self.objects.gt.code = 'UN'\n", "self.objects.gt.save()\n", "VAR_2 = self.client.get(f'/datagroup/{VAR_1}/')\n", "self.assertFalse(VAR_2.context['extract_form'],\n    'ExtractForm should not be included in the page!')\n", "def FUNC_4(self):...\n", "VAR_2 = self.client.get(f'/datagroup/{self.objects.dg.pk}/')\n", "self.assertEqual(VAR_2.context['bulk'], 0,\n    'Product linked to all DataDocuments, no bulk_create needed.')\n", "VAR_3 = DataDocument.objects.create(data_group=self.objects.dg)\n", "VAR_3.matched = True\n", "self.objects.doc.matched = True\n", "VAR_3.save()\n", "self.objects.doc.save()\n", "VAR_2 = self.client.get(f'/datagroup/{self.objects.dg.pk}/')\n", "self.assertEqual(VAR_2.context['bulk'], 1,\n    'Not all DataDocuments linked to Product, bulk_create needed')\n", "self.assertIn('Bulk Create', VAR_2.content.decode(),\n    'Bulk create button should be present.')\n", "VAR_4 = Product.objects.create(upc='stub_47', data_source=self.objects.ds)\n", "ProductDocument.objects.create(document=doc, VAR_6=p)\n", "VAR_2 = self.client.get(f'/datagroup/{self.objects.dg.pk}/')\n", "self.assertEqual(VAR_2.context['bulk'], 0,\n    'Product linked to all DataDocuments, no bulk_create needed.')\n", "self.objects.dg.group_type = GroupType.objects.create(title=\n    'Habits and practices')\n", "VAR_2 = self.client.get(f'/datagroup/{self.objects.dg.pk}/')\n", "self.assertNotIn('Bulk Create', VAR_2.content.decode(),\n    \"Bulk button shouldn't be present w/ Habits and practices group_type.\")\n", "def FUNC_5(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_3 = DataDocument.objects.create(data_group=self.objects.dg)\n", "VAR_2 = self.client.get(f'/datagroup/{self.objects.dg.pk}/')\n", "self.assertEqual(VAR_2.context['bulk'], 1,\n    'Not all DataDocuments linked to Product, bulk_create needed')\n", "VAR_5 = Product.objects.all().aggregate(Max('id'))['id__max'] + 1\n", "VAR_2 = self.client.post(f'/datagroup/{self.objects.dg.pk}/', {'bulk': 1})\n", "self.assertEqual(VAR_2.context['bulk'], 0,\n    'Products linked to all DataDocuments, no bulk_create needed.')\n", "VAR_6 = ProductDocument.objects.get(document=doc).product\n", "self.assertEqual(VAR_6.title, 'unknown',\n    'Title should be unknown in bulk_create')\n", "self.assertEqual(VAR_6.upc, f'stub_%s' % VAR_5,\n    'UPC should be created for second Product')\n", "def FUNC_6(self):...\n", "VAR_2 = self.client.get(f'/datagroup/{DataGroup.objects.first().id}/'\n    ).content.decode('utf8')\n", "self.assertIn('Please limit upload to <600 documents at one time', VAR_2,\n    'Note to limit upload to <600 should be on the page')\n", "def FUNC_7(self):...\n", "VAR_2 = self.client.get(f'/datagroup/{DataGroup.objects.first().id}/'\n    ).content.decode('utf8')\n", "self.assertIn('0 extracted', VAR_2,\n    'Data Group should contain a count of 0 total extracted documents')\n", "self.objects.doc.extracted = True\n", "self.objects.doc.save()\n", "VAR_2 = self.client.get(f'/datagroup/{DataGroup.objects.first().id}/'\n    ).content.decode('utf8')\n", "self.assertIn('1 extracted', VAR_2,\n    'Data Group should contain a count of 1 total extracted documents')\n", "def FUNC_8(self):...\n", "VAR_7 = f'/datagroup/{DataGroup.objects.first().id}/'\n", "VAR_2 = self.client.get(VAR_7).content.decode('utf8')\n", "VAR_8 = '<span class=\"oi oi-trash\"></span>'\n", "self.assertIn(VAR_8, VAR_2, 'Trash button should be present if not matched.')\n", "self.objects.doc.matched = True\n", "self.objects.doc.save()\n", "VAR_2 = self.client.get(VAR_7).content.decode('utf8')\n", "VAR_8 = '<span class=\"oi oi-circle-check\" style=\"color:green;\"></span>'\n", "self.assertIn(VAR_8, VAR_2, 'Check should be present if matched.')\n", "def FUNC_9(self):...\n", "VAR_1 = self.objects.dg.pk\n", "VAR_2 = self.client.get(f'/datagroup/{VAR_1}/').content.decode('utf8')\n", "self.assertIn('<th>Product</th>', VAR_2,\n    'Data Group should have Product column.')\n", "VAR_9 = GroupType.objects.create(title='Functional use')\n", "self.objects.dg.group_type = VAR_9\n", "self.objects.dg.save()\n", "VAR_2 = self.client.get(f'/datagroup/{VAR_1}/').content.decode('utf8')\n", "self.assertNotIn('<th>Product</th>', VAR_2,\n    'Data Group should have Product column.')\n", "def FUNC_10(self):...\n", "VAR_1 = self.objects.dg.pk\n", "VAR_2 = self.client.get(f'/datagroup/{VAR_1}/')\n", "self.assertContains(VAR_2, '<a href=\"/datasource/', msg_prefix=\n    'Should be able to get back to DataSource from here.')\n", "def FUNC_11(self):...\n", "VAR_10 = self.objects.dg.pk\n", "VAR_11 = str(self.objects.ds.pk)\n", "VAR_12 = str(self.objects.gt.pk)\n", "VAR_13 = {'name': ['Changed Name'], 'group_type': [VAR_12], 'downloaded_by':\n    [str(User.objects.get(username='Karyn').pk)], 'downloaded_at': [\n    '08/20/2017'], 'data_source': [VAR_11]}\n", "VAR_2 = self.client.post(f'/datagroup/edit/{VAR_10}/', VAR_13=data)\n", "self.assertEqual(VAR_2.status_code, 302, 'User is redirected to detail page.')\n", "self.assertEqual(VAR_2.url, f'/datagroup/{VAR_10}/',\n    'Should go to detail page.')\n", "VAR_0 = fixtures_standard\n", "def FUNC_0(self):...\n", "self.client.login(username='Karyn', password='specialP@55word')\n", "def FUNC_12(self):...\n", "VAR_14 = DataGroup.objects.filter(group_type__code='CO').first()\n", "VAR_15 = self.client.get(f'/datagroup/%s/' % VAR_14.id)\n", "self.assertIn(b'Download Raw', VAR_15.content)\n", "VAR_16 = DataDocument.objects.filter(id__in=ExtractedChemical.objects.all()\n    .values('extracted_text_id')).order_by().values_list('data_group_id',\n    flat=True).distinct()\n", "for dg_id in VAR_16:\n", "VAR_15 = self.client.get(f'/datagroup/raw_extracted_records/%s/' % dg_id)\n", "VAR_15 = self.client.get(f'/datagroup/raw_extracted_records/%s/' % VAR_16[0])\n", "self.assertEqual(VAR_15.status_code, 200)\n", "VAR_17 = (\n    'ExtractedChemical_id,raw_cas,raw_chem_name,raw_min_comp,raw_central_comp,raw_max_comp,unit_type'\n    )\n", "VAR_18 = list(i.decode('utf-8') for i in VAR_15.streaming_content)\n", "self.assertIn(VAR_17, VAR_18[1])\n" ]
[ "from lxml import html\n", "from importlib import import_module\n", "from django.test import Client\n", "from django.test import TestCase\n", "from dashboard.tests.loader import load_model_objects, fixtures_standard\n", "from dashboard.views.data_group import ExtractionScriptForm, DataGroupForm\n", "from django.core.files.uploadedfile import SimpleUploadedFile\n", "from django.contrib.auth.models import User\n", "from django.test import Client\n", "from importlib import import_module\n", "from django.db.models import Max\n", "from dashboard.forms import *\n", "from dashboard.models import *\n", "def setUp(self):...\n", "self.objects = load_model_objects()\n", "self.client.login(username='Karyn', password='specialP@55word')\n", "def test_detail_form_load(self):...\n", "pk = self.objects.dg.pk\n", "response = self.client.get(f'/datagroup/{pk}/')\n", "self.assertFalse(self.objects.doc.matched,\n    'Document should start w/ matched False')\n", "self.assertFalse(self.objects.doc.extracted,\n    'Document should start w/ extracted False')\n", "self.assertFalse(response.context['datagroup'].all_matched(),\n    'UploadForm should be included in the page!')\n", "self.assertFalse(response.context['extract_form'],\n    'ExtractForm should not be included in the page!')\n", "self.objects.doc.matched = True\n", "self.objects.doc.save()\n", "response = self.client.get(f'/datagroup/{pk}/')\n", "self.assertTrue(response.context['datagroup'].all_matched(),\n    'UploadForm should not be included in the page!')\n", "self.assertIsInstance(response.context['extract_form'],\n    ExtractionScriptForm, 'ExtractForm should be included in the page!')\n", "self.objects.doc.extracted = True\n", "self.objects.doc.save()\n", "response = self.client.get(f'/datagroup/{pk}/')\n", "self.assertTrue(response.context['datagroup'].all_matched(),\n    'UploadForm should not be included in the page!')\n", "self.assertFalse(response.context['extract_form'],\n    'ExtractForm should not be included in the page!')\n", "def test_detail_template_fieldnames(self):...\n", "pk = self.objects.dg.pk\n", "self.assertEqual(str(self.objects.dg.group_type), 'Composition',\n    'Type of DataGroup needs to be \"composition\" for this test.')\n", "response = self.client.get(f'/datagroup/{pk}/')\n", "self.assertEqual(response.context['extract_fields'], ['data_document_id',\n    'data_document_filename', 'prod_name', 'doc_date', 'rev_num',\n    'raw_category', 'raw_cas', 'raw_chem_name', 'report_funcuse',\n    'raw_min_comp', 'raw_max_comp', 'unit_type', 'ingredient_rank',\n    'raw_central_comp'], 'Fieldnames passed are incorrect!')\n", "self.objects.gt.title = 'Functional use'\n", "self.objects.gt.code = 'FU'\n", "self.objects.gt.save()\n", "self.assertEqual(str(self.objects.dg.group_type), 'Functional use',\n    'Type of DataGroup needs to be \"FU\" for this test.')\n", "response = self.client.get(f'/datagroup/{pk}/')\n", "self.assertEqual(response.context['extract_fields'], ['data_document_id',\n    'data_document_filename', 'prod_name', 'doc_date', 'rev_num',\n    'raw_category', 'raw_cas', 'raw_chem_name', 'report_funcuse'],\n    'Fieldnames passed are incorrect!')\n", "def test_unidentifed_group_type(self):...\n", "pk = self.objects.dg.pk\n", "self.objects.doc.matched = True\n", "self.objects.doc.save()\n", "response = self.client.get(f'/datagroup/{pk}/')\n", "self.assertIsInstance(response.context['extract_form'],\n    ExtractionScriptForm, 'ExtractForm should be included in the page!')\n", "self.objects.gt.code = 'UN'\n", "self.objects.gt.save()\n", "response = self.client.get(f'/datagroup/{pk}/')\n", "self.assertFalse(response.context['extract_form'],\n    'ExtractForm should not be included in the page!')\n", "def test_bulk_create_products_form(self):...\n", "response = self.client.get(f'/datagroup/{self.objects.dg.pk}/')\n", "self.assertEqual(response.context['bulk'], 0,\n    'Product linked to all DataDocuments, no bulk_create needed.')\n", "doc = DataDocument.objects.create(data_group=self.objects.dg)\n", "doc.matched = True\n", "self.objects.doc.matched = True\n", "doc.save()\n", "self.objects.doc.save()\n", "response = self.client.get(f'/datagroup/{self.objects.dg.pk}/')\n", "self.assertEqual(response.context['bulk'], 1,\n    'Not all DataDocuments linked to Product, bulk_create needed')\n", "self.assertIn('Bulk Create', response.content.decode(),\n    'Bulk create button should be present.')\n", "p = Product.objects.create(upc='stub_47', data_source=self.objects.ds)\n", "ProductDocument.objects.create(document=doc, product=p)\n", "response = self.client.get(f'/datagroup/{self.objects.dg.pk}/')\n", "self.assertEqual(response.context['bulk'], 0,\n    'Product linked to all DataDocuments, no bulk_create needed.')\n", "self.objects.dg.group_type = GroupType.objects.create(title=\n    'Habits and practices')\n", "response = self.client.get(f'/datagroup/{self.objects.dg.pk}/')\n", "self.assertNotIn('Bulk Create', response.content.decode(),\n    \"Bulk button shouldn't be present w/ Habits and practices group_type.\")\n", "def test_bulk_create_post(self):...\n", "\"\"\"docstring\"\"\"\n", "doc = DataDocument.objects.create(data_group=self.objects.dg)\n", "response = self.client.get(f'/datagroup/{self.objects.dg.pk}/')\n", "self.assertEqual(response.context['bulk'], 1,\n    'Not all DataDocuments linked to Product, bulk_create needed')\n", "new_stub_id = Product.objects.all().aggregate(Max('id'))['id__max'] + 1\n", "response = self.client.post(f'/datagroup/{self.objects.dg.pk}/', {'bulk': 1})\n", "self.assertEqual(response.context['bulk'], 0,\n    'Products linked to all DataDocuments, no bulk_create needed.')\n", "product = ProductDocument.objects.get(document=doc).product\n", "self.assertEqual(product.title, 'unknown',\n    'Title should be unknown in bulk_create')\n", "self.assertEqual(product.upc, f'stub_%s' % new_stub_id,\n    'UPC should be created for second Product')\n", "def test_upload_note(self):...\n", "response = self.client.get(f'/datagroup/{DataGroup.objects.first().id}/'\n    ).content.decode('utf8')\n", "self.assertIn('Please limit upload to <600 documents at one time', response,\n    'Note to limit upload to <600 should be on the page')\n", "def test_extracted_count(self):...\n", "response = self.client.get(f'/datagroup/{DataGroup.objects.first().id}/'\n    ).content.decode('utf8')\n", "self.assertIn('0 extracted', response,\n    'Data Group should contain a count of 0 total extracted documents')\n", "self.objects.doc.extracted = True\n", "self.objects.doc.save()\n", "response = self.client.get(f'/datagroup/{DataGroup.objects.first().id}/'\n    ).content.decode('utf8')\n", "self.assertIn('1 extracted', response,\n    'Data Group should contain a count of 1 total extracted documents')\n", "def test_delete_doc_button(self):...\n", "url = f'/datagroup/{DataGroup.objects.first().id}/'\n", "response = self.client.get(url).content.decode('utf8')\n", "span = '<span class=\"oi oi-trash\"></span>'\n", "self.assertIn(span, response, 'Trash button should be present if not matched.')\n", "self.objects.doc.matched = True\n", "self.objects.doc.save()\n", "response = self.client.get(url).content.decode('utf8')\n", "span = '<span class=\"oi oi-circle-check\" style=\"color:green;\"></span>'\n", "self.assertIn(span, response, 'Check should be present if matched.')\n", "def test_detail_table_headers(self):...\n", "pk = self.objects.dg.pk\n", "response = self.client.get(f'/datagroup/{pk}/').content.decode('utf8')\n", "self.assertIn('<th>Product</th>', response,\n    'Data Group should have Product column.')\n", "fu = GroupType.objects.create(title='Functional use')\n", "self.objects.dg.group_type = fu\n", "self.objects.dg.save()\n", "response = self.client.get(f'/datagroup/{pk}/').content.decode('utf8')\n", "self.assertNotIn('<th>Product</th>', response,\n    'Data Group should have Product column.')\n", "def test_detail_datasource_link(self):...\n", "pk = self.objects.dg.pk\n", "response = self.client.get(f'/datagroup/{pk}/')\n", "self.assertContains(response, '<a href=\"/datasource/', msg_prefix=\n    'Should be able to get back to DataSource from here.')\n", "def test_edit_redirect(self):...\n", "dgpk = self.objects.dg.pk\n", "dspk = str(self.objects.ds.pk)\n", "gtpk = str(self.objects.gt.pk)\n", "data = {'name': ['Changed Name'], 'group_type': [gtpk], 'downloaded_by': [\n    str(User.objects.get(username='Karyn').pk)], 'downloaded_at': [\n    '08/20/2017'], 'data_source': [dspk]}\n", "response = self.client.post(f'/datagroup/edit/{dgpk}/', data=data)\n", "self.assertEqual(response.status_code, 302,\n    'User is redirected to detail page.')\n", "self.assertEqual(response.url, f'/datagroup/{dgpk}/',\n    'Should go to detail page.')\n", "fixtures = fixtures_standard\n", "def setUp(self):...\n", "self.client.login(username='Karyn', password='specialP@55word')\n", "def test_download_raw_comp_data(self):...\n", "dg_co = DataGroup.objects.filter(group_type__code='CO').first()\n", "resp = self.client.get(f'/datagroup/%s/' % dg_co.id)\n", "self.assertIn(b'Download Raw', resp.content)\n", "dg_ids = DataDocument.objects.filter(id__in=ExtractedChemical.objects.all()\n    .values('extracted_text_id')).order_by().values_list('data_group_id',\n    flat=True).distinct()\n", "for dg_id in dg_ids:\n", "resp = self.client.get(f'/datagroup/raw_extracted_records/%s/' % dg_id)\n", "resp = self.client.get(f'/datagroup/raw_extracted_records/%s/' % dg_ids[0])\n", "self.assertEqual(resp.status_code, 200)\n", "field_list = (\n    'ExtractedChemical_id,raw_cas,raw_chem_name,raw_min_comp,raw_central_comp,raw_max_comp,unit_type'\n    )\n", "content = list(i.decode('utf-8') for i in resp.streaming_content)\n", "self.assertIn(field_list, content[1])\n" ]
[ 6, 6, 6, 0, 0, 6, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "FunctionDef'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "For", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'" ]
[ "def __init__(self, VAR_3):...\n", "QObject.__init__(self)\n", "self.main = VAR_3\n", "self.lsp_plugins = {}\n", "self.clients = {}\n", "self.requests = {}\n", "self.register_queue = {}\n", "self.configurations_for_servers = CONF.options('lsp-server')\n", "for VAR_6 in self.configurations_for_servers:\n", "self.clients[VAR_6] = {'status': self.STOPPED, 'config': CONF.get(\n 'lsp-server', VAR_6), 'instance': None}\n", "self.register_queue[VAR_6] = []\n" ]
[ "def __init__(self, parent):...\n", "QObject.__init__(self)\n", "self.main = parent\n", "self.lsp_plugins = {}\n", "self.clients = {}\n", "self.requests = {}\n", "self.register_queue = {}\n", "self.configurations_for_servers = CONF.options('lsp-server')\n", "for language in self.configurations_for_servers:\n", "self.clients[language] = {'status': self.STOPPED, 'config': CONF.get(\n 'lsp-server', language), 'instance': None}\n", "self.register_queue[language] = []\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Assign'" ]
[ "def FUNC_15(self, VAR_6):...\n", "" ]
[ "def set_priority(self, priority):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_5(self, VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "self.logger.warn(VAR_1)\n", "self.console.warn('WARNING: %s' % VAR_1)\n" ]
[ "def log_warn(self, msg):...\n", "\"\"\"docstring\"\"\"\n", "self.logger.warn(msg)\n", "self.console.warn('WARNING: %s' % msg)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'", "Expr'" ]
[ "def __gt__(self, VAR_16):...\n", "VAR_39 = self.workflow._ruleorder.compare(self, VAR_16)\n", "return VAR_39 > 0\n" ]
[ "def __gt__(self, rule):...\n", "comp = self.workflow._ruleorder.compare(self, rule)\n", "return comp > 0\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_2(self, VAR_10):...\n", "return os.path.join(self.target_dir, VAR_10)\n" ]
[ "def output_path(self, file_name):...\n", "return os.path.join(self.target_dir, file_name)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_4(VAR_14):...\n", "VAR_17 = generate_account()\n", "VAR_35 = VAR_17['private']\n", "VAR_17 = VAR_17['account']\n", "print(type(VAR_35), type(VAR_17), type(VAR_14))\n", "print(VAR_35, VAR_17, VAR_14)\n", "VAR_32 = (\n 'INSERT INTO accounts (username, private_key, address, minimum) VALUES (%s, %s, %s, %s)'\n )\n", "VAR_33 = VAR_14, VAR_35, VAR_17, nano_to_raw(0.01)\n", "VAR_13.execute(VAR_32, VAR_33)\n", "VAR_12.commit()\n", "return VAR_17\n" ]
[ "def add_new_account(username):...\n", "address = generate_account()\n", "private = address['private']\n", "address = address['account']\n", "print(type(private), type(address), type(username))\n", "print(private, address, username)\n", "sql = (\n 'INSERT INTO accounts (username, private_key, address, minimum) VALUES (%s, %s, %s, %s)'\n )\n", "val = username, private, address, nano_to_raw(0.01)\n", "mycursor.execute(sql, val)\n", "mydb.commit()\n", "return address\n" ]
[ 0, 4, 4, 4, 4, 4, 4, 4, 0, 0, 4 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]