lines (sequence of strings, lengths 1–444) | raw_lines (sequence of strings, lengths 1–444) | label (sequence of ints, lengths 1–444) | type (sequence of strings, lengths 1–444)
---|---|---|---|
[
"def __repr__(self):...\n",
"return 'Float({})'.format(self.size)\n"
] | [
"def __repr__(self):...\n",
"return 'Float({})'.format(self.size)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"\"\"\"string\"\"\"\n",
"from hp3parclient import exceptions as hpexceptions\n",
"from oslo.config import cfg\n",
"from cinder import exception\n",
"from cinder.openstack.common import log as logging\n",
"from cinder import utils\n",
"import cinder.volume.driver\n",
"from cinder.volume.drivers.san.hp import hp_3par_common as hpcommon\n",
"from cinder.volume.drivers.san import san\n",
"VAR_0 = 1.1\n",
"VAR_1 = logging.getLogger(__name__)\n",
"\"\"\"string\"\"\"\n",
"def __init__(self, *VAR_2, **VAR_3):...\n",
"super(CLASS_0, self).__init__(*VAR_2, **kwargs)\n",
"self.common = None\n",
"self.configuration.append_config_values(hpcommon.hp3par_opts)\n",
"self.configuration.append_config_values(san.san_opts)\n",
"def FUNC_0(self):...\n",
"return hpcommon.HP3PARCommon(self.configuration)\n"
] | [
"\"\"\"\nVolume driver for HP 3PAR Storage array.\nThis driver requires 3.1.2 MU2 firmware on the 3PAR array.\n\nYou will need to install the python hp3parclient.\nsudo pip install hp3parclient\n\nSet the following in the cinder.conf file to enable the\n3PAR Fibre Channel Driver along with the required flags:\n\nvolume_driver=cinder.volume.drivers.san.hp.hp_3par_fc.HP3PARFCDriver\n\"\"\"\n",
"from hp3parclient import exceptions as hpexceptions\n",
"from oslo.config import cfg\n",
"from cinder import exception\n",
"from cinder.openstack.common import log as logging\n",
"from cinder import utils\n",
"import cinder.volume.driver\n",
"from cinder.volume.drivers.san.hp import hp_3par_common as hpcommon\n",
"from cinder.volume.drivers.san import san\n",
"VERSION = 1.1\n",
"LOG = logging.getLogger(__name__)\n",
"\"\"\"OpenStack Fibre Channel driver to enable 3PAR storage array.\n\n Version history:\n 1.0 - Initial driver\n 1.1 - QoS, extend volume, multiple iscsi ports, remove domain,\n session changes, faster clone, requires 3.1.2 MU2 firmware,\n copy volume <--> Image.\n \"\"\"\n",
"def __init__(self, *args, **kwargs):...\n",
"super(HP3PARFCDriver, self).__init__(*args, **kwargs)\n",
"self.common = None\n",
"self.configuration.append_config_values(hpcommon.hp3par_opts)\n",
"self.configuration.append_config_values(san.san_opts)\n",
"def _init_common(self):...\n",
"return hpcommon.HP3PARCommon(self.configuration)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_9(self):...\n",
"VAR_2 = EngineAuthRequest.blank('/auth/google')\n",
"VAR_2._load_session()\n",
"VAR_17 = VAR_2.get_messages()\n",
"self.assertEquals(VAR_17, None)\n",
"VAR_2.add_message('TEST MESSAGE')\n",
"VAR_17 = VAR_2.get_messages()\n",
"self.assertEquals(VAR_17, [{'level': None, 'message': 'TEST MESSAGE'}])\n",
"VAR_17 = VAR_2.get_messages()\n",
"self.assertEquals(VAR_17, None)\n",
"VAR_2.add_message('TEST1', 'error')\n",
"VAR_2.add_message('TEST2', 'success')\n",
"VAR_17 = VAR_2.get_messages()\n",
"self.assertEquals(VAR_17, [{'level': 'error', 'message': 'TEST1'}, {'level':\n 'success', 'message': 'TEST2'}])\n",
"VAR_17 = VAR_2.get_messages()\n",
"self.assertEquals(VAR_17, None)\n",
"VAR_2.add_message('TEST1', 'error')\n",
"VAR_2.add_message('TEST2', 'success', '_mykey')\n",
"VAR_17 = VAR_2.get_messages()\n",
"self.assertEquals(VAR_17, [{'level': 'error', 'message': 'TEST1'}])\n",
"VAR_18 = VAR_2.get_messages('_mykey')\n",
"self.assertEquals(VAR_18, [{'level': 'success', 'message': 'TEST2'}])\n",
"VAR_17 = VAR_2.get_messages()\n",
"self.assertEquals(VAR_17, None)\n",
"VAR_18 = VAR_2.get_messages()\n",
"self.assertEquals(VAR_18, None)\n"
] | [
"def test_add_message(self):...\n",
"req = EngineAuthRequest.blank('/auth/google')\n",
"req._load_session()\n",
"msgs = req.get_messages()\n",
"self.assertEquals(msgs, None)\n",
"req.add_message('TEST MESSAGE')\n",
"msgs = req.get_messages()\n",
"self.assertEquals(msgs, [{'level': None, 'message': 'TEST MESSAGE'}])\n",
"msgs = req.get_messages()\n",
"self.assertEquals(msgs, None)\n",
"req.add_message('TEST1', 'error')\n",
"req.add_message('TEST2', 'success')\n",
"msgs = req.get_messages()\n",
"self.assertEquals(msgs, [{'level': 'error', 'message': 'TEST1'}, {'level':\n 'success', 'message': 'TEST2'}])\n",
"msgs = req.get_messages()\n",
"self.assertEquals(msgs, None)\n",
"req.add_message('TEST1', 'error')\n",
"req.add_message('TEST2', 'success', '_mykey')\n",
"msgs = req.get_messages()\n",
"self.assertEquals(msgs, [{'level': 'error', 'message': 'TEST1'}])\n",
"msgs_key = req.get_messages('_mykey')\n",
"self.assertEquals(msgs_key, [{'level': 'success', 'message': 'TEST2'}])\n",
"msgs = req.get_messages()\n",
"self.assertEquals(msgs, None)\n",
"msgs_key = req.get_messages()\n",
"self.assertEquals(msgs_key, None)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_13(self, VAR_10, VAR_11, VAR_12, VAR_13):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_24 = self.common._cli_run('createhost -persona %s -domain %s %s %s' % (\n VAR_13, VAR_12, VAR_10, ' '.join(VAR_11)), None)\n",
"if VAR_24 and len(VAR_24) > 1:\n",
"return self.common.parse_create_host_error(VAR_10, VAR_24)\n",
"return VAR_10\n"
] | [
"def _create_3par_fibrechan_host(self, hostname, wwn, domain, persona_id):...\n",
"\"\"\"docstring\"\"\"\n",
"out = self.common._cli_run('createhost -persona %s -domain %s %s %s' % (\n persona_id, domain, hostname, ' '.join(wwn)), None)\n",
"if out and len(out) > 1:\n",
"return self.common.parse_create_host_error(hostname, out)\n",
"return hostname\n"
] | [
0,
0,
2,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"\"\"\"\nModule for gathering disk information\n\"\"\"\n",
"import logging\n",
"import salt.utils\n",
"VAR_0 = logging.getLogger(__name__)\n",
"def __virtual__():...\n",
"\"\"\"docstring\"\"\"\n",
"if salt.utils.is_windows():\n",
"return False\n",
"return 'disk'\n"
] | [
"\"\"\"\nModule for gathering disk information\n\"\"\"\n",
"import logging\n",
"import salt.utils\n",
"log = logging.getLogger(__name__)\n",
"def __virtual__():...\n",
"\"\"\"docstring\"\"\"\n",
"if salt.utils.is_windows():\n",
"return False\n",
"return 'disk'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Assign'",
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_0(VAR_3):...\n",
"@functools.wraps(VAR_3)...\n",
"VAR_23 = VAR_5.pop('timeout', None)\n",
"VAR_24 = eventlet.spawn(VAR_3, self, *VAR_4, **kwargs)\n",
"if VAR_23 is None:\n",
"return VAR_24.wait()\n",
"VAR_36 = eventlet.spawn_after(VAR_23, VAR_24.kill)\n",
"VAR_48 = VAR_24.wait()\n",
"VAR_36.cancel()\n",
"return FUNC_1\n",
"return VAR_48\n"
] | [
"def with_timeout(f):...\n",
"@functools.wraps(f)...\n",
"timeout = kwargs.pop('timeout', None)\n",
"gt = eventlet.spawn(f, self, *args, **kwargs)\n",
"if timeout is None:\n",
"return gt.wait()\n",
"kill_thread = eventlet.spawn_after(timeout, gt.kill)\n",
"res = gt.wait()\n",
"kill_thread.cancel()\n",
"return __inner\n",
"return res\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Return'"
] |
[
"\"\"\"Auth management UI handlers.\"\"\"\n",
"import functools\n",
"import json\n",
"import os\n",
"import re\n",
"import webapp2\n",
"from components import template\n",
"from components import utils\n",
"from . import acl\n",
"from . import rest_api\n",
"from .. import api\n",
"from .. import change_log\n",
"from .. import handler\n",
"from .. import model\n",
"from .. import replication\n",
"VAR_0 = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'templates')\n",
"VAR_1 = 'Unknown'\n",
"VAR_2 = None\n",
"VAR_3 = ()\n",
"def FUNC_0(VAR_4, VAR_5=None, VAR_6=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1 = VAR_4\n",
"VAR_2 = VAR_6\n",
"if VAR_5 is not None:\n",
"assert all(issubclass(VAR_12, CLASS_6) for VAR_12 in VAR_5)\n",
"template.bootstrap({'auth': VAR_0})\n",
"VAR_3 = tuple(VAR_5)\n",
"def FUNC_1():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_8 = []\n",
"for VAR_12 in VAR_3:\n",
"VAR_8.extend(VAR_12.get_webapp2_routes())\n",
"VAR_8.extend([webapp2.Route('/auth', CLASS_5), webapp2.Route(\n '/auth/bootstrap', CLASS_1, name='bootstrap'), webapp2.Route(\n '/auth/bootstrap/oauth', CLASS_2), webapp2.Route('/auth/link', CLASS_3)])\n",
"return VAR_8\n"
] | [
"\"\"\"Auth management UI handlers.\"\"\"\n",
"import functools\n",
"import json\n",
"import os\n",
"import re\n",
"import webapp2\n",
"from components import template\n",
"from components import utils\n",
"from . import acl\n",
"from . import rest_api\n",
"from .. import api\n",
"from .. import change_log\n",
"from .. import handler\n",
"from .. import model\n",
"from .. import replication\n",
"TEMPLATES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),\n 'templates')\n",
"_ui_app_name = 'Unknown'\n",
"_ui_data_callback = None\n",
"_ui_navbar_tabs = ()\n",
"def configure_ui(app_name, ui_tabs=None, ui_data_callback=None):...\n",
"\"\"\"docstring\"\"\"\n",
"_ui_app_name = app_name\n",
"_ui_data_callback = ui_data_callback\n",
"if ui_tabs is not None:\n",
"assert all(issubclass(cls, UINavbarTabHandler) for cls in ui_tabs)\n",
"template.bootstrap({'auth': TEMPLATES_DIR})\n",
"_ui_navbar_tabs = tuple(ui_tabs)\n",
"def get_ui_routes():...\n",
"\"\"\"docstring\"\"\"\n",
"routes = []\n",
"for cls in _ui_navbar_tabs:\n",
"routes.extend(cls.get_webapp2_routes())\n",
"routes.extend([webapp2.Route('/auth', MainHandler), webapp2.Route(\n '/auth/bootstrap', BootstrapHandler, name='bootstrap'), webapp2.Route(\n '/auth/bootstrap/oauth', BootstrapOAuthHandler), webapp2.Route(\n '/auth/link', LinkToPrimaryHandler)])\n",
"return routes\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Expr'",
"Import'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Assert'",
"Expr'",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Expr'",
"Expr'",
"Return'"
] |
[
"def __init__(self, VAR_15):...\n",
"self._store = []\n",
"self._ds = VAR_15\n"
] | [
"def __init__(self, dirstructure):...\n",
"self._store = []\n",
"self._ds = dirstructure\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"for VAR_28, VAR_27 in zip(self.output, self.rule.output):\n",
"if VAR_28 in self.dynamic_output:\n",
"VAR_29 = self.expand_dynamic(VAR_27, VAR_13=self.wildcards, VAR_14=_IOFile.\n dynamic_fill)\n",
"yield VAR_28\n",
"if not VAR_29:\n",
"yield VAR_27\n",
"for VAR_28, _ in VAR_29:\n",
"yield IOFile(VAR_28, self.rule)\n"
] | [
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"for f, f_ in zip(self.output, self.rule.output):\n",
"if f in self.dynamic_output:\n",
"expansion = self.expand_dynamic(f_, restriction=self.wildcards, omit_value=\n _IOFile.dynamic_fill)\n",
"yield f\n",
"if not expansion:\n",
"yield f_\n",
"for f, _ in expansion:\n",
"yield IOFile(f, self.rule)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
7
] | [
"Condition",
"Docstring",
"For",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"For",
"Expr'"
] |
[
"def FUNC_2(self, VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1 = self.undot(VAR_1)\n",
"if VAR_1 == '':\n",
"return [self, '']\n",
"if '/' in VAR_1:\n",
"VAR_34, VAR_23 = VAR_1.split('/', 1)\n",
"VAR_34 = VAR_1\n",
"if VAR_34 in self.nodes:\n",
"VAR_23 = ''\n",
"return self.nodes[VAR_34]._find(VAR_23)\n",
"return [self, VAR_1]\n"
] | [
"def _find(self, vpath):...\n",
"\"\"\"docstring\"\"\"\n",
"vpath = self.undot(vpath)\n",
"if vpath == '':\n",
"return [self, '']\n",
"if '/' in vpath:\n",
"name, rem = vpath.split('/', 1)\n",
"name = vpath\n",
"if name in self.nodes:\n",
"rem = ''\n",
"return self.nodes[name]._find(rem)\n",
"return [self, vpath]\n"
] | [
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Return'"
] |
[
"def FUNC_4():...\n",
"FUNC_1()\n",
"tools.bulksql(open('sql/data.sql', 'r').read())\n"
] | [
"def create_dummy_data():...\n",
"drop()\n",
"tools.bulksql(open('sql/data.sql', 'r').read())\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"from discord_service import DiscordService\n",
"from welcome_message import WelcomeMessage\n",
"from discord_mention_factory import DiscordMentionFactory\n",
"from user_leave_notification import UserLeaveNotification\n",
"from dependency_injection import Dependencies\n",
"import json\n",
"def FUNC_0(VAR_0):...\n",
"return json.load(f)\n"
] | [
"from discord_service import DiscordService\n",
"from welcome_message import WelcomeMessage\n",
"from discord_mention_factory import DiscordMentionFactory\n",
"from user_leave_notification import UserLeaveNotification\n",
"from dependency_injection import Dependencies\n",
"import json\n",
"def readJsonFile(file_name):...\n",
"return json.load(f)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_12(VAR_10, VAR_11=None):...\n",
"\"\"\"docstring\"\"\"\n",
"for VAR_1 in oauth.remote_apps.values():\n",
"FUNC_6(VAR_1)\n"
] | [
"def oauth_logout_handler(sender_app, user=None):...\n",
"\"\"\"docstring\"\"\"\n",
"for remote in oauth.remote_apps.values():\n",
"token_delete(remote)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"For",
"Expr'"
] |
[
"def FUNC_25(*VAR_19, **VAR_20):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_49 = CLASS_1('\\n' + VAR_19[0], VAR_26=ka)\n",
"VAR_50 = VAR_49.render(VAR_20)\n",
"return VAR_50[1:]\n"
] | [
"def tpl(*args, **ka):...\n",
"\"\"\"docstring\"\"\"\n",
"tp = MetalTemplate('\\n' + args[0], tags=ka)\n",
"x = tp.render(ka)\n",
"return x[1:]\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_14(VAR_32, VAR_5=False):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_52 = {}\n",
"for VAR_81, info in VAR_32:\n",
"VAR_52[VAR_81.output(VAR_5)] = len(info[0])\n",
"return VAR_52\n"
] | [
"def _get_singlekws(skw_matches, spires=False):...\n",
"\"\"\"docstring\"\"\"\n",
"output = {}\n",
"for single_keyword, info in skw_matches:\n",
"output[single_keyword.output(spires)] = len(info[0])\n",
"return output\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Assign'",
"Return'"
] |
[
"def FUNC_56(self):...\n",
"VAR_40 = VAR_37.cursor()\n",
"VAR_40.execute('begin')\n",
"VAR_40.execute('create table t1(i int)')\n",
"VAR_40.execute('rollback')\n",
"def FUNC_62():...\n",
"VAR_37.execute('rollback')\n",
"self.assertRaises(pyodbc.Error, FUNC_62)\n"
] | [
"def _test_context_manager(self):...\n",
"cursor = cnxn.cursor()\n",
"cursor.execute('begin')\n",
"cursor.execute('create table t1(i int)')\n",
"cursor.execute('rollback')\n",
"def test():...\n",
"cnxn.execute('rollback')\n",
"self.assertRaises(pyodbc.Error, test)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Expr'",
"Expr'"
] |
[
"def FUNC_0(VAR_3):...\n",
"@functools.wraps(VAR_3)...\n",
"VAR_23 = VAR_5.pop('timeout', None)\n",
"VAR_24 = eventlet.spawn(VAR_3, self, *VAR_4, **kwargs)\n",
"if VAR_23 is None:\n",
"return VAR_24.wait()\n",
"VAR_36 = eventlet.spawn_after(VAR_23, VAR_24.kill)\n",
"VAR_48 = VAR_24.wait()\n",
"VAR_36.cancel()\n",
"return FUNC_1\n",
"return VAR_48\n"
] | [
"def with_timeout(f):...\n",
"@functools.wraps(f)...\n",
"timeout = kwargs.pop('timeout', None)\n",
"gt = eventlet.spawn(f, self, *args, **kwargs)\n",
"if timeout is None:\n",
"return gt.wait()\n",
"kill_thread = eventlet.spawn_after(timeout, gt.kill)\n",
"res = gt.wait()\n",
"kill_thread.cancel()\n",
"return __inner\n",
"return res\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Return'"
] |
[
"def __str__(self):...\n",
"return self.name\n"
] | [
"def __str__(self):...\n",
"return self.name\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_17(self):...\n",
"VAR_26 = []\n",
"self.mock(bot_main, 'post_error_task', lambda *VAR_24: VAR_26.append(VAR_24))\n",
"def FUNC_24(VAR_25, VAR_23, *VAR_24):...\n",
"if VAR_23 == 'on_after_task':\n",
"VAR_50, VAR_51, VAR_5, VAR_52 = VAR_24\n",
"self.mock(bot_main, 'call_hook', FUNC_24)\n",
"self.assertEqual(False, VAR_50)\n",
"VAR_22 = self._mock_popen(VAR_1=1)\n",
"self.assertEqual(True, VAR_51)\n",
"VAR_19 = {'command': ['echo', 'hi'], 'dimensions': {'pool': 'default'},\n 'grace_period': 30, 'hard_timeout': 60, 'io_timeout': 60, 'task_id': '24'}\n",
"self.assertEqual({'pool': 'default'}, VAR_5)\n",
"bot_main.run_manifest(self.bot, VAR_19, time.time())\n",
"self.assertEqual(VAR_22, VAR_52)\n",
"VAR_6 = [(self.bot, 'Execution failed: internal error (1).', '24')]\n",
"self.assertEqual(VAR_6, VAR_26)\n"
] | [
"def test_run_manifest_internal_failure(self):...\n",
"posted = []\n",
"self.mock(bot_main, 'post_error_task', lambda *args: posted.append(args))\n",
"def call_hook(_botobj, name, *args):...\n",
"if name == 'on_after_task':\n",
"failure, internal_failure, dimensions, summary = args\n",
"self.mock(bot_main, 'call_hook', call_hook)\n",
"self.assertEqual(False, failure)\n",
"result = self._mock_popen(returncode=1)\n",
"self.assertEqual(True, internal_failure)\n",
"manifest = {'command': ['echo', 'hi'], 'dimensions': {'pool': 'default'},\n 'grace_period': 30, 'hard_timeout': 60, 'io_timeout': 60, 'task_id': '24'}\n",
"self.assertEqual({'pool': 'default'}, dimensions)\n",
"bot_main.run_manifest(self.bot, manifest, time.time())\n",
"self.assertEqual(result, summary)\n",
"expected = [(self.bot, 'Execution failed: internal error (1).', '24')]\n",
"self.assertEqual(expected, posted)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"FunctionDef'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"@login_required...\n",
"from .forms import FormatChecklistForm\n",
"VAR_15 = get_object_or_404(Candidate, id=candidate_id)\n",
"VAR_24 = FormatChecklistForm(VAR_2.POST, instance=candidate.thesis.\n format_checklist)\n",
"if VAR_24.is_valid():\n",
"VAR_24.handle_post(VAR_2.POST, VAR_15)\n",
"return HttpResponseRedirect(reverse('approve', kwargs={'candidate_id':\n candidate_id}))\n"
] | [
"@login_required...\n",
"from .forms import FormatChecklistForm\n",
"candidate = get_object_or_404(Candidate, id=candidate_id)\n",
"format_form = FormatChecklistForm(request.POST, instance=candidate.thesis.\n format_checklist)\n",
"if format_form.is_valid():\n",
"format_form.handle_post(request.POST, candidate)\n",
"return HttpResponseRedirect(reverse('approve', kwargs={'candidate_id':\n candidate_id}))\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"ImportFrom'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Return'"
] |
[
"@staticmethod...\n",
"VAR_5 = urlparse(VAR_10)\n",
"VAR_17 = VAR_9, VAR_5, VAR_1, VAR_6, VAR_11\n",
"if VAR_10 is None:\n",
"VAR_25 = CLASS_1(VAR_9, VAR_1, VAR_6, VAR_11, **kwargs)\n",
"if VAR_5.scheme.startswith('git'):\n",
"VAR_25.urlstring = VAR_10\n",
"VAR_25 = CLASS_2(*VAR_17, **kwargs)\n",
"if VAR_5.scheme.startswith('svn'):\n",
"return VAR_25\n",
"VAR_25 = CLASS_3(*VAR_17, **kwargs)\n"
] | [
"@staticmethod...\n",
"url = urlparse(urlstring)\n",
"args = name, url, directory, options, conf\n",
"if urlstring is None:\n",
"res = Subproject(name, directory, options, conf, **kwargs)\n",
"if url.scheme.startswith('git'):\n",
"res.urlstring = urlstring\n",
"res = GitSubproject(*args, **kwargs)\n",
"if url.scheme.startswith('svn'):\n",
"return res\n",
"res = SvnSubproject(*args, **kwargs)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'"
] |
[
"import logging\n",
"import model\n",
"import utils\n",
"from utils import DateTime, ErrorMessage, Redirect\n",
"from utils import db, get_message, html_escape, users\n",
"from access import check_user_role\n",
"VAR_5 = 10\n",
"def FUNC_2(self, VAR_6, VAR_7):...\n",
"\"\"\"docstring\"\"\"\n",
"if isinstance(VAR_7, unicode):\n",
"if isinstance(VAR_7, str):\n",
"return u'<input name=\"%s\" value=\"%s\" size=%d>' % (html_escape(VAR_6),\n html_escape(VAR_7), self.input_size)\n",
"VAR_7 = VAR_7.decode('utf-8')\n",
"if VAR_7 is not None:\n",
"VAR_7 = str(VAR_7)\n",
"VAR_7 = ''\n"
] | [
"import logging\n",
"import model\n",
"import utils\n",
"from utils import DateTime, ErrorMessage, Redirect\n",
"from utils import db, get_message, html_escape, users\n",
"from access import check_user_role\n",
"input_size = 10\n",
"def text_input(self, name, value):...\n",
"\"\"\"docstring\"\"\"\n",
"if isinstance(value, unicode):\n",
"if isinstance(value, str):\n",
"return u'<input name=\"%s\" value=\"%s\" size=%d>' % (html_escape(name),\n html_escape(value), self.input_size)\n",
"value = value.decode('utf-8')\n",
"if value is not None:\n",
"value = str(value)\n",
"value = ''\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"FunctionDef'",
"Docstring",
"Condition",
"Condition",
"Return'",
"Assign'",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_10(self, VAR_9, VAR_10=None, VAR_11=None, VAR_15=False):...\n",
"get_and_check_project(VAR_9, VAR_11, ('change_project',))\n",
"VAR_28 = self.queryset.get(VAR_10=pk, VAR_1=project_pk)\n",
"VAR_26 = CLASS_1(VAR_28, data=request.data, VAR_15=partial)\n",
"VAR_26.is_valid(raise_exception=True)\n",
"VAR_26.save()\n",
"scheduler.process_pending_tasks(background=True)\n",
"return Response(VAR_26.data)\n"
] | [
"def update(self, request, pk=None, project_pk=None, partial=False):...\n",
"get_and_check_project(request, project_pk, ('change_project',))\n",
"task = self.queryset.get(pk=pk, project=project_pk)\n",
"serializer = TaskSerializer(task, data=request.data, partial=partial)\n",
"serializer.is_valid(raise_exception=True)\n",
"serializer.save()\n",
"scheduler.process_pending_tasks(background=True)\n",
"return Response(serializer.data)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_15():...\n",
"VAR_37 = CLASS_6()\n",
"VAR_37.exe_models.choices = FUNC_11()\n",
"return VAR_37\n"
] | [
"def get_run_form():...\n",
"run_form = RunForm()\n",
"run_form.exe_models.choices = get_models_choices()\n",
"return run_form\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_2(self, VAR_6):...\n",
"self.cursor.execute(self.SQL_GET_JSON % (self.name, VAR_6))\n",
"return self.cursor.fetchone()\n"
] | [
"def get(self, id):...\n",
"self.cursor.execute(self.SQL_GET_JSON % (self.name, id))\n",
"return self.cursor.fetchone()\n"
] | [
0,
4,
0
] | [
"FunctionDef'",
"Expr'",
"Return'"
] |
[
"def FUNC_31(self, VAR_17, VAR_15):...\n",
"VAR_28 = FileManager()\n",
"VAR_29 = ImageManager()\n",
"for file_col in VAR_17.files:\n",
"if self.is_file(file_col):\n",
"for file_col in VAR_17.files:\n",
"VAR_28.save_file(VAR_17.files[file_col], getattr(VAR_15, file_col))\n",
"if self.is_image(file_col):\n",
"VAR_29.save_file(VAR_17.files[file_col], getattr(VAR_15, file_col))\n"
] | [
"def _add_files(self, this_request, item):...\n",
"fm = FileManager()\n",
"im = ImageManager()\n",
"for file_col in this_request.files:\n",
"if self.is_file(file_col):\n",
"for file_col in this_request.files:\n",
"fm.save_file(this_request.files[file_col], getattr(item, file_col))\n",
"if self.is_image(file_col):\n",
"im.save_file(this_request.files[file_col], getattr(item, file_col))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"For",
"Condition",
"For",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_13(VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1.value = beamr.interpreters.Macro(VAR_1.value)\n",
"return VAR_1\n"
] | [
"def t_MACRO(t):...\n",
"\"\"\"docstring\"\"\"\n",
"t.value = beamr.interpreters.Macro(t.value)\n",
"return t\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_1(self):...\n",
"VAR_7.environ.pop('SWARMING_BOT_ID', None)\n",
"VAR_7.chdir(self.old_cwd)\n",
"file_path.rmtree(self.root_dir)\n",
"super(CLASS_0, self).tearDown()\n"
] | [
"def tearDown(self):...\n",
"os.environ.pop('SWARMING_BOT_ID', None)\n",
"os.chdir(self.old_cwd)\n",
"file_path.rmtree(self.root_dir)\n",
"super(TestBotMain, self).tearDown()\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_8(self):...\n",
"if app.config['PERMANENT_SESSION_LIFETIME']:\n",
"VAR_15.permanent = True\n",
"VAR_37 = datetime.datetime.utcnow()\n",
"VAR_38 = app.config['PERMANENT_SESSION_LIFETIME']\n",
"VAR_39 = VAR_37 + datetime.timedelta(seconds=lifetime)\n",
"VAR_15['expiration'] = VAR_39\n",
"if not VAR_15.get('max_expiration'):\n",
"VAR_45 = app.config['MAX_PERMANENT_SESSION_LIFETIME']\n",
"if not VAR_45:\n",
"VAR_45 = VAR_38\n",
"VAR_46 = VAR_37 + datetime.timedelta(seconds=max_lifetime)\n",
"VAR_15['max_expiration'] = VAR_46\n"
] | [
"def set_expiration(self):...\n",
"if app.config['PERMANENT_SESSION_LIFETIME']:\n",
"session.permanent = True\n",
"now = datetime.datetime.utcnow()\n",
"lifetime = app.config['PERMANENT_SESSION_LIFETIME']\n",
"expiration = now + datetime.timedelta(seconds=lifetime)\n",
"session['expiration'] = expiration\n",
"if not session.get('max_expiration'):\n",
"max_lifetime = app.config['MAX_PERMANENT_SESSION_LIFETIME']\n",
"if not max_lifetime:\n",
"max_lifetime = lifetime\n",
"max_expiration = now + datetime.timedelta(seconds=max_lifetime)\n",
"session['max_expiration'] = max_expiration\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_5(VAR_9):...\n",
"if VAR_9 is None:\n",
"return\n",
"VAR_25 = ['%Y-%m-%d', '%Y-%m-%dT%H:%M', '%Y-%m-%d %H:%M']\n",
"for mask in VAR_25:\n",
"return datetime.strptime(VAR_9, mask)\n"
] | [
"def parse_date(date_):...\n",
"if date_ is None:\n",
"return\n",
"masks = ['%Y-%m-%d', '%Y-%m-%dT%H:%M', '%Y-%m-%d %H:%M']\n",
"for mask in masks:\n",
"return datetime.strptime(date_, mask)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"For",
"Return'"
] |
[
"def FUNC_23(self, VAR_21=None):...\n",
"VAR_22 = {}\n",
"for VAR_46 in self._meta.fields:\n",
"if VAR_21 and VAR_46.name in VAR_21:\n",
"if VAR_22:\n",
"VAR_42 = VAR_46.value_from_object(self)\n",
"if VAR_46.null and VAR_42 is None:\n",
"VAR_42 = VAR_46.clean(VAR_42)\n",
"VAR_22[VAR_46.name] = e.messages\n",
"VAR_43 = getattr(self, 'clean_%s' % VAR_46.attname, None)\n",
"if callable(VAR_43):\n",
"setattr(self, VAR_46.attname, VAR_42)\n",
"VAR_42 = VAR_43(VAR_42)\n",
"VAR_22.setdefault(VAR_46.name, []).extend(e.messages)\n"
] | [
"def clean_fields(self, exclude=None):...\n",
"errors = {}\n",
"for f in self._meta.fields:\n",
"if exclude and f.name in exclude:\n",
"if errors:\n",
"raw_value = f.value_from_object(self)\n",
"if f.null and raw_value is None:\n",
"raw_value = f.clean(raw_value)\n",
"errors[f.name] = e.messages\n",
"clean_method = getattr(self, 'clean_%s' % f.attname, None)\n",
"if callable(clean_method):\n",
"setattr(self, f.attname, raw_value)\n",
"raw_value = clean_method(raw_value)\n",
"errors.setdefault(f.name, []).extend(e.messages)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_5(self, VAR_3):...\n",
"VAR_4 = self.value_from_object(VAR_3)\n",
"return base64.b64encode(self.get_prep_value(VAR_4))\n"
] | [
"def value_to_string(self, obj):...\n",
"value = self.value_from_object(obj)\n",
"return base64.b64encode(self.get_prep_value(value))\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def FUNC_0(self):...\n",
"def FUNC_9(VAR_0):...\n",
"if VAR_0.do_not_run is True:\n",
"VAR_15 = 'DNR'\n",
"VAR_15 = 'RUN'\n",
"VAR_15 += '_{}'.format(VAR_0.id)\n",
"return VAR_15\n"
] | [
"def generate_graphviz_plot(self):...\n",
"def run_status(obj):...\n",
"if obj.do_not_run is True:\n",
"s = 'DNR'\n",
"s = 'RUN'\n",
"s += '_{}'.format(obj.id)\n",
"return s\n"
] | [
0,
0,
0,
1,
1,
1,
1
] | [
"FunctionDef'",
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"AugAssign'",
"Return'"
] |
[
"def FUNC_11(self, VAR_17):...\n",
"VAR_36 = {'cmd': 'terminate', 'task_id': VAR_17}\n",
"self.send_response(VAR_36)\n"
] | [
"def _cmd_terminate(self, task_id):...\n",
"out = {'cmd': 'terminate', 'task_id': task_id}\n",
"self.send_response(out)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_29(self, VAR_5):...\n",
"def FUNC_31():...\n",
"VAR_24 = vim.eval('tagfiles()')\n",
"VAR_25 = VAR_0.getcwd()\n",
"return [VAR_0.path.join(VAR_25, x) for x in VAR_24]\n"
] | [
"def _AddTagsFilesIfNeeded(self, extra_data):...\n",
"def GetTagFiles():...\n",
"tag_files = vim.eval('tagfiles()')\n",
"current_working_directory = os.getcwd()\n",
"return [os.path.join(current_working_directory, x) for x in tag_files]\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_1(VAR_2, VAR_3, VAR_4=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_29, VAR_30 = [], {}\n",
"if not VAR_2:\n",
"return VAR_29\n",
"if VAR_3:\n",
"for VAR_64 in VAR_3:\n",
"for VAR_6 in VAR_2:\n",
"VAR_63 = VAR_3[VAR_64]\n",
"VAR_50 = True\n",
"return VAR_29\n",
"if not isinstance(VAR_63, (tuple, list)):\n",
"for VAR_64, VAR_63 in iteritems(VAR_30):\n",
"if VAR_63 is True:\n",
"VAR_30[VAR_64] = VAR_63\n",
"if not VAR_54.compare(getattr(VAR_6, VAR_64, None), VAR_63[0], VAR_63[1]):\n",
"if VAR_50:\n",
"VAR_63 = 'not None', VAR_63\n",
"if VAR_63 is False:\n",
"VAR_50 = False\n",
"VAR_29.append(VAR_6)\n",
"VAR_63 = 'None', VAR_63\n",
"if isinstance(VAR_63, string_types) and VAR_63.startswith('^'):\n",
"if VAR_4 and len(VAR_29) - 1 == VAR_4:\n",
"VAR_63 = '^', VAR_63[1:]\n",
"VAR_63 = '=', VAR_63\n"
] | [
"def _filter(data, filters, limit=None):...\n",
"\"\"\"docstring\"\"\"\n",
"out, _filters = [], {}\n",
"if not data:\n",
"return out\n",
"if filters:\n",
"for f in filters:\n",
"for d in data:\n",
"fval = filters[f]\n",
"add = True\n",
"return out\n",
"if not isinstance(fval, (tuple, list)):\n",
"for f, fval in iteritems(_filters):\n",
"if fval is True:\n",
"_filters[f] = fval\n",
"if not frappe.compare(getattr(d, f, None), fval[0], fval[1]):\n",
"if add:\n",
"fval = 'not None', fval\n",
"if fval is False:\n",
"add = False\n",
"out.append(d)\n",
"fval = 'None', fval\n",
"if isinstance(fval, string_types) and fval.startswith('^'):\n",
"if limit and len(out) - 1 == limit:\n",
"fval = '^', fval[1:]\n",
"fval = '=', fval\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Return'",
"Condition",
"For",
"For",
"Assign'",
"Assign'",
"Return'",
"Condition",
"For",
"Condition",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_0(self):...\n",
"VAR_9 = request.headers.get('Authorization')\n",
"return VAR_9\n"
] | [
"def post(self):...\n",
"auth_header = request.headers.get('Authorization')\n",
"return auth_header\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"\"\"\"\nItem Exporters are used to export/serialize items into sqlite3 database.\n\"\"\"\n",
"from scrapy.exporters import BaseItemExporter\n",
"import sqlite3\n",
"def __init__(self, VAR_0, **VAR_1):...\n",
"self._configure(VAR_1)\n",
"self.conn = sqlite3.connect(VAR_0.name)\n",
"self.conn.execute('string')\n",
"self.conn.commit()\n",
"self.conn.text_factory = str\n",
"def FUNC_0(self, VAR_2):...\n",
"self.start_exporting()\n",
"self.conn.execute(\n \"INSERT INTO webpages(title, content, url) VALUES ('%s', '%s', '%s')\" %\n (VAR_2['title'], VAR_2['content'], VAR_2['url']))\n",
"self.conn.commit()\n",
"self.finish_exporting()\n",
"def __del__(self):...\n",
"self.conn.close()\n"
] | [
"\"\"\"\nItem Exporters are used to export/serialize items into sqlite3 database.\n\"\"\"\n",
"from scrapy.exporters import BaseItemExporter\n",
"import sqlite3\n",
"def __init__(self, file, **kwargs):...\n",
"self._configure(kwargs)\n",
"self.conn = sqlite3.connect(file.name)\n",
"self.conn.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS `webpages`(\n `id` INTEGER PRIMARY KEY,\n `title` VARCHAR DEFAULT NULL,\n `content` VARCHAR DEFAULT NULL,\n `url` VARCHAR DEFAULT NULL UNIQUE \n );\n \"\"\"\n )\n",
"self.conn.commit()\n",
"self.conn.text_factory = str\n",
"def export_item(self, item):...\n",
"self.start_exporting()\n",
"self.conn.execute(\n \"INSERT INTO webpages(title, content, url) VALUES ('%s', '%s', '%s')\" %\n (item['title'], item['content'], item['url']))\n",
"self.conn.commit()\n",
"self.finish_exporting()\n",
"def __del__(self):...\n",
"self.conn.close()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0
] | [
"Expr'",
"ImportFrom'",
"Import'",
"FunctionDef'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"FunctionDef'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_3(self):...\n",
"super(CLASS_1, self).prepare()\n",
"self._ = {'id': '', 'title': '', 'content': '', 'is_private': False,\n 'group': '', 'tmpatts': [], 'atts': [], 'tags': [], 'alert': ''}\n"
] | [
"def prepare(self):...\n",
"super(EditAnnHandler, self).prepare()\n",
"self._ = {'id': '', 'title': '', 'content': '', 'is_private': False,\n 'group': '', 'tmpatts': [], 'atts': [], 'tags': [], 'alert': ''}\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'"
] |
[
"def FUNC_62():...\n",
"VAR_37.execute('rollback')\n"
] | [
"def test():...\n",
"cnxn.execute('rollback')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_7(self, VAR_0):...\n",
"return users.is_current_user_admin() and self.request.get('debug'\n ) == '1' and VAR_0.status != analysis_status.RUNNING and VAR_0.try_job_status != analysis_status.RUNNING\n"
] | [
"def _ShowInputUI(self, analysis):...\n",
"return users.is_current_user_admin() and self.request.get('debug'\n ) == '1' and analysis.status != analysis_status.RUNNING and analysis.try_job_status != analysis_status.RUNNING\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def __init__(self, VAR_1, VAR_2, VAR_3, VAR_4=None):...\n",
"\"\"\"docstring\"\"\"\n",
"self.node_ip_address = VAR_3\n",
"self.redis_client = redis.StrictRedis(host=redis_ip_address, port=\n redis_port, password=redis_password)\n",
"self.log_files = {}\n",
"self.log_file_handles = {}\n",
"self.files_to_ignore = set()\n"
] | [
"def __init__(self, redis_ip_address, redis_port, node_ip_address,...\n",
"\"\"\"docstring\"\"\"\n",
"self.node_ip_address = node_ip_address\n",
"self.redis_client = redis.StrictRedis(host=redis_ip_address, port=\n redis_port, password=redis_password)\n",
"self.log_files = {}\n",
"self.log_file_handles = {}\n",
"self.files_to_ignore = set()\n"
] | [
0,
0,
6,
6,
6,
6,
6
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_0():...\n",
"VAR_3 = psycopg2.connect(host=config['HOST'], port=config['PORT'], database\n =config['NAME'], user=config['USER'], password=config['PASSWORD'])\n",
"VAR_4 = VAR_3.cursor()\n",
"VAR_4.execute('select * from reply_map')\n",
"VAR_5 = {}\n",
"for VAR_6 in VAR_4:\n",
"VAR_5[VAR_6[0]] = VAR_6[1]\n",
"VAR_3.commit()\n",
"return VAR_5\n"
] | [
"def get_all():...\n",
"connection = psycopg2.connect(host=config['HOST'], port=config['PORT'],\n database=config['NAME'], user=config['USER'], password=config['PASSWORD'])\n",
"cur = connection.cursor()\n",
"cur.execute('select * from reply_map')\n",
"out = {}\n",
"for row in cur:\n",
"out[row[0]] = row[1]\n",
"connection.commit()\n",
"return out\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"For",
"Assign'",
"Expr'",
"Return'"
] |
[
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = discord.Embed(VAR_2='Guide - ctrtransfer', VAR_3=discord.Color.orange()\n )\n",
"VAR_7.set_author(name='Plailect', url='https://3ds.guide/')\n",
"VAR_7.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n",
"VAR_7.url = 'https://3ds.guide/ctrtransfer'\n",
"VAR_7.description = 'How to do the 11.5.0-38 ctrtransfer'\n",
"await self.bot.say('', VAR_7=embed)\n"
] | [
"@commands.command()...\n",
"\"\"\"docstring\"\"\"\n",
"embed = discord.Embed(title='Guide - ctrtransfer', color=discord.Color.orange()\n )\n",
"embed.set_author(name='Plailect', url='https://3ds.guide/')\n",
"embed.set_thumbnail(url='https://3ds.guide/images/bio-photo.png')\n",
"embed.url = 'https://3ds.guide/ctrtransfer'\n",
"embed.description = 'How to do the 11.5.0-38 ctrtransfer'\n",
"await self.bot.say('', embed=embed)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_4(VAR_11, VAR_8, VAR_12):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_8 is None or VAR_12 is None or VAR_12(VAR_11, VAR_8):\n",
"return True\n",
"return False\n"
] | [
"def has_write_perm(user, group, is_member):...\n",
"\"\"\"docstring\"\"\"\n",
"if group is None or is_member is None or is_member(user, group):\n",
"return True\n",
"return False\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_27(self):...\n",
"VAR_13 = 1234\n",
"self.cursor.execute('create table t1(n int)')\n",
"self.cursor.execute('insert into t1 values (?)', VAR_13)\n",
"VAR_20 = self.cursor.execute('select n from t1').fetchone()[0]\n",
"self.assertEqual(VAR_20, VAR_13)\n"
] | [
"def test_int(self):...\n",
"value = 1234\n",
"self.cursor.execute('create table t1(n int)')\n",
"self.cursor.execute('insert into t1 values (?)', value)\n",
"result = self.cursor.execute('select n from t1').fetchone()[0]\n",
"self.assertEqual(result, value)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_1(VAR_5):...\n",
"if not VAR_6:\n",
"return\n",
"VAR_20 = UltiSnips_Manager._snips('', 1)\n",
"return\n",
"VAR_5['ultisnips_snippets'] = [{'trigger': x.trigger, 'description': x.\n description} for x in VAR_20]\n"
] | [
"def _AddUltiSnipsDataIfNeeded(extra_data):...\n",
"if not USE_ULTISNIPS_DATA:\n",
"return\n",
"rawsnips = UltiSnips_Manager._snips('', 1)\n",
"return\n",
"extra_data['ultisnips_snippets'] = [{'trigger': x.trigger, 'description': x\n .description} for x in rawsnips]\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Return'",
"Assign'"
] |
[
"import copy\n",
"import logging\n",
"import os\n",
"import sflock\n",
"from cuckoo.common.config import emit_options\n",
"from cuckoo.common.exceptions import CuckooOperationalError\n",
"from cuckoo.common.files import Folders, Files, Storage\n",
"from cuckoo.common.utils import validate_url, validate_hash\n",
"from cuckoo.common.virustotal import VirusTotalAPI\n",
"from cuckoo.core.database import Database\n",
"VAR_0 = logging.getLogger(__name__)\n",
"VAR_1 = Database()\n",
"def FUNC_0(self, VAR_2, VAR_3, VAR_4):...\n",
"if not VAR_4:\n",
"return\n",
"if validate_hash(VAR_4):\n",
"if validate_url(VAR_4):\n",
"VAR_21 = VirusTotalAPI().hash_fetch(VAR_4)\n",
"VAR_2['errors'].append('Error retrieving file hash: %s' % e)\n",
"VAR_18 = Files.create(VAR_3, VAR_4, VAR_21)\n",
"VAR_2['data'].append({'type': 'url', 'data': VAR_4})\n",
"VAR_2['errors'].append(\"'%s' was neither a valid hash or url\" % VAR_4)\n",
"return\n",
"VAR_2['data'].append({'type': 'file', 'data': VAR_18})\n",
"return\n",
"def FUNC_1(self, VAR_5, VAR_6):...\n",
"return\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_5 not in ('strings', 'files'):\n",
"VAR_0.error(\"Bad parameter '%s' for submit_type\", VAR_5)\n",
"VAR_13 = Folders.create_temp()\n",
"return False\n",
"VAR_14 = {'data': [], 'errors': []}\n",
"if VAR_5 == 'strings':\n",
"for VAR_4 in VAR_6:\n",
"if VAR_5 == 'files':\n",
"self._handle_string(VAR_14, VAR_13, VAR_4)\n",
"for entry in VAR_6:\n",
"return Database().add_submit(VAR_13, VAR_5, VAR_14)\n",
"VAR_22 = Storage.get_filename_from_path(entry['name'])\n",
"VAR_18 = Files.create(VAR_13, VAR_22, entry['data'])\n",
"VAR_14['data'].append({'type': 'file', 'data': VAR_18})\n"
] | [
"import copy\n",
"import logging\n",
"import os\n",
"import sflock\n",
"from cuckoo.common.config import emit_options\n",
"from cuckoo.common.exceptions import CuckooOperationalError\n",
"from cuckoo.common.files import Folders, Files, Storage\n",
"from cuckoo.common.utils import validate_url, validate_hash\n",
"from cuckoo.common.virustotal import VirusTotalAPI\n",
"from cuckoo.core.database import Database\n",
"log = logging.getLogger(__name__)\n",
"db = Database()\n",
"def _handle_string(self, submit, tmppath, line):...\n",
"if not line:\n",
"return\n",
"if validate_hash(line):\n",
"if validate_url(line):\n",
"filedata = VirusTotalAPI().hash_fetch(line)\n",
"submit['errors'].append('Error retrieving file hash: %s' % e)\n",
"filepath = Files.create(tmppath, line, filedata)\n",
"submit['data'].append({'type': 'url', 'data': line})\n",
"submit['errors'].append(\"'%s' was neither a valid hash or url\" % line)\n",
"return\n",
"submit['data'].append({'type': 'file', 'data': filepath})\n",
"return\n",
"def pre(self, submit_type, data):...\n",
"return\n",
"\"\"\"docstring\"\"\"\n",
"if submit_type not in ('strings', 'files'):\n",
"log.error(\"Bad parameter '%s' for submit_type\", submit_type)\n",
"path_tmp = Folders.create_temp()\n",
"return False\n",
"submit_data = {'data': [], 'errors': []}\n",
"if submit_type == 'strings':\n",
"for line in data:\n",
"if submit_type == 'files':\n",
"self._handle_string(submit_data, path_tmp, line)\n",
"for entry in data:\n",
"return Database().add_submit(path_tmp, submit_type, submit_data)\n",
"filename = Storage.get_filename_from_path(entry['name'])\n",
"filepath = Files.create(path_tmp, filename, entry['data'])\n",
"submit_data['data'].append({'type': 'file', 'data': filepath})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Return'",
"Expr'",
"Return'",
"FunctionDef'",
"Return'",
"Docstring",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Assign'",
"Condition",
"For",
"Condition",
"Expr'",
"For",
"Return'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_7(self, VAR_1, VAR_5):...\n",
"if VAR_1 == '':\n",
"return self\n",
"if VAR_1 == 'login':\n",
"return self\n",
"if not self.is_logged_in(VAR_5):\n",
"return UnAuthorizedResource()\n",
"return NoResource()\n"
] | [
"def getChild(self, path, request):...\n",
"if path == '':\n",
"return self\n",
"if path == 'login':\n",
"return self\n",
"if not self.is_logged_in(request):\n",
"return UnAuthorizedResource()\n",
"return NoResource()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"def __init__(self, VAR_0, VAR_1):...\n",
"self.db = VAR_0\n",
"self.access_token = VAR_1\n"
] | [
"def __init__(self, db, access_token):...\n",
"self.db = db\n",
"self.access_token = access_token\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"def FUNC_18(self, VAR_14, VAR_28):...\n",
"VAR_12 = \"ssh %s 'hyperion --config %s/%s.yaml slave --kill'\" % (VAR_28,\n VAR_1, VAR_14)\n",
"self.logger.debug('Run cmd:\\n%s' % VAR_12)\n",
"FUNC_7(self.session, VAR_12)\n"
] | [
"def stop_remote_component(self, comp_name, host):...\n",
"cmd = \"ssh %s 'hyperion --config %s/%s.yaml slave --kill'\" % (host,\n TMP_SLAVE_DIR, comp_name)\n",
"self.logger.debug('Run cmd:\\n%s' % cmd)\n",
"send_main_session_command(self.session, cmd)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def __init__(self, *VAR_3, **VAR_4):...\n",
"super(CLASS_0, self).__init__(*VAR_3, **kwargs)\n",
"self.common = None\n",
"self.configuration.append_config_values(hpcommon.hp3par_opts)\n",
"self.configuration.append_config_values(san.san_opts)\n"
] | [
"def __init__(self, *args, **kwargs):...\n",
"super(HP3PARISCSIDriver, self).__init__(*args, **kwargs)\n",
"self.common = None\n",
"self.configuration.append_config_values(hpcommon.hp3par_opts)\n",
"self.configuration.append_config_values(san.san_opts)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"@utils.synchronized('3par', external=True)...\n",
""
] | [
"@utils.synchronized('3par', external=True)...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"@command...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_3 = wrapper.todoist.get_projects()\n",
"cli.print_listing(VAR_3, 0)\n",
"return VAR_3\n"
] | [
"@command...\n",
"\"\"\"docstring\"\"\"\n",
"projects = wrapper.todoist.get_projects()\n",
"cli.print_listing(projects, 0)\n",
"return projects\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_28(self):...\n",
"VAR_4 = 'hubba-bubba'\n",
"VAR_3 = '/afterlogin/'\n",
"VAR_8 = self.add_outstanding_query(VAR_3)\n",
"VAR_10 = self.dump_session_cookie(VAR_8)\n",
"VAR_11 = auth_response(VAR_8, VAR_4)\n",
"VAR_31 = self.app.dispatch_request()\n",
"VAR_32.persist()\n",
"VAR_31 = self.app.dispatch_request()\n",
"self.assertEqual(VAR_31.status, '302 FOUND')\n",
"self.assertIn('testing-relay-state', VAR_31.location)\n"
] | [
"def test_logout_service_startingIDP_no_subject_id(self):...\n",
"eppn = 'hubba-bubba'\n",
"came_from = '/afterlogin/'\n",
"session_id = self.add_outstanding_query(came_from)\n",
"cookie = self.dump_session_cookie(session_id)\n",
"saml_response = auth_response(session_id, eppn)\n",
"response = self.app.dispatch_request()\n",
"session.persist()\n",
"response = self.app.dispatch_request()\n",
"self.assertEqual(response.status, '302 FOUND')\n",
"self.assertIn('testing-relay-state', response.location)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_7(self, VAR_2, VAR_3):...\n",
"self.cursor.execute(\n 'SELECT count(id) FROM kickstarter.users where id = %s and money >= %s' %\n (VAR_2, VAR_3))\n",
"return self.cursor.fetchall()[0][0]\n"
] | [
"def can_user_pass_that_amount_of_money(self, user_id, money):...\n",
"self.cursor.execute(\n 'SELECT count(id) FROM kickstarter.users where id = %s and money >= %s' %\n (user_id, money))\n",
"return self.cursor.fetchall()[0][0]\n"
] | [
0,
4,
0
] | [
"FunctionDef'",
"Expr'",
"Return'"
] |
[
"@classmethod...\n",
"VAR_35 = len(VAR_22) - 1\n",
"if VAR_23 > 3:\n",
"warn('Nested lists to depth greater than 4')\n",
"for VAR_30, l in enumerate(VAR_22):\n",
"VAR_23 = 3\n",
"if isinstance(l, VAR_21):\n",
"if VAR_30 == 0 or (VAR_22[VAR_30 - 1].kind != l.kind if isinstance(VAR_22[\n",
"if isinstance(l, CLASS_0):\n",
"l.before = VAR_21.begins[l.kind] + l.before\n",
"if VAR_30 == VAR_35 or (VAR_22[VAR_30 + 1].kind != l.kind if isinstance(\n",
"VAR_21.resolve(l.children, VAR_23)\n",
"if l.kind == 1 and not l.resume:\n",
"l.after += VAR_21.ends[l.kind]\n",
"l.before %= VAR_21.enumCounterCmd % (VAR_21.enumCounters[VAR_23], VAR_21.\n counterValues[VAR_23]) if l.resume else ''\n",
"VAR_21.counterValues[VAR_23] = 0\n",
"if l.kind == 1:\n",
"VAR_21.counterValues[VAR_23] += 1\n",
"VAR_21.resolve(l.children, VAR_23 + 1)\n"
] | [
"@classmethod...\n",
"maxIndex = len(docList) - 1\n",
"if depth > 3:\n",
"warn('Nested lists to depth greater than 4')\n",
"for i, l in enumerate(docList):\n",
"depth = 3\n",
"if isinstance(l, cls):\n",
"if i == 0 or (docList[i - 1].kind != l.kind if isinstance(docList[i - 1],\n",
"if isinstance(l, Hierarchy):\n",
"l.before = cls.begins[l.kind] + l.before\n",
"if i == maxIndex or (docList[i + 1].kind != l.kind if isinstance(docList[i +\n",
"cls.resolve(l.children, depth)\n",
"if l.kind == 1 and not l.resume:\n",
"l.after += cls.ends[l.kind]\n",
"l.before %= cls.enumCounterCmd % (cls.enumCounters[depth], cls.\n counterValues[depth]) if l.resume else ''\n",
"cls.counterValues[depth] = 0\n",
"if l.kind == 1:\n",
"cls.counterValues[depth] += 1\n",
"cls.resolve(l.children, depth + 1)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Expr'",
"For",
"Assign'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Condition",
"AugAssign'",
"AugAssign'",
"Assign'",
"Condition",
"AugAssign'",
"Expr'"
] |
[
"def FUNC_15(self, VAR_15, VAR_8):...\n",
"VAR_27, VAR_28 = super().as_sql(VAR_15, VAR_8)\n",
"VAR_27 = '%s::%s' % (VAR_27, self.lhs.output_field.db_type(VAR_8))\n",
"return VAR_27, VAR_28\n"
] | [
"def as_sql(self, qn, connection):...\n",
"sql, params = super().as_sql(qn, connection)\n",
"sql = '%s::%s' % (sql, self.lhs.output_field.db_type(connection))\n",
"return sql, params\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_1(self, VAR_1, VAR_2):...\n",
"self.schema.add_filter(VAR_2, VAR_1)\n"
] | [
"def add_filter(self, op, value):...\n",
"self.schema.add_filter(value, op)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_20(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = VAR_0[:]\n",
"for user in self.company.employees:\n",
"VAR_11.append((Allow, user.login, ('view_project', 'edit_project',\n 'add_project', 'edit_phase', 'add_phase', 'add_estimation',\n 'add_invoice', 'list_estimations', 'list_invoices', 'view.file',\n 'add.file', 'edit.file')))\n",
"return VAR_11\n"
] | [
"def get_project_acl(self):...\n",
"\"\"\"docstring\"\"\"\n",
"acl = DEFAULT_PERM[:]\n",
"for user in self.company.employees:\n",
"acl.append((Allow, user.login, ('view_project', 'edit_project',\n 'add_project', 'edit_phase', 'add_phase', 'add_estimation',\n 'add_invoice', 'list_estimations', 'list_invoices', 'view.file',\n 'add.file', 'edit.file')))\n",
"return acl\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Expr'",
"Return'"
] |
[
"@rule(TestResult, [PythonTestsAdaptor, PyTest, PythonSetup, SourceRootConfig])...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_7 = 'https://github.com/pantsbuild/pex/releases/download/v1.6.6/pex'\n",
"VAR_8 = Digest(\n '61bb79384db0da8c844678440bd368bcbfac17bbdb865721ad3f9cb0ab29b629', 1826945\n )\n",
"VAR_9 = yield Get(Snapshot, UrlToFetch(VAR_7, VAR_8))\n",
"VAR_10 = yield Get(TransitiveHydratedTargets, BuildFileAddresses((VAR_2.\n address,)))\n",
"VAR_11 = [t.adaptor for t in VAR_10.closure]\n",
"VAR_12 = []\n",
"for maybe_python_req_lib in VAR_11:\n",
"if hasattr(maybe_python_req_lib, 'requirement'):\n",
"VAR_13 = sorted(VAR_12 + list(VAR_3.get_requirement_strings()))\n",
"VAR_12.append(str(maybe_python_req_lib.requirement))\n",
"if hasattr(maybe_python_req_lib, 'requirements'):\n",
"VAR_14 = text_type(sys.executable)\n",
"for py_req in maybe_python_req_lib.requirements:\n",
"VAR_15 = FUNC_0(VAR_0, VAR_1=all_targets)\n",
"VAR_12.append(str(py_req.requirement))\n",
"VAR_16 = 'pytest-with-requirements.pex'\n",
"VAR_17 = [VAR_14, './{}'.format(VAR_9.files[0]), '-e', 'pytest:main', '-o',\n VAR_16] + VAR_15 + [text_type(req) for req in VAR_13]\n",
"VAR_18 = ExecuteProcessRequest(argv=tuple(requirements_pex_argv), env={\n 'PATH': text_type(os.pathsep.join(python_setup.interpreter_search_paths\n ))}, input_files=pex_snapshot.directory_digest, description=\n 'Resolve requirements: {}'.format(', '.join(all_requirements)),\n output_files=(output_pytest_requirements_pex_filename,))\n",
"VAR_19 = yield Get(ExecuteProcessResult, ExecuteProcessRequest, VAR_18)\n",
"VAR_20 = VAR_4.get_source_roots()\n",
"VAR_21 = []\n",
"for maybe_source_target in VAR_11:\n",
"if hasattr(maybe_source_target, 'sources'):\n",
"VAR_22 = yield [Get(Digest, DirectoryWithPrefixToStrip(directory_digest=\n snapshot.directory_digest, prefix=source_root.path)) for snapshot,\n source_root in VAR_21]\n",
"VAR_30 = maybe_source_target.sources.snapshot\n",
"VAR_23 = yield Get(Digest, DirectoriesToMerge(directories=tuple(\n all_sources_digests)))\n",
"VAR_31 = VAR_20.find_by_path(maybe_source_target.address.spec_path)\n",
"VAR_24 = yield Get(InjectedInitDigest, Digest, VAR_23)\n",
"VAR_21.append((VAR_30, VAR_31))\n",
"VAR_25 = [VAR_23, VAR_24.directory_digest, VAR_19.output_directory_digest]\n",
"VAR_26 = yield Get(Digest, DirectoriesToMerge, DirectoriesToMerge(\n directories=tuple(all_input_digests)))\n",
"VAR_27 = ExecuteProcessRequest(argv=(python_binary, './{}'.format(\n output_pytest_requirements_pex_filename)), env={'PATH': text_type(os.\n pathsep.join(python_setup.interpreter_search_paths))}, input_files=\n merged_input_files, description='Run pytest for {}'.format(test_target.\n address.reference()))\n",
"VAR_28 = yield Get(FallibleExecuteProcessResult, ExecuteProcessRequest, VAR_27)\n",
"VAR_29 = Status.SUCCESS if VAR_28.exit_code == 0 else Status.FAILURE\n",
"yield TestResult(VAR_29=status, stdout=result.stdout.decode('utf-8'),\n stderr=result.stderr.decode('utf-8'))\n"
] | [
"@rule(TestResult, [PythonTestsAdaptor, PyTest, PythonSetup, SourceRootConfig])...\n",
"\"\"\"docstring\"\"\"\n",
"url = 'https://github.com/pantsbuild/pex/releases/download/v1.6.6/pex'\n",
"digest = Digest(\n '61bb79384db0da8c844678440bd368bcbfac17bbdb865721ad3f9cb0ab29b629', 1826945\n )\n",
"pex_snapshot = yield Get(Snapshot, UrlToFetch(url, digest))\n",
"transitive_hydrated_targets = yield Get(TransitiveHydratedTargets,\n BuildFileAddresses((test_target.address,)))\n",
"all_targets = [t.adaptor for t in transitive_hydrated_targets.closure]\n",
"all_target_requirements = []\n",
"for maybe_python_req_lib in all_targets:\n",
"if hasattr(maybe_python_req_lib, 'requirement'):\n",
"all_requirements = sorted(all_target_requirements + list(pytest.\n get_requirement_strings()))\n",
"all_target_requirements.append(str(maybe_python_req_lib.requirement))\n",
"if hasattr(maybe_python_req_lib, 'requirements'):\n",
"python_binary = text_type(sys.executable)\n",
"for py_req in maybe_python_req_lib.requirements:\n",
"interpreter_constraint_args = parse_interpreter_constraints(python_setup,\n python_target_adaptors=all_targets)\n",
"all_target_requirements.append(str(py_req.requirement))\n",
"output_pytest_requirements_pex_filename = 'pytest-with-requirements.pex'\n",
"requirements_pex_argv = [python_binary, './{}'.format(pex_snapshot.files[0]\n ), '-e', 'pytest:main', '-o', output_pytest_requirements_pex_filename\n ] + interpreter_constraint_args + [text_type(req) for req in\n all_requirements]\n",
"requirements_pex_request = ExecuteProcessRequest(argv=tuple(\n requirements_pex_argv), env={'PATH': text_type(os.pathsep.join(\n python_setup.interpreter_search_paths))}, input_files=pex_snapshot.\n directory_digest, description='Resolve requirements: {}'.format(', '.\n join(all_requirements)), output_files=(\n output_pytest_requirements_pex_filename,))\n",
"requirements_pex_response = yield Get(ExecuteProcessResult,\n ExecuteProcessRequest, requirements_pex_request)\n",
"source_roots = source_root_config.get_source_roots()\n",
"sources_snapshots_and_source_roots = []\n",
"for maybe_source_target in all_targets:\n",
"if hasattr(maybe_source_target, 'sources'):\n",
"all_sources_digests = yield [Get(Digest, DirectoryWithPrefixToStrip(\n directory_digest=snapshot.directory_digest, prefix=source_root.path)) for\n snapshot, source_root in sources_snapshots_and_source_roots]\n",
"tgt_snapshot = maybe_source_target.sources.snapshot\n",
"sources_digest = yield Get(Digest, DirectoriesToMerge(directories=tuple(\n all_sources_digests)))\n",
"tgt_source_root = source_roots.find_by_path(maybe_source_target.address.\n spec_path)\n",
"inits_digest = yield Get(InjectedInitDigest, Digest, sources_digest)\n",
"sources_snapshots_and_source_roots.append((tgt_snapshot, tgt_source_root))\n",
"all_input_digests = [sources_digest, inits_digest.directory_digest,\n requirements_pex_response.output_directory_digest]\n",
"merged_input_files = yield Get(Digest, DirectoriesToMerge,\n DirectoriesToMerge(directories=tuple(all_input_digests)))\n",
"request = ExecuteProcessRequest(argv=(python_binary, './{}'.format(\n output_pytest_requirements_pex_filename)), env={'PATH': text_type(os.\n pathsep.join(python_setup.interpreter_search_paths))}, input_files=\n merged_input_files, description='Run pytest for {}'.format(test_target.\n address.reference()))\n",
"result = yield Get(FallibleExecuteProcessResult, ExecuteProcessRequest, request\n )\n",
"status = Status.SUCCESS if result.exit_code == 0 else Status.FAILURE\n",
"yield TestResult(status=status, stdout=result.stdout.decode('utf-8'),\n stderr=result.stderr.decode('utf-8'))\n"
] | [
7,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
7,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"For",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_15(self, VAR_5, VAR_14):...\n",
"VAR_22 = IPixelatedSession(VAR_5.getSession())\n",
"VAR_22.user_uuid = VAR_14\n"
] | [
"def _init_http_session(self, request, user_id):...\n",
"session = IPixelatedSession(request.getSession())\n",
"session.user_uuid = user_id\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'"
] |
[
"@tornado.web.asynchronous...\n",
"VAR_6 = tornado.concurrent.Future()\n",
"def FUNC_4():...\n",
"VAR_22 = self.request.body\n",
"VAR_23 = json.loads(VAR_22.decode('utf-8', 'ignore'))\n",
"VAR_24 = VAR_23['action']\n",
"VAR_25 = VAR_23['source']\n",
"if type(VAR_25) == list:\n",
"for VAR_33 in range(0, len(VAR_25)):\n",
"VAR_25 = FUNC_1(VAR_25)\n",
"VAR_25[VAR_33] = FUNC_1(VAR_25[VAR_33])\n",
"if VAR_24 in ['copy', 'move']:\n",
"if VAR_24 in ['rename', 'new-folder']:\n",
"VAR_31 = FUNC_1(VAR_23['target'])\n",
"VAR_31 = '/'\n",
"if VAR_24 == 'copy':\n",
"VAR_31 = VAR_23['target']\n",
"VAR_31 = VAR_25\n",
"for source in VAR_25:\n",
"if VAR_24 == 'move':\n",
"os.system('cp \"D:%s\" \"D:%s\"' % (source, VAR_31))\n",
"VAR_6.set_result('')\n",
"for source in VAR_25:\n",
"if VAR_24 == 'delete':\n",
"tornado.ioloop.IOLoop.instance().add_callback(FUNC_4)\n",
"os.system('mv \"D:%s\" \"D:%s\"' % (source, VAR_31))\n",
"for source in VAR_25:\n",
"if VAR_24 == 'rename':\n",
"VAR_7 = yield VAR_6\n",
"os.system('rm \"D:%s\"' % source)\n",
"os.system('rename \"D:%s\" \"%s\"' % (VAR_25, VAR_31))\n",
"if VAR_24 == 'new-folder':\n",
"self.set_status(200, 'OK')\n",
"os.system('mkdir \"D:%s%s\"' % (VAR_25, VAR_31))\n",
"self.add_header('Cache-Control', 'max-age=0')\n",
"self.add_header('Connection', 'close')\n",
"self.add_header('Content-Type', 'text/html')\n",
"self.add_header('Content-Length', str(len(VAR_7)))\n",
"self.write(VAR_7)\n",
"self.flush()\n",
"self.finish()\n",
"return self\n"
] | [
"@tornado.web.asynchronous...\n",
"future = tornado.concurrent.Future()\n",
"def get_final_html_async():...\n",
"operation_content_raw = self.request.body\n",
"operation_content = json.loads(operation_content_raw.decode('utf-8', 'ignore'))\n",
"action = operation_content['action']\n",
"sources = operation_content['source']\n",
"if type(sources) == list:\n",
"for i in range(0, len(sources)):\n",
"sources = decode_hexed_b64_to_str(sources)\n",
"sources[i] = decode_hexed_b64_to_str(sources[i])\n",
"if action in ['copy', 'move']:\n",
"if action in ['rename', 'new-folder']:\n",
"target = decode_hexed_b64_to_str(operation_content['target'])\n",
"target = '/'\n",
"if action == 'copy':\n",
"target = operation_content['target']\n",
"target = sources\n",
"for source in sources:\n",
"if action == 'move':\n",
"os.system('cp \"D:%s\" \"D:%s\"' % (source, target))\n",
"future.set_result('')\n",
"for source in sources:\n",
"if action == 'delete':\n",
"tornado.ioloop.IOLoop.instance().add_callback(get_final_html_async)\n",
"os.system('mv \"D:%s\" \"D:%s\"' % (source, target))\n",
"for source in sources:\n",
"if action == 'rename':\n",
"file_temp = yield future\n",
"os.system('rm \"D:%s\"' % source)\n",
"os.system('rename \"D:%s\" \"%s\"' % (sources, target))\n",
"if action == 'new-folder':\n",
"self.set_status(200, 'OK')\n",
"os.system('mkdir \"D:%s%s\"' % (sources, target))\n",
"self.add_header('Cache-Control', 'max-age=0')\n",
"self.add_header('Connection', 'close')\n",
"self.add_header('Content-Type', 'text/html')\n",
"self.add_header('Content-Length', str(len(file_temp)))\n",
"self.write(file_temp)\n",
"self.flush()\n",
"self.finish()\n",
"return self\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
4,
0,
0,
0,
4,
4,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"For",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"For",
"Condition",
"Expr'",
"Expr'",
"For",
"Condition",
"Expr'",
"Expr'",
"For",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_4(self, VAR_11, VAR_12):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_16 = self.transformPoint(Point(VAR_11, VAR_12, 0), 'map', 'odom')\n",
"return Navigation.goToPosition(self, VAR_16.x, VAR_16.y)\n"
] | [
"def goToPosition(self, x, y):...\n",
"\"\"\"docstring\"\"\"\n",
"transformed_point = self.transformPoint(Point(x, y, 0), 'map', 'odom')\n",
"return Navigation.goToPosition(self, transformed_point.x, transformed_point.y)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_14(self):...\n",
"self.run_test_case(self.scenario.create_instances())\n"
] | [
"def test_b_create_instances(self):...\n",
"self.run_test_case(self.scenario.create_instances())\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_0(VAR_0):...\n",
"VAR_4 = datetime.fromtimestamp(VAR_0.created)\n",
"return VAR_4.strftime('%Y-%m-%d %H:%M:%S')\n"
] | [
"def getDate(submission):...\n",
"time = datetime.fromtimestamp(submission.created)\n",
"return time.strftime('%Y-%m-%d %H:%M:%S')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"@inlineCallbacks...\n",
""
] | [
"@inlineCallbacks...\n",
""
] | [
4,
0
] | [
"Condition",
"Condition"
] |
[
"def FUNC_35(self):...\n",
"\"\"\"docstring\"\"\"\n",
"tournament.reportMatch(p1=9, p2='')\n"
] | [
"def test_less_than_two_players(self):...\n",
"\"\"\"docstring\"\"\"\n",
"tournament.reportMatch(p1=9, p2='')\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'"
] |
[
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return os.path.join(self.env['GOPATH'], 'bin', 'gometalinter.v1')\n"
] | [
"@property...\n",
"\"\"\"docstring\"\"\"\n",
"return os.path.join(self.env['GOPATH'], 'bin', 'gometalinter.v1')\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_26(VAR_30, *VAR_12, **VAR_17):...\n",
"if not GLSetting.memory_copy.anomaly_checks:\n",
"return VAR_20(VAR_30, *VAR_12, **kw)\n",
"if GLSetting.anomalies_counter[VAR_1] > alarm_level[VAR_1]:\n",
"if VAR_1 == 'new_submission':\n",
"return VAR_20(VAR_30, *VAR_12, **kw)\n",
"log.debug('Blocked a New Submission (%d > %d)' % (GLSetting.\n anomalies_counter[VAR_1], alarm_level[VAR_1]))\n",
"if VAR_1 == 'finalized_submission':\n",
"log.debug('Blocked a Finalized Submission (%d > %d)' % (GLSetting.\n anomalies_counter[VAR_1], alarm_level[VAR_1]))\n",
"if VAR_1 == 'anon_requests':\n",
"log.debug('Blocked an Anon Request (%d > %d)' % (GLSetting.\n anomalies_counter[VAR_1], alarm_level[VAR_1]))\n",
"if VAR_1 == 'file_uploaded':\n",
"log.debug('Blocked a File upload (%d > %d)' % (GLSetting.anomalies_counter[\n VAR_1], alarm_level[VAR_1]))\n",
"log.debug('Blocked an Unknown event (=%s) !? [BUG!] (%d > %d)' % (VAR_1,\n GLSetting.anomalies_counter[VAR_1], alarm_level[VAR_1]))\n"
] | [
"def call_handler(cls, *args, **kw):...\n",
"if not GLSetting.memory_copy.anomaly_checks:\n",
"return method_handler(cls, *args, **kw)\n",
"if GLSetting.anomalies_counter[element] > alarm_level[element]:\n",
"if element == 'new_submission':\n",
"return method_handler(cls, *args, **kw)\n",
"log.debug('Blocked a New Submission (%d > %d)' % (GLSetting.\n anomalies_counter[element], alarm_level[element]))\n",
"if element == 'finalized_submission':\n",
"log.debug('Blocked a Finalized Submission (%d > %d)' % (GLSetting.\n anomalies_counter[element], alarm_level[element]))\n",
"if element == 'anon_requests':\n",
"log.debug('Blocked an Anon Request (%d > %d)' % (GLSetting.\n anomalies_counter[element], alarm_level[element]))\n",
"if element == 'file_uploaded':\n",
"log.debug('Blocked a File upload (%d > %d)' % (GLSetting.anomalies_counter[\n element], alarm_level[element]))\n",
"log.debug('Blocked an Unknown event (=%s) !? [BUG!] (%d > %d)' % (element,\n GLSetting.anomalies_counter[element], alarm_level[element]))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Condition",
"Condition",
"Return'",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_0(self):...\n",
""
] | [
"def test_empty_environment(self):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_12(VAR_35, VAR_27):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_52 = ''\n",
"for result in VAR_35:\n",
"VAR_80 = VAR_35[result]\n",
"VAR_52 += '\\n--\\n{0}'.format(FUNC_21())\n",
"if VAR_80:\n",
"return VAR_52\n",
"VAR_89 = sorted(VAR_80, key=lambda x: list_result[x], reverse=True)\n",
"VAR_52 += '\\n\\n{0}:\\n'.format(result)\n",
"for element in VAR_89:\n",
"VAR_52 += '\\n{0} {1}'.format(VAR_80[element], element)\n"
] | [
"def _output_text(complete_output, categories):...\n",
"\"\"\"docstring\"\"\"\n",
"output = ''\n",
"for result in complete_output:\n",
"list_result = complete_output[result]\n",
"output += '\\n--\\n{0}'.format(_signature())\n",
"if list_result:\n",
"return output\n",
"list_result_sorted = sorted(list_result, key=lambda x: list_result[x],\n reverse=True)\n",
"output += '\\n\\n{0}:\\n'.format(result)\n",
"for element in list_result_sorted:\n",
"output += '\\n{0} {1}'.format(list_result[element], element)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Assign'",
"AugAssign'",
"Condition",
"Return'",
"Assign'",
"AugAssign'",
"For",
"AugAssign'"
] |
[
"def FUNC_3(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_9 = EndpointClient()\n",
"VAR_10 = True\n",
"while VAR_10:\n",
"if self._one_shot:\n",
"VAR_10 = False\n",
"VAR_18 = self.post('worker', data={'types': self._types})\n",
"self._logger.warning('Timed out contacting the WorkqueueService.')\n",
"VAR_13, VAR_0 = VAR_18\n",
"if err.code == 404:\n",
"VAR_14 = VAR_9.get_site(VAR_13['src_siteid'])\n",
"self._logger.debug('No work to pick up.')\n",
"self._logger.exception('Error trying to get job from WorkqueueService.')\n",
"VAR_15 = [urlsplit(site) for site in VAR_14['endpoints'].itervalues()]\n",
"time.sleep(self._interpoll_sleep_time)\n",
"VAR_16 = [urlunsplit(site._replace(path=job['src_filepath'])) for site in\n VAR_15 if site.scheme == PROTOCOLMAP[VAR_13['protocol']]]\n",
"if not VAR_16:\n",
"self._abort(VAR_13['id'], \n \"Protocol '%s' not supported at src site with id %d\" % (VAR_13[\n 'protocol'], VAR_13['src_siteid']))\n",
"VAR_17 = '%s %s' % (COMMANDMAP[VAR_13['type']][VAR_13['protocol']], random.\n choice(VAR_16))\n",
"if VAR_13['type'] == JobType.COPY:\n",
"if VAR_13['dst_siteid'] is None:\n",
"self._current_process = subprocess.Popen('(set -x && %s)' % VAR_17, shell=\n True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=dict(os.\n environ, PATH=self._script_path, X509_USER_PROXY=proxyfile.name))\n",
"self._abort(VAR_13['id'], 'No dst site id set for copy operation')\n",
"if VAR_13['dst_filepath'] is None:\n",
"VAR_22, VAR_5 = self._current_process.communicate()\n",
"self._abort(VAR_13['id'], 'No dst site filepath set for copy operation')\n",
"VAR_19 = VAR_9.get_site(VAR_13['dst_siteid'])\n",
"self.set_token(VAR_0)\n",
"VAR_20 = [urlsplit(site) for site in VAR_19['endpoints'].itervalues()]\n",
"self.put('worker/%s' % VAR_13['id'], data={'log': log, 'returncode': self.\n _current_process.returncode, 'host': socket.gethostbyaddr(socket.\n getfqdn())})\n",
"self._logger.exception('Error trying to PUT back output from subcommand.')\n",
"self.set_token(None)\n",
"VAR_21 = [urlunsplit(site._replace(path=job['dst_filepath'])) for site in\n VAR_20 if site.scheme == PROTOCOLMAP[VAR_13['protocol']]]\n",
"if not VAR_21:\n",
"self._abort(VAR_13['id'], \n \"Protocol '%s' not supported at dst site with id %d\" % (VAR_13[\n 'protocol'], VAR_13['dst_siteid']))\n",
"VAR_17 += ' %s' % random.choice(VAR_21)\n"
] | [
"def run(self):...\n",
"\"\"\"docstring\"\"\"\n",
"endpoint_client = EndpointClient()\n",
"run = True\n",
"while run:\n",
"if self._one_shot:\n",
"run = False\n",
"response = self.post('worker', data={'types': self._types})\n",
"self._logger.warning('Timed out contacting the WorkqueueService.')\n",
"job, token = response\n",
"if err.code == 404:\n",
"src_site = endpoint_client.get_site(job['src_siteid'])\n",
"self._logger.debug('No work to pick up.')\n",
"self._logger.exception('Error trying to get job from WorkqueueService.')\n",
"src_endpoints = [urlsplit(site) for site in src_site['endpoints'].itervalues()]\n",
"time.sleep(self._interpoll_sleep_time)\n",
"src = [urlunsplit(site._replace(path=job['src_filepath'])) for site in\n src_endpoints if site.scheme == PROTOCOLMAP[job['protocol']]]\n",
"if not src:\n",
"self._abort(job['id'], \"Protocol '%s' not supported at src site with id %d\" %\n (job['protocol'], job['src_siteid']))\n",
"command = '%s %s' % (COMMANDMAP[job['type']][job['protocol']], random.\n choice(src))\n",
"if job['type'] == JobType.COPY:\n",
"if job['dst_siteid'] is None:\n",
"self._current_process = subprocess.Popen('(set -x && %s)' % command, shell=\n True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=dict(os.\n environ, PATH=self._script_path, X509_USER_PROXY=proxyfile.name))\n",
"self._abort(job['id'], 'No dst site id set for copy operation')\n",
"if job['dst_filepath'] is None:\n",
"log, _ = self._current_process.communicate()\n",
"self._abort(job['id'], 'No dst site filepath set for copy operation')\n",
"dst_site = endpoint_client.get_site(job['dst_siteid'])\n",
"self.set_token(token)\n",
"dst_endpoints = [urlsplit(site) for site in dst_site['endpoints'].itervalues()]\n",
"self.put('worker/%s' % job['id'], data={'log': log, 'returncode': self.\n _current_process.returncode, 'host': socket.gethostbyaddr(socket.\n getfqdn())})\n",
"self._logger.exception('Error trying to PUT back output from subcommand.')\n",
"self.set_token(None)\n",
"dst = [urlunsplit(site._replace(path=job['dst_filepath'])) for site in\n dst_endpoints if site.scheme == PROTOCOLMAP[job['protocol']]]\n",
"if not dst:\n",
"self._abort(job['id'], \"Protocol '%s' not supported at dst site with id %d\" %\n (job['protocol'], job['dst_siteid']))\n",
"command += ' %s' % random.choice(dst)\n"
] | [
0,
0,
0,
0,
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
0,
0,
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Expr'",
"AugAssign'"
] |
[
"def FUNC_25(self, VAR_11, VAR_28):...\n",
"VAR_12 = \"ssh -t %s 'tmux kill-session -t %s'\" % (VAR_28, VAR_11)\n",
"FUNC_7(self.session, VAR_12)\n"
] | [
"def kill_remote_session_by_name(self, name, host):...\n",
"cmd = \"ssh -t %s 'tmux kill-session -t %s'\" % (host, name)\n",
"send_main_session_command(self.session, cmd)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"@VAR_7.user_loader...\n",
"return FUNC_4(VAR_5)\n"
] | [
"@login_manager.user_loader...\n",
"return auth_get_user_by_id(user_id)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return gate.new_agent(VAR_1)\n"
] | [
"@classmethod...\n",
"\"\"\"docstring\"\"\"\n",
"return gate.new_agent(bindings)\n"
] | [
0,
0,
0
] | [
"Condition",
"Docstring",
"Return'"
] |
[
"def FUNC_21(self, VAR_20):...\n",
"\"\"\"docstring\"\"\"\n",
"for VAR_67 in VAR_20:\n",
"VAR_55 = VAR_20[VAR_67].errno\n",
"return None\n",
"if VAR_55 == 103:\n",
"return 'Key exchange failed'\n",
"if VAR_55 == 108:\n",
"return 'SSH version is unsupported'\n",
"if VAR_55 == 111:\n",
"return 'Could not open SSH session on port %s' % self.config['ssh_port']\n",
"if VAR_55 == 115:\n",
"return \"No valid SSH user '%s'\" % self.config['ssh_user']\n"
] | [
"def _determine_ssh_error(self, errors):...\n",
"\"\"\"docstring\"\"\"\n",
"for err in errors:\n",
"errno = errors[err].errno\n",
"return None\n",
"if errno == 103:\n",
"return 'Key exchange failed'\n",
"if errno == 108:\n",
"return 'SSH version is unsupported'\n",
"if errno == 111:\n",
"return 'Could not open SSH session on port %s' % self.config['ssh_port']\n",
"if errno == 115:\n",
"return \"No valid SSH user '%s'\" % self.config['ssh_user']\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"For",
"Assign'",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'",
"Condition",
"Return'"
] |
[
"def FUNC_3(VAR_2):...\n",
"if not VAR_2:\n",
"return []\n",
"VAR_9 = []\n",
"VAR_10 = frappe.db.get_value('Department', {'name': VAR_2}, ['lft', 'rgt'],\n as_dict=True)\n",
"VAR_11 = frappe.db.sql(\n \"\"\"select name from `tabDepartment`\n\t\twhere lft >= %s and rgt <= %s order by lft desc\n\t\t\"\"\"\n , (VAR_10.lft, VAR_10.rgt), as_list=True)\n",
"for d in VAR_11:\n",
"VAR_9.extend([l.leave_approver for l in frappe.db.sql('string', d, as_dict=\n True)])\n",
"return VAR_9\n"
] | [
"def get_approvers(department):...\n",
"if not department:\n",
"return []\n",
"approvers = []\n",
"department_details = frappe.db.get_value('Department', {'name': department},\n ['lft', 'rgt'], as_dict=True)\n",
"department_list = frappe.db.sql(\n \"\"\"select name from `tabDepartment`\n\t\twhere lft >= %s and rgt <= %s order by lft desc\n\t\t\"\"\"\n , (department_details.lft, department_details.rgt), as_list=True)\n",
"for d in department_list:\n",
"approvers.extend([l.leave_approver for l in frappe.db.sql(\n \"select approver from `tabDepartment Approver` \\t\\t\\twhere parent = %s and parentfield = 'leave_approvers'\"\n , d, as_dict=True)])\n",
"return approvers\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Expr'",
"Return'"
] |
[
"def FUNC_20(self, VAR_10=None, VAR_11=None, VAR_1=None):...\n",
"VAR_10 = self.add_rule(VAR_10, VAR_11, VAR_1)\n",
"VAR_13 = self.get_rule(VAR_10)\n",
"def FUNC_39(VAR_101):...\n",
"if VAR_101.input:\n",
"VAR_13.set_input(*VAR_101.input[0], **ruleinfo.input[1])\n",
"if VAR_101.output:\n",
"VAR_13.set_output(*VAR_101.output[0], **ruleinfo.output[1])\n",
"if VAR_101.params:\n",
"VAR_13.set_params(*VAR_101.params[0], **ruleinfo.params[1])\n",
"if VAR_101.threads:\n",
"if not isinstance(VAR_101.threads, int):\n",
"if VAR_101.resources:\n",
"VAR_13.resources['_cores'] = VAR_101.threads\n",
"VAR_76, VAR_52 = VAR_101.resources\n",
"if VAR_101.priority:\n",
"if VAR_76:\n",
"if not isinstance(VAR_101.priority, int) and not isinstance(VAR_101.\n",
"if VAR_101.version:\n",
"if not all(map(lambda r: isinstance(r, int), VAR_52.values())):\n",
"VAR_13.priority = VAR_101.priority\n",
"VAR_13.version = VAR_101.version\n",
"if VAR_101.log:\n",
"VAR_13.resources.update(VAR_52)\n",
"VAR_13.set_log(*VAR_101.log[0], **ruleinfo.log[1])\n",
"if VAR_101.message:\n",
"VAR_13.message = VAR_101.message\n",
"if VAR_101.benchmark:\n",
"VAR_13.benchmark = VAR_101.benchmark\n",
"VAR_13.norun = VAR_101.norun\n",
"VAR_13.docstring = VAR_101.docstring\n",
"VAR_13.run_func = VAR_101.func\n",
"VAR_13.shellcmd = VAR_101.shellcmd\n",
"VAR_101.func.__name__ = '__{}'.format(VAR_10)\n",
"self.globals[VAR_101.func.__name__] = VAR_101.func\n",
"setattr(VAR_86, VAR_10, VAR_13)\n",
"return VAR_101.func\n"
] | [
"def rule(self, name=None, lineno=None, snakefile=None):...\n",
"name = self.add_rule(name, lineno, snakefile)\n",
"rule = self.get_rule(name)\n",
"def decorate(ruleinfo):...\n",
"if ruleinfo.input:\n",
"rule.set_input(*ruleinfo.input[0], **ruleinfo.input[1])\n",
"if ruleinfo.output:\n",
"rule.set_output(*ruleinfo.output[0], **ruleinfo.output[1])\n",
"if ruleinfo.params:\n",
"rule.set_params(*ruleinfo.params[0], **ruleinfo.params[1])\n",
"if ruleinfo.threads:\n",
"if not isinstance(ruleinfo.threads, int):\n",
"if ruleinfo.resources:\n",
"rule.resources['_cores'] = ruleinfo.threads\n",
"args, resources = ruleinfo.resources\n",
"if ruleinfo.priority:\n",
"if args:\n",
"if not isinstance(ruleinfo.priority, int) and not isinstance(ruleinfo.\n",
"if ruleinfo.version:\n",
"if not all(map(lambda r: isinstance(r, int), resources.values())):\n",
"rule.priority = ruleinfo.priority\n",
"rule.version = ruleinfo.version\n",
"if ruleinfo.log:\n",
"rule.resources.update(resources)\n",
"rule.set_log(*ruleinfo.log[0], **ruleinfo.log[1])\n",
"if ruleinfo.message:\n",
"rule.message = ruleinfo.message\n",
"if ruleinfo.benchmark:\n",
"rule.benchmark = ruleinfo.benchmark\n",
"rule.norun = ruleinfo.norun\n",
"rule.docstring = ruleinfo.docstring\n",
"rule.run_func = ruleinfo.func\n",
"rule.shellcmd = ruleinfo.shellcmd\n",
"ruleinfo.func.__name__ = '__{}'.format(name)\n",
"self.globals[ruleinfo.func.__name__] = ruleinfo.func\n",
"setattr(rules, name, rule)\n",
"return ruleinfo.func\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"FunctionDef'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Expr'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Condition",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_13(self):...\n",
"VAR_11 = {}\n",
"for VAR_12 in self.parent_groups:\n",
"VAR_11[VAR_12.name] = VAR_12\n",
"return VAR_11\n",
"VAR_11.update(VAR_12._get_ancestors())\n"
] | [
"def _get_ancestors(self):...\n",
"results = {}\n",
"for g in self.parent_groups:\n",
"results[g.name] = g\n",
"return results\n",
"results.update(g._get_ancestors())\n"
] | [
0,
1,
0,
1,
1,
1
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Return'",
"Expr'"
] |
[
"def FUNC_3(VAR_2, VAR_3):...\n",
"VAR_11 = crypt.crypt(VAR_3, '22')\n",
"os.system('useradd -G docker,wheel -p ' + VAR_11 + ' ' + VAR_2)\n"
] | [
"def add_user(username, password):...\n",
"encPass = crypt.crypt(password, '22')\n",
"os.system('useradd -G docker,wheel -p ' + encPass + ' ' + username)\n"
] | [
0,
0,
7
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_34(self, VAR_64):...\n",
"return CLASS_1(VAR_64)\n"
] | [
"def run(self, func):...\n",
"return RuleInfo(func)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_6():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_44 = 'Select the leagues parameter from the following leagues:\\n\\n'\n",
"for league_id, league_name in VAR_1.items():\n",
"VAR_44 += '{} ({})\\n'.format(league_id, league_name)\n",
"VAR_45 = ArgumentParser(VAR_44=description, formatter_class=\n RawDescriptionHelpFormatter)\n",
"VAR_45.add_argument('leagues', nargs='*', default=['all'], help=\n 'One of all or any league ids from above.')\n",
"VAR_46 = VAR_45.parse_args()\n",
"VAR_8 = VAR_46.leagues\n",
"if len(VAR_8) == 1 and VAR_8[0] == 'all':\n",
"VAR_8 = VAR_8[0]\n",
"for VAR_49, func in enumerate([FUNC_2, FUNC_3, FUNC_4, FUNC_5]):\n",
"func(VAR_8) if VAR_49 in (0, 1) else func()\n"
] | [
"def download():...\n",
"\"\"\"docstring\"\"\"\n",
"description = 'Select the leagues parameter from the following leagues:\\n\\n'\n",
"for league_id, league_name in LEAGUES_MAPPING.items():\n",
"description += '{} ({})\\n'.format(league_id, league_name)\n",
"parser = ArgumentParser(description=description, formatter_class=\n RawDescriptionHelpFormatter)\n",
"parser.add_argument('leagues', nargs='*', default=['all'], help=\n 'One of all or any league ids from above.')\n",
"args = parser.parse_args()\n",
"leagues = args.leagues\n",
"if len(leagues) == 1 and leagues[0] == 'all':\n",
"leagues = leagues[0]\n",
"for ind, func in enumerate([create_spi_tables, create_fd_tables,\n",
"func(leagues) if ind in (0, 1) else func()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"AugAssign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"For",
"Expr'"
] |
[
"def FUNC_16(VAR_5, VAR_6):...\n",
"VAR_38 = 5\n",
"VAR_39 = VAR_14 + timedelta(minutes=cache_time) < datetime.now(\n ) if VAR_14 else True\n",
"if not VAR_14 or VAR_39 or VAR_5 not in VAR_15:\n",
"VAR_14 = datetime.now()\n",
"log.info('Returning cached result of %s', VAR_3.__name__)\n",
"VAR_50 = VAR_3(VAR_5, VAR_6)\n",
"VAR_51 = VAR_14 + timedelta(minutes=cache_time) - datetime.now()\n",
"VAR_15[VAR_5] = VAR_50\n",
"log.debug('Time to to reevaluate result of %s is %s', VAR_3.__name__, str(\n VAR_51)[:-7])\n",
"return VAR_50\n",
"return VAR_15[VAR_5]\n"
] | [
"def func_launcher(feature, feature_type):...\n",
"cache_time = 5\n",
"high_time = when_was_called + timedelta(minutes=cache_time) < datetime.now(\n ) if when_was_called else True\n",
"if not when_was_called or high_time or feature not in result:\n",
"when_was_called = datetime.now()\n",
"log.info('Returning cached result of %s', func.__name__)\n",
"num_of_users = func(feature, feature_type)\n",
"time_left = when_was_called + timedelta(minutes=cache_time) - datetime.now()\n",
"result[feature] = num_of_users\n",
"log.debug('Time to to reevaluate result of %s is %s', func.__name__, str(\n time_left)[:-7])\n",
"return num_of_users\n",
"return result[feature]\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Return'"
] |
[
"def FUNC_37(self):...\n",
"self.cursor.execute('create table t1(i int)')\n",
"self.cursor.execute('insert into t1 values (1)')\n",
"VAR_4 = self.cursor.execute('select * from t1')\n",
"self.assertEqual(VAR_4, self.cursor)\n"
] | [
"def test_retcursor_select(self):...\n",
"self.cursor.execute('create table t1(i int)')\n",
"self.cursor.execute('insert into t1 values (1)')\n",
"v = self.cursor.execute('select * from t1')\n",
"self.assertEqual(v, self.cursor)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_0(self):...\n",
"VAR_1 = 'sh'\n",
"self.assertEqual(escape_path_argument('/home/usr/a-file', VAR_1),\n '/home/usr/a-file')\n",
"self.assertEqual(escape_path_argument('/home/usr/a-dir/', VAR_1),\n '/home/usr/a-dir/')\n",
"self.assertEqual(escape_path_argument('/home/us r/a-file with spaces.bla',\n VAR_1), '/home/us\\\\ r/a-file\\\\ with\\\\ spaces.bla')\n",
"self.assertEqual(escape_path_argument('/home/us r/a-dir with spaces/x/',\n VAR_1), '/home/us\\\\ r/a-dir\\\\ with\\\\ spaces/x/')\n",
"self.assertEqual(escape_path_argument(\n 'relative something/with cherries and/pickles.delicious', VAR_1),\n 'relative\\\\ something/with\\\\ cherries\\\\ and/pickles.delicious')\n"
] | [
"def test_escape_path_argument_sh(self):...\n",
"_type = 'sh'\n",
"self.assertEqual(escape_path_argument('/home/usr/a-file', _type),\n '/home/usr/a-file')\n",
"self.assertEqual(escape_path_argument('/home/usr/a-dir/', _type),\n '/home/usr/a-dir/')\n",
"self.assertEqual(escape_path_argument('/home/us r/a-file with spaces.bla',\n _type), '/home/us\\\\ r/a-file\\\\ with\\\\ spaces.bla')\n",
"self.assertEqual(escape_path_argument('/home/us r/a-dir with spaces/x/',\n _type), '/home/us\\\\ r/a-dir\\\\ with\\\\ spaces/x/')\n",
"self.assertEqual(escape_path_argument(\n 'relative something/with cherries and/pickles.delicious', _type),\n 'relative\\\\ something/with\\\\ cherries\\\\ and/pickles.delicious')\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"\"\"\"\nCreated on 1 Feb 2018\n\n@author: Teodor Gherasim Nistor\n\"\"\"\n",
"from ply import lex\n",
"from beamr.lexers.generic import t_error\n",
"import beamr.interpreters\n",
"import beamr.debug as dbg\n",
"VAR_0 = 'COMMENT', 'HEADING', 'SLIDE', 'SCISSOR', 'YAML', 'TEXT'\n",
"def FUNC_0(VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1.value = beamr.interpreters.Comment(VAR_1.value)\n",
"return VAR_1\n"
] | [
"\"\"\"\nCreated on 1 Feb 2018\n\n@author: Teodor Gherasim Nistor\n\"\"\"\n",
"from ply import lex\n",
"from beamr.lexers.generic import t_error\n",
"import beamr.interpreters\n",
"import beamr.debug as dbg\n",
"tokens = 'COMMENT', 'HEADING', 'SLIDE', 'SCISSOR', 'YAML', 'TEXT'\n",
"def t_COMMENT(t):...\n",
"\"\"\"docstring\"\"\"\n",
"t.value = beamr.interpreters.Comment(t.value)\n",
"return t\n"
] | [
0,
0,
0,
0,
0,
2,
0,
0,
0,
0
] | [
"Expr'",
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"Assign'",
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"def FUNC_25(VAR_16, VAR_22, VAR_17):...\n",
"VAR_53 = (\n \"select date from ranks where scene='{}' and player='{}' order by date limit 1;\"\n .format(VAR_22, VAR_17))\n",
"VAR_54 = VAR_16.exec(VAR_53)\n",
"VAR_23 = VAR_54[0][0]\n",
"return VAR_23\n"
] | [
"def get_first_ranked_month(db, scene, player):...\n",
"sql = (\n \"select date from ranks where scene='{}' and player='{}' order by date limit 1;\"\n .format(scene, player))\n",
"res = db.exec(sql)\n",
"date = res[0][0]\n",
"return date\n"
] | [
0,
4,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_4(self, VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_6 = VAR_1.session.get('oidc_nonce')\n",
"if VAR_6:\n",
"if 'code' in VAR_1.GET and 'state' in VAR_1.GET:\n",
"VAR_3 = {'request': VAR_1, 'nonce': VAR_6}\n",
"return self.login_failure()\n",
"if 'oidc_state' not in VAR_1.session:\n",
"return self.login_failure()\n",
"if VAR_1.GET['state'] != VAR_1.session['oidc_state']:\n",
"VAR_14 = 'Session `oidc_state` does not match the OIDC callback state'\n",
"self.user = auth.authenticate(**kwargs)\n",
"if self.user and self.user.is_active:\n",
"return self.login_success()\n"
] | [
"def get(self, request):...\n",
"\"\"\"docstring\"\"\"\n",
"nonce = request.session.get('oidc_nonce')\n",
"if nonce:\n",
"if 'code' in request.GET and 'state' in request.GET:\n",
"kwargs = {'request': request, 'nonce': nonce}\n",
"return self.login_failure()\n",
"if 'oidc_state' not in request.session:\n",
"return self.login_failure()\n",
"if request.GET['state'] != request.session['oidc_state']:\n",
"msg = 'Session `oidc_state` does not match the OIDC callback state'\n",
"self.user = auth.authenticate(**kwargs)\n",
"if self.user and self.user.is_active:\n",
"return self.login_success()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Condition",
"Assign'",
"Return'",
"Condition",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Return'"
] |
[
"def FUNC_4(VAR_7):...\n",
"return hashlib.sha1(VAR_7).hexdigest()\n"
] | [
"def generate_hash(input_str):...\n",
"return hashlib.sha1(input_str).hexdigest()\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"@login_required...\n",
""
] | [
"@login_required...\n",
""
] | [
0,
0
] | [
"Condition",
"Condition"
] |
[
"@VAR_0.event...\n",
"VAR_3 = VAR_0.get_server(constants.Settings.mainServerID).get_channel(constants\n .Settings.mainChannelId)\n",
"VAR_4 = VAR_0.get_server(constants.Settings.mainServerID).get_channel(constants\n .Settings.logsChannelId)\n",
"print('Logged in !')\n",
"await asyncio.sleep(0.1)\n",
"VAR_32 = False\n",
"if datetime.now().strftime('%H') == '00' or set(sys.argv) & set(['refresh']):\n",
"VAR_14 = await VAR_0.send_message(VAR_3,\n '<:empty:317951266355544065> Updating stats ...')\n",
"print('Ready !')\n",
"print('Refreshing users stats ...')\n",
"await VAR_0.edit_message(VAR_14,\n '<:xmark:317951256889131008> Updating stats ... Fail !')\n",
"if not set(sys.argv) & set(['dev']):\n",
"if set(sys.argv) & set(['online']) and VAR_32 == False:\n",
"FUNC_1()\n",
"await VAR_0.send_message(VAR_3,\n '<:online:317951041838514179> Uso!<:Bot:317951180737347587> is now online !'\n )\n",
"await VAR_0.send_message(VAR_3,\n '<:online:317951041838514179> Uso!<:Bot:317951180737347587> is now online !'\n )\n",
"if set(sys.argv) & set(['dev']):\n",
"print(' - Done')\n",
"await VAR_0.change_presence(status=discord.Status('online'), game=discord.\n Game(name='Osu !'))\n",
"await VAR_0.change_presence(status=discord.Status('online'), game=discord.\n Game(name='Osu !'))\n",
"await VAR_0.change_presence(status=discord.Status('idle'), game=discord.\n Game(name='Dev mode'))\n",
"await VAR_0.edit_message(VAR_14,\n '<:check:317951246084341761> Updating stats ... Done !')\n",
"VAR_32 = True\n"
] | [
"@client.event...\n",
"mainChannel = client.get_server(constants.Settings.mainServerID).get_channel(\n constants.Settings.mainChannelId)\n",
"logsChannel = client.get_server(constants.Settings.mainServerID).get_channel(\n constants.Settings.logsChannelId)\n",
"print('Logged in !')\n",
"await asyncio.sleep(0.1)\n",
"hello = False\n",
"if datetime.now().strftime('%H') == '00' or set(sys.argv) & set(['refresh']):\n",
"message = await client.send_message(mainChannel,\n '<:empty:317951266355544065> Updating stats ...')\n",
"print('Ready !')\n",
"print('Refreshing users stats ...')\n",
"await client.edit_message(message,\n '<:xmark:317951256889131008> Updating stats ... Fail !')\n",
"if not set(sys.argv) & set(['dev']):\n",
"if set(sys.argv) & set(['online']) and hello == False:\n",
"refresh_all_pp_stats()\n",
"await client.send_message(mainChannel,\n '<:online:317951041838514179> Uso!<:Bot:317951180737347587> is now online !'\n )\n",
"await client.send_message(mainChannel,\n '<:online:317951041838514179> Uso!<:Bot:317951180737347587> is now online !'\n )\n",
"if set(sys.argv) & set(['dev']):\n",
"print(' - Done')\n",
"await client.change_presence(status=discord.Status('online'), game=discord.\n Game(name='Osu !'))\n",
"await client.change_presence(status=discord.Status('online'), game=discord.\n Game(name='Osu !'))\n",
"await client.change_presence(status=discord.Status('idle'), game=discord.\n Game(name='Dev mode'))\n",
"await client.edit_message(message,\n '<:check:317951246084341761> Updating stats ... Done !')\n",
"hello = True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Assign'"
] |
[
"def __init__(self, VAR_7=None):...\n",
"super(CLASS_0, self).__init__()\n",
"if VAR_7 is not None:\n",
"self.parse(VAR_7)\n"
] | [
"def __init__(self, path=None):...\n",
"super(Pyjo_Path, self).__init__()\n",
"if path is not None:\n",
"self.parse(path)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_10(VAR_2):...\n",
"VAR_24 = []\n",
"for sub in FUNC_8(VAR_2):\n",
"VAR_40 = -1\n",
"return VAR_24\n",
"VAR_41 = False\n",
"for index, cursub in enumerate(VAR_24):\n",
"if sub['student_id'] == cursub['student_id'] and sub['sheet_id'] == cursub[\n",
"if not VAR_41:\n",
"VAR_41 = True\n",
"VAR_24.append(sub)\n",
"if VAR_40 > -1:\n",
"if sub['time'] > cursub['time']:\n",
"VAR_24[VAR_40] = sub\n",
"VAR_40 = index\n"
] | [
"def get_current_full(db):...\n",
"current_submission = []\n",
"for sub in get_all_full(db):\n",
"found_older_submission_index = -1\n",
"return current_submission\n",
"found_correct_student_and_sheet = False\n",
"for index, cursub in enumerate(current_submission):\n",
"if sub['student_id'] == cursub['student_id'] and sub['sheet_id'] == cursub[\n",
"if not found_correct_student_and_sheet:\n",
"found_correct_student_and_sheet = True\n",
"current_submission.append(sub)\n",
"if found_older_submission_index > -1:\n",
"if sub['time'] > cursub['time']:\n",
"current_submission[found_older_submission_index] = sub\n",
"found_older_submission_index = index\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"For",
"Assign'",
"Return'",
"Assign'",
"For",
"Condition",
"Condition",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_9(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_0.debug(_('Updating volume stats'))\n",
"VAR_34 = {}\n",
"VAR_35 = 'eqlx'\n",
"if self.configuration:\n",
"VAR_35 = self.configuration.safe_get('volume_backend_name')\n",
"VAR_34['volume_backend_name'] = VAR_35 or 'eqlx'\n",
"VAR_34['vendor_name'] = 'Dell'\n",
"VAR_34['driver_version'] = self.VERSION\n",
"VAR_34['storage_protocol'] = 'iSCSI'\n",
"VAR_34['reserved_percentage'] = 0\n",
"VAR_34['QoS_support'] = False\n",
"VAR_34['total_capacity_gb'] = 'infinite'\n",
"VAR_34['free_capacity_gb'] = 'infinite'\n",
"for line in self._eql_execute('pool', 'select', self.configuration.\n",
"if line.startswith('TotalCapacity:'):\n",
"self._stats = VAR_34\n",
"VAR_50 = line.rstrip().partition(' ')\n",
"if line.startswith('FreeSpace:'):\n",
"VAR_34['total_capacity_gb'] = self._get_space_in_gb(VAR_50[-1])\n",
"VAR_50 = line.rstrip().partition(' ')\n",
"VAR_34['free_capacity_gb'] = self._get_space_in_gb(VAR_50[-1])\n"
] | [
"def _update_volume_stats(self):...\n",
"\"\"\"docstring\"\"\"\n",
"LOG.debug(_('Updating volume stats'))\n",
"data = {}\n",
"backend_name = 'eqlx'\n",
"if self.configuration:\n",
"backend_name = self.configuration.safe_get('volume_backend_name')\n",
"data['volume_backend_name'] = backend_name or 'eqlx'\n",
"data['vendor_name'] = 'Dell'\n",
"data['driver_version'] = self.VERSION\n",
"data['storage_protocol'] = 'iSCSI'\n",
"data['reserved_percentage'] = 0\n",
"data['QoS_support'] = False\n",
"data['total_capacity_gb'] = 'infinite'\n",
"data['free_capacity_gb'] = 'infinite'\n",
"for line in self._eql_execute('pool', 'select', self.configuration.\n",
"if line.startswith('TotalCapacity:'):\n",
"self._stats = data\n",
"out_tup = line.rstrip().partition(' ')\n",
"if line.startswith('FreeSpace:'):\n",
"data['total_capacity_gb'] = self._get_space_in_gb(out_tup[-1])\n",
"out_tup = line.rstrip().partition(' ')\n",
"data['free_capacity_gb'] = self._get_space_in_gb(out_tup[-1])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_15(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_11 = VAR_1[:]\n",
"VAR_11.extend(FUNC_7(self))\n",
"VAR_13 = ()\n",
"if not self.exported:\n",
"VAR_13 += 'set_treasury.expensesheet',\n",
"if self.status == 'valid' and self.paid_status != 'resulted':\n",
"VAR_13 += 'add_payment.expensesheet',\n",
"if VAR_13:\n",
"VAR_11.append((Allow, 'group:admin', VAR_13))\n",
"VAR_11.extend(FUNC_6(self))\n",
"VAR_11.append((Allow, 'group:manager', VAR_13))\n",
"return VAR_11\n"
] | [
"def get_expense_sheet_default_acl(self):...\n",
"\"\"\"docstring\"\"\"\n",
"acl = DEFAULT_PERM_NEW[:]\n",
"acl.extend(_get_admin_status_acl(self))\n",
"admin_perms = ()\n",
"if not self.exported:\n",
"admin_perms += 'set_treasury.expensesheet',\n",
"if self.status == 'valid' and self.paid_status != 'resulted':\n",
"admin_perms += 'add_payment.expensesheet',\n",
"if admin_perms:\n",
"acl.append((Allow, 'group:admin', admin_perms))\n",
"acl.extend(_get_user_status_acl(self))\n",
"acl.append((Allow, 'group:manager', admin_perms))\n",
"return acl\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Assign'",
"Condition",
"AugAssign'",
"Condition",
"AugAssign'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_4(self, VAR_12, VAR_13):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_25 = self.file_db\n",
"if path.splitext(VAR_12)[1].replace('.', '').lower() != 'txt':\n",
"if not VAR_25.fileExists(VAR_12):\n",
"if path.basename(VAR_12) == METADATA_FILENAME:\n",
"VAR_25.addFile(VAR_12, VAR_13=mod_time)\n",
"if VAR_13 > VAR_25.getModTime(VAR_12):\n",
"def FUNC_12():...\n",
"VAR_25.invalidateCached(VAR_12)\n",
"VAR_39 = ''\n",
"VAR_25.updateModTime(VAR_12, VAR_13)\n",
"if not VAR_3:\n",
"logging.error('Could not read metafile!', full_traceback())\n",
"return VAR_39\n",
"VAR_39 = f.read()\n",
"VAR_39 = self.getDropboxFile(VAR_12).decode()\n"
] | [
"def fileToDB(self, filepath, mod_time):...\n",
"\"\"\"docstring\"\"\"\n",
"file_db = self.file_db\n",
"if path.splitext(filepath)[1].replace('.', '').lower() != 'txt':\n",
"if not file_db.fileExists(filepath):\n",
"if path.basename(filepath) == METADATA_FILENAME:\n",
"file_db.addFile(filepath, mod_time=mod_time)\n",
"if mod_time > file_db.getModTime(filepath):\n",
"def getMetadata():...\n",
"file_db.invalidateCached(filepath)\n",
"metadata = ''\n",
"file_db.updateModTime(filepath, mod_time)\n",
"if not FROM_DROPBOX:\n",
"logging.error('Could not read metafile!', full_traceback())\n",
"return metadata\n",
"metadata = f.read()\n",
"metadata = self.getDropboxFile(filepath).decode()\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Condition",
"Condition",
"Expr'",
"Condition",
"FunctionDef'",
"Expr'",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Return'",
"Assign'",
"Assign'"
] |
[
"def FUNC_1(VAR_0):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_0.culprit is None:\n",
"return {}\n",
"return {'commit_position': VAR_0.culprit.commit_position, 'git_hash': VAR_0\n .culprit.revision, 'url': VAR_0.culprit.url, 'confidence': VAR_0.\n culprit.confidence}\n"
] | [
"def _GetCulpritInfo(analysis):...\n",
"\"\"\"docstring\"\"\"\n",
"if analysis.culprit is None:\n",
"return {}\n",
"return {'commit_position': analysis.culprit.commit_position, 'git_hash':\n analysis.culprit.revision, 'url': analysis.culprit.url, 'confidence':\n analysis.culprit.confidence}\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_13(self, VAR_20, VAR_21=True):...\n",
"def FUNC_39(VAR_62):...\n",
"return json.loads(VAR_62) if VAR_21 else VAR_62\n"
] | [
"def _render(self, request, as_json=True):...\n",
"def get_str(_str):...\n",
"return json.loads(_str) if as_json else _str\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"FunctionDef'",
"Return'"
] |
[
"def FUNC_3(VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_12 = dbconn2.read_cnf('/students/' + VAR_0 + '/.my.cnf')\n",
"VAR_12['db'] = VAR_1\n",
"VAR_6 = dbconn2.connect(VAR_12)\n",
"return VAR_6\n"
] | [
"def dbConnect(database):...\n",
"\"\"\"docstring\"\"\"\n",
"dsn = dbconn2.read_cnf('/students/' + USER + '/.my.cnf')\n",
"dsn['db'] = database\n",
"conn = dbconn2.connect(dsn)\n",
"return conn\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |