lines:      sequence, lengths 1–444
raw_lines:  sequence, lengths 1–444
label:      sequence, lengths 1–444
type:       sequence, lengths 1–444
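The four features are parallel per-record sequences of equal length: `lines` holds the obfuscated source lines (identifiers renamed to `VAR_n`/`FUNC_n`/`CLASS_n`), `raw_lines` the original lines, `label` an integer per line (0 on unflagged lines; the nonzero values in the samples below, such as the 4 on SQL queries built via string formatting, appear to flag security-relevant lines), and `type` the AST node kind of each line. A minimal sketch of iterating one record under those assumptions; the field names come from the schema above, while the loader and the `data.json` path are hypothetical:

```python
import json

def iter_record(record):
    """Yield one row per source line, pairing the four parallel sequences.

    Assumes `record` is a dict with equal-length lists under the keys
    shown in the schema above.
    """
    for obf, raw, label, node_type in zip(
        record["lines"], record["raw_lines"], record["label"], record["type"]
    ):
        yield {"obfuscated": obf, "original": raw, "label": label, "type": node_type}

with open("data.json") as fh:  # hypothetical file name
    for record in json.load(fh):
        for row in iter_record(record):
            # label == 0 appears to mark unflagged lines; print the rest.
            if row["label"] != 0:
                print(row["label"], row["type"], row["original"].rstrip())
```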
[ "def FUNC_6(self, VAR_11, VAR_52=None):...\n", "if VAR_52 is None and not isinstance(VAR_101.site, FakeSubreddit):\n", "VAR_52 = VAR_101.site\n", "if VAR_52:\n", "if not VAR_11:\n", "VAR_52 = None\n", "VAR_52 = Subreddit._by_name(VAR_52)\n", "VAR_101.errors.add(errors.SUBREDDIT_NOEXIST)\n", "return self.error(errors.NO_URL)\n", "VAR_11 = utils.sanitize_url(VAR_11)\n", "VAR_52 = None\n", "if VAR_11 == 'self':\n", "return VAR_11\n", "if VAR_11:\n", "VAR_108 = Link._by_url(VAR_11, VAR_52)\n", "return VAR_11\n", "return self.error(errors.BAD_URL)\n", "self.error(errors.ALREADY_SUB)\n", "return utils.tup(VAR_108)\n" ]
[ "def run(self, url, sr=None):...\n", "if sr is None and not isinstance(c.site, FakeSubreddit):\n", "sr = c.site\n", "if sr:\n", "if not url:\n", "sr = None\n", "sr = Subreddit._by_name(sr)\n", "c.errors.add(errors.SUBREDDIT_NOEXIST)\n", "return self.error(errors.NO_URL)\n", "url = utils.sanitize_url(url)\n", "sr = None\n", "if url == 'self':\n", "return url\n", "if url:\n", "l = Link._by_url(url, sr)\n", "return url\n", "return self.error(errors.BAD_URL)\n", "self.error(errors.ALREADY_SUB)\n", "return utils.tup(l)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Expr'", "Return'", "Assign'", "Assign'", "Condition", "Return'", "Condition", "Assign'", "Return'", "Return'", "Expr'", "Return'" ]
[ "@staticmethod...\n", "if callable(VAR_8):\n", "VAR_39 = CLASS_1.validate_python_type(VAR_6, VAR_8)\n", "if isinstance(VAR_8, collections.Mapping):\n", "if not VAR_39:\n", "VAR_39 = CLASS_1.validate_jmessage(VAR_6, VAR_8)\n", "if isinstance(VAR_8, str):\n", "log.err('-- Invalid python_type, in [%s] expected %s' % (VAR_6, VAR_8))\n", "return VAR_39\n", "if not VAR_39:\n", "VAR_39 = CLASS_1.validate_GLtype(VAR_6, VAR_8)\n", "if isinstance(VAR_8, collections.Iterable):\n", "log.err('-- Invalid JSON/dict [%s] expected %s' % (VAR_6, VAR_8))\n", "return VAR_39\n", "if not VAR_39:\n", "if len(VAR_6) == 0:\n", "log.err('-- Failed Match in regexp [%s] against %s' % (VAR_6, VAR_8))\n", "return VAR_39\n", "return True\n", "VAR_39 = all(CLASS_1.validate_type(x, VAR_8[0]) for x in VAR_6)\n", "if not VAR_39:\n", "log.err('-- List validation failed [%s] of %s' % (VAR_6, VAR_8))\n", "return VAR_39\n" ]
[ "@staticmethod...\n", "if callable(gl_type):\n", "retval = BaseHandler.validate_python_type(value, gl_type)\n", "if isinstance(gl_type, collections.Mapping):\n", "if not retval:\n", "retval = BaseHandler.validate_jmessage(value, gl_type)\n", "if isinstance(gl_type, str):\n", "log.err('-- Invalid python_type, in [%s] expected %s' % (value, gl_type))\n", "return retval\n", "if not retval:\n", "retval = BaseHandler.validate_GLtype(value, gl_type)\n", "if isinstance(gl_type, collections.Iterable):\n", "log.err('-- Invalid JSON/dict [%s] expected %s' % (value, gl_type))\n", "return retval\n", "if not retval:\n", "if len(value) == 0:\n", "log.err('-- Failed Match in regexp [%s] against %s' % (value, gl_type))\n", "return retval\n", "return True\n", "retval = all(BaseHandler.validate_type(x, gl_type[0]) for x in value)\n", "if not retval:\n", "log.err('-- List validation failed [%s] of %s' % (value, gl_type))\n", "return retval\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Expr'", "Return'", "Condition", "Assign'", "Condition", "Expr'", "Return'", "Condition", "Condition", "Expr'", "Return'", "Return'", "Assign'", "Condition", "Expr'", "Return'" ]
[ "def __exit__(self, VAR_7, VAR_8, VAR_9):...\n", "" ]
[ "def __exit__(self, type, value, tb):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_6(self, VAR_11='', VAR_6=None):...\n", "VAR_5 = self.session.query(self.obj)\n", "VAR_5 = self._get_base_query(VAR_5=query, VAR_6=filters)\n", "VAR_25 = VAR_5.all()\n", "VAR_27 = GroupByDateYear(VAR_11, 'Group by Year')\n", "return VAR_27.apply(VAR_25)\n" ]
[ "def query_year_group(self, group_by='', filters=None):...\n", "query = self.session.query(self.obj)\n", "query = self._get_base_query(query=query, filters=filters)\n", "query_result = query.all()\n", "group_year = GroupByDateYear(group_by, 'Group by Year')\n", "return group_year.apply(query_result)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "\"\"\"string\"\"\"\n", "import base64\n", "import json\n", "import logging\n", "import optparse\n", "import os\n", "import signal\n", "import sys\n", "import time\n", "import xsrf_client\n", "from utils import net\n", "from utils import on_error\n", "from utils import subprocess42\n", "from utils import zip_package\n", "VAR_0 = os.path.abspath(zip_package.get_main_script_path())\n", "VAR_1 = 102400\n", "VAR_2 = 30\n", "VAR_3 = 10\n", "VAR_4 = 3\n", "VAR_5 = signal.SIGBREAK if sys.platform == 'win32' else signal.SIGTERM\n", "VAR_6 = 0\n", "def FUNC_0():...\n", "\"\"\"docstring\"\"\"\n", "VAR_20 = time.time()\n", "if VAR_20 > VAR_6:\n", "VAR_6 = VAR_20\n", "return VAR_6\n" ]
[ "\"\"\"Runs a Swarming task.\n\nDownloads all the necessary files to run the task, executes the command and\nstreams results back to the Swarming server.\n\nThe process exit code is 0 when the task was executed, even if the task itself\nfailed. If there's any failure in the setup or teardown, like invalid packet\nresponse, failure to contact the server, etc, a non zero exit code is used. It's\nup to the calling process (bot_main.py) to signal that there was an internal\nfailure and to cancel this task run and ask the server to retry it.\n\"\"\"\n", "import base64\n", "import json\n", "import logging\n", "import optparse\n", "import os\n", "import signal\n", "import sys\n", "import time\n", "import xsrf_client\n", "from utils import net\n", "from utils import on_error\n", "from utils import subprocess42\n", "from utils import zip_package\n", "THIS_FILE = os.path.abspath(zip_package.get_main_script_path())\n", "MAX_CHUNK_SIZE = 102400\n", "MAX_PACKET_INTERVAL = 30\n", "MIN_PACKET_INTERNAL = 10\n", "OUT_VERSION = 3\n", "SIG_BREAK_OR_TERM = (signal.SIGBREAK if sys.platform == 'win32' else signal\n .SIGTERM)\n", "_last_now = 0\n", "def monotonic_time():...\n", "\"\"\"docstring\"\"\"\n", "now = time.time()\n", "if now > _last_now:\n", "_last_now = now\n", "return _last_now\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_0(VAR_0):...\n", "VAR_6 = VAR_0.split('/')\n", "VAR_7 = collections.deque()\n", "for entry in VAR_6:\n", "if not entry:\n", "if VAR_6[0] == '':\n", "if entry == '.':\n", "VAR_7.appendleft('')\n", "if VAR_6[-1] == '':\n", "if entry == '..':\n", "VAR_7.append('')\n", "return '/'.join(VAR_7)\n", "if len(VAR_7) > 0:\n", "VAR_7.append(entry)\n", "VAR_7.pop()\n" ]
[ "def normpath(path):...\n", "old_path = path.split('/')\n", "new_path = collections.deque()\n", "for entry in old_path:\n", "if not entry:\n", "if old_path[0] == '':\n", "if entry == '.':\n", "new_path.appendleft('')\n", "if old_path[-1] == '':\n", "if entry == '..':\n", "new_path.append('')\n", "return '/'.join(new_path)\n", "if len(new_path) > 0:\n", "new_path.append(entry)\n", "new_path.pop()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Condition", "Condition", "Condition", "Expr'", "Condition", "Condition", "Expr'", "Return'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_4(self):...\n", "\"\"\"docstring\"\"\"\n", "if self.conn:\n", "self.conn.close()\n", "if self.tunnel:\n", "log.info('Connection to the database has been closed.')\n", "self.tunnel.stop()\n", "self.tunnel_opened = False\n", "log.info('SSH tunnel has been closed.')\n", "return True\n" ]
[ "def disconnect(self):...\n", "\"\"\"docstring\"\"\"\n", "if self.conn:\n", "self.conn.close()\n", "if self.tunnel:\n", "log.info('Connection to the database has been closed.')\n", "self.tunnel.stop()\n", "self.tunnel_opened = False\n", "log.info('SSH tunnel has been closed.')\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Expr'", "Condition", "Expr'", "Expr'", "Assign'", "Expr'", "Return'" ]
[ "def __init__(self, VAR_0, **VAR_1):...\n", "self._configure(VAR_1)\n", "self.conn = sqlite3.connect(VAR_0.name)\n", "self.conn.execute('string')\n", "self.conn.commit()\n", "self.conn.text_factory = str\n" ]
[ "def __init__(self, file, **kwargs):...\n", "self._configure(kwargs)\n", "self.conn = sqlite3.connect(file.name)\n", "self.conn.execute(\n \"\"\"CREATE TABLE IF NOT EXISTS `webpages`(\n `id` INTEGER PRIMARY KEY,\n `title` VARCHAR DEFAULT NULL,\n `content` VARCHAR DEFAULT NULL,\n `url` VARCHAR DEFAULT NULL UNIQUE \n );\n \"\"\"\n )\n", "self.conn.commit()\n", "self.conn.text_factory = str\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'" ]
[ "def FUNC_27(self):...\n", "VAR_16 = vimsupport.CurrentFiletypes()\n", "VAR_17 = self._user_options['filetype_specific_completion_to_disable']\n", "return not all([(x in VAR_17) for x in VAR_16])\n" ]
[ "def CurrentFiletypeCompletionEnabled(self):...\n", "filetypes = vimsupport.CurrentFiletypes()\n", "filetype_to_disable = self._user_options[\n 'filetype_specific_completion_to_disable']\n", "return not all([(x in filetype_to_disable) for x in filetypes])\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_13(self):...\n", "self.run_test_case(self.scenario.upsert_load_balancer())\n" ]
[ "def test_a_upsert_load_balancer(self):...\n", "self.run_test_case(self.scenario.upsert_load_balancer())\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def __init__(self, VAR_6, VAR_7=None):...\n", "if VAR_7 is not None:\n", "self._r_root_item_ = VAR_7._r_root_item_\n", "self._r_root_item_ = VAR_6\n", "self._r_steps_ = list(VAR_7._r_steps_)\n", "self._r_steps_ = []\n" ]
[ "def __init__(self, initial_item, path_to_clone=None):...\n", "if path_to_clone is not None:\n", "self._r_root_item_ = path_to_clone._r_root_item_\n", "self._r_root_item_ = initial_item\n", "self._r_steps_ = list(path_to_clone._r_steps_)\n", "self._r_steps_ = []\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_1(VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = psycopg2.connect(database=DBNAME)\n", "VAR_3 = VAR_2.cursor()\n", "VAR_3.execute(\"insert into posts values('%s')\" % VAR_1)\n", "VAR_2.commit()\n", "VAR_2.close()\n" ]
[ "def add_post(content):...\n", "\"\"\"docstring\"\"\"\n", "db = psycopg2.connect(database=DBNAME)\n", "c = db.cursor()\n", "c.execute(\"insert into posts values('%s')\" % content)\n", "db.commit()\n", "db.close()\n" ]
[ 0, 0, 0, 0, 4, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_21(VAR_27, VAR_28, VAR_29, VAR_2):...\n", "\"\"\"docstring\"\"\"\n", "VAR_49 = len(VAR_27)\n", "if VAR_49 <= ray_constants.PICKLE_OBJECT_WARNING_SIZE:\n", "return\n", "VAR_50 = 'string'.format(VAR_29, VAR_28, VAR_49)\n", "FUNC_2(VAR_2, ray_constants.PICKLING_LARGE_OBJECT_PUSH_ERROR, VAR_50, VAR_5\n =worker.task_driver_id)\n" ]
[ "def check_oversized_pickle(pickled, name, obj_type, worker):...\n", "\"\"\"docstring\"\"\"\n", "length = len(pickled)\n", "if length <= ray_constants.PICKLE_OBJECT_WARNING_SIZE:\n", "return\n", "warning_message = (\n 'Warning: The {} {} has size {} when pickled. It will be stored in Redis, which could cause memory issues. This may mean that its definition uses a large array or other object.'\n .format(obj_type, name, length))\n", "push_error_to_driver(worker, ray_constants.PICKLING_LARGE_OBJECT_PUSH_ERROR,\n warning_message, driver_id=worker.task_driver_id)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Return'", "Assign'", "Expr'" ]
[ "def FUNC_26():...\n", "return self.closed\n" ]
[ "def stop():...\n", "return self.closed\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_23(self, VAR_34):...\n", "\"\"\"docstring\"\"\"\n", "return {VAR_52[0]: {self.FIELD_NAME: VAR_52[1], self.TABLE_NAME: VAR_52[2],\n self.DATA_TYPE: VAR_52[3]} for VAR_52 in VAR_34}\n" ]
[ "def _parse_field_mapping(self, field_mapping):...\n", "\"\"\"docstring\"\"\"\n", "return {field[0]: {self.FIELD_NAME: field[1], self.TABLE_NAME: field[2],\n self.DATA_TYPE: field[3]} for field in field_mapping}\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "@handled_slot(bool)...\n", "print(f\"{'Locking' if VAR_4 else 'Unlocking'} slow piezo.\")\n", "self.lock_slow_piezo_action.setChecked(not VAR_4)\n", "self.matisse.set_slow_piezo_lock(VAR_4)\n", "self.lock_slow_piezo_action.setChecked(VAR_4)\n" ]
[ "@handled_slot(bool)...\n", "print(f\"{'Locking' if checked else 'Unlocking'} slow piezo.\")\n", "self.lock_slow_piezo_action.setChecked(not checked)\n", "self.matisse.set_slow_piezo_lock(checked)\n", "self.lock_slow_piezo_action.setChecked(checked)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def __init__(self, VAR_18, *VAR_15, **VAR_16):...\n", "CLASS_3.__init__(self, VAR_18, errors.NO_USER, *VAR_15, **kw)\n" ]
[ "def __init__(self, item, *a, **kw):...\n", "VRequired.__init__(self, item, errors.NO_USER, *a, **kw)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_20(self):...\n", "def FUNC_29(VAR_34):...\n", "self.assertEqual(logging.WARNING, VAR_34)\n", "self.mock(logging_utils, 'set_console_level', FUNC_29)\n", "def FUNC_30(VAR_35):...\n", "self.assertEqual(None, VAR_35)\n", "return 0\n" ]
[ "def test_main(self):...\n", "def check(x):...\n", "self.assertEqual(logging.WARNING, x)\n", "self.mock(logging_utils, 'set_console_level', check)\n", "def run_bot(error):...\n", "self.assertEqual(None, error)\n", "return 0\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Expr'", "Expr'", "FunctionDef'", "Expr'", "Return'" ]
[ "def FUNC_0():...\n", "VAR_0 = input().split(' ')\n", "VAR_1, VAR_2 = int(VAR_0[0]), int(VAR_0[1])\n", "VAR_3 = {i: set() for i in range(VAR_1)}\n", "VAR_4 = {}\n", "VAR_5 = {}\n", "for i in range(VAR_2):\n", "VAR_10 = input().split(' ')\n", "return VAR_1, VAR_3, VAR_4, VAR_5\n", "VAR_11, VAR_12, VAR_13 = int(VAR_10[0]), int(VAR_10[1]), int(VAR_10[2])\n", "if VAR_11 != VAR_12:\n", "VAR_3[VAR_11].add(VAR_12)\n", "VAR_3[VAR_12].add(VAR_11)\n", "VAR_14 = frozenset((VAR_11, VAR_12))\n", "if VAR_14 in VAR_4 and VAR_13 >= VAR_4[VAR_14]:\n", "VAR_5[VAR_14] += 1 if VAR_13 == VAR_4[VAR_14] else 0\n", "VAR_4[VAR_14] = VAR_13\n", "VAR_5[VAR_14] = 1\n" ]
[ "def inp():...\n", "first_line = input().split(' ')\n", "num_points, num_trails = int(first_line[0]), int(first_line[1])\n", "adj_lst = {i: set() for i in range(num_points)}\n", "trail_len = {}\n", "trail_len_duplicate_count = {}\n", "for i in range(num_trails):\n", "trail = input().split(' ')\n", "return num_points, adj_lst, trail_len, trail_len_duplicate_count\n", "node1, node2, length = int(trail[0]), int(trail[1]), int(trail[2])\n", "if node1 != node2:\n", "adj_lst[node1].add(node2)\n", "adj_lst[node2].add(node1)\n", "key = frozenset((node1, node2))\n", "if key in trail_len and length >= trail_len[key]:\n", "trail_len_duplicate_count[key] += 1 if length == trail_len[key] else 0\n", "trail_len[key] = length\n", "trail_len_duplicate_count[key] = 1\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Return'", "Assign'", "Condition", "Expr'", "Expr'", "Assign'", "Condition", "AugAssign'", "Assign'", "Assign'" ]
[ "def FUNC_11(self):...\n", "if self.submission_count > 0:\n", "VAR_10 = self.submissions[0]\n", "return None\n", "if VAR_10.submitters.count() > 0:\n", "return StudentGroup.get_exact(self.exercise.course_instance, VAR_10.\n submitters.all())\n" ]
[ "def get_group(self):...\n", "if self.submission_count > 0:\n", "s = self.submissions[0]\n", "return None\n", "if s.submitters.count() > 0:\n", "return StudentGroup.get_exact(self.exercise.course_instance, s.submitters.all()\n )\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Return'", "Condition", "Return'" ]
[ "def FUNC_2(VAR_4):...\n", "VAR_5 = '<title>{0}</title>'.format(self.MAIL_ADDRESS)\n", "VAR_6 = re.findall(VAR_5, VAR_2.written[0])\n", "self.assertEquals(len(VAR_6), 1)\n" ]
[ "def assert_response(_):...\n", "expected = '<title>{0}</title>'.format(self.MAIL_ADDRESS)\n", "matches = re.findall(expected, request.written[0])\n", "self.assertEquals(len(matches), 1)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_7(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_32 = self.run_command('hostname')\n", "self.hostname = VAR_32['stdout'].strip()\n", "self.log_debug('Hostname set to %s' % self.hostname)\n" ]
[ "def get_hostname(self):...\n", "\"\"\"docstring\"\"\"\n", "sout = self.run_command('hostname')\n", "self.hostname = sout['stdout'].strip()\n", "self.log_debug('Hostname set to %s' % self.hostname)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_23(self):...\n", "VAR_74 = FUNC_1(self.request.GET, VAR_3=('info_type', 'field', 'format'))\n", "return User.objects.filter(**query)\n" ]
[ "def users(self):...\n", "query = strip_parameters(self.request.GET, skip_parameters=('info_type',\n 'field', 'format'))\n", "return User.objects.filter(**query)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_0(self, VAR_1, VAR_6, VAR_7, VAR_3):...\n", "return '<textarea name=\"%s\" rows=5 cols=40>%s</textarea>' % (html_escape(\n VAR_6), html_escape(VAR_7 or ''))\n" ]
[ "def make_input(self, version, name, value, attribute):...\n", "return '<textarea name=\"%s\" rows=5 cols=40>%s</textarea>' % (html_escape(\n name), html_escape(value or ''))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_8(self, VAR_14, VAR_11):...\n", "\"\"\"docstring\"\"\"\n", "VAR_28 = self._get_groupdict(VAR_14)\n", "for VAR_33 in ('line', 'column', 'end_line', 'end_column'):\n", "if VAR_33 in VAR_28 and VAR_28[VAR_33]:\n", "if 'origin' in VAR_28:\n", "VAR_28[VAR_33] = int(VAR_28[VAR_33])\n", "VAR_28['origin'] = '{} ({})'.format(str(self.__class__.__name__), str(\n VAR_28['origin']))\n", "return Result.from_values(origin=groups.get('origin', self), message=groups\n .get('message', ''), VAR_12=filename, severity=int(groups.get(\n 'severity', RESULT_SEVERITY.NORMAL)), line=groups.get('line', None),\n column=groups.get('column', None), end_line=groups.get('end_line', None\n ), end_column=groups.get('end_column', None))\n" ]
[ "def match_to_result(self, match, filename):...\n", "\"\"\"docstring\"\"\"\n", "groups = self._get_groupdict(match)\n", "for variable in ('line', 'column', 'end_line', 'end_column'):\n", "if variable in groups and groups[variable]:\n", "if 'origin' in groups:\n", "groups[variable] = int(groups[variable])\n", "groups['origin'] = '{} ({})'.format(str(self.__class__.__name__), str(\n groups['origin']))\n", "return Result.from_values(origin=groups.get('origin', self), message=groups\n .get('message', ''), file=filename, severity=int(groups.get('severity',\n RESULT_SEVERITY.NORMAL)), line=groups.get('line', None), column=groups.\n get('column', None), end_line=groups.get('end_line', None), end_column=\n groups.get('end_column', None))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Condition", "Condition", "Assign'", "Assign'", "Return'" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self.format_wildcards(self.rule.message) if self.rule.message else None\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self.format_wildcards(self.rule.message) if self.rule.message else None\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "@functools.wraps(VAR_3)...\n", "VAR_23 = VAR_5.pop('timeout', None)\n", "VAR_24 = eventlet.spawn(VAR_3, self, *VAR_4, **kwargs)\n", "if VAR_23 is None:\n", "return VAR_24.wait()\n", "VAR_36 = eventlet.spawn_after(VAR_23, VAR_24.kill)\n", "VAR_48 = VAR_24.wait()\n", "VAR_36.cancel()\n", "return VAR_48\n" ]
[ "@functools.wraps(f)...\n", "timeout = kwargs.pop('timeout', None)\n", "gt = eventlet.spawn(f, self, *args, **kwargs)\n", "if timeout is None:\n", "return gt.wait()\n", "kill_thread = eventlet.spawn_after(timeout, gt.kill)\n", "res = gt.wait()\n", "kill_thread.cancel()\n", "return res\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_13(self, VAR_14):...\n", "" ]
[ "def is_float(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_1(VAR_0, VAR_1, VAR_2={}, VAR_3='bqsql', VAR_4='bqcache', VAR_5=False...\n", "VAR_8 = FUNC_0(VAR_0, VAR_1, **query_params)\n", "VAR_9 = os.path.join(VAR_4, '{}.pkl'.format(VAR_8))\n", "if not VAR_5 and os.path.exists(VAR_9):\n", "VAR_10 = pd.read_pickle(VAR_9)\n", "VAR_11 = os.path.join(VAR_3, '{}.sql'.format(VAR_1))\n", "return VAR_10\n", "VAR_13 = query_f.read()\n", "VAR_12 = VAR_13.format(**query_params)\n", "VAR_10 = pd.io.gbq.read_gbq(VAR_12, VAR_0=project_id, dialect='standard')\n", "os.makedirs(VAR_4, exist_ok=True)\n", "VAR_10.to_pickle(VAR_9)\n" ]
[ "def get_result(project_id, query_name, query_params={}, query_dir='bqsql',...\n", "qhash = query_hash(project_id, query_name, **query_params)\n", "cache_file_name = os.path.join(cache_dir, '{}.pkl'.format(qhash))\n", "if not reload and os.path.exists(cache_file_name):\n", "res = pd.read_pickle(cache_file_name)\n", "query_fn = os.path.join(query_dir, '{}.sql'.format(query_name))\n", "return res\n", "query_templ = query_f.read()\n", "query_str = query_templ.format(**query_params)\n", "res = pd.io.gbq.read_gbq(query_str, project_id=project_id, dialect='standard')\n", "os.makedirs(cache_dir, exist_ok=True)\n", "res.to_pickle(cache_file_name)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Return'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_31(self, VAR_17, VAR_15):...\n", "VAR_28 = FileManager()\n", "VAR_29 = ImageManager()\n", "for file_col in VAR_17.files:\n", "if self.is_file(file_col):\n", "for file_col in VAR_17.files:\n", "VAR_28.save_file(VAR_17.files[file_col], getattr(VAR_15, file_col))\n", "if self.is_image(file_col):\n", "VAR_29.save_file(VAR_17.files[file_col], getattr(VAR_15, file_col))\n" ]
[ "def _add_files(self, this_request, item):...\n", "fm = FileManager()\n", "im = ImageManager()\n", "for file_col in this_request.files:\n", "if self.is_file(file_col):\n", "for file_col in this_request.files:\n", "fm.save_file(this_request.files[file_col], getattr(item, file_col))\n", "if self.is_image(file_col):\n", "im.save_file(this_request.files[file_col], getattr(item, file_col))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Condition", "For", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_15(VAR_20, VAR_22, VAR_23, VAR_24, VAR_25):...\n", "VAR_17 = [('input', VAR_23), ('output', VAR_24)]\n", "VAR_16 = {}\n", "VAR_16['codename'] = '%03d' % VAR_22\n", "if VAR_25:\n", "VAR_16['public'] = '1'\n", "VAR_47 = FUNC_14(VAR_20)\n", "FUNC_8('dataset/%d/testcases/add' % VAR_47, VAR_17=files, VAR_16=args)\n" ]
[ "def add_testcase(task_id, num, input_file, output_file, public):...\n", "files = [('input', input_file), ('output', output_file)]\n", "args = {}\n", "args['codename'] = '%03d' % num\n", "if public:\n", "args['public'] = '1'\n", "dataset_id = get_task_active_dataset_id(task_id)\n", "admin_req('dataset/%d/testcases/add' % dataset_id, files=files, args=args)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_7(self):...\n", "VAR_16 = request.headers.get('X-XSRF-TOKEN', '')\n", "if not VAR_16:\n", "return False\n", "return safe_str_cmp(VAR_16, VAR_15.get('XSRF-TOKEN', ''))\n" ]
[ "def check_csrf_token(self):...\n", "token = request.headers.get('X-XSRF-TOKEN', '')\n", "if not token:\n", "return False\n", "return safe_str_cmp(token, session.get('XSRF-TOKEN', ''))\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_39():...\n", "from lib.evaluators.PyQt4Evaluator import Evaluator\n", "return Evaluator()\n" ]
[ "def ev_init():...\n", "from lib.evaluators.PyQt4Evaluator import Evaluator\n", "return Evaluator()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "ImportFrom'", "Return'" ]
[ "def FUNC_0(self):...\n", "super(CLASS_0, self).setUp()\n", "VAR_3 = os.path.join(os.getcwd(), 'bandit', 'plugins')\n", "VAR_4 = b_config.BanditConfig()\n", "self.b_mgr = b_manager.BanditManager(VAR_4, 'file')\n", "self.b_mgr.b_conf._settings['plugins_dir'] = VAR_3\n", "self.b_mgr.b_ts = b_test_set.BanditTestSet(config=b_conf)\n" ]
[ "def setUp(self):...\n", "super(FunctionalTests, self).setUp()\n", "path = os.path.join(os.getcwd(), 'bandit', 'plugins')\n", "b_conf = b_config.BanditConfig()\n", "self.b_mgr = b_manager.BanditManager(b_conf, 'file')\n", "self.b_mgr.b_conf._settings['plugins_dir'] = path\n", "self.b_mgr.b_ts = b_test_set.BanditTestSet(config=b_conf)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_0(self):...\n", "return hpcommon.HP3PARCommon(self.configuration)\n" ]
[ "def _init_common(self):...\n", "return hpcommon.HP3PARCommon(self.configuration)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_29(self):...\n", "\"\"\"docstring\"\"\"\n", "tournament.editPlayer(new_name='Joan Jett')\n" ]
[ "def test_edit_missing_new_info(self):...\n", "\"\"\"docstring\"\"\"\n", "tournament.editPlayer(new_name='Joan Jett')\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Expr'" ]
[ "from argparse import ArgumentParser\n", "from ply import lex, yacc\n", "import locale\n", "import traceback\n", "import sys\n", "import git\n", "import re\n", "import os\n", "def __init__(self, VAR_3, VAR_4):...\n", "self.tok = VAR_3\n", "self.txt = VAR_4\n", "def __init__(self, VAR_5, VAR_4):...\n", "self.el = VAR_5\n", "self.txt = VAR_4\n", "def __init__(self):...\n", "self.license_files = 0\n", "self.exception_files = 0\n", "self.licenses = []\n", "self.exceptions = {}\n", "def FUNC_0(VAR_0):...\n", "VAR_6 = ['preferred', 'other', 'exceptions']\n", "VAR_7 = VAR_0.head.commit.tree['LICENSES']\n", "VAR_8 = CLASS_2()\n", "for d in VAR_6:\n", "for VAR_5 in VAR_7[d].traverse():\n", "return VAR_8\n", "if not os.path.isfile(VAR_5.path):\n", "VAR_23 = None\n", "for l in open(VAR_5.path).readlines():\n", "if l.startswith('Valid-License-Identifier:'):\n", "VAR_26 = l.split(':')[1].strip().upper()\n", "if l.startswith('SPDX-Exception-Identifier:'):\n", "if VAR_26 in VAR_8.licenses:\n", "VAR_23 = l.split(':')[1].strip().upper()\n", "if l.startswith('SPDX-Licenses:'):\n", "VAR_8.licenses.append(VAR_26)\n", "VAR_8.exceptions[VAR_23] = []\n", "for lic in l.split(':')[1].upper().strip().replace(' ', '').replace('\\t', ''\n", "if l.startswith('License-Text:'):\n", "if not lic in VAR_8.licenses:\n", "if VAR_23:\n", "VAR_8.exceptions[VAR_23].append(lic)\n", "if not len(VAR_8.exceptions[VAR_23]):\n", "VAR_8.license_files += 1\n", "VAR_8.exception_files += 1\n" ]
[ "from argparse import ArgumentParser\n", "from ply import lex, yacc\n", "import locale\n", "import traceback\n", "import sys\n", "import git\n", "import re\n", "import os\n", "def __init__(self, tok, txt):...\n", "self.tok = tok\n", "self.txt = txt\n", "def __init__(self, el, txt):...\n", "self.el = el\n", "self.txt = txt\n", "def __init__(self):...\n", "self.license_files = 0\n", "self.exception_files = 0\n", "self.licenses = []\n", "self.exceptions = {}\n", "def read_spdxdata(repo):...\n", "license_dirs = ['preferred', 'other', 'exceptions']\n", "lictree = repo.head.commit.tree['LICENSES']\n", "spdx = SPDXdata()\n", "for d in license_dirs:\n", "for el in lictree[d].traverse():\n", "return spdx\n", "if not os.path.isfile(el.path):\n", "exception = None\n", "for l in open(el.path).readlines():\n", "if l.startswith('Valid-License-Identifier:'):\n", "lid = l.split(':')[1].strip().upper()\n", "if l.startswith('SPDX-Exception-Identifier:'):\n", "if lid in spdx.licenses:\n", "exception = l.split(':')[1].strip().upper()\n", "if l.startswith('SPDX-Licenses:'):\n", "spdx.licenses.append(lid)\n", "spdx.exceptions[exception] = []\n", "for lic in l.split(':')[1].upper().strip().replace(' ', '').replace('\\t', ''\n", "if l.startswith('License-Text:'):\n", "if not lic in spdx.licenses:\n", "if exception:\n", "spdx.exceptions[exception].append(lic)\n", "if not len(spdx.exceptions[exception]):\n", "spdx.license_files += 1\n", "spdx.exception_files += 1\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "Import'", "Import'", "Import'", "Import'", "Import'", "Import'", "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "For", "For", "Return'", "Condition", "Assign'", "For", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "For", "Condition", "Condition", "Condition", "Expr'", "Condition", "AugAssign'", "AugAssign'" ]
[ "def FUNC_0(VAR_0, VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = Admin(VAR_0, name='Test')\n", "assert 'model' in record_adminview\n", "assert 'modelview' in record_adminview\n", "VAR_3 = record_adminview.pop('model')\n", "VAR_4 = record_adminview.pop('modelview')\n", "VAR_2.add_view(VAR_4(VAR_3, VAR_1.session, **record_adminview))\n", "VAR_5 = {str(item.name): item for item in VAR_2.menu()}\n", "assert 'Records' in VAR_5\n", "assert VAR_5['Records'].is_category()\n", "VAR_6 = {str(item.name): item for item in VAR_5['Records'].get_children()}\n", "assert 'Record Metadata' in VAR_6\n", "assert isinstance(VAR_6['Record Metadata'], menu.MenuView)\n", "VAR_7 = str(uuid.uuid4())\n", "Record.create({'title': 'test'}, id_=rec_uuid)\n", "VAR_1.session.commit()\n", "VAR_8 = url_for('recordmetadata.index_view')\n", "VAR_9 = url_for('recordmetadata.delete_view')\n", "VAR_10 = url_for('recordmetadata.details_view', id=rec_uuid)\n", "VAR_11 = client.get(VAR_8)\n", "assert VAR_11.status_code == 200\n", "db_mock.side_effect = SQLAlchemyError()\n", "VAR_11 = client.post(VAR_9, data={'id': rec_uuid}, follow_redirects=True)\n", "assert VAR_11.status_code == 200\n", "VAR_11 = client.post(VAR_9, data={'id': rec_uuid}, follow_redirects=True)\n", "assert VAR_11.status_code == 200\n", "VAR_11 = client.get(VAR_10)\n", "assert VAR_11.status_code == 200\n", "assert '<pre>null</pre>' in VAR_11.get_data(as_text=True)\n", "VAR_11 = client.post(VAR_9, data={'id': rec_uuid}, follow_redirects=True)\n", "assert VAR_11.status_code == 200\n" ]
[ "def test_admin(app, db):...\n", "\"\"\"docstring\"\"\"\n", "admin = Admin(app, name='Test')\n", "assert 'model' in record_adminview\n", "assert 'modelview' in record_adminview\n", "model = record_adminview.pop('model')\n", "view = record_adminview.pop('modelview')\n", "admin.add_view(view(model, db.session, **record_adminview))\n", "menu_items = {str(item.name): item for item in admin.menu()}\n", "assert 'Records' in menu_items\n", "assert menu_items['Records'].is_category()\n", "submenu_items = {str(item.name): item for item in menu_items['Records'].\n get_children()}\n", "assert 'Record Metadata' in submenu_items\n", "assert isinstance(submenu_items['Record Metadata'], menu.MenuView)\n", "rec_uuid = str(uuid.uuid4())\n", "Record.create({'title': 'test'}, id_=rec_uuid)\n", "db.session.commit()\n", "index_view_url = url_for('recordmetadata.index_view')\n", "delete_view_url = url_for('recordmetadata.delete_view')\n", "detail_view_url = url_for('recordmetadata.details_view', id=rec_uuid)\n", "res = client.get(index_view_url)\n", "assert res.status_code == 200\n", "db_mock.side_effect = SQLAlchemyError()\n", "res = client.post(delete_view_url, data={'id': rec_uuid}, follow_redirects=True\n )\n", "assert res.status_code == 200\n", "res = client.post(delete_view_url, data={'id': rec_uuid}, follow_redirects=True\n )\n", "assert res.status_code == 200\n", "res = client.get(detail_view_url)\n", "assert res.status_code == 200\n", "assert '<pre>null</pre>' in res.get_data(as_text=True)\n", "res = client.post(delete_view_url, data={'id': rec_uuid}, follow_redirects=True\n )\n", "assert res.status_code == 200\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assert'", "Assert'", "Assign'", "Assign'", "Expr'", "Assign'", "Assert'", "Assert'", "Assign'", "Assert'", "Assert'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assert'", "Assign'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "Assert'", "Assign'", "Assert'" ]
[ "def FUNC_12(VAR_6, VAR_11, VAR_10):...\n", "VAR_13 = CLASS_0.create_invoice_products_values_query(VAR_10, VAR_11)\n", "print('Result:')\n", "print(VAR_13)\n", "VAR_12 = f\"\"\"\n INSERT INTO {VAR_4} (invoice_id, product_id, quantity)\n VALUES {VAR_13}\n \"\"\"\n", "VAR_14 = create_connection()\n", "VAR_14.close()\n", "VAR_15 = VAR_14.cursor()\n", "VAR_15.execute(VAR_12)\n", "VAR_14.commit()\n", "return 'Ok'\n" ]
[ "def create_invoice_products(userId, products, invoiceId):...\n", "invoice_products_values = UsersRepository.create_invoice_products_values_query(\n invoiceId, products)\n", "print('Result:')\n", "print(invoice_products_values)\n", "sql_query = f\"\"\"\n INSERT INTO {INVOICE_PRODUCTS_TABLE} (invoice_id, product_id, quantity)\n VALUES {invoice_products_values}\n \"\"\"\n", "connection = create_connection()\n", "connection.close()\n", "cursor = connection.cursor()\n", "cursor.execute(sql_query)\n", "connection.commit()\n", "return 'Ok'\n" ]
[ 0, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_1(VAR_1=None):...\n", "\"\"\"docstring\"\"\"\n", "VAR_4 = 'df -i'\n", "if VAR_1 is not None:\n", "VAR_4 = VAR_4 + ' -' + VAR_1\n", "VAR_2 = {}\n", "VAR_3 = __salt__['cmd.run'](VAR_4).splitlines()\n", "for line in VAR_3:\n", "if line.startswith('Filesystem'):\n", "return VAR_2\n", "VAR_5 = line.split()\n", "if not VAR_5:\n", "if __grains__['kernel'] == 'OpenBSD':\n", "VAR_0.warn('Problem parsing inode usage information')\n", "VAR_2[VAR_5[8]] = {'inodes': int(VAR_5[5]) + int(VAR_5[6]), 'used': VAR_5[5\n ], 'free': VAR_5[6], 'use': VAR_5[7], 'filesystem': VAR_5[0]}\n", "VAR_2[VAR_5[5]] = {'inodes': VAR_5[1], 'used': VAR_5[2], 'free': VAR_5[3],\n 'use': VAR_5[4], 'filesystem': VAR_5[0]}\n", "VAR_2 = {}\n" ]
[ "def inodeusage(args=None):...\n", "\"\"\"docstring\"\"\"\n", "cmd = 'df -i'\n", "if args is not None:\n", "cmd = cmd + ' -' + args\n", "ret = {}\n", "out = __salt__['cmd.run'](cmd).splitlines()\n", "for line in out:\n", "if line.startswith('Filesystem'):\n", "return ret\n", "comps = line.split()\n", "if not comps:\n", "if __grains__['kernel'] == 'OpenBSD':\n", "log.warn('Problem parsing inode usage information')\n", "ret[comps[8]] = {'inodes': int(comps[5]) + int(comps[6]), 'used': comps[5],\n 'free': comps[6], 'use': comps[7], 'filesystem': comps[0]}\n", "ret[comps[5]] = {'inodes': comps[1], 'used': comps[2], 'free': comps[3],\n 'use': comps[4], 'filesystem': comps[0]}\n", "ret = {}\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "For", "Condition", "Return'", "Assign'", "Condition", "Condition", "Expr'", "Assign'", "Assign'", "Assign'" ]
[ "@transaction.atomic...\n", "VAR_6 = CreateCourseForm(VAR_0.POST)\n", "if not VAR_6.is_valid():\n", "return FUNC_0(VAR_0, VAR_3=form)\n", "VAR_8 = Course(course_number=request.POST['course_number'], course_name=\n request.POST['course_name'], instructor=request.POST['instructor'])\n", "VAR_8.save()\n", "return FUNC_0(VAR_0, VAR_1=['Added %s' % new_course])\n" ]
[ "@transaction.atomic...\n", "form = CreateCourseForm(request.POST)\n", "if not form.is_valid():\n", "return make_view(request, create_course_form=form)\n", "new_course = Course(course_number=request.POST['course_number'],\n course_name=request.POST['course_name'], instructor=request.POST[\n 'instructor'])\n", "new_course.save()\n", "return make_view(request, messages=['Added %s' % new_course])\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Return'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_26(VAR_17):...\n", "\"\"\"docstring\"\"\"\n", "VAR_68 = len(VAR_17)\n", "VAR_69 = lambda x: int(float(x) / 100 * VAR_68)\n", "VAR_70 = [VAR_17[VAR_69(start):VAR_69(end)] for start, end in bconfig.\n CFG_BIBCLASSIFY_PARTIAL_TEXT]\n", "return '\\n'.join(VAR_70)\n" ]
[ "def _get_partial_text(fulltext):...\n", "\"\"\"docstring\"\"\"\n", "length = len(fulltext)\n", "get_index = lambda x: int(float(x) / 100 * length)\n", "partial_text = [fulltext[get_index(start):get_index(end)] for start, end in\n bconfig.CFG_BIBCLASSIFY_PARTIAL_TEXT]\n", "return '\\n'.join(partial_text)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_2(self, VAR_1):...\n", "VAR_2 = self.connect()\n", "VAR_3 = \"INSERT INTO crimes (description) VALUES ('{}');\".format(VAR_1)\n", "VAR_2.close()\n", "cursor.execute(VAR_3)\n", "VAR_2.commit()\n" ]
[ "def add_input(self, data):...\n", "connection = self.connect()\n", "query = \"INSERT INTO crimes (description) VALUES ('{}');\".format(data)\n", "connection.close()\n", "cursor.execute(query)\n", "connection.commit()\n" ]
[ 0, 0, 4, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_2(VAR_0, VAR_1):...\n", "VAR_5 = frappe.session.user\n", "VAR_6 = get_leave_allocation_records(VAR_0.to_date)\n", "VAR_7 = get_leave_allocation_records(VAR_0.from_date)\n", "VAR_8 = frappe.get_all('Employee', VAR_0={'status': 'Active', 'company':\n filters.company}, fields=['name', 'employee_name', 'department', 'user_id']\n )\n", "VAR_4 = []\n", "for employee in VAR_8:\n", "VAR_12 = FUNC_3(employee.department)\n", "return VAR_4\n", "if len(VAR_12) and VAR_5 in VAR_12 or VAR_5 in ['Administrator', employee.\n", "VAR_13 = [employee.name, employee.employee_name, employee.department]\n", "for leave_type in VAR_1:\n", "VAR_14 = get_approved_leaves_for_period(employee.name, leave_type, VAR_0.\n from_date, VAR_0.to_date)\n", "VAR_4.append(VAR_13)\n", "VAR_15 = get_leave_balance_on(employee.name, leave_type, VAR_0.from_date,\n VAR_7.get(employee.name, frappe._dict()))\n", "VAR_16 = get_leave_balance_on(employee.name, leave_type, VAR_0.to_date,\n VAR_6.get(employee.name, frappe._dict()))\n", "VAR_13 += [VAR_15, VAR_14, VAR_16]\n" ]
[ "def get_data(filters, leave_types):...\n", "user = frappe.session.user\n", "allocation_records_based_on_to_date = get_leave_allocation_records(filters.\n to_date)\n", "allocation_records_based_on_from_date = get_leave_allocation_records(filters\n .from_date)\n", "active_employees = frappe.get_all('Employee', filters={'status': 'Active',\n 'company': filters.company}, fields=['name', 'employee_name',\n 'department', 'user_id'])\n", "data = []\n", "for employee in active_employees:\n", "leave_approvers = get_approvers(employee.department)\n", "return data\n", "if len(leave_approvers) and user in leave_approvers or user in ['Administrator'\n", "row = [employee.name, employee.employee_name, employee.department]\n", "for leave_type in leave_types:\n", "leaves_taken = get_approved_leaves_for_period(employee.name, leave_type,\n filters.from_date, filters.to_date)\n", "data.append(row)\n", "opening = get_leave_balance_on(employee.name, leave_type, filters.from_date,\n allocation_records_based_on_from_date.get(employee.name, frappe._dict()))\n", "closing = get_leave_balance_on(employee.name, leave_type, filters.to_date,\n allocation_records_based_on_to_date.get(employee.name, frappe._dict()))\n", "row += [opening, leaves_taken, closing]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Return'", "Condition", "Assign'", "For", "Assign'", "Expr'", "Assign'", "Assign'", "AugAssign'" ]
[ "def FUNC_4(VAR_2):...\n", "VAR_7 = files.get_static_data('./static/files.html')\n", "VAR_2 = FUNC_1(VAR_2)\n", "VAR_2 = '/'\n", "if not VAR_2:\n", "VAR_2 = '/'\n", "VAR_15 = VAR_2.split('/')\n", "VAR_16 = list()\n", "while '' in VAR_15:\n", "VAR_15.remove('')\n", "VAR_15 = [''] + VAR_15\n", "VAR_17 = ''\n", "for VAR_31 in range(0, len(VAR_15)):\n", "VAR_15[VAR_31] += '/'\n", "VAR_18 = list()\n", "VAR_17 += VAR_15[VAR_31]\n", "for f_handle in db.Filesystem.listdir(VAR_2):\n", "VAR_16.append(dict(folder_name=files_hierarchy[i], href_path=\n '/files/list/%s' % encode_str_to_hexed_b64(files_hierarchy_cwd),\n disabled=i == len(files_hierarchy) - 1))\n", "VAR_5 = f_handle['file-name']\n", "VAR_19 = FUNC_0(VAR_17)\n", "VAR_27 = VAR_2 + VAR_5\n", "VAR_12 = users.get_user_by_cookie(self.get_cookie('user_active_login',\n default=''))\n", "VAR_28 = dict()\n", "VAR_7 = preproc.preprocess_webpage(VAR_7, VAR_12, VAR_18=files_attrib_list,\n VAR_16=files_hierarchy_list, VAR_19=cwd_uuid)\n", "VAR_28['file-name'] = VAR_5\n", "VAR_6.set_result(VAR_7)\n", "VAR_28['file-name-url'] = urllib.parse.quote(VAR_5)\n", "VAR_28['file-name-escaped'] = cgi.escape(VAR_5)\n", "VAR_28['size'] = f_handle['file-size']\n", "VAR_28['size-str'] = files.format_file_size(VAR_28['size'])\n", "VAR_28['owner'] = f_handle['owner']\n", "VAR_28['date-uploaded'] = time.strftime(const.get_const('time-format'),\n time.localtime(f_handle['upload-time']))\n", "if f_handle['is-dir']:\n", "VAR_28['mime-type'] = 'directory/folder'\n", "VAR_28['mime-type'] = files.guess_mime_type(VAR_5)\n", "if VAR_28['mime-type'] == 'directory/folder':\n", "VAR_28['target-link'] = '/files/list/%s' % FUNC_0(VAR_27 + '/')\n", "VAR_28['target-link'] = '/files/download/%s/%s' % (FUNC_0(VAR_27), VAR_28[\n 'file-name-url'])\n", "VAR_28['uuid'] = FUNC_0(VAR_27)\n", "VAR_18.append(VAR_28)\n" ]
[ "def get_final_html_async(target_path):...\n", "file_temp = files.get_static_data('./static/files.html')\n", "target_path = decode_hexed_b64_to_str(target_path)\n", "target_path = '/'\n", "if not target_path:\n", "target_path = '/'\n", "files_hierarchy = target_path.split('/')\n", "files_hierarchy_list = list()\n", "while '' in files_hierarchy:\n", "files_hierarchy.remove('')\n", "files_hierarchy = [''] + files_hierarchy\n", "files_hierarchy_cwd = ''\n", "for i in range(0, len(files_hierarchy)):\n", "files_hierarchy[i] += '/'\n", "files_attrib_list = list()\n", "files_hierarchy_cwd += files_hierarchy[i]\n", "for f_handle in db.Filesystem.listdir(target_path):\n", "files_hierarchy_list.append(dict(folder_name=files_hierarchy[i], href_path=\n '/files/list/%s' % encode_str_to_hexed_b64(files_hierarchy_cwd),\n disabled=i == len(files_hierarchy) - 1))\n", "file_name = f_handle['file-name']\n", "cwd_uuid = encode_str_to_hexed_b64(files_hierarchy_cwd)\n", "actual_path = target_path + file_name\n", "working_user = users.get_user_by_cookie(self.get_cookie('user_active_login',\n default=''))\n", "attrib = dict()\n", "file_temp = preproc.preprocess_webpage(file_temp, working_user,\n files_attrib_list=files_attrib_list, files_hierarchy_list=\n files_hierarchy_list, cwd_uuid=cwd_uuid)\n", "attrib['file-name'] = file_name\n", "future.set_result(file_temp)\n", "attrib['file-name-url'] = urllib.parse.quote(file_name)\n", "attrib['file-name-escaped'] = cgi.escape(file_name)\n", "attrib['size'] = f_handle['file-size']\n", "attrib['size-str'] = files.format_file_size(attrib['size'])\n", "attrib['owner'] = f_handle['owner']\n", "attrib['date-uploaded'] = time.strftime(const.get_const('time-format'),\n time.localtime(f_handle['upload-time']))\n", "if f_handle['is-dir']:\n", "attrib['mime-type'] = 'directory/folder'\n", "attrib['mime-type'] = files.guess_mime_type(file_name)\n", "if attrib['mime-type'] == 'directory/folder':\n", "attrib['target-link'] = '/files/list/%s' % encode_str_to_hexed_b64(\n actual_path + '/')\n", "attrib['target-link'] = '/files/download/%s/%s' % (encode_str_to_hexed_b64(\n actual_path), attrib['file-name-url'])\n", "attrib['uuid'] = encode_str_to_hexed_b64(actual_path)\n", "files_attrib_list.append(attrib)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "For", "AugAssign'", "Assign'", "AugAssign'", "For", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_3(self):...\n", "VAR_4 = {'username': 'first', 'password': 'password', 'email':\n '[email protected]'}\n", "VAR_5 = '/api/auth/register'\n", "VAR_6 = self.client.post(VAR_5, json.dumps(VAR_4), content_type=\n 'application/json')\n", "self.assertEqual(VAR_6.status_code, 201)\n", "self.assertTrue(VAR_6.data['is_superuser'])\n", "VAR_4 = {'username': 'second', 'password': 'password', 'email':\n '[email protected]'}\n", "VAR_5 = '/api/auth/register'\n", "VAR_6 = self.client.post(VAR_5, json.dumps(VAR_4), content_type=\n 'application/json')\n", "self.assertEqual(VAR_6.status_code, 201)\n", "self.assertFalse(VAR_6.data['is_superuser'])\n", "self.assertTrue(self.client.login(VAR_1='first', VAR_2='password'))\n", "VAR_5 = '/api/admin/perms'\n", "VAR_7 = {'username': 'second'}\n", "VAR_6 = self.client.post(VAR_5, json.dumps(VAR_7), content_type=\n 'application/json')\n", "self.assertEqual(VAR_6.status_code, 201)\n", "VAR_6 = self.client.delete(VAR_5 + '/second')\n", "self.assertEqual(VAR_6.status_code, 204)\n", "VAR_6 = self.client.get(VAR_5)\n", "self.assertEqual(VAR_6.status_code, 200)\n", "self.assertEqual(len(VAR_6.data['results']), 1)\n", "self.assertNotIn('two', str(VAR_6.data['results']))\n" ]
[ "def test_delete(self):...\n", "submit = {'username': 'first', 'password': 'password', 'email':\n '[email protected]'}\n", "url = '/api/auth/register'\n", "response = self.client.post(url, json.dumps(submit), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "self.assertTrue(response.data['is_superuser'])\n", "submit = {'username': 'second', 'password': 'password', 'email':\n '[email protected]'}\n", "url = '/api/auth/register'\n", "response = self.client.post(url, json.dumps(submit), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "self.assertFalse(response.data['is_superuser'])\n", "self.assertTrue(self.client.login(username='first', password='password'))\n", "url = '/api/admin/perms'\n", "body = {'username': 'second'}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "response = self.client.delete(url + '/second')\n", "self.assertEqual(response.status_code, 204)\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 1)\n", "self.assertNotIn('two', str(response.data['results']))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_8(self, VAR_7, VAR_9, VAR_10=False):...\n", "if isinstance(VAR_9, list) and not VAR_10:\n", "self.__dict__[VAR_7] = []\n", "self.__dict__[VAR_7] = VAR_9\n", "self.extend(VAR_7, VAR_9)\n" ]
[ "def set(self, key, value, as_value=False):...\n", "if isinstance(value, list) and not as_value:\n", "self.__dict__[key] = []\n", "self.__dict__[key] = value\n", "self.extend(key, value)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_18(self):...\n", "return Category.objects.filter(product__id=self.product_id)\n" ]
[ "def categories(self):...\n", "return Category.objects.filter(product__id=self.product_id)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_5(self, VAR_8):...\n", "VAR_13 = ['\\n']\n", "for row in VAR_8:\n", "VAR_13.append('When: {}'.format(prettify_date(row[0])))\n", "return '\\n'.join(VAR_13)\n", "VAR_13.extend(row[1:])\n", "VAR_13.append('')\n" ]
[ "def prettify_rows(self, rows):...\n", "str_builder = ['\\n']\n", "for row in rows:\n", "str_builder.append('When: {}'.format(prettify_date(row[0])))\n", "return '\\n'.join(str_builder)\n", "str_builder.extend(row[1:])\n", "str_builder.append('')\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Expr'", "Return'", "Expr'", "Expr'" ]
[ "def FUNC_4(self, VAR_5='test', VAR_6=None):...\n", "\"\"\"docstring\"\"\"\n", "from invenio.modules.oauthclient.client import oauth\n", "oauth.remote_apps[VAR_5].handle_oauth2_response = MagicMock(return_value=\n data or {'access_token': 'test_access_token', 'scope': '', 'token_type':\n 'bearer'})\n" ]
[ "def mock_response(self, app='test', data=None):...\n", "\"\"\"docstring\"\"\"\n", "from invenio.modules.oauthclient.client import oauth\n", "oauth.remote_apps[app].handle_oauth2_response = MagicMock(return_value=data or\n {'access_token': 'test_access_token', 'scope': '', 'token_type': 'bearer'})\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "ImportFrom'", "Assign'" ]
[ "def FUNC_2(self, VAR_1, VAR_7, VAR_8, VAR_9, VAR_4=None):...\n", "if VAR_4 is None:\n", "VAR_4 = {}\n", "VAR_29 = super(CLASS_0, self).view_header_get(VAR_1, VAR_7, VAR_8, VAR_9, VAR_4\n )\n", "if VAR_29:\n", "return VAR_29\n", "if VAR_4.get('active_id', False) and VAR_4.get('active_model'\n", "return _('Products: ') + self.pool.get('stock.location').browse(VAR_1,\n VAR_7, VAR_4['active_id'], VAR_4).name\n", "return VAR_29\n" ]
[ "def view_header_get(self, cr, user, view_id, view_type, context=None):...\n", "if context is None:\n", "context = {}\n", "res = super(product_product, self).view_header_get(cr, user, view_id,\n view_type, context)\n", "if res:\n", "return res\n", "if context.get('active_id', False) and context.get('active_model'\n", "return _('Products: ') + self.pool.get('stock.location').browse(cr, user,\n context['active_id'], context).name\n", "return res\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Assign'", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "def FUNC_14(VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "return partial(FUNC_5, VAR_1)\n" ]
[ "def make_token_getter(remote):...\n", "\"\"\"docstring\"\"\"\n", "return partial(token_getter, remote)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "return kato.new_agent(VAR_2)\n" ]
[ "@classmethod...\n", "\"\"\"docstring\"\"\"\n", "return kato.new_agent(bindings)\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "@mock.patch('requests.post', FUNC_0)...\n", "\"\"\"docstring\"\"\"\n", "VAR_11 = self.test_release()\n", "VAR_15 = Release.objects.get(uuid=release3['uuid'])\n", "self.assertEqual(str(VAR_15), '{}-v3'.format(VAR_11['app']))\n" ]
[ "@mock.patch('requests.post', mock_import_repository_task)...\n", "\"\"\"docstring\"\"\"\n", "release3 = self.test_release()\n", "release = Release.objects.get(uuid=release3['uuid'])\n", "self.assertEqual(str(release), '{}-v3'.format(release3['app']))\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_2(VAR_4):...\n", "VAR_10 = sqlite3.connect(VAR_1)\n", "VAR_11 = VAR_10.cursor()\n", "VAR_11.execute('UPDATE UserData SET session_id = NULL WHERE session_id = ?',\n [VAR_4])\n", "VAR_10.commit()\n", "VAR_10.close()\n" ]
[ "def logout(session_id):...\n", "connection = sqlite3.connect(DATABASE_PATH)\n", "cursor = connection.cursor()\n", "cursor.execute('UPDATE UserData SET session_id = NULL WHERE session_id = ?',\n [session_id])\n", "connection.commit()\n", "connection.close()\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_3(VAR_1):...\n", "return frappe.db.get_value('Blog Category', {'name': VAR_1}, 'title') or VAR_1\n" ]
[ "def get_blog_category(route):...\n", "return frappe.db.get_value('Blog Category', {'name': route}, 'title') or route\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_23(self, *VAR_69, **VAR_70):...\n", "def FUNC_39(VAR_101):...\n", "VAR_101.output = VAR_69, VAR_70\n", "return VAR_101\n" ]
[ "def output(self, *paths, **kwpaths):...\n", "def decorate(ruleinfo):...\n", "ruleinfo.output = paths, kwpaths\n", "return ruleinfo\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_2(VAR_4, VAR_5, VAR_6):...\n", "VAR_4.parent = VAR_5\n", "VAR_4.put()\n" ]
[ "def save_message(message, thread, user):...\n", "message.parent = thread\n", "message.put()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_8(VAR_15, VAR_16, VAR_17):...\n", "qm.RX(VAR_15, [0])\n", "qm.CNOT([0, 1])\n", "qm.RY(-1.6, [0])\n", "qm.RY(VAR_16, [1])\n", "qm.CNOT([1, 0])\n", "qm.RX(VAR_17, [0])\n", "qm.CNOT([0, 1])\n", "qm.expectation.Hermitian(np.array([[0, 1], [1, 0]]), 0)\n" ]
[ "def node(x, y, z):...\n", "qm.RX(x, [0])\n", "qm.CNOT([0, 1])\n", "qm.RY(-1.6, [0])\n", "qm.RY(y, [1])\n", "qm.CNOT([1, 0])\n", "qm.RX(z, [0])\n", "qm.CNOT([0, 1])\n", "qm.expectation.Hermitian(np.array([[0, 1], [1, 0]]), 0)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_10(self, VAR_7, VAR_9=None):...\n", "if VAR_9 == None:\n", "VAR_9 = {}\n", "if isinstance(VAR_9, (dict, CLASS_0)):\n", "if not self.__dict__.get(VAR_7):\n", "if getattr(self, '_metaclass', None) or self.__class__.__name__ in ('Meta',\n", "self.__dict__[VAR_7] = []\n", "VAR_9 = self._init_child(VAR_9, VAR_7)\n", "return VAR_9\n", "self.__dict__[VAR_7].append(VAR_9)\n", "VAR_9.parent_doc = self\n", "return VAR_9\n" ]
[ "def append(self, key, value=None):...\n", "if value == None:\n", "value = {}\n", "if isinstance(value, (dict, BaseDocument)):\n", "if not self.__dict__.get(key):\n", "if getattr(self, '_metaclass', None) or self.__class__.__name__ in ('Meta',\n", "self.__dict__[key] = []\n", "value = self._init_child(value, key)\n", "return value\n", "self.__dict__[key].append(value)\n", "value.parent_doc = self\n", "return value\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Return'", "Expr'", "Assign'", "Return'" ]
[ "def FUNC_12(self, VAR_14):...\n", "" ]
[ "def is_numeric(self, col_name):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "@VAR_0.route('/statistics')...\n", "VAR_28 = VAR_1.connection.cursor()\n", "VAR_28.execute(\n 'SELECT cid FROM Crawls WHERE crawl_date = (SELECT max(crawl_date) FROM Crawls)'\n )\n", "VAR_29 = VAR_28.fetchone()\n", "VAR_28.close()\n", "if VAR_29:\n", "VAR_49 = VAR_29['cid']\n", "flash(\n 'There are no statistics to display, please start a new query and wait for it to complete.'\n , 'danger')\n", "return redirect(url_for('cid_statistics', VAR_7=cid_last_crawl))\n", "return redirect(url_for('index'))\n" ]
[ "@app.route('/statistics')...\n", "cur = mysql.connection.cursor()\n", "cur.execute(\n 'SELECT cid FROM Crawls WHERE crawl_date = (SELECT max(crawl_date) FROM Crawls)'\n )\n", "result = cur.fetchone()\n", "cur.close()\n", "if result:\n", "cid_last_crawl = result['cid']\n", "flash(\n 'There are no statistics to display, please start a new query and wait for it to complete.'\n , 'danger')\n", "return redirect(url_for('cid_statistics', cid=cid_last_crawl))\n", "return redirect(url_for('index'))\n" ]
[ 0, 0, 4, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Expr'", "Assign'", "Expr'", "Condition", "Assign'", "Expr'", "Return'", "Return'" ]
[ "def FUNC_7(self, VAR_10=None):...\n", "VAR_25 = VAR_20.path.join(self.parlai_home, 'downloads')\n", "VAR_26 = self.add_argument_group('Main ParlAI Arguments')\n", "VAR_26.add_argument('-t', '--task', help=\n 'ParlAI task(s), e.g. \"babi:Task1\" or \"babi,cbt\"')\n", "VAR_26.add_argument('--download-path', default=default_downloads_path, help\n =\n 'path for non-data dependencies to store any needed files.defaults to {parlai_dir}/downloads'\n )\n", "VAR_26.add_argument('-dt', '--datatype', default='train', choices=['train',\n 'train:stream', 'train:ordered', 'train:ordered:stream',\n 'train:stream:ordered', 'valid', 'valid:stream', 'test', 'test:stream'],\n help=\n 'choose from: train, train:ordered, valid, test. to stream data add \":stream\" to any option (e.g., train:stream). by default: train is random with replacement, valid is ordered, test is ordered.'\n )\n", "VAR_26.add_argument('-im', '--image-mode', default='raw', type=str, help=\n 'image preprocessor to use. default is \"raw\". set to \"none\" to skip image loading.'\n )\n", "VAR_26.add_argument('-nt', '--numthreads', default=1, type=int, help=\n 'number of threads. If batchsize set to 1, used for hogwild; otherwise, used for number of threads in threadpool loading, e.g. in vqa'\n )\n", "VAR_26.add_argument('--hide-labels', default=False, type='bool', help=\n 'default (False) moves labels in valid and test sets to the eval_labels field. If True, they are hidden completely.'\n )\n", "VAR_27 = self.add_argument_group('Batching Arguments')\n", "VAR_27.add_argument('-bs', '--batchsize', default=1, type=int, help=\n 'batch size for minibatch training schemes')\n", "VAR_27.add_argument('-bsrt', '--batch-sort', default=True, type='bool',\n help=\n 'If enabled (default True), create batches by flattening all episodes to have exactly one utterance exchange and then sorting all the examples according to their length. This dramatically reduces the amount of padding present after examples have been parsed, speeding up training.'\n )\n", "VAR_27.add_argument('-clen', '--context-length', default=-1, type=int, help\n =\n 'Number of past utterances to remember when building flattened batches of data in multi-example episodes.'\n )\n", "VAR_27.add_argument('-incl', '--include-labels', default=True, type='bool',\n help=\n 'Specifies whether or not to include labels as past utterances when building flattened batches of data in multi-example episodes.'\n )\n", "self.add_parlai_data_path(VAR_26)\n" ]
[ "def add_parlai_args(self, args=None):...\n", "default_downloads_path = os.path.join(self.parlai_home, 'downloads')\n", "parlai = self.add_argument_group('Main ParlAI Arguments')\n", "parlai.add_argument('-t', '--task', help=\n 'ParlAI task(s), e.g. \"babi:Task1\" or \"babi,cbt\"')\n", "parlai.add_argument('--download-path', default=default_downloads_path, help\n =\n 'path for non-data dependencies to store any needed files.defaults to {parlai_dir}/downloads'\n )\n", "parlai.add_argument('-dt', '--datatype', default='train', choices=['train',\n 'train:stream', 'train:ordered', 'train:ordered:stream',\n 'train:stream:ordered', 'valid', 'valid:stream', 'test', 'test:stream'],\n help=\n 'choose from: train, train:ordered, valid, test. to stream data add \":stream\" to any option (e.g., train:stream). by default: train is random with replacement, valid is ordered, test is ordered.'\n )\n", "parlai.add_argument('-im', '--image-mode', default='raw', type=str, help=\n 'image preprocessor to use. default is \"raw\". set to \"none\" to skip image loading.'\n )\n", "parlai.add_argument('-nt', '--numthreads', default=1, type=int, help=\n 'number of threads. If batchsize set to 1, used for hogwild; otherwise, used for number of threads in threadpool loading, e.g. in vqa'\n )\n", "parlai.add_argument('--hide-labels', default=False, type='bool', help=\n 'default (False) moves labels in valid and test sets to the eval_labels field. If True, they are hidden completely.'\n )\n", "batch = self.add_argument_group('Batching Arguments')\n", "batch.add_argument('-bs', '--batchsize', default=1, type=int, help=\n 'batch size for minibatch training schemes')\n", "batch.add_argument('-bsrt', '--batch-sort', default=True, type='bool', help\n =\n 'If enabled (default True), create batches by flattening all episodes to have exactly one utterance exchange and then sorting all the examples according to their length. This dramatically reduces the amount of padding present after examples have been parsed, speeding up training.'\n )\n", "batch.add_argument('-clen', '--context-length', default=-1, type=int, help=\n 'Number of past utterances to remember when building flattened batches of data in multi-example episodes.'\n )\n", "batch.add_argument('-incl', '--include-labels', default=True, type='bool',\n help=\n 'Specifies whether or not to include labels as past utterances when building flattened batches of data in multi-example episodes.'\n )\n", "self.add_parlai_data_path(parlai)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_0(self, VAR_2=None):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_2 is None:\n", "VAR_2 = self.sensors\n", "for i, _ in enumerate(VAR_2):\n", "if random() < 0.01:\n", "yield 'NULL'\n", "VAR_3 = gauss(293 + 0.5 * i, 0.1)\n", "yield f'{VAR_3:.4f}'\n" ]
[ "def read_data(self, sensors=None):...\n", "\"\"\"docstring\"\"\"\n", "if sensors is None:\n", "sensors = self.sensors\n", "for i, _ in enumerate(sensors):\n", "if random() < 0.01:\n", "yield 'NULL'\n", "value = gauss(293 + 0.5 * i, 0.1)\n", "yield f'{value:.4f}'\n" ]
[ 0, 0, 0, 0, 0, 0, 4, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "For", "Condition", "Expr'", "Assign'", "Expr'" ]
[ "from app import database as db\n", "import pandas as pd\n", "import re\n", "import numpy as np\n", "def FUNC_0(VAR_0, VAR_1, VAR_2):...\n", "db.engine.execute('ALTER TABLE {0} RENAME COLUMN \"{1}\" TO \"{2}\"'.format(\n VAR_0, VAR_1, VAR_2))\n", "print('RENAMING FAILED: ' + str(e))\n", "def FUNC_1(VAR_0, VAR_1):...\n", "db.engine.execute('ALTER TABLE {0} DROP COLUMN \"{1}\"'.format(VAR_0, VAR_1))\n", "print('DELETING FAILED')\n", "def FUNC_2(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "VAR_18 = 'og' + VAR_0[2:]\n", "print('FAILED TO RESTORE ORIGINAL')\n", "def FUNC_3(VAR_0, VAR_3, VAR_4):...\n", "db.engine.execute('DROP TABLE \"{0}\"'.format(VAR_0))\n", "\"\"\"docstring\"\"\"\n", "db.engine.execute('CREATE TABLE \"{0}\" AS SELECT * FROM \"{1}\"'.format(VAR_0,\n VAR_18))\n", "VAR_17 = db.engine.execute(\n \"SELECT data_type from information_schema.columns where table_name = '{0}' and column_name = '{1}';\"\n .format(VAR_0, VAR_3)).fetchall()[0][0]\n", "if VAR_4 == 'INTEGER':\n", "db.engine.execute(\n 'ALTER TABLE {0} ALTER COLUMN \"{1}\" TYPE BIGINT USING \"{1}\"::bigint'.\n format(VAR_0, VAR_3))\n", "if VAR_4 == 'DOUBLE':\n", "db.engine.execute(\n 'ALTER TABLE {0} ALTER COLUMN \"{1}\" TYPE DOUBLE PRECISION USING \"{1}\"::double precision'\n .format(VAR_0, VAR_3))\n", "if VAR_4 == 'TEXT':\n", "if VAR_17 == 'date':\n", "if VAR_4 == 'DATE':\n", "db.engine.execute(\n 'ALTER TABLE {0} ALTER COLUMN \"{1}\" TYPE TEXT USING to_char(\"{1}\", \\'DD/MM/YYYY\\')'\n .format(VAR_0, VAR_3))\n", "if VAR_17 == 'timestamp with time zone':\n", "if VAR_17 == 'timestamp with time zone':\n", "if VAR_4 == 'TIMESTAMP':\n", "db.engine.execute(\n 'ALTER TABLE {0} ALTER COLUMN \"{1}\" TYPE TEXT USING to_char(\"{1}\", \\'DD/MM/YYYY HH24:MI:SS\\')'\n .format(VAR_0, VAR_3))\n", "db.engine.execute('ALTER TABLE {0} ALTER COLUMN \"{1}\" TYPE TEXT'.format(\n VAR_0, VAR_3))\n", "db.engine.execute('ALTER TABLE {0} ALTER COLUMN \"{1}\" TYPE DATE'.format(\n VAR_0, VAR_3))\n", "db.engine.execute(\n 'ALTER TABLE {0} ALTER COLUMN \"{1}\" TYPE DATE USING to_date(\"{1}\", \\'DD/MM/YYYY\\')'\n .format(VAR_0, VAR_3))\n", "if VAR_17 == 'date':\n", "def FUNC_4(VAR_0, VAR_5):...\n", "db.engine.execute(\n 'ALTER TABLE {0} ALTER COLUMN \"{1}\" TYPE TIMESTAMP WITH TIME ZONE'.\n format(VAR_0, VAR_3))\n", "db.engine.execute('string'.format(VAR_0, VAR_3))\n", "\"\"\"docstring\"\"\"\n", "db.engine.execute('ALTER TABLE \"{0}\" DROP COLUMN IF EXISTS \"{1}\"'.format(\n VAR_0, VAR_5))\n", "print('FAILED TO DROP ATTRIBUTE {0} FROM {1}'.format(VAR_5, VAR_0))\n", "def FUNC_5(VAR_0, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "VAR_15 = pd.read_sql_table(VAR_0, db.engine)\n", "print('ONE-HOT ENCODING FAILED')\n", "def FUNC_6(VAR_0, VAR_5, VAR_6, VAR_7):...\n", "VAR_19 = pd.get_dummies(VAR_15[VAR_5])\n", "\"\"\"docstring\"\"\"\n", "print('OH', VAR_19)\n", "if VAR_7:\n", "print('FILL NULL FAILED WITH FOLLOWING MESSAGE:\\n' + str(e))\n", "def FUNC_7(VAR_0, VAR_5):...\n", "VAR_15 = VAR_15.join(VAR_19)\n", "db.engine.execute(\n 'UPDATE \"{0}\" SET \"{1}\" = \\'{2}\\' WHERE (\"{1}\" = \\'\\') IS NOT FALSE'.\n format(VAR_0, VAR_5, VAR_6))\n", "db.engine.execute('UPDATE \"{0}\" SET \"{1}\" = {2} WHERE \"{1}\" IS NULL'.format\n (VAR_0, VAR_5, VAR_6))\n", "\"\"\"docstring\"\"\"\n", "print('DF', VAR_15)\n", "VAR_15 = pd.read_sql_table(VAR_0, db.engine, columns=[attr])\n", "print('FILL AVERAGE FAILED')\n", "def FUNC_8(VAR_0, VAR_5):...\n", "db.engine.execute('DROP TABLE \"{0}\"'.format(VAR_0))\n", "VAR_20 = VAR_15[VAR_5].mean()\n", "\"\"\"docstring\"\"\"\n", 
"VAR_15.to_sql(VAR_16=table_name, con=db.engine, if_exists='fail', index=False)\n", "db.engine.execute('UPDATE \"{0}\" SET \"{1}\" = {2} WHERE \"{1}\" IS NULL'.format\n (VAR_0, VAR_5, VAR_20))\n", "VAR_15 = pd.read_sql_table(VAR_0, db.engine, columns=[attr])\n", "print('FILL MEAN FAILED')\n", "def FUNC_9(VAR_0, VAR_5, VAR_8, VAR_9):...\n", "VAR_21 = VAR_15[VAR_5].median()\n", "db.engine.execute('UPDATE \"{0}\" SET \"{1}\" = \\'{2}\\' WHERE \"{1}\" = \\'{3}\\' '\n .format(VAR_0, VAR_5, VAR_9, VAR_8))\n", "print('FIND-REPLACE FAILED')\n", "def FUNC_10(VAR_0, VAR_5, VAR_8, VAR_9, VAR_10=False):...\n", "db.engine.execute('UPDATE \"{0}\" SET \"{1}\" = {2} WHERE \"{1}\" IS NULL'.format\n (VAR_0, VAR_5, VAR_21))\n", "if VAR_10:\n", "print('FIND-REPLACE FAILED\\n' + str(e))\n", "def FUNC_11(VAR_0, VAR_5, VAR_11, VAR_9):...\n", "db.engine.execute(\n 'UPDATE \"{0}\" SET \"{1}\" = \\'{2}\\' WHERE \"{1}\" LIKE \\'%%{3}%%\\' '.format\n (VAR_0, VAR_5, VAR_9, VAR_8))\n", "db.engine.execute('UPDATE \"{0}\" SET \"{1}\" = REPLACE(\"{1}\", \\'{2}\\', \\'{3}\\')'\n .format(VAR_0, VAR_5, VAR_8, VAR_9))\n", "VAR_22 = True\n", "print('REGEX FIND-REPLACE FAILED:\\n' + str(e))\n", "def FUNC_12(VAR_0, VAR_5):...\n", "re.compile(VAR_11)\n", "VAR_22 = False\n", "if VAR_22:\n", "\"\"\"docstring\"\"\"\n", "db.engine.execute(\n 'UPDATE \"{0}\" SET \"{1}\" = REGEXP_REPLACE(\"{1}\", \\'{2}\\', \\'{3}\\')'.\n format(VAR_0, VAR_5, VAR_11, VAR_9))\n", "VAR_23 = pd.read_sql_table(VAR_0, db.engine)\n", "print('NORMALIZATION FAILED')\n", "def FUNC_13(VAR_0, VAR_5, VAR_6, VAR_12=False):...\n", "VAR_23[VAR_5] = (VAR_23[VAR_5] - VAR_23[VAR_5].mean()) / VAR_23[VAR_5].std(ddof\n =0)\n", "\"\"\"docstring\"\"\"\n", "db.engine.execute('DROP TABLE \"{0}\"'.format(VAR_0))\n", "if VAR_12:\n", "print('REMOVE OUTLIERS FAILED')\n", "def FUNC_14(VAR_0, VAR_13):...\n", "VAR_23.to_sql(VAR_16=table_name, con=db.engine, if_exists='fail', index=False)\n", "db.engine.execute('DELETE FROM \"{0}\" WHERE \"{1}\" < {2}'.format(VAR_0, VAR_5,\n VAR_6))\n", "db.engine.execute('DELETE FROM \"{0}\" WHERE \"{1}\" > {2}'.format(VAR_0, VAR_5,\n VAR_6))\n", "db.engine.execute('DELETE FROM \"{0}\" WHERE {1}'.format(VAR_0, VAR_13))\n", "def FUNC_15(VAR_0, VAR_5, VAR_14, VAR_15=None, VAR_16=None):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_15 is not None:\n", "print('WIDTH DISCRETIZATION FAILED:\\n' + str(e))\n", "def FUNC_16(VAR_0, VAR_5, VAR_14):...\n", "VAR_23 = VAR_15\n", "VAR_23 = pd.read_sql_table(VAR_0, db.engine)\n", "\"\"\"docstring\"\"\"\n", "if VAR_16 is not None:\n", "VAR_23 = pd.read_sql_table(VAR_0, db.engine)\n", "print('EQUAL FREQUENCY DISCRETIZATION FAILED:\\n' + str(e))\n", "VAR_24 = VAR_16\n", "if isinstance(VAR_14, list):\n", "VAR_25 = len(VAR_23[VAR_5])\n", "VAR_23[VAR_24] = pd.cut(VAR_23[VAR_5], VAR_14, precision=9).apply(str)\n", "VAR_24 = VAR_5 + '_custom_intervals'\n", "VAR_24 = VAR_5 + '_' + str(VAR_14) + '_eq_intervals'\n", "VAR_26 = VAR_25 // VAR_14\n", "db.engine.execute('DROP TABLE \"{0}\"'.format(VAR_0))\n", "VAR_27 = list(VAR_23[VAR_5].sort_values())\n", "VAR_23.to_sql(VAR_16=table_name, con=db.engine, if_exists='fail', index=False)\n", "VAR_28 = 0\n", "VAR_29 = []\n", "while VAR_28 < VAR_25:\n", "if VAR_29[-1] != VAR_27[-1] and len(VAR_29) == VAR_14 + 1:\n", "VAR_29.append(VAR_27[VAR_28])\n", "VAR_29[-1] = VAR_27[-1]\n", "if VAR_29[-1] != VAR_27[-1] and len(VAR_29) != VAR_14 + 1:\n", "VAR_28 += VAR_26\n", "VAR_29[0] = VAR_29[0] - VAR_29[0] * 0.001\n", "VAR_29.append(VAR_27[-1])\n", "VAR_29[-1] = VAR_29[-1] + VAR_29[-1] * 0.001\n", 
"VAR_24 = VAR_5 + '_' + str(VAR_14) + '_eq_freq_intervals'\n", "FUNC_15(VAR_0, VAR_5, VAR_29, VAR_23, VAR_24)\n" ]
[ "from app import database as db\n", "import pandas as pd\n", "import re\n", "import numpy as np\n", "def rename_attribute(table_name, column, new_name):...\n", "db.engine.execute('ALTER TABLE {0} RENAME COLUMN \"{1}\" TO \"{2}\"'.format(\n table_name, column, new_name))\n", "print('RENAMING FAILED: ' + str(e))\n", "def delete_attribute(table_name, column):...\n", "db.engine.execute('ALTER TABLE {0} DROP COLUMN \"{1}\"'.format(table_name,\n column))\n", "print('DELETING FAILED')\n", "def restore_original(table_name):...\n", "\"\"\"docstring\"\"\"\n", "original = 'og' + table_name[2:]\n", "print('FAILED TO RESTORE ORIGINAL')\n", "def change_attribute_type(table_name, table_col, new_type):...\n", "db.engine.execute('DROP TABLE \"{0}\"'.format(table_name))\n", "\"\"\"docstring\"\"\"\n", "db.engine.execute('CREATE TABLE \"{0}\" AS SELECT * FROM \"{1}\"'.format(\n table_name, original))\n", "current_type = db.engine.execute(\n \"SELECT data_type from information_schema.columns where table_name = '{0}' and column_name = '{1}';\"\n .format(table_name, table_col)).fetchall()[0][0]\n", "if new_type == 'INTEGER':\n", "db.engine.execute(\n 'ALTER TABLE {0} ALTER COLUMN \"{1}\" TYPE BIGINT USING \"{1}\"::bigint'.\n format(table_name, table_col))\n", "if new_type == 'DOUBLE':\n", "db.engine.execute(\n 'ALTER TABLE {0} ALTER COLUMN \"{1}\" TYPE DOUBLE PRECISION USING \"{1}\"::double precision'\n .format(table_name, table_col))\n", "if new_type == 'TEXT':\n", "if current_type == 'date':\n", "if new_type == 'DATE':\n", "db.engine.execute(\n 'ALTER TABLE {0} ALTER COLUMN \"{1}\" TYPE TEXT USING to_char(\"{1}\", \\'DD/MM/YYYY\\')'\n .format(table_name, table_col))\n", "if current_type == 'timestamp with time zone':\n", "if current_type == 'timestamp with time zone':\n", "if new_type == 'TIMESTAMP':\n", "db.engine.execute(\n 'ALTER TABLE {0} ALTER COLUMN \"{1}\" TYPE TEXT USING to_char(\"{1}\", \\'DD/MM/YYYY HH24:MI:SS\\')'\n .format(table_name, table_col))\n", "db.engine.execute('ALTER TABLE {0} ALTER COLUMN \"{1}\" TYPE TEXT'.format(\n table_name, table_col))\n", "db.engine.execute('ALTER TABLE {0} ALTER COLUMN \"{1}\" TYPE DATE'.format(\n table_name, table_col))\n", "db.engine.execute(\n 'ALTER TABLE {0} ALTER COLUMN \"{1}\" TYPE DATE USING to_date(\"{1}\", \\'DD/MM/YYYY\\')'\n .format(table_name, table_col))\n", "if current_type == 'date':\n", "def drop_attribute(table_name, attr):...\n", "db.engine.execute(\n 'ALTER TABLE {0} ALTER COLUMN \"{1}\" TYPE TIMESTAMP WITH TIME ZONE'.\n format(table_name, table_col))\n", "db.engine.execute(\n 'ALTER TABLE {0} ALTER COLUMN \"{1}\" TYPE TIMESTAMP WITH TIME ZONE USING to_timestamp(\"{1}\", \\'DD/MM/YYYY HH24:MI:SS\\')'\n .format(table_name, table_col))\n", "\"\"\"docstring\"\"\"\n", "db.engine.execute('ALTER TABLE \"{0}\" DROP COLUMN IF EXISTS \"{1}\"'.format(\n table_name, attr))\n", "print('FAILED TO DROP ATTRIBUTE {0} FROM {1}'.format(attr, table_name))\n", "def one_hot_encode(table_name, attr):...\n", "\"\"\"docstring\"\"\"\n", "dataframe = pd.read_sql_table(table_name, db.engine)\n", "print('ONE-HOT ENCODING FAILED')\n", "def fill_null_with(table_name, attr, value, text_type):...\n", "one_hot = pd.get_dummies(dataframe[attr])\n", "\"\"\"docstring\"\"\"\n", "print('OH', one_hot)\n", "if text_type:\n", "print('FILL NULL FAILED WITH FOLLOWING MESSAGE:\\n' + str(e))\n", "def fill_null_with_average(table_name, attr):...\n", "dataframe = dataframe.join(one_hot)\n", "db.engine.execute(\n 'UPDATE \"{0}\" SET \"{1}\" = \\'{2}\\' WHERE (\"{1}\" = \\'\\') IS NOT 
FALSE'.\n format(table_name, attr, value))\n", "db.engine.execute('UPDATE \"{0}\" SET \"{1}\" = {2} WHERE \"{1}\" IS NULL'.format\n (table_name, attr, value))\n", "\"\"\"docstring\"\"\"\n", "print('DF', dataframe)\n", "dataframe = pd.read_sql_table(table_name, db.engine, columns=[attr])\n", "print('FILL AVERAGE FAILED')\n", "def fill_null_with_median(table_name, attr):...\n", "db.engine.execute('DROP TABLE \"{0}\"'.format(table_name))\n", "average = dataframe[attr].mean()\n", "\"\"\"docstring\"\"\"\n", "dataframe.to_sql(name=table_name, con=db.engine, if_exists='fail', index=False)\n", "db.engine.execute('UPDATE \"{0}\" SET \"{1}\" = {2} WHERE \"{1}\" IS NULL'.format\n (table_name, attr, average))\n", "dataframe = pd.read_sql_table(table_name, db.engine, columns=[attr])\n", "print('FILL MEAN FAILED')\n", "def find_replace(table_name, attr, find, replace):...\n", "median = dataframe[attr].median()\n", "db.engine.execute('UPDATE \"{0}\" SET \"{1}\" = \\'{2}\\' WHERE \"{1}\" = \\'{3}\\' '\n .format(table_name, attr, replace, find))\n", "print('FIND-REPLACE FAILED')\n", "def substring_find_replace(table_name, attr, find, replace, full=False):...\n", "db.engine.execute('UPDATE \"{0}\" SET \"{1}\" = {2} WHERE \"{1}\" IS NULL'.format\n (table_name, attr, median))\n", "if full:\n", "print('FIND-REPLACE FAILED\\n' + str(e))\n", "def regex_find_replace(table_name, attr, regex, replace):...\n", "db.engine.execute(\n 'UPDATE \"{0}\" SET \"{1}\" = \\'{2}\\' WHERE \"{1}\" LIKE \\'%%{3}%%\\' '.format\n (table_name, attr, replace, find))\n", "db.engine.execute('UPDATE \"{0}\" SET \"{1}\" = REPLACE(\"{1}\", \\'{2}\\', \\'{3}\\')'\n .format(table_name, attr, find, replace))\n", "is_valid = True\n", "print('REGEX FIND-REPLACE FAILED:\\n' + str(e))\n", "def normalize_attribute(table_name, attr):...\n", "re.compile(regex)\n", "is_valid = False\n", "if is_valid:\n", "\"\"\"docstring\"\"\"\n", "db.engine.execute(\n 'UPDATE \"{0}\" SET \"{1}\" = REGEXP_REPLACE(\"{1}\", \\'{2}\\', \\'{3}\\')'.\n format(table_name, attr, regex, replace))\n", "df = pd.read_sql_table(table_name, db.engine)\n", "print('NORMALIZATION FAILED')\n", "def remove_outliers(table_name, attr, value, smaller_than=False):...\n", "df[attr] = (df[attr] - df[attr].mean()) / df[attr].std(ddof=0)\n", "\"\"\"docstring\"\"\"\n", "db.engine.execute('DROP TABLE \"{0}\"'.format(table_name))\n", "if smaller_than:\n", "print('REMOVE OUTLIERS FAILED')\n", "def delete_rows(table_name, condition):...\n", "df.to_sql(name=table_name, con=db.engine, if_exists='fail', index=False)\n", "db.engine.execute('DELETE FROM \"{0}\" WHERE \"{1}\" < {2}'.format(table_name,\n attr, value))\n", "db.engine.execute('DELETE FROM \"{0}\" WHERE \"{1}\" > {2}'.format(table_name,\n attr, value))\n", "db.engine.execute('DELETE FROM \"{0}\" WHERE {1}'.format(table_name, condition))\n", "def discretize_width(table_name, attr, intervals, dataframe=None, name=None):...\n", "\"\"\"docstring\"\"\"\n", "if dataframe is not None:\n", "print('WIDTH DISCRETIZATION FAILED:\\n' + str(e))\n", "def discretize_eq_freq(table_name, attr, intervals):...\n", "df = dataframe\n", "df = pd.read_sql_table(table_name, db.engine)\n", "\"\"\"docstring\"\"\"\n", "if name is not None:\n", "df = pd.read_sql_table(table_name, db.engine)\n", "print('EQUAL FREQUENCY DISCRETIZATION FAILED:\\n' + str(e))\n", "column_name = name\n", "if isinstance(intervals, list):\n", "attr_length = len(df[attr])\n", "df[column_name] = pd.cut(df[attr], intervals, precision=9).apply(str)\n", "column_name = attr + 
'_custom_intervals'\n", "column_name = attr + '_' + str(intervals) + '_eq_intervals'\n", "elements_per_interval = attr_length // intervals\n", "db.engine.execute('DROP TABLE \"{0}\"'.format(table_name))\n", "sorted_data = list(df[attr].sort_values())\n", "df.to_sql(name=table_name, con=db.engine, if_exists='fail', index=False)\n", "selector = 0\n", "edge_list = []\n", "while selector < attr_length:\n", "if edge_list[-1] != sorted_data[-1] and len(edge_list) == intervals + 1:\n", "edge_list.append(sorted_data[selector])\n", "edge_list[-1] = sorted_data[-1]\n", "if edge_list[-1] != sorted_data[-1] and len(edge_list) != intervals + 1:\n", "selector += elements_per_interval\n", "edge_list[0] = edge_list[0] - edge_list[0] * 0.001\n", "edge_list.append(sorted_data[-1])\n", "edge_list[-1] = edge_list[-1] + edge_list[-1] * 0.001\n", "column_name = attr + '_' + str(intervals) + '_eq_freq_intervals'\n", "discretize_width(table_name, attr, edge_list, df, column_name)\n" ]
[ 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "Import'", "Import'", "FunctionDef'", "Expr'", "Expr'", "FunctionDef'", "Expr'", "Expr'", "FunctionDef'", "Docstring", "Assign'", "Expr'", "FunctionDef'", "Expr'", "Docstring", "Expr'", "Assign'", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Condition", "Condition", "Expr'", "Condition", "Condition", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Condition", "FunctionDef'", "Expr'", "Expr'", "Docstring", "Expr'", "Expr'", "FunctionDef'", "Docstring", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Docstring", "Expr'", "Condition", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Expr'", "Docstring", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Docstring", "Expr'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Expr'", "Condition", "Expr'", "FunctionDef'", "Expr'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Condition", "Docstring", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Docstring", "Expr'", "Condition", "Expr'", "FunctionDef'", "Expr'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "Docstring", "Condition", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Docstring", "Condition", "Assign'", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "Condition", "Expr'", "Assign'", "Condition", "AugAssign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_2(self):...\n", "self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n", "VAR_1 = {'name': self.volume_name, 'size': 1}\n", "self.driver._eql_execute('volume', 'create', VAR_1['name'], '%sG' % VAR_1[\n 'size'], 'pool', self.configuration.eqlx_pool, 'thin-provision').AndReturn(\n ['iSCSI target name is %s.' % self.fake_iqn])\n", "self.mox.ReplayAll()\n", "VAR_2 = self.driver.create_volume(VAR_1)\n", "self.assertEqual(VAR_2, self._model_update)\n" ]
[ "def test_create_volume(self):...\n", "self.driver._eql_execute = self.mox.CreateMock(self.driver._eql_execute)\n", "volume = {'name': self.volume_name, 'size': 1}\n", "self.driver._eql_execute('volume', 'create', volume['name'], '%sG' % volume\n ['size'], 'pool', self.configuration.eqlx_pool, 'thin-provision'\n ).AndReturn(['iSCSI target name is %s.' % self.fake_iqn])\n", "self.mox.ReplayAll()\n", "model_update = self.driver.create_volume(volume)\n", "self.assertEqual(model_update, self._model_update)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_14(VAR_16):...\n", "\"\"\"docstring\"\"\"\n", "return FUNC_13(VAR_16)\n" ]
[ "def temporary(value):...\n", "\"\"\"docstring\"\"\"\n", "return temp(value)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_5(self):...\n", "VAR_1 = '/api/apps'\n", "VAR_2 = self.client.post(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 201)\n", "VAR_3 = VAR_2.data['id']\n", "VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_4 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n", "self.assertEqual(VAR_2.status_code, 201)\n", "VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_2 = self.client.get(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 200)\n", "self.assertEqual(len(VAR_2.data['results']), 2)\n", "VAR_1 = '/api/apps/{app_id}/releases'.format(**locals())\n", "VAR_2 = self.client.get(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 200)\n", "self.assertEqual(len(VAR_2.data['results']), 2)\n", "VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n", "VAR_2 = self.client.get(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 200)\n", "self.assertEqual(len(VAR_2.data['results']), 1)\n", "VAR_1 = '/api/apps/{app_id}/scale'.format(**locals())\n", "VAR_4 = {'web': 20}\n", "VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n", "self.assertEqual(VAR_2.status_code, 204)\n", "chaos.CREATE_ERROR_RATE = 0.5\n", "chaos.START_ERROR_RATE = 0.5\n", "VAR_1 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_4 = {'image': 'autotest/example', 'sha': 'b' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "VAR_2 = self.client.post(VAR_1, json.dumps(VAR_4), content_type=\n 'application/json')\n", "self.assertEqual(VAR_2.status_code, 503)\n", "VAR_1 = '/api/apps/{app_id}/releases'.format(**locals())\n", "VAR_2 = self.client.get(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 200)\n", "self.assertEqual(len(VAR_2.data['results']), 2)\n", "VAR_1 = '/api/apps/{app_id}/containers'.format(**locals())\n", "VAR_2 = self.client.get(VAR_1)\n", "self.assertEqual(VAR_2.status_code, 200)\n", "self.assertEqual(len(VAR_2.data['results']), 20)\n", "VAR_5 = set([c['state'] for c in VAR_2.data['results']])\n", "self.assertEqual(VAR_5, set(['up']))\n" ]
[ "def test_build_chaos(self):...\n", "url = '/api/apps'\n", "response = self.client.post(url)\n", "self.assertEqual(response.status_code, 201)\n", "app_id = response.data['id']\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 2)\n", "url = '/api/apps/{app_id}/releases'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 2)\n", "url = '/api/apps/{app_id}/containers'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 1)\n", "url = '/api/apps/{app_id}/scale'.format(**locals())\n", "body = {'web': 20}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 204)\n", "chaos.CREATE_ERROR_RATE = 0.5\n", "chaos.START_ERROR_RATE = 0.5\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "body = {'image': 'autotest/example', 'sha': 'b' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 503)\n", "url = '/api/apps/{app_id}/releases'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 2)\n", "url = '/api/apps/{app_id}/containers'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 20)\n", "states = set([c['state'] for c in response.data['results']])\n", "self.assertEqual(states, set(['up']))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_25(self):...\n", "return False\n" ]
[ "def is_anonymous(self):...\n", "return False\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2(VAR_5, VAR_6, VAR_3='NGTREE', VAR_7=True):...\n", "\"\"\"docstring\"\"\"\n", "VAR_11 = 'CSV', 'TREE', 'JSON', 'YAML', 'NGTREE'\n", "if VAR_3 in VAR_11:\n", "VAR_0.info('Query: Finding Switched Paths (%s --> %s) for %s', VAR_5, VAR_6,\n nglib.user)\n", "return\n", "VAR_23 = []\n", "VAR_19 = nglib.ngtree.get_ngtree('Switched Paths', tree_type='L2-PATH')\n", "VAR_19['Name'] = VAR_5 + ' -> ' + VAR_6\n", "VAR_24 = dict()\n", "VAR_25 = nglib.py2neo_ses.cypher.execute('MATCH (ss:Switch), (ds:Switch), ' +\n 'sp = allShortestPaths((ss)-[:NEI*0..9]-(ds)) ' +\n 'WHERE ss.name =~ {switch1} AND ds.name =~ {switch2}' +\n 'UNWIND nodes(sp) as s1 UNWIND nodes(sp) as s2 ' +\n 'MATCH (s1)<-[nei:NEI]-(s2), plen = shortestPath((ss)-[:NEI*0..9]-(s1)) ' +\n 'RETURN DISTINCT s1.name AS csw, s2.name AS psw, ' +\n 'nei.pPort AS pport, nei.cPort as cport, nei.native AS native, ' +\n 'nei.cPc as cPc, nei.pPc AS pPc, nei.vlans AS vlans, nei.rvlans as rvlans, '\n + 'nei._rvlans AS p_rvlans, ' +\n 'LENGTH(plen) as distance ORDER BY distance, s1.name, s2.name', {\n 'switch1': VAR_5, 'switch2': VAR_6})\n", "VAR_26 = 0\n", "for rec in VAR_25:\n", "VAR_39 = nglib.ngtree.get_ngtree('Link', tree_type='L2-HOP')\n", "if VAR_23:\n", "if rec.distance == 0:\n", "VAR_19['Links'] = len(VAR_23)\n", "if VAR_7:\n", "VAR_39['distance'] = rec.distance + 1\n", "if VAR_26:\n", "VAR_19['Distance'] = max([s['distance'] for s in VAR_23])\n", "print('No results found for path between {:} and {:}'.format(VAR_5, VAR_6))\n", "VAR_26 = 1\n", "if rec.distance == VAR_26:\n", "VAR_39['distance'] = rec.distance\n", "if VAR_3 == 'CSV':\n", "VAR_39['Name'] = ('#' + str(VAR_39['distance']) + ' ' + rec.psw + '(' + rec\n .pport + ') <-> ' + rec.csw + '(' + rec.cport + ')')\n", "VAR_26 += 1\n", "if rec.distance == VAR_26 - 1:\n", "nglib.query.print_dict_csv(VAR_23)\n", "VAR_19 = nglib.query.exp_ngtree(VAR_19, VAR_3)\n", "nglib.ngtree.add_child_ngtree(VAR_19, VAR_39)\n", "VAR_39['distance'] = rec.distance + 1\n", "VAR_39['distance'] = rec.distance + 1\n", "VAR_39['distance'] = rec.distance\n", "return VAR_19\n", "VAR_39['Child Switch'] = rec.csw\n", "VAR_26 = 0\n", "VAR_39['Child Port'] = rec.cport\n", "VAR_39['Parent Switch'] = rec.psw\n", "VAR_39['Parent Port'] = rec.pport\n", "if rec.cPc:\n", "VAR_39['Child Channel'] = rec.cPc\n", "if rec.rvlans:\n", "VAR_39['Parent Channel'] = rec.pPc\n", "VAR_39['Link VLANs'] = rec.vlans\n", "VAR_23.append(VAR_39)\n", "VAR_39['Link rVLANs'] = rec.rvlans\n", "VAR_39['_rvlans'] = rec.p_rvlans\n", "VAR_39['Native VLAN'] = rec.native\n" ]
[ "def get_switched_path(switch1, switch2, rtype='NGTREE', verbose=True):...\n", "\"\"\"docstring\"\"\"\n", "rtypes = 'CSV', 'TREE', 'JSON', 'YAML', 'NGTREE'\n", "if rtype in rtypes:\n", "logger.info('Query: Finding Switched Paths (%s --> %s) for %s', switch1,\n switch2, nglib.user)\n", "return\n", "pathList = []\n", "ngtree = nglib.ngtree.get_ngtree('Switched Paths', tree_type='L2-PATH')\n", "ngtree['Name'] = switch1 + ' -> ' + switch2\n", "dist = dict()\n", "swp = nglib.py2neo_ses.cypher.execute('MATCH (ss:Switch), (ds:Switch), ' +\n 'sp = allShortestPaths((ss)-[:NEI*0..9]-(ds)) ' +\n 'WHERE ss.name =~ {switch1} AND ds.name =~ {switch2}' +\n 'UNWIND nodes(sp) as s1 UNWIND nodes(sp) as s2 ' +\n 'MATCH (s1)<-[nei:NEI]-(s2), plen = shortestPath((ss)-[:NEI*0..9]-(s1)) ' +\n 'RETURN DISTINCT s1.name AS csw, s2.name AS psw, ' +\n 'nei.pPort AS pport, nei.cPort as cport, nei.native AS native, ' +\n 'nei.cPc as cPc, nei.pPc AS pPc, nei.vlans AS vlans, nei.rvlans as rvlans, '\n + 'nei._rvlans AS p_rvlans, ' +\n 'LENGTH(plen) as distance ORDER BY distance, s1.name, s2.name', {\n 'switch1': switch1, 'switch2': switch2})\n", "last = 0\n", "for rec in swp:\n", "swptree = nglib.ngtree.get_ngtree('Link', tree_type='L2-HOP')\n", "if pathList:\n", "if rec.distance == 0:\n", "ngtree['Links'] = len(pathList)\n", "if verbose:\n", "swptree['distance'] = rec.distance + 1\n", "if last:\n", "ngtree['Distance'] = max([s['distance'] for s in pathList])\n", "print('No results found for path between {:} and {:}'.format(switch1, switch2))\n", "last = 1\n", "if rec.distance == last:\n", "swptree['distance'] = rec.distance\n", "if rtype == 'CSV':\n", "swptree['Name'] = ('#' + str(swptree['distance']) + ' ' + rec.psw + '(' +\n rec.pport + ') <-> ' + rec.csw + '(' + rec.cport + ')')\n", "last += 1\n", "if rec.distance == last - 1:\n", "nglib.query.print_dict_csv(pathList)\n", "ngtree = nglib.query.exp_ngtree(ngtree, rtype)\n", "nglib.ngtree.add_child_ngtree(ngtree, swptree)\n", "swptree['distance'] = rec.distance + 1\n", "swptree['distance'] = rec.distance + 1\n", "swptree['distance'] = rec.distance\n", "return ngtree\n", "swptree['Child Switch'] = rec.csw\n", "last = 0\n", "swptree['Child Port'] = rec.cport\n", "swptree['Parent Switch'] = rec.psw\n", "swptree['Parent Port'] = rec.pport\n", "if rec.cPc:\n", "swptree['Child Channel'] = rec.cPc\n", "if rec.rvlans:\n", "swptree['Parent Channel'] = rec.pPc\n", "swptree['Link VLANs'] = rec.vlans\n", "pathList.append(swptree)\n", "swptree['Link rVLANs'] = rec.rvlans\n", "swptree['_rvlans'] = rec.p_rvlans\n", "swptree['Native VLAN'] = rec.native\n" ]
[ 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Expr'", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "AugAssign'", "Condition", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'" ]
[ "@VAR_1.route('/login', methods=['POST'])...\n", "VAR_12 = request.form['email']\n", "VAR_14 = request.form['password']\n", "VAR_13 = query_user(VAR_12)\n", "if VAR_13 != None:\n", "if check_password_hash(VAR_13.password, VAR_14):\n", "flash('Incorrect Email/Password')\n", "VAR_18['email'] = VAR_12\n", "return redirect('/login')\n", "VAR_20 = VAR_18.pop('return_url', None)\n", "if VAR_20:\n", "return redirect(VAR_20)\n", "return redirect('/')\n" ]
[ "@app.route('/login', methods=['POST'])...\n", "email = request.form['email']\n", "password = request.form['password']\n", "user = query_user(email)\n", "if user != None:\n", "if check_password_hash(user.password, password):\n", "flash('Incorrect Email/Password')\n", "session['email'] = email\n", "return redirect('/login')\n", "returnUrl = session.pop('return_url', None)\n", "if returnUrl:\n", "return redirect(returnUrl)\n", "return redirect('/')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Expr'", "Assign'", "Return'", "Assign'", "Condition", "Return'", "Return'" ]
[ "__author__ = 'Johannes Köster'\n", "__copyright__ = 'Copyright 2015, Johannes Köster'\n", "__email__ = '[email protected]'\n", "__license__ = 'MIT'\n", "import os\n", "import re\n", "import sys\n", "import inspect\n", "import sre_constants\n", "from collections import defaultdict\n", "from snakemake.io import IOFile, _IOFile, protected, temp, dynamic, Namedlist\n", "from snakemake.io import expand, InputFiles, OutputFiles, Wildcards, Params, Log\n", "from snakemake.io import apply_wildcards, is_flagged, not_iterable\n", "from snakemake.exceptions import RuleException, IOFileException, WildcardError, InputFunctionException\n", "def __init__(self, *VAR_0, VAR_1=None, VAR_2=None):...\n", "\"\"\"docstring\"\"\"\n", "if len(VAR_0) == 2:\n", "VAR_10, VAR_41 = VAR_0\n", "if len(VAR_0) == 1:\n", "self.name = VAR_10\n", "VAR_17 = VAR_0[0]\n", "def FUNC_0(self, VAR_3, VAR_4=True):...\n", "self.workflow = VAR_41\n", "self.name = VAR_17.name\n", "def FUNC_21(VAR_16):...\n", "self.docstring = None\n", "self.workflow = VAR_17.workflow\n", "return (VAR_16.input, VAR_16.dynamic_input) if VAR_4 else (VAR_16.output,\n VAR_16.dynamic_output)\n", "self.message = None\n", "self.docstring = VAR_17.docstring\n", "self._input = InputFiles()\n", "self.message = VAR_17.message\n", "self._output = OutputFiles()\n", "self._input = InputFiles(VAR_17._input)\n", "self._params = Params()\n", "self._output = OutputFiles(VAR_17._output)\n", "self.dependencies = dict()\n", "self._params = Params(VAR_17._params)\n", "self.dynamic_output = set()\n", "self.dependencies = dict(VAR_17.dependencies)\n", "self.dynamic_input = set()\n", "self.dynamic_output = set(VAR_17.dynamic_output)\n", "self.temp_output = set()\n", "self.dynamic_input = set(VAR_17.dynamic_input)\n", "self.protected_output = set()\n", "self.temp_output = set(VAR_17.temp_output)\n", "self.touch_output = set()\n", "self.protected_output = set(VAR_17.protected_output)\n", "self.subworkflow_input = dict()\n", "self.touch_output = set(VAR_17.touch_output)\n", "self.resources = dict(_cores=1, _nodes=1)\n", "self.subworkflow_input = dict(VAR_17.subworkflow_input)\n", "self.priority = 0\n", "self.resources = VAR_17.resources\n", "self.version = None\n", "self.priority = VAR_17.priority\n", "self._log = Log()\n", "self.version = VAR_17.version\n", "self._benchmark = None\n", "self._log = VAR_17._log\n", "self.wildcard_names = set()\n", "self._benchmark = VAR_17._benchmark\n", "self.lineno = VAR_1\n", "self.wildcard_names = set(VAR_17.wildcard_names)\n", "self.snakefile = VAR_2\n", "self.lineno = VAR_17.lineno\n", "self.run_func = None\n", "self.snakefile = VAR_17.snakefile\n", "self.shellcmd = None\n", "self.run_func = VAR_17.run_func\n", "self.norun = False\n", "self.shellcmd = VAR_17.shellcmd\n", "self.norun = VAR_17.norun\n" ]
[ "__author__ = 'Johannes Köster'\n", "__copyright__ = 'Copyright 2015, Johannes Köster'\n", "__email__ = '[email protected]'\n", "__license__ = 'MIT'\n", "import os\n", "import re\n", "import sys\n", "import inspect\n", "import sre_constants\n", "from collections import defaultdict\n", "from snakemake.io import IOFile, _IOFile, protected, temp, dynamic, Namedlist\n", "from snakemake.io import expand, InputFiles, OutputFiles, Wildcards, Params, Log\n", "from snakemake.io import apply_wildcards, is_flagged, not_iterable\n", "from snakemake.exceptions import RuleException, IOFileException, WildcardError, InputFunctionException\n", "def __init__(self, *args, lineno=None, snakefile=None):...\n", "\"\"\"docstring\"\"\"\n", "if len(args) == 2:\n", "name, workflow = args\n", "if len(args) == 1:\n", "self.name = name\n", "other = args[0]\n", "def dynamic_branch(self, wildcards, input=True):...\n", "self.workflow = workflow\n", "self.name = other.name\n", "def get_io(rule):...\n", "self.docstring = None\n", "self.workflow = other.workflow\n", "return (rule.input, rule.dynamic_input) if input else (rule.output, rule.\n dynamic_output)\n", "self.message = None\n", "self.docstring = other.docstring\n", "self._input = InputFiles()\n", "self.message = other.message\n", "self._output = OutputFiles()\n", "self._input = InputFiles(other._input)\n", "self._params = Params()\n", "self._output = OutputFiles(other._output)\n", "self.dependencies = dict()\n", "self._params = Params(other._params)\n", "self.dynamic_output = set()\n", "self.dependencies = dict(other.dependencies)\n", "self.dynamic_input = set()\n", "self.dynamic_output = set(other.dynamic_output)\n", "self.temp_output = set()\n", "self.dynamic_input = set(other.dynamic_input)\n", "self.protected_output = set()\n", "self.temp_output = set(other.temp_output)\n", "self.touch_output = set()\n", "self.protected_output = set(other.protected_output)\n", "self.subworkflow_input = dict()\n", "self.touch_output = set(other.touch_output)\n", "self.resources = dict(_cores=1, _nodes=1)\n", "self.subworkflow_input = dict(other.subworkflow_input)\n", "self.priority = 0\n", "self.resources = other.resources\n", "self.version = None\n", "self.priority = other.priority\n", "self._log = Log()\n", "self.version = other.version\n", "self._benchmark = None\n", "self._log = other._log\n", "self.wildcard_names = set()\n", "self._benchmark = other._benchmark\n", "self.lineno = lineno\n", "self.wildcard_names = set(other.wildcard_names)\n", "self.snakefile = snakefile\n", "self.lineno = other.lineno\n", "self.run_func = None\n", "self.snakefile = other.snakefile\n", "self.shellcmd = None\n", "self.run_func = other.run_func\n", "self.norun = False\n", "self.shellcmd = other.shellcmd\n", "self.norun = other.norun\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Assign'", "Assign'", "Assign'", "Assign'", "Import'", "Import'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Docstring", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Docstring", "Assign'", "Return'", "Assign'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_24(self, VAR_7, VAR_17):...\n", "self.common.extend_volume(VAR_7, VAR_17)\n" ]
[ "def extend_volume(self, volume, new_size):...\n", "self.common.extend_volume(volume, new_size)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def __init__(self):...\n", "self.updated_input = set()\n", "self.updated_input_run = set()\n", "self.missing_output = set()\n", "self.incomplete_output = set()\n", "self.forced = False\n", "self.noio = False\n", "self.nooutput = False\n", "self.derived = True\n" ]
[ "def __init__(self):...\n", "self.updated_input = set()\n", "self.updated_input_run = set()\n", "self.missing_output = set()\n", "self.incomplete_output = set()\n", "self.forced = False\n", "self.noio = False\n", "self.nooutput = False\n", "self.derived = True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_7(VAR_3):...\n", "if VAR_3 and VAR_3.strip():\n", "return False\n", "return True\n" ]
[ "def is_blank(string):...\n", "if string and string.strip():\n", "return False\n", "return True\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_5(self):...\n", "if self.redirected_to is None:\n", "return None\n", "VAR_14 = self.redirected_to.split('&')\n", "if len(VAR_14) != 2:\n", "VAR_0.warning(\"Redirected to an unexpected page: `%s'\", self.redirected_to)\n", "VAR_21 = decrypt_number(VAR_14[-1])\n", "VAR_0.warning(\"Unable to decrypt user test id from page: `%s'\", self.\n redirected_to)\n", "return VAR_21\n", "return None\n", "return None\n" ]
[ "def get_user_test_id(self):...\n", "if self.redirected_to is None:\n", "return None\n", "p = self.redirected_to.split('&')\n", "if len(p) != 2:\n", "logger.warning(\"Redirected to an unexpected page: `%s'\", self.redirected_to)\n", "user_test_id = decrypt_number(p[-1])\n", "logger.warning(\"Unable to decrypt user test id from page: `%s'\", self.\n redirected_to)\n", "return user_test_id\n", "return None\n", "return None\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "Condition", "Expr'", "Assign'", "Expr'", "Return'", "Return'", "Return'" ]
[ "def FUNC_1(self, VAR_2, *VAR_5, **VAR_6):...\n", "" ]
[ "def get(self, dataset, *args, **kwargs):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "@app.route('/api/images/<type>', methods=['GET'])...\n", "return FUNC_34(VAR_11).to_map()\n" ]
[ "@app.route('/api/images/<type>', methods=['GET'])...\n", "return get_image_database(type).to_map()\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_7(self):...\n", "return {}\n" ]
[ "def Response(self):...\n", "return {}\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_1(VAR_2=0, VAR_3=VAR_0, VAR_4=None, VAR_5={}, VAR_6='INBOX', VAR_7...\n", "VAR_13 = CLASS_0({'flags': VAR_3, 'mbox_uuid': VAR_6, 'type': 'flags',\n 'uid': VAR_2, 'chash': VAR_8})\n", "if VAR_4 is None:\n", "VAR_4 = {}\n", "if not (VAR_4.get('received') or VAR_4.get('date')):\n", "VAR_4.update(VAR_1)\n", "VAR_4['headers'] = VAR_5\n", "VAR_14 = CLASS_0(VAR_4)\n", "VAR_15 = CLASS_0({'raw': VAR_7, 'type': 'cnt'})\n", "return VAR_13, VAR_14, VAR_15\n" ]
[ "def leap_mail(uid=0, flags=LEAP_FLAGS, headers=None, extra_headers={},...\n", "fdoc = TestDoc({'flags': flags, 'mbox_uuid': mbox_uuid, 'type': 'flags',\n 'uid': uid, 'chash': chash})\n", "if headers is None:\n", "headers = {}\n", "if not (headers.get('received') or headers.get('date')):\n", "headers.update(DEFAULT_HEADERS)\n", "headers['headers'] = extra_headers\n", "hdoc = TestDoc(headers)\n", "bdoc = TestDoc({'raw': body, 'type': 'cnt'})\n", "return fdoc, hdoc, bdoc\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "from odoo import fields, models, _, api, exceptions\n", "from odoo.addons.base_crapo_workflow.mixins import crapo_automata_mixins\n", "\"\"\"\n A state used in the context of an automaton\n \"\"\"\n", "VAR_0 = 'crapo.state'\n", "VAR_1 = u'State in a workflow, specific to a given model'\n", "VAR_2 = 'sequence, name'\n", "VAR_3 = fields.Char(help=\"State's name\", required=True, translate=True, size=32\n )\n", "VAR_4 = fields.Char(required=False, translate=True, size=256)\n", "VAR_5 = fields.Integer(default=1, help=\n 'Sequence gives the order in which states are displayed')\n", "VAR_6 = fields.Boolean(string='Folded in kanban', help=\n 'This stage is folded in the kanban view when there are no records in that stage to display.'\n , default=False)\n", "@api.multi...\n", "\"\"\"docstring\"\"\"\n", "if 'default_state' in VAR_7:\n", "if VAR_7['default_state']:\n", "return super(CLASS_0, self).write(VAR_7)\n", "if len(self) > 1:\n", "VAR_8 = self.search([('default_state', '=', True), ('automaton', '=', self.\n automaton.id), ('id', '!=', self.id)])\n", "for s in VAR_8:\n", "s.write({'default_state': False})\n" ]
[ "from odoo import fields, models, _, api, exceptions\n", "from odoo.addons.base_crapo_workflow.mixins import crapo_automata_mixins\n", "\"\"\"\n A state used in the context of an automaton\n \"\"\"\n", "_name = 'crapo.state'\n", "_description = u'State in a workflow, specific to a given model'\n", "_order = 'sequence, name'\n", "name = fields.Char(help=\"State's name\", required=True, translate=True, size=32)\n", "description = fields.Char(required=False, translate=True, size=256)\n", "sequence = fields.Integer(default=1, help=\n 'Sequence gives the order in which states are displayed')\n", "fold = fields.Boolean(string='Folded in kanban', help=\n 'This stage is folded in the kanban view when there are no records in that stage to display.'\n , default=False)\n", "@api.multi...\n", "\"\"\"docstring\"\"\"\n", "if 'default_state' in values:\n", "if values['default_state']:\n", "return super(State, self).write(values)\n", "if len(self) > 1:\n", "found = self.search([('default_state', '=', True), ('automaton', '=', self.\n automaton.id), ('id', '!=', self.id)])\n", "for s in found:\n", "s.write({'default_state': False})\n" ]
[ 0, 4, 0, 0, 0, 0, 4, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Docstring", "Condition", "Condition", "Return'", "Condition", "Assign'", "For", "Expr'" ]
[ "def FUNC_14(self, VAR_11, VAR_12=None):...\n", "self.backward_tree = Node(VAR_11)\n", "self.add_backward_children(self.backward_tree)\n" ]
[ "def build_backward_node_tree(self, oldest_node, parent=None):...\n", "self.backward_tree = Node(oldest_node)\n", "self.add_backward_children(self.backward_tree)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_7(self, VAR_11):...\n", "if VAR_0(VAR_11):\n", "return 'jsonb'\n", "return super().db_type(VAR_11)\n" ]
[ "def db_type(self, connection):...\n", "if is_postgresql(connection):\n", "return 'jsonb'\n", "return super().db_type(connection)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "@VAR_2.route('/wins')...\n", "if VAR_0 == None:\n", "FUNC_16()\n", "VAR_26 = request.args.get('tag', default='christmasmike')\n", "VAR_8 = \"SELECT * FROM matches WHERE winner = '\" + str(VAR_26\n ) + \"' ORDER BY date DESC;\"\n", "VAR_27 = VAR_0.exec(VAR_8)\n", "VAR_27 = [str(x) for x in VAR_27]\n", "VAR_27 = '\\n'.join(VAR_27)\n", "return json.dumps(VAR_27)\n" ]
[ "@endpoints.route('/wins')...\n", "if db == None:\n", "init()\n", "player = request.args.get('tag', default='christmasmike')\n", "sql = \"SELECT * FROM matches WHERE winner = '\" + str(player\n ) + \"' ORDER BY date DESC;\"\n", "result = db.exec(sql)\n", "result = [str(x) for x in result]\n", "result = '\\n'.join(result)\n", "return json.dumps(result)\n" ]
[ 0, 0, 0, 0, 4, 4, 0, 0, 0 ]
[ "Condition", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "@validate(VAR_0=VMeetup('id'), VAR_15=VMenu('controller', CommentSortMenu),...\n", "VAR_18 = Link._byID(VAR_0.assoc_link)\n", "VAR_20 = c.user.pref_num_comments or g.num_comments\n", "VAR_21 = g.max_comments if VAR_16 == 'true' else VAR_20\n", "VAR_22 = CommentBuilder(VAR_18, CommentSortMenu.operator(VAR_15), None, None)\n", "VAR_23 = NestedListing(VAR_22, VAR_21=num, parent_name=article._fullname)\n", "VAR_24 = PaneStack()\n", "if c.user_is_loggedin:\n", "VAR_24.append(CommentReplyBox())\n", "VAR_24.append(VAR_23.listing())\n", "VAR_24.append(CommentReplyBox(link_name=article._fullname))\n", "VAR_25 = CommentSortMenu(default=sort, type='dropdown2')\n", "VAR_26 = [VAR_25, NumCommentsMenu(VAR_18.num_comments, default=num_comments)]\n", "VAR_27 = CommentListing(VAR_27=displayPane, VAR_16=article.num_comments,\n VAR_26=nav_menus)\n", "VAR_28 = None\n", "if c.user_is_loggedin:\n", "VAR_29 = VAR_18._getLastClickTime(c.user)\n", "VAR_2 = ShowMeetup(VAR_0=meetup, VAR_27=content, fullname=article._fullname,\n VAR_28=lastViewed)\n", "VAR_28 = VAR_29._date if VAR_29 else None\n", "return BoringPage(pagename=meetup.title, VAR_27=res, body_class='meetup'\n ).render()\n", "VAR_18._click(c.user)\n" ]
[ "@validate(meetup=VMeetup('id'), sort=VMenu('controller', CommentSortMenu),...\n", "article = Link._byID(meetup.assoc_link)\n", "user_num = c.user.pref_num_comments or g.num_comments\n", "num = g.max_comments if num_comments == 'true' else user_num\n", "builder = CommentBuilder(article, CommentSortMenu.operator(sort), None, None)\n", "listing = NestedListing(builder, num=num, parent_name=article._fullname)\n", "displayPane = PaneStack()\n", "if c.user_is_loggedin:\n", "displayPane.append(CommentReplyBox())\n", "displayPane.append(listing.listing())\n", "displayPane.append(CommentReplyBox(link_name=article._fullname))\n", "sort_menu = CommentSortMenu(default=sort, type='dropdown2')\n", "nav_menus = [sort_menu, NumCommentsMenu(article.num_comments, default=\n num_comments)]\n", "content = CommentListing(content=displayPane, num_comments=article.\n num_comments, nav_menus=nav_menus)\n", "lastViewed = None\n", "if c.user_is_loggedin:\n", "clicked = article._getLastClickTime(c.user)\n", "res = ShowMeetup(meetup=meetup, content=content, fullname=article._fullname,\n lastViewed=lastViewed)\n", "lastViewed = clicked._date if clicked else None\n", "return BoringPage(pagename=meetup.title, content=res, body_class='meetup'\n ).render()\n", "article._click(c.user)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Return'", "Expr'" ]
[ "def FUNC_5(VAR_9=False, VAR_10=False, VAR_11=True):...\n", "VAR_15 = FUNC_4()\n", "if len(VAR_15) == 0:\n", "if VAR_9:\n", "VAR_16 = []\n", "sys.stderr.write('No missing dependencies\\n')\n", "return 0\n", "for e in VAR_15:\n", "VAR_16 += e.deps\n", "VAR_17 = ' '.join(sorted(VAR_16))\n", "if VAR_10:\n", "sys.stderr.write('Missing dependencies: %s\\n' % VAR_17)\n", "if os.geteuid() != 0:\n", "return 0\n", "sys.stderr.write('Missing dependencies: %s\\n' % VAR_17)\n", "if VAR_9:\n", "sys.stderr.write('Package installation is not possible as non-root.\\n')\n", "sys.stderr.write('Installing %s\\n' % VAR_17)\n", "VAR_18 = 0\n", "return 2\n", "install_packages(VAR_16, VAR_11=allow_daemons, aptopts=[\n '--no-install-recommends'])\n", "sys.stderr.write('%s\\n' % e)\n", "return VAR_18\n", "VAR_18 = e.exit_code\n" ]
[ "def install_deps(verbosity=False, dry_run=False, allow_daemons=True):...\n", "errors = find_missing_deps()\n", "if len(errors) == 0:\n", "if verbosity:\n", "missing_pkgs = []\n", "sys.stderr.write('No missing dependencies\\n')\n", "return 0\n", "for e in errors:\n", "missing_pkgs += e.deps\n", "deps_string = ' '.join(sorted(missing_pkgs))\n", "if dry_run:\n", "sys.stderr.write('Missing dependencies: %s\\n' % deps_string)\n", "if os.geteuid() != 0:\n", "return 0\n", "sys.stderr.write('Missing dependencies: %s\\n' % deps_string)\n", "if verbosity:\n", "sys.stderr.write('Package installation is not possible as non-root.\\n')\n", "sys.stderr.write('Installing %s\\n' % deps_string)\n", "ret = 0\n", "return 2\n", "install_packages(missing_pkgs, allow_daemons=allow_daemons, aptopts=[\n '--no-install-recommends'])\n", "sys.stderr.write('%s\\n' % e)\n", "return ret\n", "ret = e.exit_code\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Condition", "Assign'", "Expr'", "Return'", "For", "AugAssign'", "Assign'", "Condition", "Expr'", "Condition", "Return'", "Expr'", "Condition", "Expr'", "Expr'", "Assign'", "Return'", "Expr'", "Expr'", "Return'", "Assign'" ]
[ "@detail_route(methods=['post'])...\n", "return self.set_pending_action(pending_actions.CANCEL, *VAR_13, **kwargs)\n" ]
[ "@detail_route(methods=['post'])...\n", "return self.set_pending_action(pending_actions.CANCEL, *args, **kwargs)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def __init__(self):...\n", "self.license_files = 0\n", "self.exception_files = 0\n", "self.licenses = []\n", "self.exceptions = {}\n" ]
[ "def __init__(self):...\n", "self.license_files = 0\n", "self.exception_files = 0\n", "self.licenses = []\n", "self.exceptions = {}\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "import networkx as nx\n", "import random\n", "def __init__(self, VAR_0, VAR_1, VAR_2, VAR_3, VAR_4):...\n", "self.G = nx.Graph()\n", "self.size = VAR_0\n", "self.width = VAR_1\n", "self.keyPoolSize = VAR_2\n", "self.keysPerNode = VAR_3\n", "self.commRange = VAR_4\n", "self.genNodes()\n", "self.addEdges()\n", "self.calcAllLKVM()\n", "def FUNC_0(self):...\n", "for VAR_7 in self.G.edges():\n", "self.calcLKVM(VAR_7)\n", "def FUNC_1(self, VAR_5):...\n", "for VAR_7 in self.G.edges():\n", "self.calcWLPVM(VAR_7, VAR_5)\n", "def FUNC_2(self, VAR_6):...\n", "for VAR_7 in self.G.edges():\n", "self.calcTPVM(VAR_7, VAR_6)\n", "def FUNC_3(self, VAR_7):...\n", "VAR_11 = VAR_7[0]\n", "VAR_12 = VAR_7[1]\n", "VAR_13 = self.G.nodes(1)[VAR_11][1]['keys']\n", "VAR_14 = self.G.nodes(1)[VAR_12][1]['keys']\n", "VAR_15 = VAR_13.intersection(VAR_14)\n", "VAR_16 = set()\n", "VAR_17 = 0\n", "while not VAR_15.issubset(VAR_16):\n", "VAR_25 = random.randint(0, self.size)\n", "self.G[VAR_11][VAR_12]['lkvm'] = VAR_17\n", "VAR_16.union(self.G.nodes(1)[VAR_25][1]['keys'])\n", "def FUNC_4(self):...\n", "VAR_17 = VAR_17 + 1\n", "for VAR_11 in range(self.size):\n", "self.addNewNode(VAR_11)\n", "def FUNC_5(self, VAR_8):...\n", "VAR_18 = random.randint(0, self.width)\n", "VAR_19 = random.randint(0, self.width)\n", "VAR_20 = set()\n", "while len(VAR_20) < self.keysPerNode:\n", "VAR_20.add(random.randint(0, self.keyPoolSize))\n", "self.G.add_node(VAR_8, VAR_18=x, VAR_19=y, VAR_20=keys)\n", "def FUNC_6(self):...\n", "for node in self.G.nodes(1):\n", "for otherNode in self.G.nodes():\n", "def FUNC_7(self, VAR_9, VAR_10):...\n", "if not node == otherNode and not self.G.has_edge(node[0], otherNode[0]\n", "VAR_21 = VAR_9['x'] - VAR_10['x']\n", "self.G.add_edge(node[0], otherNode[0])\n", "VAR_22 = VAR_9['y'] - VAR_10['x']\n", "VAR_23 = math.sqrt(VAR_21 * VAR_21 + VAR_22 * VAR_22)\n", "return VAR_23 <= self.commRange\n" ]
[ "import networkx as nx\n", "import random\n", "def __init__(self, size, width, keyPoolSize, keysPerNode, commRange):...\n", "self.G = nx.Graph()\n", "self.size = size\n", "self.width = width\n", "self.keyPoolSize = keyPoolSize\n", "self.keysPerNode = keysPerNode\n", "self.commRange = commRange\n", "self.genNodes()\n", "self.addEdges()\n", "self.calcAllLKVM()\n", "def calcAllLKVM(self):...\n", "for edge in self.G.edges():\n", "self.calcLKVM(edge)\n", "def calcAllWLPVM(self, l):...\n", "for edge in self.G.edges():\n", "self.calcWLPVM(edge, l)\n", "def calcAllTPVM(self, gamma):...\n", "for edge in self.G.edges():\n", "self.calcTPVM(edge, gamma)\n", "def calcLKVM(self, edge):...\n", "i = edge[0]\n", "j = edge[1]\n", "iKeys = self.G.nodes(1)[i][1]['keys']\n", "jKeys = self.G.nodes(1)[j][1]['keys']\n", "sharedKeys = iKeys.intersection(jKeys)\n", "c = set()\n", "lkvm = 0\n", "while not sharedKeys.issubset(c):\n", "randNodeIndex = random.randint(0, self.size)\n", "self.G[i][j]['lkvm'] = lkvm\n", "c.union(self.G.nodes(1)[randNodeIndex][1]['keys'])\n", "def genNodes(self):...\n", "lkvm = lkvm + 1\n", "for i in range(self.size):\n", "self.addNewNode(i)\n", "def addNewNode(self, index):...\n", "x = random.randint(0, self.width)\n", "y = random.randint(0, self.width)\n", "keys = set()\n", "while len(keys) < self.keysPerNode:\n", "keys.add(random.randint(0, self.keyPoolSize))\n", "self.G.add_node(index, x=x, y=y, keys=keys)\n", "def addEdges(self):...\n", "for node in self.G.nodes(1):\n", "for otherNode in self.G.nodes():\n", "def inRange(self, node1, node2):...\n", "if not node == otherNode and not self.G.has_edge(node[0], otherNode[0]\n", "xDistance = node1['x'] - node2['x']\n", "self.G.add_edge(node[0], otherNode[0])\n", "yDistance = node1['y'] - node2['x']\n", "distance = math.sqrt(xDistance * xDistance + yDistance * yDistance)\n", "return distance <= self.commRange\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "FunctionDef'", "For", "Expr'", "FunctionDef'", "For", "Expr'", "FunctionDef'", "For", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "For", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Expr'", "FunctionDef'", "For", "For", "FunctionDef'", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_4(self, VAR_6, VAR_12, VAR_13):...\n", "VAR_14 = []\n", "VAR_14.append('string'.format(VAR_6))\n", "VAR_14.append('string'.format(VAR_6, VAR_12, VAR_13))\n", "for query in VAR_14:\n", "self.cur.execute(query)\n", "self.conn.commit()\n" ]
[ "def update_attendance(self, username, event_id, attending):...\n", "sql = []\n", "sql.append(\n \"\"\"INSERT INTO users (username)\n VALUES ('{0}')\n ON DUPLICATE KEY UPDATE username = '{0}';\n \"\"\"\n .format(username))\n", "sql.append(\n \"\"\"INSERT INTO user_event (username, event_id, attending)\n VALUES ('{0}', '{1}', '{2}')\n ON DUPLICATE KEY UPDATE attending = '{2}';\n \"\"\"\n .format(username, event_id, attending))\n", "for query in sql:\n", "self.cur.execute(query)\n", "self.conn.commit()\n" ]
[ 0, 0, 0, 4, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "For", "Expr'", "Expr'" ]
[ "def FUNC_1(self):...\n", "self.objects = load_model_objects()\n", "if settings.TEST_BROWSER == 'firefox':\n", "self.browser = webdriver.Firefox()\n", "self.browser = webdriver.Chrome()\n", "FUNC_0(self)\n" ]
[ "def setUp(self):...\n", "self.objects = load_model_objects()\n", "if settings.TEST_BROWSER == 'firefox':\n", "self.browser = webdriver.Firefox()\n", "self.browser = webdriver.Chrome()\n", "log_karyn_in(self)\n" ]
[ 0, 0, 6, 6, 6, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_36(VAR_11):...\n", "for VAR_51, VAR_58 in VAR_11.items():\n", "if isinstance(VAR_58, str) or not isinstance(VAR_58, Iterable):\n", "VAR_58 = [VAR_58]\n", "yield [(VAR_51, VAR_16) for VAR_16 in VAR_58]\n" ]
[ "def flatten(wildcards):...\n", "for wildcard, values in wildcards.items():\n", "if isinstance(values, str) or not isinstance(values, Iterable):\n", "values = [values]\n", "yield [(wildcard, value) for value in values]\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Condition", "Assign'", "Expr'" ]
[ "def FUNC_5(VAR_8):...\n", "return VAR_14.headers[VAR_8] if VAR_8 in VAR_14.headers else None\n" ]
[ "def get_from_header(field):...\n", "return response.headers[field] if field in response.headers else None\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_8(self):...\n", "return datetime.now(tz=pytz.utc).astimezone().tzinfo\n" ]
[ "def get_local_timezone(self):...\n", "return datetime.now(tz=pytz.utc).astimezone().tzinfo\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_11(self, VAR_11, VAR_12=None):...\n", "self.tree = Node(VAR_11)\n", "self.add_children(self.tree)\n" ]
[ "def build_node_tree(self, oldest_node, parent=None):...\n", "self.tree = Node(oldest_node)\n", "self.add_children(self.tree)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_10(self, VAR_16):...\n", "\"\"\"docstring\"\"\"\n", "VAR_11 = ['volume', 'select', VAR_16['name'], 'show']\n", "self._eql_execute(*VAR_11)\n", "if err.stdout.find('does not exist.\\n') > -1:\n", "VAR_0.debug(_('Volume %s does not exist, it may have already been deleted'),\n VAR_16['name'])\n" ]
[ "def _check_volume(self, volume):...\n", "\"\"\"docstring\"\"\"\n", "command = ['volume', 'select', volume['name'], 'show']\n", "self._eql_execute(*command)\n", "if err.stdout.find('does not exist.\\n') > -1:\n", "LOG.debug(_('Volume %s does not exist, it may have already been deleted'),\n volume['name'])\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_0(self):...\n", "self.user = self.make_user()\n", "self.factory = RequestFactory()\n" ]
[ "def setUp(self):...\n", "self.user = self.make_user()\n", "self.factory = RequestFactory()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "def FUNC_9(self, VAR_10):...\n", "self.errors = []\n", "VAR_21 = self.find_oldest_node(self.view.file_name())\n", "self.tree = Node(VAR_21)\n", "self.build_node_tree('ROOT -> ' + VAR_21)\n", "VAR_22 = ''\n", "for pre, fill, node in RenderTree(self.tree):\n", "VAR_22 += '%s %s' % (pre, node.name) + '\\n'\n", "VAR_23 = self.view.window()\n", "VAR_23.focus_group(0)\n", "VAR_24 = self.view.window().new_file()\n", "VAR_24.run_command('insert_snippet', {'contents': VAR_22})\n", "VAR_24.run_command('insert_snippet', {'contents': '\\n'.join(self.errors)})\n" ]
[ "def run(self, edit):...\n", "self.errors = []\n", "oldest_known_filename = self.find_oldest_node(self.view.file_name())\n", "self.tree = Node(oldest_known_filename)\n", "self.build_node_tree('ROOT -> ' + oldest_known_filename)\n", "render = ''\n", "for pre, fill, node in RenderTree(self.tree):\n", "render += '%s %s' % (pre, node.name) + '\\n'\n", "window = self.view.window()\n", "window.focus_group(0)\n", "new_view = self.view.window().new_file()\n", "new_view.run_command('insert_snippet', {'contents': render})\n", "new_view.run_command('insert_snippet', {'contents': '\\n'.join(self.errors)})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "For", "AugAssign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_0(VAR_1=None):...\n", "\"\"\"docstring\"\"\"\n", "if __grains__['kernel'] == 'Linux':\n", "VAR_4 = 'df -P'\n", "if __grains__['kernel'] == 'OpenBSD':\n", "if VAR_1:\n", "VAR_4 = 'df -kP'\n", "VAR_4 = 'df'\n", "VAR_4 = VAR_4 + ' -' + VAR_1\n", "VAR_2 = {}\n", "VAR_3 = __salt__['cmd.run'](VAR_4).splitlines()\n", "for line in VAR_3:\n", "if not line:\n", "return VAR_2\n", "if line.startswith('Filesystem'):\n", "VAR_5 = line.split()\n", "while not VAR_5[1].isdigit():\n", "VAR_5[0] = '{0} {1}'.format(VAR_5[0], VAR_5[1])\n", "if __grains__['kernel'] == 'Darwin':\n", "VAR_0.warn('Problem parsing disk usage information')\n", "VAR_5.pop(1)\n", "VAR_2[VAR_5[8]] = {'filesystem': VAR_5[0], '512-blocks': VAR_5[1], 'used':\n VAR_5[2], 'available': VAR_5[3], 'capacity': VAR_5[4], 'iused': VAR_5[5\n ], 'ifree': VAR_5[6], '%iused': VAR_5[7]}\n", "VAR_2[VAR_5[5]] = {'filesystem': VAR_5[0], '1K-blocks': VAR_5[1], 'used':\n VAR_5[2], 'available': VAR_5[3], 'capacity': VAR_5[4]}\n", "VAR_2 = {}\n" ]
[ "def usage(args=None):...\n", "\"\"\"docstring\"\"\"\n", "if __grains__['kernel'] == 'Linux':\n", "cmd = 'df -P'\n", "if __grains__['kernel'] == 'OpenBSD':\n", "if args:\n", "cmd = 'df -kP'\n", "cmd = 'df'\n", "cmd = cmd + ' -' + args\n", "ret = {}\n", "out = __salt__['cmd.run'](cmd).splitlines()\n", "for line in out:\n", "if not line:\n", "return ret\n", "if line.startswith('Filesystem'):\n", "comps = line.split()\n", "while not comps[1].isdigit():\n", "comps[0] = '{0} {1}'.format(comps[0], comps[1])\n", "if __grains__['kernel'] == 'Darwin':\n", "log.warn('Problem parsing disk usage information')\n", "comps.pop(1)\n", "ret[comps[8]] = {'filesystem': comps[0], '512-blocks': comps[1], 'used':\n comps[2], 'available': comps[3], 'capacity': comps[4], 'iused': comps[5\n ], 'ifree': comps[6], '%iused': comps[7]}\n", "ret[comps[5]] = {'filesystem': comps[0], '1K-blocks': comps[1], 'used':\n comps[2], 'available': comps[3], 'capacity': comps[4]}\n", "ret = {}\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Condition", "Return'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Expr'", "Assign'", "Assign'", "Assign'" ]
[ "def __init__(self, VAR_11, VAR_23, VAR_13):...\n", "self.cname, self.pname, self.ename = VAR_11, VAR_23, VAR_13\n" ]
[ "def __init__(self, cname, pname, ename):...\n", "self.cname, self.pname, self.ename = cname, pname, ename\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]