lines:
  sequencelengths:
  - 1
  - 444
raw_lines:
  sequencelengths:
  - 1
  - 444
label:
  sequencelengths:
  - 1
  - 444
type:
  sequencelengths:
  - 1
  - 444
[ "@VAR_0.route('/reset', methods=['GET'])...\n", "VAR_1 = get_dataset_with_id(request.args.get('dataset_id'))\n", "restore_original(VAR_1.working_copy)\n", "create_action('restored dataset to original state', VAR_1.id, current_user.id)\n", "return redirect(request.referrer)\n" ]
[ "@_transform.route('/reset', methods=['GET'])...\n", "dataset = get_dataset_with_id(request.args.get('dataset_id'))\n", "restore_original(dataset.working_copy)\n", "create_action('restored dataset to original state', dataset.id, current_user.id\n )\n", "return redirect(request.referrer)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "@require_http_methods(['GET'])...\n", "\"\"\"docstring\"\"\"\n", "VAR_7 = get_next_url_for_login_page(VAR_3)\n", "if VAR_3.user.is_authenticated():\n", "return redirect(VAR_7)\n", "VAR_11 = FUNC_6(VAR_3)\n", "VAR_12 = None\n", "if '?' in VAR_7:\n", "if is_request_in_themed_site() and not configuration_helpers.get_value(\n", "VAR_38 = urlparse.parse_qs(urlparse.urlparse(VAR_7).query)\n", "if VAR_4 == 'login':\n", "VAR_13 = FUNC_7(VAR_3, VAR_4)\n", "VAR_39 = VAR_38['tpa_hint'][0]\n", "return old_login_view(VAR_3)\n", "if VAR_4 == 'register':\n", "if VAR_13 is not None:\n", "VAR_40 = third_party_auth.provider.Registry.get(VAR_39=provider_id)\n", "return old_register_view(VAR_3)\n", "return VAR_13\n", "VAR_14 = [{'message': message.message, 'tags': message.tags} for message in\n messages.get_messages(VAR_3) if 'account-activation' in message.tags]\n", "if VAR_40:\n", "VAR_5 = {'data': {'login_redirect_url': VAR_7, 'initial_mode': VAR_4,\n 'third_party_auth': FUNC_5(VAR_3, VAR_7, VAR_12),\n 'third_party_auth_hint': VAR_12 or '', 'platform_name':\n configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME\n ), 'support_link': configuration_helpers.get_value('SUPPORT_SITE_LINK',\n settings.SUPPORT_SITE_LINK), 'password_reset_support_link': \n configuration_helpers.get_value('PASSWORD_RESET_SUPPORT_LINK', settings\n .PASSWORD_RESET_SUPPORT_LINK) or settings.SUPPORT_SITE_LINK,\n 'account_activation_messages': VAR_14, 'login_form_desc': json.loads(\n VAR_11['login']), 'registration_form_desc': json.loads(VAR_11[\n 'registration']), 'password_reset_form_desc': json.loads(VAR_11[\n 'password_reset']), 'account_creation_allowed': configuration_helpers.\n get_value('ALLOW_PUBLIC_ACCOUNT_CREATION', settings.FEATURES.get(\n 'ALLOW_PUBLIC_ACCOUNT_CREATION', True))}, 'login_redirect_url': VAR_7,\n 'responsive': True, 'allow_iframing': True, 'disable_courseware_js': \n True, 'combined_login_and_register': True, 'disable_footer': not\n configuration_helpers.get_value(\n 'ENABLE_COMBINED_LOGIN_REGISTRATION_FOOTER', settings.FEATURES[\n 'ENABLE_COMBINED_LOGIN_REGISTRATION_FOOTER'])}\n", "if VAR_40.skip_hinted_login_dialog:\n", "VAR_5 = FUNC_2(VAR_3, VAR_5)\n", "return redirect(pipeline.get_login_url(VAR_39, pipeline.AUTH_ENTRY_LOGIN,\n redirect_url=redirect_to))\n", "VAR_12 = VAR_39\n", "VAR_15 = render_to_response('student_account/login_and_register.html', VAR_5)\n", "VAR_4 = 'hinted_login'\n", "VAR_15.delete_cookie(configuration_helpers.get_value(\n 'ENTERPRISE_CUSTOMER_COOKIE_NAME', settings.\n ENTERPRISE_CUSTOMER_COOKIE_NAME), domain=configuration_helpers.\n get_value('BASE_COOKIE_DOMAIN', settings.BASE_COOKIE_DOMAIN))\n", "return VAR_15\n" ]
[ "@require_http_methods(['GET'])...\n", "\"\"\"docstring\"\"\"\n", "redirect_to = get_next_url_for_login_page(request)\n", "if request.user.is_authenticated():\n", "return redirect(redirect_to)\n", "form_descriptions = _get_form_descriptions(request)\n", "third_party_auth_hint = None\n", "if '?' in redirect_to:\n", "if is_request_in_themed_site() and not configuration_helpers.get_value(\n", "next_args = urlparse.parse_qs(urlparse.urlparse(redirect_to).query)\n", "if initial_mode == 'login':\n", "ext_auth_response = _external_auth_intercept(request, initial_mode)\n", "provider_id = next_args['tpa_hint'][0]\n", "return old_login_view(request)\n", "if initial_mode == 'register':\n", "if ext_auth_response is not None:\n", "tpa_hint_provider = third_party_auth.provider.Registry.get(provider_id=\n provider_id)\n", "return old_register_view(request)\n", "return ext_auth_response\n", "account_activation_messages = [{'message': message.message, 'tags': message\n .tags} for message in messages.get_messages(request) if \n 'account-activation' in message.tags]\n", "if tpa_hint_provider:\n", "context = {'data': {'login_redirect_url': redirect_to, 'initial_mode':\n initial_mode, 'third_party_auth': _third_party_auth_context(request,\n redirect_to, third_party_auth_hint), 'third_party_auth_hint': \n third_party_auth_hint or '', 'platform_name': configuration_helpers.\n get_value('PLATFORM_NAME', settings.PLATFORM_NAME), 'support_link':\n configuration_helpers.get_value('SUPPORT_SITE_LINK', settings.\n SUPPORT_SITE_LINK), 'password_reset_support_link': \n configuration_helpers.get_value('PASSWORD_RESET_SUPPORT_LINK', settings\n .PASSWORD_RESET_SUPPORT_LINK) or settings.SUPPORT_SITE_LINK,\n 'account_activation_messages': account_activation_messages,\n 'login_form_desc': json.loads(form_descriptions['login']),\n 'registration_form_desc': json.loads(form_descriptions['registration']),\n 'password_reset_form_desc': json.loads(form_descriptions[\n 'password_reset']), 'account_creation_allowed': configuration_helpers.\n get_value('ALLOW_PUBLIC_ACCOUNT_CREATION', settings.FEATURES.get(\n 'ALLOW_PUBLIC_ACCOUNT_CREATION', True))}, 'login_redirect_url':\n redirect_to, 'responsive': True, 'allow_iframing': True,\n 'disable_courseware_js': True, 'combined_login_and_register': True,\n 'disable_footer': not configuration_helpers.get_value(\n 'ENABLE_COMBINED_LOGIN_REGISTRATION_FOOTER', settings.FEATURES[\n 'ENABLE_COMBINED_LOGIN_REGISTRATION_FOOTER'])}\n", "if tpa_hint_provider.skip_hinted_login_dialog:\n", "context = update_context_for_enterprise(request, context)\n", "return redirect(pipeline.get_login_url(provider_id, pipeline.\n AUTH_ENTRY_LOGIN, redirect_url=redirect_to))\n", "third_party_auth_hint = provider_id\n", "response = render_to_response('student_account/login_and_register.html',\n context)\n", "initial_mode = 'hinted_login'\n", "response.delete_cookie(configuration_helpers.get_value(\n 'ENTERPRISE_CUSTOMER_COOKIE_NAME', settings.\n ENTERPRISE_CUSTOMER_COOKIE_NAME), domain=configuration_helpers.\n get_value('BASE_COOKIE_DOMAIN', settings.BASE_COOKIE_DOMAIN))\n", "return response\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Return'", "Condition", "Condition", "Assign'", "Return'", "Return'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Return'", "Assign'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_6():...\n", "VAR_13.throw(_('Invalid Search Field'), VAR_13.DataError)\n" ]
[ "def _raise_exception():...\n", "frappe.throw(_('Invalid Search Field'), frappe.DataError)\n" ]
[ 0, 4 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_13(self, VAR_64):...\n", "self._onsuccess = VAR_64\n" ]
[ "def onsuccess(self, func):...\n", "self._onsuccess = func\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def FUNC_44(self):...\n", "\"\"\"docstring\"\"\"\n" ]
[ "def test_latest_match_not_found(self):...\n", "\"\"\"docstring\"\"\"\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Docstring" ]
[ "def FUNC_13(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_14, VAR_15 = new_raylet_monitor_log_file(self._ray_params.redirect_output)\n", "VAR_16 = ray.services.start_raylet_monitor(self._redis_address, VAR_14=\n stdout_file, VAR_15=stderr_file, redis_password=self._ray_params.\n redis_password, config=self._config)\n", "assert VAR_13.PROCESS_TYPE_RAYLET_MONITOR not in self.all_processes\n", "self.all_processes[VAR_13.PROCESS_TYPE_RAYLET_MONITOR] = [VAR_16]\n" ]
[ "def start_raylet_monitor(self):...\n", "\"\"\"docstring\"\"\"\n", "stdout_file, stderr_file = new_raylet_monitor_log_file(self._ray_params.\n redirect_output)\n", "process_info = ray.services.start_raylet_monitor(self._redis_address,\n stdout_file=stdout_file, stderr_file=stderr_file, redis_password=self.\n _ray_params.redis_password, config=self._config)\n", "assert ray_constants.PROCESS_TYPE_RAYLET_MONITOR not in self.all_processes\n", "self.all_processes[ray_constants.PROCESS_TYPE_RAYLET_MONITOR] = [process_info]\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assert'", "Assign'" ]
[ "def FUNC_1(VAR_0, VAR_1, VAR_2, VAR_3):...\n", "VAR_7 = psycopg2.connect(dbname=pg_connection['database'], user=\n pg_connection['user'], password=pg_connection['password'], host=\n pg_connection['host'])\n", "VAR_8 = VAR_7.cursor()\n", "VAR_8.execute('string', (VAR_1, VAR_2, VAR_3))\n", "VAR_7.commit()\n", "VAR_8.close()\n", "VAR_7.close()\n" ]
[ "def import_quest_data(pg_connection, quest_tier, quest_desc, creator):...\n", "conn = psycopg2.connect(dbname=pg_connection['database'], user=\n pg_connection['user'], password=pg_connection['password'], host=\n pg_connection['host'])\n", "cur = conn.cursor()\n", "cur.execute(\n \"\"\"\n INSERT INTO quests (tier, description, creator, completed)\n VALUES (%s, %s, %s, False);\n \"\"\"\n , (quest_tier, quest_desc, creator))\n", "conn.commit()\n", "cur.close()\n", "conn.close()\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self.config.getboolean(self.section, 'insecure_cookies')\n" ]
[ "@property...\n", "\"\"\"docstring\"\"\"\n", "return self.config.getboolean(self.section, 'insecure_cookies')\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "def FUNC_11(self, VAR_7):...\n", "VAR_26 = super().get_transform(VAR_7)\n", "if VAR_26:\n", "return VAR_26\n", "if '_' not in VAR_7:\n", "VAR_18 = int(VAR_7)\n", "VAR_18 += 1\n", "VAR_20, VAR_21 = VAR_7.split('_')\n", "return CLASS_10(VAR_20, VAR_21)\n", "return CLASS_8(VAR_18, self.base_field)\n", "VAR_20 = int(VAR_20) + 1\n", "VAR_21 = int(VAR_21)\n" ]
[ "def get_transform(self, name):...\n", "transform = super().get_transform(name)\n", "if transform:\n", "return transform\n", "if '_' not in name:\n", "index = int(name)\n", "index += 1\n", "start, end = name.split('_')\n", "return SliceTransformFactory(start, end)\n", "return IndexTransformFactory(index, self.base_field)\n", "start = int(start) + 1\n", "end = int(end)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Condition", "Assign'", "AugAssign'", "Assign'", "Return'", "Return'", "Assign'", "Assign'" ]
[ "def FUNC_9(self, VAR_10):...\n", "\"\"\"docstring\"\"\"\n", "return '%s' % FUNC_12(VAR_10)\n" ]
[ "def sqlForNonNone(self, value):...\n", "\"\"\"docstring\"\"\"\n", "return '%s' % QuotedString(value)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_4(self):...\n", "if self._user_notified_about_crash or self._IsServerAlive():\n", "return\n", "self._user_notified_about_crash = True\n", "if self._server_stderr:\n", "VAR_27 = ''.join(server_stderr_file.readlines()[:-VAR_1])\n", "vimsupport.PostVimMessage(VAR_3)\n", "vimsupport.PostMultiLineNotice(VAR_2 + VAR_27)\n" ]
[ "def _NotifyUserIfServerCrashed(self):...\n", "if self._user_notified_about_crash or self._IsServerAlive():\n", "return\n", "self._user_notified_about_crash = True\n", "if self._server_stderr:\n", "error_output = ''.join(server_stderr_file.readlines()[:-\n NUM_YCMD_STDERR_LINES_ON_CRASH])\n", "vimsupport.PostVimMessage(SERVER_CRASH_MESSAGE_SAME_STDERR)\n", "vimsupport.PostMultiLineNotice(SERVER_CRASH_MESSAGE_STDERR_FILE + error_output)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "Condition", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_22(VAR_18, VAR_31, VAR_30=60):...\n", "VAR_50 = re.compile('Compiling\\\\.\\\\.\\\\.|Evaluating\\\\.\\\\.\\\\.')\n", "VAR_51 = re.compile('Compilation failed|Evaluated')\n", "VAR_34 = FUNC_0()\n", "VAR_52 = 0.1\n", "while VAR_30 > 0:\n", "VAR_30 -= VAR_52\n", "VAR_49 = AWSUserTestViewRequest(VAR_34, VAR_31, base_url=AWS_BASE_URL)\n", "VAR_49.execute()\n", "VAR_63 = VAR_49.get_user_test_info()\n", "VAR_64 = VAR_63['status']\n", "if VAR_51.search(VAR_64):\n", "return VAR_63\n", "if VAR_50.search(VAR_64):\n", "time.sleep(VAR_52)\n" ]
[ "def get_user_test_result(contest_id, user_test_id, timeout=60):...\n", "WAITING_STATUSES = re.compile('Compiling\\\\.\\\\.\\\\.|Evaluating\\\\.\\\\.\\\\.')\n", "COMPLETED_STATUS = re.compile('Compilation failed|Evaluated')\n", "browser = get_aws_browser()\n", "sleep_interval = 0.1\n", "while timeout > 0:\n", "timeout -= sleep_interval\n", "sr = AWSUserTestViewRequest(browser, user_test_id, base_url=AWS_BASE_URL)\n", "sr.execute()\n", "result = sr.get_user_test_info()\n", "status = result['status']\n", "if COMPLETED_STATUS.search(status):\n", "return result\n", "if WAITING_STATUSES.search(status):\n", "time.sleep(sleep_interval)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "AugAssign'", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "Return'", "Condition", "Expr'" ]
[ "def FUNC_8(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_47 = re.compile('^.*[,();].*')\n", "VAR_48 = ['select', 'create', 'insert', 'delete', 'drop', 'update', 'case']\n", "VAR_49 = ['concat', 'concat_ws', 'if', 'ifnull', 'nullif', 'coalesce',\n 'connection_id', 'current_user', 'database', 'last_insert_id',\n 'session_user', 'system_user', 'user', 'version']\n", "def FUNC_26():...\n", "frappe.throw(_('Cannot use sub-query or function in fields'), frappe.DataError)\n", "for field in self.fields:\n", "if VAR_47.match(field):\n", "if any(keyword in field.lower() for keyword in VAR_48):\n", "FUNC_26()\n", "if any('{0}('.format(keyword) in field.lower() for keyword in VAR_49):\n", "FUNC_26()\n" ]
[ "def sanitize_fields(self):...\n", "\"\"\"docstring\"\"\"\n", "regex = re.compile('^.*[,();].*')\n", "blacklisted_keywords = ['select', 'create', 'insert', 'delete', 'drop',\n 'update', 'case']\n", "blacklisted_functions = ['concat', 'concat_ws', 'if', 'ifnull', 'nullif',\n 'coalesce', 'connection_id', 'current_user', 'database',\n 'last_insert_id', 'session_user', 'system_user', 'user', 'version']\n", "def _raise_exception():...\n", "frappe.throw(_('Cannot use sub-query or function in fields'), frappe.DataError)\n", "for field in self.fields:\n", "if regex.match(field):\n", "if any(keyword in field.lower() for keyword in blacklisted_keywords):\n", "_raise_exception()\n", "if any('{0}('.format(keyword) in field.lower() for keyword in\n", "_raise_exception()\n" ]
[ 0, 0, 4, 0, 0, 0, 0, 0, 4, 4, 0, 4, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Expr'", "For", "Condition", "For", "Expr'", "For", "Expr'" ]
[ "@VAR_0.route('/level-1/index')...\n", "VAR_3.emit('level-progress-update', {'level_progress': 'test'})\n", "return render_template('level-1/index.html')\n" ]
[ "@app.route('/level-1/index')...\n", "socketIO.emit('level-progress-update', {'level_progress': 'test'})\n", "return render_template('level-1/index.html')\n" ]
[ 0, 0, 0 ]
[ "Condition", "Expr'", "Return'" ]
[ "def FUNC_6(self):...\n", "print('tag: %s' % self.tag_name)\n", "print('value: %s' % self.value)\n", "print('datetimestamp: %s' % self.dtstamp)\n" ]
[ "def log(self):...\n", "print('tag: %s' % self.tag_name)\n", "print('value: %s' % self.value)\n", "print('datetimestamp: %s' % self.dtstamp)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'" ]
[ "@staticmethod...\n", "VAR_22 = User().get_by_id(VAR_3)\n", "if VAR_22 is None:\n", "return VAR_22\n" ]
[ "@staticmethod...\n", "user = User().get_by_id(user_id)\n", "if user is None:\n", "return user\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Return'" ]
[ "def FUNC_26(VAR_5, VAR_8):...\n", "api.reset_local_state()\n", "VAR_11[0] = VAR_5\n", "VAR_15 = VAR_7.get('/request', extra_environ={'REMOTE_ADDR': ip},\n expect_errors=True)\n", "return VAR_15.status_int\n" ]
[ "def call(ident, ip):...\n", "api.reset_local_state()\n", "mocked_ident[0] = ident\n", "response = app.get('/request', extra_environ={'REMOTE_ADDR': ip},\n expect_errors=True)\n", "return response.status_int\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_5(self, VAR_8):...\n", "\"\"\"docstring\"\"\"\n", "VAR_19 = self.get_all_ads()\n", "[self.delete_ad(i) for t, i in VAR_19 if t.strip() == VAR_8.strip()]\n" ]
[ "def delete_ad_using_title(self, title):...\n", "\"\"\"docstring\"\"\"\n", "allAds = self.get_all_ads()\n", "[self.delete_ad(i) for t, i in allAds if t.strip() == title.strip()]\n" ]
[ 0, 0, 5, 5 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'" ]
[ "def FUNC_5(VAR_14, VAR_15, VAR_18=True):...\n", "VAR_26 = 0\n", "VAR_27 = len(VAR_15)\n", "for line in VAR_15:\n", "if len(line) > VAR_26:\n", "VAR_14 = (VAR_14[0][0] / VAR_26, VAR_14[0][1]) if VAR_14[0] else None, (\n VAR_14[1][0] / VAR_27, VAR_14[1][1]) if VAR_14[1] else None\n", "VAR_26 = len(line)\n", "if not (VAR_14[0] or VAR_14[1]):\n", "if VAR_18:\n", "VAR_28 = ''\n", "VAR_14 = (1.0 / VAR_26, '\\\\textwidth'), None\n", "VAR_14 = None, (1.0 / VAR_27, '\\\\textheight')\n", "for line in VAR_15:\n", "for VAR_16 in line:\n", "return VAR_28\n", "VAR_28 += FUNC_2(VAR_14, VAR_16=file)\n", "VAR_28 += '\\\\\\\\'\n" ]
[ "def grid(dims, files, implicitFillWidth=True):...\n", "x = 0\n", "y = len(files)\n", "for line in files:\n", "if len(line) > x:\n", "dims = (dims[0][0] / x, dims[0][1]) if dims[0] else None, (dims[1][0] / y,\n dims[1][1]) if dims[1] else None\n", "x = len(line)\n", "if not (dims[0] or dims[1]):\n", "if implicitFillWidth:\n", "s = ''\n", "dims = (1.0 / x, '\\\\textwidth'), None\n", "dims = None, (1.0 / y, '\\\\textheight')\n", "for line in files:\n", "for file in line:\n", "return s\n", "s += singleImage(dims, file=file)\n", "s += '\\\\\\\\'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "For", "For", "Return'", "AugAssign'", "AugAssign'" ]
[ "def FUNC_2():...\n", "VAR_30 = VAR_10.find(' ')\n", "VAR_24 = VAR_10[:VAR_30]\n", "VAR_42 = ''\n", "VAR_12 = VAR_10[VAR_30 + 1:]\n", "self.emph = 1 if VAR_24.find('*') > -1 else 0\n", "self.uncover = 2 if VAR_24.find('+') > -1 else 0\n", "self.kind = 0\n", "self.resume = False\n", "if VAR_24.find('.') > -1:\n", "self.kind = 1\n", "if VAR_24.find(',') > -1:\n", "super(CLASS_3, self).__init__(slideParser.parse(VAR_12, slideLexer), '%s' +\n self.markers[self.kind] % (self.specs[self.emph + self.uncover], VAR_42\n ), '\\n')\n", "self.kind = 1\n", "if VAR_24.find('=') > -1:\n", "self.resume = True\n", "self.kind = 2\n", "VAR_43 = VAR_12.find('=')\n", "if VAR_43 == -1:\n", "VAR_43 = VAR_12.find(' ')\n", "if VAR_43 == -1:\n", "VAR_42 = VAR_12\n", "VAR_42 = VAR_12[:VAR_43]\n", "VAR_12 = ' '\n", "VAR_12 = VAR_12[VAR_43 + 1:]\n" ]
[ "def innerFunc():...\n", "i = txt.find(' ')\n", "marker = txt[:i]\n", "describee = ''\n", "content = txt[i + 1:]\n", "self.emph = 1 if marker.find('*') > -1 else 0\n", "self.uncover = 2 if marker.find('+') > -1 else 0\n", "self.kind = 0\n", "self.resume = False\n", "if marker.find('.') > -1:\n", "self.kind = 1\n", "if marker.find(',') > -1:\n", "super(ListItem, self).__init__(slideParser.parse(content, slideLexer), '%s' +\n self.markers[self.kind] % (self.specs[self.emph + self.uncover],\n describee), '\\n')\n", "self.kind = 1\n", "if marker.find('=') > -1:\n", "self.resume = True\n", "self.kind = 2\n", "j = content.find('=')\n", "if j == -1:\n", "j = content.find(' ')\n", "if j == -1:\n", "describee = content\n", "describee = content[:j]\n", "content = ' '\n", "content = content[j + 1:]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "@property...\n", "VAR_11 = await self.content\n", "if not VAR_11:\n", "return ''\n", "return VAR_0(VAR_11)\n" ]
[ "@property...\n", "content = await self.content\n", "if not content:\n", "return ''\n", "return markdown(content)\n" ]
[ 0, 3, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_6(VAR_27):...\n", "VAR_38 = str(VAR_27.author)\n", "VAR_36 = datetime.utcfromtimestamp(VAR_27.created_utc)\n", "FUNC_2(VAR_14=str(message.author), VAR_18='message', VAR_25=message_time.\n strftime('%Y-%m-%d %H:%M:%S'), VAR_15='private_key', VAR_26=str(message\n .body)[:255])\n", "VAR_13.execute(\"SELECT address, private_key FROM accounts WHERE name='%s'\" %\n VAR_38)\n", "VAR_34 = VAR_13.fetchall()\n", "if len(VAR_34) > 0:\n", "VAR_39 = \"\"\"Your account: %s\n\nYour private key: %s\"\"\" % (VAR_34[0][0],\n VAR_34[0][1])\n", "VAR_37 = VAR_2.redditor(VAR_14).message('No account found.', 'string')\n", "VAR_37 = VAR_2.redditor(VAR_14).message('New Private Key', VAR_39)\n", "return None\n", "return None\n" ]
[ "def handle_private_key(message):...\n", "author = str(message.author)\n", "message_time = datetime.utcfromtimestamp(message.created_utc)\n", "add_history_record(username=str(message.author), comment_or_message=\n 'message', reddit_time=message_time.strftime('%Y-%m-%d %H:%M:%S'),\n action='private_key', comment_text=str(message.body)[:255])\n", "mycursor.execute(\n \"SELECT address, private_key FROM accounts WHERE name='%s'\" % author)\n", "result = mycursor.fetchall()\n", "if len(result) > 0:\n", "response = \"\"\"Your account: %s\n\nYour private key: %s\"\"\" % (result[0][0],\n result[0][1])\n", "x = reddit.redditor(username).message('No account found.',\n \"You do not currently have an account open.To create one, respond with the text 'create' in the message body.\"\n )\n", "x = reddit.redditor(username).message('New Private Key', response)\n", "return None\n", "return None\n" ]
[ 0, 4, 0, 4, 4, 0, 0, 4, 4, 4, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Return'", "Return'" ]
[ "def __init__(self, VAR_0, VAR_3, VAR_2=None):...\n", "GenericRequest.__init__(self, VAR_0, VAR_2)\n", "self.user_test_id = VAR_3\n", "self.url = '%suser_test/%s' % (self.base_url, VAR_3)\n" ]
[ "def __init__(self, browser, user_test_id, base_url=None):...\n", "GenericRequest.__init__(self, browser, base_url)\n", "self.user_test_id = user_test_id\n", "self.url = '%suser_test/%s' % (self.base_url, user_test_id)\n" ]
[ 0, 0, 0, 5 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'" ]
[ "def FUNC_3(self, VAR_0, VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "return webtest.TestApp(webapp2.WSGIApplication([(VAR_0, VAR_1)], debug=True\n ), extra_environ={'REMOTE_ADDR': '127.0.0.1'})\n" ]
[ "def make_test_app(self, path, request_handler):...\n", "\"\"\"docstring\"\"\"\n", "return webtest.TestApp(webapp2.WSGIApplication([(path, request_handler)],\n debug=True), extra_environ={'REMOTE_ADDR': '127.0.0.1'})\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_8(self, VAR_10, VAR_11):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_10:\n", "for rec in self:\n", "VAR_36 = safe_eval(VAR_10, {'object': rec, 'env': self.env})\n", "logging.error('CRAPO: Failed to validate transition %sconditions: %s',\n VAR_11, str(err))\n", "if not VAR_36:\n", "VAR_36 = False\n" ]
[ "def exec_conditions(self, conditions, prefix):...\n", "\"\"\"docstring\"\"\"\n", "if conditions:\n", "for rec in self:\n", "is_valid = safe_eval(conditions, {'object': rec, 'env': self.env})\n", "logging.error('CRAPO: Failed to validate transition %sconditions: %s',\n prefix, str(err))\n", "if not is_valid:\n", "is_valid = False\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "For", "Assign'", "Expr'", "Condition", "Assign'" ]
[ "def FUNC_1(self, VAR_2, *VAR_5, **VAR_6):...\n", "VAR_2 = db.get_dataset(VAR_2)\n", "VAR_27 = db.User.select()\n", "VAR_28 = db.DatasetAccessPending.select().where(db.DatasetAccessPending.\n dataset == VAR_2)\n", "VAR_10 = peewee.prefetch(VAR_27, VAR_28)\n", "self.finish({'data': self._build_json_response(VAR_10, lambda u: u.\n access_pending_prefetch)})\n" ]
[ "def get(self, dataset, *args, **kwargs):...\n", "dataset = db.get_dataset(dataset)\n", "users = db.User.select()\n", "access = db.DatasetAccessPending.select().where(db.DatasetAccessPending.\n dataset == dataset)\n", "query = peewee.prefetch(users, access)\n", "self.finish({'data': self._build_json_response(query, lambda u: u.\n access_pending_prefetch)})\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_4(VAR_2):...\n", "VAR_7 = files.get_static_data('./static/files.html')\n", "VAR_2 = FUNC_1(VAR_2)\n", "VAR_2 = '/'\n", "if not VAR_2:\n", "VAR_2 = '/'\n", "VAR_14 = VAR_2.split('/')\n", "VAR_15 = list()\n", "while '' in VAR_14:\n", "VAR_14.remove('')\n", "VAR_14 = [''] + VAR_14\n", "VAR_16 = ''\n", "for VAR_33 in range(0, len(VAR_14)):\n", "VAR_14[VAR_33] += '/'\n", "VAR_17 = list()\n", "VAR_16 += VAR_14[VAR_33]\n", "for VAR_5 in os.listdir(VAR_2):\n", "VAR_15.append(dict(folder_name=files_hierarchy[i], href_path=\n '/files/list/%s' % encode_str_to_hexed_b64(files_hierarchy_cwd),\n disabled=i == len(files_hierarchy) - 1))\n", "VAR_18 = FUNC_0(VAR_16)\n", "VAR_29 = VAR_2 + VAR_5\n", "VAR_19 = users.get_user_by_cookie(self.get_cookie('user_active_login',\n default=''))\n", "VAR_30 = dict()\n", "VAR_7 = preproc.preprocess_webpage(VAR_7, VAR_19, VAR_17=files_attrib_list,\n VAR_15=files_hierarchy_list, VAR_18=cwd_uuid)\n", "VAR_30['file-name'] = VAR_5\n", "VAR_6.set_result(VAR_7)\n", "VAR_30['allow-edit'] = True\n", "VAR_30['file-size'] = files.format_file_size(os.path.getsize(VAR_29))\n", "VAR_30['owner'] = 'root'\n", "VAR_30['date-uploaded'] = time.ctime(os.path.getctime(VAR_29))\n", "if os.path.isdir(VAR_29):\n", "VAR_30['mime-type'] = 'directory/folder'\n", "VAR_30['mime-type'] = files.guess_mime_type(VAR_5)\n", "if VAR_30['mime-type'] == 'directory/folder':\n", "VAR_30['target-link'] = '/files/list/%s' % FUNC_0(VAR_29 + '/')\n", "VAR_30['target-link'] = '/files/download/%s/%s' % (FUNC_0(VAR_29), VAR_5)\n", "VAR_30['uuid'] = FUNC_0(VAR_29)\n", "VAR_17.append(VAR_30)\n" ]
[ "def get_final_html_async(target_path):...\n", "file_temp = files.get_static_data('./static/files.html')\n", "target_path = decode_hexed_b64_to_str(target_path)\n", "target_path = '/'\n", "if not target_path:\n", "target_path = '/'\n", "files_hierarchy = target_path.split('/')\n", "files_hierarchy_list = list()\n", "while '' in files_hierarchy:\n", "files_hierarchy.remove('')\n", "files_hierarchy = [''] + files_hierarchy\n", "files_hierarchy_cwd = ''\n", "for i in range(0, len(files_hierarchy)):\n", "files_hierarchy[i] += '/'\n", "files_attrib_list = list()\n", "files_hierarchy_cwd += files_hierarchy[i]\n", "for file_name in os.listdir(target_path):\n", "files_hierarchy_list.append(dict(folder_name=files_hierarchy[i], href_path=\n '/files/list/%s' % encode_str_to_hexed_b64(files_hierarchy_cwd),\n disabled=i == len(files_hierarchy) - 1))\n", "cwd_uuid = encode_str_to_hexed_b64(files_hierarchy_cwd)\n", "actual_path = target_path + file_name\n", "working_user = users.get_user_by_cookie(self.get_cookie('user_active_login',\n default=''))\n", "attrib = dict()\n", "file_temp = preproc.preprocess_webpage(file_temp, working_user,\n files_attrib_list=files_attrib_list, files_hierarchy_list=\n files_hierarchy_list, cwd_uuid=cwd_uuid)\n", "attrib['file-name'] = file_name\n", "future.set_result(file_temp)\n", "attrib['allow-edit'] = True\n", "attrib['file-size'] = files.format_file_size(os.path.getsize(actual_path))\n", "attrib['owner'] = 'root'\n", "attrib['date-uploaded'] = time.ctime(os.path.getctime(actual_path))\n", "if os.path.isdir(actual_path):\n", "attrib['mime-type'] = 'directory/folder'\n", "attrib['mime-type'] = files.guess_mime_type(file_name)\n", "if attrib['mime-type'] == 'directory/folder':\n", "attrib['target-link'] = '/files/list/%s' % encode_str_to_hexed_b64(\n actual_path + '/')\n", "attrib['target-link'] = '/files/download/%s/%s' % (encode_str_to_hexed_b64(\n actual_path), file_name)\n", "attrib['uuid'] = encode_str_to_hexed_b64(actual_path)\n", "files_attrib_list.append(attrib)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "For", "AugAssign'", "Assign'", "AugAssign'", "For", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def __getstate__(self):...\n", "return self.serialize()\n" ]
[ "def __getstate__(self):...\n", "return self.serialize()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2(self, VAR_8) ->str:...\n", "return '<div class=\"nameparts-form-group\">%s</div>' % ''.join(VAR_8)\n" ]
[ "def format_output(self, rendered_widgets) ->str:...\n", "return '<div class=\"nameparts-form-group\">%s</div>' % ''.join(rendered_widgets)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_0(VAR_13):...\n", "VAR_26 = requests.get(VAR_13)\n", "VAR_27 = VAR_26.content.decode('utf-8')\n", "logging.info('GET %s responded with %s', VAR_13, VAR_27)\n", "return json.loads(VAR_27)\n" ]
[ "def get_json_from_url(url):...\n", "response = requests.get(url)\n", "decoded_content = response.content.decode('utf-8')\n", "logging.info('GET %s responded with %s', url, decoded_content)\n", "return json.loads(decoded_content)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_16(self):...\n", "if self.doctype not in VAR_54.local.valid_columns:\n", "if self.doctype in ('DocField', 'DocPerm') and self.parent in ('DocType',\n", "return VAR_54.local.valid_columns[self.doctype]\n", "from frappe.model.meta import get_table_columns\n", "VAR_65 = self.meta.get_valid_columns()\n", "VAR_65 = get_table_columns(self.doctype)\n", "VAR_54.local.valid_columns[self.doctype] = VAR_65\n" ]
[ "def get_valid_columns(self):...\n", "if self.doctype not in frappe.local.valid_columns:\n", "if self.doctype in ('DocField', 'DocPerm') and self.parent in ('DocType',\n", "return frappe.local.valid_columns[self.doctype]\n", "from frappe.model.meta import get_table_columns\n", "valid = self.meta.get_valid_columns()\n", "valid = get_table_columns(self.doctype)\n", "frappe.local.valid_columns[self.doctype] = valid\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Condition", "Return'", "ImportFrom'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_21():...\n", "\"\"\"docstring\"\"\"\n", "return 'bibclassify v%s' % (bconfig.VERSION,)\n" ]
[ "def _signature():...\n", "\"\"\"docstring\"\"\"\n", "return 'bibclassify v%s' % (bconfig.VERSION,)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "from grokcore.component import Adapter, implements, baseclass\n", "from grokcore.security import require\n", "from zope.interface import Interface\n", "def FUNC_0(VAR_0):...\n", "def FUNC_1(VAR_0, VAR_1):...\n", "def FUNC_2(VAR_0):...\n", "\"\"\"docstring\"\"\"\n", "def FUNC_3(VAR_2):...\n", "\"\"\"docstring\"\"\"\n", "implements(CLASS_0)\n", "baseclass()\n", "require('rest')\n", "__builtin_attributes__ = ['id', 'children']\n", "def FUNC_4(self, VAR_0, VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "VAR_4 = VAR_0.args.get('attrs', [''])[0]\n", "if VAR_4:\n", "VAR_7 = {}\n", "return VAR_3\n", "for VAR_8 in (VAR_4.decode('utf-8').split(',') + self.__builtin_attributes__):\n", "if VAR_8 in VAR_3:\n", "return VAR_7\n", "VAR_7[VAR_8] = VAR_3[VAR_8]\n" ]
[ "from grokcore.component import Adapter, implements, baseclass\n", "from grokcore.security import require\n", "from zope.interface import Interface\n", "def render(request):...\n", "def render_recursive(request, depth):...\n", "def rw_transaction(request):...\n", "\"\"\"docstring\"\"\"\n", "def resolve(path):...\n", "\"\"\"docstring\"\"\"\n", "implements(IHttpRestView)\n", "baseclass()\n", "require('rest')\n", "__builtin_attributes__ = ['id', 'children']\n", "def filter_attributes(self, request, data):...\n", "\"\"\"docstring\"\"\"\n", "attrs = request.args.get('attrs', [''])[0]\n", "if attrs:\n", "filtered_data = {}\n", "return data\n", "for a in (attrs.decode('utf-8').split(',') + self.__builtin_attributes__):\n", "if a in data:\n", "return filtered_data\n", "filtered_data[a] = data[a]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "FunctionDef'", "FunctionDef'", "Docstring", "FunctionDef'", "Docstring", "Expr'", "Expr'", "Expr'", "Assign'", "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Return'", "For", "Condition", "Return'", "Assign'" ]
[ "@eqlx.with_timeout...\n", "return 'no timeout'\n" ]
[ "@eqlx.with_timeout...\n", "return 'no timeout'\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_32(self):...\n", "VAR_33 = VAR_5.search(self.file)\n", "if VAR_33:\n", "return self.file[:VAR_33.start()]\n", "return self.file\n" ]
[ "def constant_prefix(self):...\n", "first_wildcard = _wildcard_regex.search(self.file)\n", "if first_wildcard:\n", "return self.file[:first_wildcard.start()]\n", "return self.file\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Return'" ]
[ "def FUNC_0():...\n", "print('\\nQuery options:')\n", "print('1: List ETCs in the country __ having more than __ open beds.')\n", "print(\n '2: List average age & education of respondents whose sex=__ and live in __.'\n )\n", "print(\n '3: Count the respondents whose sex=__ and who have at least an education of __.'\n )\n", "print(\n '4: Display partner organizations with their longitude/latitude coordinates.'\n )\n", "print(\n '5: Display a chosen organization __ & codes of the ETCs it is working with.'\n )\n", "print('6: Display all distinct organization types.')\n", "print(\n \"7: Display every respondent's (whose sex=__) info & their country's info.\"\n )\n", "print(\n \"8: Display every ETC that isn't closed & its info/partner organization.\")\n", "print('9: List countries in ascending order by GDP.')\n", "print('string')\n", "print(\n '11: Show gender, age, education, and country of survey respondents ordered by age.'\n )\n", "print('12: Show ETC names and Partner Orgs ordered by ETC names.')\n", "print(\n '13: Show ETC name, Selected Partner Org, and Country GDP ordered by Country.'\n )\n", "print('14: Show average age of selected gender of survey respondents.')\n", "print(\n '15: Show average educaiton level of selected gender of survey respondents.\\n'\n )\n" ]
[ "def print_menu():...\n", "print('\\nQuery options:')\n", "print('1: List ETCs in the country __ having more than __ open beds.')\n", "print(\n '2: List average age & education of respondents whose sex=__ and live in __.'\n )\n", "print(\n '3: Count the respondents whose sex=__ and who have at least an education of __.'\n )\n", "print(\n '4: Display partner organizations with their longitude/latitude coordinates.'\n )\n", "print(\n '5: Display a chosen organization __ & codes of the ETCs it is working with.'\n )\n", "print('6: Display all distinct organization types.')\n", "print(\n \"7: Display every respondent's (whose sex=__) info & their country's info.\"\n )\n", "print(\n \"8: Display every ETC that isn't closed & its info/partner organization.\")\n", "print('9: List countries in ascending order by GDP.')\n", "print(\n '10: Count the respondents (with sex=__, education>=__, and country=__) who think their community was well organized.'\n )\n", "print(\n '11: Show gender, age, education, and country of survey respondents ordered by age.'\n )\n", "print('12: Show ETC names and Partner Orgs ordered by ETC names.')\n", "print(\n '13: Show ETC name, Selected Partner Org, and Country GDP ordered by Country.'\n )\n", "print('14: Show average age of selected gender of survey respondents.')\n", "print(\n '15: Show average educaiton level of selected gender of survey respondents.\\n'\n )\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_5(VAR_13):...\n", "\"\"\"docstring\"\"\"\n", "VAR_56 = in_f.readlines()\n", "VAR_33 = set(VAR_13.keys())\n", "for VAR_69, line in enumerate(VAR_56):\n", "VAR_46 = re.match('^(\\\\s*)\"([^\"]+)\":', line)\n", "for l in VAR_56:\n", "if VAR_46:\n", "out_f.write(l)\n", "if VAR_33:\n", "VAR_65, VAR_66 = VAR_46.groups()\n", "print('These configuration items were not set:')\n", "FUNC_2()\n", "if VAR_66 in VAR_33:\n", "print(' ' + ', '.join(sorted(list(VAR_33))))\n", "VAR_56[VAR_69] = '%s\"%s\": %s,\\n' % (VAR_65, VAR_66, VAR_13[VAR_66])\n", "VAR_33.remove(VAR_66)\n" ]
[ "def configure_cms(options):...\n", "\"\"\"docstring\"\"\"\n", "lines = in_f.readlines()\n", "unset = set(options.keys())\n", "for i, line in enumerate(lines):\n", "g = re.match('^(\\\\s*)\"([^\"]+)\":', line)\n", "for l in lines:\n", "if g:\n", "out_f.write(l)\n", "if unset:\n", "whitespace, key = g.groups()\n", "print('These configuration items were not set:')\n", "read_cms_config()\n", "if key in unset:\n", "print(' ' + ', '.join(sorted(list(unset))))\n", "lines[i] = '%s\"%s\": %s,\\n' % (whitespace, key, options[key])\n", "unset.remove(key)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "For", "Assign'", "For", "Condition", "Expr'", "Condition", "Assign'", "Expr'", "Expr'", "Condition", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_27(self):...\n", "self.compilation_ko(\n \"\"\"\nif header :is \"Sender\" \"Toto\" & header :contains \"Cc\" \"Tata\" {\n \n}\n\"\"\"\n )\n" ]
[ "def test_unknown_token(self):...\n", "self.compilation_ko(\n \"\"\"\nif header :is \"Sender\" \"Toto\" & header :contains \"Cc\" \"Tata\" {\n \n}\n\"\"\"\n )\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_12(self, VAR_2, VAR_3, VAR_4, VAR_7={}):...\n", "VAR_20 = {'parent_id': VAR_7.get('parent_id', False)}\n", "for VAR_19 in VAR_4:\n", "self.copy(VAR_2, VAR_3, VAR_19, VAR_20=default)\n", "VAR_2.commit()\n" ]
[ "def duplicate_template(self, cr, uid, ids, context={}):...\n", "default = {'parent_id': context.get('parent_id', False)}\n", "for id in ids:\n", "self.copy(cr, uid, id, default=default)\n", "cr.commit()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Expr'", "Expr'" ]
[ "def FUNC_1(self, *VAR_0, **VAR_1):...\n", "VAR_3 = MetadataStore(os.path.join(self.temporary_directory(), '%d.db' %\n self.count), self.temporary_directory(), default_eccrypto.generate_key(\n u'curve25519'))\n", "VAR_1['metadata_store'] = VAR_3\n", "VAR_4 = super(CLASS_0, self).create_node(*VAR_0, **kwargs)\n", "self.count += 1\n", "return VAR_4\n" ]
[ "def create_node(self, *args, **kwargs):...\n", "metadata_store = MetadataStore(os.path.join(self.temporary_directory(), \n '%d.db' % self.count), self.temporary_directory(), default_eccrypto.\n generate_key(u'curve25519'))\n", "kwargs['metadata_store'] = metadata_store\n", "node = super(TestGigaChannelUnits, self).create_node(*args, **kwargs)\n", "self.count += 1\n", "return node\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "AugAssign'", "Return'" ]
[ "def __init__(self):...\n", "VAR_12 = {}\n", "self.session = requests.Session()\n" ]
[ "def __init__(self):...\n", "config = {}\n", "self.session = requests.Session()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "@property...\n", "return self._attributes['state']\n" ]
[ "@property...\n", "return self._attributes['state']\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "from __future__ import unicode_literals\n", "import frappe, json\n", "from frappe import _\n", "from frappe.utils import cint, formatdate\n", "@frappe.whitelist(allow_guest=True)...\n", "from frappe.www.contact import send_message as website_send_message\n", "VAR_4 = VAR_5 = None\n", "website_send_message(VAR_0, VAR_1, VAR_2)\n", "VAR_5 = frappe.db.sql('string'.format(email_id=sender))\n", "if not VAR_5:\n", "VAR_4 = frappe.db.get_value('Lead', dict(email_id=sender))\n", "VAR_6 = frappe.get_doc(dict(doctype='Opportunity', enquiry_from='Customer' if\n customer else 'Lead', VAR_3='Open', title=subject, contact_email=sender,\n to_discuss=message))\n", "if not VAR_4:\n", "if VAR_5:\n", "VAR_8 = frappe.get_doc(dict(doctype='Lead', email_id=sender, lead_name=\n sender.split('@')[0].title())).insert(ignore_permissions=True)\n", "VAR_6.customer = VAR_5[0][0]\n", "if VAR_4:\n", "VAR_6.insert(ignore_permissions=True)\n", "VAR_6.lead = VAR_4\n", "VAR_6.lead = VAR_8.name\n", "VAR_7 = frappe.get_doc({'doctype': 'Communication', 'subject': VAR_0,\n 'content': VAR_1, 'sender': VAR_2, 'sent_or_received': 'Received',\n 'reference_doctype': 'Opportunity', 'reference_name': VAR_6.name})\n", "VAR_7.insert(ignore_permissions=True)\n", "return 'okay'\n" ]
[ "from __future__ import unicode_literals\n", "import frappe, json\n", "from frappe import _\n", "from frappe.utils import cint, formatdate\n", "@frappe.whitelist(allow_guest=True)...\n", "from frappe.www.contact import send_message as website_send_message\n", "lead = customer = None\n", "website_send_message(subject, message, sender)\n", "customer = frappe.db.sql(\n \"\"\"select distinct dl.link_name from `tabDynamic Link` dl\n\t\tleft join `tabContact` c on dl.parent=c.name where dl.link_doctype='Customer'\n\t\tand c.email_id='{email_id}'\"\"\"\n .format(email_id=sender))\n", "if not customer:\n", "lead = frappe.db.get_value('Lead', dict(email_id=sender))\n", "opportunity = frappe.get_doc(dict(doctype='Opportunity', enquiry_from=\n 'Customer' if customer else 'Lead', status='Open', title=subject,\n contact_email=sender, to_discuss=message))\n", "if not lead:\n", "if customer:\n", "new_lead = frappe.get_doc(dict(doctype='Lead', email_id=sender, lead_name=\n sender.split('@')[0].title())).insert(ignore_permissions=True)\n", "opportunity.customer = customer[0][0]\n", "if lead:\n", "opportunity.insert(ignore_permissions=True)\n", "opportunity.lead = lead\n", "opportunity.lead = new_lead.name\n", "comm = frappe.get_doc({'doctype': 'Communication', 'subject': subject,\n 'content': message, 'sender': sender, 'sent_or_received': 'Received',\n 'reference_doctype': 'Opportunity', 'reference_name': opportunity.name})\n", "comm.insert(ignore_permissions=True)\n", "return 'okay'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "Condition", "ImportFrom'", "Assign'", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_6(self, VAR_16):...\n", "if 'avatar_uploaded' in VAR_16[0] and VAR_16[0]['avatar_uploaded'] is True:\n", "return\n", "VAR_32 = []\n", "for sd in os.walk(VAR_9.av_dir):\n", "VAR_32.extend(sd[2])\n", "VAR_33 = os.path.join(sd[0], random.choice(VAR_32))\n", "self.log.info('Uploading %s as new avatar', VAR_33)\n", "self.site.uploadavatar('0', VAR_33)\n", "VAR_16[0]['avatar'] = VAR_33\n", "VAR_16[0]['avatar_uploaded'] = True\n" ]
[ "def upload_avatar(self, ud):...\n", "if 'avatar_uploaded' in ud[0] and ud[0]['avatar_uploaded'] is True:\n", "return\n", "files = []\n", "for sd in os.walk(c.av_dir):\n", "files.extend(sd[2])\n", "av = os.path.join(sd[0], random.choice(files))\n", "self.log.info('Uploading %s as new avatar', av)\n", "self.site.uploadavatar('0', av)\n", "ud[0]['avatar'] = av\n", "ud[0]['avatar_uploaded'] = True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "For", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'" ]
[ "from django.test import Client\n", "from dashboard.tests.loader import *\n", "from django.test import TestCase, override_settings, RequestFactory\n", "from dashboard.models import DataDocument, Script, ExtractedText, ExtractedChemical, QAGroup\n", "from django.db.models import Count\n", "VAR_0 = fixtures_standard\n", "def FUNC_0(self):...\n", "self.factory = RequestFactory()\n", "self.client.login(username='Karyn', password='specialP@55word')\n", "def FUNC_1(self):...\n", "\"\"\"docstring\"\"\"\n", "self.assertFalse(Script.objects.get(VAR_2=5).qa_begun,\n 'The Script should have qa_begun of False at the beginning')\n", "VAR_1 = self.client.get('/qa/extractionscript/5/')\n", "self.assertTrue(Script.objects.get(VAR_2=5).qa_begun,\n 'qa_begun should now be true')\n", "def FUNC_2(self):...\n", "VAR_1 = self.client.get(f'/qa/extractionscript/')\n", "self.assertIn(f\"/qa/extractionscript/15/'> Begin QA\".encode(), VAR_1.content)\n", "VAR_2 = 15\n", "VAR_1 = self.client.get(f'/qa/extractionscript/{VAR_2}/')\n", "VAR_3 = ExtractedText.objects.filter(extraction_script=pk).first()\n", "self.assertIn(f'/qa/extractedtext/{VAR_3.pk}/'.encode(), VAR_1.content)\n", "VAR_4 = QAGroup.objects.filter(extraction_script_id=pk).count()\n", "self.assertTrue(VAR_4 == 1)\n", "self.assertTrue(Script.objects.get(VAR_2=15).qa_begun)\n", "VAR_5 = QAGroup.objects.get(extraction_script_id=pk).pk\n", "VAR_3 = ExtractedText.objects.filter(extraction_script=pk).first()\n", "self.assertTrue(VAR_3.qa_group_id == VAR_5)\n", "VAR_1 = self.client.get(f'/qa/extractionscript/')\n", "self.assertIn(f\"'/qa/extractionscript/15/'> Continue QA\".encode(), VAR_1.\n content)\n", "def FUNC_3(self):...\n", "VAR_1 = self.client.get(f'/qa/extractionscript/')\n", "self.assertIn(f\"/qa/extractionscript/15/'> Begin QA\".encode(), VAR_1.content)\n", "VAR_2 = 9\n", "VAR_1 = self.client.get(f'/qa/extractionscript/{VAR_2}/', follow=True)\n", "self.assertEqual(VAR_1.status_code, 200)\n", "def FUNC_4(self):...\n", "VAR_6 = Script.objects.annotate(num_ets=Count('extractedtext')).filter(\n num_ets__lt=100).filter(script_type='EX').first()\n", "VAR_2 = ExtractedText.objects.filter(qa_group=None).filter(extraction_script\n =scr).filter(data_document__data_group__group_type__code='CO').first().pk\n", "VAR_1 = self.client.get(f'/qa/extractedtext/{VAR_2}/')\n", "VAR_6 = ExtractedText.objects.get(VAR_2=pk).extraction_script\n", "VAR_4 = QAGroup.objects.filter(extraction_script=scr).count()\n", "self.assertTrue(VAR_4 == 1)\n", "self.assertTrue(VAR_6.qa_begun)\n", "VAR_7 = QAGroup.objects.get(extraction_script=scr)\n", "VAR_3 = ExtractedText.objects.get(VAR_2=pk)\n", "self.assertTrue(VAR_3.qa_group == VAR_7)\n", "VAR_1 = self.client.get(f'/qa/extractionscript/')\n", "self.assertIn(f\"'/qa/extractionscript/{VAR_6.pk}/'> Continue QA\".encode(),\n VAR_1.content)\n", "VAR_6 = Script.objects.annotate(num_ets=Count('extractedtext')).filter(\n num_ets__gt=100).first()\n", "VAR_2 = ExtractedText.objects.filter(extraction_script=scr).first().pk\n", "VAR_1 = self.client.get(f'/qa/extractedtext/{VAR_2}/')\n", "VAR_6 = ExtractedText.objects.get(VAR_2=pk).extraction_script\n", "VAR_7 = QAGroup.objects.get(extraction_script=scr)\n", "VAR_8 = ExtractedText.objects.filter(qa_group=new_group).count()\n", "self.assertTrue(VAR_8 > 100)\n", "VAR_2 = ExtractedText.objects.filter(extraction_script_id=scr.id).filter(\n qa_group=None).first().pk\n", "VAR_1 = self.client.get(f'/qa/extractedtext/{VAR_2}/')\n", "self.assertGreater(ExtractedText.objects.filter(qa_group=new_group).count(),\n VAR_8)\n", "def FUNC_5(self):...\n", "VAR_1 = self.client.get(f'/habitsandpractices/54/')\n", "self.assertContains(VAR_1, '<b>Add New Habit and Practice</b>')\n", "def FUNC_6(self):...\n", "VAR_1 = self.client.get('/qa/extractedtext/5', follow=True)\n", "self.assertIn(b'/datadocument/5', VAR_1.content)\n", "def FUNC_7(self):...\n", "VAR_1 = self.client.get('/qa/extractionscript/5', follow=True)\n", "VAR_1 = self.client.get('/qa/extractedtext/7', follow=True)\n", "def FUNC_8(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_1 = self.client.get('/qa/extractionscript/15/', follow=True)\n", "VAR_1 = self.client.get('/qa/extractedtext/5/', follow=True)\n", "self.assertIn(b'<input type=\"text\" name=\"rawchem-1-raw_cas\"', VAR_1.content)\n", "self.assertNotIn(b'<input type=\"text\" name=\"rawchem-1-unit_type\"', VAR_1.\n content)\n", "self.assertIn(b'Functional Use Chem1', VAR_1.content)\n", "VAR_1 = self.client.get('/qa/extractionscript/5', follow=True)\n", "VAR_1 = self.client.get('/qa/extractedtext/7/', follow=True)\n", "self.assertIn(b'rawchem-1-unit_type', VAR_1.content)\n", "def FUNC_9(self):...\n", "VAR_1 = self.client.get(f'/qa/chemicalpresence/')\n", "self.assertIn(f\"/qa/chemicalpresencegroup/49/'> View Chemical Presence Lists\"\n .encode(), VAR_1.content)\n", "VAR_1 = self.client.get(f'/qa/chemicalpresencegroup/49', follow=True)\n", "self.assertIn(f'/qa/extractedtext/254781/\"> Begin QA'.encode(), VAR_1.content)\n", "VAR_9 = ExtractedListPresence.objects.filter(extracted_text__data_document_id\n =254781)\n", "self.assertEqual(VAR_9.filter(qa_flag=True).count(), 0)\n", "VAR_1 = self.client.get(f'/qa/extractedtext/254781/', follow=True)\n", "VAR_9 = ExtractedListPresence.objects.filter(extracted_text__data_document_id\n =254781)\n", "self.assertEqual(VAR_9.filter(qa_flag=True).count(), 30)\n", "VAR_10 = VAR_9.filter(qa_flag=True).first()\n", "self.assertIn(VAR_10.raw_cas.encode(), VAR_1.content)\n", "VAR_11 = VAR_9.filter(qa_flag=False).first()\n", "self.assertNotIn(VAR_11.raw_cas.encode(), VAR_1.content)\n", "def FUNC_10(self):...\n", "for VAR_3 in ExtractedText.objects.all():\n", "VAR_1 = self.client.get(f'/qa/extractedtext/%s' % VAR_3.data_document_id,\n follow=True)\n", "if VAR_1.status_code != 200:\n", "print(VAR_3.data_document_id)\n", "self.assertEqual(VAR_1.status_code, 200)\n" ]
[ "from django.test import Client\n", "from dashboard.tests.loader import *\n", "from django.test import TestCase, override_settings, RequestFactory\n", "from dashboard.models import DataDocument, Script, ExtractedText, ExtractedChemical, QAGroup\n", "from django.db.models import Count\n", "fixtures = fixtures_standard\n", "def setUp(self):...\n", "self.factory = RequestFactory()\n", "self.client.login(username='Karyn', password='specialP@55word')\n", "def test_qa_begin(self):...\n", "\"\"\"docstring\"\"\"\n", "self.assertFalse(Script.objects.get(pk=5).qa_begun,\n 'The Script should have qa_begun of False at the beginning')\n", "response = self.client.get('/qa/extractionscript/5/')\n", "self.assertTrue(Script.objects.get(pk=5).qa_begun,\n 'qa_begun should now be true')\n", "def test_new_qa_group_urls(self):...\n", "response = self.client.get(f'/qa/extractionscript/')\n", "self.assertIn(f\"/qa/extractionscript/15/'> Begin QA\".encode(), response.content\n )\n", "pk = 15\n", "response = self.client.get(f'/qa/extractionscript/{pk}/')\n", "et = ExtractedText.objects.filter(extraction_script=pk).first()\n", "self.assertIn(f'/qa/extractedtext/{et.pk}/'.encode(), response.content)\n", "group_count = QAGroup.objects.filter(extraction_script_id=pk).count()\n", "self.assertTrue(group_count == 1)\n", "self.assertTrue(Script.objects.get(pk=15).qa_begun)\n", "group_pk = QAGroup.objects.get(extraction_script_id=pk).pk\n", "et = ExtractedText.objects.filter(extraction_script=pk).first()\n", "self.assertTrue(et.qa_group_id == group_pk)\n", "response = self.client.get(f'/qa/extractionscript/')\n", "self.assertIn(f\"'/qa/extractionscript/15/'> Continue QA\".encode(), response\n .content)\n", "def test_qa_script_without_ext_text(self):...\n", "response = self.client.get(f'/qa/extractionscript/')\n", "self.assertIn(f\"/qa/extractionscript/15/'> Begin QA\".encode(), response.content\n )\n", "pk = 9\n", "response = self.client.get(f'/qa/extractionscript/{pk}/', follow=True)\n", "self.assertEqual(response.status_code, 200)\n", "def test_data_document_qa(self):...\n", "scr = Script.objects.annotate(num_ets=Count('extractedtext')).filter(\n num_ets__lt=100).filter(script_type='EX').first()\n", "pk = ExtractedText.objects.filter(qa_group=None).filter(extraction_script=scr\n ).filter(data_document__data_group__group_type__code='CO').first().pk\n", "response = self.client.get(f'/qa/extractedtext/{pk}/')\n", "scr = ExtractedText.objects.get(pk=pk).extraction_script\n", "group_count = QAGroup.objects.filter(extraction_script=scr).count()\n", "self.assertTrue(group_count == 1)\n", "self.assertTrue(scr.qa_begun)\n", "new_group = QAGroup.objects.get(extraction_script=scr)\n", "et = ExtractedText.objects.get(pk=pk)\n", "self.assertTrue(et.qa_group == new_group)\n", "response = self.client.get(f'/qa/extractionscript/')\n", "self.assertIn(f\"'/qa/extractionscript/{scr.pk}/'> Continue QA\".encode(),\n response.content)\n", "scr = Script.objects.annotate(num_ets=Count('extractedtext')).filter(\n num_ets__gt=100).first()\n", "pk = ExtractedText.objects.filter(extraction_script=scr).first().pk\n", "response = self.client.get(f'/qa/extractedtext/{pk}/')\n", "scr = ExtractedText.objects.get(pk=pk).extraction_script\n", "new_group = QAGroup.objects.get(extraction_script=scr)\n", "initial_qa_count = ExtractedText.objects.filter(qa_group=new_group).count()\n", "self.assertTrue(initial_qa_count > 100)\n", "pk = ExtractedText.objects.filter(extraction_script_id=scr.id).filter(qa_group\n =None).first().pk\n", "response = self.client.get(f'/qa/extractedtext/{pk}/')\n", "self.assertGreater(ExtractedText.objects.filter(qa_group=new_group).count(),\n initial_qa_count)\n", "def test_habitsandpractices(self):...\n", "response = self.client.get(f'/habitsandpractices/54/')\n", "self.assertContains(response, '<b>Add New Habit and Practice</b>')\n", "def test_dd_link(self):...\n", "response = self.client.get('/qa/extractedtext/5', follow=True)\n", "self.assertIn(b'/datadocument/5', response.content)\n", "def test_approval(self):...\n", "response = self.client.get('/qa/extractionscript/5', follow=True)\n", "response = self.client.get('/qa/extractedtext/7', follow=True)\n", "def test_hidden_fields(self):...\n", "\"\"\"docstring\"\"\"\n", "response = self.client.get('/qa/extractionscript/15/', follow=True)\n", "response = self.client.get('/qa/extractedtext/5/', follow=True)\n", "self.assertIn(b'<input type=\"text\" name=\"rawchem-1-raw_cas\"', response.content)\n", "self.assertNotIn(b'<input type=\"text\" name=\"rawchem-1-unit_type\"', response\n .content)\n", "self.assertIn(b'Functional Use Chem1', response.content)\n", "response = self.client.get('/qa/extractionscript/5', follow=True)\n", "response = self.client.get('/qa/extractedtext/7/', follow=True)\n", "self.assertIn(b'rawchem-1-unit_type', response.content)\n", "def test_cpcat_qa(self):...\n", "response = self.client.get(f'/qa/chemicalpresence/')\n", "self.assertIn(f\"/qa/chemicalpresencegroup/49/'> View Chemical Presence Lists\"\n .encode(), response.content)\n", "response = self.client.get(f'/qa/chemicalpresencegroup/49', follow=True)\n", "self.assertIn(f'/qa/extractedtext/254781/\"> Begin QA'.encode(), response.\n content)\n", "elps = ExtractedListPresence.objects.filter(extracted_text__data_document_id\n =254781)\n", "self.assertEqual(elps.filter(qa_flag=True).count(), 0)\n", "response = self.client.get(f'/qa/extractedtext/254781/', follow=True)\n", "elps = ExtractedListPresence.objects.filter(extracted_text__data_document_id\n =254781)\n", "self.assertEqual(elps.filter(qa_flag=True).count(), 30)\n", "elp_flagged = elps.filter(qa_flag=True).first()\n", "self.assertIn(elp_flagged.raw_cas.encode(), response.content)\n", "elp_not_flagged = elps.filter(qa_flag=False).first()\n", "self.assertNotIn(elp_not_flagged.raw_cas.encode(), response.content)\n", "def test_every_extractedtext_qa(self):...\n", "for et in ExtractedText.objects.all():\n", "response = self.client.get(f'/qa/extractedtext/%s' % et.data_document_id,\n follow=True)\n", "if response.status_code != 200:\n", "print(et.data_document_id)\n", "self.assertEqual(response.status_code, 200)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "FunctionDef'", "Assign'", "Expr'", "FunctionDef'", "Docstring", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "FunctionDef'", "For", "Assign'", "Condition", "Expr'", "Expr'" ]
[ "@handled_slot(bool)...\n", "VAR_19, VAR_18 = QInputDialog.getInt(self.window, title=\n 'Set BiFi Motor Position', label='Absolute Position:', value=self.\n matisse.query('MOTBI:POS?', numeric_result=True))\n", "if VAR_18:\n", "print(f'Setting BiFi motor position to {VAR_19}.')\n", "self.matisse.set_bifi_motor_pos(VAR_19)\n" ]
[ "@handled_slot(bool)...\n", "target_pos, success = QInputDialog.getInt(self.window, title=\n 'Set BiFi Motor Position', label='Absolute Position:', value=self.\n matisse.query('MOTBI:POS?', numeric_result=True))\n", "if success:\n", "print(f'Setting BiFi motor position to {target_pos}.')\n", "self.matisse.set_bifi_motor_pos(target_pos)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_1(self, VAR_2, VAR_1):...\n", "VAR_5 = 'string' % (VAR_1, VAR_2)\n", "VAR_6 = sql.queryDB(self.conn, VAR_5)\n", "return VAR_6\n" ]
[ "def getCommentsByPostid(self, postid, userid):...\n", "sqlText = (\n 'select (select Count(*) from comment_like where comments.commentid = comment_like.commentid) as like,(select Count(*) from comment_like where comments.commentid = comment_like.commentid and comment_like.userid=%d) as flag,commentid,name,comment from users,comments where users.userid=comments.userid and postid=%d order by date desc;'\n % (userid, postid))\n", "result = sql.queryDB(self.conn, sqlText)\n", "return result\n" ]
[ 0, 4, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_32(self, VAR_25):...\n", "\"\"\"docstring\"\"\"\n", "VAR_44 = self.host.sos_path_strip\n", "if VAR_44:\n", "VAR_25 = VAR_25.replace(VAR_44, '')\n", "VAR_25 = VAR_25.split()[0]\n", "self.log_debug('Final sos path: %s' % VAR_25)\n", "self.sos_path = VAR_25\n", "self.archive = VAR_25.split('/')[-1]\n" ]
[ "def finalize_sos_path(self, path):...\n", "\"\"\"docstring\"\"\"\n", "pstrip = self.host.sos_path_strip\n", "if pstrip:\n", "path = path.replace(pstrip, '')\n", "path = path.split()[0]\n", "self.log_debug('Final sos path: %s' % path)\n", "self.sos_path = path\n", "self.archive = path.split('/')[-1]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'" ]
[ "def FUNC_7(VAR_24):...\n", "for VAR_27 in VAR_24:\n", "if VAR_27['submittable']:\n", "VAR_27.update({'submission_count': 0, 'submissions': [], 'best_submission':\n None, 'points': 0, 'passed': VAR_27['points_to_pass'] == 0, 'graded': \n False, 'unofficial': False})\n", "FUNC_7(VAR_27.get('children'))\n" ]
[ "def r_augment(children):...\n", "for entry in children:\n", "if entry['submittable']:\n", "entry.update({'submission_count': 0, 'submissions': [], 'best_submission':\n None, 'points': 0, 'passed': entry['points_to_pass'] == 0, 'graded': \n False, 'unofficial': False})\n", "r_augment(entry.get('children'))\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_26(self, VAR_74):...\n", "def FUNC_39(VAR_101):...\n", "VAR_101.benchmark = VAR_74\n", "return VAR_101\n" ]
[ "def benchmark(self, benchmark):...\n", "def decorate(ruleinfo):...\n", "ruleinfo.benchmark = benchmark\n", "return ruleinfo\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_15(self, VAR_14):...\n", "self.sleep_ticker.tick()\n", "self.poll(VAR_14 * 1000)\n", "while self.sleep_ticker.elapsed(False) < VAR_14:\n", "self.poll(VAR_14 * 1000)\n", "return\n" ]
[ "def inter_sleep(self, timeout):...\n", "self.sleep_ticker.tick()\n", "self.poll(timeout * 1000)\n", "while self.sleep_ticker.elapsed(False) < timeout:\n", "self.poll(timeout * 1000)\n", "return\n" ]
[ 0, 0, 7, 7, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Condition", "Expr'", "Return'" ]
[ "def __init__(self, VAR_2=VAR_0, *VAR_3, **VAR_4):...\n", "super(CLASS_0, self).__init__(*VAR_3, **kwargs)\n", "self.max_length = VAR_2\n" ]
[ "def __init__(self, max_length=MAX_LENGTH, *args, **kwargs):...\n", "super(CharField, self).__init__(*args, **kwargs)\n", "self.max_length = max_length\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'" ]
[ "def FUNC_8(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_4 = '/api/apps'\n", "VAR_5 = self.client.post(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_6 = VAR_5.data['id']\n", "VAR_4 = '/api/apps/{app_id}/builds'.format(**locals())\n", "VAR_8 = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 201)\n", "VAR_4 = '/api/apps/{app_id}/scale'.format(**locals())\n", "VAR_8 = {'web': 4, 'worker': 2}\n", "VAR_5 = self.client.post(VAR_4, json.dumps(VAR_8), content_type=\n 'application/json')\n", "self.assertEqual(VAR_5.status_code, 204)\n", "VAR_4 = '/api/apps/{app_id}/containers'.format(**locals())\n", "VAR_5 = self.client.get(VAR_4)\n", "self.assertEqual(VAR_5.status_code, 200)\n", "self.assertEqual(len(VAR_5.data['results']), 6)\n", "VAR_10 = VAR_5.data['results'][0]['uuid']\n", "VAR_11 = Container.objects.get(VAR_10=uuid)\n", "self.assertEqual(VAR_11.short_name(), '{}.{}.{}'.format(VAR_11.app, VAR_11.\n type, VAR_11.num))\n", "self.assertEqual(str(VAR_11), '{}.{}.{}'.format(VAR_11.app, VAR_11.type,\n VAR_11.num))\n" ]
[ "def test_container_str(self):...\n", "\"\"\"docstring\"\"\"\n", "url = '/api/apps'\n", "response = self.client.post(url)\n", "self.assertEqual(response.status_code, 201)\n", "app_id = response.data['id']\n", "url = '/api/apps/{app_id}/builds'.format(**locals())\n", "body = {'image': 'autotest/example', 'sha': 'a' * 40, 'procfile': json.\n dumps({'web': 'node server.js', 'worker': 'node worker.js'})}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 201)\n", "url = '/api/apps/{app_id}/scale'.format(**locals())\n", "body = {'web': 4, 'worker': 2}\n", "response = self.client.post(url, json.dumps(body), content_type=\n 'application/json')\n", "self.assertEqual(response.status_code, 204)\n", "url = '/api/apps/{app_id}/containers'.format(**locals())\n", "response = self.client.get(url)\n", "self.assertEqual(response.status_code, 200)\n", "self.assertEqual(len(response.data['results']), 6)\n", "uuid = response.data['results'][0]['uuid']\n", "container = Container.objects.get(uuid=uuid)\n", "self.assertEqual(container.short_name(), '{}.{}.{}'.format(container.app,\n container.type, container.num))\n", "self.assertEqual(str(container), '{}.{}.{}'.format(container.app, container\n .type, container.num))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_38(self, VAR_84):...\n", "return [VAR_82 for job in VAR_84.jobs for VAR_82 in job.subworkflow_input if\n job.subworkflow_input[VAR_82] is self]\n" ]
[ "def targets(self, dag):...\n", "return [f for job in dag.jobs for f in job.subworkflow_input if job.\n subworkflow_input[f] is self]\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@property...\n", "return self._params\n" ]
[ "@property...\n", "return self._params\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_6(self):...\n", "VAR_5 = Mock()\n", "VAR_5.sendmail.side_effect = lambda VAR_68: succeed(VAR_68)\n", "return VAR_5\n" ]
[ "def _create_mail_sender(self):...\n", "mail_sender = Mock()\n", "mail_sender.sendmail.side_effect = lambda mail: succeed(mail)\n", "return mail_sender\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Return'" ]
[ "\"\"\"Volume driver for Dell EqualLogic Storage.\"\"\"\n", "import functools\n", "import random\n", "import eventlet\n", "from eventlet import greenthread\n", "import greenlet\n", "from oslo.config import cfg\n", "from cinder import exception\n", "from cinder.openstack.common import excutils\n", "from cinder.openstack.common import log as logging\n", "from cinder.openstack.common import processutils\n", "from cinder import utils\n", "from cinder.volume.drivers.san import SanISCSIDriver\n", "VAR_0 = logging.getLogger(__name__)\n", "VAR_1 = [cfg.StrOpt('eqlx_group_name', default='group-0', help=\n 'Group name to use for creating volumes'), cfg.IntOpt(\n 'eqlx_cli_timeout', default=30, help=\n 'Timeout for the Group Manager cli command execution'), cfg.IntOpt(\n 'eqlx_cli_max_retries', default=5, help=\n 'Maximum retry count for reconnection'), cfg.BoolOpt('eqlx_use_chap',\n default=False, help='Use CHAP authentication for targets?'), cfg.StrOpt\n ('eqlx_chap_login', default='admin', help='Existing CHAP account name'),\n cfg.StrOpt('eqlx_chap_password', default='password', help=\n 'Password for specified CHAP account name', secret=True), cfg.StrOpt(\n 'eqlx_pool', default='default', help=\n 'Pool in which volumes will be created')]\n", "VAR_2 = cfg.CONF\n", "VAR_2.register_opts(VAR_1)\n", "def FUNC_0(VAR_3):...\n", "@functools.wraps(VAR_3)...\n", "VAR_23 = VAR_5.pop('timeout', None)\n", "VAR_24 = eventlet.spawn(VAR_3, self, *VAR_4, **kwargs)\n", "if VAR_23 is None:\n", "return VAR_24.wait()\n", "VAR_36 = eventlet.spawn_after(VAR_23, VAR_24.kill)\n", "VAR_48 = VAR_24.wait()\n", "VAR_36.cancel()\n", "return FUNC_1\n", "return VAR_48\n" ]
[ "\"\"\"Volume driver for Dell EqualLogic Storage.\"\"\"\n", "import functools\n", "import random\n", "import eventlet\n", "from eventlet import greenthread\n", "import greenlet\n", "from oslo.config import cfg\n", "from cinder import exception\n", "from cinder.openstack.common import excutils\n", "from cinder.openstack.common import log as logging\n", "from cinder.openstack.common import processutils\n", "from cinder import utils\n", "from cinder.volume.drivers.san import SanISCSIDriver\n", "LOG = logging.getLogger(__name__)\n", "eqlx_opts = [cfg.StrOpt('eqlx_group_name', default='group-0', help=\n 'Group name to use for creating volumes'), cfg.IntOpt(\n 'eqlx_cli_timeout', default=30, help=\n 'Timeout for the Group Manager cli command execution'), cfg.IntOpt(\n 'eqlx_cli_max_retries', default=5, help=\n 'Maximum retry count for reconnection'), cfg.BoolOpt('eqlx_use_chap',\n default=False, help='Use CHAP authentication for targets?'), cfg.StrOpt\n ('eqlx_chap_login', default='admin', help='Existing CHAP account name'),\n cfg.StrOpt('eqlx_chap_password', default='password', help=\n 'Password for specified CHAP account name', secret=True), cfg.StrOpt(\n 'eqlx_pool', default='default', help=\n 'Pool in which volumes will be created')]\n", "CONF = cfg.CONF\n", "CONF.register_opts(eqlx_opts)\n", "def with_timeout(f):...\n", "@functools.wraps(f)...\n", "timeout = kwargs.pop('timeout', None)\n", "gt = eventlet.spawn(f, self, *args, **kwargs)\n", "if timeout is None:\n", "return gt.wait()\n", "kill_thread = eventlet.spawn_after(timeout, gt.kill)\n", "res = gt.wait()\n", "kill_thread.cancel()\n", "return __inner\n", "return res\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Expr'", "Import'", "Import'", "Import'", "ImportFrom'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Condition", "Assign'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Expr'", "Return'", "Return'" ]
[ "def __init__(self, VAR_18, VAR_19):...\n", "\"\"\"docstring\"\"\"\n", "self.auth = VAR_18\n", "self.data = None\n", "self.station_data = None\n", "self.station = VAR_19\n" ]
[ "def __init__(self, auth, station):...\n", "\"\"\"docstring\"\"\"\n", "self.auth = auth\n", "self.data = None\n", "self.station_data = None\n", "self.station = station\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_2(self, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "VAR_11 = []\n", "for error in VAR_5:\n", "if self.filepath not in error.get('path', ''):\n", "return VAR_11\n", "VAR_12 = error.get('severity', 'X').capitalize()[0]\n", "if VAR_12 == 'X':\n", "if VAR_12 not in ['E', 'W']:\n", "VAR_12 = 'V'\n", "VAR_11.append({'underline_range': True, 'lineno': error.get('line', 0),\n 'offset': error.get('col', 0), 'raw_message': error.get('message', ''),\n 'code': 0, 'level': VAR_12, 'message': '[{0}] {1} ({2}): {3}'.format(\n VAR_12, error.get('linter', 'none'), error.get('severity', 'none'),\n error.get('message'))})\n" ]
[ "def _normalize(self, metaerrors):...\n", "\"\"\"docstring\"\"\"\n", "errors = []\n", "for error in metaerrors:\n", "if self.filepath not in error.get('path', ''):\n", "return errors\n", "error_type = error.get('severity', 'X').capitalize()[0]\n", "if error_type == 'X':\n", "if error_type not in ['E', 'W']:\n", "error_type = 'V'\n", "errors.append({'underline_range': True, 'lineno': error.get('line', 0),\n 'offset': error.get('col', 0), 'raw_message': error.get('message', ''),\n 'code': 0, 'level': error_type, 'message': '[{0}] {1} ({2}): {3}'.\n format(error_type, error.get('linter', 'none'), error.get('severity',\n 'none'), error.get('message'))})\n" ]
[ 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Condition", "Return'", "Assign'", "Condition", "Condition", "Assign'", "Expr'" ]
[ "from __future__ import absolute_import, division, print_function, unicode_literals\n", "from collections import namedtuple\n", "from pants.backend.jvm.subsystems.jvm_tool_mixin import JvmToolMixin\n", "from pants.backend.jvm.subsystems.zinc_language_mixin import ZincLanguageMixin\n", "from pants.backend.jvm.targets.jar_library import JarLibrary\n", "from pants.build_graph.address import Address\n", "from pants.build_graph.injectables_mixin import InjectablesMixin\n", "from pants.java.jar.jar_dependency import JarDependency\n", "from pants.subsystem.subsystem import Subsystem\n", "VAR_0 = namedtuple('major_version_info', ['full_version'])\n", "VAR_1 = {'2.10': VAR_0(full_version='2.10.6'), '2.11': VAR_0(full_version=\n '2.11.12'), '2.12': VAR_0(full_version='2.12.4')}\n", "VAR_2 = JarDependency('org.scalastyle', 'scalastyle_2.11', '0.8.0')\n", "\"\"\"A scala platform.\n\n :API: public\n \"\"\"\n", "VAR_3 = 'scala'\n", "@classmethod...\n", "return JarDependency(org='org.scala-lang', VAR_5=name, rev=scala_build_info\n [version].full_version)\n" ]
[ "from __future__ import absolute_import, division, print_function, unicode_literals\n", "from collections import namedtuple\n", "from pants.backend.jvm.subsystems.jvm_tool_mixin import JvmToolMixin\n", "from pants.backend.jvm.subsystems.zinc_language_mixin import ZincLanguageMixin\n", "from pants.backend.jvm.targets.jar_library import JarLibrary\n", "from pants.build_graph.address import Address\n", "from pants.build_graph.injectables_mixin import InjectablesMixin\n", "from pants.java.jar.jar_dependency import JarDependency\n", "from pants.subsystem.subsystem import Subsystem\n", "major_version_info = namedtuple('major_version_info', ['full_version'])\n", "scala_build_info = {'2.10': major_version_info(full_version='2.10.6'),\n '2.11': major_version_info(full_version='2.11.12'), '2.12':\n major_version_info(full_version='2.12.4')}\n", "scala_style_jar = JarDependency('org.scalastyle', 'scalastyle_2.11', '0.8.0')\n", "\"\"\"A scala platform.\n\n :API: public\n \"\"\"\n", "options_scope = 'scala'\n", "@classmethod...\n", "return JarDependency(org='org.scala-lang', name=name, rev=scala_build_info[\n version].full_version)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Condition", "Return'" ]
[ "def FUNC_4(self, **VAR_6):...\n", "VAR_14 = super().get_context_data(**kwargs)\n", "return VAR_14\n" ]
[ "def get_context_data(self, **kwargs):...\n", "context = super().get_context_data(**kwargs)\n", "return context\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_5(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_11 = VAR_0[:]\n", "if self.enabled():\n", "VAR_11.append((Allow, self.login, ('view_user', 'edit_user',\n 'list_holidays', 'add_holiday', 'edit_holiday', 'list_competences')))\n", "return VAR_11\n", "VAR_11.append((Allow, Authenticated, 'visit'))\n" ]
[ "def get_user_acl(self):...\n", "\"\"\"docstring\"\"\"\n", "acl = DEFAULT_PERM[:]\n", "if self.enabled():\n", "acl.append((Allow, self.login, ('view_user', 'edit_user', 'list_holidays',\n 'add_holiday', 'edit_holiday', 'list_competences')))\n", "return acl\n", "acl.append((Allow, Authenticated, 'visit'))\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Expr'", "Return'", "Expr'" ]
[ "def FUNC_4(VAR_13, VAR_14):...\n", "\"\"\"docstring\"\"\"\n", "VAR_25 = 'string'\n", "VAR_14.execute(VAR_25)\n", "VAR_26 = []\n", "for record in VAR_14:\n", "VAR_35 = record[0]\n", "return VAR_26\n", "VAR_26.append(VAR_35)\n" ]
[ "def get_all_old_sources(conn, sqlite):...\n", "\"\"\"docstring\"\"\"\n", "query = \"\"\"\n SELECT\n source\n FROM\n source\n GROUP BY\n source\n \"\"\"\n", "sqlite.execute(query)\n", "old_sources = []\n", "for record in sqlite:\n", "old_source = record[0]\n", "return old_sources\n", "old_sources.append(old_source)\n" ]
[ 0, 0, 0, 4, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Assign'", "For", "Assign'", "Return'", "Expr'" ]
[ "def FUNC_6(self, VAR_33):...\n", "if not VAR_33:\n", "VAR_101.errors.add(self.error)\n", "VAR_33 = float(VAR_33)\n", "VAR_101.errors.add(self.error)\n", "return\n", "if self.min is not None and VAR_33 < self.min:\n", "VAR_33 = self.min\n", "if self.max is not None and VAR_33 > self.max:\n", "return VAR_33\n", "VAR_33 = self.max\n" ]
[ "def run(self, val):...\n", "if not val:\n", "c.errors.add(self.error)\n", "val = float(val)\n", "c.errors.add(self.error)\n", "return\n", "if self.min is not None and val < self.min:\n", "val = self.min\n", "if self.max is not None and val > self.max:\n", "return val\n", "val = self.max\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Expr'", "Assign'", "Expr'", "Return'", "Condition", "Assign'", "Condition", "Return'", "Assign'" ]
[ "def FUNC_2():...\n", "\"\"\"docstring\"\"\"\n", "VAR_6 = {}\n", "for arg in request.args:\n", "VAR_8 = re.findall('psize_(.*)', arg)\n", "return VAR_6\n", "if VAR_8:\n", "VAR_6[VAR_8[0]] = int(request.args.get(arg))\n" ]
[ "def get_page_size_args():...\n", "\"\"\"docstring\"\"\"\n", "page_sizes = {}\n", "for arg in request.args:\n", "re_match = re.findall('psize_(.*)', arg)\n", "return page_sizes\n", "if re_match:\n", "page_sizes[re_match[0]] = int(request.args.get(arg))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "For", "Assign'", "Return'", "Condition", "Assign'" ]
[ "def __init__(self, VAR_21=0):...\n", "self.maxdepth = VAR_21\n" ]
[ "def __init__(self, maxdepth=0):...\n", "self.maxdepth = maxdepth\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def FUNC_39(VAR_101):...\n", "VAR_101.message = VAR_73\n", "return VAR_101\n" ]
[ "def decorate(ruleinfo):...\n", "ruleinfo.message = message\n", "return ruleinfo\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def __repr__(self):...\n", "return u'Nullable({})'.format(repr(self.inner_type))\n" ]
[ "def __repr__(self):...\n", "return u'Nullable({})'.format(repr(self.inner_type))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_0(*VAR_0, **VAR_1):...\n", "VAR_2 = requests.Response()\n", "VAR_2.status_code = 200\n", "VAR_2._content_consumed = True\n", "return VAR_2\n" ]
[ "def mock_import_repository_task(*args, **kwargs):...\n", "resp = requests.Response()\n", "resp.status_code = 200\n", "resp._content_consumed = True\n", "return resp\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "@patch('invenio.ext.session.interface.SessionInterface.save_session')...\n", "from invenio.modules.oauthclient.models import RemoteToken\n", "from invenio.modules.oauthclient.handlers import token_getter\n", "from invenio.modules.oauthclient.client import oauth\n", "VAR_9 = MagicMock()\n", "VAR_9.get_id = MagicMock(return_value=1)\n", "VAR_9.is_authenticated = MagicMock(return_value=True)\n", "VAR_10 = c.get(url_for('oauthclient.login', remote_app='full'))\n", "assert VAR_10.status_code == 302\n", "assert VAR_10.location.startswith(oauth.remote_apps['full'].authorize_url)\n", "self.mock_response(VAR_5='full')\n", "c.get(url_for('oauthclient.authorized', remote_app='full', code='test'))\n", "assert session['oauth_token_full'] == ('test_access_token', '')\n", "VAR_11 = RemoteToken.get(1, 'fullid')\n", "assert VAR_11.remote_account.client_id == 'fullid'\n", "assert VAR_11.access_token == 'test_access_token'\n", "assert RemoteToken.query.count() == 1\n", "self.mock_response(VAR_5='full', VAR_6={'access_token': 'new_access_token',\n 'scope': '', 'token_type': 'bearer'})\n", "c.get(url_for('oauthclient.authorized', remote_app='full', code='test'))\n", "VAR_11 = RemoteToken.get(1, 'fullid')\n", "assert VAR_11.access_token == 'new_access_token'\n", "assert RemoteToken.query.count() == 1\n", "VAR_12 = token_getter(oauth.remote_apps['full'])\n", "assert VAR_12 == ('new_access_token', '')\n", "VAR_10 = c.get(url_for('oauthclient.disconnect', remote_app='full'))\n", "assert VAR_10.status_code == 302\n", "assert VAR_10.location.endswith(url_for('oauthclient_settings.index'))\n", "VAR_11 = RemoteToken.get(1, 'fullid')\n", "assert VAR_11 is None\n" ]
[ "@patch('invenio.ext.session.interface.SessionInterface.save_session')...\n", "from invenio.modules.oauthclient.models import RemoteToken\n", "from invenio.modules.oauthclient.handlers import token_getter\n", "from invenio.modules.oauthclient.client import oauth\n", "user = MagicMock()\n", "user.get_id = MagicMock(return_value=1)\n", "user.is_authenticated = MagicMock(return_value=True)\n", "res = c.get(url_for('oauthclient.login', remote_app='full'))\n", "assert res.status_code == 302\n", "assert res.location.startswith(oauth.remote_apps['full'].authorize_url)\n", "self.mock_response(app='full')\n", "c.get(url_for('oauthclient.authorized', remote_app='full', code='test'))\n", "assert session['oauth_token_full'] == ('test_access_token', '')\n", "t = RemoteToken.get(1, 'fullid')\n", "assert t.remote_account.client_id == 'fullid'\n", "assert t.access_token == 'test_access_token'\n", "assert RemoteToken.query.count() == 1\n", "self.mock_response(app='full', data={'access_token': 'new_access_token',\n 'scope': '', 'token_type': 'bearer'})\n", "c.get(url_for('oauthclient.authorized', remote_app='full', code='test'))\n", "t = RemoteToken.get(1, 'fullid')\n", "assert t.access_token == 'new_access_token'\n", "assert RemoteToken.query.count() == 1\n", "val = token_getter(oauth.remote_apps['full'])\n", "assert val == ('new_access_token', '')\n", "res = c.get(url_for('oauthclient.disconnect', remote_app='full'))\n", "assert res.status_code == 302\n", "assert res.location.endswith(url_for('oauthclient_settings.index'))\n", "t = RemoteToken.get(1, 'fullid')\n", "assert t is None\n" ]
[ 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assert'", "Assert'", "Expr'", "Expr'", "Assert'", "Assign'", "Assert'", "Assert'", "Assert'", "Expr'", "Expr'", "Assign'", "Assert'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "Assert'", "Assign'", "Assert'" ]
[ "def FUNC_1(self):...\n", "self.assertTrue(self.client.login(username='autotest', password='password'))\n" ]
[ "def setUp(self):...\n", "self.assertTrue(self.client.login(username='autotest', password='password'))\n" ]
[ 0, 5 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_1(self):...\n", "return 'check submission %s' % self.submission_id\n" ]
[ "def describe(self):...\n", "return 'check submission %s' % self.submission_id\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2(self, VAR_5, **VAR_6):...\n", "self._cur = self._db.cursor()\n", "self._cur.execute(VAR_5, **args)\n", "VAR_58 = None\n", "self._db.commit()\n", "VAR_58 = self._cur.fetchall()\n", "self._cur.close()\n", "return VAR_58\n" ]
[ "def execute(self, command, **args):...\n", "self._cur = self._db.cursor()\n", "self._cur.execute(command, **args)\n", "final_arr = None\n", "self._db.commit()\n", "final_arr = self._cur.fetchall()\n", "self._cur.close()\n", "return final_arr\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Return'" ]
[ "def __init__(self, VAR_65, VAR_7, *VAR_15, **VAR_16):...\n", "self.cache_prefix = VAR_65\n", "CLASS_0.__init__(self, VAR_7, *VAR_15, **kw)\n" ]
[ "def __init__(self, cache_prefix, param, *a, **kw):...\n", "self.cache_prefix = cache_prefix\n", "Validator.__init__(self, param, *a, **kw)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "@classmethod...\n", "" ]
[ "@classmethod...\n", "" ]
[ 0, 0 ]
[ "Condition", "Condition" ]
[ "def FUNC_56(self):...\n", "self._test_strtype('blob', VAR_13, VAR_23(VAR_13))\n" ]
[ "def t(self):...\n", "self._test_strtype('blob', value, len(value))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "@app.route('/metric/api/v1.0/metric/current')...\n", "VAR_0 = getMetric(request.args.get('fromtime', None), request.args.get(\n 'totime', None), request.args.get('origin', None), request.args.get(\n 'key', None), 1, ('Time', True))\n", "return jsonify({'results': VAR_0, 'resultcount': len(VAR_0)})\n" ]
[ "@app.route('/metric/api/v1.0/metric/current')...\n", "res = getMetric(request.args.get('fromtime', None), request.args.get(\n 'totime', None), request.args.get('origin', None), request.args.get(\n 'key', None), 1, ('Time', True))\n", "return jsonify({'results': res, 'resultcount': len(res)})\n" ]
[ 0, 4, 0 ]
[ "Condition", "Assign'", "Return'" ]
[ "def FUNC_5(VAR_18, VAR_20, VAR_21):...\n", "\"\"\"docstring\"\"\"\n", "VAR_36 = VAR_3 if VAR_18 else VAR_2\n", "return len(VAR_18) >= VAR_1 or VAR_20 - VAR_21 > VAR_36\n" ]
[ "def should_post_update(stdout, now, last_packet):...\n", "\"\"\"docstring\"\"\"\n", "packet_interval = MIN_PACKET_INTERNAL if stdout else MAX_PACKET_INTERVAL\n", "return len(stdout) >= MAX_CHUNK_SIZE or now - last_packet > packet_interval\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_4(self, VAR_7, VAR_12):...\n", "\"\"\"docstring\"\"\"\n", "VAR_17 = []\n", "VAR_2 = VAR_1.view_submit(VAR_7)\n", "for entry in VAR_12['file_selection']:\n", "VAR_10 = copy.deepcopy(VAR_12['global'])\n", "return VAR_17\n", "VAR_10.update(entry)\n", "VAR_11 = copy.deepcopy(VAR_12['global']['options'])\n", "VAR_11.update(entry.get('per_file_options', {}))\n", "VAR_19 = {'package': VAR_10.get('package'), 'timeout': VAR_10.get('timeout',\n 120), 'priority': VAR_10.get('priority'), 'custom': VAR_10.get('custom'\n ), 'owner': VAR_10.get('owner'), 'tags': VAR_10.get('tags'), 'memory':\n VAR_10.get('memory'), 'enforce_timeout': VAR_11.get('enforce-timeout'),\n 'machine': VAR_10.get('machine'), 'platform': VAR_10.get('platform'),\n 'options': self.translate_options(VAR_10, VAR_11), 'submit_id': VAR_7}\n", "if entry['type'] == 'url':\n", "VAR_17.append(VAR_1.add_url(url=info['filename'], **kw))\n", "VAR_20 = Folders.create_temp()\n", "if not VAR_10['extrpath']:\n", "VAR_24 = os.path.join(VAR_2.tmp_path, os.path.basename(VAR_10['filename']))\n", "if len(VAR_10['extrpath']) == 1:\n", "VAR_18 = Files.copy(VAR_24, VAR_20=path_dest)\n", "VAR_25 = os.path.join(VAR_2.tmp_path, os.path.basename(VAR_10['arcname']))\n", "VAR_25 = os.path.join(VAR_2.tmp_path, os.path.basename(VAR_10['arcname']))\n", "VAR_17.append(VAR_1.add_path(file_path=filepath, **kw))\n", "if not os.path.exists(VAR_25):\n", "if not os.path.exists(VAR_25):\n", "VAR_2.data['errors'].append('Unable to find parent archive file: %s' % os.\n path.basename(VAR_10['arcname']))\n", "VAR_26 = sflock.zipify(sflock.unpack(VAR_10['arcname'], contents=open(\n arcpath, 'rb').read()))\n", "VAR_2.data['errors'].append('Unable to find parent archive file: %s' % os.\n path.basename(VAR_10['arcname']))\n", "VAR_27 = sflock.unpack(VAR_25).read(VAR_10['extrpath'][:-1])\n", "VAR_25 = Files.temp_named_put(VAR_26, os.path.basename(VAR_10['arcname']))\n", "VAR_28 = sflock.unpack(VAR_10['extrpath'][-2], contents=content)\n", "VAR_17.append(VAR_1.add_archive(file_path=arcpath, VAR_22=info['filename'],\n **kw))\n", "VAR_25 = Files.temp_named_put(sflock.zipify(VAR_28), os.path.basename(\n VAR_10['extrpath'][-2]))\n", "VAR_17.append(VAR_1.add_archive(file_path=arcpath, VAR_22=info['filename'],\n **kw))\n" ]
[ "def submit(self, submit_id, config):...\n", "\"\"\"docstring\"\"\"\n", "ret = []\n", "submit = db.view_submit(submit_id)\n", "for entry in config['file_selection']:\n", "info = copy.deepcopy(config['global'])\n", "return ret\n", "info.update(entry)\n", "options = copy.deepcopy(config['global']['options'])\n", "options.update(entry.get('per_file_options', {}))\n", "kw = {'package': info.get('package'), 'timeout': info.get('timeout', 120),\n 'priority': info.get('priority'), 'custom': info.get('custom'), 'owner':\n info.get('owner'), 'tags': info.get('tags'), 'memory': info.get(\n 'memory'), 'enforce_timeout': options.get('enforce-timeout'), 'machine':\n info.get('machine'), 'platform': info.get('platform'), 'options': self.\n translate_options(info, options), 'submit_id': submit_id}\n", "if entry['type'] == 'url':\n", "ret.append(db.add_url(url=info['filename'], **kw))\n", "path_dest = Folders.create_temp()\n", "if not info['extrpath']:\n", "path = os.path.join(submit.tmp_path, os.path.basename(info['filename']))\n", "if len(info['extrpath']) == 1:\n", "filepath = Files.copy(path, path_dest=path_dest)\n", "arcpath = os.path.join(submit.tmp_path, os.path.basename(info['arcname']))\n", "arcpath = os.path.join(submit.tmp_path, os.path.basename(info['arcname']))\n", "ret.append(db.add_path(file_path=filepath, **kw))\n", "if not os.path.exists(arcpath):\n", "if not os.path.exists(arcpath):\n", "submit.data['errors'].append('Unable to find parent archive file: %s' % os.\n path.basename(info['arcname']))\n", "arc = sflock.zipify(sflock.unpack(info['arcname'], contents=open(arcpath,\n 'rb').read()))\n", "submit.data['errors'].append('Unable to find parent archive file: %s' % os.\n path.basename(info['arcname']))\n", "content = sflock.unpack(arcpath).read(info['extrpath'][:-1])\n", "arcpath = Files.temp_named_put(arc, os.path.basename(info['arcname']))\n", "subarc = sflock.unpack(info['extrpath'][-2], contents=content)\n", "ret.append(db.add_archive(file_path=arcpath, filename=info['filename'], **kw))\n", "arcpath = Files.temp_named_put(sflock.zipify(subarc), os.path.basename(info\n ['extrpath'][-2]))\n", "ret.append(db.add_archive(file_path=arcpath, filename=info['filename'], **kw))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "For", "Assign'", "Return'", "Expr'", "Assign'", "Expr'", "Assign'", "Condition", "Expr'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Condition", "Condition", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "from re import compile, UNICODE\n", "from Acquisition import aq_base\n", "from unidecode import unidecode\n", "from collective.solr.interfaces import ISolrSchema\n", "from zope.component import getUtility\n", "from plone.registry.interfaces import IRegistry\n", "import six\n", "from six.moves import range\n", "if hasattr(str, 'maketrans'):\n", "VAR_14 = str.maketrans\n", "from string import maketrans\n", "def FUNC_0():...\n", "VAR_15 = getUtility(IRegistry)\n", "return VAR_15.forInterface(ISolrSchema, prefix='collective.solr')\n" ]
[ "from re import compile, UNICODE\n", "from Acquisition import aq_base\n", "from unidecode import unidecode\n", "from collective.solr.interfaces import ISolrSchema\n", "from zope.component import getUtility\n", "from plone.registry.interfaces import IRegistry\n", "import six\n", "from six.moves import range\n", "if hasattr(str, 'maketrans'):\n", "maketrans = str.maketrans\n", "from string import maketrans\n", "def getConfig():...\n", "registry = getUtility(IRegistry)\n", "return registry.forInterface(ISolrSchema, prefix='collective.solr')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Import'", "ImportFrom'", "Condition", "Assign'", "ImportFrom'", "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_2(self):...\n", "return self._doc['collections'].keys()\n" ]
[ "def get_collection_names(self):...\n", "return self._doc['collections'].keys()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@property...\n", "" ]
[ "@property...\n", "" ]
[ 0, 0 ]
[ "Condition", "Condition" ]
[ "def FUNC_1(VAR_8, VAR_2):...\n", "\"\"\"docstring\"\"\"\n", "VAR_8 = os.path.abspath(VAR_8)\n", "VAR_12 = FUNC_0(VAR_2, VAR_7='GET')\n", "if VAR_12 is None:\n", "return False\n", "f.write(VAR_12)\n", "logging.error(\"\"\"Failed to write to %s\n%s\"\"\", VAR_8, e)\n", "return True\n", "return False\n" ]
[ "def DownloadFile(local_file, url):...\n", "\"\"\"docstring\"\"\"\n", "local_file = os.path.abspath(local_file)\n", "url_data = UrlOpen(url, method='GET')\n", "if url_data is None:\n", "return False\n", "f.write(url_data)\n", "logging.error(\"\"\"Failed to write to %s\n%s\"\"\", local_file, e)\n", "return True\n", "return False\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Condition", "Return'", "Expr'", "Expr'", "Return'", "Return'" ]
[ "def FUNC_4(VAR_7):...\n", "\"\"\"docstring\"\"\"\n", "for directory in os.getenv('PATH').split(':'):\n", "if os.path.exists(os.path.join(directory, VAR_7)):\n", "return False\n", "return True\n" ]
[ "def executable_exists(executable):...\n", "\"\"\"docstring\"\"\"\n", "for directory in os.getenv('PATH').split(':'):\n", "if os.path.exists(os.path.join(directory, executable)):\n", "return False\n", "return True\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "For", "Condition", "Return'", "Return'" ]
[ "def FUNC_34(self):...\n", "return self.replace(self.dynamic_fill, '{*}')\n" ]
[ "def format_dynamic(self):...\n", "return self.replace(self.dynamic_fill, '{*}')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_5(VAR_17):...\n", "\"\"\"docstring\"\"\"\n", "VAR_23 = {}\n", "VAR_46 = reader.KeywordToken\n", "for VAR_76, v in acronymer.get_acronyms(VAR_17).items():\n", "VAR_23[VAR_46(VAR_76, type='acronym')] = v\n", "return VAR_23\n" ]
[ "def extract_abbreviations(fulltext):...\n", "\"\"\"docstring\"\"\"\n", "acronyms = {}\n", "K = reader.KeywordToken\n", "for k, v in acronymer.get_acronyms(fulltext).items():\n", "acronyms[K(k, type='acronym')] = v\n", "return acronyms\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "For", "Assign'", "Return'" ]
[ "def FUNC_12(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_7, VAR_12 = self.make_xsrf_handling_app()\n", "VAR_17 = VAR_7.get('/request').body\n", "VAR_7.post('/request?xsrf_token=%s' % VAR_17)\n", "self.assertEqual([('POST', True)], VAR_12)\n" ]
[ "def test_xsrf_token_get_param(self):...\n", "\"\"\"docstring\"\"\"\n", "app, calls = self.make_xsrf_handling_app()\n", "token = app.get('/request').body\n", "app.post('/request?xsrf_token=%s' % token)\n", "self.assertEqual([('POST', True)], calls)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_0(VAR_1):...\n", "@functools.wraps(VAR_1)...\n", "if g.user is None:\n", "return redirect(url_for('auth.login'))\n", "if g.user['admin'] != 1:\n", "return redirect(url_for('blog.feedpage', VAR_3=0))\n", "return VAR_1(**kwargs)\n" ]
[ "def admin_required(view):...\n", "@functools.wraps(view)...\n", "if g.user is None:\n", "return redirect(url_for('auth.login'))\n", "if g.user['admin'] != 1:\n", "return redirect(url_for('blog.feedpage', page=0))\n", "return view(**kwargs)\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Condition", "Return'", "Condition", "Return'", "Return'" ]
[ "from saker.fuzzers.fuzzer import Fuzzer\n", "\"\"\"CmdInjection\"\"\"\n", "def __init__(self):...\n", "super(CLASS_0, self).__init__()\n", "@staticmethod...\n", "return ['|id', \"=cmd|'cmd'!''\", ';id', '\\n\\rid', '`id`', '${id}', '\\x00`id`']\n" ]
[ "from saker.fuzzers.fuzzer import Fuzzer\n", "\"\"\"CmdInjection\"\"\"\n", "def __init__(self):...\n", "super(CmdInjection, self).__init__()\n", "@staticmethod...\n", "return ['|id', \"=cmd|'cmd'!''\", ';id', '\\n\\rid', '`id`', '${id}', '\\x00`id`']\n" ]
[ 0, 0, 0, 0, 2, 2 ]
[ "ImportFrom'", "Expr'", "FunctionDef'", "Expr'", "Condition", "Return'" ]
[ "def __init__(self, VAR_5, VAR_6):...\n", "tk.Frame.__init__(self, VAR_5)\n", "VAR_12 = tk.Label(self, text='Login Page', font=controller.titleFont)\n", "VAR_12.pack(pady=10, padx=10)\n", "VAR_13 = tk.Button(self, text='Start Using Raspi-Telxon!', font=controller.\n itemFont, command=lambda : controller.show_frame(SearchPage))\n", "VAR_13.pack(pady=5)\n", "VAR_14 = tk.Button(self, text='Quit', font=controller.itemFont, command=lambda\n : sys.exit(0))\n", "VAR_14.pack(pady=5)\n" ]
[ "def __init__(self, parent, controller):...\n", "tk.Frame.__init__(self, parent)\n", "label = tk.Label(self, text='Login Page', font=controller.titleFont)\n", "label.pack(pady=10, padx=10)\n", "enterAppButton = tk.Button(self, text='Start Using Raspi-Telxon!', font=\n controller.itemFont, command=lambda : controller.show_frame(SearchPage))\n", "enterAppButton.pack(pady=5)\n", "exitAppButton = tk.Button(self, text='Quit', font=controller.itemFont,\n command=lambda : sys.exit(0))\n", "exitAppButton.pack(pady=5)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_24(self, VAR_21):...\n", "\"\"\"docstring\"\"\"\n", "VAR_57 = self.getfile(VAR_21, VAR_23=False)\n", "if VAR_57 == False:\n", "self.get_path(os.path.dirname(VAR_21)).remove(VAR_57)\n", "return\n" ]
[ "def remove(self, path):...\n", "\"\"\"docstring\"\"\"\n", "p = self.getfile(path, follow_symlinks=False)\n", "if p == False:\n", "self.get_path(os.path.dirname(path)).remove(p)\n", "return\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Expr'", "Return'" ]
[ "def FUNC_18(VAR_32, VAR_33, VAR_5=False):...\n", "\"\"\"docstring\"\"\"\n", "VAR_60 = {}\n", "VAR_52 = {}\n", "for VAR_93, _ in VAR_32:\n", "for VAR_83 in VAR_93.fieldcodes:\n", "for VAR_94, _ in VAR_33:\n", "VAR_60.setdefault(VAR_83, set()).add(VAR_93.output(VAR_5))\n", "if len(VAR_94.fieldcodes):\n", "for VAR_83, VAR_36 in VAR_60.items():\n", "for VAR_83 in VAR_94.fieldcodes:\n", "for VAR_62 in VAR_94.getComponents():\n", "VAR_52[VAR_83] = ', '.join(VAR_36)\n", "return VAR_52\n", "VAR_60.setdefault(VAR_83, set()).add(VAR_94.output(VAR_5))\n", "for VAR_83 in VAR_62.fieldcodes:\n", "VAR_60.setdefault(VAR_83, set()).add('%s*' % VAR_94.output(VAR_5))\n", "VAR_60.setdefault('*', set()).add(VAR_62.output(VAR_5))\n" ]
[ "def _get_fieldcodes(skw_matches, ckw_matches, spires=False):...\n", "\"\"\"docstring\"\"\"\n", "fieldcodes = {}\n", "output = {}\n", "for skw, _ in skw_matches:\n", "for fieldcode in skw.fieldcodes:\n", "for ckw, _ in ckw_matches:\n", "fieldcodes.setdefault(fieldcode, set()).add(skw.output(spires))\n", "if len(ckw.fieldcodes):\n", "for fieldcode, keywords in fieldcodes.items():\n", "for fieldcode in ckw.fieldcodes:\n", "for kw in ckw.getComponents():\n", "output[fieldcode] = ', '.join(keywords)\n", "return output\n", "fieldcodes.setdefault(fieldcode, set()).add(ckw.output(spires))\n", "for fieldcode in kw.fieldcodes:\n", "fieldcodes.setdefault(fieldcode, set()).add('%s*' % ckw.output(spires))\n", "fieldcodes.setdefault('*', set()).add(kw.output(spires))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "For", "For", "For", "Expr'", "Condition", "For", "For", "For", "Assign'", "Return'", "Expr'", "For", "Expr'", "Expr'" ]
[ "def FUNC_18(self, VAR_3):...\n", "print('ERROR: database connection is down (error: {0})'.format(VAR_3.value))\n", "return DeadConnectionDetector.startReconnecting(self, VAR_3)\n" ]
[ "def startReconnecting(self, f):...\n", "print('ERROR: database connection is down (error: {0})'.format(f.value))\n", "return DeadConnectionDetector.startReconnecting(self, f)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Return'" ]
[ "def FUNC_20(self, VAR_25, VAR_26, *VAR_1, **VAR_2):...\n", "self.ctx = VAR_25\n", "self.sig_addr = VAR_26\n", "threading.Thread.start(self, *VAR_1, **kvargs)\n" ]
[ "def start(self, ctx, sig_addr, *args, **kvargs):...\n", "self.ctx = ctx\n", "self.sig_addr = sig_addr\n", "threading.Thread.start(self, *args, **kvargs)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'" ]
[ "def __init__(self, *VAR_11, **VAR_12):...\n", "super(CLASS_0, self).__init__(*VAR_11, **kwargs)\n", "self.set_distribution(jdk=True)\n" ]
[ "def __init__(self, *args, **kwargs):...\n", "super(JavacCompile, self).__init__(*args, **kwargs)\n", "self.set_distribution(jdk=True)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'" ]
[ "def FUNC_18(VAR_5, VAR_6):...\n", "VAR_8 = {'Authorization': 'token ' + os.environ['GITHUB_TOKEN']}\n", "VAR_9 = os.environ['BOT_USERNAME'], os.environ['BOT_PASSWORD']\n", "VAR_11 = requests.get(VAR_5['diff_url'], VAR_8=headers, VAR_9=auth)\n", "VAR_20 = unidiff.PatchSet(VAR_11.content.splitlines(), encoding=r.encoding)\n", "VAR_21 = {}\n", "for patchset in VAR_20:\n", "if patchset.target_file[-3:] == '.py':\n", "VAR_32 = ','.join(VAR_6['pycodestyle']['ignore'])\n", "VAR_60 = patchset.target_file[1:]\n", "VAR_33 = ''\n", "VAR_21[VAR_60] = []\n", "if len(VAR_32) > 0:\n", "for hunk in patchset:\n", "VAR_33 = '--ignore ' + VAR_32\n", "for VAR_47 in VAR_21:\n", "for VAR_66 in hunk.target_lines():\n", "VAR_48 = VAR_47[1:]\n", "if VAR_66.is_added:\n", "VAR_10 = 'https://raw.githubusercontent.com/{}/{}/{}'\n", "VAR_21[VAR_60].append(VAR_66.target_line_no)\n", "VAR_10 = VAR_10.format(VAR_5['repository'], VAR_5['sha'], VAR_47)\n", "VAR_11 = requests.get(VAR_10, VAR_8=headers, VAR_9=auth)\n", "file_to_fix.write(VAR_11.text)\n", "VAR_49 = 'autopep8 file_to_fix.py {arg_to_ignore}'.format(VAR_33=arg_to_ignore)\n", "VAR_50 = subprocess.Popen(VAR_49, shell=True, VAR_51=subprocess.PIPE)\n", "VAR_51, VAR_52 = VAR_50.communicate()\n", "VAR_5['results'][VAR_48] = VAR_51.decode(VAR_11.encoding)\n", "os.remove('file_to_fix.py')\n" ]
[ "def autopep8ify(data, config):...\n", "headers = {'Authorization': 'token ' + os.environ['GITHUB_TOKEN']}\n", "auth = os.environ['BOT_USERNAME'], os.environ['BOT_PASSWORD']\n", "r = requests.get(data['diff_url'], headers=headers, auth=auth)\n", "patch = unidiff.PatchSet(r.content.splitlines(), encoding=r.encoding)\n", "py_files = {}\n", "for patchset in patch:\n", "if patchset.target_file[-3:] == '.py':\n", "to_ignore = ','.join(config['pycodestyle']['ignore'])\n", "py_file = patchset.target_file[1:]\n", "arg_to_ignore = ''\n", "py_files[py_file] = []\n", "if len(to_ignore) > 0:\n", "for hunk in patchset:\n", "arg_to_ignore = '--ignore ' + to_ignore\n", "for file in py_files:\n", "for line in hunk.target_lines():\n", "filename = file[1:]\n", "if line.is_added:\n", "url = 'https://raw.githubusercontent.com/{}/{}/{}'\n", "py_files[py_file].append(line.target_line_no)\n", "url = url.format(data['repository'], data['sha'], file)\n", "r = requests.get(url, headers=headers, auth=auth)\n", "file_to_fix.write(r.text)\n", "cmd = 'autopep8 file_to_fix.py {arg_to_ignore}'.format(arg_to_ignore=\n arg_to_ignore)\n", "proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)\n", "stdout, _ = proc.communicate()\n", "data['results'][filename] = stdout.decode(r.encoding)\n", "os.remove('file_to_fix.py')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "For", "Assign'", "For", "For", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_7(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_1 = self.bindings\n", "VAR_7 = VAR_1['TEST_APP_COMPONENT_NAME']\n", "VAR_15 = '{app}-{stack}-v000'.format(app=self.TEST_APP, stack=bindings[\n 'TEST_STACK'])\n", "VAR_8 = VAR_1['TEST_AWS_REGION']\n", "VAR_9 = [VAR_8 + 'a', VAR_8 + 'b']\n", "VAR_12 = self.agent.make_json_payload_from_kwargs(job=[{'type':\n 'createServerGroup', 'cloudProvider': 'aws', 'application': self.\n TEST_APP, 'credentials': bindings['AWS_CREDENTIALS'], 'strategy': '',\n 'capacity': {'min': 2, 'max': 2, 'desired': 2},\n 'targetHealthyDeployPercentage': 100, 'loadBalancers': [\n load_balancer_name], 'cooldown': 8, 'healthCheckType': 'EC2',\n 'healthCheckGracePeriod': 40, 'instanceMonitoring': False,\n 'ebsOptimized': False, 'iamRole': bindings['AWS_IAM_ROLE'],\n 'terminationPolicies': ['Default'], 'availabilityZones': {region:\n avail_zones}, 'keyPair': bindings['AWS_CREDENTIALS'] + '-keypair',\n 'suspendedProcesses': [], 'subnetType': 'internal (defaultvpc)',\n 'securityGroups': [bindings['TEST_AWS_SECURITY_GROUP_ID']],\n 'virtualizationType': 'paravirtual', 'stack': bindings['TEST_STACK'],\n 'freeFormDetails': '', 'amiName': bindings['TEST_AWS_AMI'],\n 'instanceType': 'm1.small', 'useSourceCapacity': False, 'account':\n bindings['AWS_CREDENTIALS'], 'user': '[anonymous]'}], description=\n 'Create Server Group in ' + group_name, application=self.TEST_APP)\n", "VAR_13 = aws.AwsContractBuilder(self.aws_observer)\n", "VAR_13.new_clause_builder('Auto Server Group Added', retryable_for_secs=30\n ).collect_resources('autoscaling', 'describe-auto-scaling-groups', args\n =['--auto-scaling-group-names', group_name]).contains_path_value(\n 'AutoScalingGroups', {'MaxSize': 2})\n", "return st.OperationContract(self.new_post_operation(title=\n 'create_server_group', data=payload, path='tasks'), VAR_6=builder.build())\n" ]
[ "def create_server_group(self):...\n", "\"\"\"docstring\"\"\"\n", "bindings = self.bindings\n", "load_balancer_name = bindings['TEST_APP_COMPONENT_NAME']\n", "group_name = '{app}-{stack}-v000'.format(app=self.TEST_APP, stack=bindings[\n 'TEST_STACK'])\n", "region = bindings['TEST_AWS_REGION']\n", "avail_zones = [region + 'a', region + 'b']\n", "payload = self.agent.make_json_payload_from_kwargs(job=[{'type':\n 'createServerGroup', 'cloudProvider': 'aws', 'application': self.\n TEST_APP, 'credentials': bindings['AWS_CREDENTIALS'], 'strategy': '',\n 'capacity': {'min': 2, 'max': 2, 'desired': 2},\n 'targetHealthyDeployPercentage': 100, 'loadBalancers': [\n load_balancer_name], 'cooldown': 8, 'healthCheckType': 'EC2',\n 'healthCheckGracePeriod': 40, 'instanceMonitoring': False,\n 'ebsOptimized': False, 'iamRole': bindings['AWS_IAM_ROLE'],\n 'terminationPolicies': ['Default'], 'availabilityZones': {region:\n avail_zones}, 'keyPair': bindings['AWS_CREDENTIALS'] + '-keypair',\n 'suspendedProcesses': [], 'subnetType': 'internal (defaultvpc)',\n 'securityGroups': [bindings['TEST_AWS_SECURITY_GROUP_ID']],\n 'virtualizationType': 'paravirtual', 'stack': bindings['TEST_STACK'],\n 'freeFormDetails': '', 'amiName': bindings['TEST_AWS_AMI'],\n 'instanceType': 'm1.small', 'useSourceCapacity': False, 'account':\n bindings['AWS_CREDENTIALS'], 'user': '[anonymous]'}], description=\n 'Create Server Group in ' + group_name, application=self.TEST_APP)\n", "builder = aws.AwsContractBuilder(self.aws_observer)\n", "builder.new_clause_builder('Auto Server Group Added', retryable_for_secs=30\n ).collect_resources('autoscaling', 'describe-auto-scaling-groups', args\n =['--auto-scaling-group-names', group_name]).contains_path_value(\n 'AutoScalingGroups', {'MaxSize': 2})\n", "return st.OperationContract(self.new_post_operation(title=\n 'create_server_group', data=payload, path='tasks'), contract=builder.\n build())\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_6(self, VAR_8):...\n", "VAR_0.info('Handling monthly repeating event')\n", "VAR_16 = datetime.strptime(VAR_8.date_end, '%Y-%m-%d %H:%M:00')\n", "VAR_17 = datetime.now()\n", "if VAR_17 >= VAR_16:\n", "VAR_10 = datetime.strptime(VAR_8.date_begin, '%Y-%m-%d %H:%M:00'\n ) + relativedelta(months=+1)\n", "VAR_9 = VAR_16 + relativedelta(months=+1)\n", "if self._event_does_not_exist(VAR_8, VAR_9):\n", "self._create_new_event(VAR_8, VAR_10, VAR_9)\n" ]
[ "def _handle_monthly_event_repetition(self, old_repeating_event):...\n", "_logger.info('Handling monthly repeating event')\n", "end_date = datetime.strptime(old_repeating_event.date_end, '%Y-%m-%d %H:%M:00')\n", "present = datetime.now()\n", "if present >= end_date:\n", "new_start_date = datetime.strptime(old_repeating_event.date_begin,\n '%Y-%m-%d %H:%M:00') + relativedelta(months=+1)\n", "new_end_date = end_date + relativedelta(months=+1)\n", "if self._event_does_not_exist(old_repeating_event, new_end_date):\n", "self._create_new_event(old_repeating_event, new_start_date, new_end_date)\n" ]
[ 0, 0, 0, 0, 0, 6, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Expr'" ]
[ "def FUNC_6(self, VAR_64):...\n", "if not VAR_64:\n", "return None\n", "VAR_88 = [int(i, 36) for i in VAR_64.split(',')]\n", "VAR_89 = Comment._byID(VAR_88, data=True, return_dict=False)\n", "return VAR_89\n" ]
[ "def run(self, id_str):...\n", "if not id_str:\n", "return None\n", "cids = [int(i, 36) for i in id_str.split(',')]\n", "comments = Comment._byID(cids, data=True, return_dict=False)\n", "return comments\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Assign'", "Assign'", "Return'" ]
[ "@VAR_2.route('/tournament_losses')...\n", "if VAR_0 == None:\n", "FUNC_16()\n", "VAR_5 = request.args.get('tag', default=None)\n", "VAR_19 = request.args.get('date', default=None)\n", "if VAR_5 and VAR_19:\n", "VAR_8 = 'string'.format(VAR_5, VAR_5, VAR_19)\n", "return ''\n", "VAR_7 = VAR_0.exec(VAR_8)\n", "VAR_8 = 'string'.format(VAR_5, VAR_5, VAR_19)\n", "VAR_7 = VAR_7 + VAR_0.exec(VAR_8)\n", "VAR_7 = [r for r in VAR_7]\n", "VAR_7.sort(key=lambda x: int(x[1]))\n", "def FUNC_18(VAR_37):...\n", "VAR_37 = VAR_37.replace('[', '')\n", "VAR_37 = VAR_37.replace(']', '')\n", "VAR_45, VAR_46 = VAR_37.split(',')\n", "VAR_37 = '{} - {}'.format(VAR_45, VAR_46)\n", "return VAR_37\n" ]
[ "@endpoints.route('/tournament_losses')...\n", "if db == None:\n", "init()\n", "tag = request.args.get('tag', default=None)\n", "date = request.args.get('date', default=None)\n", "if tag and date:\n", "sql = (\n \"select player1, place, date, score from matches join placings on matches.url=placings.url and matches.player1=placings.player where winner!='{}' and player2='{}' and date='{}';\"\n .format(tag, tag, date))\n", "return ''\n", "data = db.exec(sql)\n", "sql = (\n \"select player2, place, date, score from matches join placings on matches.url=placings.url and matches.player2=placings.player where winner!='{}' and player1='{}' and date='{}';\"\n .format(tag, tag, date))\n", "data = data + db.exec(sql)\n", "data = [r for r in data]\n", "data.sort(key=lambda x: int(x[1]))\n", "def reformat(score):...\n", "score = score.replace('[', '')\n", "score = score.replace(']', '')\n", "win, loss = score.split(',')\n", "score = '{} - {}'.format(win, loss)\n", "return score\n" ]
[ 0, 0, 0, 0, 0, 0, 4, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]