response
stringlengths
1
33.1k
instruction
stringlengths
22
582k
Get the result.
def get_result():
  """Build the paginated upload-history listing plus its query params."""
  params = dict(request.iterparams())
  page = helpers.cast(
      request.get('page') or 1, int, "'page' is not an int.")

  query = datastore_query.Query(data_types.TestcaseUploadMetadata)
  query.order('timestamp', is_desc=True)

  # Non-privileged users may only see their own uploads.
  if not access.has_access(need_privileged_access=True):
    query.filter('uploader_email', helpers.get_user_email())
    params['permission'] = {'uploaderEmail': helpers.get_user_email()}

  entities, total_pages, total_items, has_more = query.fetch_page(
      page=page, page_size=PAGE_SIZE, projection=None, more_limit=MORE_LIMIT)

  items = [{
      'timestamp': utils.utc_datetime_to_timestamp(entity.timestamp),
      'testcaseId': entity.testcase_id,
      'uploaderEmail': entity.uploader_email,
      'filename': entity.filename,
      'bundled': entity.bundled,
      'pathInArchive': entity.path_in_archive,
      'status': entity.status
  } for entity in entities]
  attach_testcases(items)

  result = {
      'hasMore': has_more,
      'items': items,
      'page': page,
      'pageSize': PAGE_SIZE,
      'totalItems': total_items,
      'totalPages': total_pages,
  }
  return result, params
Return a bytesio representing a GCS object.
def _read_to_bytesio(gcs_path):
  """Return an in-memory binary stream for the GCS object at |gcs_path|.

  Raises:
    helpers.EarlyExitError: if the object could not be read.
  """
  contents = storage.read_data(gcs_path)
  if not contents:
    raise helpers.EarlyExitError('Failed to read uploaded archive.', 500)

  return io.BytesIO(contents)
Guess the main test case file from an archive.
def guess_input_file(uploaded_file, filename):
  """Guess the main test case file from an archive."""
  # NOTE: the archive contents are re-fetched for each pattern tried.
  for pattern in RUN_FILE_PATTERNS:
    stream = _read_to_bytesio(uploaded_file.gcs_path)
    with archive.open(filename, stream) as reader:
      candidate = reader.get_first_file_matching(pattern)
      if candidate:
        return candidate

  return None
Start a query for an associated testcase.
def query_testcase(testcase_id):
  """Start a query for an associated testcase."""
  if not testcase_id:
    return []

  key = ndb.Key(data_types.Testcase, testcase_id)
  projection = [
      'crash_type',
      'crash_state',
      'security_flag',
      'bug_information',
      'job_type',
      'fuzzer_name',
      'overridden_fuzzer_name',
      'project_name',
  ]
  # Iterate at most one matching entity with only the projected fields.
  return data_types.Testcase.query(
      data_types.Testcase.key == key).iter(limit=1, projection=projection)
Filter target names for a fuzzer and remove parent fuzzer prefixes.
def filter_target_names(targets, engine):
  """Filter target names for a fuzzer and remove parent fuzzer prefixes."""
  prefix = engine + '_'
  stripped = []
  for target in targets:
    if target.startswith(prefix):
      # Drop the '<engine>_' prefix, keeping the bare target name.
      stripped.append(target[len(prefix):])
  return stripped
Filter out fuzzers such that only blackbox fuzzers are included.
def filter_blackbox_fuzzers(fuzzers):
  """Filter out fuzzers such that only blackbox fuzzers are included."""
  # str.startswith accepts a tuple of prefixes, so one call covers all engines.
  engine_prefixes = tuple(fuzzing.ENGINES)
  return [name for name in fuzzers if not name.startswith(engine_prefixes)]
Return fuzz target values given the engine, target name (which may or may not be prefixed with project), and job.
def find_fuzz_target(engine, target_name, job_name):
  """Return fuzz target values given the engine, target name (which may or
  may not be prefixed with project), and job."""
  project = data_handler.get_project_name(job_name)
  qualified_name = data_types.fuzz_target_fully_qualified_name(
      engine, project, target_name)

  target = data_handler.get_fuzz_target(qualified_name)
  if not target:
    raise helpers.EarlyExitError('Fuzz target does not exist.', 400)

  return target.fully_qualified_name(), target.binary
Returns whether or not the provided testcase metadata can be set by an unprivileged user.
def _allow_unprivileged_metadata(testcase_metadata):
  """Returns whether or not the provided testcase metadata can be set by an
  unprivileged user."""
  if utils.is_oss_fuzz():
    # Labels in OSS-Fuzz are privileged and control things like disclosure
    # deadlines. Do not let these be editable.
    return False

  # Allow *only* issue labels to be set: exactly one entry, and it must be
  # 'issue_labels'.
  return 'issue_labels' in testcase_metadata and len(testcase_metadata) == 1
Get slots for crash stats.
def get_result(testcase, end, block, days, group_by):
  """Get slots for crash stats."""
  query = crash_stats.Query()
  query.group_by = group_by
  query.sort_by = 'total_count'
  query.set_time_params(end, days, block)

  # Restrict stats to crashes matching this testcase's signature.
  for field in ('crash_type', 'crash_state', 'security_flag'):
    query.filter(field, getattr(testcase, field))

  _, rows = crash_stats.get(query, crash_stats.Query(), 0, 1)
  if rows:
    return rows[0]

  return {'end': end, 'days': days, 'block': block, 'groups': []}
Get testcase file in the binary form.
def get_testcase_blob_info(testcase):
  """Get testcase file in the binary form.

  Prefers the minimized testcase; falls back to the original fuzzed one.
  """
  using_minimized_keys = True
  blob_key = testcase.minimized_keys
  if not blob_key or blob_key == 'NA':
    using_minimized_keys = False
    blob_key = testcase.fuzzed_keys

  if not blob_key:
    raise helpers.EarlyExitError(
        "The testcase (%d) doesn't have fuzzed keys." % testcase.key.id(), 400)

  blob_key = str(urllib.parse.unquote(blob_key))
  return blobs.get_blob_info(blob_key), using_minimized_keys
Get testcase file and write it to the handler.
def get(self):
  """Get testcase file and write it to the handler."""
  testcase = access.check_access_and_get_testcase(request.get('id'))
  blob_info, _ = get_testcase_blob_info(testcase)

  # Download name keeps the testcase id plus the tail of the original
  # filename (truncated to the preview length).
  save_as_filename = 'testcase-%s-%s' % (
      testcase.key.id(), blob_info.filename[-PREVIEW_BLOB_FILENAME_LENTGH:])
  content_disposition = str('attachment; filename=%s' % save_as_filename)
  return self.serve_gcs_object(blob_info.bucket, blob_info.object_path,
                               content_disposition)
Mark the testcase as fixed.
def mark(testcase):
  """Mark the testcase as fixed and close it."""
  testcase.open = False
  testcase.fixed = 'Yes'
  testcase.put()

  helpers.log('Marked testcase %s as fixed' % testcase.key.id(),
              helpers.MODIFY_OPERATION)
  return testcase
Mark the testcase as security-related.
def mark(testcase, security, severity):
  """Mark the testcase as security-related (or not)."""
  testcase.security_flag = security
  if security:
    # Analyze a severity if the caller did not provide one explicitly.
    if not severity:
      severity = severity_analyzer.get_security_severity(
          testcase.crash_type, testcase.crash_stacktrace, testcase.job_type,
          bool(testcase.gestures))
    testcase.security_severity = severity
    bisection.request_bisection(testcase)
  else:
    # The bisection infrastructure only cares about security bugs. If this was
    # marked as non-security, mark it as invalid.
    bisection.notify_bisection_invalid(testcase)

  testcase.put()
  helpers.log(
      f'Set security flags on testcase {testcase.key.id()} to {security}.',
      helpers.MODIFY_OPERATION)
Mark the testcase as unconfirmed.
def mark(testcase):
  """Mark the testcase as unconfirmed (a one-time crasher)."""
  testcase.one_time_crasher_flag = True

  # Any still-pending task results become 'NA' for a one-time crasher.
  if not testcase.fixed:
    testcase.fixed = 'NA'
  if not testcase.regression:
    testcase.regression = 'NA'
  if not testcase.minimized_keys:
    testcase.minimized_keys = 'NA'

  testcase.put()
  helpers.log('Marked testcase %s as unconfirmed' % testcase.key.id(),
              helpers.MODIFY_OPERATION)
Remove duplicate status from a test case.
def remove(testcase):
  """Remove duplicate status from a test case."""
  testcase.duplicate_of = None
  testcase.status = 'Processed'
  testcase.put()

  helpers.log('Removed duplicate status for testcase %s' % testcase.key.id(),
              helpers.MODIFY_OPERATION)
Remove the testcase from a group.
def remove_group(testcase_id):
  """Remove the testcase from a group."""
  testcase = helpers.get_testcase(testcase_id)
  # Capture the group id before removal so the log message is accurate.
  group_id = testcase.group_id

  data_handler.remove_testcase_from_group(testcase)
  helpers.log(
      'Removed the testcase %s from the group %s' % (testcase.key.id(),
                                                     group_id),
      helpers.MODIFY_OPERATION)
  return testcase
Truncate stacktrace if necessary.
def _truncate_stacktrace(stacktrace):
  """Truncate stacktrace if necessary."""
  if len(stacktrace) <= STACKTRACE_MAX_LENGTH:
    return stacktrace

  # Keep the first and last |STACKTRACE_MAX_LENGTH/2| bytes, with a marker
  # noting how much was dropped in between.
  truncated = len(stacktrace) - STACKTRACE_MAX_LENGTH
  return (stacktrace[:STACKTRACE_MAX_LENGTH // 2] +
          f'...truncated {truncated} bytes...' +
          stacktrace[-STACKTRACE_MAX_LENGTH // 2:])
Parse raw suspected_cls into dict.
def _parse_suspected_cls(predator_result): """Parse raw suspected_cls into dict.""" if not predator_result: return None # The raw result contains some additional information that we don't need here. # Everything we're concerned with is a part of the "result" object included # with the response. predator_result = predator_result['result'] return { 'found': predator_result.get('found'), 'suspected_project': predator_result.get('suspected_project'), 'suspected_components': predator_result.get('suspected_components'), 'changelists': predator_result.get('suspected_cls'), 'feedback_url': predator_result.get('feedback_url'), 'error_message': predator_result.get('error_message'), }
Highlights common stack frames between first two stacks.
def highlight_common_stack_frames(crash_stacktrace):
  """Highlights common stack frames between first two stacks.

  Parses numbered sanitizer-style frames ('#N 0x... symbol'), splits them
  into separate stacks whenever the frame counter resets, then wraps the
  shared tail of the first two stacks in <b> tags.
  """
  # crash_stacks[i] holds the symbol portion of each frame of stack i.
  crash_stacks = [[]]
  highlighted_crash_stacktrace_lines = []
  old_frame_no = 0
  stack_index = 0
  stack_trace_line_format = '^ *#([0-9]+) *0x[0-9a-f]+ (.*)'

  # Pass 1: collect the frames of each stack.
  for line in crash_stacktrace.splitlines():
    # Stacktrace separator prefix.
    if stack_index and line.startswith('+-'):
      break

    match = re.match(stack_trace_line_format, line)
    if match:
      frame_no = int(match.group(1))

      # This means we encountered another stack like free or alloc stack.
      if old_frame_no > frame_no:
        stack_index += 1
        crash_stacks.append([])

      crash_stacks[stack_index].append(match.group(2))
      old_frame_no = frame_no

  # If we have just one crash stack and no other stack,
  # then nothing to highlight.
  if stack_index == 0:
    return crash_stacktrace

  # Compare stack frames between first two stacks, walking bottom-up from the
  # last frame of each; match_index records the shallowest matching pair.
  match_index = -1
  start_index_crash_stack_1 = len(crash_stacks[0]) - 1
  start_index_crash_stack_2 = len(crash_stacks[1]) - 1
  while True:
    if (crash_stacks[0][start_index_crash_stack_1] !=
        crash_stacks[1][start_index_crash_stack_2]):
      break

    match_index = [start_index_crash_stack_1, start_index_crash_stack_2]

    if not start_index_crash_stack_1:
      break
    if not start_index_crash_stack_2:
      break

    start_index_crash_stack_1 -= 1
    start_index_crash_stack_2 -= 1

  # No match found, nothing to highlight.
  if match_index == -1:
    return crash_stacktrace

  # Pass 2: re-emit every line, bolding frames at or below the match point in
  # the first two stacks.
  old_frame_no = 0
  stack_index = 0
  frame_index = -1
  for line in crash_stacktrace.splitlines():
    match = re.match(stack_trace_line_format, line)
    if match:
      frame_no = int(match.group(1))

      # This means we encountered another stack like free or alloc stack.
      if old_frame_no > frame_no:
        stack_index += 1
        frame_index = -1

      frame_index += 1
      old_frame_no = frame_no

      # We only care about highlighting the first two stacks.
      if stack_index <= 1 and frame_index >= match_index[stack_index]:
        line = '<b>%s</b>' % line

    highlighted_crash_stacktrace_lines.append(line)

  return '\n'.join(highlighted_crash_stacktrace_lines)
Linkify links to android kernel source.
def _linkify_android_kernel_stack_frame_if_needed(line):
  """Linkify links to android kernel source."""
  match = KERNEL_LINK_REGEX.match(line)
  if not match:
    return line

  return KERNEL_LINK_FORMAT % (match.group(1), match.group(2), match.group(3),
                               match.group(4))
Clean up and format a stack trace for display.
def filter_stacktrace(crash_stacktrace, crash_type, revisions_dict, platform,
                      job_type):
  """Clean up and format a stack trace for display.

  Args:
    crash_stacktrace: the raw stacktrace string (may be empty/None).
    crash_type: crash type string, used to special-case direct leaks.
    revisions_dict: component revisions used for stack-frame linkification.
    platform: platform string or None; android enables kernel linkification.
    job_type: job name, used to detect LKL jobs.

  Returns:
    HTML-escaped, linkified stacktrace with common/leak frames highlighted.
  """
  if not crash_stacktrace:
    return ''

  # Truncate stacktrace if it's too big.
  crash_stacktrace = _truncate_stacktrace(crash_stacktrace)

  linkifier = source_mapper.StackFrameLinkifier(revisions_dict)

  # Hoisted out of the loop: these checks do not depend on the line.
  is_android = platform is not None and 'android' in platform
  linkify_kernel_frames = is_android or environment.is_lkl_job(job_type)

  filtered_crash_lines = []
  for line in crash_stacktrace.splitlines():
    # Html escape line content to prevent XSS.
    line = html.escape(line, quote=True)
    line = linkifier.linkify_stack_frame(line)

    if linkify_kernel_frames:
      line = _linkify_android_kernel_stack_frame_if_needed(line)

    filtered_crash_lines.append(line)

  filtered_crash_stacktrace = '\n'.join(filtered_crash_lines)

  if crash_type == leak_blacklist.DIRECT_LEAK_LABEL:
    return leak_blacklist.highlight_first_direct_leak(filtered_crash_stacktrace)

  return highlight_common_stack_frames(filtered_crash_stacktrace)
Check if the line contains a frame; it means the line is important.
def _is_line_important(line_content, frames): """Check if the line contains a frame; it means the line is important.""" for frame in frames: if frame in line_content: return True return False
Get the stack frames from the crash state. Sometimes the crash state contains a type of crash, e.g. 'Bad-cast to content::RenderWidget from content::RenderWidgetHostViewAura'. The stack frame is 'content::RenderWidget'.
def get_stack_frames(crash_state_lines):
  """Get the stack frames from the crash state.

  Sometimes the crash state contains a type of crash, e.g. 'Bad-cast to
  content::RenderWidget from content::RenderWidgetHostViewAura'. The stack
  frame is 'content::RenderWidget'.
  """
  frames = []
  for line in crash_state_lines:
    for regex in COMPILED_CRASH_STATE_REGEXES:
      matches = re.match(regex, line)
      if matches:
        # Extract the frame embedded in the crash-state description.
        frames.append(matches.group(1))
        break
    else:
      # No crash-state regex matched; the whole line is the frame.
      frames.append(line)
  return frames
Convert an array of string to an array of Line.
def convert_to_lines(raw_stacktrace, crash_state_lines, crash_type):
  """Convert an array of string to an array of Line."""
  if not raw_stacktrace or not raw_stacktrace.strip():
    return []

  raw_lines = raw_stacktrace.splitlines()

  frames = get_stack_frames(crash_state_lines)
  escaped_frames = [jinja2.escape(f) for f in frames]
  combined_frames = frames + escaped_frames

  # Certain crash types have their own customized frames that are not related to
  # the stacktrace. Therefore, we make our best effort to preview stacktrace
  # in a reasonable way; we preview around the the top of the stacktrace.
  for unique_type in data_types.CRASH_TYPES_WITH_UNIQUE_STATE:
    if crash_type.startswith(unique_type):
      combined_frames = ['ERROR']
      break

  return [
      Line(number, content, _is_line_important(content, combined_frames))
      for number, content in enumerate(raw_lines, start=1)
  ]
Get testcase detail for rendering the testcase detail page.
def get_testcase_detail_by_id(testcase_id):
  """Get testcase detail for rendering the testcase detail page."""
  # Access check raises on failure, so the detail call only sees visible
  # testcases.
  testcase = access.check_access_and_get_testcase(testcase_id)
  return get_testcase_detail(testcase)
Return revision range html for a revision range and job type given a range string.
def _get_revision_range_html_from_string(job_type, platform_id, revision_range): """Return revision range html for a revision range and job type given a range string.""" try: start_revision, end_revision = revision_range.split(':') except: return 'Bad revision range.' return _get_revision_range_html(job_type, platform_id, start_revision, end_revision)
Return revision range html for a revision range and job type.
def _get_revision_range_html(job_type, platform_id, start_revision,
                             end_revision=None):
  """Return revision range html for a revision range and job type."""
  # A single revision is treated as a degenerate range.
  if end_revision is None:
    end_revision = start_revision

  component_rev_list = revisions.get_component_range_list(
      start_revision, end_revision, job_type, platform_id=platform_id)
  if not component_rev_list:
    return ('%s:%s (No component revisions found!)' % (start_revision,
                                                       end_revision))

  return revisions.format_revision_list(component_rev_list)
Return blob size string.
def _get_blob_size_string(blob_key):
  """Return blob size string, or None if the size is unknown."""
  size = blobs.get_blob_size(blob_key)
  return None if size is None else utils.get_size_string(size)
Format a reproduction help string as HTML (linkified with break tags).
def _format_reproduction_help(reproduction_help):
  """Format a reproduction help string as HTML (linkified with break tags)."""
  if not reproduction_help:
    return ''

  linkified = jinja2.utils.urlize(reproduction_help)
  return linkified.replace('\n', '<br>')
Get testcase detail for rendering the testcase detail page.
def get_testcase_detail(testcase):
  """Get testcase detail for rendering the testcase detail page.

  Gathers crash info, revision ranges, formatted stacktraces, pending-task
  state and access info, and returns it as one dict consumed by the
  testcase-detail template.
  """
  config = db_config.get()
  crash_address = testcase.crash_address
  crash_state = testcase.crash_state
  crash_state_lines = crash_state.strip().splitlines()
  crash_type = data_handler.get_crash_type_string(testcase)
  external_user = not access.has_access(job_type=testcase.job_type)
  issue_url = issue_tracker_utils.get_issue_url(testcase)
  metadata = testcase.get_metadata()
  original_testcase_size = _get_blob_size_string(testcase.fuzzed_keys)
  minimized_testcase_size = _get_blob_size_string(testcase.minimized_keys)
  has_issue_tracker = bool(data_handler.get_issue_tracker_name())
  fuzzer_display = data_handler.get_fuzzer_display(testcase)
  formatted_reproduction_help = _format_reproduction_help(
      data_handler.get_formatted_reproduction_help(testcase))
  # When we have a HELP_TEMPLATE, ignore any default values set for HELP_URL.
  if not formatted_reproduction_help:
    reproduction_help_url = data_handler.get_reproduction_help_url(
        testcase, config)
  else:
    reproduction_help_url = None

  # Regression display: empty means the task is still pending; 'NA' is shown
  # as-is; anything else is a '<start>:<end>' range string.
  if not testcase.regression:
    regression = 'Pending'
  elif testcase.regression == 'NA':
    regression = 'NA'
  else:
    regression = _get_revision_range_html_from_string(
        testcase.job_type, testcase.platform_id, testcase.regression)

  # Fixed display: same convention; a value other than 'Yes'/'NA' is a range
  # string, which also gets an expanded fixed_full range.
  fixed_full = None
  if 'progression_pending' in metadata:
    fixed = 'Pending'
  elif not testcase.fixed:
    fixed = 'NO'
  elif testcase.fixed == 'NA':
    fixed = 'NA'
  elif testcase.fixed == 'Yes':
    fixed = 'YES'
  else:
    fixed = 'YES'
    fixed_full = _get_revision_range_html_from_string(
        testcase.job_type, testcase.platform_id, testcase.fixed)

  last_tested = None
  last_tested_revision = (
      metadata.get('last_tested_revision') or testcase.crash_revision)
  if last_tested_revision:
    last_tested = _get_revision_range_html(
        testcase.job_type, testcase.platform_id, last_tested_revision)

  crash_revision = testcase.crash_revision
  crash_revisions_dict = revisions.get_component_revisions_dict(
      crash_revision, testcase.job_type, platform_id=testcase.platform_id)
  crash_stacktrace = data_handler.get_stacktrace(testcase)
  crash_stacktrace = filter_stacktrace(crash_stacktrace, testcase.crash_type,
                                       crash_revisions_dict, testcase.platform,
                                       testcase.job_type)
  crash_stacktrace = convert_to_lines(crash_stacktrace, crash_state_lines,
                                      crash_type)

  last_tested_crash_revision = metadata.get('last_tested_crash_revision')
  last_tested_crash_revisions_dict = revisions.get_component_revisions_dict(
      last_tested_crash_revision, testcase.job_type,
      platform_id=testcase.platform_id)
  last_tested_crash_stacktrace = data_handler.get_stacktrace(
      testcase, stack_attribute='last_tested_crash_stacktrace')
  last_tested_crash_stacktrace = filter_stacktrace(
      last_tested_crash_stacktrace, testcase.crash_type,
      last_tested_crash_revisions_dict, testcase.platform, testcase.job_type)
  last_tested_crash_stacktrace = convert_to_lines(last_tested_crash_stacktrace,
                                                  crash_state_lines, crash_type)
  privileged_user = access.has_access(need_privileged_access=True)

  # Fix build url link. |storage.cloud.google.com| takes care of using the
  # right set of authentication credentials needed to access the link.
  if 'build_url' in metadata:
    metadata['build_url'] = metadata['build_url'].replace(
        'gs://', 'https://storage.cloud.google.com/')

  # Pending-task flags drive the page's auto-refresh behavior below.
  pending_blame_task = (
      testcase.has_blame() and 'blame_pending' in metadata and
      metadata['blame_pending'])
  pending_impact_task = (
      testcase.has_impacts() and not testcase.is_impact_set_flag)
  pending_minimize_task = not testcase.minimized_keys
  pending_progression_task = ('progression_pending' in metadata and
                              metadata['progression_pending'])
  pending_regression_task = not testcase.regression
  pending_stack_task = testcase.last_tested_crash_stacktrace == 'Pending'
  needs_refresh = (
      testcase.status == 'Pending' or
      (testcase.status in ('Processed', 'Duplicate') and
       (pending_blame_task or pending_impact_task or pending_minimize_task or
        pending_progression_task or pending_regression_task or
        pending_stack_task)))

  if data_types.SecuritySeverity.is_valid(testcase.security_severity):
    security_severity = severity_analyzer.severity_to_string(
        testcase.security_severity)
  else:
    security_severity = None

  auto_delete_timestamp = None
  auto_close_timestamp = None

  if testcase.one_time_crasher_flag:
    last_crash_time = (
        crash_stats.get_last_crash_time(testcase) or testcase.timestamp)

    # Set auto-delete timestamp for unreproducible testcases with
    # no associated bug.
    if not testcase.bug_information:
      auto_delete_timestamp = utils.utc_datetime_to_timestamp(
          last_crash_time + datetime.timedelta(
              days=data_types.UNREPRODUCIBLE_TESTCASE_NO_BUG_DEADLINE))

    # Set auto-close timestamp for unreproducible testcases with
    # an associated bug.
    if testcase.open and testcase.bug_information:
      auto_close_timestamp = utils.utc_datetime_to_timestamp(
          last_crash_time + datetime.timedelta(
              days=data_types.UNREPRODUCIBLE_TESTCASE_WITH_BUG_DEADLINE))

  # Display string has the form '<label>: <value>' — split it for the template.
  memory_tool_display_string = environment.get_memory_tool_display_string(
      testcase.job_type)
  memory_tool_display_label = memory_tool_display_string.split(':')[0]
  memory_tool_display_value = memory_tool_display_string.split(':')[1].strip()

  helpers.log('Testcase %s' % testcase.key.id(), helpers.VIEW_OPERATION)
  return {
      'id': testcase.key.id(),
      'crash_type': crash_type,
      'crash_address': crash_address,
      'crash_state': crash_state,
      'crash_state_lines': crash_state_lines,
      'crash_revision': testcase.crash_revision,
      'csrf_token': form.generate_csrf_token(),
      'external_user': external_user,
      'footer': testcase.comments,
      'formatted_reproduction_help': formatted_reproduction_help,
      'fixed': fixed,
      'fixed_full': fixed_full,
      'issue_url': issue_url,
      'is_admin': auth.is_current_user_admin(),
      'metadata': metadata,
      'minimized_testcase_size': minimized_testcase_size,
      'needs_refresh': needs_refresh,
      'original_testcase_size': original_testcase_size,
      'privileged_user': privileged_user,
      'regression': regression,
      'crash_stacktrace': {
          'lines':
              crash_stacktrace,
          'revision':
              revisions.get_real_revision(
                  crash_revision,
                  testcase.job_type,
                  display=True,
                  platform_id=testcase.platform_id)
      },
      'last_tested_crash_stacktrace': {
          'lines':
              last_tested_crash_stacktrace,
          'revision':
              revisions.get_real_revision(
                  last_tested_crash_revision,
                  testcase.job_type,
                  display=True,
                  platform_id=testcase.platform_id)
      },
      'security_severity': security_severity,
      'security_severities': data_types.SecuritySeverity.list(),
      'stats': {
          'min_hour': crash_stats.get_min_hour(),
          'max_hour': crash_stats.get_max_hour(),
      },
      'suspected_cls': _parse_suspected_cls(metadata.get('predator_result')),
      'testcase': testcase,
      'timestamp': utils.utc_datetime_to_timestamp(testcase.timestamp),
      'show_blame': testcase.has_blame(),
      'show_impact': testcase.has_impacts(),
      'impacts_production': testcase.impacts_production(),
      'find_similar_issues_options': FIND_SIMILAR_ISSUES_OPTIONS,
      'auto_delete_timestamp': auto_delete_timestamp,
      'auto_close_timestamp': auto_close_timestamp,
      'memory_tool_display_label': memory_tool_display_label,
      'memory_tool_display_value': memory_tool_display_value,
      'last_tested': last_tested,
      'is_admin_or_not_oss_fuzz': is_admin_or_not_oss_fuzz(),
      'has_issue_tracker': has_issue_tracker,
      'reproduction_help_url': reproduction_help_url,
      'is_local_development': environment.is_running_on_app_engine_development(),
      'fuzzer_display': fuzzer_display._asdict(),
  }
Return True if the current user is an admin or if this is not OSS-Fuzz.
def is_admin_or_not_oss_fuzz():
  """Return True if the current user is an admin or if this is not OSS-Fuzz."""
  if not utils.is_oss_fuzz():
    return True
  return auth.is_current_user_admin()
Update from trunk.
def update(testcase):
  """Update from trunk: queue a last-tested stacktrace refresh."""
  testcase.last_tested_crash_stacktrace = 'Pending'
  testcase.put()

  tasks.add_task(
      'variant',
      testcase.key.id(),
      testcase.job_type,
      queue=tasks.queue_for_testcase(testcase))

  helpers.log(
      'Marked testcase %s for last tested stacktrace update' %
      testcase.key.id(), helpers.MODIFY_OPERATION)
Check if an email is in the privileged users list.
def _is_privileged_user(email):
  """Check if an email is in the privileged users list."""
  # A deployment may opt every authenticated user into privileged access.
  if local_config.AuthConfig().get('all_users_privileged'):
    return True

  privileged_user_emails = (db_config.get_value('privileged_users') or
                            '').splitlines()
  for privileged_user_email in privileged_user_emails:
    if utils.emails_equal(email, privileged_user_email):
      return True

  return False
Check if an email is in the blacklisted users list.
def _is_blacklisted_user(email):
  """Check if an email is in the blacklisted users list."""
  blacklisted_user_emails = (db_config.get_value('blacklisted_users') or
                             '').splitlines()
  return any(
      utils.emails_equal(email, blacklisted_user_email)
      for blacklisted_user_email in blacklisted_user_emails)
Return the job_type that is assigned to the current user. None means one can access any job type. You might want to invoke get_access(..) with the job type afterward.
def get_user_job_type():
  """Return the job_type that is assigned to the current user. None means one
  can access any job type. You might want to invoke get_access(..) with the
  job type afterward."""
  email = helpers.get_user_email()
  privileged_user_emails = (db_config.get_value('privileged_users') or
                            '').splitlines()
  for entry in privileged_user_emails:
    # Entries of the form '<email>;<job_type>' scope a user to one job type.
    if ';' not in entry:
      continue

    tokens = entry.split(';')
    if utils.emails_equal(email, tokens[0]):
      return tokens[1]

  return None
Check if the email's domain is allowed.
def _is_domain_allowed(email):
  """Check if the email's domain is allowed."""
  domains = local_config.AuthConfig().get('whitelisted_domains', default=[])
  normalized = utils.normalize_email(email)
  return any(
      normalized.endswith('@%s' % domain.lower()) for domain in domains)
Check if the user has access.
def has_access(need_privileged_access=False, job_type=None, fuzzer_name=None):
  """Check if the user has access."""
  return get_access(
      need_privileged_access=need_privileged_access,
      job_type=job_type,
      fuzzer_name=fuzzer_name) == UserAccess.Allowed
Return 'allowed', 'redirected', or 'failed'.
def get_access(need_privileged_access=False, job_type=None, fuzzer_name=None):
  """Return 'allowed', 'redirected', or 'failed'."""
  # Admins bypass all other checks.
  if auth.is_current_user_admin():
    return UserAccess.Allowed

  user = auth.get_current_user()
  if not user:
    return UserAccess.Redirected

  email = user.email
  if _is_blacklisted_user(email):
    return UserAccess.Denied

  if _is_privileged_user(email):
    return UserAccess.Allowed

  # External users may be granted access per-job or per-fuzzer.
  if job_type and external_users.is_job_allowed_for_user(email, job_type):
    return UserAccess.Allowed

  if (fuzzer_name and
      external_users.is_fuzzer_allowed_for_user(email, fuzzer_name)):
    return UserAccess.Allowed

  # Domain allowlist grants only unprivileged access.
  if not need_privileged_access and _is_domain_allowed(email):
    return UserAccess.Allowed

  return UserAccess.Denied
Checks if the current user can access the testcase.
def can_user_access_testcase(testcase):
  """Checks if the current user can access the testcase.

  Access is granted via (in order): general fuzzer/job access, being the
  uploader, or being listed on the associated issue (assignee, and — with
  relaxed restrictions — cc or reporter).
  """
  config = db_config.get()
  # Security bugs require privileged access unless the deployment relaxes it.
  need_privileged_access = (
      testcase.security_flag and not config.relax_security_bug_restrictions)

  if has_access(
      fuzzer_name=testcase.actual_fuzzer_name(),
      job_type=testcase.job_type,
      need_privileged_access=need_privileged_access):
    return True

  user_email = helpers.get_user_email()
  if testcase.uploader_email and testcase.uploader_email == user_email:
    return True

  # Allow owners of bugs to see associated test cases and test case groups.
  issue_id = testcase.bug_information or testcase.group_bug_information
  if not issue_id:
    return False

  issue_tracker = issue_tracker_utils.get_issue_tracker_for_testcase(testcase)
  associated_issue = issue_tracker.get_issue(issue_id)
  if not associated_issue:
    return False

  # Look at both associated issue and original issue (if the associated one
  # is a duplicate of the original issue).
  issues_to_check = [associated_issue]
  if associated_issue.merged_into:
    original_issue = issue_tracker.get_original_issue(issue_id)
    if original_issue:
      issues_to_check.append(original_issue)

  relaxed_restrictions = (
      config.relax_testcase_restrictions or _is_domain_allowed(user_email))
  for issue in issues_to_check:
    if relaxed_restrictions:
      # Relaxed: cc, assignee or reporter on the issue is enough.
      if (any(utils.emails_equal(user_email, cc) for cc in issue.ccs) or
          utils.emails_equal(user_email, issue.assignee) or
          utils.emails_equal(user_email, issue.reporter)):
        return True
    # Strict: only the issue assignee qualifies.
    elif utils.emails_equal(user_email, issue.assignee):
      return True

  return False
Check the failed attempt count and get the testcase.
def check_access_and_get_testcase(testcase_id):
  """Check the failed attempt count and get the testcase."""
  if not helpers.get_user_email():
    raise helpers.UnauthorizedError()

  if not testcase_id:
    raise helpers.EarlyExitError('No test case specified!', 404)

  try:
    testcase = data_handler.get_testcase_by_id(testcase_id)
  except errors.InvalidTestcaseError:
    raise helpers.EarlyExitError('Invalid test case!', 404)

  if not can_user_access_testcase(testcase):
    raise helpers.AccessDeniedError()

  return testcase
Get the auth domain.
def auth_domain():
  """Get the auth domain, raising AuthError if it is not configured."""
  domain = local_config.ProjectConfig().get('firebase.auth_domain')
  if domain:
    return domain

  raise AuthError('No auth domain.')
Get the real auth domain.
def real_auth_domain():
  """Get the real auth domain, defaulting to '<app-id>.firebaseapp.com'."""
  real_domain = local_config.ProjectConfig().get('firebase.real_auth_domain')
  if not real_domain:
    real_domain = utils.get_application_id() + '.firebaseapp.com'
  return real_domain
Returns whether or not the current logged in user is an admin.
def is_current_user_admin():
  """Returns whether or not the current logged in user is an admin."""
  # Local development treats everyone as admin.
  if environment.is_local_development():
    return True

  user = get_current_user()
  if not user:
    return False

  # Admins are modeled as Admin entities keyed by email.
  admin_key = ndb.Key(data_types.Admin, user.email)
  return bool(admin_key.get())
Get the project number from project ID.
def _project_number_from_id(project_id):
  """Get the project number from project ID."""
  resource_manager = build('cloudresourcemanager', 'v1')
  # pylint: disable=no-member
  result = resource_manager.projects().get(projectId=project_id).execute()

  if 'projectNumber' not in result:
    raise AuthError('Failed to get project number.')

  return result['projectNumber']
Retrieves a public key from the list published by Identity-Aware Proxy, re-fetching the key file if necessary.
def _get_iap_key(key_id):
  """Retrieves a public key from the list published by Identity-Aware Proxy,
  re-fetching the key file if necessary.
  """
  # pylint: disable=missing-timeout
  resp = requests.get('https://www.gstatic.com/iap/verify/public_key')
  if resp.status_code != 200:
    raise AuthError('Unable to fetch IAP keys: '
                    f'{resp.status_code} / {resp.headers} / {resp.text}')

  key = resp.json().get(key_id)
  if not key:
    raise AuthError(f'Key {repr(key_id)} not found')

  return key
Validate JWT assertion.
def _validate_iap_jwt(iap_jwt):
  """Validate JWT assertion.

  Args:
    iap_jwt: the X-Goog-IAP-JWT-Assertion header value.

  Returns:
    The verified email address from the JWT payload.

  Raises:
    AuthError: if the assertion cannot be verified.
  """
  project_id = utils.get_application_id()
  expected_audience = f'/projects/{_project_number_from_id(project_id)}' \
                      f'/apps/{project_id}'
  try:
    key_id = jwt.get_unverified_header(iap_jwt).get('kid')
    if not key_id:
      raise AuthError('No key ID.')

    key = _get_iap_key(key_id)
    decoded_jwt = jwt.decode(
        iap_jwt,
        key,
        algorithms=['ES256'],
        issuer='https://cloud.google.com/iap',
        audience=expected_audience)
    return decoded_jwt['email']
  except (jwt.exceptions.InvalidTokenError,
          requests.exceptions.RequestException) as e:
    # Chain the original exception so the underlying JWT/HTTP failure is
    # preserved in tracebacks instead of being flattened into a string.
    raise AuthError('JWT assertion decode error: ' + str(e)) from e
Get Cloud IAP email.
def get_iap_email(current_request):
  """Get Cloud IAP email, or None when the IAP header is absent."""
  jwt_assertion = current_request.headers.get('X-Goog-IAP-JWT-Assertion')
  if jwt_assertion:
    return _validate_iap_jwt(jwt_assertion)
  return None
Get the current logged in user, or None.
def get_current_user():
  """Get the current logged in user, or None.

  Resolution order: local-development override, LOAS peer username,
  Cloud IAP assertion, the per-request OAuth email cache, the per-request
  session email cache, and finally the Firebase session cookie.
  """
  if environment.is_local_development():
    # Local development has no real auth; use a fixed fake user.
    return User('user@localhost')

  current_request = request_cache.get_current_request()
  if local_config.AuthConfig().get('enable_loas'):
    loas_user = current_request.headers.get('X-AppEngine-LOAS-Peer-Username')
    if loas_user:
      return User(loas_user + '@google.com')

  iap_email = get_iap_email(current_request)
  if iap_email:
    return User(iap_email)

  cache_backing = request_cache.get_cache_backing()

  # Set by the oauth decorator after validating an Authorization header.
  oauth_email = getattr(cache_backing, '_oauth_email', None)
  if oauth_email:
    return User(oauth_email)

  # Set at the bottom of this function on a prior successful validation.
  cached_email = getattr(cache_backing, '_cached_email', None)
  if cached_email:
    return User(cached_email)

  session_cookie = get_session_cookie()
  if not session_cookie:
    return None

  try:
    decoded_claims = decode_claims(get_session_cookie())
  except AuthError:
    logs.log_warn('Invalid session cookie.')
    return None

  allowed_firebase_providers = local_config.ProjectConfig().get(
      'firebase.auth_providers', ['google.com'])
  firebase_info = decoded_claims.get('firebase', {})
  sign_in_provider = firebase_info.get('sign_in_provider')

  if sign_in_provider not in allowed_firebase_providers:
    logs.log_error(f'Firebase provider {sign_in_provider} is not enabled.')
    return None

  # Per https://docs.github.com/en/authentication/
  # keeping-your-account-and-data-secure/authorizing-oauth-apps
  # GitHub requires emails to be verified before an OAuth app can be
  # authorized, so we make an exception.
  if (not decoded_claims.get('email_verified') and
      sign_in_provider != 'github.com'):
    return None

  email = decoded_claims.get('email')
  if not email:
    return None

  # We cache the email for this request if we've validated the user to make
  # subsequent get_current_user() calls fast.
  setattr(cache_backing, '_cached_email', email)
  return User(email)
Create a new session cookie.
def create_session_cookie(id_token, expires_in):
  """Create a new Firebase session cookie from |id_token|.

  Args:
    id_token: the Firebase ID token to exchange.
    expires_in: cookie lifetime.

  Raises:
    AuthError: if Firebase fails to create the cookie.
  """
  try:
    return auth.create_session_cookie(id_token, expires_in=expires_in)
  except auth.AuthError as e:
    # Chain the Firebase error so the underlying cause is preserved.
    raise AuthError('Failed to create session cookie.') from e
Get the current session cookie.
def get_session_cookie():
  """Return the current request's 'session' cookie value, or None."""
  cookies = request_cache.get_current_request().cookies
  return cookies.get('session')
Revoke a session cookie.
def revoke_session_cookie(session_cookie):
  """Revoke |session_cookie| by revoking the user's refresh tokens."""
  claims = decode_claims(session_cookie)
  auth.revoke_refresh_tokens(claims['sub'])
Decode the claims for the current session cookie.
def decode_claims(session_cookie):
  """Decode and verify the claims in |session_cookie|.

  Raises:
    AuthError: if the cookie is malformed or has been revoked.
  """
  try:
    return auth.verify_session_cookie(session_cookie, check_revoked=True)
  except (ValueError, auth.AuthError) as e:
    # Chain the original validation error for easier debugging.
    raise AuthError('Invalid session cookie.') from e
Get scoped fuzzer names.
def get_permission_names(entity_kind):
  """Return the user's scoped entity names ('name', or 'name*' for prefixes)."""
  # pylint: disable=protected-access
  permissions = external_users._get_permissions_query_for_user(
      helpers.get_user_email(), entity_kind)
  return [
      permission.entity_name + ('*' if permission.is_prefix else '')
      for permission in permissions
  ]
Get the scope object for the user.
def get_scope():
  """Build a Scope describing what the current user may access."""
  email = helpers.get_user_email()
  is_privileged = access.has_access(need_privileged_access=True)
  # Privileged access implies general access.
  everything = is_privileged or access.has_access()

  # pylint: disable=protected-access
  job_types = external_users._allowed_entities_for_user(
      email, data_types.PermissionEntityKind.JOB)

  allowed_job_type = access.get_user_job_type()
  if allowed_job_type:
    job_types.append(allowed_job_type)

  # pylint: disable=protected-access
  fuzzer_names = external_users._allowed_entities_for_user(
      email, data_types.PermissionEntityKind.FUZZER)

  return Scope(everything, is_privileged, job_types, fuzzer_names,
               allowed_job_type)
Add permissions to params.
def add_permissions_to_params(scope, params):
  """Record the user's permissions in |params| for the frontend."""
  jobs = get_permission_names(data_types.PermissionEntityKind.JOB)
  if scope.allowed_job_type:
    jobs.append(scope.allowed_job_type)

  params['permissions'] = {
      'everything': scope.everything,
      'isPrivileged': scope.is_privileged,
      'jobs': jobs,
      'fuzzers': get_permission_names(data_types.PermissionEntityKind.FUZZER),
  }
Add scope to the query according to permissions and modify params.
def add_scope(query, params, security_field, job_type_field, fuzzer_name_field):
  """Restrict |query| to what the current user may see; update |params|.

  Raises:
    helpers.AccessDeniedError: if the user cannot access anything.
  """
  scope = get_scope()
  add_permissions_to_params(scope, params)

  if scope.is_privileged:
    # Privileged users see everything; no filtering needed.
    return

  subqueries = []
  if scope.everything:
    subquery = query.new_subquery()
    subquery.filter(security_field, False)
    subqueries.append(subquery)

  if scope.job_types:
    subquery = query.new_subquery()
    subquery.filter_in(job_type_field, scope.job_types)
    subqueries.append(subquery)

  if scope.fuzzer_names:
    subquery = query.new_subquery()
    subquery.filter_in(fuzzer_name_field, scope.fuzzer_names)
    subqueries.append(subquery)

  if not subqueries:
    # No applicable permissions at all: the user can access nothing.
    raise helpers.AccessDeniedError()

  query.union(*subqueries)
Query from BigQuery given the query object.
def get(query, group_query, offset, limit):
  """Query from BigQuery given the query object.

  Args:
    query: object supplying end/days/block/group_by/sort_by and the WHERE
      clause via get_where_clause().
    group_query: object whose get_where_clause() result is used as the
      group HAVING clause.
    offset: row offset for pagination.
    limit: maximum number of rows to return.

  Returns:
    The result of crash_stats.get for these parameters.
  """
  return crash_stats.get(
      end=query.end,
      days=query.days,
      block=query.block,
      group_by=query.group_by,
      where_clause=query.get_where_clause(),
      group_having_clause=group_query.get_where_clause(),
      sort_by=query.sort_by,
      offset=offset,
      limit=limit)
Get a CSPBuilder object for the default policy. Can be modified for specific pages if needed.
def get_default_builder():
  """Get a CSPBuilder object for the default policy.

  Can be modified for specific pages if needed. The directive order below is
  preserved as-is since it determines the rendered policy string.
  """
  builder = CSPBuilder()

  # By default, disallow everything. Whitelist only features that are needed.
  builder.add('default-src', 'none', quote=True)

  # Allow various directives if sourced from self.
  builder.add('font-src', 'self', quote=True)
  builder.add('connect-src', 'self', quote=True)
  builder.add('img-src', 'self', quote=True)
  builder.add('manifest-src', 'self', quote=True)

  # External scripts. Google analytics, charting libraries.
  builder.add('script-src', 'www.google-analytics.com')
  builder.add('script-src', 'www.gstatic.com')
  builder.add('script-src', 'apis.google.com')

  # Google Analytics also uses connect-src and img-src.
  builder.add('connect-src', 'www.google-analytics.com')
  builder.add('img-src', 'www.google-analytics.com')

  # Firebase.
  builder.add('img-src', 'www.gstatic.com')
  builder.add('connect-src', 'securetoken.googleapis.com')
  builder.add('connect-src', 'www.googleapis.com')
  builder.add('connect-src', 'identitytoolkit.googleapis.com')
  builder.add('frame-src', auth.auth_domain())

  # External style. Used for fonts, charting libraries.
  builder.add('style-src', 'fonts.googleapis.com')
  builder.add('style-src', 'www.gstatic.com')

  # External fonts.
  builder.add('font-src', 'fonts.gstatic.com')

  # Some upload forms require us to connect to the cloud storage API.
  builder.add('connect-src', 'storage.googleapis.com')

  # Mixed content is unexpected, but upgrade requests rather than block.
  builder.add_sourceless('upgrade-insecure-requests')

  # We don't expect object to be used, but it doesn't fall back to default-src.
  builder.add('object-src', 'none', quote=True)

  # We don't expect workers to be used, but they fall back to script-src.
  builder.add('worker-src', 'none', quote=True)

  # Add reporting so that violations don't break things silently.
  builder.add('report-uri', '/report-csp-failure')

  # TODO(mbarbella): Remove Google-specific cases by allowing configuration.
  # Internal authentication.
  builder.add('manifest-src', 'login.corp.google.com')

  # TODO(mbarbella): Improve the policy by limiting the additions below.
  # Some scripts may be loaded from current auth domain.
  builder.add('script-src', auth.auth_domain())

  # Because we use Polymer Bundler to create large files containing all of our
  # scripts inline, our policy requires this (which weakens CSP significantly).
  builder.add('script-src', 'unsafe-inline', quote=True)

  # Some of the pages that read responses from json handlers require this.
  builder.add('script-src', 'unsafe-eval', quote=True)

  # Our Polymer Bundler usage also requires inline style.
  builder.add('style-src', 'unsafe-inline', quote=True)

  # Some fonts and images are loaded from data URIs.
  builder.add('font-src', 'data:')
  builder.add('img-src', 'data:')

  return builder
Get the default Content Security Policy as a string.
def get_default():
  """Render the default Content Security Policy as a string."""
  builder = get_default_builder()
  return str(builder)
Determine if the param's value is considered as empty.
def is_empty(value):
  """Return True when |value| counts as empty (i.e. is falsy)."""
  if value:
    return False
  return True
Check if there's any param.
def has_params(params, filters):
  """Return True if any filter's param is present (truthy) in |params|."""
  for current_filter in filters:
    if params.get(current_filter.param_key):
      return True
  return False
Extract the value from the keyword given the field and return the new keyword.
def extract_keyword_field(keyword, field):
  """Pull |field|'s value out of |keyword|.

  Returns a (new_keyword, value) tuple. When the field is absent, returns
  (keyword, None). Surrounding single or double quotes are stripped from the
  value.
  """
  regex = re.compile(KEYWORD_FIELD_REGEX % field, flags=re.IGNORECASE)
  match = re.search(regex, keyword)
  if not match:
    return keyword, None

  value = match.group(1)
  for quote in ('"', "'"):
    if value.startswith(quote) and value.endswith(quote):
      value = value.strip(quote)
      break

  return re.sub(regex, ' ', keyword), value
Convert yes/no to boolean or raise Exception.
def get_boolean(value):
  """Map 'yes' to True and 'no' to False.

  Raises:
    ValueError: for any other value.
  """
  for text, result in (('yes', True), ('no', False)):
    if value == text:
      return result
  raise ValueError("The value must be 'yes' or 'no'.")
Get sanitized string.
def get_string(value):
  """Return |value| with surrounding whitespace removed."""
  sanitized = value.strip()
  return sanitized
Return a string filter.
def String(field, param_key, required=False):
  """Build a filter whose value is sanitized with get_string."""
  string_transformers = [get_string]
  return SimpleFilter(
      field,
      param_key,
      transformers=string_transformers,
      required=required)
Return a boolean filter that converts yes/no to True/False.
def Boolean(field, param_key, required=False):
  """Build a filter that converts yes/no to True/False."""
  boolean_transformers = [get_boolean]
  return SimpleFilter(
      field,
      param_key,
      transformers=boolean_transformers,
      required=required)
Return a boolean filter that converts yes/no to False/True.
def NegativeBoolean(field, param_key, required=False):
  """Build a filter that converts yes/no to False/True (inverted)."""

  def _negate(value):
    """Invert the already-parsed boolean."""
    return not value

  return SimpleFilter(
      field, param_key, transformers=[get_boolean, _negate], required=required)
Return an int filter.
def Int(field, param_key, required=False, operator=None):
  """Build a filter that casts its value to int."""
  int_transformers = [int]
  return SimpleFilter(
      field,
      param_key,
      transformers=int_transformers,
      required=required,
      operator=operator)
Add filters to query, given the param.
def add(query, params, filters):
  """Apply every filter in |filters| to |query| using |params|."""
  for current_filter in filters:
    current_filter.add(query, params)
Generate a CSRF token.
def generate_csrf_token(length=64, valid_seconds=3600, html=False):
  """Generate a CSRF token.

  Args:
    length: number of random bytes used to build the token value.
    valid_seconds: lifetime of a newly created token.
    html: if True, return a hidden <input> element embedding the value
      instead of the raw value.

  Returns:
    The token value, or an HTML hidden input containing it.
  """
  now = utils.utcnow()
  valid_token = None

  # Clean up expired tokens to prevent junk from building up in the datastore.
  tokens = data_types.CSRFToken.query(
      data_types.CSRFToken.user_email == helpers.get_user_email())
  tokens_to_delete = []
  for token in tokens:
    if token.expiration_time > now:
      # Still valid: reuse instead of creating a new one.
      valid_token = token
      continue

    tokens_to_delete.append(token.key)
  ndb_utils.delete_multi(tokens_to_delete)

  # Generate a new token.
  if not valid_token:
    valid_token = data_types.CSRFToken()
    # NOTE(review): b64encode returns bytes, so `value` is bytes here —
    # presumably the datastore property and template accept that; confirm.
    valid_token.value = base64.b64encode(os.urandom(length))
    valid_token.expiration_time = (
        now + datetime.timedelta(seconds=valid_seconds))
    valid_token.user_email = helpers.get_user_email()
    valid_token.put()

  value = valid_token.value
  if html:
    return '<input type="hidden" name="csrf_token" value="%s" />' % value
  return value
Sign data with the default App Engine service account.
def sign_data(data):
  """Sign data with the default App Engine service account.

  Args:
    data: raw bytes to sign.

  Returns:
    The raw signature bytes.

  Raises:
    GcsError: if the IAM signBlob response cannot be parsed.
  """
  iam = googleapiclient.discovery.build('iamcredentials', 'v1')
  service_account = 'projects/-/serviceAccounts/' + utils.service_account_email(
  )
  response = iam.projects().serviceAccounts().signBlob(  # pylint: disable=no-member
      name=service_account,
      body={
          'delegates': [],
          'payload': base64.b64encode(data).decode('utf-8'),
      }).execute()

  try:
    return base64.b64decode(response['signedBlob'])
  except Exception as e:
    # Chain the cause so malformed IAM responses are easier to debug.
    raise GcsError('Invalid response: ' + str(e)) from e
Return a timestamp |expiry_seconds| from now.
def _get_expiration_time(expiry_seconds): """Return a timestamp |expiry_seconds| from now.""" return int(time.time() + expiry_seconds)
Return a signed url.
def get_signed_url(bucket_name, path, method='GET',
                   expiry=DEFAULT_URL_VALID_SECONDS):
  """Return a signed URL for |path| in |bucket_name|, valid for |expiry|s."""
  timestamp = _get_expiration_time(expiry)
  blob = '%s\n\n\n%d\n/%s/%s' % (method, timestamp, bucket_name, path)

  local_server = environment.get_value('LOCAL_GCS_SERVER_HOST')
  if local_server:
    # Local GCS server path uses placeholder credentials and signature.
    url = local_server + '/' + bucket_name
    signature_bytes = b'SIGNATURE'
    service_account_name = 'service_account'
  else:
    url = STORAGE_URL % bucket_name
    signature_bytes = sign_data(blob.encode('utf-8'))
    service_account_name = utils.service_account_email()

  params = {
      'GoogleAccessId': service_account_name,
      'Expires': timestamp,
      'Signature': base64.b64encode(signature_bytes).decode('utf-8'),
  }
  return str(f'{url}/{path}?{urllib.parse.urlencode(params)}')
Prepare a signed GCS upload.
def prepare_upload(bucket_name, path, expiry=DEFAULT_URL_VALID_SECONDS):
  """Prepare a signed GCS upload.

  Builds a POST policy document restricting the upload to |bucket_name| and
  |path|, capped at MAX_UPLOAD_SIZE bytes, then signs it.

  Args:
    bucket_name: target GCS bucket.
    path: object path inside the bucket.
    expiry: policy validity in seconds.

  Returns:
    A GcsUpload with the URL, policy and signature needed by the client.
  """
  expiration_time = (
      datetime.datetime.utcnow() + datetime.timedelta(seconds=expiry))

  conditions = [
      {
          'key': path
      },
      {
          'bucket': bucket_name
      },
      ['content-length-range', 0, MAX_UPLOAD_SIZE],
      ['starts-with', '$x-goog-meta-filename', ''],
  ]

  policy = base64.b64encode(
      json.dumps({
          'expiration': expiration_time.isoformat() + 'Z',
          'conditions': conditions,
      }).encode('utf-8'))

  local_server = environment.get_value('LOCAL_GCS_SERVER_HOST')
  if local_server:
    # Local GCS server path uses placeholder credentials and signature.
    url = local_server
    signature = b'SIGNATURE'
    service_account_name = 'service_account'
  else:
    url = STORAGE_URL % bucket_name
    signature = base64.b64encode(sign_data(policy))
    service_account_name = utils.service_account_email()

  return GcsUpload(url, bucket_name, path, service_account_name, policy,
                   signature)
Prepare a signed GCS blob upload.
def prepare_blob_upload():
  """Prepare a signed GCS upload for a brand new blob."""
  bucket = storage.blobs_bucket()
  blob_name = blobs.generate_new_blob_name()
  return prepare_upload(bucket, blob_name)
Return a config with auth root.
def _auth_config():
  """Return a config with auth root.

  The config object is created lazily and cached in the module-level
  _auth_config_obj for reuse on subsequent calls.
  """
  global _auth_config_obj
  if not _auth_config_obj:
    _auth_config_obj = local_config.AuthConfig()

  return _auth_config_obj
Extends a request.
def extend_request(req, params):
  """Attach dict-style accessors (get, iterparams) for |params| to |req|."""

  def _get(key, default_value=None):
    """Return the param value for |key|, or |default_value| when absent."""
    return params.get(key, default_value)

  def _iterparams():
    """Yield (key, value) pairs for every param."""
    yield from params.items()

  req.get = _get
  req.iterparams = _iterparams
Extends a request to support JSON.
def extend_json_request(req):
  """Parse |req|'s body as JSON and attach param accessors to it.

  Raises:
    helpers.EarlyExitError: with a 400 if the body is not valid JSON.
  """
  try:
    params = json.loads(req.data)
  except ValueError as e:
    raise helpers.EarlyExitError(
        'Parsing the JSON request body failed: %s' % req.data, 400) from e

  extend_request(req, params)
Wrap a handler with cron.
def cron():
  """Wrap a handler so it only runs when invoked by cron."""

  def decorator(func):
    """Decorator."""

    @functools.wraps(func)
    def wrapper(self):
      """Reject non-cron callers; default a None result to 'OK'."""
      if not self.is_cron():
        raise helpers.AccessDeniedError('You are not a cron.')

      result = func(self)
      return 'OK' if result is None else result

    return wrapper

  return decorator
Wrap a handler with admin checking. This decorator must be below post(..) and get(..) when used.
def check_admin_access(func):
  """Wrap a handler with admin checking.

  This decorator must be below post(..) and get(..) when used.
  """

  @functools.wraps(func)
  def wrapper(self):
    """Reject non-admin users before running |func|."""
    if auth.is_current_user_admin():
      return func(self)

    raise helpers.AccessDeniedError('Admin access is required.')

  return wrapper
Wrap a handler with an admin check if this is OSS-Fuzz. This decorator must be below post(..) and get(..) when used.
def check_admin_access_if_oss_fuzz(func):
  """Wrap a handler with an admin check if this is OSS-Fuzz.

  This decorator must be below post(..) and get(..) when used.
  """

  @functools.wraps(func)
  def wrapper(self):
    """Apply the admin check only on OSS-Fuzz deployments."""
    handler_func = check_admin_access(func) if utils.is_oss_fuzz() else func
    return handler_func(self)

  return wrapper
Wrap a handler to raise error when running in local App Engine development environment. This decorator must be below post(..) and get(..) when used.
def unsupported_on_local_server(func):
  """Wrap a handler to raise an error when running in the local App Engine
  development environment.

  This decorator must be below post(..) and get(..) when used.
  """

  @functools.wraps(func)
  def wrapper(self, *args, **kwargs):
    """Refuse to run on the local development server."""
    if not environment.is_running_on_app_engine_development():
      return func(self, *args, **kwargs)

    raise helpers.EarlyExitError(
        'This feature is not available in local App Engine Development '
        'environment.', 400)

  return wrapper
Get user email from the request. See: https://developers.google.com/identity/protocols/OAuth2InstalledApp
def get_email_and_access_token(authorization):
  """Get user email from the request.

  Validates the Authorization header's bearer token against Google's
  tokeninfo endpoint, accepting either whitelisted service-account emails or
  tokens issued to whitelisted OAuth client IDs.

  See: https://developers.google.com/identity/protocols/OAuth2InstalledApp

  Args:
    authorization: the raw Authorization header value ('Bearer <token>').

  Returns:
    A (email, authorization) tuple on success.

  Raises:
    helpers.UnauthorizedError: if the header or token fails validation.
    helpers.EarlyExitError: if the tokeninfo response cannot be parsed.
  """
  if not authorization.startswith(BEARER_PREFIX):
    raise helpers.UnauthorizedError(
        'The Authorization header is invalid. It should have been started with'
        " '%s'." % BEARER_PREFIX)

  access_token = authorization.split(' ')[1]

  response = requests.get(
      'https://www.googleapis.com/oauth2/v3/tokeninfo',
      params={'access_token': access_token},
      timeout=HTTP_GET_TIMEOUT_SECS)
  if response.status_code != 200:
    raise helpers.UnauthorizedError(
        f'Failed to authorize. The Authorization header ({authorization}) '
        'might be invalid.')

  try:
    data = json.loads(response.text)

    # Whitelist service accounts. They have different client IDs (or aud).
    # Therefore, we check against their email directly.
    if data.get('email_verified') and data.get('email') in _auth_config().get(
        'whitelisted_oauth_emails', default=[]):
      return data['email'], authorization

    # Validate that this is an explicitly whitelisted client ID.
    whitelisted_client_ids = _auth_config().get(
        'whitelisted_oauth_client_ids', default=[])
    if data.get('aud') not in whitelisted_client_ids:
      raise helpers.UnauthorizedError(
          "The access token doesn't belong to one of the allowed OAuth clients"
          ': %s.' % response.text)

    if not data.get('email_verified'):
      raise helpers.UnauthorizedError('The email (%s) is not verified: %s.' %
                                      (data.get('email'), response.text))

    return data['email'], authorization
  except (KeyError, ValueError) as e:
    raise helpers.EarlyExitError(
        'Parsing the JSON response body failed: %s' % response.text,
        500) from e
Wrap a handler with OAuth authentication by reading the Authorization header and getting user email.
def oauth(func):
  """Wrap a handler with OAuth authentication by reading the Authorization
  header and getting user email.
  """

  @functools.wraps(func)
  def wrapper(self):
    """Authenticate via the Authorization header when one is present."""
    auth_header = request.headers.get('Authorization')
    if not auth_header:
      # No header: run the handler unauthenticated.
      return func(self)

    email, returned_auth_header = get_email_and_access_token(auth_header)
    setattr(g, '_oauth_email', email)

    response = make_response(func(self))
    response.headers[CLUSTERFUZZ_AUTHORIZATION_HEADER] = str(
        returned_auth_header)
    response.headers[CLUSTERFUZZ_AUTHORIZATION_IDENTITY] = str(email)
    return response

  return wrapper
Wrap a handler with pubsub push authentication.
def pubsub_push(func):
  """Wrap a handler with pubsub push authentication.

  Verifies the request's bearer token is a valid Google OIDC ID token issued
  to this deployment's service account, then hands the decoded pubsub
  message to |func|.

  Raises:
    helpers.UnauthorizedError: if the token is missing or invalid.
  """

  @functools.wraps(func)
  def wrapper(self):
    """Wrapper."""
    try:
      bearer_token = request.headers.get('Authorization', '')
      if not bearer_token.startswith(BEARER_PREFIX):
        raise helpers.UnauthorizedError('Missing or invalid bearer token.')

      token = bearer_token.split(' ')[1]
      claim = id_token.verify_oauth2_token(token, google_requests.Request())
    except google.auth.exceptions.GoogleAuthError as e:
      raise helpers.UnauthorizedError('Invalid ID token.') from e

    # The token must belong to our own service account with a verified email.
    if (not claim.get('email_verified') or
        claim.get('email') != utils.service_account_email()):
      raise helpers.UnauthorizedError('Invalid ID token.')

    message = pubsub.raw_message_to_message(json.loads(request.data.decode()))
    return func(self, message)

  return wrapper
Wrap a handler with check_user_access. This decorator must be below post(..) and get(..) when used.
def check_user_access(need_privileged_access):
  """Wrap a handler with check_user_access.

  This decorator must be below post(..) and get(..) when used.
  """

  def decorator(func):
    """Decorator."""

    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
      """Verify access (privileged if required) before calling |func|."""
      if access.has_access(need_privileged_access=need_privileged_access):
        return func(self, *args, **kwargs)

      raise helpers.AccessDeniedError()

    return wrapper

  return decorator
Wrap a handler with check_testcase_access. It expects the param `testcaseId`. And it expects func to have testcase as its first argument. This decorator must be below post(..) and get(..) when used.
def check_testcase_access(func):
  """Wrap a handler with check_testcase_access.

  Expects the param `testcaseId`, and expects |func| to take the resolved
  testcase as its first argument. This decorator must be below post(..) and
  get(..) when used.
  """

  @functools.wraps(func)
  def wrapper(self):
    """Resolve testcaseId to a testcase the current user may access."""
    testcase_id = helpers.cast(
        request.get('testcaseId'), int,
        "The param 'testcaseId' is not a number.")
    accessible_testcase = access.check_access_and_get_testcase(testcase_id)
    return func(self, accessible_testcase)

  return wrapper
Wrap a handler with 'Access-Control-Allow-Origin to allow cross-domain AJAX calls.
def allowed_cors(func):
  """Wrap a handler with 'Access-Control-Allow-Origin to allow cross-domain
  AJAX calls."""

  @functools.wraps(func)
  def wrapper(self):
    """Add CORS headers when the Origin matches a whitelisted pattern."""
    origin = request.headers.get('Origin')
    whitelisted_cors_urls = _auth_config().get('whitelisted_cors_urls')
    response = make_response(func(self))

    if not origin or not whitelisted_cors_urls:
      return response

    if not any(
        re.match(pattern, origin) for pattern in whitelisted_cors_urls):
      return response

    response.headers['Access-Control-Allow-Origin'] = origin
    response.headers['Vary'] = 'Origin'
    response.headers['Access-Control-Allow-Credentials'] = 'true'
    response.headers['Access-Control-Allow-Methods'] = 'GET,OPTIONS,POST'
    response.headers['Access-Control-Allow-Headers'] = (
        'Accept,Authorization,Content-Type')
    response.headers['Access-Control-Max-Age'] = '3600'
    return response

  return wrapper
Wrap a POST handler, parse request, and set response's content type.
def post(request_content_type, response_content_type):
  """Wrap a POST handler, parse request, and set response's content type."""

  def _parse_request():
    """Attach param accessors to the request per its content type."""
    if request_content_type == JSON:
      extend_json_request(request)
    elif request_content_type == FORM:
      extend_request(request, request.form)
    else:
      extend_request(request, request.args)

  def _finalize(response):
    """Set Content-Type (and CSP for HTML) on the response."""
    if response_content_type == JSON:
      response.headers['Content-Type'] = 'application/json'
    elif response_content_type == TEXT:
      response.headers['Content-Type'] = 'text/plain'
    elif response_content_type == HTML:
      # Don't enforce content security policies in local development mode.
      if not environment.is_running_on_app_engine_development():
        response.headers['Content-Security-Policy'] = csp.get_default()
    return response

  def decorator(func):
    """Decorator."""

    @functools.wraps(func)
    def wrapper(self):
      """Wrapper."""
      if response_content_type == JSON:
        self.is_json = True

      _parse_request()
      return _finalize(make_response(func(self)))

    return wrapper

  return decorator
Wrap a GET handler and set response's content type.
def get(response_content_type):
  """Wrap a GET handler and set response's content type."""

  def _finalize(response):
    """Set Content-Type (and CSP for HTML) on the response."""
    if response_content_type == JSON:
      response.headers['Content-Type'] = 'application/json'
    elif response_content_type == TEXT:
      response.headers['Content-Type'] = 'text/plain'
    elif response_content_type == HTML:
      # Don't enforce content security policies in local development mode.
      if not environment.is_running_on_app_engine_development():
        response.headers['Content-Security-Policy'] = csp.get_default()
    return response

  def decorator(func):
    """Decorator."""

    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
      """Wrapper."""
      if response_content_type == JSON:
        self.is_json = True

      extend_request(request, request.args)
      return _finalize(make_response(func(self, *args, **kwargs)))

    return wrapper

  return decorator
Wrap a handler to require a valid CSRF token.
def require_csrf_token(func):
  """Wrap a handler to require a valid CSRF token.

  Raises:
    helpers.AccessDeniedError: if the user is not logged in, or the token is
      missing, unknown, or expired.
  """

  # Preserve the handler's metadata, consistent with the other decorators in
  # this module (cron, oauth, check_user_access, ...).
  @functools.wraps(func)
  def wrapper(self, *args, **kwargs):
    """Check to see if this handler has a valid CSRF token provided to it."""
    token_value = request.get('csrf_token')
    user = auth.get_current_user()
    if not user:
      raise helpers.AccessDeniedError('Not logged in.')

    query = data_types.CSRFToken.query(
        data_types.CSRFToken.value == token_value,
        data_types.CSRFToken.user_email == user.email)
    token = query.get()
    if not token:
      raise helpers.AccessDeniedError('Invalid CSRF token.')

    # Make sure that the token is not expired.
    if token.expiration_time < datetime.datetime.utcnow():
      token.key.delete()
      raise helpers.AccessDeniedError('Expired CSRF token.')

    return func(self, *args, **kwargs)

  return wrapper
Get a valid testcase or raise EarlyExitError.
def get_testcase(testcase_id):
  """Return the testcase for |testcase_id| or raise a 404 EarlyExitError."""
  try:
    testcase = data_handler.get_testcase_by_id(testcase_id)
  except errors.InvalidTestcaseError:
    testcase = None

  if testcase:
    return testcase

  raise EarlyExitError("Testcase (id=%s) doesn't exist" % testcase_id, 404)
Get an IssueTracker or raise EarlyExitError.
def get_issue_tracker_for_testcase(testcase):
  """Return the testcase's IssueTracker or raise a 404 EarlyExitError."""
  issue_tracker = issue_tracker_utils.get_issue_tracker_for_testcase(testcase)
  if issue_tracker:
    return issue_tracker

  raise EarlyExitError(
      "The testcase doesn't have a corresponding issue tracker", 404)
Return `fn(value)` or raise an EarlyExitError with 400.
def cast(value, fn, error_message):
  """Return `fn(value)` or raise an EarlyExitError with 400.

  Args:
    value: the raw value to convert.
    fn: the conversion callable (e.g. int).
    error_message: message used in the 400 error on failure.
  """
  try:
    return fn(value)
  except (ValueError, TypeError) as e:
    # Chain the original conversion error for debugging.
    raise EarlyExitError(error_message, 400) from e
Check accepts and content_type to see if we should render JSON.
def should_render_json(accepts, content_type):
  """Check accepts and content_type to see if we should render JSON."""
  if content_type == 'application/json':
    return True
  return 'application/json' in accepts
Check if value is empty value or a tuple of empty values.
def _is_not_empty(value): """Check if value is empty value or a tuple of empty values.""" if isinstance(value, tuple): return any(bool(elem) for elem in value) return bool(value)
Get an entity using `fn`. If the returning entity is nothing (e.g. None or a tuple on Nones), it raises 404. Args: fn: the function to get an entity. It's a function because fn(..) might raise an exception. not_found_message: the 404 HTTP error is raised with not_found_message for an empty entity. error_message: the 500 HTTP error is raised with error_message for any other exception from fn(..). not_found_exception: the type of exception that will be considered as 'not found' as opposed to other errors.
def get_or_exit(fn,
                not_found_message,
                error_message,
                not_found_exception=_DoNotCatchException,
                non_empty_fn=_is_not_empty):
  """Get an entity using `fn`. If the returning entity is nothing (e.g. None
    or a tuple on Nones), it raises 404.

    Args:
      fn: the function to get an entity. It's a function because fn(..) might
        raise an exception.
      not_found_message: the 404 HTTP error is raised with not_found_message
        for an empty entity.
      error_message: the 500 HTTP error is raised with error_message for any
        other exception from fn(..).
      not_found_exception: the type of exception that will be considered as
        'not found' as opposed to other errors.
      non_empty_fn: predicate deciding whether the result counts as found."""
  result = None
  try:
    result = fn()
  except not_found_exception:
    # Treated the same as an empty result; falls through to the 404 below.
    pass
  except Exception as e:
    # Chain the original exception so the root cause is preserved in logs.
    raise EarlyExitError(
        '%s (%s: %s)' % (error_message, sys.exc_info()[0],
                         str(sys.exc_info()[1])), 500) from e

  if non_empty_fn(result):
    return result

  raise EarlyExitError(not_found_message, 404)
Returns currently logged-in user's email.
def get_user_email():
  """Return the currently logged-in user's email, or '' when unavailable."""
  try:
    current = auth.get_current_user()
    return current.email
  except Exception:
    # Deliberate best effort: any failure is treated as "no user".
    return ''
Convenience function for getting an integer datastore key ID.
def get_integer_key(request):
  """Convenience function for getting an integer datastore key ID.

  Raises:
    EarlyExitError: with a 400 if the 'key' param is not a valid integer.
  """
  key = request.get('key')
  try:
    return int(key)
  except (ValueError, KeyError) as e:
    # Chain the original conversion error for debugging.
    raise EarlyExitError('Invalid key format.', 400) from e
Logs operation being carried by current logged-in user.
def log(message, operation_type):
  """Log an operation carried out by the currently logged-in user."""
  user_email = get_user_email()
  logging.info('ClusterFuzz: %s (%s): %s.', operation_type, user_email,
               message)