Initializes the lexer object. Args: data: optional initial data to be processed by the lexer.
def __init__(self, data=''):
  super(Lexer, self).__init__()
  self.buffer = data
  self.error = 0
  self.flags = 0
  self.processed = 0
  self.processed_buffer = ''
  self.state = self._INITIAL_STATE
  self.state_stack = []
  self.verbose = 0
287,967
Push the match back on the stream. Args: string: optional data.
def PushBack(self, string='', **unused_kwargs):
  self.buffer = string + self.buffer
  # Only truncate when string is non-empty; with the default empty string
  # the slice [:-0] would incorrectly clear the entire processed buffer.
  if string:
    self.processed_buffer = self.processed_buffer[:-len(string)]
287,971
Initializes the lexer feeder mix-in object. Args: file_object: Optional file-like object.
def __init__(self, file_object=None):
  super(SelfFeederMixIn, self).__init__()
  self.file_object = file_object
287,972
Feed data into the buffer. Args: size: optional data size to read from the file-like object.
def Feed(self, size=512):
  data = self.file_object.read(size)
  Lexer.Feed(self, data)
  return len(data)
287,973
Adds a new argument to this expression. Args: argument (str): argument to add. Returns: True if the argument is the last argument, False otherwise. Raises: ParseError: If there are too many arguments.
def AddArg(self, argument):
  self.args.append(argument)
  if len(self.args) > self.number_of_args:
    raise errors.ParseError('Too many arguments for this expression.')
  elif len(self.args) == self.number_of_args:
    return True
  return False
287,975
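A minimal sketch of how AddArg's return value can drive a caller's argument loop. The class below is a hypothetical stand-in that only mirrors the args/number_of_args contract, not the actual plaso Expression class:

class _FakeExpression(object):
  """Hypothetical stand-in mimicking the AddArg contract."""

  def __init__(self, number_of_args):
    self.args = []
    self.number_of_args = number_of_args

  def AddArg(self, argument):
    self.args.append(argument)
    if len(self.args) > self.number_of_args:
      raise ValueError('Too many arguments for this expression.')
    if len(self.args) == self.number_of_args:
      return True
    return False

expression = _FakeExpression(number_of_args=2)
assert expression.AddArg('contains') is False  # more arguments expected
assert expression.AddArg('evil.exe') is True   # expression is now complete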
Escape backslashes found inside a string quote. Backslashes followed by anything other than ['"rnbt] will just be included in the string. Args: string: the string that matched. match (re.MatchObject): the match object, where match.group(1) contains the escaped code.
def StringEscape(self, string, match, **unused_kwargs):
  if match.group(1) in '\'"rnbt':
    # codecs.decode is used instead of str.decode, which no longer exists
    # on str in Python 3; this assumes the codecs module is imported.
    self.string += codecs.decode(string, 'unicode_escape')
  else:
    self.string += string
287,983
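StringEscape relies on the unicode_escape codec to turn a two-character escape sequence into the control character it names. A small self-contained demonstration:

import codecs

# The two characters backslash and 'n' decode to a single newline;
# escapes the codec does not recognize pass through unchanged.
assert codecs.decode('\\n', 'unicode_escape') == '\n'
assert codecs.decode('\\t', 'unicode_escape') == '\t'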
Initializes a file entry filter. Args: filename (str): name of the file.
def __init__(self, filename):
  super(FileNameFileEntryFilter, self).__init__()
  self._filename = filename.lower()
287,987
Determines if a file entry matches the filter. Args: file_entry (dfvfs.FileEntry): a file entry. Returns: bool: True if the file entry matches the filter.
def Match(self, file_entry):
  if not file_entry:
    return False

  filename = file_entry.name.lower()
  return filename == self._filename
287,988
Enables parser plugins. Args: plugin_includes (list[str]): names of the plugins to enable, where None or an empty list represents all plugins. Note the default plugin, if it exists, is always enabled and cannot be disabled.
def EnablePlugins(self, plugin_includes):
  self._plugins = []
  if not self._plugin_classes:
    return

  default_plugin_name = '{0:s}_default'.format(self.NAME)
  for plugin_name, plugin_class in self._plugin_classes.items():
    if plugin_name == default_plugin_name:
      self._default_plugin = plugin_class()
      continue

    if plugin_includes and plugin_name not in plugin_includes:
      continue

    plugin_object = plugin_class()
    self._plugins.append(plugin_object)
287,990
Retrieves a specific plugin object by its name. Args: plugin_name (str): name of the plugin. Returns: BasePlugin: a plugin object or None if not available.
def GetPluginObjectByName(cls, plugin_name):
  plugin_class = cls._plugin_classes.get(plugin_name, None)
  if plugin_class:
    return plugin_class()
  return None
287,991
Registers a plugin class. The plugin classes are identified based on their lower case name. Args: plugin_class (type): class of the plugin. Raises: KeyError: if plugin class is already set for the corresponding name.
def RegisterPlugin(cls, plugin_class):
  plugin_name = plugin_class.NAME.lower()
  if plugin_name in cls._plugin_classes:
    raise KeyError(
        'Plugin class already set for name: {0:s}.'.format(
            plugin_class.NAME))

  cls._plugin_classes[plugin_name] = plugin_class
287,993
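A self-contained sketch of the register-and-look-up pattern these two classmethods implement; the names below are hypothetical and only mirror the lower-cased NAME keying shown above:

class PluginRegistry(object):
  """Hypothetical registry mirroring RegisterPlugin/GetPluginObjectByName."""

  _plugin_classes = {}

  @classmethod
  def RegisterPlugin(cls, plugin_class):
    plugin_name = plugin_class.NAME.lower()
    if plugin_name in cls._plugin_classes:
      raise KeyError('Plugin class already set for name: {0:s}.'.format(
          plugin_class.NAME))
    cls._plugin_classes[plugin_name] = plugin_class

  @classmethod
  def GetPluginObjectByName(cls, plugin_name):
    plugin_class = cls._plugin_classes.get(plugin_name, None)
    return plugin_class() if plugin_class else None

class ExamplePlugin(object):
  NAME = 'Example'

PluginRegistry.RegisterPlugin(ExamplePlugin)
assert isinstance(
    PluginRegistry.GetPluginObjectByName('example'), ExamplePlugin)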
Parses the file entry and extracts event objects. Args: parser_mediator (ParserMediator): a parser mediator. Raises: UnableToParseFile: when the file cannot be parsed.
def Parse(self, parser_mediator):
  file_entry = parser_mediator.GetFileEntry()
  if not file_entry:
    raise errors.UnableToParseFile('Invalid file entry')

  parser_mediator.AppendToParserChain(self)
  try:
    self.ParseFileEntry(parser_mediator, file_entry)
  finally:
    parser_mediator.PopFromParserChain()
287,994
Parses a single file-like object. Args: parser_mediator (ParserMediator): a parser mediator. file_object (dfvfs.FileIO): a file-like object to parse. Raises: UnableToParseFile: when the file cannot be parsed.
def Parse(self, parser_mediator, file_object):
  if not file_object:
    raise errors.UnableToParseFile('Invalid file object')

  if self._INITIAL_FILE_OFFSET is not None:
    file_object.seek(self._INITIAL_FILE_OFFSET, os.SEEK_SET)

  parser_mediator.AppendToParserChain(self)
  try:
    self.ParseFileObject(parser_mediator, file_object)
  finally:
    parser_mediator.PopFromParserChain()
287,995
Extracts data from a Windows XML EventLog (EVTX) record. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. record_index (int): event record index. evtx_record (pyevtx.record): event record. recovered (Optional[bool]): True if the record was recovered. Returns: WinEvtxRecordEventData: event data.
def _GetEventData(
    self, parser_mediator, record_index, evtx_record, recovered=False):
  event_data = WinEvtxRecordEventData()

  try:
    event_data.record_number = evtx_record.identifier
  except OverflowError as exception:
    parser_mediator.ProduceExtractionWarning((
        'unable to read record identifier from event record: {0:d} '
        'with error: {1!s}').format(record_index, exception))

  try:
    event_identifier = evtx_record.event_identifier
  except OverflowError as exception:
    parser_mediator.ProduceExtractionWarning((
        'unable to read event identifier from event record: {0:d} '
        'with error: {1!s}').format(record_index, exception))

    event_identifier = None

  try:
    event_identifier_qualifiers = evtx_record.event_identifier_qualifiers
  except OverflowError as exception:
    parser_mediator.ProduceExtractionWarning((
        'unable to read event identifier qualifiers from event record: '
        '{0:d} with error: {1!s}').format(record_index, exception))

    event_identifier_qualifiers = None

  event_data.offset = evtx_record.offset
  event_data.recovered = recovered

  if event_identifier is not None:
    event_data.event_identifier = event_identifier

    if event_identifier_qualifiers is not None:
      event_data.message_identifier = (
          (event_identifier_qualifiers << 16) | event_identifier)

  event_data.event_level = evtx_record.event_level
  event_data.source_name = evtx_record.source_name

  # Computer name is the value stored in the event record and does not
  # necessarily correspond with the actual hostname.
  event_data.computer_name = evtx_record.computer_name
  event_data.user_sid = evtx_record.user_security_identifier

  event_data.strings = list(evtx_record.strings)
  event_data.strings_parsed = {}
  if event_identifier in self._EVTX_FIELD_MAP:
    rules = self._EVTX_FIELD_MAP.get(event_identifier, [])
    for rule in rules:
      if len(evtx_record.strings) <= rule.index:
        parser_mediator.ProduceExtractionWarning((
            'evtx_record.strings has unexpected length of {0:d} '
            '(expected at least {1:d})').format(
                len(evtx_record.strings), rule.index))
        # Skip the rule to avoid an IndexError on the strings list.
        continue
      event_data.strings_parsed[rule.name] = evtx_record.strings[rule.index]

  event_data.xml_string = evtx_record.xml_string

  return event_data
287,998
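The message identifier above packs the qualifiers into the high 16 bits of a 32-bit value with the event identifier in the low 16 bits. A small worked example:

event_identifier = 4624          # e.g. a successful logon record
event_identifier_qualifiers = 0  # typically 0 for EVTX records

message_identifier = (event_identifier_qualifiers << 16) | event_identifier
assert message_identifier == 4624

# With non-zero qualifiers, say 0x8000, the two halves stay separable:
message_identifier = (0x8000 << 16) | event_identifier
assert message_identifier >> 16 == 0x8000
assert message_identifier & 0xffff == 4624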
Extracts data from a Windows XML EventLog (EVTX) record and produces an event. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. record_index (int): event record index. evtx_record (pyevtx.record): event record. recovered (Optional[bool]): True if the record was recovered.
def _ParseRecord(
    self, parser_mediator, record_index, evtx_record, recovered=False):
  event_data = self._GetEventData(
      parser_mediator, record_index, evtx_record, recovered=recovered)

  try:
    written_time = evtx_record.get_written_time_as_integer()
  except OverflowError as exception:
    parser_mediator.ProduceExtractionWarning((
        'unable to read written time from event record: {0:d} '
        'with error: {1!s}').format(record_index, exception))

    written_time = None

  if not written_time:
    date_time = dfdatetime_semantic_time.SemanticTime('Not set')
  else:
    date_time = dfdatetime_filetime.Filetime(timestamp=written_time)

  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_WRITTEN)
  parser_mediator.ProduceEventWithEventData(event, event_data)
287,999
Parses Windows XML EventLog (EVTX) records. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. evtx_file (pyevtx.file): Windows XML EventLog (EVTX) file.
def _ParseRecords(self, parser_mediator, evtx_file):
  # To handle errors when parsing a Windows XML EventLog (EVTX) file in the
  # most granular way the following code iterates over every event record.
  # The call to evtx_file.get_record() and access to members of evtx_record
  # should be called within a try-except.
  for record_index in range(evtx_file.number_of_records):
    if parser_mediator.abort:
      break

    try:
      evtx_record = evtx_file.get_record(record_index)
      self._ParseRecord(parser_mediator, record_index, evtx_record)
    except IOError as exception:
      parser_mediator.ProduceExtractionWarning(
          'unable to parse event record: {0:d} with error: {1!s}'.format(
              record_index, exception))

  for record_index in range(evtx_file.number_of_recovered_records):
    if parser_mediator.abort:
      break

    try:
      evtx_record = evtx_file.get_recovered_record(record_index)
      self._ParseRecord(
          parser_mediator, record_index, evtx_record, recovered=True)
    except IOError as exception:
      parser_mediator.ProduceExtractionWarning((
          'unable to parse recovered event record: {0:d} with error: '
          '{1!s}').format(record_index, exception))
288,000
Parses a Windows XML EventLog (EVTX) file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): a file-like object.
def ParseFileObject(self, parser_mediator, file_object):
  evtx_file = pyevtx.file()
  evtx_file.set_ascii_codepage(parser_mediator.codepage)

  try:
    evtx_file.open_file_object(file_object)
  except IOError as exception:
    parser_mediator.ProduceExtractionWarning(
        'unable to open file with error: {0!s}'.format(exception))
    return

  try:
    self._ParseRecords(parser_mediator, evtx_file)
  finally:
    evtx_file.close()
288,001
Determines the formatted message strings for an event object. Args: formatter_mediator (FormatterMediator): mediates the interactions between formatters and other components, such as storage and Windows EventLog resources. event (EventObject): event. Returns: tuple(str, str): formatted message string and short message string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter.
def GetMessages(self, formatter_mediator, event):
  if self.DATA_TYPE != event.data_type:
    raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format(
        event.data_type))

  event_values = event.CopyToDict()

  read_receipt = event_values.get('read_receipt', None)
  if read_receipt is not None:
    event_values['read_receipt'] = (
        self._READ_RECEIPT.get(read_receipt, 'UNKNOWN'))

  message_type = event_values.get('message_type', None)
  if message_type is not None:
    event_values['message_type'] = (
        self._MESSAGE_TYPE.get(message_type, 'UNKNOWN'))

  return self._ConditionalFormatMessages(event_values)
288,002
Parses a Windows Prefetch file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): file-like object.
def ParseFileObject(self, parser_mediator, file_object):
  scca_file = pyscca.file()

  try:
    scca_file.open_file_object(file_object)
  except IOError as exception:
    parser_mediator.ProduceExtractionWarning(
        'unable to open file with error: {0!s}'.format(exception))
    return

  format_version = scca_file.format_version
  executable_filename = scca_file.executable_filename
  prefetch_hash = scca_file.prefetch_hash
  run_count = scca_file.run_count
  number_of_volumes = scca_file.number_of_volumes

  volume_serial_numbers = []
  volume_device_paths = []
  path = ''

  for volume_information in iter(scca_file.volumes):
    volume_serial_number = volume_information.serial_number
    volume_device_path = volume_information.device_path

    volume_serial_numbers.append(volume_serial_number)
    volume_device_paths.append(volume_device_path)

    timestamp = volume_information.get_creation_time_as_integer()
    if timestamp:
      event_data = windows_events.WindowsVolumeEventData()
      event_data.device_path = volume_device_path
      event_data.origin = parser_mediator.GetFilename()
      event_data.serial_number = volume_serial_number

      date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_CREATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    for filename in iter(scca_file.filenames):
      if not filename:
        continue

      if (filename.startswith(volume_device_path) and
          filename.endswith(executable_filename)):
        _, _, path = filename.partition(volume_device_path)

  mapped_files = []
  for entry_index, file_metrics in enumerate(
      scca_file.file_metrics_entries):
    mapped_file_string = file_metrics.filename
    if not mapped_file_string:
      parser_mediator.ProduceExtractionWarning(
          'missing filename for file metrics entry: {0:d}'.format(
              entry_index))
      continue

    file_reference = file_metrics.file_reference
    if file_reference:
      mapped_file_string = (
          '{0:s} [MFT entry: {1:d}, sequence: {2:d}]').format(
              mapped_file_string, file_reference & 0xffffffffffff,
              file_reference >> 48)

    mapped_files.append(mapped_file_string)

  event_data = WinPrefetchExecutionEventData()
  event_data.executable = executable_filename
  event_data.mapped_files = mapped_files
  event_data.number_of_volumes = number_of_volumes
  event_data.path = path
  event_data.prefetch_hash = prefetch_hash
  event_data.run_count = run_count
  event_data.version = format_version
  event_data.volume_device_paths = volume_device_paths
  event_data.volume_serial_numbers = volume_serial_numbers

  timestamp = scca_file.get_last_run_time_as_integer(0)
  if not timestamp:
    parser_mediator.ProduceExtractionWarning('missing last run time')
    date_time = dfdatetime_semantic_time.SemanticTime('Not set')
  else:
    date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)

  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_LAST_RUN)
  parser_mediator.ProduceEventWithEventData(event, event_data)

  # Check for the 7 older last run time values available since
  # format version 26.
  if format_version >= 26:
    for last_run_time_index in range(1, 8):
      timestamp = scca_file.get_last_run_time_as_integer(
          last_run_time_index)
      if not timestamp:
        continue

      date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
      date_time_description = 'Previous {0:s}'.format(
          definitions.TIME_DESCRIPTION_LAST_RUN)
      event = time_events.DateTimeValuesEvent(
          date_time, date_time_description)
      parser_mediator.ProduceEventWithEventData(event, event_data)

  scca_file.close()
288,005
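The mapped-file strings above embed an NTFS file reference: a 64-bit value whose low 48 bits are the MFT entry number and whose high 16 bits are the sequence number. A standalone demonstration of the bit operations:

# NTFS file reference layout: low 48 bits = MFT entry, high 16 bits = sequence.
file_reference = (3 << 48) | 42  # sequence 3, MFT entry 42

mft_entry = file_reference & 0xffffffffffff
sequence = file_reference >> 48
assert (mft_entry, sequence) == (42, 3)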
Adds command line arguments to an argument group. This function takes an argument parser or an argument group object and adds to it all the command line arguments this helper supports. Args: argument_group (argparse._ArgumentGroup|argparse.ArgumentParser): argparse group.
def AddArguments(cls, argument_group):
  argument_group.add_argument(
      '--analysis', metavar='PLUGIN_LIST', dest='analysis_plugins',
      default='', action='store', type=str, help=(
          'A comma separated list of analysis plugin names to be loaded '
          'or "--analysis list" to see a list of available plugins.'))

  arguments = sys.argv[1:]
  argument_index = 0

  if '--analysis' in arguments:
    argument_index = arguments.index('--analysis') + 1

  if 0 < argument_index < len(arguments):
    names = [name.strip() for name in arguments[argument_index].split(',')]
  else:
    names = None

  if names and names != ['list']:
    manager.ArgumentHelperManager.AddCommandLineArguments(
        argument_group, category='analysis', names=names)
288,006
Parses and validates options. Args: options (argparse.Namespace): parser options. configuration_object (CLITool): object to be configured by the argument helper. Raises: BadConfigObject: when the configuration object is of the wrong type.
def ParseOptions(cls, options, configuration_object):
  if not isinstance(configuration_object, tools.CLITool):
    raise errors.BadConfigObject(
        'Configuration object is not an instance of CLITool')

  analysis_plugins = cls._ParseStringOption(options, 'analysis_plugins')
  if analysis_plugins and analysis_plugins.lower() != 'list':
    plugin_names = analysis_manager.AnalysisPluginManager.GetPluginNames()
    analysis_plugins = [
        name.strip() for name in analysis_plugins.split(',')]

    difference = set(analysis_plugins).difference(plugin_names)
    if difference:
      raise errors.BadConfigOption(
          'Non-existent analysis plugins specified: {0:s}'.format(
              ' '.join(difference)))

  setattr(configuration_object, '_analysis_plugins', analysis_plugins)
288,007
Parses and validates options. Args: options (argparse.Namespace): parser options. configuration_object (CLITool): object to be configured by the argument helper. Raises: BadConfigObject: when the configuration object is of the wrong type. BadConfigOption: when a configuration parameter fails validation.
def ParseOptions(cls, options, configuration_object):
  if not isinstance(configuration_object, tools.CLITool):
    raise errors.BadConfigObject(
        'Configuration object is not an instance of CLITool')

  number_of_extraction_workers = cls._ParseNumericOption(
      options, 'workers', default_value=0)
  if number_of_extraction_workers < 0:
    raise errors.BadConfigOption(
        'Invalid number of extraction workers, value cannot be negative.')

  worker_memory_limit = cls._ParseNumericOption(
      options, 'worker_memory_limit')
  if worker_memory_limit and worker_memory_limit < 0:
    raise errors.BadConfigOption(
        'Invalid worker memory limit, value cannot be negative.')

  setattr(
      configuration_object, '_number_of_extraction_workers',
      number_of_extraction_workers)
  setattr(configuration_object, '_worker_memory_limit', worker_memory_limit)
288,008
Parses the UpdateKey value. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_value (dfwinreg.WinRegistryValue): Windows Registry value. key_path (str): Windows Registry key path.
def _ParseUpdateKeyValue(self, parser_mediator, registry_value, key_path):
  if not registry_value.DataIsString():
    parser_mediator.ProduceExtractionWarning(
        'unsupported UpdateKey value data type: {0:s}'.format(
            registry_value.data_type_string))
    return

  date_time_string = registry_value.GetDataAsObject()
  if not date_time_string:
    parser_mediator.ProduceExtractionWarning('missing UpdateKey value data')
    return

  re_match = self._UPDATE_DATE_TIME_RE.match(date_time_string)
  if not re_match:
    parser_mediator.ProduceExtractionWarning(
        'unsupported UpdateKey value data: {0!s}'.format(date_time_string))
    return

  month, day_of_month, year, hours, minutes, seconds, part_of_day = (
      re_match.groups())

  try:
    year = int(year, 10)
    month = int(month, 10)
    day_of_month = int(day_of_month, 10)
    hours = int(hours, 10)
    minutes = int(minutes, 10)
    seconds = int(seconds, 10)
  except (TypeError, ValueError):
    parser_mediator.ProduceExtractionWarning(
        'invalid UpdateKey date time value: {0!s}'.format(date_time_string))
    return

  # Convert the 12-hour clock reading to 24-hour. Without the hours < 12
  # check a 12:xx PM value would incorrectly become hour 24; the AM branch
  # maps 12:xx AM (midnight) to hour 0.
  if part_of_day == 'PM' and hours < 12:
    hours += 12
  elif part_of_day == 'AM' and hours == 12:
    hours = 0

  time_elements_tuple = (year, month, day_of_month, hours, minutes, seconds)

  try:
    date_time = dfdatetime_time_elements.TimeElements(
        time_elements_tuple=time_elements_tuple)
    date_time.is_local_time = True
  except ValueError:
    parser_mediator.ProduceExtractionWarning(
        'invalid UpdateKey date time value: {0!s}'.format(
            time_elements_tuple))
    return

  event_data = CCleanerUpdateEventData()
  event_data.key_path = key_path

  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_UPDATE,
      time_zone=parser_mediator.timezone)
  parser_mediator.ProduceEventWithEventData(event, event_data)
288,010
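A tiny standalone check of the 12-hour to 24-hour conversion performed above, covering the noon and midnight edge cases; the helper name is illustrative:

def _To24Hour(hours, part_of_day):
  """Converts a 12-hour clock reading to a 24-hour value."""
  if part_of_day == 'PM' and hours < 12:
    return hours + 12
  if part_of_day == 'AM' and hours == 12:
    return 0
  return hours

assert _To24Hour(10, 'AM') == 10
assert _To24Hour(3, 'PM') == 15
assert _To24Hour(12, 'PM') == 12  # noon
assert _To24Hour(12, 'AM') == 0   # midnight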
Extracts events from a Windows Registry key. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
  values_dict = {}
  for registry_value in registry_key.GetValues():
    if not registry_value.name or not registry_value.data:
      continue

    if registry_value.name == 'UpdateKey':
      self._ParseUpdateKeyValue(
          parser_mediator, registry_value, registry_key.path)
    else:
      values_dict[registry_value.name] = registry_value.GetDataAsObject()

  event_data = windows_events.WindowsRegistryEventData()
  event_data.key_path = registry_key.path
  event_data.offset = registry_key.offset
  event_data.regvalue = values_dict
  event_data.source_append = self._SOURCE_APPEND
  event_data.urls = self.URLS

  event = time_events.DateTimeValuesEvent(
      registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
  parser_mediator.ProduceEventWithEventData(event, event_data)
288,011
Sets the hashers that should be enabled. Args: hasher_names_string (str): comma separated names of hashers to enable.
def SetHasherNames(self, hasher_names_string):
  hasher_names = hashers_manager.HashersManager.GetHasherNamesFromString(
      hasher_names_string)

  debug_hasher_names = ', '.join(hasher_names)
  logger.debug('Got hasher names: {0:s}'.format(debug_hasher_names))

  self._hashers = hashers_manager.HashersManager.GetHashers(hasher_names)
  self._hasher_names_string = hasher_names_string
288,015
Creates a task storage writer. Args: path (str): path to the storage file. task (Task): task. Returns: SQLiteStorageFileWriter: storage writer.
def _CreateTaskStorageWriter(self, path, task):
  return SQLiteStorageFileWriter(
      self._session, path,
      storage_type=definitions.STORAGE_TYPE_TASK, task=task)
288,016
Extracts events from a Windows Registry key. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
  values_dict = {}

  if registry_key.number_of_values > 0:
    for registry_value in registry_key.GetValues():
      value_name = registry_value.name or '(default)'

      if registry_value.DataIsString():
        value_string = '[{0:s}] {1:s}'.format(
            registry_value.data_type_string,
            registry_value.GetDataAsObject())
      elif registry_value.DataIsInteger():
        value_string = '[{0:s}] {1:d}'.format(
            registry_value.data_type_string,
            registry_value.GetDataAsObject())
      elif registry_value.DataIsMultiString():
        value_string = '[{0:s}] {1:s}'.format(
            registry_value.data_type_string,
            ''.join(registry_value.GetDataAsObject()))
      else:
        value_string = '[{0:s}]'.format(registry_value.data_type_string)

      values_dict[value_name] = value_string

  # Generate at least one event object for the key.
  event_data = windows_events.WindowsRegistryEventData()
  event_data.key_path = registry_key.path
  event_data.offset = registry_key.offset
  event_data.regvalue = values_dict
  event_data.urls = self.URLS

  event = time_events.DateTimeValuesEvent(
      registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
  parser_mediator.ProduceEventWithEventData(event, event_data)

  if registry_key.number_of_subkeys == 0:
    error_string = 'Key: {0:s} missing subkeys.'.format(registry_key.path)
    parser_mediator.ProduceExtractionWarning(error_string)
    return

  for zone_key in registry_key.GetSubkeys():
    # TODO: these values are stored in the Description value of the
    # zone key. This solution will break on zone values that are larger
    # than 5.
    path = '{0:s}\\{1:s}'.format(
        registry_key.path, self._ZONE_NAMES[zone_key.name])

    values_dict = {}

    # TODO: this plugin currently just dumps the values and does not
    # distinguish between what is a feature control or not.
    for value in zone_key.GetValues():
      # Ignore the default value.
      if not value.name:
        continue

      if value.DataIsString():
        value_string = value.GetDataAsObject()
      elif value.DataIsInteger():
        value_integer = value.GetDataAsObject()
        if value.name in self._KNOWN_PERMISSIONS_VALUE_NAMES:
          value_string = self._CONTROL_VALUES_PERMISSIONS.get(
              value_integer, 'UNKNOWN')
        elif value.name == '1A00':
          value_string = self._CONTROL_VALUES_1A00.get(
              value_integer, 'UNKNOWN')
        elif value.name == '1C00':
          value_string = self._CONTROL_VALUES_1C00.get(
              value_integer, 'UNKNOWN')
        elif value.name == '1E05':
          value_string = self._CONTROL_VALUES_SAFETY.get(
              value_integer, 'UNKNOWN')
        else:
          value_string = '{0:d}'.format(value_integer)
      else:
        value_string = '[{0:s}]'.format(value.data_type_string)

      if len(value.name) == 4 and value.name != 'Icon':
        value_description = self._FEATURE_CONTROLS.get(
            value.name, 'UNKNOWN')
      else:
        value_description = self._FEATURE_CONTROLS.get(value.name, '')

      if value_description:
        feature_control = '[{0:s}] {1:s}'.format(
            value.name, value_description)
      else:
        feature_control = '[{0:s}]'.format(value.name)

      values_dict[feature_control] = value_string

    event_data = windows_events.WindowsRegistryEventData()
    event_data.key_path = path
    event_data.offset = zone_key.offset
    event_data.regvalue = values_dict
    event_data.urls = self.URLS

    event = time_events.DateTimeValuesEvent(
        zone_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)
288,017
Formats a field. Args: field (str): field value. Returns: str: formatted field value.
def _FormatField(self, field):
  if self._FIELD_DELIMITER and isinstance(field, py2to3.STRING_TYPES):
    return field.replace(self._FIELD_DELIMITER, ' ')
  return field
288,018
Formats the hostname. Args: event (EventObject): event. Returns: str: formatted hostname field.
def _FormatHostname(self, event):
  hostname = self._output_mediator.GetHostname(event)
  return self._FormatField(hostname)
288,019
Formats the username. Args: event (EventObject): event. Returns: str: formatted username field.
def _FormatUsername(self, event):
  username = self._output_mediator.GetUsername(event)
  return self._FormatField(username)
288,020
Retrieves output values. Args: event (EventObject): event. Returns: list[str]: output values or None if no timestamp was present in the event. Raises: NoFormatterFound: If no event formatter can be found to match the data type in the event.
def _GetOutputValues(self, event):
  if not hasattr(event, 'timestamp'):
    logger.error('Unable to output event without timestamp.')
    return None

  # TODO: add function to pass event_values to GetFormattedMessages.
  message, message_short = self._output_mediator.GetFormattedMessages(event)
  if message is None or message_short is None:
    data_type = getattr(event, 'data_type', 'UNKNOWN')
    raise errors.NoFormatterFound(
        'Unable to find event formatter for: {0:s}.'.format(data_type))

  # TODO: add function to pass event_values to GetFormattedSources.
  source_short, source = self._output_mediator.GetFormattedSources(event)
  if source is None or source_short is None:
    data_type = getattr(event, 'data_type', 'UNKNOWN')
    raise errors.NoFormatterFound(
        'Unable to find event formatter for: {0:s}.'.format(data_type))

  # TODO: preserve dfdatetime as an object.
  # TODO: add support for self._output_mediator.timezone
  date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
      timestamp=event.timestamp)

  format_variables = self._output_mediator.GetFormatStringAttributeNames(
      event)
  if format_variables is None:
    data_type = getattr(event, 'data_type', 'UNKNOWN')
    raise errors.NoFormatterFound(
        'Unable to find event formatter for: {0:s}.'.format(data_type))

  extra_attributes = []
  for attribute_name, attribute_value in sorted(event.GetAttributes()):
    if (attribute_name in definitions.RESERVED_VARIABLE_NAMES or
        attribute_name in format_variables):
      continue

    # With ! in {1!s} we force a string conversion since some of
    # the extra attributes values can be integer, floating point or
    # boolean values.
    extra_attributes.append(
        '{0:s}: {1!s}'.format(attribute_name, attribute_value))

  extra_attributes = '; '.join(extra_attributes)
  extra_attributes = extra_attributes.replace('\n', '-').replace('\r', '')

  inode = getattr(event, 'inode', None)
  if inode is None:
    if hasattr(event, 'pathspec') and hasattr(
        event.pathspec, 'image_inode'):
      inode = event.pathspec.image_inode
  if inode is None:
    inode = '-'

  hostname = self._FormatHostname(event)
  username = self._FormatUsername(event)

  notes = []
  note_string = getattr(event, 'notes', None)
  if note_string:
    notes.append(note_string)

  tag = getattr(event, 'tag', None)
  if tag:
    notes.extend(tag.labels)

  if not notes:
    notes.append('-')

  year, month, day_of_month = date_time.GetDate()
  hours, minutes, seconds = date_time.GetTimeOfDay()

  try:
    date_string = '{0:02d}/{1:02d}/{2:04d}'.format(
        month, day_of_month, year)
    time_string = '{0:02d}:{1:02d}:{2:02d}'.format(hours, minutes, seconds)
  except (TypeError, ValueError):
    self._ReportEventError(event, (
        'unable to copy timestamp: {0!s} to a human readable date and '
        'time. Defaulting to: "00/00/0000" "--:--:--"').format(
            event.timestamp))

    date_string = '00/00/0000'
    time_string = '--:--:--'

  output_values = [
      date_string,
      time_string,
      '{0!s}'.format(self._output_mediator.timezone),
      '....',
      source_short,
      source,
      '-',
      username,
      hostname,
      message_short,
      message,
      '2',
      getattr(event, 'display_name', '-'),
      '{0!s}'.format(inode),
      ' '.join(notes),
      getattr(event, 'parser', '-'),
      extra_attributes]

  return output_values
288,021
Writes values to the output. Args: output_values (list[str]): output values.
def _WriteOutputValues(self, output_values):
  for index, value in enumerate(output_values):
    if not isinstance(value, py2to3.STRING_TYPES):
      value = ''
    output_values[index] = value.replace(',', ' ')

  output_line = ','.join(output_values)
  output_line = '{0:s}\n'.format(output_line)
  self._output_writer.Write(output_line)
288,022
Writes the body of an event object to the output. Args: event (EventObject): event. Raises: NoFormatterFound: If no event formatter can be found to match the data type in the event object.
def WriteEventBody(self, event):
  output_values = self._GetOutputValues(event)

  output_values[3] = self._output_mediator.GetMACBRepresentation(event)
  output_values[6] = event.timestamp_desc or '-'

  self._WriteOutputValues(output_values)
288,023
Writes an event MACB group to the output. Args: event_macb_group (list[EventObject]): event MACB group.
def WriteEventMACBGroup(self, event_macb_group):
  output_values = self._GetOutputValues(event_macb_group[0])

  timestamp_descriptions = [
      event.timestamp_desc for event in event_macb_group]
  output_values[3] = (
      self._output_mediator.GetMACBRepresentationFromDescriptions(
          timestamp_descriptions))
  # TODO: fix timestamp description in source.
  output_values[6] = '; '.join(timestamp_descriptions)

  self._WriteOutputValues(output_values)
288,024
Parses and validates options. Args: options (argparse.Namespace): parser options. configuration_object (CLITool): object to be configured by the argument helper. Raises: BadConfigObject: when the configuration object is of the wrong type. BadConfigOption: if the required artifact definitions are not defined.
def ParseOptions(cls, options, configuration_object):
  if not isinstance(configuration_object, tools.CLITool):
    raise errors.BadConfigObject(
        'Configuration object is not an instance of CLITool')

  artifact_filters = cls._ParseStringOption(
      options, 'artifact_filter_string')
  artifact_filters_file = cls._ParseStringOption(
      options, 'artifact_filters_file')
  filter_file = cls._ParseStringOption(options, 'file_filter')

  if artifact_filters and artifact_filters_file:
    raise errors.BadConfigOption(
        'Please only specify artifact definition names in a file '
        'or on the command line.')

  if (artifact_filters_file or artifact_filters) and filter_file:
    raise errors.BadConfigOption(
        'Please do not specify both artifact definitions and legacy '
        'filters.')

  if artifact_filters_file and os.path.isfile(artifact_filters_file):
    with open(artifact_filters_file) as file_object:
      file_content = file_object.read()
      artifact_filters = file_content.splitlines()
  elif artifact_filters:
    artifact_filters = [
        name.strip() for name in artifact_filters.split(',')]

  setattr(configuration_object, '_artifact_filters', artifact_filters)
288,027
Extracts events from a Windows Registry key. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
  for subkey in registry_key.GetSubkeys():
    name = subkey.name
    if not name:
      continue

    values_dict = {}
    values_dict['Volume'] = name

    label_value = subkey.GetValueByName('_LabelFromReg')
    if label_value:
      values_dict['Label'] = label_value.GetDataAsObject()

    if name.startswith('{'):
      values_dict['Type'] = 'Volume'
    elif name.startswith('#'):
      # The format is: ##Server_Name#Share_Name.
      values_dict['Type'] = 'Remote Drive'
      server_name, _, share_name = name[2:].partition('#')
      values_dict['Remote_Server'] = server_name
      values_dict['Share_Name'] = '\\{0:s}'.format(
          share_name.replace('#', '\\'))
    else:
      values_dict['Type'] = 'Drive'

    event_data = windows_events.WindowsRegistryEventData()
    event_data.key_path = registry_key.path
    event_data.offset = subkey.offset
    event_data.regvalue = values_dict
    event_data.urls = self.URLS

    event = time_events.DateTimeValuesEvent(
        subkey.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)
288,028
Determines the formatted message strings for an event object. Args: formatter_mediator (FormatterMediator): mediates the interactions between formatters and other components, such as storage and Windows EventLog resources. event (EventObject): event. Returns: tuple(str, str): formatted message string and short message string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter.
def GetMessages(self, formatter_mediator, event):
  if self.DATA_TYPE != event.data_type:
    raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format(
        event.data_type))

  event_values = event.CopyToDict()

  primary_url = event_values['primary_url']
  secondary_url = event_values['secondary_url']

  # There is apparently a bug, either in GURL.cc or
  # content_settings_pattern.cc where URLs with file:// scheme are stored
  # in the URL as an empty string, which is later detected as being
  # Invalid, and Chrome produces the following example logs:
  # content_settings_pref.cc(469)] Invalid pattern strings:
  #     https://a.com:443,
  # content_settings_pref.cc(295)] Invalid pattern strings: ,
  # content_settings_pref.cc(295)] Invalid pattern strings: ,*
  # More research needed, could be related to https://crbug.com/132659
  if primary_url == '':
    subject = 'local file'
  elif secondary_url in (primary_url, '*'):
    subject = primary_url
  elif secondary_url == '':
    subject = '{0:s} embedded in local file'.format(primary_url)
  else:
    subject = '{0:s} embedded in {1:s}'.format(primary_url, secondary_url)

  event_values['subject'] = subject

  return self._ConditionalFormatMessages(event_values)
288,030
Parses and validates options. Args: options (argparse.Namespace): parser options. configuration_object (CLITool): object to be configured by the argument helper. Raises: BadConfigObject: when the configuration object is of the wrong type.
def ParseOptions(cls, options, configuration_object):
  if not isinstance(configuration_object, tools.CLITool):
    raise errors.BadConfigObject(
        'Configuration object is not an instance of CLITool')

  storage_file = cls._ParseStringOption(options, 'storage_file')
  setattr(configuration_object, '_storage_file_path', storage_file)
288,031
Extracts extension installation events. Args: settings_dict (dict[str, object]): settings data from a Preferences file. parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs.
def _ExtractExtensionInstallEvents(self, settings_dict, parser_mediator):
  for extension_id, extension in sorted(settings_dict.items()):
    install_time = extension.get('install_time', None)
    if not install_time:
      parser_mediator.ProduceExtractionWarning(
          'installation time missing for extension ID {0:s}'.format(
              extension_id))
      continue

    try:
      install_time = int(install_time, 10)
    except ValueError:
      parser_mediator.ProduceExtractionWarning((
          'unable to convert installation time for extension ID '
          '{0:s}').format(extension_id))
      continue

    manifest = extension.get('manifest', None)
    if not manifest:
      parser_mediator.ProduceExtractionWarning(
          'manifest missing for extension ID {0:s}'.format(extension_id))
      continue

    event_data = ChromeExtensionInstallationEventData()
    event_data.extension_id = extension_id
    event_data.extension_name = manifest.get('name', None)
    event_data.path = extension.get('path', None)

    date_time = dfdatetime_webkit_time.WebKitTime(timestamp=install_time)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_ADDED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
288,036
Extracts site-specific events. Args: exceptions_dict (dict): permission exceptions data from a Preferences file. parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs.
def _ExtractContentSettingsExceptions(self, exceptions_dict, parser_mediator):
  for permission in exceptions_dict:
    if permission not in self._EXCEPTIONS_KEYS:
      continue

    exception_dict = exceptions_dict.get(permission, {})
    for urls, url_dict in exception_dict.items():
      last_used = url_dict.get('last_used', None)
      if not last_used:
        continue

      # If secondary_url is '*', the permission applies to primary_url.
      # If secondary_url is a valid URL, the permission applies to
      # elements loaded from secondary_url being embedded in primary_url.
      primary_url, secondary_url = urls.split(',')

      event_data = ChromeContentSettingsExceptionsEventData()
      event_data.permission = permission
      event_data.primary_url = primary_url
      event_data.secondary_url = secondary_url

      timestamp = int(last_used * 1000000)
      date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
          timestamp=timestamp)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
      parser_mediator.ProduceEventWithEventData(event, event_data)
288,037
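A small illustration of the key split and timestamp conversion above, using a made-up exceptions entry shaped like the Preferences data the method expects:

# Hypothetical exceptions entry: the key packs "primary,secondary" URLs
# and last_used is a POSIX timestamp in (fractional) seconds.
urls = 'https://example.com:443,*'
url_dict = {'last_used': 1544467418.5}

primary_url, secondary_url = urls.split(',')
assert primary_url == 'https://example.com:443'
assert secondary_url == '*'  # permission applies to primary_url itself

# Convert seconds to the microsecond precision dfdatetime expects.
timestamp = int(url_dict['last_used'] * 1000000)
assert timestamp == 1544467418500000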
Parses a Chrome preferences file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): file-like object. Raises: UnableToParseFile: when the file cannot be parsed.
def ParseFileObject(self, parser_mediator, file_object):
  # First pass check for initial character being open brace.
  if file_object.read(1) != b'{':
    raise errors.UnableToParseFile((
        '[{0:s}] {1:s} is not a valid Preference file, '
        'missing opening brace.').format(
            self.NAME, parser_mediator.GetDisplayName()))

  file_object.seek(0, os.SEEK_SET)
  file_content = file_object.read()
  file_content = codecs.decode(file_content, self._ENCODING)

  # Second pass to verify it's valid JSON.
  try:
    json_dict = json.loads(file_content)
  except ValueError as exception:
    raise errors.UnableToParseFile((
        '[{0:s}] Unable to parse file {1:s} as JSON: {2!s}').format(
            self.NAME, parser_mediator.GetDisplayName(), exception))
  except IOError as exception:
    raise errors.UnableToParseFile((
        '[{0:s}] Unable to open file {1:s} for parsing as '
        'JSON: {2!s}').format(
            self.NAME, parser_mediator.GetDisplayName(), exception))

  # Third pass to verify the file has the correct keys in it for
  # Preferences.
  if not set(self.REQUIRED_KEYS).issubset(set(json_dict.keys())):
    raise errors.UnableToParseFile('File does not contain Preference data.')

  extensions_setting_dict = json_dict.get('extensions')
  if not extensions_setting_dict:
    raise errors.UnableToParseFile(
        '[{0:s}] {1:s} is not a valid Preference file, '
        'does not contain extensions value.'.format(
            self.NAME, parser_mediator.GetDisplayName()))

  extensions_dict = extensions_setting_dict.get('settings')
  if not extensions_dict:
    raise errors.UnableToParseFile(
        '[{0:s}] {1:s} is not a valid Preference file, '
        'does not contain extensions settings value.'.format(
            self.NAME, parser_mediator.GetDisplayName()))

  extensions_autoupdate_dict = extensions_setting_dict.get('autoupdate')
  if extensions_autoupdate_dict:
    autoupdate_lastcheck_timestamp = extensions_autoupdate_dict.get(
        'last_check', None)

    if autoupdate_lastcheck_timestamp:
      autoupdate_lastcheck = int(autoupdate_lastcheck_timestamp, 10)

      event_data = ChromeExtensionsAutoupdaterEventData()
      event_data.message = 'Chrome extensions autoupdater last run'

      date_time = dfdatetime_webkit_time.WebKitTime(
          timestamp=autoupdate_lastcheck)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_ADDED)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    autoupdate_nextcheck_timestamp = extensions_autoupdate_dict.get(
        'next_check', None)
    if autoupdate_nextcheck_timestamp:
      autoupdate_nextcheck = int(autoupdate_nextcheck_timestamp, 10)

      event_data = ChromeExtensionsAutoupdaterEventData()
      event_data.message = 'Chrome extensions autoupdater next run'

      date_time = dfdatetime_webkit_time.WebKitTime(
          timestamp=autoupdate_nextcheck)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_ADDED)
      parser_mediator.ProduceEventWithEventData(event, event_data)

  browser_dict = json_dict.get('browser', None)
  if browser_dict and 'last_clear_browsing_data_time' in browser_dict:
    last_clear_history_timestamp = browser_dict.get(
        'last_clear_browsing_data_time', None)

    if last_clear_history_timestamp:
      last_clear_history = int(last_clear_history_timestamp, 10)

      event_data = ChromeExtensionsAutoupdaterEventData()
      event_data.message = 'Chrome history was cleared by user'

      date_time = dfdatetime_webkit_time.WebKitTime(
          timestamp=last_clear_history)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_DELETED)
      parser_mediator.ProduceEventWithEventData(event, event_data)

  self._ExtractExtensionInstallEvents(extensions_dict, parser_mediator)

  profile_dict = json_dict.get('profile', None)
  if profile_dict:
    content_settings_dict = profile_dict.get('content_settings', None)
    if content_settings_dict:
      exceptions_dict = content_settings_dict.get('exceptions', None)
      if exceptions_dict:
        self._ExtractContentSettingsExceptions(
            exceptions_dict, parser_mediator)
288,038
Collects values using a file artifact definition. Args: knowledge_base (KnowledgeBase): to fill with preprocessing information. artifact_definition (artifacts.ArtifactDefinition): artifact definition. searcher (dfvfs.FileSystemSearcher): file system searcher to preprocess the file system. file_system (dfvfs.FileSystem): file system to be preprocessed. Raises: PreProcessFail: if the preprocessing fails.
def Collect(
    self, knowledge_base, artifact_definition, searcher, file_system):
  for source in artifact_definition.sources:
    if source.type_indicator not in (
        artifact_definitions.TYPE_INDICATOR_FILE,
        artifact_definitions.TYPE_INDICATOR_PATH):
      continue

    for path in source.paths:
      # Make sure the path separators used in the artifact definition
      # correspond to those used by the file system.
      path_segments = path.split(source.separator)

      find_spec = file_system_searcher.FindSpec(
          location_glob=path_segments[1:], case_sensitive=False)

      for path_specification in searcher.Find(find_specs=[find_spec]):
        self._ParsePathSpecification(
            knowledge_base, searcher, file_system, path_specification,
            source.separator)
288,039
Parses a file entry for a preprocessing attribute. Args: knowledge_base (KnowledgeBase): to fill with preprocessing information. file_entry (dfvfs.FileEntry): file entry that contains the artifact value data. Raises: PreProcessFail: if the preprocessing fails.
def _ParseFileEntry(self, knowledge_base, file_entry):
  file_object = file_entry.GetFileObject()
  try:
    self._ParseFileData(knowledge_base, file_object)
  finally:
    file_object.close()
288,041
Collects values using a Windows Registry value artifact definition. Args: knowledge_base (KnowledgeBase): to fill with preprocessing information. artifact_definition (artifacts.ArtifactDefinition): artifact definition. searcher (dfwinreg.WinRegistrySearcher): Windows Registry searcher to preprocess the Windows Registry. Raises: PreProcessFail: if the Windows Registry key or value cannot be read.
def Collect(
    self, knowledge_base, artifact_definition, searcher):
  for source in artifact_definition.sources:
    if source.type_indicator not in (
        artifact_definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_KEY,
        artifact_definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_VALUE):
      continue

    if source.type_indicator == (
        artifact_definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_KEY):
      key_value_pairs = [{'key': key} for key in source.keys]
    else:
      key_value_pairs = source.key_value_pairs

    for key_value_pair in key_value_pairs:
      key_path = key_value_pair['key']

      # The artifact definitions currently incorrectly define
      # CurrentControlSet so we correct it here for now.
      # Also see: https://github.com/ForensicArtifacts/artifacts/issues/120
      key_path_upper = key_path.upper()
      if key_path_upper.startswith('%%CURRENT_CONTROL_SET%%'):
        key_path = '{0:s}{1:s}'.format(
            'HKEY_LOCAL_MACHINE\\System\\CurrentControlSet', key_path[23:])

      find_spec = registry_searcher.FindSpec(key_path_glob=key_path)

      for key_path in searcher.Find(find_specs=[find_spec]):
        try:
          registry_key = searcher.GetKeyByPath(key_path)
        except IOError as exception:
          raise errors.PreProcessFail((
              'Unable to retrieve Windows Registry key: {0:s} with error: '
              '{1!s}').format(key_path, exception))

        if registry_key:
          value_name = key_value_pair.get('value', None)
          self._ParseKey(knowledge_base, registry_key, value_name)
288,042
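The placeholder rewrite above is plain string surgery; a standalone check (the key path is illustrative):

key_path = '%%CURRENT_CONTROL_SET%%\\Control\\Session Manager'

# len('%%CURRENT_CONTROL_SET%%') == 23, so key_path[23:] keeps the tail,
# here '\\Control\\Session Manager'.
if key_path.upper().startswith('%%CURRENT_CONTROL_SET%%'):
  key_path = '{0:s}{1:s}'.format(
      'HKEY_LOCAL_MACHINE\\System\\CurrentControlSet', key_path[23:])

assert key_path == (
    'HKEY_LOCAL_MACHINE\\System\\CurrentControlSet'
    '\\Control\\Session Manager')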
Parses a Windows Registry key for a preprocessing attribute. Args: knowledge_base (KnowledgeBase): to fill with preprocessing information. registry_key (dfwinreg.WinRegistryKey): Windows Registry key. value_name (str): name of the Windows Registry value. Raises: PreProcessFail: if the preprocessing fails.
def _ParseKey(self, knowledge_base, registry_key, value_name):
  try:
    registry_value = registry_key.GetValueByName(value_name)
  except IOError as exception:
    raise errors.PreProcessFail((
        'Unable to retrieve Windows Registry key: {0:s} value: {1:s} '
        'with error: {2!s}').format(
            registry_key.path, value_name, exception))

  if registry_value:
    value_object = registry_value.GetDataAsObject()
    if value_object:
      self._ParseValueData(knowledge_base, value_object)
288,043
Parses an F value. Args: registry_key (dfwinreg.WinRegistryKey): Windows Registry key. Returns: f_value: F value stored in the Windows Registry key. Raises: ParseError: if the Windows Registry key does not contain an F value or the F value cannot be parsed.
def _ParseFValue(self, registry_key):
  registry_value = registry_key.GetValueByName('F')
  if not registry_value:
    raise errors.ParseError(
        'missing value: "F" in Windows Registry key: {0:s}.'.format(
            registry_key.name))

  f_value_map = self._GetDataTypeMap('f_value')

  try:
    return self._ReadStructureFromByteStream(
        registry_value.data, 0, f_value_map)
  except (ValueError, errors.ParseError) as exception:
    raise errors.ParseError(exception)
288,045
Parses a V value string. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. data (bytes): Windows Registry V value data. user_information_descriptor (user_information_descriptor): V value user information descriptor. Returns: str: string value stored in the Windows Registry V value data.
def _ParseVValueString(
    self, parser_mediator, data, user_information_descriptor):
  data_start_offset = (
      user_information_descriptor.offset + self._V_VALUE_STRINGS_OFFSET)
  data_end_offset = data_start_offset + user_information_descriptor.size
  descriptor_data = data[data_start_offset:data_end_offset]

  try:
    username = descriptor_data.decode('utf-16-le')
  except (UnicodeDecodeError, UnicodeEncodeError) as exception:
    username = descriptor_data.decode('utf-16-le', errors='replace')
    parser_mediator.ProduceExtractionWarning((
        'unable to decode V value string with error: {0!s}. Characters '
        'that cannot be decoded will be replaced with "?" or '
        '"\\ufffd".').format(exception))

  return username
288,046
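A small demonstration of the strict-then-replacement decoding used above; the byte string is made up:

# 'A' as UTF-16-LE, then a lone trailing byte: odd-length buffers are not
# valid UTF-16-LE, so strict decoding fails.
descriptor_data = b'A\x00\x00'

try:
  username = descriptor_data.decode('utf-16-le')
except UnicodeDecodeError:
  # Replacement decoding turns the truncated code unit into U+FFFD,
  # the Unicode replacement character.
  username = descriptor_data.decode('utf-16-le', errors='replace')

assert username == 'A\ufffd'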
Extracts events from a Windows Registry key. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
  names_key = registry_key.GetSubkeyByName('Names')
  if not names_key:
    parser_mediator.ProduceExtractionWarning('missing subkey: Names.')
    return

  last_written_time_per_username = {
      registry_value.name: registry_value.last_written_time
      for registry_value in names_key.GetSubkeys()}

  for subkey in registry_key.GetSubkeys():
    if subkey.name == 'Names':
      continue

    try:
      f_value = self._ParseFValue(subkey)
    except errors.ParseError as exception:
      parser_mediator.ProduceExtractionWarning(
          'unable to parse F value with error: {0!s}'.format(exception))
      continue

    registry_value = subkey.GetValueByName('V')
    if not registry_value:
      parser_mediator.ProduceExtractionWarning(
          'missing Registry value: "V" in subkey: {0:s}.'.format(
              subkey.name))
      continue

    v_value_map = self._GetDataTypeMap('v_value')

    try:
      v_value = self._ReadStructureFromByteStream(
          registry_value.data, 0, v_value_map)
    except (ValueError, errors.ParseError) as exception:
      parser_mediator.ProduceExtractionWarning(
          'unable to parse V value with error: {0!s}'.format(exception))
      continue

    username = self._ParseVValueString(
        parser_mediator, registry_value.data, v_value[1])

    fullname = self._ParseVValueString(
        parser_mediator, registry_value.data, v_value[2])

    comments = self._ParseVValueString(
        parser_mediator, registry_value.data, v_value[3])

    last_written_time = last_written_time_per_username.get(username, None)

    # TODO: check if subkey.name == f_value.rid
    if last_written_time:
      values_dict = {
          'account_rid': f_value.rid,
          'login_count': f_value.number_of_logons}

      if username:
        values_dict['username'] = username
      if fullname:
        values_dict['full_name'] = fullname
      if comments:
        values_dict['comments'] = comments

      event_data = windows_events.WindowsRegistryEventData()
      event_data.key_path = registry_key.path
      event_data.regvalue = values_dict
      event_data.source_append = self._SOURCE_APPEND

      event = time_events.DateTimeValuesEvent(
          last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    event_data = SAMUsersWindowsRegistryEventData()
    event_data.account_rid = f_value.rid
    event_data.comments = comments
    event_data.fullname = fullname
    event_data.key_path = registry_key.path
    event_data.login_count = f_value.number_of_logons
    event_data.username = username

    if f_value.last_login_time != 0:
      date_time = dfdatetime_filetime.Filetime(
          timestamp=f_value.last_login_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_LAST_LOGIN)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    if f_value.last_password_set_time != 0:
      date_time = dfdatetime_filetime.Filetime(
          timestamp=f_value.last_password_set_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_LAST_PASSWORD_RESET)
      parser_mediator.ProduceEventWithEventData(event, event_data)
288,047
Parses a visited row. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. query (str): query that created the row. row (sqlite3.Row): row.
def ParsePageVisitRow(self, parser_mediator, query, row, **unused_kwargs):
  query_hash = hash(query)

  was_http_non_get = self._GetRowValue(query_hash, row, 'http_non_get')

  event_data = SafariHistoryPageVisitedEventData()
  event_data.offset = self._GetRowValue(query_hash, row, 'id')
  event_data.query = query
  event_data.title = self._GetRowValue(query_hash, row, 'title')
  event_data.url = self._GetRowValue(query_hash, row, 'url')
  event_data.visit_count = self._GetRowValue(query_hash, row, 'visit_count')
  event_data.was_http_non_get = bool(was_http_non_get)

  timestamp = self._GetRowValue(query_hash, row, 'visit_time')
  date_time = dfdatetime_cocoa_time.CocoaTime(timestamp=timestamp)
  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
  parser_mediator.ProduceEventWithEventData(event, event_data)
288,049
Returns the local path for a given inode. Args: inode (int): inode number for the file. cache (SQLiteCache): cache. database (SQLiteDatabase): database. Returns: str: full path, including the filename, of the given inode value.
def GetLocalPath(self, inode, cache, database):
  local_path = cache.GetResults('local_path')
  if not local_path:
    results = database.Query(self.LOCAL_PATH_CACHE_QUERY)
    cache.CacheQueryResults(
        results, 'local_path', 'child_inode_number',
        ('parent_inode_number', 'filename'))
    local_path = cache.GetResults('local_path')

  parent, path = local_path.get(inode, [None, None])

  # TODO: Read the local_sync_root from the sync_config.db and use that
  # for a root value.
  root_value = '%local_sync_root%/'

  if not path:
    return root_value

  paths = []
  while path:
    paths.append(path)
    parent, path = local_path.get(parent, [None, None])

  if not paths:
    return root_value

  # Paths are built top level to root so we need to reverse the list to
  # represent them in the traditional order.
  paths.reverse()
  return root_value + '/'.join(paths)
288,052
Returns the cloud path for a given resource identifier. Args: resource_id (str): resource identifier for the file. cache (SQLiteCache): cache. database (SQLiteDatabase): database. Returns: str: full path to the resource value.
def GetCloudPath(self, resource_id, cache, database):
  cloud_path = cache.GetResults('cloud_path')
  if not cloud_path:
    results = database.Query(self.CLOUD_PATH_CACHE_QUERY)
    cache.CacheQueryResults(
        results, 'cloud_path', 'resource_id', ('filename', 'parent'))
    cloud_path = cache.GetResults('cloud_path')

  if resource_id == 'folder:root':
    return '/'

  paths = []
  parent_path, parent_id = cloud_path.get(resource_id, ['', ''])
  while parent_path:
    if parent_path == 'folder:root':
      break
    paths.append(parent_path)
    parent_path, parent_id = cloud_path.get(parent_id, ['', ''])

  if not paths:
    return '/'

  # Paths are built top level to root so we need to reverse the list to
  # represent them in the traditional order.
  paths.reverse()
  return '/{0:s}/'.format('/'.join(paths))
288,053
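Both path helpers reconstruct a path by walking parent references in a lookup table until they reach the root, then reversing the collected segments. A generic standalone sketch with a made-up table:

# Hypothetical lookup table: child id -> (filename, parent id).
table = {
    'id3': ('report.pdf', 'id2'),
    'id2': ('projects', 'id1'),
    'id1': ('folder:root', None),
}

def build_path(resource_id):
  """Walks parent references and joins the segments root-first."""
  segments = []
  filename, parent_id = table.get(resource_id, ('', ''))
  while filename and filename != 'folder:root':
    segments.append(filename)
    filename, parent_id = table.get(parent_id, ('', ''))
  segments.reverse()
  return '/{0:s}'.format('/'.join(segments))

assert build_path('id3') == '/projects/report.pdf'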
Parses a cloud entry row. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. query (str): query that created the row. row (sqlite3.Row): row. cache (SQLiteCache): cache. database (SQLiteDatabase): database.
def ParseCloudEntryRow(
    self, parser_mediator, query, row, cache=None, database=None,
    **unused_kwargs):
  query_hash = hash(query)

  parent_resource_id = self._GetRowValue(
      query_hash, row, 'parent_resource_id')
  filename = self._GetRowValue(query_hash, row, 'filename')

  cloud_path = self.GetCloudPath(parent_resource_id, cache, database)
  cloud_filename = '{0:s}{1:s}'.format(cloud_path, filename)

  event_data = GoogleDriveSnapshotCloudEntryEventData()
  event_data.document_type = self._GetRowValue(query_hash, row, 'doc_type')
  event_data.path = cloud_filename
  event_data.query = query
  event_data.shared = bool(self._GetRowValue(query_hash, row, 'shared'))
  event_data.size = self._GetRowValue(query_hash, row, 'size')
  event_data.url = self._GetRowValue(query_hash, row, 'url')

  timestamp = self._GetRowValue(query_hash, row, 'modified')
  date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
  parser_mediator.ProduceEventWithEventData(event, event_data)

  timestamp = self._GetRowValue(query_hash, row, 'created')
  if timestamp:
    date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_CREATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)
288,054
Parses a local entry row. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. query (str): query that created the row. row (sqlite3.Row): row. cache (Optional[SQLiteCache]): cache. database (Optional[SQLiteDatabase]): database.
def ParseLocalEntryRow(
    self, parser_mediator, query, row, cache=None, database=None,
    **unused_kwargs):
  query_hash = hash(query)

  inode_number = self._GetRowValue(query_hash, row, 'inode_number')
  local_path = self.GetLocalPath(inode_number, cache, database)

  event_data = GoogleDriveSnapshotLocalEntryEventData()
  event_data.path = local_path
  event_data.query = query
  event_data.size = self._GetRowValue(query_hash, row, 'size')

  timestamp = self._GetRowValue(query_hash, row, 'modified')
  date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
  parser_mediator.ProduceEventWithEventData(event, event_data)
288,055
Initializes an event source heap. Args: maximum_number_of_items (Optional[int]): maximum number of items in the heap.
def __init__(self, maximum_number_of_items=50000): super(_EventSourceHeap, self).__init__() self._heap = [] self._maximum_number_of_items = maximum_number_of_items
288,056
Pushes an event source onto the heap. Args: event_source (EventSource): event source.
def PushEventSource(self, event_source): if event_source.file_entry_type == ( dfvfs_definitions.FILE_ENTRY_TYPE_DIRECTORY): weight = 1 else: weight = 100 heap_values = (weight, time.time(), event_source) heapq.heappush(self._heap, heap_values)
288,058
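A short illustration of the weighting in PushEventSource: heapq orders tuples element by element, so directory sources (weight 1) pop before all others (weight 100), and the time.time() value breaks ties in insertion order. The event-source stand-ins below are hypothetical.

import heapq
import time

heap = []
for name, is_directory in [('a.txt', False), ('logs', True), ('b.txt', False)]:
  weight = 1 if is_directory else 100
  # (weight, timestamp, payload): weight dominates the comparison.
  heapq.heappush(heap, (weight, time.time(), name))

while heap:
  _, _, name = heapq.heappop(heap)
  print(name)  # logs first, then a.txt and b.txt in insertion order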
Initializes an engine. Args: maximum_number_of_tasks (Optional[int]): maximum number of concurrent tasks, where 0 represents no limit. use_zeromq (Optional[bool]): True if ZeroMQ should be used for queuing instead of Python's multiprocessing queue.
def __init__( self, maximum_number_of_tasks=_MAXIMUM_NUMBER_OF_TASKS, use_zeromq=True): super(TaskMultiProcessEngine, self).__init__() self._enable_sigsegv_handler = False self._filter_find_specs = None self._last_worker_number = 0 self._maximum_number_of_tasks = maximum_number_of_tasks self._merge_task = None self._merge_task_on_hold = None self._number_of_consumed_event_tags = 0 self._number_of_consumed_events = 0 self._number_of_consumed_reports = 0 self._number_of_consumed_sources = 0 self._number_of_consumed_warnings = 0 self._number_of_produced_event_tags = 0 self._number_of_produced_events = 0 self._number_of_produced_reports = 0 self._number_of_produced_sources = 0 self._number_of_produced_warnings = 0 self._number_of_worker_processes = 0 self._path_spec_extractor = extractors.PathSpecExtractor() self._processing_configuration = None self._resolver_context = context.Context() self._session_identifier = None self._status = definitions.STATUS_INDICATOR_IDLE self._storage_merge_reader = None self._storage_merge_reader_on_hold = None self._task_queue = None self._task_queue_port = None self._task_manager = task_manager.TaskManager() self._use_zeromq = use_zeromq
288,059
Fills the event source heap with the available written event sources. Args: storage_writer (StorageWriter): storage writer for a session storage. event_source_heap (_EventSourceHeap): event source heap. start_with_first (Optional[bool]): True if the function should start with the first written event source.
def _FillEventSourceHeap( self, storage_writer, event_source_heap, start_with_first=False): if self._processing_profiler: self._processing_profiler.StartTiming('fill_event_source_heap') if self._processing_profiler: self._processing_profiler.StartTiming('get_event_source') if start_with_first: event_source = storage_writer.GetFirstWrittenEventSource() else: event_source = storage_writer.GetNextWrittenEventSource() if self._processing_profiler: self._processing_profiler.StopTiming('get_event_source') while event_source: event_source_heap.PushEventSource(event_source) if event_source_heap.IsFull(): break if self._processing_profiler: self._processing_profiler.StartTiming('get_event_source') event_source = storage_writer.GetNextWrittenEventSource() if self._processing_profiler: self._processing_profiler.StopTiming('get_event_source') if self._processing_profiler: self._processing_profiler.StopTiming('fill_event_source_heap')
288,060
Merges a task storage with the session storage. This function checks all task stores that are ready to merge and updates the scheduled tasks. Note that, to prevent this function from holding up the task scheduling loop, only the first available task storage is merged. Args: storage_writer (StorageWriter): storage writer for a session storage used to merge task storage.
def _MergeTaskStorage(self, storage_writer): if self._processing_profiler: self._processing_profiler.StartTiming('merge_check') for task_identifier in storage_writer.GetProcessedTaskIdentifiers(): try: task = self._task_manager.GetProcessedTaskByIdentifier(task_identifier) self._task_manager.SampleTaskStatus(task, 'processed') to_merge = self._task_manager.CheckTaskToMerge(task) if not to_merge: storage_writer.RemoveProcessedTaskStorage(task) self._task_manager.RemoveTask(task) self._task_manager.SampleTaskStatus(task, 'removed_processed') else: storage_writer.PrepareMergeTaskStorage(task) self._task_manager.UpdateTaskAsPendingMerge(task) except KeyError: logger.error( 'Unable to retrieve task: {0:s} to prepare it to be merged.'.format( task_identifier)) continue if self._processing_profiler: self._processing_profiler.StopTiming('merge_check') task = None if not self._storage_merge_reader_on_hold: task = self._task_manager.GetTaskPendingMerge(self._merge_task) # Limit the number of attribute containers from a single task-based # storage file that are merged per loop to keep tasks flowing. if task or self._storage_merge_reader: self._status = definitions.STATUS_INDICATOR_MERGING if self._processing_profiler: self._processing_profiler.StartTiming('merge') if task: if self._storage_merge_reader: self._merge_task_on_hold = self._merge_task self._storage_merge_reader_on_hold = self._storage_merge_reader self._task_manager.SampleTaskStatus( self._merge_task_on_hold, 'merge_on_hold') self._merge_task = task try: self._storage_merge_reader = storage_writer.StartMergeTaskStorage( task) self._task_manager.SampleTaskStatus(task, 'merge_started') except IOError as exception: logger.error(( 'Unable to merge results of task: {0:s} ' 'with error: {1!s}').format(task.identifier, exception)) self._storage_merge_reader = None if self._storage_merge_reader: fully_merged = self._storage_merge_reader.MergeAttributeContainers( maximum_number_of_containers=self._MAXIMUM_NUMBER_OF_CONTAINERS) else: # TODO: Do something more sensible when this happens, perhaps # retrying the task once that is implemented. For now, we mark the task # as fully merged because we can't continue with it. fully_merged = True if self._processing_profiler: self._processing_profiler.StopTiming('merge') if fully_merged: try: self._task_manager.CompleteTask(self._merge_task) except KeyError as exception: logger.error( 'Unable to complete task: {0:s} with error: {1!s}'.format( self._merge_task.identifier, exception)) if not self._storage_merge_reader_on_hold: self._merge_task = None self._storage_merge_reader = None else: self._merge_task = self._merge_task_on_hold self._storage_merge_reader = self._storage_merge_reader_on_hold self._merge_task_on_hold = None self._storage_merge_reader_on_hold = None self._task_manager.SampleTaskStatus( self._merge_task, 'merge_resumed') self._status = definitions.STATUS_INDICATOR_RUNNING self._number_of_produced_events = storage_writer.number_of_events self._number_of_produced_sources = storage_writer.number_of_event_sources self._number_of_produced_warnings = storage_writer.number_of_warnings
288,061
Processes the sources. Args: source_path_specs (list[dfvfs.PathSpec]): path specifications of the sources to process. storage_writer (StorageWriter): storage writer for a session storage. filter_find_specs (Optional[list[dfvfs.FindSpec]]): find specifications used in path specification extraction. If set, path specifications that match the find specification will be processed.
def _ProcessSources( self, source_path_specs, storage_writer, filter_find_specs=None): if self._processing_profiler: self._processing_profiler.StartTiming('process_sources') self._status = definitions.STATUS_INDICATOR_COLLECTING self._number_of_consumed_event_tags = 0 self._number_of_consumed_events = 0 self._number_of_consumed_reports = 0 self._number_of_consumed_sources = 0 self._number_of_consumed_warnings = 0 self._number_of_produced_event_tags = 0 self._number_of_produced_events = 0 self._number_of_produced_reports = 0 self._number_of_produced_sources = 0 self._number_of_produced_warnings = 0 path_spec_generator = self._path_spec_extractor.ExtractPathSpecs( source_path_specs, find_specs=filter_find_specs, recurse_file_system=False, resolver_context=self._resolver_context) for path_spec in path_spec_generator: if self._abort: break # TODO: determine if event sources should be DataStream or FileEntry # or both. event_source = event_sources.FileEntryEventSource(path_spec=path_spec) storage_writer.AddEventSource(event_source) self._number_of_produced_sources = storage_writer.number_of_event_sources # Update the foreman process status in case we are using a filter file. self._UpdateForemanProcessStatus() if self._status_update_callback: self._status_update_callback(self._processing_status) self._ScheduleTasks(storage_writer) if self._abort: self._status = definitions.STATUS_INDICATOR_ABORTED else: self._status = definitions.STATUS_INDICATOR_COMPLETED self._number_of_produced_events = storage_writer.number_of_events self._number_of_produced_sources = storage_writer.number_of_event_sources self._number_of_produced_warnings = storage_writer.number_of_warnings if self._processing_profiler: self._processing_profiler.StopTiming('process_sources') # Update the foreman process and task status in case we are using # a filter file. self._UpdateForemanProcessStatus() tasks_status = self._task_manager.GetStatusInformation() if self._task_queue_profiler: self._task_queue_profiler.Sample(tasks_status) self._processing_status.UpdateTasksStatus(tasks_status) if self._status_update_callback: self._status_update_callback(self._processing_status)
288,062
Schedules a task. Args: task (Task): task. Returns: bool: True if the task was scheduled.
def _ScheduleTask(self, task): if self._processing_profiler: self._processing_profiler.StartTiming('schedule_task') try: self._task_queue.PushItem(task, block=False) is_scheduled = True except errors.QueueFull: is_scheduled = False if self._processing_profiler: self._processing_profiler.StopTiming('schedule_task') return is_scheduled
288,063
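_ScheduleTask pushes without blocking so a full task queue never stalls the scheduling loop; the caller keeps the task and retries it on a later iteration. A generic sketch of that pattern using Python's standard queue module rather than the plaso queue classes:

import queue

task_queue = queue.Queue(maxsize=2)

def schedule_task(task):
  try:
    task_queue.put_nowait(task)  # non-blocking push
    return True
  except queue.Full:
    return False  # caller retries once the queue drains

for task in ('task-1', 'task-2', 'task-3'):
  print(task, schedule_task(task))  # task-3 is rejected while the queue is full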
Schedules tasks. Args: storage_writer (StorageWriter): storage writer for a session storage.
def _ScheduleTasks(self, storage_writer): logger.debug('Task scheduler started') self._status = definitions.STATUS_INDICATOR_RUNNING # TODO: make tasks persistent. # TODO: protect task scheduler loop by catch all and # handle abort path. event_source_heap = _EventSourceHeap() self._FillEventSourceHeap( storage_writer, event_source_heap, start_with_first=True) event_source = event_source_heap.PopEventSource() task = None while event_source or self._task_manager.HasPendingTasks(): if self._abort: break try: if not task: task = self._task_manager.CreateRetryTask() if not task and event_source: task = self._task_manager.CreateTask(self._session_identifier) task.file_entry_type = event_source.file_entry_type task.path_spec = event_source.path_spec event_source = None self._number_of_consumed_sources += 1 if self._guppy_memory_profiler: self._guppy_memory_profiler.Sample() if task: if self._ScheduleTask(task): logger.debug( 'Scheduled task {0:s} for path specification {1:s}'.format( task.identifier, task.path_spec.comparable)) self._task_manager.SampleTaskStatus(task, 'scheduled') task = None else: self._task_manager.SampleTaskStatus(task, 'schedule_attempted') self._MergeTaskStorage(storage_writer) if not event_source_heap.IsFull(): self._FillEventSourceHeap(storage_writer, event_source_heap) if not task and not event_source: event_source = event_source_heap.PopEventSource() except KeyboardInterrupt: self._abort = True self._processing_status.aborted = True if self._status_update_callback: self._status_update_callback(self._processing_status) for task in self._task_manager.GetFailedTasks(): warning = warnings.ExtractionWarning( message='Worker failed to process path specification', path_spec=task.path_spec) self._storage_writer.AddWarning(warning) self._processing_status.error_path_specs.append(task.path_spec) self._status = definitions.STATUS_INDICATOR_IDLE if self._abort: logger.debug('Task scheduler aborted') else: logger.debug('Task scheduler stopped')
288,064
Creates, starts, monitors and registers a worker process. Args: process_name (str): process name. storage_writer (StorageWriter): storage writer for a session storage used to create task storage. Returns: MultiProcessWorkerProcess: extraction worker process or None if the process could not be started.
def _StartWorkerProcess(self, process_name, storage_writer): process_name = 'Worker_{0:02d}'.format(self._last_worker_number) logger.debug('Starting worker process {0:s}'.format(process_name)) if self._use_zeromq: queue_name = '{0:s} task queue'.format(process_name) task_queue = zeromq_queue.ZeroMQRequestConnectQueue( delay_open=True, linger_seconds=0, name=queue_name, port=self._task_queue_port, timeout_seconds=self._TASK_QUEUE_TIMEOUT_SECONDS) else: task_queue = self._task_queue process = worker_process.WorkerProcess( task_queue, storage_writer, self._artifacts_filter_helper, self.knowledge_base, self._session_identifier, self._processing_configuration, enable_sigsegv_handler=self._enable_sigsegv_handler, name=process_name) # Remove all possible log handlers to prevent a child process from logging # to the main process log file and garbling the log. The log handlers are # recreated after the worker process has been started. for handler in logging.root.handlers: logging.root.removeHandler(handler) handler.close() process.start() loggers.ConfigureLogging( debug_output=self._debug_output, filename=self._log_filename, mode='a', quiet_mode=self._quiet_mode) try: self._StartMonitoringProcess(process) except (IOError, KeyError) as exception: pid = process.pid logger.error(( 'Unable to monitor replacement worker process: {0:s} ' '(PID: {1:d}) with error: {2!s}').format( process_name, pid, exception)) self._TerminateProcess(process) return None self._RegisterProcess(process) self._last_worker_number += 1 return process
288,065
Stops the extraction processes. Args: abort (bool): True to indicate the stop is issued on abort.
def _StopExtractionProcesses(self, abort=False): logger.debug('Stopping extraction processes.') self._StopMonitoringProcesses() # Note that multiprocessing.Queue is very sensitive regarding # blocking on either a get or a put. So we try to prevent using # any blocking behavior. if abort: # Signal all the processes to abort. self._AbortTerminate() logger.debug('Emptying task queue.') self._task_queue.Empty() # Wake the processes to make sure that they are not blocking # waiting for new items on the queue. for _ in self._processes_per_pid: try: self._task_queue.PushItem(plaso_queue.QueueAbort(), block=False) except errors.QueueFull: logger.warning('Task queue full, unable to push abort message.') # Try waiting for the processes to exit normally. self._AbortJoin(timeout=self._PROCESS_JOIN_TIMEOUT) self._task_queue.Close(abort=abort) if not abort: # Check if the processes are still alive and terminate them if necessary. self._AbortTerminate() self._AbortJoin(timeout=self._PROCESS_JOIN_TIMEOUT) self._task_queue.Close(abort=True) # Kill any lingering processes. self._AbortKill()
288,067
Determines the formatted message strings for an event object. Args: formatter_mediator (FormatterMediator): mediates the interactions between formatters and other components, such as storage and Windows EventLog resources. event (EventObject): event. Returns: tuple(str, str): formatted message string and short message string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter.
def GetMessages(self, formatter_mediator, event): if self.DATA_TYPE != event.data_type: raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format( event.data_type)) event_values = event.CopyToDict() login_type = event_values.get('type', None) if login_type is None: status = 'N/A' else: status = self._STATUS_TYPES.get(login_type, 'UNKNOWN') event_values['status'] = status return self._ConditionalFormatMessages(event_values)
288,069
Parses a message row. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. query (str): query that created the row. row (sqlite3.Row): row.
def ParseMessageRow(self, parser_mediator, query, row, **unused_kwargs): query_hash = hash(query) event_data = KikIOSMessageEventData() event_data.body = self._GetRowValue(query_hash, row, 'ZBODY') event_data.displayname = self._GetRowValue(query_hash, row, 'ZDISPLAYNAME') event_data.message_status = self._GetRowValue(query_hash, row, 'ZSTATE') event_data.message_type = self._GetRowValue(query_hash, row, 'ZTYPE') event_data.offset = self._GetRowValue(query_hash, row, 'id') event_data.query = query event_data.username = self._GetRowValue(query_hash, row, 'ZUSERNAME') timestamp = self._GetRowValue(query_hash, row, 'ZRECEIVEDTIMESTAMP') # Convert the floating point value to an integer. timestamp = int(timestamp) date_time = dfdatetime_cocoa_time.CocoaTime(timestamp=timestamp) event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_CREATION) parser_mediator.ProduceEventWithEventData(event, event_data)
288,071
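ZRECEIVEDTIMESTAMP is a Cocoa timestamp: seconds since 2001-01-01 00:00:00 UTC, stored as a floating point value. A hedged conversion sketch equivalent to what dfdatetime's CocoaTime encapsulates:

from datetime import datetime, timedelta, timezone

COCOA_EPOCH = datetime(2001, 1, 1, tzinfo=timezone.utc)

def cocoa_to_datetime(timestamp):
  # Truncate the fractional seconds, as ParseMessageRow does with int().
  return COCOA_EPOCH + timedelta(seconds=int(timestamp))

print(cocoa_to_datetime(547218000.5))  # a UTC datetime in 2018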
Retrieves event data from the Windows EventLog (EVT) record. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. record_index (int): event record index. evt_record (pyevt.record): event record. recovered (Optional[bool]): True if the record was recovered. Returns: WinEvtRecordEventData: event data.
def _GetEventData( self, parser_mediator, record_index, evt_record, recovered=False): event_data = WinEvtRecordEventData() try: event_data.record_number = evt_record.identifier except OverflowError as exception: parser_mediator.ProduceExtractionWarning(( 'unable to read record identifier from event record: {0:d} ' 'with error: {1!s}').format(record_index, exception)) try: event_identifier = evt_record.event_identifier except OverflowError as exception: parser_mediator.ProduceExtractionWarning(( 'unable to read event identifier from event record: {0:d} ' 'with error: {1!s}').format(record_index, exception)) event_identifier = None event_data.offset = evt_record.offset event_data.recovered = recovered # We want the event identifier to match that of the EVTX # event records. if event_identifier is not None: event_data.event_identifier = event_identifier & 0xffff event_data.facility = (event_identifier >> 16) & 0x0fff event_data.severity = event_identifier >> 30 event_data.message_identifier = event_identifier event_data.event_type = evt_record.event_type event_data.event_category = evt_record.event_category event_data.source_name = evt_record.source_name # Computer name is the value stored in the event record and does not # necessarily correspond to the actual hostname. event_data.computer_name = evt_record.computer_name event_data.user_sid = evt_record.user_security_identifier event_data.strings = list(evt_record.strings) return event_data
288,073
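The masking in _GetEventData follows the layout of a 32-bit Windows event identifier: the low 16 bits hold the event code, bits 16-27 the facility, and the top two bits the severity. A small sketch with a hypothetical identifier value:

event_identifier = 0xC00003EB  # hypothetical raw 32-bit identifier

event_code = event_identifier & 0xffff        # 1003
facility = (event_identifier >> 16) & 0x0fff  # 0
severity = event_identifier >> 30             # 3 (error)

print(event_code, facility, severity)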
Parses a Windows EventLog (EVT) record. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. record_index (int): event record index. evt_record (pyevt.record): event record. recovered (Optional[bool]): True if the record was recovered.
def _ParseRecord( self, parser_mediator, record_index, evt_record, recovered=False): event_data = self._GetEventData( parser_mediator, record_index, evt_record, recovered=recovered) try: creation_time = evt_record.get_creation_time_as_integer() except OverflowError as exception: parser_mediator.ProduceExtractionWarning(( 'unable to read creation time from event record: {0:d} ' 'with error: {1!s}').format(record_index, exception)) creation_time = None if creation_time: date_time = dfdatetime_posix_time.PosixTime(timestamp=creation_time) event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_CREATION) parser_mediator.ProduceEventWithEventData(event, event_data) try: written_time = evt_record.get_written_time_as_integer() except OverflowError as exception: parser_mediator.ProduceExtractionWarning(( 'unable to read written time from event record: {0:d} ' 'with error: {1!s}').format(record_index, exception)) written_time = None if written_time: date_time = dfdatetime_posix_time.PosixTime(timestamp=written_time) event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data) if not creation_time and not written_time: date_time = dfdatetime_semantic_time.SemanticTime('Not set') event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME) parser_mediator.ProduceEventWithEventData(event, event_data)
288,074
Parses Windows EventLog (EVT) records. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. evt_file (pyevt.file): Windows EventLog (EVT) file.
def _ParseRecords(self, parser_mediator, evt_file): # To handle errors when parsing a Windows EventLog (EVT) file in the most # granular way the following code iterates over every event record. The # call to evt_file.get_record() and access to members of evt_record should # be wrapped in a try-except. for record_index in range(evt_file.number_of_records): if parser_mediator.abort: break try: evt_record = evt_file.get_record(record_index) self._ParseRecord(parser_mediator, record_index, evt_record) except IOError as exception: parser_mediator.ProduceExtractionWarning( 'unable to parse event record: {0:d} with error: {1!s}'.format( record_index, exception)) for record_index in range(evt_file.number_of_recovered_records): if parser_mediator.abort: break try: evt_record = evt_file.get_recovered_record(record_index) self._ParseRecord( parser_mediator, record_index, evt_record, recovered=True) except IOError as exception: parser_mediator.ProduceExtractionWarning(( 'unable to parse recovered event record: {0:d} with error: ' '{1!s}').format(record_index, exception))
288,075
Parses a Windows EventLog (EVT) file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): a file-like object.
def ParseFileObject(self, parser_mediator, file_object): evt_file = pyevt.file() evt_file.set_ascii_codepage(parser_mediator.codepage) try: evt_file.open_file_object(file_object) except IOError as exception: parser_mediator.ProduceExtractionWarning( 'unable to open file with error: {0!s}'.format(exception)) return try: self._ParseRecords(parser_mediator, evt_file) finally: evt_file.close()
288,076
Verifies whether content corresponds to a Zsh extended_history file. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. lines (str): one or more lines from the text file. Returns: bool: True if the lines correspond to a Zsh extended_history file.
def VerifyStructure(self, parser_mediator, lines): if self._VERIFICATION_REGEX.match(lines): return True return False
288,078
Retrieves a plist value by path. Args: path_segments (list[str]): path segment strings relative to the root of the plist. Returns: object: value of the key specified by the path or None if not available.
def GetValueByPath(self, path_segments): key = self.root_key for path_segment in path_segments: if isinstance(key, dict): try: key = key[path_segment] except KeyError: return None elif isinstance(key, list): try: list_index = int(path_segment, 10) except ValueError: return None key = key[list_index] else: return None if not key: return None return key
288,079
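A usage sketch of the walk GetValueByPath performs, against a hand-built structure; list segments are addressed by their decimal index given as a string:

root_key = {
    'Accounts': [
        {'Username': 'alice'},
        {'Username': 'bob'},
    ],
}

key = root_key
for path_segment in ['Accounts', '1', 'Username']:
  if isinstance(key, dict):
    key = key.get(path_segment)
  elif isinstance(key, list):
    key = key[int(path_segment, 10)]

print(key)  # bob

Note that the original method also returns None for empty or zero values, since it ends with a truthiness test on the resolved key.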
Reads a plist from a file-like object. Args: file_object (dfvfs.FileIO): a file-like object containing plist data. Raises: IOError: if the plist file-like object cannot be read. OSError: if the plist file-like object cannot be read.
def Read(self, file_object): try: self.root_key = biplist.readPlist(file_object) except ( biplist.NotBinaryPlistException, biplist.InvalidPlistException) as exception: raise IOError(exception)
288,080
Determines the formatted message string. Args: format_string (str): message format string. event_values (dict[str, object]): event values. Returns: str: formatted message string.
def _FormatMessage(self, format_string, event_values): if not isinstance(format_string, py2to3.UNICODE_TYPE): logger.warning('Format string: {0:s} is non-Unicode.'.format( format_string)) # Plaso code files should be in UTF-8 and thus binary strings are # assumed UTF-8. If this is not the case this should be fixed. format_string = format_string.decode('utf-8', errors='ignore') try: message_string = format_string.format(**event_values) except KeyError as exception: data_type = event_values.get('data_type', 'N/A') display_name = event_values.get('display_name', 'N/A') event_identifier = event_values.get('uuid', 'N/A') parser_chain = event_values.get('parser', 'N/A') error_message = ( 'unable to format string: "{0:s}" event object is missing required ' 'attributes: {1!s}').format(format_string, exception) error_message = ( 'Event: {0:s} data type: {1:s} display name: {2:s} ' 'parser chain: {3:s} with error: {4:s}').format( event_identifier, data_type, display_name, parser_chain, error_message) logger.error(error_message) attribute_values = [] for attribute, value in iter(event_values.items()): attribute_values.append('{0:s}: {1!s}'.format(attribute, value)) message_string = ' '.join(attribute_values) except UnicodeDecodeError as exception: data_type = event_values.get('data_type', 'N/A') display_name = event_values.get('display_name', 'N/A') event_identifier = event_values.get('uuid', 'N/A') parser_chain = event_values.get('parser', 'N/A') error_message = 'Unicode decode error: {0!s}'.format(exception) error_message = ( 'Event: {0:s} data type: {1:s} display name: {2:s} ' 'parser chain: {3:s} with error: {4:s}').format( event_identifier, data_type, display_name, parser_chain, error_message) logger.error(error_message) message_string = '' # Strip carriage return and linefeed from the message strings. # Using the replace function here because it is faster than re.sub() or # string.strip(). return message_string.replace('\r', '').replace('\n', '')
288,081
Determines the formatted message strings. Args: format_string (str): message format string. short_format_string (str): short message format string. event_values (dict[str, object]): event values. Returns: tuple(str, str): formatted message string and short message string.
def _FormatMessages(self, format_string, short_format_string, event_values): message_string = self._FormatMessage(format_string, event_values) if short_format_string: short_message_string = self._FormatMessage( short_format_string, event_values) else: short_message_string = message_string # Truncate the short message string if necessary. if len(short_message_string) > 80: short_message_string = '{0:s}...'.format(short_message_string[:77]) return message_string, short_message_string
288,082
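_FormatMessages caps the short message at 80 characters by keeping the first 77 and appending '...'. A quick check of that arithmetic:

short_message_string = 'x' * 100

if len(short_message_string) > 80:
  short_message_string = '{0:s}...'.format(short_message_string[:77])

print(len(short_message_string))  # 80: 77 characters plus the ellipsis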
Determines the formatted message strings for an event object. Args: formatter_mediator (FormatterMediator): mediates the interactions between formatters and other components, such as storage and Windows EventLog resources. event (EventObject): event. Returns: tuple(str, str): formatted message string and short message string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter.
def GetMessages(self, formatter_mediator, event): if self.DATA_TYPE != event.data_type: raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format( event.data_type)) event_values = event.CopyToDict() return self._FormatMessages( self.FORMAT_STRING, self.FORMAT_STRING_SHORT, event_values)
288,084
Determines the short and long source for an event object. Args: event (EventObject): event. Returns: tuple(str, str): short and long source string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter.
def GetSources(self, event): if self.DATA_TYPE != event.data_type: raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format( event.data_type)) return self.SOURCE_SHORT, self.SOURCE_LONG
288,085
Determines the conditional formatted message strings. Args: event_values (dict[str, object]): event values. Returns: tuple(str, str): formatted message string and short message string.
def _ConditionalFormatMessages(self, event_values): # Check the value, not just the key: if A.b = None, hasattr(A, b) is True # but getattr(A, b, None) returns None, which evaluates to False. string_pieces = [] for map_index, attribute_name in enumerate(self._format_string_pieces_map): if not attribute_name or attribute_name in event_values: if attribute_name: attribute = event_values.get(attribute_name, None) # If an attribute is an int, yet has zero value we want to include # that in the format string, since that is still potentially valid # information. Otherwise we would like to skip it. # pylint: disable=unidiomatic-typecheck if (not isinstance(attribute, (bool, float)) and not isinstance(attribute, py2to3.INTEGER_TYPES) and not attribute): continue string_pieces.append(self.FORMAT_STRING_PIECES[map_index]) format_string = self.FORMAT_STRING_SEPARATOR.join(string_pieces) string_pieces = [] for map_index, attribute_name in enumerate( self._format_string_short_pieces_map): if not attribute_name or event_values.get(attribute_name, None): string_pieces.append(self.FORMAT_STRING_SHORT_PIECES[map_index]) short_format_string = self.FORMAT_STRING_SEPARATOR.join(string_pieces) return self._FormatMessages( format_string, short_format_string, event_values)
288,087
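A reduced illustration of the idea behind _ConditionalFormatMessages: each format-string piece is paired with the attribute it needs, and a piece is dropped when its attribute is missing or empty, except that numeric zeroes and booleans are kept. The piece/attribute pairs below are made up.

FORMAT_STRING_PIECES = ['User: {username}', 'Host: {hostname}', 'Port: {port}']
pieces_map = ['username', 'hostname', 'port']  # attribute each piece requires

event_values = {'username': 'alice', 'port': 0}  # hostname is absent

string_pieces = []
for map_index, attribute_name in enumerate(pieces_map):
  attribute = event_values.get(attribute_name, None)
  # Keep numeric zeroes and False, drop missing or empty values.
  if attribute is None or (
      not isinstance(attribute, (bool, int, float)) and not attribute):
    continue
  string_pieces.append(FORMAT_STRING_PIECES[map_index])

format_string = ' '.join(string_pieces)
print(format_string.format(**event_values))  # User: alice Port: 0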
Determines the formatted message strings for an event object. Args: formatter_mediator (FormatterMediator): mediates the interactions between formatters and other components, such as storage and Windows EventLog resources. event (EventObject): event. Returns: tuple(str, str): formatted message string and short message string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter.
def GetMessages(self, formatter_mediator, event): if self.DATA_TYPE != event.data_type: raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format( event.data_type)) event_values = event.CopyToDict() return self._ConditionalFormatMessages(event_values)
288,089
Compares the file entry against the filter. Args: file_entry (dfvfs.FileEntry): file entry to compare. Returns: bool: True if the file entry matches the filter, False if not or None if the filter does not apply.
def Matches(self, file_entry): if not self._date_time_ranges: return None for date_time_range in self._date_time_ranges: time_attribute = self._TIME_VALUE_MAPPINGS.get( date_time_range.time_value, None) if not time_attribute: continue timestamp = getattr(file_entry, time_attribute, None) if timestamp is None: continue if (date_time_range.start_date_time is not None and timestamp < date_time_range.start_date_time): return False if (date_time_range.end_date_time is not None and timestamp > date_time_range.end_date_time): return False return True
288,091
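The range test in Matches is a closed-interval comparison, with None bounds leaving that side of the range open. A standalone sketch using plain integers in place of the dfdatetime timestamp objects the filter actually compares:

def in_range(timestamp, start=None, end=None):
  if start is not None and timestamp < start:
    return False
  if end is not None and timestamp > end:
    return False
  return True

print(in_range(1500000000, start=1400000000, end=1600000000))  # True
print(in_range(1700000000, end=1600000000))  # False: past the end bound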
Prints a human readable version of the filter. Args: output_writer (CLIOutputWriter): output writer.
def Print(self, output_writer): if self._date_time_ranges: for date_time_range in self._date_time_ranges: if date_time_range.start_date_time is None: end_time_string = date_time_range.end_date_time.CopyToDateTimeString() output_writer.Write('\t{0:s} before {1:s}\n'.format( date_time_range.time_value, end_time_string)) elif date_time_range.end_date_time is None: start_time_string = ( date_time_range.start_date_time.CopyToDateTimeString()) output_writer.Write('\t{0:s} after {1:s}\n'.format( date_time_range.time_value, start_time_string)) else: start_time_string = ( date_time_range.start_date_time.CopyToDateTimeString()) end_time_string = date_time_range.end_date_time.CopyToDateTimeString() output_writer.Write('\t{0:s} between {1:s} and {2:s}\n'.format( date_time_range.time_value, start_time_string, end_time_string))
288,092
Initializes an extensions-based file entry filter. An extension is defined as "pdf" as in "document.pdf". Args: extensions (list[str]): a list of extension strings.
def __init__(self, extensions): super(ExtensionsFileEntryFilter, self).__init__() self._extensions = extensions
288,093
Compares the file entry against the filter. Args: file_entry (dfvfs.FileEntry): file entry to compare. Returns: bool: True if the file entry matches the filter, False if not or None if the filter does not apply.
def Matches(self, file_entry): location = getattr(file_entry.path_spec, 'location', None) if not location: return None if '.' not in location: return False _, _, extension = location.rpartition('.') return extension.lower() in self._extensions
288,094
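Extension matching keys off everything after the final dot of the path specification location, lowercased, so multi-part extensions such as .tar.gz match only their last component:

extensions = ['pdf', 'docx']

for location in ('/home/user/Report.PDF', '/tmp/archive.tar.gz', '/etc/hosts'):
  if '.' not in location:
    print(location, False)
    continue
  _, _, extension = location.rpartition('.')
  print(location, extension.lower() in extensions)  # Report.PDF -> True; the others -> False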
Prints a human readable version of the filter. Args: output_writer (CLIOutputWriter): output writer.
def Print(self, output_writer): if self._extensions: output_writer.Write('\textensions: {0:s}\n'.format( ', '.join(self._extensions)))
288,095
Initializes a names-based file entry filter. Args: names (list[str]): names.
def __init__(self, names): super(NamesFileEntryFilter, self).__init__() self._names = names
288,096
Compares the file entry against the filter. Args: file_entry (dfvfs.FileEntry): file entry to compare. Returns: bool: True if the file entry matches the filter.
def Matches(self, file_entry): if not self._names or not file_entry.IsFile(): return False return file_entry.name.lower() in self._names
288,097
Prints a human readable version of the filter. Args: output_writer (CLIOutputWriter): output writer.
def Print(self, output_writer): if self._names: output_writer.Write('\tnames: {0:s}\n'.format( ', '.join(self._names)))
288,098
Initializes a signature-based file entry filter. Args: specification_store (FormatSpecificationStore): a specification store. signature_identifiers (list[str]): signature identifiers.
def __init__(self, specification_store, signature_identifiers): super(SignaturesFileEntryFilter, self).__init__() self._file_scanner = None self._signature_identifiers = [] self._file_scanner = self._GetScanner( specification_store, signature_identifiers)
288,099
Initializes the scanner from the specification store. Args: specification_store (FormatSpecificationStore): a specification store. signature_identifiers (list[str]): signature identifiers. Returns: pysigscan.scanner: signature scanner or None.
def _GetScanner(self, specification_store, signature_identifiers): if not specification_store: return None scanner_object = pysigscan.scanner() for format_specification in specification_store.specifications: if format_specification.identifier not in signature_identifiers: continue for signature in format_specification.signatures: pattern_offset = signature.offset if pattern_offset is None: signature_flags = pysigscan.signature_flags.NO_OFFSET elif pattern_offset < 0: pattern_offset *= -1 signature_flags = pysigscan.signature_flags.RELATIVE_FROM_END else: signature_flags = pysigscan.signature_flags.RELATIVE_FROM_START scanner_object.add_signature( signature.identifier, pattern_offset, signature.pattern, signature_flags) self._signature_identifiers.append(format_specification.identifier) return scanner_object
288,100
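The offset handling in _GetScanner encodes three signature placements: anywhere (no offset), a fixed offset from the start, and a negative offset meaning relative to the end of the file. A small helper replicating just that mapping, with string stand-ins for the pysigscan.signature_flags constants:

def signature_placement(pattern_offset):
  # String stand-ins for pysigscan.signature_flags values.
  if pattern_offset is None:
    return None, 'NO_OFFSET'
  if pattern_offset < 0:
    return -pattern_offset, 'RELATIVE_FROM_END'
  return pattern_offset, 'RELATIVE_FROM_START'

print(signature_placement(None))  # (None, 'NO_OFFSET')
print(signature_placement(0))     # (0, 'RELATIVE_FROM_START')
print(signature_placement(-4))    # (4, 'RELATIVE_FROM_END')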
Compares the file entry against the filter. Args: file_entry (dfvfs.FileEntry): file entry to compare. Returns: bool: True if the file entry matches the filter, False if not or None if the filter does not apply.
def Matches(self, file_entry): if not self._file_scanner or not file_entry.IsFile(): return None file_object = file_entry.GetFileObject() if not file_object: return False try: scan_state = pysigscan.scan_state() self._file_scanner.scan_file_object(scan_state, file_object) except IOError as exception: # TODO: replace location by display name. location = getattr(file_entry.path_spec, 'location', '') logging.error(( '[skipping] unable to scan file: {0:s} for signatures ' 'with error: {1!s}').format(location, exception)) return False finally: file_object.close() return scan_state.number_of_scan_results > 0
288,101
Prints a human readable version of the filter. Args: output_writer (CLIOutputWriter): output writer.
def Print(self, output_writer): if self._file_scanner: output_writer.Write('\tsignature identifiers: {0:s}\n'.format( ', '.join(self._signature_identifiers)))
288,102
Compares the file entry against the filter collection. Args: file_entry (dfvfs.FileEntry): file entry to compare. Returns: bool: True if the file entry matches one of the filters. If no filters are provided or applicable the result will be True.
def Matches(self, file_entry): if not self._filters: return True results = [] for file_entry_filter in self._filters: result = file_entry_filter.Matches(file_entry) results.append(result) return True in results or False not in results
288,103
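The combination rule 'True in results or False not in results' reads: the collection matches if any filter matched, or if no filter rejected the entry (filters that do not apply return None). A quick truth check with hand-picked result lists:

def collection_matches(results):
  return True in results or False not in results

print(collection_matches([True, False]))  # True: one filter matched
print(collection_matches([None, None]))   # True: every filter abstained
print(collection_matches([False, None]))  # False: rejected and no match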
Prints a human readable version of the filter. Args: output_writer (CLIOutputWriter): output writer.
def Print(self, output_writer): if self._filters: output_writer.Write('Filters:\n') for file_entry_filter in self._filters: file_entry_filter.Print(output_writer)
288,104
Extracts events from a Windows Registry key. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
def ExtractEvents(self, parser_mediator, registry_key, **kwargs): # TODO: Test other Office versions to make sure this plugin is applicable. values_dict = {} for registry_value in registry_key.GetValues(): # Ignore any value not in the form: 'Item [0-9]+'. if not registry_value.name or not self._RE_VALUE_NAME.search( registry_value.name): continue # Ignore any value that is empty or that does not contain a string. if not registry_value.data or not registry_value.DataIsString(): continue value_string = registry_value.GetDataAsObject() values = self._RE_VALUE_DATA.findall(value_string) # Values will contain a list containing a tuple containing 2 values. if len(values) != 1 or len(values[0]) != 2: continue try: timestamp = int(values[0][0], 16) except ValueError: parser_mediator.ProduceExtractionWarning(( 'unable to convert filetime string to an integer for ' 'value: {0:s}.').format(registry_value.name)) continue event_data = OfficeMRUWindowsRegistryEventData() event_data.key_path = registry_key.path event_data.offset = registry_value.offset # TODO: split value string in individual values. event_data.value_string = value_string values_dict[registry_value.name] = value_string if not timestamp: date_time = dfdatetime_semantic_time.SemanticTime('Not set') else: date_time = dfdatetime_filetime.Filetime(timestamp=timestamp) # TODO: determine if this should be last written time. event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data) event_data = windows_events.WindowsRegistryEventData() event_data.key_path = registry_key.path event_data.offset = registry_key.offset event_data.regvalue = values_dict event_data.source_append = self._SOURCE_APPEND event = time_events.DateTimeValuesEvent( registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data)
288,106
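The value data parsed by ExtractEvents embeds the timestamp as hexadecimal, which int(values[0][0], 16) turns into a FILETIME: a count of 100-nanosecond intervals since 1601-01-01 UTC. A hedged conversion sketch with a hypothetical value:

from datetime import datetime, timedelta, timezone

FILETIME_EPOCH = datetime(1601, 1, 1, tzinfo=timezone.utc)

def filetime_to_datetime(filetime_hex):
  timestamp = int(filetime_hex, 16)  # 100ns intervals since 1601-01-01
  return FILETIME_EPOCH + timedelta(microseconds=timestamp // 10)

print(filetime_to_datetime('01CD0146EA1EADB0'))  # a date in 2012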
Parses a zeitgeist event row. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. query (str): query that created the row. row (sqlite3.Row): row.
def ParseZeitgeistEventRow( self, parser_mediator, query, row, **unused_kwargs): query_hash = hash(query) event_data = ZeitgeistActivityEventData() event_data.offset = self._GetRowValue(query_hash, row, 'id') event_data.query = query event_data.subject_uri = self._GetRowValue(query_hash, row, 'subj_uri') timestamp = self._GetRowValue(query_hash, row, 'timestamp') date_time = dfdatetime_java_time.JavaTime(timestamp=timestamp) event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_UNKNOWN) parser_mediator.ProduceEventWithEventData(event, event_data)
288,108
Parses a row from the database. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. query (str): query that created the row. row (sqlite3.Row): row.
def ParseRow(self, parser_mediator, query, row, **unused_kwargs): query_hash = hash(query) event_data = AndroidWebViewCacheEventData() event_data.content_length = self._GetRowValue( query_hash, row, 'contentlength') event_data.query = query event_data.url = self._GetRowValue(query_hash, row, 'url') timestamp = self._GetRowValue(query_hash, row, 'expires') if timestamp is not None: date_time = dfdatetime_java_time.JavaTime(timestamp=timestamp) event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_EXPIRATION) parser_mediator.ProduceEventWithEventData(event, event_data) timestamp = self._GetRowValue(query_hash, row, 'lastmodify') if timestamp is not None: date_time = dfdatetime_java_time.JavaTime(timestamp=timestamp) event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_MODIFICATION) parser_mediator.ProduceEventWithEventData(event, event_data)
288,110
Determines the formatted message strings for an event object. Args: formatter_mediator (FormatterMediator): mediates the interactions between formatters and other components, such as storage and Windows EventLog resources. event (EventObject): event. Returns: tuple(str, str): formatted message string and short message string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter.
def GetMessages(self, formatter_mediator, event): if self.DATA_TYPE != event.data_type: raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format( event.data_type)) event_values = event.CopyToDict() document_type = event_values.get('document_type', None) if document_type: event_values['document_type'] = self._DOC_TYPES.get( document_type, 'UNKNOWN') shared = event_values.get('shared', False) if shared: event_values['shared'] = 'Shared' else: event_values['shared'] = 'Private' return self._ConditionalFormatMessages(event_values)
288,111
Writes the body of an event object to the output. Args: event (EventObject): event.
def WriteEventBody(self, event): inode = getattr(event, 'inode', None) if inode is None: event.inode = 0 try: message, _ = self._output_mediator.GetFormattedMessages(event) except errors.WrongFormatter: message = None if message: event.message = message json_dict = self._JSON_SERIALIZER.WriteSerializedDict(event) json_string = json.dumps(json_dict, sort_keys=True) # dumps() returns an ascii-encoded byte string in Python 2. if py2to3.PY_2: json_string = codecs.decode(json_string, 'ascii') self._output_writer.Write(json_string) self._output_writer.Write('\n')
288,112
Parses the registered DLLs that receive event notifications. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
def _ParseRegisteredDLLs(self, parser_mediator, registry_key): notify_key = registry_key.GetSubkeyByName('Notify') if not notify_key: return for subkey in notify_key.GetSubkeys(): for trigger in self._TRIGGERS: handler_value = subkey.GetValueByName(trigger) if not handler_value: continue values_dict = { 'Application': subkey.name, 'Handler': handler_value.GetDataAsObject(), 'Trigger': trigger} command_value = subkey.GetValueByName('DllName') if command_value: values_dict['Command'] = command_value.GetDataAsObject() event_data = windows_events.WindowsRegistryEventData() event_data.key_path = subkey.path event_data.offset = subkey.offset event_data.regvalue = values_dict event_data.source_append = ': Winlogon' event = time_events.DateTimeValuesEvent( subkey.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data)
288,113
Parses the registered logon applications. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
def _ParseLogonApplications(self, parser_mediator, registry_key): for application in self._LOGON_APPLICATIONS: command_value = registry_key.GetValueByName(application) if not command_value: continue values_dict = { 'Application': application, 'Command': command_value.GetDataAsObject(), 'Trigger': 'Logon'} event_data = windows_events.WindowsRegistryEventData() event_data.key_path = registry_key.path event_data.offset = registry_key.offset event_data.regvalue = values_dict event_data.source_append = ': Winlogon' event = time_events.DateTimeValuesEvent( registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data)
288,114
Extracts events from a Windows Registry key. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
def ExtractEvents(self, parser_mediator, registry_key, **kwargs): self._ParseLogonApplications(parser_mediator, registry_key) self._ParseRegisteredDLLs(parser_mediator, registry_key)
288,115