Columns: docstring (string, 52 to 499 characters), function (string, 67 to 35.2k characters), __index_level_0__ (int64, 52.6k to 1.16M).
Parses and validates the signature. Args: value_data (bytes): value data. Returns: int: format type or None if format could not be determined. Raises: ParseError: if the value data could not be parsed.
def _CheckSignature(self, value_data): signature_map = self._GetDataTypeMap('uint32le') try: signature = self._ReadStructureFromByteStream( value_data, 0, signature_map) except (ValueError, errors.ParseError) as exception: raise errors.ParseError( 'Unable to parse signature value with error: {0!s}'.format( exception)) format_type = self._HEADER_SIGNATURES.get(signature, None) if format_type == self._FORMAT_TYPE_2003: # TODO: determine which format version is used (2003 or Vista). return self._FORMAT_TYPE_2003 if format_type == self._FORMAT_TYPE_8: cached_entry_signature = value_data[signature:signature + 4] if cached_entry_signature in ( self._CACHED_ENTRY_SIGNATURE_8_0, self._CACHED_ENTRY_SIGNATURE_8_1): return self._FORMAT_TYPE_8 elif format_type == self._FORMAT_TYPE_10: # Windows 10 uses the same cache entry signature as Windows 8.1 cached_entry_signature = value_data[signature:signature + 4] if cached_entry_signature == self._CACHED_ENTRY_SIGNATURE_8_1: return self._FORMAT_TYPE_10 return format_type
288,119
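As an illustration of the check above (not part of the parser): for the Windows 8 and Windows 10 formats the 32-bit header signature value doubles as the header size, so the 4-byte cached entry signature is read at that offset. A minimal standalone sketch, with the signature constants assumed for the example:

    import struct

    # Assumed constants for illustration only.
    HEADER_SIGNATURE_8 = 0x00000080
    CACHED_ENTRY_SIGNATURES_8 = (b'00ts', b'10ts')

    def check_windows8_signature(value_data):
        # The little-endian 32-bit value at offset 0 is the header signature; for
        # the Windows 8 format it is also the size of the header, hence the offset
        # of the first cached entry and its 4-byte signature.
        header_signature = struct.unpack_from('<I', value_data, 0)[0]
        if header_signature != HEADER_SIGNATURE_8:
            return False
        entry_signature = value_data[header_signature:header_signature + 4]
        return entry_signature in CACHED_ENTRY_SIGNATURES_8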
Parses the cached entry structure common for Windows 2003, Vista and 7. Args: value_data (bytes): value data. cached_entry_offset (int): offset of the first cached entry data relative to the start of the value data. Returns: appcompatcache_cached_entry_2003_common: cached entry structure common for Windows 2003, Windows Vista and Windows 7. Raises: ParseError: if the value data could not be parsed.
def _ParseCommon2003CachedEntry(self, value_data, cached_entry_offset): data_type_map = self._GetDataTypeMap( 'appcompatcache_cached_entry_2003_common') try: cached_entry = self._ReadStructureFromByteStream( value_data[cached_entry_offset:], cached_entry_offset, data_type_map) except (ValueError, errors.ParseError) as exception: raise errors.ParseError( 'Unable to parse cached entry value with error: {0!s}'.format( exception)) if cached_entry.path_size > cached_entry.maximum_path_size: raise errors.ParseError('Path size value out of bounds.') path_end_of_string_size = ( cached_entry.maximum_path_size - cached_entry.path_size) if cached_entry.path_size == 0 or path_end_of_string_size != 2: raise errors.ParseError('Unsupported path size values.') return cached_entry
288,121
Parses a Windows XP cached entry. Args: value_data (bytes): value data. cached_entry_offset (int): offset of the first cached entry data relative to the start of the value data. Returns: AppCompatCacheCachedEntry: cached entry. Raises: ParseError: if the value data could not be parsed.
def _ParseCachedEntryXP(self, value_data, cached_entry_offset): try: cached_entry = self._ReadStructureFromByteStream( value_data[cached_entry_offset:], cached_entry_offset, self._cached_entry_data_type_map) except (ValueError, errors.ParseError) as exception: raise errors.ParseError( 'Unable to parse cached entry value with error: {0!s}'.format( exception)) # TODO: have dtFabric handle string conversion. string_size = 0 for string_index in range(0, 528, 2): if (cached_entry.path[string_index] == 0 and cached_entry.path[string_index + 1] == 0): break string_size += 2 try: path = bytearray(cached_entry.path[0:string_size]).decode('utf-16-le') except UnicodeDecodeError: raise errors.ParseError('Unable to decode cached entry path to string') cached_entry_object = AppCompatCacheCachedEntry() cached_entry_object.cached_entry_size = ( self._cached_entry_data_type_map.GetByteSize()) cached_entry_object.file_size = cached_entry.file_size cached_entry_object.last_modification_time = ( cached_entry.last_modification_time) cached_entry_object.last_update_time = cached_entry.last_update_time cached_entry_object.path = path return cached_entry_object
288,122
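The manual end-of-string scan in the XP entry above can be isolated as a small helper; a sketch only, assuming a fixed-size UTF-16-LE buffer such as the 528-byte XP path field:

    def decode_fixed_size_utf16le(buffer, maximum_size=528):
        # Scan two bytes at a time until a UTF-16-LE NUL character (two zero bytes)
        # terminates the string, then decode only the bytes before it.
        string_size = 0
        for index in range(0, maximum_size, 2):
            if buffer[index] == 0 and buffer[index + 1] == 0:
                break
            string_size += 2
        return bytes(buffer[:string_size]).decode('utf-16-le')

    # decode_fixed_size_utf16le(b'C\x00:\x00\\\x00' + b'\x00' * 522) == 'C:\\'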
Parses a Windows 2003 cached entry. Args: value_data (bytes): value data. cached_entry_offset (int): offset of the first cached entry data relative to the start of the value data. Returns: AppCompatCacheCachedEntry: cached entry. Raises: ParseError: if the value data could not be parsed.
def _ParseCachedEntry2003(self, value_data, cached_entry_offset): try: cached_entry = self._ReadStructureFromByteStream( value_data[cached_entry_offset:], cached_entry_offset, self._cached_entry_data_type_map) except (ValueError, errors.ParseError) as exception: raise errors.ParseError( 'Unable to parse cached entry value with error: {0!s}'.format( exception)) path_size = cached_entry.path_size maximum_path_size = cached_entry.maximum_path_size path_offset = cached_entry.path_offset if path_offset > 0 and path_size > 0: path_size += path_offset maximum_path_size += path_offset try: path = value_data[path_offset:path_size].decode('utf-16-le') except UnicodeDecodeError: raise errors.ParseError('Unable to decode cached entry path to string') cached_entry_object = AppCompatCacheCachedEntry() cached_entry_object.cached_entry_size = ( self._cached_entry_data_type_map.GetByteSize()) cached_entry_object.file_size = getattr(cached_entry, 'file_size', None) cached_entry_object.last_modification_time = ( cached_entry.last_modification_time) cached_entry_object.path = path return cached_entry_object
288,123
Parses a Windows Vista cached entry. Args: value_data (bytes): value data. cached_entry_offset (int): offset of the first cached entry data relative to the start of the value data. Returns: AppCompatCacheCachedEntry: cached entry. Raises: ParseError: if the value data could not be parsed.
def _ParseCachedEntryVista(self, value_data, cached_entry_offset): try: cached_entry = self._ReadStructureFromByteStream( value_data[cached_entry_offset:], cached_entry_offset, self._cached_entry_data_type_map) except (ValueError, errors.ParseError) as exception: raise errors.ParseError( 'Unable to parse cached entry value with error: {0!s}'.format( exception)) path_size = cached_entry.path_size maximum_path_size = cached_entry.maximum_path_size path_offset = cached_entry.path_offset if path_offset > 0 and path_size > 0: path_size += path_offset maximum_path_size += path_offset try: path = value_data[path_offset:path_size].decode('utf-16-le') except UnicodeDecodeError: raise errors.ParseError('Unable to decode cached entry path to string') cached_entry_object = AppCompatCacheCachedEntry() cached_entry_object.cached_entry_size = ( self._cached_entry_data_type_map.GetByteSize()) cached_entry_object.insertion_flags = cached_entry.insertion_flags cached_entry_object.last_modification_time = ( cached_entry.last_modification_time) cached_entry_object.path = path cached_entry_object.shim_flags = cached_entry.shim_flags return cached_entry_object
288,124
Parses a Windows 8.0 or 8.1 cached entry. Args: value_data (bytes): value data. cached_entry_offset (int): offset of the first cached entry data relative to the start of the value data. Returns: AppCompatCacheCachedEntry: cached entry. Raises: ParseError: if the value data could not be parsed.
def _ParseCachedEntry8(self, value_data, cached_entry_offset): try: cached_entry = self._ReadStructureFromByteStream( value_data[cached_entry_offset:], cached_entry_offset, self._cached_entry_data_type_map) except (ValueError, errors.ParseError) as exception: raise errors.ParseError( 'Unable to parse cached entry value with error: {0!s}'.format( exception)) if cached_entry.signature not in ( self._CACHED_ENTRY_SIGNATURE_8_0, self._CACHED_ENTRY_SIGNATURE_8_1): raise errors.ParseError('Unsupported cache entry signature') cached_entry_data = value_data[cached_entry_offset:] if cached_entry.signature == self._CACHED_ENTRY_SIGNATURE_8_0: data_type_map_name = 'appcompatcache_cached_entry_body_8_0' elif cached_entry.signature == self._CACHED_ENTRY_SIGNATURE_8_1: data_type_map_name = 'appcompatcache_cached_entry_body_8_1' data_type_map = self._GetDataTypeMap(data_type_map_name) context = dtfabric_data_maps.DataTypeMapContext() try: cached_entry_body = self._ReadStructureFromByteStream( cached_entry_data[12:], cached_entry_offset + 12, data_type_map, context=context) except (ValueError, errors.ParseError) as exception: raise errors.ParseError( 'Unable to parse cached entry body with error: {0!s}'.format( exception)) data_offset = context.byte_size data_size = cached_entry_body.data_size cached_entry_object = AppCompatCacheCachedEntry() cached_entry_object.cached_entry_size = ( 12 + cached_entry.cached_entry_data_size) cached_entry_object.insertion_flags = cached_entry_body.insertion_flags cached_entry_object.last_modification_time = ( cached_entry_body.last_modification_time) cached_entry_object.path = cached_entry_body.path cached_entry_object.shim_flags = cached_entry_body.shim_flags if data_size > 0: cached_entry_object.data = cached_entry_data[ data_offset:data_offset + data_size] return cached_entry_object
288,125
Parses the header. Args: format_type (int): format type. value_data (bytes): value data. Returns: AppCompatCacheHeader: header. Raises: ParseError: if the value data could not be parsed.
def _ParseHeader(self, format_type, value_data): data_type_map_name = self._HEADER_DATA_TYPE_MAP_NAMES.get(format_type, None) if not data_type_map_name: raise errors.ParseError( 'Unsupported format type: {0:d}'.format(format_type)) data_type_map = self._GetDataTypeMap(data_type_map_name) try: header = self._ReadStructureFromByteStream( value_data, 0, data_type_map) except (ValueError, errors.ParseError) as exception: raise errors.ParseError( 'Unable to parse header value with error: {0!s}'.format( exception)) header_data_size = data_type_map.GetByteSize() if format_type == self._FORMAT_TYPE_10: header_data_size = header.signature cache_header = AppCompatCacheHeader() cache_header.header_size = header_data_size cache_header.number_of_cached_entries = getattr( header, 'number_of_cached_entries', 0) return cache_header
288,126
Extracts events from a Windows Registry key. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key. Raises: ParseError: if the value data could not be parsed.
def ExtractEvents(self, parser_mediator, registry_key, **kwargs): value = registry_key.GetValueByName('AppCompatCache') if not value: return value_data = value.data value_data_size = len(value.data) format_type = self._CheckSignature(value_data) if not format_type: parser_mediator.ProduceExtractionWarning( 'Unsupported signature in AppCompatCache key: {0:s}'.format( registry_key.path)) return header_object = self._ParseHeader(format_type, value_data) # On Windows Vista and 2008 when the cache is empty it will # only consist of the header. if value_data_size <= header_object.header_size: return cached_entry_offset = header_object.header_size self._cached_entry_data_type_map = self._GetCachedEntryDataTypeMap( format_type, value_data, cached_entry_offset) if not self._cached_entry_data_type_map: raise errors.ParseError('Unable to determine cached entry data type.') parse_cached_entry_function = None if format_type == self._FORMAT_TYPE_XP: parse_cached_entry_function = self._ParseCachedEntryXP elif format_type == self._FORMAT_TYPE_2003: parse_cached_entry_function = self._ParseCachedEntry2003 elif format_type == self._FORMAT_TYPE_VISTA: parse_cached_entry_function = self._ParseCachedEntryVista elif format_type == self._FORMAT_TYPE_7: parse_cached_entry_function = self._ParseCachedEntry7 elif format_type == self._FORMAT_TYPE_8: parse_cached_entry_function = self._ParseCachedEntry8 elif format_type == self._FORMAT_TYPE_10: parse_cached_entry_function = self._ParseCachedEntry10 cached_entry_index = 0 while cached_entry_offset < value_data_size: cached_entry_object = parse_cached_entry_function( value_data, cached_entry_offset) event_data = AppCompatCacheEventData() event_data.entry_index = cached_entry_index + 1 event_data.key_path = registry_key.path event_data.offset = cached_entry_offset event_data.path = cached_entry_object.path if cached_entry_object.last_modification_time is not None: if not cached_entry_object.last_modification_time: date_time = dfdatetime_semantic_time.SemanticTime('Not set') else: date_time = dfdatetime_filetime.Filetime( timestamp=cached_entry_object.last_modification_time) # TODO: refactor to file modification event. event = time_events.DateTimeValuesEvent( date_time, 'File Last Modification Time') parser_mediator.ProduceEventWithEventData(event, event_data) if cached_entry_object.last_update_time is not None: if not cached_entry_object.last_update_time: date_time = dfdatetime_semantic_time.SemanticTime('Not set') else: date_time = dfdatetime_filetime.Filetime( timestamp=cached_entry_object.last_update_time) # TODO: refactor to process run event. event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_LAST_RUN) parser_mediator.ProduceEventWithEventData(event, event_data) cached_entry_offset += cached_entry_object.cached_entry_size cached_entry_index += 1 if (header_object.number_of_cached_entries != 0 and cached_entry_index >= header_object.number_of_cached_entries): break
288,127
Determines the formatted message strings for an event object. Args: formatter_mediator (FormatterMediator): mediates the interactions between formatters and other components, such as storage and Windows EventLog resources. event (EventObject): event. Returns: tuple(str, str): formatted message string and short message string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter.
def GetMessages(self, formatter_mediator, event): regvalue = getattr(event, 'regvalue', {}) # Loop over all the registry value names in the service key. for service_value_name in regvalue.keys(): # A temporary variable so we can refer to this long name more easily. service_enums = human_readable_service_enums.SERVICE_ENUMS # Check if we can make the value more human readable. if service_value_name in service_enums.keys(): service_enum = service_enums[service_value_name] # Find the human readable version of the name and fall back to the # raw value if it's not found. human_readable_value = service_enum.get( regvalue[service_value_name], regvalue[service_value_name]) regvalue[service_value_name] = human_readable_value return super(WinRegistryServiceFormatter, self).GetMessages( formatter_mediator, event)
288,128
Returns the deserialized content of a plist as a dictionary object. Args: file_object (dfvfs.FileIO): a file-like object to parse. Returns: dict[str, object]: contents of the plist. Raises: UnableToParseFile: when the file cannot be parsed.
def GetTopLevel(self, file_object): try: top_level_object = biplist.readPlist(file_object) except (biplist.InvalidPlistException, biplist.NotBinaryPlistException) as exception: raise errors.UnableToParseFile( 'Unable to parse plist with error: {0!s}'.format(exception)) return top_level_object
288,129
Parses a plist file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): a file-like object. Raises: UnableToParseFile: when the file cannot be parsed.
def ParseFileObject(self, parser_mediator, file_object): filename = parser_mediator.GetFilename() file_size = file_object.get_size() if file_size <= 0: raise errors.UnableToParseFile( 'File size: {0:d} bytes is less than or equal to 0.'.format(file_size)) # 50MB is 10x larger than any plist seen to date. if file_size > 50000000: raise errors.UnableToParseFile( 'File size: {0:d} bytes is larger than 50 MB.'.format(file_size)) top_level_object = self.GetTopLevel(file_object) if not top_level_object: raise errors.UnableToParseFile( 'Unable to parse: {0:s}, skipping.'.format(filename)) # TODO: add a parser filter. matching_plugin = None for plugin in self._plugins: try: plugin.UpdateChainAndProcess( parser_mediator, plist_name=filename, top_level=top_level_object) matching_plugin = plugin except errors.WrongPlistPlugin as exception: logger.debug('Wrong plugin: {0:s} for: {1:s}'.format( exception.args[0], exception.args[1])) if not matching_plugin and self._default_plugin: self._default_plugin.UpdateChainAndProcess( parser_mediator, plist_name=filename, top_level=top_level_object)
288,130
Updates the number of event reports. Args: number_of_consumed_reports (int): total number of event reports consumed by the process. number_of_produced_reports (int): total number of event reports produced by the process. Returns: bool: True if either number of event reports has increased. Raises: ValueError: if the consumed or produced number of event reports is smaller than the value of the previous update.
def UpdateNumberOfEventReports( self, number_of_consumed_reports, number_of_produced_reports): consumed_reports_delta = 0 if number_of_consumed_reports is not None: if number_of_consumed_reports < self.number_of_consumed_reports: raise ValueError( 'Number of consumed reports smaller than previous update.') consumed_reports_delta = ( number_of_consumed_reports - self.number_of_consumed_reports) self.number_of_consumed_reports = number_of_consumed_reports self.number_of_consumed_reports_delta = consumed_reports_delta produced_reports_delta = 0 if number_of_produced_reports is not None: if number_of_produced_reports < self.number_of_produced_reports: raise ValueError( 'Number of produced reports smaller than previous update.') produced_reports_delta = ( number_of_produced_reports - self.number_of_produced_reports) self.number_of_produced_reports = number_of_produced_reports self.number_of_produced_reports_delta = produced_reports_delta return consumed_reports_delta > 0 or produced_reports_delta > 0
288,132
Updates the number of events. Args: number_of_consumed_events (int): total number of events consumed by the process. number_of_produced_events (int): total number of events produced by the process. Returns: bool: True if either number of events has increased. Raises: ValueError: if the consumed or produced number of events is smaller than the value of the previous update.
def UpdateNumberOfEvents( self, number_of_consumed_events, number_of_produced_events): consumed_events_delta = 0 if number_of_consumed_events is not None: if number_of_consumed_events < self.number_of_consumed_events: raise ValueError( 'Number of consumed events smaller than previous update.') consumed_events_delta = ( number_of_consumed_events - self.number_of_consumed_events) self.number_of_consumed_events = number_of_consumed_events self.number_of_consumed_events_delta = consumed_events_delta produced_events_delta = 0 if number_of_produced_events is not None: if number_of_produced_events < self.number_of_produced_events: raise ValueError( 'Number of produced events smaller than previous update.') produced_events_delta = ( number_of_produced_events - self.number_of_produced_events) self.number_of_produced_events = number_of_produced_events self.number_of_produced_events_delta = produced_events_delta return consumed_events_delta > 0 or produced_events_delta > 0
288,133
Updates the number of event sources. Args: number_of_consumed_sources (int): total number of event sources consumed by the process. number_of_produced_sources (int): total number of event sources produced by the process. Returns: bool: True if either number of event sources has increased. Raises: ValueError: if the consumed or produced number of event sources is smaller than the value of the previous update.
def UpdateNumberOfEventSources( self, number_of_consumed_sources, number_of_produced_sources): consumed_sources_delta = 0 if number_of_consumed_sources is not None: if number_of_consumed_sources < self.number_of_consumed_sources: raise ValueError( 'Number of consumed sources smaller than previous update.') consumed_sources_delta = ( number_of_consumed_sources - self.number_of_consumed_sources) self.number_of_consumed_sources = number_of_consumed_sources self.number_of_consumed_sources_delta = consumed_sources_delta produced_sources_delta = 0 if number_of_produced_sources is not None: if number_of_produced_sources < self.number_of_produced_sources: raise ValueError( 'Number of produced sources smaller than previous update.') produced_sources_delta = ( number_of_produced_sources - self.number_of_produced_sources) self.number_of_produced_sources = number_of_produced_sources self.number_of_produced_sources_delta = produced_sources_delta return consumed_sources_delta > 0 or produced_sources_delta > 0
288,134
Updates the number of event tags. Args: number_of_consumed_event_tags (int): total number of event tags consumed by the process. number_of_produced_event_tags (int): total number of event tags produced by the process. Returns: bool: True if either number of event tags has increased. Raises: ValueError: if the consumed or produced number of event tags is smaller than the value of the previous update.
def UpdateNumberOfEventTags( self, number_of_consumed_event_tags, number_of_produced_event_tags): consumed_event_tags_delta = 0 if number_of_consumed_event_tags is not None: if number_of_consumed_event_tags < self.number_of_consumed_event_tags: raise ValueError( 'Number of consumed event tags smaller than previous update.') consumed_event_tags_delta = ( number_of_consumed_event_tags - self.number_of_consumed_event_tags) self.number_of_consumed_event_tags = number_of_consumed_event_tags self.number_of_consumed_event_tags_delta = consumed_event_tags_delta produced_event_tags_delta = 0 if number_of_produced_event_tags is not None: if number_of_produced_event_tags < self.number_of_produced_event_tags: raise ValueError( 'Number of produced event tags smaller than previous update.') produced_event_tags_delta = ( number_of_produced_event_tags - self.number_of_produced_event_tags) self.number_of_produced_event_tags = number_of_produced_event_tags self.number_of_produced_event_tags_delta = produced_event_tags_delta return consumed_event_tags_delta > 0 or produced_event_tags_delta > 0
288,135
Updates the number of warnings. Args: number_of_consumed_warnings (int): total number of warnings consumed by the process. number_of_produced_warnings (int): total number of warnings produced by the process. Returns: bool: True if either number of warnings has increased. Raises: ValueError: if the consumed or produced number of warnings is smaller than the value of the previous update.
def UpdateNumberOfWarnings( self, number_of_consumed_warnings, number_of_produced_warnings): consumed_warnings_delta = 0 if number_of_consumed_warnings is not None: if number_of_consumed_warnings < self.number_of_consumed_warnings: raise ValueError( 'Number of consumed warnings smaller than previous update.') consumed_warnings_delta = ( number_of_consumed_warnings - self.number_of_consumed_warnings) self.number_of_consumed_warnings = number_of_consumed_warnings self.number_of_consumed_warnings_delta = consumed_warnings_delta produced_warnings_delta = 0 if number_of_produced_warnings is not None: if number_of_produced_warnings < self.number_of_produced_warnings: raise ValueError( 'Number of produced warnings smaller than previous update.') produced_warnings_delta = ( number_of_produced_warnings - self.number_of_produced_warnings) self.number_of_produced_warnings = number_of_produced_warnings self.number_of_produced_warnings_delta = produced_warnings_delta return consumed_warnings_delta > 0 or produced_warnings_delta > 0
288,136
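The five Update* methods above all follow the same delta-tracking pattern; a condensed, generic sketch of that pattern (the names here are illustrative, not the actual attribute names) is:

    def update_counter(previous_total, new_total):
        # Returns the updated total and the delta since the previous update,
        # rejecting totals that went backwards.
        if new_total is None:
            return previous_total, 0
        if new_total < previous_total:
            raise ValueError('Number smaller than previous update.')
        return new_total, new_total - previous_total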
Determines the formatted message strings for an event object. Args: formatter_mediator (FormatterMediator): mediates the interactions between formatters and other components, such as storage and Windows EventLog resources. event (EventObject): event. Returns: tuple(str, str): formatted message string and short message string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter.
def GetMessages(self, formatter_mediator, event): if self.DATA_TYPE != event.data_type: raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format( event.data_type)) event_values = event.CopyToDict() http_headers = event_values.get('http_headers', None) if http_headers: event_values['http_headers'] = http_headers.replace('\r\n', ' - ') if event_values.get('recovered', None): event_values['recovered_string'] = '[Recovered Entry]' cached_file_path = event_values.get('cached_filename', None) if cached_file_path: cache_directory_name = event_values.get('cache_directory_name', None) if cache_directory_name: cached_file_path = '\\'.join([cache_directory_name, cached_file_path]) event_values['cached_file_path'] = cached_file_path return self._ConditionalFormatMessages(event_values)
288,144
Initializes a Windows Registry key filter. Args: key_path (str): key path.
def __init__(self, key_path): super(WindowsRegistryKeyPathFilter, self).__init__() key_path = key_path.rstrip('\\') self._key_path = key_path key_path = key_path.upper() self._key_path_upper = key_path self._wow64_key_path = None self._wow64_key_path_upper = None if key_path.startswith(self._CONTROL_SET_PREFIX.upper()): self._key_path_prefix, _, self._key_path_suffix = key_path.partition( 'CurrentControlSet'.upper()) else: self._key_path_prefix = None self._key_path_suffix = None # Handle WoW64 Windows Registry key redirection. # Also see: # https://msdn.microsoft.com/en-us/library/windows/desktop/ # ms724072%28v=vs.85%29.aspx # https://msdn.microsoft.com/en-us/library/windows/desktop/ # aa384253(v=vs.85).aspx wow64_prefix = None for key_path_prefix in self._WOW64_PREFIXES: if key_path.startswith(key_path_prefix.upper()): wow64_prefix = key_path_prefix break if wow64_prefix: key_path_suffix = self._key_path[len(wow64_prefix):] if key_path_suffix.startswith('\\'): key_path_suffix = key_path_suffix[1:] self._wow64_key_path = '\\'.join([ wow64_prefix, 'Wow6432Node', key_path_suffix]) self._wow64_key_path_upper = self._wow64_key_path.upper()
288,145
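A short, self-contained illustration of the WoW64 redirection built above; the prefix constant is assumed for the example:

    WOW64_PREFIX = 'HKEY_LOCAL_MACHINE\\Software'

    def build_wow64_key_path(key_path):
        # Insert Wow6432Node directly after the matched prefix and keep the rest
        # of the key path unchanged.
        key_path_suffix = key_path[len(WOW64_PREFIX):].lstrip('\\')
        return '\\'.join([WOW64_PREFIX, 'Wow6432Node', key_path_suffix])

    # build_wow64_key_path('HKEY_LOCAL_MACHINE\\Software\\Vendor\\App') returns
    # 'HKEY_LOCAL_MACHINE\\Software\\Wow6432Node\\Vendor\\App'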
Determines if a Windows Registry key matches the filter. Args: registry_key (dfwinreg.WinRegistryKey): Windows Registry key. Returns: bool: True if the keys match.
def Match(self, registry_key): key_path = registry_key.path.upper() if self._key_path_prefix and self._key_path_suffix: if (key_path.startswith(self._key_path_prefix) and key_path.endswith(self._key_path_suffix)): key_path_segment = key_path[ len(self._key_path_prefix):-len(self._key_path_suffix)] if key_path_segment.startswith('ControlSet'.upper()): try: control_set = int(key_path_segment[10:], 10) except ValueError: control_set = None # TODO: check if control_set is in bounds. return control_set is not None return key_path in (self._key_path_upper, self._wow64_key_path_upper)
288,147
Initializes a Windows Registry key filter. Args: key_path_prefix (str): the key path prefix.
def __init__(self, key_path_prefix): super(WindowsRegistryKeyPathPrefixFilter, self).__init__() self._key_path_prefix = key_path_prefix
288,148
Initializes a Windows Registry key filter. Args: key_path_suffix (str): the key path suffix.
def __init__(self, key_path_suffix): super(WindowsRegistryKeyPathSuffixFilter, self).__init__() self._key_path_suffix = key_path_suffix
288,149
Initializes a Windows Registry key filter. Args: value_names (list[str]): name of values that should be present in the key.
def __init__(self, value_names): super(WindowsRegistryKeyWithValuesFilter, self).__init__() self._value_names = frozenset(value_names)
288,150
Determines if a Windows Registry key matches the filter. Args: registry_key (dfwinreg.WinRegistryKey): Windows Registry key. Returns: bool: True if the keys match.
def Match(self, registry_key): value_names = frozenset([ registry_value.name for registry_value in registry_key.GetValues()]) return self._value_names.issubset(value_names)
288,151
Processes a Windows Registry key or value. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key. Raises: ValueError: If the Windows Registry key is not set.
def Process(self, parser_mediator, registry_key, **kwargs): if registry_key is None: raise ValueError('Windows Registry key is not set.') # This will raise if unhandled keyword arguments are passed. super(WindowsRegistryPlugin, self).Process(parser_mediator, **kwargs) self.ExtractEvents(parser_mediator, registry_key, **kwargs)
288,152
Updates the parser chain and processes a Windows Registry key or value. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key. Raises: ValueError: If the Windows Registry key is not set.
def UpdateChainAndProcess(self, parser_mediator, registry_key, **kwargs): parser_mediator.AppendToParserChain(self) try: self.Process(parser_mediator, registry_key, **kwargs) finally: parser_mediator.PopFromParserChain()
288,153
Retrieves the date and time from a FILETIME timestamp. Args: filetime (int): FILETIME timestamp. Returns: dfdatetime.DateTimeValues: date and time.
def _GetDateTime(self, filetime): if filetime == 0: return dfdatetime_semantic_time.SemanticTime('Not set') return dfdatetime_filetime.Filetime(timestamp=filetime)
288,157
Extracts data from an NTFS $MFT attribute. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. mft_entry (pyfsntfs.file_entry): MFT entry. mft_attribute (pyfsntfs.attribute): MFT attribute.
def _ParseMFTAttribute(self, parser_mediator, mft_entry, mft_attribute):
  if mft_entry.is_empty() or mft_entry.base_record_file_reference != 0:
    return

  if mft_attribute.attribute_type in [
      self._MFT_ATTRIBUTE_STANDARD_INFORMATION,
      self._MFT_ATTRIBUTE_FILE_NAME]:
    # The standard information and file name attributes carry the NTFS
    # timestamps (creation, modification, access and entry modification).
    file_attribute_flags = getattr(
        mft_attribute, 'file_attribute_flags', None)
    name = getattr(mft_attribute, 'name', None)
    parent_file_reference = getattr(
        mft_attribute, 'parent_file_reference', None)

    event_data = NTFSFileStatEventData()
    event_data.attribute_type = mft_attribute.attribute_type
    event_data.file_attribute_flags = file_attribute_flags
    event_data.file_reference = mft_entry.file_reference
    event_data.is_allocated = mft_entry.is_allocated()
    event_data.name = name
    event_data.parent_file_reference = parent_file_reference

    try:
      creation_time = mft_attribute.get_creation_time_as_integer()
    except OverflowError as exception:
      parser_mediator.ProduceExtractionWarning((
          'unable to read the creation timestamp from MFT attribute: '
          '0x{0:08x} with error: {1!s}').format(
              mft_attribute.attribute_type, exception))
      creation_time = None

    if creation_time is not None:
      date_time = self._GetDateTime(creation_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_CREATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    try:
      modification_time = mft_attribute.get_modification_time_as_integer()
    except OverflowError as exception:
      parser_mediator.ProduceExtractionWarning((
          'unable to read the modification timestamp from MFT attribute: '
          '0x{0:08x} with error: {1!s}').format(
              mft_attribute.attribute_type, exception))
      modification_time = None

    if modification_time is not None:
      date_time = self._GetDateTime(modification_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    try:
      access_time = mft_attribute.get_access_time_as_integer()
    except OverflowError as exception:
      parser_mediator.ProduceExtractionWarning((
          'unable to read the access timestamp from MFT attribute: '
          '0x{0:08x} with error: {1!s}').format(
              mft_attribute.attribute_type, exception))
      access_time = None

    if access_time is not None:
      date_time = self._GetDateTime(access_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    try:
      entry_modification_time = (
          mft_attribute.get_entry_modification_time_as_integer())
    except OverflowError as exception:
      parser_mediator.ProduceExtractionWarning((
          'unable to read the entry modification timestamp from MFT '
          'attribute: 0x{0:08x} with error: {1!s}').format(
              mft_attribute.attribute_type, exception))
      entry_modification_time = None

    if entry_modification_time is not None:
      date_time = self._GetDateTime(entry_modification_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_ENTRY_MODIFICATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

  elif mft_attribute.attribute_type == self._MFT_ATTRIBUTE_OBJECT_ID:
    # The object identifier attribute carries the distributed link tracking
    # (droid) identifiers.
    display_name = '$MFT: {0:d}-{1:d}'.format(
        mft_entry.file_reference & 0xffffffffffff,
        mft_entry.file_reference >> 48)

    if mft_attribute.droid_file_identifier:
      try:
        self._ParseDistributedTrackingIdentifier(
            parser_mediator, mft_attribute.droid_file_identifier,
            display_name)
      except (TypeError, ValueError) as exception:
        parser_mediator.ProduceExtractionWarning((
            'unable to read droid file identifier from attribute: 0x{0:08x} '
            'with error: {1!s}').format(
                mft_attribute.attribute_type, exception))

    if mft_attribute.birth_droid_file_identifier:
      try:
        self._ParseDistributedTrackingIdentifier(
            parser_mediator, mft_attribute.birth_droid_file_identifier,
            display_name)
      except (TypeError, ValueError) as exception:
        parser_mediator.ProduceExtractionWarning((
            'unable to read birth droid file identifier from attribute: '
            '0x{0:08x} with error: {1!s}').format(
                mft_attribute.attribute_type, exception))
288,158
Extracts data from an NTFS $MFT entry. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. mft_entry (pyfsntfs.file_entry): MFT entry.
def _ParseMFTEntry(self, parser_mediator, mft_entry): for attribute_index in range(0, mft_entry.number_of_attributes): try: mft_attribute = mft_entry.get_attribute(attribute_index) self._ParseMFTAttribute(parser_mediator, mft_entry, mft_attribute) except IOError as exception: parser_mediator.ProduceExtractionWarning(( 'unable to parse MFT attribute: {0:d} with error: {1!s}').format( attribute_index, exception))
288,159
Parses an NTFS $MFT metadata file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): file-like object.
def ParseFileObject(self, parser_mediator, file_object): mft_metadata_file = pyfsntfs.mft_metadata_file() try: mft_metadata_file.open_file_object(file_object) except IOError as exception: parser_mediator.ProduceExtractionWarning( 'unable to open file with error: {0!s}'.format(exception)) for entry_index in range(0, mft_metadata_file.number_of_file_entries): try: mft_entry = mft_metadata_file.get_file_entry(entry_index) self._ParseMFTEntry(parser_mediator, mft_entry) except IOError as exception: parser_mediator.ProduceExtractionWarning(( 'unable to parse MFT entry: {0:d} with error: {1!s}').format( entry_index, exception)) mft_metadata_file.close()
288,160
Parses a USN change journal. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. usn_change_journal (pyfsntfs.usn_change_journal): USN change journal. Raises: ParseError: if a USN change journal record cannot be parsed.
def _ParseUSNChangeJournal(self, parser_mediator, usn_change_journal): if not usn_change_journal: return usn_record_map = self._GetDataTypeMap('usn_record_v2') usn_record_data = usn_change_journal.read_usn_record() while usn_record_data: current_offset = usn_change_journal.get_offset() try: usn_record = self._ReadStructureFromByteStream( usn_record_data, current_offset, usn_record_map) except (ValueError, errors.ParseError) as exception: raise errors.ParseError(( 'Unable to parse USN record at offset: 0x{0:08x} with error: ' '{1!s}').format(current_offset, exception)) # Per MSDN we need to use name offset for forward compatibility. name_offset = usn_record.name_offset - 60 utf16_stream = usn_record.name[name_offset:usn_record.name_size] try: name_string = utf16_stream.decode('utf-16-le') except (UnicodeDecodeError, UnicodeEncodeError) as exception: name_string = utf16_stream.decode('utf-16-le', errors='replace') parser_mediator.ProduceExtractionWarning(( 'unable to decode USN record name string with error: ' '{0:s}. Characters that cannot be decoded will be replaced ' 'with "?" or "\\ufffd".').format(exception)) event_data = NTFSUSNChangeEventData() event_data.file_attribute_flags = usn_record.file_attribute_flags event_data.file_reference = usn_record.file_reference event_data.filename = name_string event_data.offset = current_offset event_data.parent_file_reference = usn_record.parent_file_reference event_data.update_reason_flags = usn_record.update_reason_flags event_data.update_sequence_number = usn_record.update_sequence_number event_data.update_source_flags = usn_record.update_source_flags if not usn_record.update_date_time: date_time = dfdatetime_semantic_time.SemanticTime('Not set') else: date_time = dfdatetime_filetime.Filetime( timestamp=usn_record.update_date_time) event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_ENTRY_MODIFICATION) parser_mediator.ProduceEventWithEventData(event, event_data) usn_record_data = usn_change_journal.read_usn_record()
288,161
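The 'name_offset - 60' rebasing above reflects that, for a version 2 USN record, the fixed-size portion before the name data is 60 bytes while name_offset is relative to the start of the record. A sketch of the same computation in isolation, mirroring the slicing used above:

    USN_RECORD_V2_FIXED_SIZE = 60  # bytes preceding the variable-length name data

    def extract_usn_name(usn_record):
        # Rebase the record-relative name offset onto the name field; in the common
        # case name_offset equals the fixed size, so the slice starts at 0.
        name_offset = usn_record.name_offset - USN_RECORD_V2_FIXED_SIZE
        utf16_stream = usn_record.name[name_offset:usn_record.name_size]
        return utf16_stream.decode('utf-16-le', errors='replace')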
Parses an NTFS $UsnJrnl metadata file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): file-like object.
def ParseFileObject(self, parser_mediator, file_object): volume = pyfsntfs.volume() try: volume.open_file_object(file_object) except IOError as exception: parser_mediator.ProduceExtractionWarning( 'unable to open NTFS volume with error: {0!s}'.format(exception)) try: usn_change_journal = volume.get_usn_change_journal() self._ParseUSNChangeJournal(parser_mediator, usn_change_journal) finally: volume.close()
288,162
Initializes the parser. Args: origin (str): origin of the event.
def __init__(self, origin): super(ShellItemsParser, self).__init__() self._origin = origin self._path_segments = []
288,163
Parses a shell item. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. shell_item (pyfwsi.item): shell item.
def _ParseShellItem(self, parser_mediator, shell_item): path_segment = self._ParseShellItemPathSegment(shell_item) self._path_segments.append(path_segment) event_data = shell_item_events.ShellItemFileEntryEventData() event_data.origin = self._origin event_data.shell_item_path = self.CopyToPath() if isinstance(shell_item, pyfwsi.file_entry): event_data.name = shell_item.name for extension_block in shell_item.extension_blocks: if isinstance(extension_block, pyfwsi.file_entry_extension): long_name = extension_block.long_name localized_name = extension_block.localized_name file_reference = extension_block.file_reference if file_reference: file_reference = '{0:d}-{1:d}'.format( file_reference & 0xffffffffffff, file_reference >> 48) event_data.file_reference = file_reference event_data.localized_name = localized_name event_data.long_name = long_name fat_date_time = extension_block.get_creation_time_as_integer() if fat_date_time != 0: date_time = dfdatetime_fat_date_time.FATDateTime( fat_date_time=fat_date_time) event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_CREATION) parser_mediator.ProduceEventWithEventData(event, event_data) fat_date_time = extension_block.get_access_time_as_integer() if fat_date_time != 0: date_time = dfdatetime_fat_date_time.FATDateTime( fat_date_time=fat_date_time) event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS) parser_mediator.ProduceEventWithEventData(event, event_data) fat_date_time = shell_item.get_modification_time_as_integer() if fat_date_time != 0: date_time = dfdatetime_fat_date_time.FATDateTime( fat_date_time=fat_date_time) event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_MODIFICATION) parser_mediator.ProduceEventWithEventData(event, event_data)
288,164
Parses a shell item path segment. Args: shell_item (pyfwsi.item): shell item. Returns: str: shell item path segment.
def _ParseShellItemPathSegment(self, shell_item): path_segment = None if isinstance(shell_item, pyfwsi.root_folder): description = shell_folder_ids.DESCRIPTIONS.get( shell_item.shell_folder_identifier, None) if description: path_segment = description else: path_segment = '{{{0:s}}}'.format(shell_item.shell_folder_identifier) path_segment = '<{0:s}>'.format(path_segment) elif isinstance(shell_item, pyfwsi.volume): if shell_item.name: path_segment = shell_item.name elif shell_item.identifier: path_segment = '{{{0:s}}}'.format(shell_item.identifier) elif isinstance(shell_item, pyfwsi.file_entry): long_name = '' for extension_block in shell_item.extension_blocks: if isinstance(extension_block, pyfwsi.file_entry_extension): long_name = extension_block.long_name if long_name: path_segment = long_name elif shell_item.name: path_segment = shell_item.name elif isinstance(shell_item, pyfwsi.network_location): if shell_item.location: path_segment = shell_item.location if path_segment is None and shell_item.class_type == 0x00: # TODO: check for signature 0x23febbee pass if path_segment is None: path_segment = '<UNKNOWN: 0x{0:02x}>'.format(shell_item.class_type) return path_segment
288,165
Parses the shell items from the byte stream. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. byte_stream (bytes): shell items data. parent_path_segments (Optional[list[str]]): parent shell item path segments. codepage (Optional[str]): byte stream codepage.
def ParseByteStream( self, parser_mediator, byte_stream, parent_path_segments=None, codepage='cp1252'): if parent_path_segments and isinstance(parent_path_segments, list): self._path_segments = list(parent_path_segments) else: self._path_segments = [] shell_item_list = pyfwsi.item_list() parser_mediator.AppendToParserChain(self) try: shell_item_list.copy_from_byte_stream( byte_stream, ascii_codepage=codepage) for shell_item in iter(shell_item_list.items): self._ParseShellItem(parser_mediator, shell_item) finally: parser_mediator.PopFromParserChain()
288,167
Initializes an event data attribute container. Args: data_type (Optional[str]): event data type indicator.
def __init__(self, data_type=DATA_TYPE): super(SyslogLineEventData, self).__init__(data_type=data_type) self.body = None self.hostname = None self.pid = None self.reporter = None self.severity = None
288,168
Updates the year to use for events, based on the last observed month. Args: mediator (ParserMediator): mediates the interactions between parsers and other components, such as storage and abort signals. month (int): month observed by the parser, where January is 1.
def _UpdateYear(self, mediator, month): if not self._year_use: self._year_use = mediator.GetEstimatedYear() if not self._maximum_year: self._maximum_year = mediator.GetLatestYear() if not self._last_month: self._last_month = month return # Some syslog daemons allow out-of-order sequences, so allow some leeway # to not cause Apr->May->Apr to cause the year to increment. # See http://bugzilla.adiscon.com/show_bug.cgi?id=527 if self._last_month > (month + 1): if self._year_use != self._maximum_year: self._year_use += 1 self._last_month = month
288,171
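A toy version of the year-rollover heuristic above, detached from the mediator plumbing; the one-month leeway keeps slightly out-of-order sequences such as Apr, May, Apr from bumping the year:

    def update_year(last_month, month, year, maximum_year):
        # Only a backwards jump of more than one month (for example December to
        # January) is treated as a wrap into the next year.
        if last_month and last_month > month + 1 and year != maximum_year:
            year += 1
        return month, year

    # update_year(12, 1, 2016, 2017) returns (1, 2017)
    # update_year(5, 4, 2016, 2017) returns (4, 2016)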
Enables parser plugins. Args: plugin_includes (list[str]): names of the plugins to enable, where None or an empty list represents all plugins. Note that the default plugin is handled separately.
def EnablePlugins(self, plugin_includes): super(SyslogParser, self).EnablePlugins(plugin_includes) self._plugin_by_reporter = {} for plugin in self._plugins: self._plugin_by_reporter[plugin.REPORTER] = plugin
288,172
Parses a matching entry. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. key (str): name of the parsed structure. structure (pyparsing.ParseResults): elements parsed from the file. Raises: ParseError: when the structure type is unknown.
def ParseRecord(self, parser_mediator, key, structure): if key not in self._SUPPORTED_KEYS: raise errors.ParseError( 'Unable to parse record, unknown structure: {0:s}'.format(key)) if key == 'chromeos_syslog_line': date_time = dfdatetime_time_elements.TimeElementsInMicroseconds() try: date_time.CopyFromStringISO8601(structure.chromeos_date) except ValueError: parser_mediator.ProduceExtractionWarning( 'invalid date time value: {0:s}'.format(structure.chromeos_date)) return else: # TODO: add support for fractional seconds. month = timelib.MONTH_DICT.get(structure.month.lower(), 0) if month != 0: self._UpdateYear(parser_mediator, month) time_elements_tuple = ( self._year_use, month, structure.day, structure.hour, structure.minute, structure.second) try: date_time = dfdatetime_time_elements.TimeElements( time_elements_tuple=time_elements_tuple) date_time.is_local_time = True except ValueError: parser_mediator.ProduceExtractionWarning( 'invalid date time value: {0!s}'.format(time_elements_tuple)) return plugin = None if key == 'syslog_comment': event_data = SyslogCommentEventData() event_data.body = structure.body # TODO: pass line number to offset or remove. event_data.offset = 0 else: event_data = SyslogLineEventData() event_data.body = structure.body event_data.hostname = structure.hostname or None # TODO: pass line number to offset or remove. event_data.offset = 0 event_data.pid = structure.pid event_data.reporter = structure.reporter event_data.severity = structure.severity plugin = self._plugin_by_reporter.get(structure.reporter, None) if plugin: attributes = { 'hostname': structure.hostname, 'severity': structure.severity, 'reporter': structure.reporter, 'pid': structure.pid, 'body': structure.body} try: # TODO: pass event_data instead of attributes. plugin.Process(parser_mediator, date_time, attributes) except errors.WrongPlugin: plugin = None if not plugin: event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data)
288,173
Verifies that this is a syslog-formatted file. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. lines (str): one or more lines from the text file. Returns: bool: True if this is the correct parser, False otherwise.
def VerifyStructure(self, parser_mediator, lines): return (re.match(self._VERIFICATION_REGEX, lines) or re.match(self._CHROMEOS_VERIFICATION_REGEX, lines)) is not None
288,174
Pushes an event onto the heap. Args: event (EventObject): event.
def PushEvent(self, event): event_string = event.GetAttributeValuesString() heap_values = (event.timestamp, event.timestamp_desc, event_string, event) heapq.heappush(self._heap, heap_values)
288,176
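heapq orders tuples element by element, so the (timestamp, timestamp_desc, attribute-values string, event) tuples pushed above pop in a stable order, and the event object itself is only compared when every preceding field ties. A small demonstration with made-up values:

    import heapq

    heap = []
    heapq.heappush(heap, (20, 'Creation Time', 'event b', object()))
    heapq.heappush(heap, (10, 'Creation Time', 'event a', object()))
    heapq.heappush(heap, (10, 'Content Modification Time', 'event c', object()))

    # Pops ordered by timestamp, then description, then the attribute string.
    ordered = [heapq.heappop(heap)[:3] for _ in range(3)]
    # ordered == [(10, 'Content Modification Time', 'event c'),
    #             (10, 'Creation Time', 'event a'),
    #             (20, 'Creation Time', 'event b')]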
Pushes a serialized event onto the heap. Args: timestamp (int): event timestamp, which contains the number of microseconds since January 1, 1970, 00:00:00 UTC. event_data (bytes): serialized event.
def PushEvent(self, timestamp, event_data): heap_values = (timestamp, event_data) heapq.heappush(self._heap, heap_values) self.data_size += len(event_data)
288,179
Adds a user account. Args: user_account (UserAccountArtifact): user account artifact. session_identifier (Optional[str]): session identifier, where CURRENT_SESSION represents the active session. Raises: KeyError: if the user account already exists.
def AddUserAccount(self, user_account, session_identifier=CURRENT_SESSION): if session_identifier not in self._user_accounts: self._user_accounts[session_identifier] = {} user_accounts = self._user_accounts[session_identifier] if user_account.identifier in user_accounts: raise KeyError('User account: {0:s} already exists.'.format( user_account.identifier)) user_accounts[user_account.identifier] = user_account
288,182
Adds an environment variable. Args: environment_variable (EnvironmentVariableArtifact): environment variable artifact. Raises: KeyError: if the environment variable already exists.
def AddEnvironmentVariable(self, environment_variable): name = environment_variable.name.upper() if name in self._environment_variables: raise KeyError('Environment variable: {0:s} already exists.'.format( environment_variable.name)) self._environment_variables[name] = environment_variable
288,183
Retrieves an environment variable. Args: name (str): name of the environment variable. Returns: EnvironmentVariableArtifact: environment variable artifact or None if there was no value set for the given name.
def GetEnvironmentVariable(self, name): name = name.upper() return self._environment_variables.get(name, None)
288,184
Retrieves the hostname related to the event. If the hostname is not stored in the event it is determined based on the preprocessing information that is stored inside the storage file. Args: session_identifier (Optional[str]): session identifier, where CURRENT_SESSION represents the active session. Returns: str: hostname.
def GetHostname(self, session_identifier=CURRENT_SESSION): hostname_artifact = self._hostnames.get(session_identifier, None) if not hostname_artifact: return '' return hostname_artifact.name or ''
288,185
Retrieves the knowledge base as a system configuration artifact. Args: session_identifier (Optional[str]): session identifier, where CURRENT_SESSION represents the active session. Returns: SystemConfigurationArtifact: system configuration artifact.
def GetSystemConfigurationArtifact(self, session_identifier=CURRENT_SESSION): system_configuration = artifacts.SystemConfigurationArtifact() system_configuration.code_page = self.GetValue( 'codepage', default_value=self._codepage) system_configuration.hostname = self._hostnames.get( session_identifier, None) system_configuration.keyboard_layout = self.GetValue('keyboard_layout') system_configuration.operating_system = self.GetValue('operating_system') system_configuration.operating_system_product = self.GetValue( 'operating_system_product') system_configuration.operating_system_version = self.GetValue( 'operating_system_version') date_time = datetime.datetime(2017, 1, 1) time_zone = self._time_zone.tzname(date_time) if time_zone and isinstance(time_zone, py2to3.BYTES_TYPE): time_zone = time_zone.decode('ascii') system_configuration.time_zone = time_zone user_accounts = self._user_accounts.get(session_identifier, {}) # In Python 3 dict.values() returns a type dict_values, which will cause # the JSON serializer to raise a TypeError. system_configuration.user_accounts = list(user_accounts.values()) return system_configuration
288,187
Retrieves the username based on a user identifier. Args: user_identifier (str): user identifier, either a UID or SID. session_identifier (Optional[str]): session identifier, where CURRENT_SESSION represents the active session. Returns: str: username.
def GetUsernameByIdentifier( self, user_identifier, session_identifier=CURRENT_SESSION): user_accounts = self._user_accounts.get(session_identifier, {}) user_account = user_accounts.get(user_identifier, None) if not user_account: return '' return user_account.username or ''
288,188
Retrieves a username for a specific path. This determines whether a specific path is within a user's directory and, if so, returns that user's username. Args: path (str): path. Returns: str: username or None if the path does not appear to be within a user's directory.
def GetUsernameForPath(self, path): path = path.lower() user_accounts = self._user_accounts.get(self.CURRENT_SESSION, {}) for user_account in iter(user_accounts.values()): if not user_account.user_directory: continue user_directory = user_account.user_directory.lower() if path.startswith(user_directory): return user_account.username return None
288,189
Retrieves a value by identifier. Args: identifier (str): case insensitive unique identifier for the value. default_value (object): default value. Returns: object: value or default value if not available. Raises: TypeError: if the identifier is not a string type.
def GetValue(self, identifier, default_value=None): if not isinstance(identifier, py2to3.STRING_TYPES): raise TypeError('Identifier not a string type.') identifier = identifier.lower() return self._values.get(identifier, default_value)
288,190
Reads the knowledge base values from a system configuration artifact. Note that this overwrites existing values in the knowledge base. Args: system_configuration (SystemConfigurationArtifact): system configuration artifact. session_identifier (Optional[str]): session identifier, where CURRENT_SESSION represents the active session.
def ReadSystemConfigurationArtifact( self, system_configuration, session_identifier=CURRENT_SESSION): if system_configuration.code_page: try: self.SetCodepage(system_configuration.code_page) except ValueError: logger.warning( 'Unsupported codepage: {0:s}, defaulting to {1:s}'.format( system_configuration.code_page, self._codepage)) self._hostnames[session_identifier] = system_configuration.hostname self.SetValue('keyboard_layout', system_configuration.keyboard_layout) self.SetValue('operating_system', system_configuration.operating_system) self.SetValue( 'operating_system_product', system_configuration.operating_system_product) self.SetValue( 'operating_system_version', system_configuration.operating_system_version) if system_configuration.time_zone: try: self.SetTimeZone(system_configuration.time_zone) except ValueError: logger.warning( 'Unsupported time zone: {0:s}, defaulting to {1:s}'.format( system_configuration.time_zone, self.timezone.zone)) self._user_accounts[session_identifier] = { user_account.username: user_account for user_account in system_configuration.user_accounts}
288,191
Sets the codepage. Args: codepage (str): codepage. Raises: ValueError: if the codepage is not supported.
def SetCodepage(self, codepage): try: codecs.getencoder(codepage) self._codepage = codepage except LookupError: raise ValueError('Unsupported codepage: {0:s}'.format(codepage))
288,192
Sets an environment variable. Args: environment_variable (EnvironmentVariableArtifact): environment variable artifact.
def SetEnvironmentVariable(self, environment_variable): name = environment_variable.name.upper() self._environment_variables[name] = environment_variable
288,193
Sets the time zone. Args: time_zone (str): time zone. Raises: ValueError: if the timezone is not supported.
def SetTimeZone(self, time_zone): try: self._time_zone = pytz.timezone(time_zone) except (AttributeError, pytz.UnknownTimeZoneError): raise ValueError('Unsupported timezone: {0!s}'.format(time_zone))
288,194
Sets a value by identifier. Args: identifier (str): case insensitive unique identifier for the value. value (object): value. Raises: TypeError: if the identifier is not a string type.
def SetValue(self, identifier, value): if not isinstance(identifier, py2to3.STRING_TYPES): raise TypeError('Identifier not a string type.') identifier = identifier.lower() self._values[identifier] = value
288,195
Creates the analysis plugins. Args: options (argparse.Namespace): command line arguments. Returns: dict[str, AnalysisPlugin]: analysis plugins and their names.
def _CreateAnalysisPlugins(self, options): if not self._analysis_plugins: return {} analysis_plugins = ( analysis_manager.AnalysisPluginManager.GetPluginObjects( self._analysis_plugins)) for analysis_plugin in analysis_plugins.values(): helpers_manager.ArgumentHelperManager.ParseOptions( options, analysis_plugin) return analysis_plugins
288,196
Creates the output module. Args: options (argparse.Namespace): command line arguments. Returns: OutputModule: output module. Raises: RuntimeError: if the output module cannot be created.
def _CreateOutputModule(self, options): formatter_mediator = formatters_mediator.FormatterMediator( data_location=self._data_location) try: formatter_mediator.SetPreferredLanguageIdentifier( self._preferred_language) except (KeyError, TypeError) as exception: raise RuntimeError(exception) mediator = output_mediator.OutputMediator( self._knowledge_base, formatter_mediator, preferred_encoding=self.preferred_encoding) mediator.SetTimezone(self._preferred_time_zone) try: output_module = output_manager.OutputManager.NewOutputModule( self._output_format, mediator) except (KeyError, ValueError) as exception: raise RuntimeError( 'Unable to create output module with error: {0!s}'.format( exception)) if output_manager.OutputManager.IsLinearOutputModule(self._output_format): output_file_object = open(self._output_filename, 'wb') output_writer = tools.FileObjectOutputWriter(output_file_object) output_module.SetOutputWriter(output_writer) helpers_manager.ArgumentHelperManager.ParseOptions(options, output_module) # Check if there are parameters that have not been defined and need to # in order for the output module to continue. Prompt user to supply # those that may be missing. missing_parameters = output_module.GetMissingArguments() while missing_parameters: for parameter in missing_parameters: value = self._PromptUserForInput( 'Missing parameter {0:s} for output module'.format(parameter)) if value is None: logger.warning( 'Unable to set the missing parameter for: {0:s}'.format( parameter)) continue setattr(options, parameter, value) helpers_manager.ArgumentHelperManager.ParseOptions( options, output_module) missing_parameters = output_module.GetMissingArguments() return output_module
288,201
Initializes a fake attribute container identifier. Args: attribute_values_hash (int): hash value of the attribute values.
def __init__(self, attribute_values_hash):
    super(FakeIdentifier, self).__init__()
    self.attribute_values_hash = attribute_values_hash
288,207
Initializes a serialized stream attribute container identifier. Args: stream_number (int): number of the serialized attribute container stream. entry_index (int): number of the serialized event within the stream.
def __init__(self, stream_number, entry_index):
    super(SerializedStreamIdentifier, self).__init__()
    self.entry_index = entry_index
    self.stream_number = stream_number
288,208
Initializes a SQL table attribute container identifier. Args: name (str): name of the table. row_identifier (int): unique identifier of the row in the table.
def __init__(self, name, row_identifier):
    super(SQLTableIdentifier, self).__init__()
    self.name = name
    self.row_identifier = row_identifier
288,210
Extracts events from a Windows Registry key. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
    event_data = windows_events.WindowsRegistryEventData()
    event_data.key_path = registry_key.path
    event_data.offset = registry_key.offset
    event_data.urls = self.URLS

    values_dict = {}
    for registry_value in registry_key.GetValues():
      value_name = registry_value.name or '(default)'

      if value_name == 'BootExecute':
        # MSDN claims that the data type of this value is REG_BINARY,
        # although REG_MULTI_SZ is known to be used as well.
        if registry_value.DataIsString():
          value_string = registry_value.GetDataAsObject()
        elif registry_value.DataIsMultiString():
          value_string = ''.join(registry_value.GetDataAsObject())
        elif registry_value.DataIsBinaryData():
          value_string = registry_value.GetDataAsObject()
        else:
          value_string = ''
          error_string = (
              'Key: {0:s}, value: {1:s}: unsupported value data type: '
              '{2:s}.').format(
                  registry_key.path, value_name,
                  registry_value.data_type_string)
          parser_mediator.ProduceExtractionWarning(error_string)

        # TODO: why does this have a separate event object? Remove this.
        event_data.regvalue = {'BootExecute': value_string}

        event = time_events.DateTimeValuesEvent(
            registry_key.last_written_time,
            definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)

      else:
        values_dict[value_name] = registry_value.GetDataAsObject()

    event_data.regvalue = values_dict

    event = time_events.DateTimeValuesEvent(
        registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)
288,212
Check if it is a valid Apple account plist file name. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. plist_name (str): name of the plist. top_level (dict[str, object]): plist top-level key.
def Process(self, parser_mediator, plist_name, top_level, **kwargs):
    if not plist_name.startswith(self.PLIST_PATH):
      raise errors.WrongPlistPlugin(self.NAME, plist_name)

    super(AppleAccountPlugin, self).Process(
        parser_mediator, plist_name=self.PLIST_PATH, top_level=top_level)
288,213
Extracts relevant Apple Account entries. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS.
def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
    accounts = match.get('Accounts', {})
    for name_account, account in iter(accounts.items()):
      first_name = account.get('FirstName', '<FirstName>')
      last_name = account.get('LastName', '<LastName>')
      general_description = '{0:s} ({1:s} {2:s})'.format(
          name_account, first_name, last_name)

      event_data = plist_event.PlistTimeEventData()
      event_data.key = name_account
      event_data.root = '/Accounts'

      datetime_value = account.get('CreationDate', None)
      if datetime_value:
        event_data.desc = 'Configured Apple account {0:s}'.format(
            general_description)
        event = time_events.PythonDatetimeEvent(
            datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)

      datetime_value = account.get('LastSuccessfulConnect', None)
      if datetime_value:
        event_data.desc = 'Connected Apple account {0:s}'.format(
            general_description)
        event = time_events.PythonDatetimeEvent(
            datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)

      datetime_value = account.get('ValidationDate', None)
      if datetime_value:
        event_data.desc = 'Last validation Apple account {0:s}'.format(
            general_description)
        event = time_events.PythonDatetimeEvent(
            datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
288,214
Adds command line arguments the helper supports to an argument group. This function takes an argument parser or an argument group object and adds to it all the command line arguments this helper supports. Args: argument_group (argparse._ArgumentGroup|argparse.ArgumentParser): argparse group.
def AddArguments(cls, argument_group):
    shared_4n6time_output.Shared4n6TimeOutputArgumentsHelper.AddArguments(
        argument_group)
    MySQL4n6TimeDatabaseArgumentsHelper.AddArguments(argument_group)
288,215
Parses and validates options. Args: options (argparse.Namespace): parser options. output_module (OutputModule): output module to configure. Raises: BadConfigObject: when the output module object is of the wrong type.
def ParseOptions(cls, options, output_module):
    if not isinstance(output_module, mysql_4n6time.MySQL4n6TimeOutputModule):
      raise errors.BadConfigObject(
          'Output module is not an instance of MySQL4n6TimeOutputModule')

    MySQL4n6TimeDatabaseArgumentsHelper.ParseOptions(options, output_module)
    shared_4n6time_output.Shared4n6TimeOutputArgumentsHelper.ParseOptions(
        options, output_module)
288,216
Extracts events from a Terminal Server Client Windows Registry key. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
    mru_values_dict = {}
    for subkey in registry_key.GetSubkeys():
      username_value = subkey.GetValueByName('UsernameHint')
      if (username_value and username_value.data and
          username_value.DataIsString()):
        username = username_value.GetDataAsObject()
      else:
        username = 'N/A'

      mru_values_dict[subkey.name] = username

      event_data = windows_events.WindowsRegistryEventData()
      event_data.key_path = subkey.path
      event_data.offset = subkey.offset
      event_data.regvalue = {'Username hint': username}
      event_data.source_append = self._SOURCE_APPEND

      event = time_events.DateTimeValuesEvent(
          subkey.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    event_data = windows_events.WindowsRegistryEventData()
    event_data.key_path = registry_key.path
    event_data.offset = registry_key.offset
    event_data.regvalue = mru_values_dict
    event_data.source_append = self._SOURCE_APPEND

    event = time_events.DateTimeValuesEvent(
        registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)
288,217
Parses a bencoded file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): a file-like object. Raises: UnableToParseFile: when the file cannot be parsed.
def ParseFileObject(self, parser_mediator, file_object):
    file_object.seek(0, os.SEEK_SET)

    header = file_object.read(2)
    if not self.BENCODE_RE.match(header):
      raise errors.UnableToParseFile('Not a valid Bencoded file.')

    file_object.seek(0, os.SEEK_SET)
    try:
      data_object = bencode.bdecode(file_object.read())
    except (IOError, bencode.BTFailure) as exception:
      raise errors.UnableToParseFile(
          '[{0:s}] unable to parse file: {1:s} with error: {2!s}'.format(
              self.NAME, parser_mediator.GetDisplayName(), exception))

    if not data_object:
      raise errors.UnableToParseFile(
          '[{0:s}] missing decoded data for file: {1:s}'.format(
              self.NAME, parser_mediator.GetDisplayName()))

    for plugin in self._plugins:
      try:
        plugin.UpdateChainAndProcess(parser_mediator, data=data_object)
      except errors.WrongBencodePlugin as exception:
        logger.debug('[{0:s}] wrong plugin: {1!s}'.format(
            self.NAME, exception))
288,218
Parse header lines and store appropriate attributes. ['Logging started.', 'Version=', '17.0.2011.0627', [2013, 7, 25], 16, 3, 23, 291, 'StartLocalTime', '<details>'] Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. structure (pyparsing.ParseResults): structure of tokens derived from a line of a text file.
def _ParseHeader(self, parser_mediator, structure):
    try:
      date_time = dfdatetime_time_elements.TimeElementsInMilliseconds(
          time_elements_tuple=structure.header_date_time)
    except ValueError:
      parser_mediator.ProduceExtractionWarning(
          'invalid date time value: {0!s}'.format(structure.header_date_time))
      return

    event_data = SkyDriveLogEventData()
    # TODO: refactor detail to individual event data attributes.
    event_data.detail = '{0:s} {1:s} {2:s} {3:s} {4:s}'.format(
        structure.log_start, structure.version_string,
        structure.version_number, structure.local_time_string,
        structure.details)

    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_ADDED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
288,221
Parses a logline and store appropriate attributes. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. structure (pyparsing.ParseResults): structure of tokens derived from a line of a text file.
def _ParseLine(self, parser_mediator, structure):
    # TODO: Verify if date and time value is locale dependent.
    month, day_of_month, year, hours, minutes, seconds, milliseconds = (
        structure.date_time)

    year += 2000
    time_elements_tuple = (
        year, month, day_of_month, hours, minutes, seconds, milliseconds)

    try:
      date_time = dfdatetime_time_elements.TimeElementsInMilliseconds(
          time_elements_tuple=time_elements_tuple)
    except ValueError:
      parser_mediator.ProduceExtractionWarning(
          'invalid date time value: {0!s}'.format(structure.date_time))
      return

    event_data = SkyDriveLogEventData()
    # Replace newlines with spaces in structure.detail to preserve output.
    # TODO: refactor detail to individual event data attributes.
    event_data.detail = structure.detail.replace('\n', ' ')
    event_data.log_level = structure.log_level
    event_data.module = structure.module
    event_data.source_code = structure.source_code

    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_ADDED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
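The date and time tokens in a SkyDrive log line are ordered month, day, two-digit year, so _ParseLine reorders them and adds 2000 to the year before handing them to dfdatetime. A minimal sketch of just that normalization step, using an illustrative sample tuple:

def normalize_skydrive_date_time(structure_date_time):
  """Reorders SkyDrive log date and time tokens into a dfdatetime tuple."""
  month, day_of_month, year, hours, minutes, seconds, milliseconds = (
      structure_date_time)
  # The log stores a two-digit year, for example 13 for 2013.
  year += 2000
  return (year, month, day_of_month, hours, minutes, seconds, milliseconds)


print(normalize_skydrive_date_time((8, 1, 13, 21, 22, 28, 999)))
# (2013, 8, 1, 21, 22, 28, 999)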
288,222
Parse each record structure and return an EventObject if applicable. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. key (str): identifier of the structure of tokens. structure (pyparsing.ParseResults): structure of tokens derived from a line of a text file. Raises: ParseError: when the structure type is unknown.
def ParseRecord(self, parser_mediator, key, structure):
    if key not in ('header', 'logline'):
      raise errors.ParseError(
          'Unable to parse record, unknown structure: {0:s}'.format(key))

    if key == 'logline':
      self._ParseLine(parser_mediator, structure)
    elif key == 'header':
      self._ParseHeader(parser_mediator, structure)
288,223
Verify that this file is a SkyDrive log file. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. lines (str): one or more lines from the text file. Returns: bool: True if this is the correct parser, False otherwise.
def VerifyStructure(self, parser_mediator, lines):
    try:
      structure = self._SDF_HEADER.parseString(lines)
    except pyparsing.ParseException:
      logger.debug('Not a SkyDrive log file')
      return False

    try:
      dfdatetime_time_elements.TimeElementsInMilliseconds(
          time_elements_tuple=structure.header_date_time)
    except ValueError:
      logger.debug(
          'Not a SkyDrive log file, invalid date and time: {0!s}'.format(
              structure.header_date_time))
      return False

    return True
288,224
Parse a logline and store appropriate attributes. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. structure (pyparsing.ParseResults): structure of tokens derived from a line of a text file.
def _ParseLogline(self, parser_mediator, structure):
    # TODO: Verify if date and time value is locale dependent.
    month, day_of_month, year, hours, minutes, seconds, milliseconds = (
        structure.date_time)

    time_elements_tuple = (
        year, month, day_of_month, hours, minutes, seconds, milliseconds)

    try:
      date_time = dfdatetime_time_elements.TimeElementsInMilliseconds(
          time_elements_tuple=time_elements_tuple)
    except ValueError:
      parser_mediator.ProduceExtractionWarning(
          'invalid date time value: {0!s}'.format(structure.date_time))
      return

    event_data = SkyDriveOldLogEventData()
    event_data.log_level = structure.log_level
    event_data.offset = self.offset
    event_data.source_code = structure.source_code
    event_data.text = structure.text

    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_ADDED)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    self._last_date_time = date_time
    self._last_event_data = event_data
288,226
Parse an isolated header line and store appropriate attributes. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. structure (pyparsing.ParseResults): structure of tokens derived from a line of a text file.
def _ParseNoHeaderSingleLine(self, parser_mediator, structure):
    if not self._last_event_data:
      logger.debug('SkyDrive, found isolated line with no previous events')
      return

    event_data = SkyDriveOldLogEventData()
    event_data.offset = self._last_event_data.offset
    event_data.text = structure.text

    event = time_events.DateTimeValuesEvent(
        self._last_date_time, definitions.TIME_DESCRIPTION_ADDED)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    # TODO: think of a possible refactoring for the non-header lines.
    self._last_date_time = None
    self._last_event_data = None
288,227
Parse each record structure and return an EventObject if applicable. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. key (str): identifier of the structure of tokens. structure (pyparsing.ParseResults): structure of tokens derived from a line of a text file. Raises: ParseError: when the structure type is unknown.
def ParseRecord(self, parser_mediator, key, structure):
    if key not in ('logline', 'no_header_single_line'):
      raise errors.ParseError(
          'Unable to parse record, unknown structure: {0:s}'.format(key))

    if key == 'logline':
      self._ParseLogline(parser_mediator, structure)
    elif key == 'no_header_single_line':
      self._ParseNoHeaderSingleLine(parser_mediator, structure)
288,228
Verify that this file is a SkyDrive old log file. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. line (str): line from a text file. Returns: bool: True if the line is in the expected format, False if not.
def VerifyStructure(self, parser_mediator, line):
    try:
      structure = self._LINE.parseString(line)
    except pyparsing.ParseException:
      logger.debug('Not a SkyDrive old log file')
      return False

    day_of_month, month, year, hours, minutes, seconds, milliseconds = (
        structure.date_time)

    time_elements_tuple = (
        year, month, day_of_month, hours, minutes, seconds, milliseconds)

    try:
      dfdatetime_time_elements.TimeElementsInMilliseconds(
          time_elements_tuple=time_elements_tuple)
    except ValueError:
      logger.debug(
          'Not a SkyDrive old log file, invalid date and time: {0!s}'.format(
              structure.date_time))
      return False

    return True
288,229
Parses a Video row. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. query (str): query that created the row. row (sqlite3.Row): row.
def ParseVideoRow(self, parser_mediator, query, row, **unused_kwargs):
    query_hash = hash(query)

    event_data = KodiVideoEventData()
    event_data.filename = self._GetRowValue(query_hash, row, 'strFilename')
    event_data.play_count = self._GetRowValue(query_hash, row, 'playCount')
    event_data.query = query

    timestamp = self._GetRowValue(query_hash, row, 'lastPlayed')
    date_time = dfdatetime_time_elements.TimeElements()
    date_time.CopyFromDateTimeString(timestamp)

    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
288,231
Parses and validates options. Args: options (argparse.Namespace): parser options. configuration_object (CLITool): object to be configured by the argument helper. Raises: BadConfigObject: when the configuration object is of the wrong type.
def ParseOptions(cls, options, configuration_object):
    if not isinstance(configuration_object, tools.CLITool):
      raise errors.BadConfigObject(
          'Configuration object is not an instance of CLITool')

    parsers = cls._ParseStringOption(options, 'parsers', default_value='')
    parsers = parsers.replace('\\', '/')

    # TODO: validate parser names.

    setattr(configuration_object, '_parser_filter_expression', parsers)
288,232
Copies attributes from a session completion. Args: session_completion (SessionCompletion): session completion attribute container. Raises: ValueError: if the identifier of the session completion does not match that of the session.
def CopyAttributesFromSessionCompletion(self, session_completion):
    if self.identifier != session_completion.identifier:
      raise ValueError('Session identifier mismatch.')

    self.aborted = session_completion.aborted

    if session_completion.analysis_reports_counter:
      self.analysis_reports_counter = (
          session_completion.analysis_reports_counter)

    self.completion_time = session_completion.timestamp

    if session_completion.event_labels_counter:
      self.event_labels_counter = session_completion.event_labels_counter

    if session_completion.parsers_counter:
      self.parsers_counter = session_completion.parsers_counter
288,234
Initializes a session completion attribute container. Args: identifier (Optional[str]): unique identifier of the session. The identifier should match that of the corresponding session start information.
def __init__(self, identifier=None):
    super(SessionCompletion, self).__init__()
    self.aborted = False
    self.analysis_reports_counter = None
    self.event_labels_counter = None
    self.identifier = identifier
    self.parsers_counter = None
    self.timestamp = None
288,237
Initializes a session start attribute container. Args: identifier (Optional[str]): unique identifier of the session. The identifier should match that of the corresponding session completion information.
def __init__(self, identifier=None):
    super(SessionStart, self).__init__()
    self.artifact_filters = None
    self.command_line_arguments = None
    self.debug_mode = False
    self.enabled_parser_names = None
    self.filter_file = None
    self.identifier = identifier
    self.parser_filter_expression = None
    self.preferred_encoding = None
    self.preferred_time_zone = None
    self.preferred_year = None
    self.product_name = None
    self.product_version = None
    self.timestamp = None
288,238
Parses a numeric command line argument. Args: options (argparse.Namespace): parser options. argument_name (str): name of the command line argument. default_value (Optional[int]): default value of the command line argument. Returns: int: command line argument value or the default value if the command line argument is not set. Raises: BadConfigOption: if the command line argument value is not an integer.
def _ParseNumericOption(cls, options, argument_name, default_value=None):
    argument_value = getattr(options, argument_name, None)
    if argument_value is None:
      return default_value

    if not isinstance(argument_value, py2to3.INTEGER_TYPES):
      raise errors.BadConfigOption(
          'Unsupported option: {0:s} integer type required.'.format(
              argument_name))

    return argument_value
288,239
Parses a string command line argument. Args: options (argparse.Namespace): parser options. argument_name (str): name of the command line argument. default_value (Optional[str]): default value of the command line argument. Returns: str: command line argument value or the default value if the command line argument is not set. Raises: BadConfigOption: if the command line argument value cannot be converted to a Unicode string.
def _ParseStringOption(cls, options, argument_name, default_value=None):
    argument_value = getattr(options, argument_name, None)
    if argument_value is None:
      return default_value

    if isinstance(argument_value, py2to3.BYTES_TYPE):
      encoding = sys.stdin.encoding

      # Note that sys.stdin.encoding can be None.
      if not encoding:
        encoding = locale.getpreferredencoding()
      if not encoding:
        encoding = cls._PREFERRED_ENCODING

      try:
        argument_value = argument_value.decode(encoding)
      except UnicodeDecodeError as exception:
        raise errors.BadConfigOption((
            'Unable to convert option: {0:s} to Unicode with error: '
            '{1!s}.').format(argument_name, exception))

    elif not isinstance(argument_value, py2to3.UNICODE_TYPE):
      raise errors.BadConfigOption(
          'Unsupported option: {0:s} string type required.'.format(
              argument_name))

    return argument_value
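The byte-string branch of _ParseStringOption walks through a chain of encoding fallbacks before decoding. A minimal sketch of that fallback chain on its own, assuming only the standard library; the function name and the default are illustrative:

import locale
import sys


def guess_argument_encoding(default_encoding='utf-8'):
  """Returns a best-guess encoding for command line argument byte strings."""
  encoding = sys.stdin.encoding
  # sys.stdin.encoding can be None, for example when stdin is redirected.
  if not encoding:
    encoding = locale.getpreferredencoding()
  if not encoding:
    encoding = default_encoding
  return encoding


print(guess_argument_encoding())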
288,240
Parses a DLS page header from a file-like object. Args: file_object (file): file-like object to read the header from. page_offset (int): offset of the start of the page header, relative to the start of the file. Returns: tuple: containing: dls_page_header: parsed record structure. int: header size. Raises: ParseError: when the header cannot be parsed.
def _ParseDLSPageHeader(self, file_object, page_offset):
    page_header_map = self._GetDataTypeMap('dls_page_header')

    try:
      page_header, page_size = self._ReadStructureFromFileObject(
          file_object, page_offset, page_header_map)
    except (ValueError, errors.ParseError) as exception:
      raise errors.ParseError(
          'Unable to parse page header at offset: 0x{0:08x} '
          'with error: {1!s}'.format(page_offset, exception))

    if page_header.signature not in self._DLS_SIGNATURES:
      raise errors.ParseError(
          'Unsupported page header signature at offset: 0x{0:08x}'.format(
              page_offset))

    return page_header, page_size
288,243
Builds an FseventsdData object from a parsed structure. Args: record (dls_record_v1|dls_record_v2): parsed record structure. Returns: FseventsdEventData: event data attribute container.
def _BuildEventData(self, record):
    event_data = FseventsdEventData()
    event_data.path = record.path
    event_data.flags = record.event_flags
    event_data.event_identifier = record.event_identifier
    # Node identifier is only set in DLS V2 records.
    event_data.node_identifier = getattr(record, 'node_identifier', None)

    return event_data
288,244
Parses an fseventsd file. Args: parser_mediator (ParserMediator): parser mediator. file_object (dfvfs.FileIO): a file-like object. Raises: UnableToParseFile: when the header cannot be parsed.
def ParseFileObject(self, parser_mediator, file_object):
    page_header_map = self._GetDataTypeMap('dls_page_header')

    try:
      page_header, file_offset = self._ReadStructureFromFileObject(
          file_object, 0, page_header_map)
    except (ValueError, errors.ParseError) as exception:
      raise errors.UnableToParseFile(
          'Unable to parse page header with error: {0!s}'.format(
              exception))

    if page_header.signature not in self._DLS_SIGNATURES:
      raise errors.UnableToParseFile('Invalid file signature')

    current_page_end = page_header.page_size

    file_entry = parser_mediator.GetFileEntry()
    date_time = self._GetParentModificationTime(file_entry)

    # TODO: Change this to use a more representative time definition (time
    # span) when https://github.com/log2timeline/dfdatetime/issues/65 is
    # resolved.
    if date_time:
      timestamp_description = definitions.TIME_DESCRIPTION_RECORDED
    else:
      date_time = dfdatetime_semantic_time.SemanticTime('Not set')
      timestamp_description = definitions.TIME_DESCRIPTION_NOT_A_TIME

    event = time_events.DateTimeValuesEvent(date_time, timestamp_description)

    file_size = file_object.get_size()
    while file_offset < file_size:
      if file_offset >= current_page_end:
        try:
          page_header, header_size = self._ParseDLSPageHeader(
              file_object, file_offset)
        except errors.ParseError as exception:
          parser_mediator.ProduceExtractionWarning(
              'Unable to parse page header with error: {0!s}'.format(
                  exception))
          break

        current_page_end += page_header.page_size
        file_offset += header_size
        continue

      if page_header.signature == self._DLS_V1_SIGNATURE:
        record_map = self._GetDataTypeMap('dls_record_v1')
      else:
        record_map = self._GetDataTypeMap('dls_record_v2')

      try:
        record, record_length = self._ReadStructureFromFileObject(
            file_object, file_offset, record_map)
        file_offset += record_length
      except (ValueError, errors.ParseError) as exception:
        parser_mediator.ProduceExtractionWarning(
            'Unable to parse page record with error: {0!s}'.format(
                exception))
        break

      event_data = self._BuildEventData(record)
      parser_mediator.ProduceEventWithEventData(event, event_data)
288,246
Parses a document versions row. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. query (str): query that created the row. row (sqlite3.Row): row.
def DocumentVersionsRow(
      self, parser_mediator, query, row, **unused_kwargs):
    query_hash = hash(query)

    # version_path = "PerUser/UserID/xx/client_id/version_file"
    # where PerUser and UserID are real directories.
    version_path = self._GetRowValue(query_hash, row, 'version_path')
    path = self._GetRowValue(query_hash, row, 'path')

    paths = version_path.split('/')
    if len(paths) < 2 or not paths[1].isdigit():
      user_sid = ''
    else:
      user_sid = paths[1]

    version_path = self.ROOT_VERSION_PATH + version_path
    path, _, _ = path.rpartition('/')

    event_data = MacDocumentVersionsEventData()
    # TODO: shouldn't this be a separate event?
    event_data.last_time = self._GetRowValue(query_hash, row, 'last_time')
    event_data.name = self._GetRowValue(query_hash, row, 'name')
    event_data.path = path
    event_data.query = query
    # Note that the user_sid value is expected to be a string.
    event_data.user_sid = '{0!s}'.format(user_sid)
    event_data.version_path = version_path

    timestamp = self._GetRowValue(query_hash, row, 'version_time')
    date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_CREATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)
288,248
Parses an utmp file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): a file-like object. Raises: UnableToParseFile: when the file cannot be parsed.
def ParseFileObject(self, parser_mediator, file_object):
    file_offset = 0

    try:
      timestamp, event_data = self._ReadEntry(
          parser_mediator, file_object, file_offset)
    except errors.ParseError as exception:
      raise errors.UnableToParseFile(
          'Unable to parse first utmp entry with error: {0!s}'.format(
              exception))

    if not event_data.username:
      raise errors.UnableToParseFile(
          'Unable to parse first utmp entry with error: missing username')

    if not timestamp:
      raise errors.UnableToParseFile(
          'Unable to parse first utmp entry with error: missing timestamp')

    date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
        timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_START)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    file_offset = file_object.tell()
    file_size = file_object.get_size()

    while file_offset < file_size:
      if parser_mediator.abort:
        break

      try:
        timestamp, event_data = self._ReadEntry(
            parser_mediator, file_object, file_offset)
      except errors.ParseError:
        # Note that the utmp file can contain trailing data.
        break

      date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
          timestamp=timestamp)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_START)
      parser_mediator.ProduceEventWithEventData(event, event_data)

      file_offset = file_object.tell()
288,251
Parses an Opera typed history file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): file-like object. Raises: UnableToParseFile: when the file cannot be parsed.
def ParseFileObject(self, parser_mediator, file_object):
    data = file_object.read(self._HEADER_READ_SIZE)
    if not data.startswith(b'<?xml'):
      raise errors.UnableToParseFile(
          'Not an Opera typed history file [not a XML]')

    _, _, data = data.partition(b'\n')
    if not data.startswith(b'<typed_history'):
      raise errors.UnableToParseFile(
          'Not an Opera typed history file [wrong XML root key]')

    # For ElementTree to work we need to work on a file object seeked
    # to the beginning.
    file_object.seek(0, os.SEEK_SET)

    xml = ElementTree.parse(file_object)

    for history_item in xml.iterfind('typed_history_item'):
      event_data = OperaTypedHistoryEventData()
      event_data.entry_type = history_item.get('type', None)
      event_data.url = history_item.get('content', None)

      if event_data.entry_type == 'selected':
        event_data.entry_selection = 'Filled from autocomplete.'
      elif event_data.entry_type == 'text':
        event_data.entry_selection = 'Manually typed.'

      last_typed_time = history_item.get('last_typed', None)
      if last_typed_time is None:
        parser_mediator.ProduceExtractionWarning('missing last typed time.')
        continue

      date_time = dfdatetime_time_elements.TimeElements()

      try:
        date_time.CopyFromStringISO8601(last_typed_time)
      except ValueError as exception:
        parser_mediator.ProduceExtractionWarning(
            'unsupported last typed time: {0:s} with error: {1!s}.'.format(
                last_typed_time, exception))
        continue

      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
      parser_mediator.ProduceEventWithEventData(event, event_data)
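The Opera typed history parser iterates over typed_history_item elements with ElementTree. A minimal standalone sketch of that iteration, using an illustrative XML snippet in place of a dfvfs file object:

import io
from xml.etree import ElementTree

XML_DATA = (
    b'<?xml version="1.0" encoding="utf-8"?>\n'
    b'<typed_history>'
    b'<typed_history_item content="plaso.kiddaland.net" type="text"'
    b' last_typed="2013-11-11T23:45:49Z"/>'
    b'</typed_history>')

xml = ElementTree.parse(io.BytesIO(XML_DATA))
for history_item in xml.iterfind('typed_history_item'):
  # Mirrors the attributes the parser reads: content, type and last_typed.
  print(history_item.get('content'), history_item.get('type'),
        history_item.get('last_typed'))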
288,254
Parses an Opera global history record. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. text_file_object (dfvfs.TextFile): text file. Returns: bool: True if the record was successfully parsed.
def _ParseRecord(self, parser_mediator, text_file_object):
    try:
      title = text_file_object.readline()
    except UnicodeDecodeError:
      parser_mediator.ProduceExtractionWarning(
          'unable to read and decode title')
      return False

    if not title:
      return False

    try:
      url = text_file_object.readline()
    except UnicodeDecodeError:
      parser_mediator.ProduceExtractionWarning(
          'unable to read and decode url')
      return False

    try:
      timestamp = text_file_object.readline()
    except UnicodeDecodeError:
      parser_mediator.ProduceExtractionWarning(
          'unable to read and decode timestamp')
      return False

    try:
      popularity_index = text_file_object.readline()
    except UnicodeDecodeError:
      parser_mediator.ProduceExtractionWarning(
          'unable to read and decode popularity index')
      return False

    event_data = OperaGlobalHistoryEventData()

    event_data.url = url.strip()

    title = title.strip()
    if title != event_data.url:
      event_data.title = title

    popularity_index = popularity_index.strip()
    try:
      event_data.popularity_index = int(popularity_index, 10)
    except ValueError:
      parser_mediator.ProduceExtractionWarning(
          'unable to convert popularity index: {0:s}'.format(
              popularity_index))

    if event_data.popularity_index < 0:
      event_data.description = 'First and Only Visit'
    else:
      event_data.description = 'Last Visit'

    timestamp = timestamp.strip()
    try:
      timestamp = int(timestamp, 10)
    except ValueError:
      parser_mediator.ProduceExtractionWarning(
          'unable to convert timestamp: {0:s}'.format(timestamp))
      timestamp = None

    if timestamp is None:
      date_time = dfdatetime_semantic_time.SemanticTime('Invalid')
    else:
      date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)

    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    return True
288,256
Parses and validates an Opera global history record. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. text_file_object (dfvfs.TextFile): text file. Returns: bool: True if the record was successfully parsed.
def _ParseAndValidateRecord(self, parser_mediator, text_file_object):
    try:
      title = text_file_object.readline(size=self._MAXIMUM_LINE_SIZE)
      url = text_file_object.readline(size=self._MAXIMUM_LINE_SIZE)
      timestamp = text_file_object.readline(size=self._MAXIMUM_LINE_SIZE)
      popularity_index = text_file_object.readline(
          size=self._MAXIMUM_LINE_SIZE)
    except UnicodeDecodeError:
      return False

    if len(title) == self._MAXIMUM_LINE_SIZE and title[-1] != '\n':
      return False

    if len(url) == self._MAXIMUM_LINE_SIZE and url[-1] != '\n':
      return False

    if len(timestamp) == self._MAXIMUM_LINE_SIZE and timestamp[-1] != '\n':
      return False

    if (len(popularity_index) == self._MAXIMUM_LINE_SIZE and
        popularity_index[-1] != '\n'):
      return False

    title = title.strip()
    url = url.strip()
    timestamp = timestamp.strip()
    popularity_index = popularity_index.strip()

    if not title or not url or not timestamp or not popularity_index:
      return False

    event_data = OperaGlobalHistoryEventData()

    if not self._IsValidUrl(url):
      return False

    event_data.url = url
    if title != url:
      event_data.title = title

    try:
      event_data.popularity_index = int(popularity_index, 10)
      timestamp = int(timestamp, 10)
    except ValueError:
      return False

    if event_data.popularity_index < 0:
      event_data.description = 'First and Only Visit'
    else:
      event_data.description = 'Last Visit'

    date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    return True
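The validation method above only accepts a record when every line either fits within the maximum line size or ends in a newline. A minimal sketch of that bounded readline check, assuming an illustrative maximum and io.StringIO in place of the dfvfs text file:

import io

MAXIMUM_LINE_SIZE = 512


def read_bounded_line(text_file_object):
  """Returns a stripped line, or None if it exceeds the maximum size."""
  line = text_file_object.readline(MAXIMUM_LINE_SIZE)
  if len(line) == MAXIMUM_LINE_SIZE and line[-1] != '\n':
    return None
  return line.strip()


text_file_object = io.StringIO('Example title\nhttp://www.example.com/\n')
print(read_bounded_line(text_file_object))  # Example title
print(read_bounded_line(text_file_object))  # http://www.example.com/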
288,257
Parses an Opera global history file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): file-like object. Raises: UnableToParseFile: when the file cannot be parsed.
def ParseFileObject(self, parser_mediator, file_object):
    encoding = self._ENCODING or parser_mediator.codepage
    text_file_object = text_file.TextFile(file_object, encoding=encoding)

    if not self._ParseAndValidateRecord(parser_mediator, text_file_object):
      raise errors.UnableToParseFile(
          'Unable to parse as Opera global_history.dat.')

    while self._ParseRecord(parser_mediator, text_file_object):
      pass
288,258
Parses the original filename. Args: file_object (FileIO): file-like object. format_version (int): format version. Returns: str: filename or None on error. Raises: ParseError: if the original filename cannot be read.
def _ParseOriginalFilename(self, file_object, format_version):
    file_offset = file_object.tell()

    if format_version == 1:
      data_type_map = self._GetDataTypeMap(
          'recycle_bin_metadata_utf16le_string')
    else:
      data_type_map = self._GetDataTypeMap(
          'recycle_bin_metadata_utf16le_string_with_size')

    try:
      original_filename, _ = self._ReadStructureFromFileObject(
          file_object, file_offset, data_type_map)
    except (ValueError, errors.ParseError) as exception:
      raise errors.ParseError(
          'Unable to parse original filename with error: {0!s}'.format(
              exception))

    if format_version == 1:
      return original_filename.rstrip('\x00')

    return original_filename.string.rstrip('\x00')
288,260
Parses a Windows Recycle.Bin metadata ($I) file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): file-like object. Raises: UnableToParseFile: when the file cannot be parsed.
def ParseFileObject(self, parser_mediator, file_object):
    # We may have to rely on filenames since this header is very generic.
    # TODO: Rethink this and potentially make a better test.
    filename = parser_mediator.GetFilename()
    if not filename.startswith('$I'):
      raise errors.UnableToParseFile('Filename must start with $I.')

    file_header_map = self._GetDataTypeMap('recycle_bin_metadata_file_header')

    try:
      file_header, _ = self._ReadStructureFromFileObject(
          file_object, 0, file_header_map)
    except (ValueError, errors.ParseError) as exception:
      raise errors.UnableToParseFile((
          'Unable to parse Windows Recycle.Bin metadata file header with '
          'error: {0!s}').format(exception))

    if file_header.format_version not in self._SUPPORTED_FORMAT_VERSIONS:
      raise errors.UnableToParseFile(
          'Unsupported format version: {0:d}.'.format(
              file_header.format_version))

    if file_header.deletion_time == 0:
      date_time = dfdatetime_semantic_time.SemanticTime('Not set')
    else:
      date_time = dfdatetime_filetime.Filetime(
          timestamp=file_header.deletion_time)

    event_data = WinRecycleBinEventData()
    try:
      event_data.original_filename = self._ParseOriginalFilename(
          file_object, file_header.format_version)
    except (ValueError, errors.ParseError) as exception:
      parser_mediator.ProduceExtractionWarning(
          'unable to parse original filename with error: {0!s}.'.format(
              exception))
    event_data.file_size = file_header.original_file_size

    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_DELETED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
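The deletion_time in the $I header is a FILETIME value: a count of 100-nanosecond intervals since January 1, 1601. A minimal sketch of that conversion without dfdatetime, using an illustrative sample value:

import datetime

FILETIME_EPOCH = datetime.datetime(1601, 1, 1)


def filetime_to_datetime(filetime):
  """Converts a FILETIME value to a naive datetime object."""
  # FILETIME counts 100-nanosecond intervals since January 1, 1601.
  return FILETIME_EPOCH + datetime.timedelta(microseconds=filetime // 10)


print(filetime_to_datetime(130146605160000000))  # a date in mid 2013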
288,261
Parses an INFO-2 record. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): file-like object. record_offset (int): record offset. record_size (int): record size. Raises: ParseError: if the record cannot be read.
def _ParseInfo2Record(
      self, parser_mediator, file_object, record_offset, record_size):
    record_data = self._ReadData(file_object, record_offset, record_size)

    record_map = self._GetDataTypeMap('recycler_info2_file_entry')

    try:
      record = self._ReadStructureFromByteStream(
          record_data, record_offset, record_map)
    except (ValueError, errors.ParseError) as exception:
      raise errors.ParseError((
          'Unable to map record data at offset: 0x{0:08x} with error: '
          '{1!s}').format(record_offset, exception))

    codepage = parser_mediator.codepage or 'ascii'

    # The original filename can contain remnant data after the end-of-string
    # character.
    ascii_filename = record.original_filename.split(b'\x00')[0]

    try:
      ascii_filename = ascii_filename.decode(codepage)
    except UnicodeDecodeError:
      ascii_filename = ascii_filename.decode(codepage, errors='replace')
      parser_mediator.ProduceExtractionWarning(
          'unable to decode original filename.')

    unicode_filename = None
    if record_size > 280:
      record_offset += 280
      utf16_string_map = self._GetDataTypeMap(
          'recycler_info2_file_entry_utf16le_string')

      try:
        unicode_filename = self._ReadStructureFromByteStream(
            record_data[280:], record_offset, utf16_string_map)
      except (ValueError, errors.ParseError) as exception:
        raise errors.ParseError((
            'Unable to map record data at offset: 0x{0:08x} with error: '
            '{1!s}').format(record_offset, exception))

      unicode_filename = unicode_filename.rstrip('\x00')

    if record.deletion_time == 0:
      date_time = dfdatetime_semantic_time.SemanticTime('Not set')
    else:
      date_time = dfdatetime_filetime.Filetime(
          timestamp=record.deletion_time)

    event_data = WinRecycleBinEventData()
    event_data.drive_number = record.drive_number
    event_data.original_filename = unicode_filename or ascii_filename
    event_data.file_size = record.original_file_size
    event_data.offset = record_offset
    event_data.record_index = record.index
    if ascii_filename != unicode_filename:
      event_data.short_filename = ascii_filename

    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_DELETED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
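The ASCII filename in an INFO2 record sits in a fixed-size field and may be followed by remnant data after the terminating NUL byte, which is why the record parser splits on b'\x00' before decoding. A minimal sketch of just that step, with an illustrative buffer and codepage:

# Fixed-size field: filename, end-of-string character, then remnant bytes.
RAW_FILENAME = b'C:\\Users\\test\\file.txt\x00\xde\xadremnant'

ascii_filename = RAW_FILENAME.split(b'\x00')[0]
try:
  ascii_filename = ascii_filename.decode('cp1252')
except UnicodeDecodeError:
  # Fall back to a lossy decode, mirroring the warning path above.
  ascii_filename = ascii_filename.decode('cp1252', errors='replace')

print(ascii_filename)  # C:\Users\test\file.txt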
288,262
Parses a Windows Recycler INFO2 file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): file-like object. Raises: UnableToParseFile: when the file cannot be parsed.
def ParseFileObject(self, parser_mediator, file_object):
    # Since this header value is really generic it is hard not to use filename
    # as an indicator too.
    # TODO: Rethink this and potentially make a better test.
    filename = parser_mediator.GetFilename()
    if not filename.startswith('INFO2'):
      return

    file_header_map = self._GetDataTypeMap('recycler_info2_file_header')

    try:
      file_header, _ = self._ReadStructureFromFileObject(
          file_object, 0, file_header_map)
    except (ValueError, errors.ParseError) as exception:
      raise errors.UnableToParseFile((
          'Unable to parse Windows Recycler INFO2 file header with '
          'error: {0!s}').format(exception))

    if file_header.unknown1 != 5:
      parser_mediator.ProduceExtractionWarning('unsupported format signature.')
      return

    file_entry_size = file_header.file_entry_size
    if file_entry_size not in (280, 800):
      parser_mediator.ProduceExtractionWarning(
          'unsupported file entry size: {0:d}'.format(file_entry_size))
      return

    file_offset = file_object.get_offset()
    file_size = file_object.get_size()

    while file_offset < file_size:
      self._ParseInfo2Record(
          parser_mediator, file_object, file_offset, file_entry_size)

      file_offset += file_entry_size
288,263
Initializes the CLI tool object. Args: input_reader (Optional[InputReader]): input reader, where None indicates that the stdin input reader should be used. output_writer (Optional[OutputWriter]): output writer, where None indicates that the stdout output writer should be used.
def __init__(self, input_reader=None, output_writer=None):
    super(StorageMediaTool, self).__init__(
        input_reader=input_reader, output_writer=output_writer)
    self._custom_artifacts_path = None
    self._artifact_definitions_path = None
    self._artifact_filters = None
    self._credentials = []
    self._credential_configurations = []
    self._filter_file = None
    self._partitions = None
    self._process_vss = False
    self._source_scanner = source_scanner.SourceScanner()
    self._source_path = None
    self._source_path_specs = []
    self._textwrapper = textwrap.TextWrapper()
    self._user_selected_vss_stores = False
    self._volumes = None
    self._vss_only = False
    self._vss_stores = None
288,264
Adds a credential configuration. Args: path_spec (dfvfs.PathSpec): path specification. credential_type (str): credential type. credential_data (bytes): credential data.
def _AddCredentialConfiguration(
      self, path_spec, credential_type, credential_data):
    credential_configuration = configurations.CredentialConfiguration(
        credential_data=credential_data, credential_type=credential_type,
        path_spec=path_spec)

    self._credential_configurations.append(credential_configuration)
288,265
Represents a number of bytes as a human readable string. Args: size (int): size in bytes. Returns: str: human readable string of the size.
def _FormatHumanReadableSize(self, size):
    magnitude_1000 = 0
    size_1000 = float(size)
    while size_1000 >= 1000:
      size_1000 /= 1000
      magnitude_1000 += 1

    magnitude_1024 = 0
    size_1024 = float(size)
    while size_1024 >= 1024:
      size_1024 /= 1024
      magnitude_1024 += 1

    size_string_1000 = None
    if 0 < magnitude_1000 <= 7:
      size_string_1000 = '{0:.1f}{1:s}'.format(
          size_1000, self._UNITS_1000[magnitude_1000])

    size_string_1024 = None
    if 0 < magnitude_1024 <= 7:
      size_string_1024 = '{0:.1f}{1:s}'.format(
          size_1024, self._UNITS_1024[magnitude_1024])

    if not size_string_1000 or not size_string_1024:
      return '{0:d} B'.format(size)

    return '{0:s} / {1:s} ({2:d} B)'.format(
        size_string_1024, size_string_1000, size)
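_FormatHumanReadableSize reports a size in both base-1024 and base-1000 units. A minimal standalone sketch of the same computation, with illustrative unit lists in place of the class constants:

UNITS_1000 = ['B', 'kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB']
UNITS_1024 = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB']


def format_human_readable_size(size):
  """Formats a size in bytes as a base-1024 and base-1000 string."""
  magnitude_1000 = 0
  size_1000 = float(size)
  while size_1000 >= 1000:
    size_1000 /= 1000
    magnitude_1000 += 1

  magnitude_1024 = 0
  size_1024 = float(size)
  while size_1024 >= 1024:
    size_1024 /= 1024
    magnitude_1024 += 1

  if not (0 < magnitude_1000 <= 7) or not (0 < magnitude_1024 <= 7):
    return '{0:d} B'.format(size)

  return '{0:.1f}{1:s} / {2:.1f}{3:s} ({4:d} B)'.format(
      size_1024, UNITS_1024[magnitude_1024], size_1000,
      UNITS_1000[magnitude_1000], size)


print(format_human_readable_size(1073741824))  # 1.0GiB / 1.1GB (1073741824 B)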
288,266
Determines the APFS volume identifiers. Args: scan_node (dfvfs.SourceScanNode): scan node. Returns: list[str]: APFS volume identifiers. Raises: SourceScannerError: if the format of or within the source is not supported or the scan node is invalid. UserAbort: if the user requested to abort.
def _GetAPFSVolumeIdentifiers(self, scan_node):
    if not scan_node or not scan_node.path_spec:
      raise errors.SourceScannerError('Invalid scan node.')

    volume_system = apfs_volume_system.APFSVolumeSystem()
    volume_system.Open(scan_node.path_spec)

    volume_identifiers = self._source_scanner.GetVolumeIdentifiers(
        volume_system)
    if not volume_identifiers:
      return []

    # TODO: refactor self._volumes to use scan options.
    if self._volumes:
      if self._volumes == 'all':
        volumes = range(1, volume_system.number_of_volumes + 1)
      else:
        volumes = self._volumes

      selected_volume_identifiers = self._NormalizedVolumeIdentifiers(
          volume_system, volumes, prefix='apfs')

      if not set(selected_volume_identifiers).difference(volume_identifiers):
        return selected_volume_identifiers

    if len(volume_identifiers) > 1:
      try:
        volume_identifiers = self._PromptUserForAPFSVolumeIdentifiers(
            volume_system, volume_identifiers)
      except KeyboardInterrupt:
        raise errors.UserAbort('File system scan aborted.')

    return self._NormalizedVolumeIdentifiers(
        volume_system, volume_identifiers, prefix='apfs')
288,267
Determines the VSS store identifiers. Args: scan_node (dfvfs.SourceScanNode): scan node. Returns: list[str]: VSS store identifiers. Raises: SourceScannerError: if the format of or within the source is not supported or the scan node is invalid. UserAbort: if the user requested to abort.
def _GetVSSStoreIdentifiers(self, scan_node):
    if not scan_node or not scan_node.path_spec:
      raise errors.SourceScannerError('Invalid scan node.')

    volume_system = vshadow_volume_system.VShadowVolumeSystem()
    volume_system.Open(scan_node.path_spec)

    volume_identifiers = self._source_scanner.GetVolumeIdentifiers(
        volume_system)
    if not volume_identifiers:
      return []

    # TODO: refactor to use scan options.
    if self._vss_stores:
      if self._vss_stores == 'all':
        vss_stores = range(1, volume_system.number_of_volumes + 1)
      else:
        vss_stores = self._vss_stores

      selected_volume_identifiers = self._NormalizedVolumeIdentifiers(
          volume_system, vss_stores, prefix='vss')

      if not set(selected_volume_identifiers).difference(volume_identifiers):
        return selected_volume_identifiers

    try:
      volume_identifiers = self._PromptUserForVSSStoreIdentifiers(
          volume_system, volume_identifiers)
    except KeyboardInterrupt:
      raise errors.UserAbort('File system scan aborted.')

    return self._NormalizedVolumeIdentifiers(
        volume_system, volume_identifiers, prefix='vss')
288,269