Dataset columns:

  code        string, lengths 26 to 870k
  docstring   string, lengths 1 to 65.6k
  func_name   string, lengths 1 to 194
  language    string, 1 distinct value
  repo        string, lengths 8 to 68
  path        string, lengths 5 to 194
  url         string, lengths 46 to 254
  license     string, 4 distinct values
def debug(self, *args, **kwargs) -> None:
    """For logging. A wrapper around logging.debug() that adds the scanId to LogRecord

    Args:
        *args: passed through to logging.debug()
        *kwargs: passed through to logging.debug()
    """
    self.log.debug(*args, extra={'scanId': self.__scanId__}, **kwargs)
For logging. A wrapper around logging.debug() that adds the scanId to LogRecord Args: *args: passed through to logging.debug() *kwargs: passed through to logging.debug()
debug
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def info(self, *args, **kwargs) -> None:
    """For logging. A wrapper around logging.info() that adds the scanId to LogRecord

    Args:
        *args: passed through to logging.info()
        *kwargs: passed through to logging.info()
    """
    self.log.info(*args, extra={'scanId': self.__scanId__}, **kwargs)
For logging. A wrapper around logging.info() that adds the scanId to LogRecord Args: *args: passed through to logging.info() *kwargs: passed through to logging.info()
info
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def error(self, *args, **kwargs) -> None:
    """For logging. A wrapper around logging.error() that adds the scanId to LogRecord

    Args:
        *args: passed through to logging.error()
        *kwargs: passed through to logging.error()
    """
    self.log.error(*args, extra={'scanId': self.__scanId__}, **kwargs)
For logging. A wrapper around logging.error() that adds the scanId to LogRecord Args: *args: passed through to logging.error() *kwargs: passed through to logging.error()
error
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def enrichTarget(self, target: str) -> None:
    """Find aliases for a target.

    Note: rarely used in special cases

    Args:
        target (str): TBD
    """
    pass
Find aliases for a target. Note: rarely used in special cases Args: target (str): TBD
enrichTarget
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def setTarget(self, target) -> None:
    """Assigns the current target this module is acting against.

    Args:
        target (SpiderFootTarget): target

    Raises:
        TypeError: target argument was invalid type
    """
    from spiderfoot import SpiderFootTarget

    if not isinstance(target, SpiderFootTarget):
        raise TypeError(f"target is {type(target)}; expected SpiderFootTarget")

    self._currentTarget = target
Assigns the current target this module is acting against. Args: target (SpiderFootTarget): target Raises: TypeError: target argument was invalid type
setTarget
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def setDbh(self, dbh) -> None:
    """Used to set the database handle, which is only to be used
    by modules in very rare/exceptional cases (e.g. sfp__stor_db)

    Args:
        dbh (SpiderFootDb): database handle
    """
    self.__sfdb__ = dbh
Used to set the database handle, which is only to be used by modules in very rare/exceptional cases (e.g. sfp__stor_db) Args: dbh (SpiderFootDb): database handle
setDbh
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def setScanId(self, scanId: str) -> None:
    """Set the scan ID.

    Args:
        scanId (str): scan instance ID

    Raises:
        TypeError: scanId argument was invalid type
    """
    if not isinstance(scanId, str):
        raise TypeError(f"scanId is {type(scanId)}; expected str")

    self.__scanId__ = scanId
Set the scan ID. Args: scanId (str): scan instance ID Raises: TypeError: scanId argument was invalid type
setScanId
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def getScanId(self) -> str:
    """Get the scan ID.

    Returns:
        str: scan ID

    Raises:
        TypeError: Module called getScanId() but no scanId is set.
    """
    if not self.__scanId__:
        raise TypeError("Module called getScanId() but no scanId is set.")

    return self.__scanId__
Get the scan ID. Returns: str: scan ID Raises: TypeError: Module called getScanId() but no scanId is set.
getScanId
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def getTarget(self) -> str:
    """Gets the current target this module is acting against.

    Returns:
        str: current target

    Raises:
        TypeError: Module called getTarget() but no target is set.
    """
    if not self._currentTarget:
        raise TypeError("Module called getTarget() but no target is set.")

    return self._currentTarget
Gets the current target this module is acting against. Returns: str: current target Raises: TypeError: Module called getTarget() but no target is set.
getTarget
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def registerListener(self, listener) -> None:
    """Listener modules which will get notified once we have data for them to work with.

    Args:
        listener: TBD
    """
    self._listenerModules.append(listener)
Listener modules which will get notified once we have data for them to work with. Args: listener: TBD
registerListener
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def tempStorage(self) -> dict:
    """For future use. Module temporary storage.

    A dictionary used to persist state (in memory) for a module.

    Todo:
        Move all module state to use this, which then would enable a scan to be paused/resumed.

    Note:
        Required for SpiderFoot HX compatibility of modules.

    Returns:
        dict: module temporary state data
    """
    return dict()
For future use. Module temporary storage. A dictionary used to persist state (in memory) for a module. Todo: Move all module state to use this, which then would enable a scan to be paused/resumed. Note: Required for SpiderFoot HX compatibility of modules. Returns: dict: module temporary state data
tempStorage
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def notifyListeners(self, sfEvent) -> None:
    """Call the handleEvent() method of every other plug-in listening for
    events from this plug-in. Remember that those plug-ins will be called
    within the same execution context of this thread, not on their own.

    Args:
        sfEvent (SpiderFootEvent): event

    Raises:
        TypeError: sfEvent argument was invalid type
    """
    from spiderfoot import SpiderFootEvent

    if not isinstance(sfEvent, SpiderFootEvent):
        raise TypeError(f"sfEvent is {type(sfEvent)}; expected SpiderFootEvent")

    eventName = sfEvent.eventType
    eventData = sfEvent.data

    # Be strict about what events to pass on, unless they are
    # the ROOT event or the event type of the target.
    if self.__outputFilter__ and eventName not in ['ROOT', self.getTarget().targetType, self.__outputFilter__]:
        return

    storeOnly = False  # Under some conditions, only store and don't notify

    if not eventData:
        return

    if self.checkForStop():
        return

    # Look back to ensure the original notification for an element
    # is what's linked to children. For instance, sfp_dns may find
    # xyz.abc.com, and then sfp_ripe obtains some raw data for the
    # same, and then sfp_dns finds xyz.abc.com in there, we should
    # suppress the notification of that to other modules, as the
    # original xyz.abc.com notification from sfp_dns will trigger
    # those modules anyway. This also avoids messy iterations that
    # traverse many many levels.

    # storeOnly is used in this case so that the source to dest
    # relationship is made, but no further events are triggered
    # from dest, as we are already operating on dest's original
    # notification from one of the upstream events.

    prevEvent = sfEvent.sourceEvent
    while prevEvent is not None:
        if prevEvent.sourceEvent is not None and prevEvent.sourceEvent.eventType == sfEvent.eventType and prevEvent.sourceEvent.data.lower() == eventData.lower():
            storeOnly = True
            break
        prevEvent = prevEvent.sourceEvent

    # output to queue if applicable
    if self.outgoingEventQueue is not None:
        self.outgoingEventQueue.put(sfEvent)
    # otherwise, call other modules directly
    else:
        self._listenerModules.sort(key=lambda m: m._priority)

        for listener in self._listenerModules:
            if eventName not in listener.watchedEvents() and '*' not in listener.watchedEvents():
                continue

            if storeOnly and "__stor" not in listener.__module__:
                continue

            listener._currentEvent = sfEvent

            # Check if we've been asked to stop in the meantime, so that
            # notifications stop triggering module activity.
            if self.checkForStop():
                return

            try:
                listener.handleEvent(sfEvent)
            except Exception as e:
                self.sf.error(f"Module ({listener.__module__}) encountered an error: {e}")
                # set errorState
                self.errorState = True
                # clear incoming queue
                if self.incomingEventQueue:
                    with suppress(queue.Empty):
                        while 1:
                            self.incomingEventQueue.get_nowait()
Call the handleEvent() method of every other plug-in listening for events from this plug-in. Remember that those plug-ins will be called within the same execution context of this thread, not on their own. Args: sfEvent (SpiderFootEvent): event Raises: TypeError: sfEvent argument was invalid type
notifyListeners
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def checkForStop(self) -> bool:
    """For modules to use to check for when they should give back control.

    Returns:
        bool: True if scan should stop
    """
    # Stop if module is in error state.
    if self.errorState:
        return True

    # If threading is enabled, check the _stopScanning attribute instead.
    # This is to prevent each thread needing its own sqlite db handle.
    if self.outgoingEventQueue is not None and self.incomingEventQueue is not None:
        return self._stopScanning

    if not self.__scanId__:
        return False

    scanstatus = self.__sfdb__.scanInstanceGet(self.__scanId__)

    if not scanstatus:
        return False

    if scanstatus[5] == "ABORT-REQUESTED":
        self._stopScanning = True
        return True

    return False
For modules to use to check for when they should give back control. Returns: bool: True if scan should stop
checkForStop
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def running(self) -> bool:
    """Indicates whether the module is currently processing data.
    Modules that process data in pools/batches typically override this method.

    Returns:
        bool: True if the module is currently processing data.
    """
    return self.sharedThreadPool.countQueuedTasks(f"{self.__name__}_threadWorker") > 0
Indicates whether the module is currently processing data. Modules that process data in pools/batches typically override this method. Returns: bool: True if the module is currently processing data.
running
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def watchedEvents(self) -> list:
    """What events is this module interested in for input. The format is a list
    of event types that are applied to event types that this module wants to
    be notified of, or * if it wants everything.
    Will usually be overridden by the implementer, unless it is interested
    in all events (default behavior).

    Returns:
        list: list of events this module watches
    """
    return ['*']
What events is this module interested in for input. The format is a list of event types that are applied to event types that this module wants to be notified of, or * if it wants everything. Will usually be overridden by the implementer, unless it is interested in all events (default behavior). Returns: list: list of events this module watches
watchedEvents
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def producedEvents(self) -> list:
    """What events this module produces
    This is to support the end user in selecting modules based on events
    produced.

    Returns:
        list: list of events produced by this module
    """
    return []
What events this module produces This is to support the end user in selecting modules based on events produced. Returns: list: list of events produced by this module
producedEvents
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def handleEvent(self, sfEvent) -> None:
    """Handle events to this module.
    Will usually be overridden by the implementer, unless it doesn't handle any events.

    Args:
        sfEvent (SpiderFootEvent): event
    """
    return
Handle events to this module. Will usually be overridden by the implementer, unless it doesn't handle any events. Args: sfEvent (SpiderFootEvent): event
handleEvent
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
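Taken together, the plugin methods above (watchedEvents, producedEvents, handleEvent, notifyListeners, checkForStop, debug) describe the contract a module implements. Below is a minimal sketch of a module overriding them; the SpiderFootPlugin base class name and the SpiderFootEvent(eventType, data, module, sourceEvent) signature are assumptions inferred from the imports and attribute accesses visible in the code records above, not something this dataset states.

from spiderfoot import SpiderFootEvent, SpiderFootPlugin  # assumed import path


class sfp_example(SpiderFootPlugin):
    """Illustrative module: watches domain names and emits a raw-data event."""

    def watchedEvents(self) -> list:
        # Only be notified of these event types (instead of the '*' default).
        return ["DOMAIN_NAME"]

    def producedEvents(self) -> list:
        return ["RAW_RIR_DATA"]

    def handleEvent(self, sfEvent) -> None:
        if self.checkForStop():
            return
        self.debug(f"Received {sfEvent.eventType}: {sfEvent.data}")
        # Emit a new event linked to the event that triggered this module.
        evt = SpiderFootEvent("RAW_RIR_DATA", f"looked up {sfEvent.data}", self.__name__, sfEvent)
        self.notifyListeners(evt)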
def finish(self):
    """Perform final/cleanup functions before module exits
    Note that this function may be called multiple times
    Overridden by the implementer
    """
    return
Perform final/cleanup functions before module exits Note that this function may be called multiple times Overridden by the implementer
finish
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def poolExecute(self, callback, *args, **kwargs) -> None:
    """Execute a callback with the given args.
    If we're in a storage module, execute normally.
    Otherwise, use the shared thread pool.

    Args:
        callback: function to call
        args: args (passed through to callback)
        kwargs: kwargs (passed through to callback)
    """
    if self.__name__.startswith('sfp__stor_'):
        callback(*args, **kwargs)
    else:
        self.sharedThreadPool.submit(callback, *args, taskName=f"{self.__name__}_threadWorker", maxThreads=self.maxThreads, **kwargs)
Execute a callback with the given args. If we're in a storage module, execute normally. Otherwise, use the shared thread pool. Args: callback: function to call args: args (passed through to callback) kwargs: kwargs (passed through to callback)
poolExecute
python
smicallef/spiderfoot
spiderfoot/plugin.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/plugin.py
MIT
def __init__(self, threads: int = 100, qsize: int = 10, name: str = '') -> None:
    """Initialize the SpiderFootThreadPool class.

    Args:
        threads (int): Max number of threads
        qsize (int): Queue size
        name (str): Name
    """
    self.log = logging.getLogger(f"spiderfoot.{__name__}")
    self.threads = int(threads)
    self.qsize = int(qsize)
    self.pool = [None] * self.threads
    self.name = str(name)
    self.inputThread = None
    self.inputQueues = dict()
    self.outputQueues = dict()
    self._stop = False
    self._lock = threading.Lock()
Initialize the SpiderFootThreadPool class. Args: threads (int): Max number of threads qsize (int): Queue size name (str): Name
__init__
python
smicallef/spiderfoot
spiderfoot/threadpool.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/threadpool.py
MIT
def shutdown(self, wait: bool = True) -> dict:
    """Shut down the pool.

    Args:
        wait (bool): Whether to wait for the pool to finish executing

    Returns:
        results (dict): (unordered) results in the format: {"taskName": [returnvalue1, returnvalue2, ...]}
    """
    results = dict()
    self.log.debug(f'Shutting down thread pool "{self.name}" with wait={wait}')
    if wait:
        while not self.finished and not self.stop:
            with self._lock:
                outputQueues = list(self.outputQueues)
            for taskName in outputQueues:
                moduleResults = list(self.results(taskName))
                try:
                    results[taskName] += moduleResults
                except KeyError:
                    results[taskName] = moduleResults
            sleep(.1)
    self.stop = True
    # make sure input queues are empty
    with self._lock:
        inputQueues = list(self.inputQueues.values())
    for q in inputQueues:
        with suppress(Exception):
            while 1:
                q.get_nowait()
        with suppress(Exception):
            q.close()
    # make sure output queues are empty
    with self._lock:
        outputQueues = list(self.outputQueues.items())
    for taskName, q in outputQueues:
        moduleResults = list(self.results(taskName))
        try:
            results[taskName] += moduleResults
        except KeyError:
            results[taskName] = moduleResults
        with suppress(Exception):
            q.close()
    return results
Shut down the pool. Args: wait (bool): Whether to wait for the pool to finish executing Returns: results (dict): (unordered) results in the format: {"taskName": [returnvalue1, returnvalue2, ...]}
shutdown
python
smicallef/spiderfoot
spiderfoot/threadpool.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/threadpool.py
MIT
def submit(self, callback, *args, **kwargs) -> None:
    """Submit a function call to the pool.
    The "taskName" and "maxThreads" arguments are optional.

    Args:
        callback (function): callback function
        *args: Passed through to callback
        **kwargs: Passed through to callback, except for taskName and maxThreads
    """
    taskName = kwargs.get('taskName', 'default')
    maxThreads = kwargs.pop('maxThreads', 100)

    # block if this module's thread limit has been reached
    while self.countQueuedTasks(taskName) >= maxThreads:
        sleep(.01)
        continue

    self.log.debug(f"Submitting function \"{callback.__name__}\" from module \"{taskName}\" to thread pool \"{self.name}\"")
    self.inputQueue(taskName).put((callback, args, kwargs))
Submit a function call to the pool. The "taskName" and "maxThreads" arguments are optional. Args: callback (function): callback function *args: Passed through to callback **kwargs: Passed through to callback, except for taskName and maxThreads
submit
python
smicallef/spiderfoot
spiderfoot/threadpool.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/threadpool.py
MIT
def countQueuedTasks(self, taskName: str) -> int:
    """For the specified task, returns the number of queued function calls
    plus the number of functions which are currently executing

    Args:
        taskName (str): Name of task

    Returns:
        int: the number of queued function calls plus the number of functions which are currently executing
    """
    queuedTasks = 0
    with suppress(Exception):
        queuedTasks += self.inputQueues[taskName].qsize()

    runningTasks = 0
    for t in self.pool:
        with suppress(Exception):
            if t.taskName == taskName:
                runningTasks += 1

    return queuedTasks + runningTasks
For the specified task, returns the number of queued function calls plus the number of functions which are currently executing Args: taskName (str): Name of task Returns: int: the number of queued function calls plus the number of functions which are currently executing
countQueuedTasks
python
smicallef/spiderfoot
spiderfoot/threadpool.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/threadpool.py
MIT
def map(self, callback, iterable, *args, **kwargs) -> None:  # noqa: A003
    """map.

    Args:
        callback: the function to thread
        iterable: each entry will be passed as the first argument to the function
        args: additional arguments to pass to callback function
        kwargs: keyword arguments to pass to callback function

    Yields:
        return values from completed callback function
    """
    taskName = kwargs.get("taskName", "default")

    self.inputThread = threading.Thread(target=self.feedQueue, args=(callback, iterable, args, kwargs))
    self.inputThread.start()
    self.start()
    sleep(.1)
    yield from self.results(taskName, wait=True)
map. Args: callback: the function to thread iterable: each entry will be passed as the first argument to the function args: additional arguments to pass to callback function kwargs: keyword arguments to pass to callback function Yields: return values from completed callback function
map
python
smicallef/spiderfoot
spiderfoot/threadpool.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/threadpool.py
MIT
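The thread pool methods above (submit, countQueuedTasks, map, shutdown) suggest a simple usage pattern. A hedged sketch follows, assuming the class is importable from the spiderfoot/threadpool.py path shown in these records and that map() and shutdown() behave as their docstrings describe:

from spiderfoot.threadpool import SpiderFootThreadPool  # import path assumed from the records above


def word_count(text):
    # Stand-in for real per-item work.
    return len(text.split())


pool = SpiderFootThreadPool(threads=5, name="demo")

# map() feeds each item to the callback on the pool's worker threads and
# yields return values as they complete (order is not guaranteed).
for result in pool.map(word_count, ["one two", "three four five"], taskName="demo_task"):
    print(result)

# shutdown() drains the queues and returns any remaining per-task results.
leftover = pool.shutdown(wait=True)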
def __init__(self, dbh: SpiderFootDb, ruleset: dict, scanId: str = None) -> None:
    """Initialize SpiderFoot correlator engine with scan ID and ruleset.

    Args:
        dbh (SpiderFootDb): database handle
        ruleset (dict): correlation rule set
        scanId (str): scan instance ID

    Raises:
        TypeError: argument type was invalid
        SyntaxError: correlation ruleset contains malformed or invalid rule
    """
    if not isinstance(ruleset, dict):
        raise TypeError(f"ruleset is {type(ruleset)}; expected dict()")

    if not isinstance(dbh, SpiderFootDb):
        raise TypeError(f"dbh is {type(dbh)}; expected SpiderFootDb()")

    self.dbh = dbh

    if scanId and not isinstance(scanId, str):
        raise TypeError(f"scanId is {type(scanId)}; expected str()")

    self.scanId = scanId

    self.types = self.dbh.eventTypes()
    for t in self.types:
        self.type_entity_map[t[1]] = t[3]

    self.rules = list()

    # Sanity-check the rules
    for rule_id in ruleset.keys():
        self.log.debug(f"Parsing rule {rule_id}...")
        try:
            self.rules.append(yaml.safe_load(ruleset[rule_id]))
            self.rules[len(self.rules) - 1]['rawYaml'] = ruleset[rule_id]
        except Exception as e:
            raise SyntaxError(f"Unable to process a YAML correlation rule [{rule_id}]") from e

    # Strip any trailing newlines that may have crept into meta name/description
    for rule in self.rules:
        for k in rule['meta'].keys():
            if isinstance(rule['meta'][k], str):
                rule['meta'][k] = rule['meta'][k].strip()
            else:
                rule['meta'][k] = rule[k]

    if not self.check_ruleset_validity(self.rules):
        raise SyntaxError("Sanity check of correlation rules failed.")
Initialize SpiderFoot correlator engine with scan ID and ruleset. Args: dbh (SpiderFootDb): database handle ruleset (dict): correlation rule set scanId (str): scan instance ID Raises: TypeError: argument type was invalid SyntaxError: correlation ruleset contains malformed or invalid rule
__init__
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def get_ruleset(self) -> list:
    """Correlation rule set.

    Returns:
        list: correlation rules
    """
    return self.rules
Correlation rule set. Returns: list: correlation rules
get_ruleset
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def run_correlations(self) -> None:
    """Run all correlation rules.

    Raises:
        ValueError: correlation rules cannot be run on specified scanId
    """
    scan_instance = self.dbh.scanInstanceGet(self.scanId)
    if not scan_instance:
        raise ValueError(f"Invalid scan ID. Scan {self.scanId} does not exist.")

    if scan_instance[5] in ["RUNNING", "STARTING", "STARTED"]:
        raise ValueError(f"Scan {self.scanId} is {scan_instance[5]}. You cannot run correlations on running scans.")

    for rule in self.rules:
        self.log.debug(f"Processing rule: {rule['id']}")
        results = self.process_rule(rule)
        if not results:
            self.log.debug(f"No results for rule {rule['id']}.")
            continue

        self.log.info(f"Rule {rule['id']} returned {len(results.keys())} results.")

        for result in results:
            self.create_correlation(rule, results[result])
Run all correlation rules. Raises: ValueError: correlation rules cannot be run on specified scanId
run_correlations
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def build_db_criteria(self, matchrule: dict) -> dict:
    """Build up the criteria to be used to query the database.

    Args:
        matchrule (dict): dict representing a match rule

    Returns:
        dict: criteria to be used with SpiderFootDb.scanResultEvent()

    Raises:
        TypeError: argument type was invalid
    """
    if not isinstance(matchrule, dict):
        raise TypeError(f"matchrule is {type(matchrule)}; expected dict()")

    criterias = dict()

    if "." in matchrule['field']:
        self.log.error("The first collection must either be data, type or module.")
        return None

    if matchrule['field'] == "data" and matchrule['type'] == "regex":
        self.log.error("The first collection cannot use regex on data.")
        return None

    if matchrule['field'] == "module" and matchrule['method'] != 'exact':
        self.log.error("Collection based on module names doesn't support regex.")
        return None

    # Build up the event type part of the query
    if matchrule['field'] == "type":
        if 'eventType' not in criterias:
            criterias['eventType'] = list()

        if matchrule['method'] == 'regex':
            if type(matchrule['value']) != list:
                regexps = [matchrule['value']]
            else:
                regexps = matchrule['value']

            for r in regexps:
                for t in self.types:
                    if re.search(r, t[1]):
                        criterias['eventType'].append(t[1])

        if matchrule['method'] == 'exact':
            if type(matchrule['value']) != list:
                matches = [matchrule['value']]
            else:
                matches = matchrule['value']

            for m in matches:
                matched = False
                for t in self.types:
                    if t[1] == m:
                        matched = True
                        criterias['eventType'].append(t[1])
                if not matched:
                    self.log.error(f"Invalid type specified: {m}")
                    return None

    # Match by module(s)
    if matchrule['field'] == "module":
        if 'srcModule' not in criterias:
            criterias['srcModule'] = list()

        if matchrule['method'] == 'exact':
            if isinstance(matchrule['value'], list):
                criterias['srcModule'].extend(matchrule['value'])
            else:
                criterias['srcModule'].append(matchrule['value'])

    # Match by data
    if matchrule['field'] == "data":
        if 'data' not in criterias:
            criterias['data'] = list()

        if isinstance(matchrule['value'], list):
            for v in matchrule['value']:
                criterias['data'].append(v.encode('raw_unicode_escape'))
        else:
            criterias['data'].append(matchrule['value'].encode('raw_unicode_escape'))

    return criterias
Build up the criteria to be used to query the database. Args: matchrule (dict): dict representing a match rule Returns: dict: criteria to be used with SpiderFootDb.scanResultEvent() Raises: TypeError: argument type was invalid
build_db_criteria
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
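As a concrete illustration of build_db_criteria(), here is a hypothetical match rule and the criteria dict the logic above would derive from it; the event type names are placeholders and would need to exist in the correlator's type list for the exact match to succeed.

# Illustrative match rule (values are hypothetical).
matchrule = {
    "field": "type",      # the first collection may match on type, module or data
    "method": "exact",    # "regex" is also accepted for type matches
    "value": ["INTERNET_NAME", "DOMAIN_NAME"],
}

# Assuming both types are present in self.types, the derived query criteria
# passed to SpiderFootDb.scanResultEvent() would look like:
criteria = {"eventType": ["INTERNET_NAME", "DOMAIN_NAME"]}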
def enrich_event_sources(self, events: dict) -> None:
    """Enrich event sources.

    Args:
        events (dict): events

    Raises:
        TypeError: argument type was invalid
    """
    if not isinstance(events, dict):
        raise TypeError(f"events is {type(events)}; expected dict()")

    event_chunks = [list(events.keys())[x:(x + 5000)] for x in range(0, len(list(events.keys())), 5000)]

    for chunk in event_chunks:
        # Get sources
        self.log.debug(f"Getting sources for {len(chunk)} events")
        source_data = self.dbh.scanElementSourcesDirect(self.scanId, chunk)

        for row in source_data:
            events[row[8]]['source'].append({
                'type': row[15],
                'data': row[2],
                'module': row[16],
                'id': row[9],
                'entity_type': self.type_entity_map[row[15]]
            })
Enrich event sources. Args: events (dict): events Raises: TypeError: argument type was invalid
enrich_event_sources
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def enrich_event_children(self, events: dict) -> None:
    """Enrich event children.

    Args:
        events (dict): events

    Raises:
        TypeError: argument type was invalid
    """
    if not isinstance(events, dict):
        raise TypeError(f"events is {type(events)}; expected dict()")

    event_chunks = [list(events.keys())[x:x + 5000] for x in range(0, len(list(events.keys())), 5000)]

    for chunk in event_chunks:
        # Get children
        self.log.debug(f"Getting children for {len(chunk)} events")
        child_data = self.dbh.scanResultEvent(self.scanId, sourceId=chunk)

        for row in child_data:
            events[row[9]]['child'].append({
                'type': row[4],
                'data': row[1],
                'module': row[3],
                'id': row[8]
            })
Enrich event children. Args: events (dict): events Raises: TypeError: argument type was invalid
enrich_event_children
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def enrich_event_entities(self, events: dict) -> None:
    """Given our starting set of ids, loop through the source
    of each until you have a match according to the criteria
    provided.

    Args:
        events (dict): events

    Raises:
        TypeError: argument type was invalid
    """
    if not isinstance(events, dict):
        raise TypeError(f"events is {type(events)}; expected dict()")

    entity_missing = dict()
    for event_id in events:
        if 'source' not in events[event_id]:
            continue

        row = events[event_id]
        # Go through each source if it's not an ENTITY, capture its ID
        # so we can capture its source, otherwise copy the source as
        # an entity record, since it's of a valid type to be considered one.
        for source in row['source']:
            if source['entity_type'] in ['ENTITY', 'INTERNAL']:
                events[row['id']]['entity'].append(source)
            else:
                # key is the element ID that we need to find an entity for
                # by checking its source, and the value is the original ID
                # for which we are seeking an entity. As we traverse up the
                # discovery path the key will change but the value must always
                # point back to the same ID.
                entity_missing[source['id']] = row['id']

    while len(entity_missing) > 0:
        self.log.debug(f"{len(entity_missing.keys())} entities are missing, going deeper...")
        new_missing = dict()
        self.log.debug(f"Getting sources for {len(entity_missing.keys())} items")
        if len(entity_missing.keys()) > 5000:
            chunks = [list(entity_missing.keys())[x:x + 5000] for x in range(0, len(list(entity_missing.keys())), 5000)]
            entity_data = list()
            self.log.debug("Fetching data in chunks")
            for chunk in chunks:
                self.log.debug(f"chunk size: {len(chunk)}")
                entity_data.extend(self.dbh.scanElementSourcesDirect(self.scanId, chunk))
        else:
            self.log.debug(f"fetching sources for {len(entity_missing)} items")
            entity_data = self.dbh.scanElementSourcesDirect(self.scanId, list(entity_missing.keys()))

        for entity_candidate in entity_data:
            event_id = entity_missing[entity_candidate[8]]
            if self.type_entity_map[entity_candidate[15]] not in ['ENTITY', 'INTERNAL']:
                # key of this dictionary is the id we need to now get a source for,
                # and the value is the original ID of the item missing an entity
                new_missing[entity_candidate[9]] = event_id
            else:
                events[event_id]['entity'].append({
                    'type': entity_candidate[15],
                    'data': entity_candidate[2],
                    'module': entity_candidate[16],
                    'id': entity_candidate[9],
                    'entity_type': self.type_entity_map[entity_candidate[15]]
                })

        if len(new_missing) == 0:
            break

        entity_missing = deepcopy(new_missing)
Given our starting set of ids, loop through the source of each until you have a match according to the criteria provided. Args: events (dict): events Raises: TypeError: argument type was invalid
enrich_event_entities
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def collect_from_db(self, matchrule: dict, fetchChildren: bool, fetchSources: bool, fetchEntities: bool) -> list:
    """Collect event values from database.

    Args:
        matchrule (dict): correlation rule
        fetchChildren (bool): TBD
        fetchSources (bool): TBD
        fetchEntities (bool): TBD

    Returns:
        list: event values
    """
    events = dict()

    self.log.debug(f"match rule: {matchrule}")
    # Parse the criteria from the match rule
    query_args = self.build_db_criteria(matchrule)
    if not query_args:
        self.log.error(f"Error encountered parsing match rule: {matchrule}.")
        return None

    query_args['instanceId'] = self.scanId
    self.log.debug(f"db query: {query_args}")
    for row in self.dbh.scanResultEvent(**query_args):
        events[row[8]] = {
            'type': row[4],
            'data': row[1],
            'module': row[3],
            'id': row[8],
            'entity_type': self.type_entity_map[row[4]],
            'source': [],
            'child': [],
            'entity': []
        }

    # You need to fetch sources if you need entities, since
    # the source will often be the entity.
    if fetchSources or fetchEntities:
        self.enrich_event_sources(events)

    if fetchChildren:
        self.enrich_event_children(events)

    if fetchEntities:
        self.enrich_event_entities(events)

    self.log.debug(f"returning {len(events.values())} events from match_rule {matchrule}")
    return list(events.values())
Collect event values from database. Args: matchrule (dict): correlation rule fetchChildren (bool): TBD fetchSources (bool): TBD fetchEntities (bool): TBD Returns: list: event values
collect_from_db
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def event_extract(self, event: dict, field: str) -> list:
    """Extract event field.

    Args:
        event (dict): event
        field (str): TBD

    Returns:
        list: event data
    """
    if "." in field:
        ret = list()
        key, field = field.split(".")
        for subevent in event[key]:
            ret.extend(self.event_extract(subevent, field))
        return ret

    return [event[field]]
Extract event field. Args: event (dict): event field (str): TBD Returns: list: event data
event_extract
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def event_keep(self, event: dict, field: str, patterns: str, patterntype: str) -> bool:
    """Keep event field.

    Args:
        event (dict): event
        field (str): TBD
        patterns (str): TBD
        patterntype (str): TBD

    Returns:
        bool: TBD
    """
    if "." in field:
        key, field = field.split(".")
        return any(self.event_keep(subevent, field, patterns, patterntype) for subevent in event[key])

    value = event[field]

    if patterntype == "exact":
        ret = False
        for pattern in patterns:
            if pattern.startswith("not "):
                ret = True
                pattern = re.sub(r"^not\s+", "", pattern)
                if value == pattern:
                    return False
            else:
                ret = False
                if value == pattern:
                    return True
        if ret:
            return True
        return False

    if patterntype == "regex":
        ret = False
        for pattern in patterns:
            if pattern.startswith("not "):
                ret = True
                pattern = re.sub(r"^not\s+", "", pattern)
                if re.search(pattern, value, re.IGNORECASE):
                    return False
            else:
                ret = False
                if re.search(pattern, value, re.IGNORECASE):
                    return True
        if ret:
            return True
        return False

    return False
Keep event field. Args: event (dict): event field (str): TBD patterns (str): TBD patterntype (str): TBD Returns: bool: TBD
event_keep
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def refine_collection(self, matchrule: dict, events: list) -> None:
    """Cull events from the events list if they don't meet the match criteria.

    Args:
        matchrule (dict): TBD
        events (list): TBD
    """
    patterns = list()

    if isinstance(matchrule['value'], list):
        for r in matchrule['value']:
            patterns.append(str(r))
    else:
        patterns = [str(matchrule['value'])]

    field = matchrule['field']
    self.log.debug(f"attempting to match {patterns} against the {field} field in {len(events)} events")

    # Go through each event, remove it if we shouldn't keep it
    # according to the match rule patterns.
    for event in events[:]:
        if not self.event_keep(event, field, patterns, matchrule['method']):
            self.log.debug(f"removing {event} because of {field}")
            events.remove(event)
Cull events from the events list if they don't meet the match criteria. Args: matchrule (dict): TBD events (list): TBD
refine_collection
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def collect_events(self, collection: dict, fetchChildren: bool, fetchSources: bool, fetchEntities: bool, collectIndex: int) -> list:
    """Collect data for aggregation and analysis.

    Args:
        collection (dict): TBD
        fetchChildren (bool): TBD
        fetchSources (bool): TBD
        fetchEntities (bool): TBD
        collectIndex (int): TBD

    Returns:
        list: TBD
    """
    step = 0

    for matchrule in collection:
        # First match rule means we fetch from the database, every
        # other step happens locally to avoid burdening the db.
        if step == 0:
            events = self.collect_from_db(matchrule, fetchEntities=fetchEntities, fetchChildren=fetchChildren, fetchSources=fetchSources)
            step += 1
            continue

        # Remove events in-place based on subsequent match-rules
        self.refine_collection(matchrule, events)

    # Stamp events with this collection ID for potential
    # use in analysis later.
    for e in events:
        e['_collection'] = collectIndex
        if fetchEntities:
            for ee in e['entity']:
                ee['_collection'] = collectIndex
        if fetchChildren:
            for ce in e['child']:
                ce['_collection'] = collectIndex
        if fetchSources:
            for se in e['source']:
                se['_collection'] = collectIndex

    self.log.debug(f"returning collection ({len(events)})...")

    return events
Collect data for aggregation and analysis. Args: collection (dict): TBD fetchChildren (bool): TBD fetchSources (bool): TBD fetchEntities (bool): TBD collectIndex (int): TBD Returns: list: TBD
collect_events
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def event_strip(event: dict, field: str, value: str) -> None:
    """Strip sub fields that don't match value.

    Args:
        event (dict): event
        field (str): TBD
        value (str): TBD
    """
    topfield, subfield = field.split(".")
    if field.startswith(topfield + "."):
        for s in event[topfield]:
            if s[subfield] != value:
                event[topfield].remove(s)
Strip sub fields that don't match value. Args: event (dict): event field (str): TBD value (str): TBD
aggregate_events.event_strip
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def aggregate_events(self, rule: dict, events: list) -> dict:
    """Aggregate events according to the rule.

    Args:
        rule (dict): correlation rule
        events (list): TBD

    Returns:
        dict: TBD
    """
    if 'field' not in rule:
        self.log.error(f"Unable to find field definition for aggregation in {rule['id']}")
        return False

    def event_strip(event: dict, field: str, value: str) -> None:
        """Strip sub fields that don't match value.

        Args:
            event (dict): event
            field (str): TBD
            value (str): TBD
        """
        topfield, subfield = field.split(".")
        if field.startswith(topfield + "."):
            for s in event[topfield]:
                if s[subfield] != value:
                    event[topfield].remove(s)

    ret = dict()
    for e in events:
        buckets = self.event_extract(e, rule['field'])
        for b in buckets:
            e_copy = deepcopy(e)
            # if the bucket is of a child, source or entity,
            # remove the children, sources or entities that
            # aren't matching this bucket
            if "." in rule['field']:
                event_strip(e_copy, rule['field'], b)
            if b in ret:
                ret[b].append(e_copy)
                continue
            ret[b] = [e_copy]

    return ret
Aggregate events according to the rule. Args: rule (dict): correlation rule events (list): TBD Returns: dict: TBD
aggregate_events
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def analyze_events(self, rule: dict, buckets: dict) -> None:
    """Analyze events according to the rule. Modifies buckets in place.

    Args:
        rule (dict): correlation rule
        buckets (dict): TBD

    Todo:
        Implement support for 'both_collections'

    Returns:
        None
    """
    self.log.debug(f"applying {rule}")

    if rule['method'] == "threshold":
        return self.analysis_threshold(rule, buckets)
    if rule['method'] == "outlier":
        return self.analysis_outlier(rule, buckets)
    if rule['method'] == "first_collection_only":
        return self.analysis_first_collection_only(rule, buckets)
    if rule['method'] == "match_all_to_first_collection":
        return self.analysis_match_all_to_first_collection(rule, buckets)
    if rule['method'] == "both_collections":
        # TODO: Implement when genuine case appears
        pass

    return None
Analyze events according to the rule. Modifies buckets in place. Args: rule (dict): correlation rule buckets (dict): TBD Todo: Implement support for 'both_collections' Returns: None
analyze_events
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def check_event(events: list, reference: list) -> bool:
    """Check event.

    Args:
        events (list): TBD
        reference (list): TBD

    Returns:
        bool: TBD
    """
    for event_data in events:
        if rule['match_method'] == 'subnet':
            for r in reference:
                try:
                    self.log.debug(f"checking if {event_data} is in {r}")
                    if netaddr.IPAddress(event_data) in netaddr.IPNetwork(r):
                        self.log.debug(f"found subnet match: {event_data} in {r}")
                        return True
                except Exception:
                    pass

        if rule['match_method'] == 'exact' and event_data in reference:
            self.log.debug(f"found exact match: {event_data} in {reference}")
            return True

        if rule['match_method'] == 'contains':
            for r in reference:
                if event_data in r:
                    self.log.debug(f"found pattern match: {event_data} in {r}")
                    return True

    return False
Check event. Args: events (list): TBD reference (list): TBD Returns: bool: TBD
analysis_match_all_to_first_collection.check_event
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def analysis_match_all_to_first_collection(self, rule: dict, buckets: dict) -> None:
    """Find buckets that are in the first collection.

    Args:
        rule (dict): correlation rule
        buckets (dict): TBD
    """
    self.log.debug(f"called with buckets {buckets}")

    def check_event(events: list, reference: list) -> bool:
        """Check event.

        Args:
            events (list): TBD
            reference (list): TBD

        Returns:
            bool: TBD
        """
        for event_data in events:
            if rule['match_method'] == 'subnet':
                for r in reference:
                    try:
                        self.log.debug(f"checking if {event_data} is in {r}")
                        if netaddr.IPAddress(event_data) in netaddr.IPNetwork(r):
                            self.log.debug(f"found subnet match: {event_data} in {r}")
                            return True
                    except Exception:
                        pass

            if rule['match_method'] == 'exact' and event_data in reference:
                self.log.debug(f"found exact match: {event_data} in {reference}")
                return True

            if rule['match_method'] == 'contains':
                for r in reference:
                    if event_data in r:
                        self.log.debug(f"found pattern match: {event_data} in {r}")
                        return True

        return False

    # 1. Build up the list of values from collection 0
    # 2. Go through each event in each collection > 0 and drop any events that aren't
    #    in collection 0.
    # 3. For each bucket, if there are no events from collection > 0, drop them.

    reference = set()
    for bucket in buckets:
        for event in buckets[bucket]:
            if event['_collection'] == 0:
                reference.update(self.event_extract(event, rule['field']))

    for bucket in list(buckets.keys()):
        pluszerocount = 0
        for event in buckets[bucket][:]:
            if event['_collection'] == 0:
                continue
            pluszerocount += 1

            if not check_event(self.event_extract(event, rule['field']), reference):
                buckets[bucket].remove(event)
                pluszerocount -= 1

        # delete the bucket if there are no events > collection 0
        if pluszerocount == 0:
            del (buckets[bucket])
Find buckets that are in the first collection. Args: rule (dict): correlation rule buckets (dict): TBD
analysis_match_all_to_first_collection
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def analysis_first_collection_only(self, rule: dict, buckets: dict) -> None:
    """analysis_first_collection_only TBD

    Args:
        rule (dict): TBD
        buckets (dict): TBD
    """
    colzero = set()

    for bucket in buckets:
        for e in buckets[bucket]:
            if e['_collection'] == 0:
                colzero.add(e[rule['field']])

    for bucket in list(buckets.keys()):
        delete = False
        for e in buckets[bucket]:
            if e['_collection'] > 0 and e[rule['field']] in colzero:
                delete = True
                break
        if delete:
            del (buckets[bucket])

    # Remove buckets with collection > 0 values
    for bucket in list(buckets.keys()):
        for e in buckets[bucket]:
            if e['_collection'] > 0:
                del (buckets[bucket])
                break
analysis_first_collection_only TBD Args: rule (dict): TBD buckets (dict): TBD
analysis_first_collection_only
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def analysis_outlier(self, rule: dict, buckets: dict) -> None:
    """analysis_outlier TBD

    Args:
        rule (dict): TBD
        buckets (dict): TBD
    """
    countmap = dict()
    for bucket in list(buckets.keys()):
        countmap[bucket] = len(buckets[bucket])

    if len(list(countmap.keys())) == 0:
        for bucket in list(buckets.keys()):
            del (buckets[bucket])
        return

    total = float(sum(countmap.values()))
    avg = total / float(len(list(countmap.keys())))
    avgpct = (avg / total) * 100.0

    self.log.debug(f"average percent is {avgpct} based on {avg} / {total} * 100.0")
    if avgpct < rule.get('noisy_percent', 10):
        self.log.debug(f"Not correlating because the average percent is {avgpct} (too anomalous)")
        for bucket in list(buckets.keys()):
            del (buckets[bucket])
        return

    # Figure out which buckets don't contain outliers and delete them
    delbuckets = list()
    for bucket in buckets:
        if (countmap[bucket] / total) * 100.0 > rule['maximum_percent']:
            delbuckets.append(bucket)

    for bucket in set(delbuckets):
        del (buckets[bucket])
analysis_outlier TBD Args: rule (dict): TBD buckets (dict): TBD
analysis_outlier
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def analysis_threshold(self, rule: dict, buckets: dict) -> None:
    """analysis_threshold TBD

    Args:
        rule (dict): TBD
        buckets (dict): TBD
    """
    for bucket in list(buckets.keys()):
        countmap = dict()
        for event in buckets[bucket]:
            e = self.event_extract(event, rule['field'])
            for ef in e:
                if ef not in countmap:
                    countmap[ef] = 0
                countmap[ef] += 1

        if not rule.get('count_unique_only'):
            for v in countmap:
                if countmap[v] >= rule.get('minimum', 0) and countmap[v] <= rule.get('maximum', 999999999):
                    continue
                # Delete the bucket of events if it didn't meet the
                # analysis criteria.
                if bucket in buckets:
                    del (buckets[bucket])
            continue

        # If we're only looking at the number of times the requested
        # field appears in the bucket...
        uniques = len(list(countmap.keys()))
        if uniques < rule.get('minimum', 0) or uniques > rule.get('maximum', 999999999):
            del (buckets[bucket])
analysis_threshold TBD Args: rule (dict): TBD buckets (dict): TBD
analysis_threshold
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
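The threshold logic above can be hard to picture from the rule options alone. Below is a small standalone sketch that mirrors the per-bucket counting and filtering; field extraction is simplified to a plain dict lookup and all names and values are illustrative, not taken from the correlator itself.

def apply_threshold(buckets: dict, field: str, minimum: int = 0, maximum: int = 999999999) -> None:
    """Mirror of the per-bucket threshold check: drop a bucket if any
    extracted value's count falls outside [minimum, maximum]."""
    for bucket in list(buckets.keys()):
        countmap = {}
        for event in buckets[bucket]:
            value = event[field]
            countmap[value] = countmap.get(value, 0) + 1
        if any(c < minimum or c > maximum for c in countmap.values()):
            del buckets[bucket]


buckets = {
    "a.example.com": [{"data": "1.2.3.4"}, {"data": "1.2.3.4"}],
    "b.example.com": [{"data": "9.9.9.9"}],
}
apply_threshold(buckets, "data", minimum=2)
# Only "a.example.com" survives: its value occurs twice, meeting minimum=2.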
def analyze_field_scope(self, field: str) -> list:
    """Analysis field scope.

    Args:
        field (str): TBD

    Returns:
        list: TBD
    """
    return [
        field.startswith('child.'),
        field.startswith('source.'),
        field.startswith('entity.')
    ]
Analysis field scope. Args: field (str): TBD Returns: list: TBD
analyze_field_scope
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def analyze_rule_scope(self, rule: dict) -> list:
    """Analyze the rule for use of children, sources or entities
    so that they can be fetched during collection.

    Args:
        rule (dict): TBD

    Returns:
        list: TBD
    """
    children = False
    source = False
    entity = False

    if rule.get('collections'):
        for collection in rule['collections']:
            for method in collection['collect']:
                c, s, e = self.analyze_field_scope(method['field'])
                if c:
                    children = True
                if s:
                    source = True
                if e:
                    entity = True

    if rule.get('aggregation'):
        c, s, e = self.analyze_field_scope(rule['aggregation']['field'])
        if c:
            children = True
        if s:
            source = True
        if e:
            entity = True

    if rule.get('analysis'):
        for analysis in rule['analysis']:
            if 'field' not in analysis:
                continue
            c, s, e = self.analyze_field_scope(analysis['field'])
            if c:
                children = True
            if s:
                source = True
            if e:
                entity = True

    return children, source, entity
Analyze the rule for use of children, sources or entities so that they can be fetched during collection. Args: rule (dict): TBD Returns: list: TBD
analyze_rule_scope
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def process_rule(self, rule: dict) -> list:
    """Work through all the components of the rule to produce a final
    set of data elements for building into correlations.

    Args:
        rule (dict): correlation rule

    Returns:
        list: TBD

    Raises:
        TypeError: argument type was invalid
    """
    if not isinstance(rule, dict):
        raise TypeError(f"rule is {type(rule)}; expected dict()")

    events = list()
    buckets = dict()

    fetchChildren, fetchSources, fetchEntities = self.analyze_rule_scope(rule)

    # Go through collections and collect the data from the DB
    for collectIndex, c in enumerate(rule.get('collections')):
        events.extend(self.collect_events(c['collect'], fetchChildren, fetchSources, fetchEntities, collectIndex))

    if not events:
        self.log.debug("No events found after going through collections.")
        return None

    self.log.debug(f"{len(events)} proceeding to next stage: aggregation.")
    self.log.debug(f"{events} ready to be processed.")

    # Perform aggregations. Aggregating breaks up the events
    # into buckets with the key being the field to aggregate by.
    if 'aggregation' in rule:
        buckets = self.aggregate_events(rule['aggregation'], events)
        if not buckets:
            self.log.debug("no buckets found after aggregation")
            return None
    else:
        buckets = {'default': events}

    # Perform analysis across the buckets
    if 'analysis' in rule:
        for method in rule['analysis']:
            # analyze() will operate on the bucket, make changes
            # and empty it if the analysis doesn't yield results.
            self.analyze_events(method, buckets)

    return buckets
Work through all the components of the rule to produce a final set of data elements for building into correlations. Args: rule (dict): correlation rule Returns: list: TBD Raises: TypeError: argument type was invalid
process_rule
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def build_correlation_title(self, rule: dict, data: list) -> str:
    """Build the correlation title with field substitution.

    Args:
        rule (dict): correlation rule
        data (list): TBD

    Returns:
        str: correlation rule title

    Raises:
        TypeError: argument type was invalid
    """
    if not isinstance(rule, dict):
        raise TypeError(f"rule is {type(rule)}; expected dict()")

    if not isinstance(data, list):
        raise TypeError(f"data is {type(data)}; expected list()")

    title = rule['headline']
    if isinstance(title, dict):
        title = title['text']

    fields = re.findall(r"{([a-z\.]+)}", title)
    for m in fields:
        try:
            v = self.event_extract(data[0], m)[0]
        except Exception:
            self.log.error(f"Field requested was not available: {m}")
        title = title.replace("{" + m + "}", v.replace("\r", "").split("\n")[0])

    return title
Build the correlation title with field substitution. Args: rule (dict): correlation rule data (list): TBD Returns: str: correlation rule title Raises: TypeError: argument type was invalid
build_correlation_title
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def create_correlation(self, rule: dict, data: list, readonly: bool = False) -> bool:
    """Store the correlation result in the backend database.

    Args:
        rule (dict): correlation rule
        data (list): TBD
        readonly (bool): Dry run. Do not store the correlation result in the database.

    Returns:
        bool: Correlation rule result was stored successfully.
    """
    title = self.build_correlation_title(rule, data)
    self.log.info(f"New correlation [{rule['id']}]: {title}")

    if readonly:
        return True

    eventIds = list()
    for e in data:
        eventIds.append(e['id'])

    corrId = self.dbh.correlationResultCreate(self.scanId, rule['id'], rule['meta']['name'], rule['meta']['description'], rule['meta']['risk'], rule['rawYaml'], title, eventIds)
    if not corrId:
        self.log.error(f"Unable to create correlation in DB for {rule['id']}")
        return False

    return True
Store the correlation result in the backend database. Args: rule (dict): correlation rule data (list): TBD readonly (bool): Dry run. Do not store the correlation result in the database. Returns: bool: Correlation rule result was stored successfully.
create_correlation
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def check_ruleset_validity(self, rules: list) -> bool:
    """Syntax-check all rules.

    Args:
        rules (list): correlation rules

    Returns:
        bool: correlation rule set is valid
    """
    if not isinstance(rules, list):
        return False

    ok = True
    for rule in rules:
        if not self.check_rule_validity(rule):
            ok = False

    if ok:
        return True
    return False
Syntax-check all rules. Args: rules (list): correlation rules Returns: bool: correlation rule set is valid
check_ruleset_validity
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def check_rule_validity(self, rule: dict) -> bool:
    """Check a correlation rule for syntax errors.

    Args:
        rule (dict): correlation rule

    Returns:
        bool: correlation rule is valid
    """
    if not isinstance(rule, dict):
        return False

    fields = set(rule.keys())

    if not fields:
        self.log.error("Rule is empty.")
        return False

    if not rule.get('id'):
        self.log.error("Rule has no ID.")
        return False

    ok = True
    for f in self.mandatory_components:
        if f not in fields:
            self.log.error(f"Mandatory rule component, {f}, not found in {rule['id']}.")
            ok = False

    validfields = set(self.components.keys())
    if len(fields.union(validfields)) > len(validfields):
        self.log.error(f"Unexpected field(s) in correlation rule {rule['id']}: {[f for f in fields if f not in validfields]}")
        ok = False

    for collection in rule.get('collections', list()):
        # Match by data element type(s) or type regexps
        for matchrule in collection['collect']:
            if matchrule['method'] not in ["exact", "regex"]:
                self.log.error(f"Invalid collection method: {matchrule['method']}")
                ok = False

            if matchrule['field'] not in ["type", "module", "data", "child.type", "child.module", "child.data", "source.type", "source.module", "source.data", "entity.type", "entity.module", "entity.data"]:
                self.log.error(f"Invalid collection field: {matchrule['field']}")
                ok = False

            if 'value' not in matchrule:
                self.log.error(f"Value missing for collection rule in {rule['id']}")
                ok = False

    if 'analysis' in rule:
        valid_methods = ["threshold", "outlier", "first_collection_only", "both_collections", "match_all_to_first_collection"]
        for method in rule['analysis']:
            if method['method'] not in valid_methods:
                self.log.error(f"Unknown analysis method '{method['method']}' defined for {rule['id']}.")
                ok = False

    for field in fields:
        # Check strict options are defined
        strictoptions = self.components[field].get('strict', list())
        otheroptions = self.components[field].get('optional', list())
        alloptions = set(strictoptions).union(otheroptions)

        for opt in strictoptions:
            if isinstance(rule[field], list):
                for item, optelement in enumerate(rule[field]):
                    if not optelement.get(opt):
                        self.log.error(f"Required field for {field} missing in {rule['id']}, item {item}: {opt}")
                        ok = False
                continue

            if isinstance(rule[field], dict):
                if not rule[field].get(opt):
                    self.log.error(f"Required field for {field} missing in {rule['id']}: {opt}")
                    ok = False
            else:
                self.log.error(f"Rule field '{field}' is not a list() or dict()")
                ok = False

            # Check if any of the options aren't valid
            if opt not in alloptions:
                self.log.error(f"Unexpected option, {opt}, found in {field} for {rule['id']}. Must be one of {alloptions}.")
                ok = False

    if ok:
        return True
    return False
Check a correlation rule for syntax errors. Args: rule (dict): correlation rule Returns: bool: correlation rule is valid
check_rule_validity
python
smicallef/spiderfoot
spiderfoot/correlation.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/correlation.py
MIT
def __dbregex__(qry: str, data: str) -> bool:
    """SQLite doesn't support regex queries, so we create
    a custom function to do so.

    Args:
        qry (str): TBD
        data (str): TBD

    Returns:
        bool: matches
    """
    try:
        rx = re.compile(qry, re.IGNORECASE | re.DOTALL)
        ret = rx.match(data)
    except Exception:
        return False
    return ret is not None
SQLite doesn't support regex queries, so we create a custom function to do so. Args: qry (str): TBD data (str): TBD Returns: bool: matches
__init__.__dbregex__
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def __init__(self, opts: dict, init: bool = False) -> None:
    """Initialize database and create handle to the SQLite database file.
    Creates the database file if it does not exist.
    Creates database schema if it does not exist.

    Args:
        opts (dict): must specify the database file path in the '__database' key
        init (bool): initialise the database schema.
                     if the database file does not exist this option will be ignored.

    Raises:
        TypeError: arg type was invalid
        ValueError: arg value was invalid
        IOError: database I/O failed
    """
    if not isinstance(opts, dict):
        raise TypeError(f"opts is {type(opts)}; expected dict()") from None
    if not opts:
        raise ValueError("opts is empty") from None
    if not opts.get('__database'):
        raise ValueError("opts['__database'] is empty") from None

    database_path = opts['__database']

    # create database directory
    Path(database_path).parent.mkdir(exist_ok=True, parents=True)

    # connect() will create the database file if it doesn't exist, but
    # at least we can use this opportunity to ensure we have permissions to
    # read and write to such a file.
    try:
        dbh = sqlite3.connect(database_path)
    except Exception as e:
        raise IOError(f"Error connecting to internal database {database_path}") from e

    if dbh is None:
        raise IOError(f"Could not connect to internal database, and could not create {database_path}") from None

    dbh.text_factory = str

    self.conn = dbh
    self.dbh = dbh.cursor()

    def __dbregex__(qry: str, data: str) -> bool:
        """SQLite doesn't support regex queries, so we create
        a custom function to do so.

        Args:
            qry (str): TBD
            data (str): TBD

        Returns:
            bool: matches
        """
        try:
            rx = re.compile(qry, re.IGNORECASE | re.DOTALL)
            ret = rx.match(data)
        except Exception:
            return False
        return ret is not None

    # Now we actually check to ensure the database file has the schema set
    # up correctly.
    with self.dbhLock:
        try:
            self.dbh.execute('SELECT COUNT(*) FROM tbl_scan_config')
            self.conn.create_function("REGEXP", 2, __dbregex__)
        except sqlite3.Error:
            init = True
            try:
                self.create()
            except Exception as e:
                raise IOError("Tried to set up the SpiderFoot database schema, but failed") from e

        # For users with pre 4.0 databases, add the correlation
        # tables + indexes if they don't exist.
        try:
            self.dbh.execute("SELECT COUNT(*) FROM tbl_scan_correlation_results")
        except sqlite3.Error:
            try:
                for query in self.createSchemaQueries:
                    if "correlation" in query:
                        self.dbh.execute(query)
                self.conn.commit()
            except sqlite3.Error:
                raise IOError("Looks like you are running a pre-4.0 database. Unfortunately "
                              "SpiderFoot wasn't able to migrate you, so you'll need to delete "
                              "your SpiderFoot database in order to proceed.") from None

        if init:
            for row in self.eventDetails:
                event = row[0]
                event_descr = row[1]
                event_raw = row[2]
                event_type = row[3]
                qry = "INSERT INTO tbl_event_types (event, event_descr, event_raw, event_type) VALUES (?, ?, ?, ?)"

                try:
                    self.dbh.execute(qry, (
                        event, event_descr, event_raw, event_type
                    ))
                    self.conn.commit()
                except Exception:
                    continue
            self.conn.commit()
Initialize database and create handle to the SQLite database file. Creates the database file if it does not exist. Creates database schema if it does not exist. Args: opts (dict): must specify the database file path in the '__database' key init (bool): initialise the database schema. if the database file does not exist this option will be ignored. Raises: TypeError: arg type was invalid ValueError: arg value was invalid IOError: database I/O failed
__init__
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
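A minimal construction sketch for the handle initialized above. This is illustrative only: it assumes the class is exported as spiderfoot.SpiderFootDb (as the logger records further below suggest) and uses a purely hypothetical database path.

from spiderfoot import SpiderFootDb

# '__database' is the only required key; the path here is illustrative.
opts = {'__database': '/tmp/spiderfoot-example.db'}
db = SpiderFootDb(opts, init=True)  # init=True builds the schema if it does not exist yet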
def create(self) -> None: """Create the database schema. Raises: IOError: database I/O failed """ with self.dbhLock: try: for qry in self.createSchemaQueries: self.dbh.execute(qry) self.conn.commit() for row in self.eventDetails: event = row[0] event_descr = row[1] event_raw = row[2] event_type = row[3] qry = "INSERT INTO tbl_event_types (event, event_descr, event_raw, event_type) VALUES (?, ?, ?, ?)" self.dbh.execute(qry, ( event, event_descr, event_raw, event_type )) self.conn.commit() except sqlite3.Error as e: raise IOError("SQL error encountered when setting up database") from e
Create the database schema. Raises: IOError: database I/O failed
create
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def close(self) -> None: """Close the database handle.""" with self.dbhLock: self.dbh.close()
Close the database handle.
close
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def vacuumDB(self) -> bool: """Vacuum the database. Clears unused database file pages. Returns: bool: success Raises: IOError: database I/O failed """ with self.dbhLock: try: self.dbh.execute("VACUUM") self.conn.commit() return True except sqlite3.Error as e: raise IOError("SQL error encountered when vacuuming the database") from e return False
Vacuum the database. Clears unused database file pages. Returns: bool: success Raises: IOError: database I/O failed
vacuumDB
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def search(self, criteria: dict, filterFp: bool = False) -> list: """Search database. Args: criteria (dict): search criteria such as: - scan_id (search within a scan, if omitted search all) - type (search a specific type, if omitted search all) - value (search values for a specific string, if omitted search all) - regex (search values for a regular expression) ** at least two criteria must be set ** filterFp (bool): filter out false positives Returns: list: search results Raises: TypeError: arg type was invalid ValueError: arg value was invalid IOError: database I/O failed """ if not isinstance(criteria, dict): raise TypeError(f"criteria is {type(criteria)}; expected dict()") from None valid_criteria = ['scan_id', 'type', 'value', 'regex'] for key in list(criteria.keys()): if key not in valid_criteria: criteria.pop(key, None) continue if not isinstance(criteria.get(key), str): raise TypeError(f"criteria[{key}] is {type(criteria.get(key))}; expected str()") from None if not criteria[key]: criteria.pop(key, None) continue if len(criteria) == 0: raise ValueError(f"No valid search criteria provided; expected: {', '.join(valid_criteria)}") from None if len(criteria) == 1: raise ValueError("Only one search criteria provided; expected at least two") qvars = list() qry = "SELECT ROUND(c.generated) AS generated, c.data, \ s.data as 'source_data', \ c.module, c.type, c.confidence, c.visibility, c.risk, c.hash, \ c.source_event_hash, t.event_descr, t.event_type, c.scan_instance_id, \ c.false_positive as 'fp', s.false_positive as 'parent_fp' \ FROM tbl_scan_results c, tbl_scan_results s, tbl_event_types t \ WHERE s.scan_instance_id = c.scan_instance_id AND \ t.event = c.type AND c.source_event_hash = s.hash " if filterFp: qry += " AND c.false_positive <> 1 " if criteria.get('scan_id') is not None: qry += "AND c.scan_instance_id = ? " qvars.append(criteria['scan_id']) if criteria.get('type') is not None: qry += " AND c.type = ? " qvars.append(criteria['type']) if criteria.get('value') is not None: qry += " AND (c.data LIKE ? OR s.data LIKE ?) " qvars.append(criteria['value']) qvars.append(criteria['value']) if criteria.get('regex') is not None: qry += " AND (c.data REGEXP ? OR s.data REGEXP ?) " qvars.append(criteria['regex']) qvars.append(criteria['regex']) qry += " ORDER BY c.data" with self.dbhLock: try: self.dbh.execute(qry, qvars) return self.dbh.fetchall() except sqlite3.Error as e: raise IOError("SQL error encountered when fetching search results") from e
Search database. Args: criteria (dict): search criteria such as: - scan_id (search within a scan, if omitted search all) - type (search a specific type, if omitted search all) - value (search values for a specific string, if omitted search all) - regex (search values for a regular expression) ** at least two criteria must be set ** filterFp (bool): filter out false positives Returns: list: search results Raises: TypeError: arg type was invalid ValueError: arg value was invalid IOError: database I/O failed
search
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
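A hedged usage sketch for search(). The scan ID is hypothetical and `db` is assumed to be the SpiderFootDb handle from the earlier construction sketch; at least two criteria must be supplied, and 'value' is matched with SQL LIKE, so % wildcards apply.

criteria = {
    'scan_id': 'EXAMPLESCANID',   # hypothetical scan instance ID
    'value': '%example.com%',     # LIKE pattern applied to event data
}
for row in db.search(criteria, filterFp=True):
    print(row[4], row[1])         # row[4] = event type, row[1] = event data (per the SELECT above)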
def eventTypes(self) -> list: """Get event types. Returns: list: event types Raises: IOError: database I/O failed """ qry = "SELECT event_descr, event, event_raw, event_type FROM tbl_event_types" with self.dbhLock: try: self.dbh.execute(qry) return self.dbh.fetchall() except sqlite3.Error as e: raise IOError("SQL error encountered when retrieving event types") from e
Get event types. Returns: list: event types Raises: IOError: database I/O failed
eventTypes
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanLogEvents(self, batch: list) -> bool: """Logs a batch of events to the database. Args: batch (list): tuples containing: instanceId, classification, message, component, logTime Raises: TypeError: arg type was invalid IOError: database I/O failed Returns: bool: Whether the logging operation succeeded """ inserts = [] for instanceId, classification, message, component, logTime in batch: if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None if not isinstance(classification, str): raise TypeError(f"classification is {type(classification)}; expected str()") from None if not isinstance(message, str): raise TypeError(f"message is {type(message)}; expected str()") from None if not component: component = "SpiderFoot" inserts.append((instanceId, logTime * 1000, component, classification, message)) if inserts: qry = "INSERT INTO tbl_scan_log \ (scan_instance_id, generated, component, type, message) \ VALUES (?, ?, ?, ?, ?)" with self.dbhLock: try: self.dbh.executemany(qry, inserts) self.conn.commit() except sqlite3.Error as e: if "locked" not in e.args[0] and "thread" not in e.args[0]: raise IOError("Unable to log scan event in database") from e return False return True
Logs a batch of events to the database. Args: batch (list): tuples containing: instanceId, classification, message, component, logTime Raises: TypeError: arg type was invalid IOError: database I/O failed Returns: bool: Whether the logging operation succeeded
scanLogEvents
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanLogEvent(self, instanceId: str, classification: str, message: str, component: str = None) -> None: """Log an event to the database. Args: instanceId (str): scan instance ID classification (str): TBD message (str): TBD component (str): TBD Raises: TypeError: arg type was invalid IOError: database I/O failed Todo: Do something smarter to handle database locks """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None if not isinstance(classification, str): raise TypeError(f"classification is {type(classification)}; expected str()") from None if not isinstance(message, str): raise TypeError(f"message is {type(message)}; expected str()") from None if not component: component = "SpiderFoot" qry = "INSERT INTO tbl_scan_log \ (scan_instance_id, generated, component, type, message) \ VALUES (?, ?, ?, ?, ?)" with self.dbhLock: try: self.dbh.execute(qry, ( instanceId, time.time() * 1000, component, classification, message )) self.conn.commit() except sqlite3.Error as e: if "locked" not in e.args[0] and "thread" not in e.args[0]: raise IOError("Unable to log scan event in database") from e # print("[warning] Couldn't log due to SQLite limitations. You can probably ignore this.") # log.critical(f"Unable to log event in DB due to lock: {e.args[0]}") pass
Log an event to the database. Args: instanceId (str): scan instance ID classification (str): TBD message (str): TBD component (str): TBD Raises: TypeError: arg type was invalid IOError: database I/O failed Todo: Do something smarter to handle database locks
scanLogEvent
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
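Both logging helpers above take plain strings; a small sketch with hypothetical values, assuming the scan instance already exists and `db` is the handle from the first sketch.

import time

db.scanLogEvent('EXAMPLESCANID', 'STATUS', 'Example status message', component='sfp_example')
db.scanLogEvents([
    ('EXAMPLESCANID', 'INFO', 'First batched message', 'sfp_example', time.time()),
    ('EXAMPLESCANID', 'ERROR', 'Second batched message', 'sfp_example', time.time()),
])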
def scanInstanceCreate(self, instanceId: str, scanName: str, scanTarget: str) -> None: """Store a scan instance in the database. Args: instanceId (str): scan instance ID scanName(str): scan name scanTarget (str): scan target Raises: TypeError: arg type was invalid IOError: database I/O failed """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None if not isinstance(scanName, str): raise TypeError(f"scanName is {type(scanName)}; expected str()") from None if not isinstance(scanTarget, str): raise TypeError(f"scanTarget is {type(scanTarget)}; expected str()") from None qry = "INSERT INTO tbl_scan_instance \ (guid, name, seed_target, created, status) \ VALUES (?, ?, ?, ?, ?)" with self.dbhLock: try: self.dbh.execute(qry, ( instanceId, scanName, scanTarget, time.time() * 1000, 'CREATED' )) self.conn.commit() except sqlite3.Error as e: raise IOError("Unable to create scan instance in database") from e
Store a scan instance in the database. Args: instanceId (str): scan instance ID scanName(str): scan name scanTarget (str): scan target Raises: TypeError: arg type was invalid IOError: database I/O failed
scanInstanceCreate
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanInstanceSet(self, instanceId: str, started: str = None, ended: str = None, status: str = None) -> None: """Update the start time, end time or status (or all 3) of a scan instance. Args: instanceId (str): scan instance ID started (str): scan start time ended (str): scan end time status (str): scan status Raises: TypeError: arg type was invalid IOError: database I/O failed """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None qvars = list() qry = "UPDATE tbl_scan_instance SET " if started is not None: qry += " started = ?," qvars.append(started) if ended is not None: qry += " ended = ?," qvars.append(ended) if status is not None: qry += " status = ?," qvars.append(status) # guid = guid is a little hack to avoid messing with , placement above qry += " guid = guid WHERE guid = ?" qvars.append(instanceId) with self.dbhLock: try: self.dbh.execute(qry, qvars) self.conn.commit() except sqlite3.Error: raise IOError("Unable to set information for the scan instance.") from None
Update the start time, end time or status (or all 3) of a scan instance. Args: instanceId (str): scan instance ID started (str): scan start time ended (str): scan end time status (str): scan status Raises: TypeError: arg type was invalid IOError: database I/O failed
scanInstanceSet
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanInstanceGet(self, instanceId: str) -> list: """Return info about a scan instance (name, target, created, started, ended, status) Args: instanceId (str): scan instance ID Returns: list: scan instance info Raises: TypeError: arg type was invalid IOError: database I/O failed """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None qry = "SELECT name, seed_target, ROUND(created/1000) AS created, \ ROUND(started/1000) AS started, ROUND(ended/1000) AS ended, status \ FROM tbl_scan_instance WHERE guid = ?" qvars = [instanceId] with self.dbhLock: try: self.dbh.execute(qry, qvars) return self.dbh.fetchone() except sqlite3.Error as e: raise IOError("SQL error encountered when retrieving scan instance") from e
Return info about a scan instance (name, target, created, started, ended, status) Args: instanceId (str): scan instance ID Returns: list: scan instance info Raises: TypeError: arg type was invalid IOError: database I/O failed
scanInstanceGet
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
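Taken together, the three scan-instance methods above support a simple lifecycle. A sketch with hypothetical values; timestamps follow the millisecond convention used elsewhere in this module.

import time

scan_id = 'EXAMPLESCANID'
db.scanInstanceCreate(scan_id, 'Example scan', 'example.com')
db.scanInstanceSet(scan_id, started=str(time.time() * 1000), status='RUNNING')
db.scanInstanceSet(scan_id, ended=str(time.time() * 1000), status='FINISHED')
name, target, created, started, ended, status = db.scanInstanceGet(scan_id)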
def scanResultSummary(self, instanceId: str, by: str = "type") -> list: """Obtain a summary of the results, filtered by event type, module or entity. Args: instanceId (str): scan instance ID by (str): filter by type Returns: list: scan instance info Raises: TypeError: arg type was invalid ValueError: arg value was invalid IOError: database I/O failed """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None if not isinstance(by, str): raise TypeError(f"by is {type(by)}; expected str()") from None if by not in ["type", "module", "entity"]: raise ValueError(f"Invalid filter by value: {by}") from None if by == "type": qry = "SELECT r.type, e.event_descr, MAX(ROUND(generated)) AS last_in, \ count(*) AS total, count(DISTINCT r.data) as utotal FROM \ tbl_scan_results r, tbl_event_types e WHERE e.event = r.type \ AND r.scan_instance_id = ? GROUP BY r.type ORDER BY e.event_descr" if by == "module": qry = "SELECT r.module, '', MAX(ROUND(generated)) AS last_in, \ count(*) AS total, count(DISTINCT r.data) as utotal FROM \ tbl_scan_results r, tbl_event_types e WHERE e.event = r.type \ AND r.scan_instance_id = ? GROUP BY r.module ORDER BY r.module DESC" if by == "entity": qry = "SELECT r.data, e.event_descr, MAX(ROUND(generated)) AS last_in, \ count(*) AS total, count(DISTINCT r.data) as utotal FROM \ tbl_scan_results r, tbl_event_types e WHERE e.event = r.type \ AND r.scan_instance_id = ? \ AND e.event_type in ('ENTITY') \ GROUP BY r.data, e.event_descr ORDER BY total DESC limit 50" qvars = [instanceId] with self.dbhLock: try: self.dbh.execute(qry, qvars) return self.dbh.fetchall() except sqlite3.Error as e: raise IOError("SQL error encountered when fetching result summary") from e
Obtain a summary of the results, filtered by event type, module or entity. Args: instanceId (str): scan instance ID by (str): filter by type Returns: list: scan instance info Raises: TypeError: arg type was invalid ValueError: arg value was invalid IOError: database I/O failed
scanResultSummary
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanCorrelationSummary(self, instanceId: str, by: str = "rule") -> list: """Obtain a summary of the correlations, filtered by rule or risk Args: instanceId (str): scan instance ID by (str): filter by rule or risk Returns: list: scan correlation summary Raises: TypeError: arg type was invalid ValueError: arg value was invalid IOError: database I/O failed """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None if not isinstance(by, str): raise TypeError(f"by is {type(by)}; expected str()") from None if by not in ["rule", "risk"]: raise ValueError(f"Invalid filter by value: {by}") from None if by == "risk": qry = "SELECT rule_risk, count(*) AS total FROM \ tbl_scan_correlation_results \ WHERE scan_instance_id = ? GROUP BY rule_risk ORDER BY rule_id" if by == "rule": qry = "SELECT rule_id, rule_name, rule_risk, rule_descr, \ count(*) AS total FROM \ tbl_scan_correlation_results \ WHERE scan_instance_id = ? GROUP BY rule_id ORDER BY rule_id" qvars = [instanceId] with self.dbhLock: try: self.dbh.execute(qry, qvars) return self.dbh.fetchall() except sqlite3.Error as e: raise IOError("SQL error encountered when fetching correlation summary") from e
Obtain a summary of the correlations, filtered by rule or risk Args: instanceId (str): scan instance ID by (str): filter by rule or risk Returns: list: scan correlation summary Raises: TypeError: arg type was invalid ValueError: arg value was invalid IOError: database I/O failed
scanCorrelationSummary
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanCorrelationList(self, instanceId: str) -> list: """Obtain a list of the correlations from a scan Args: instanceId (str): scan instance ID Returns: list: scan correlation list Raises: TypeError: arg type was invalid IOError: database I/O failed """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None qry = "SELECT c.id, c.title, c.rule_id, c.rule_risk, c.rule_name, \ c.rule_descr, c.rule_logic, count(e.event_hash) AS event_count FROM \ tbl_scan_correlation_results c, tbl_scan_correlation_results_events e \ WHERE scan_instance_id = ? AND c.id = e.correlation_id \ GROUP BY c.id ORDER BY c.title, c.rule_risk" qvars = [instanceId] with self.dbhLock: try: self.dbh.execute(qry, qvars) return self.dbh.fetchall() except sqlite3.Error as e: raise IOError("SQL error encountered when fetching correlation list") from e
Obtain a list of the correlations from a scan Args: instanceId (str): scan instance ID Returns: list: scan correlation list Raises: TypeError: arg type was invalid IOError: database I/O failed
scanCorrelationList
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanResultEvent( self, instanceId: str, eventType: str = 'ALL', srcModule: str = None, data: list = None, sourceId: list = None, correlationId: str = None, filterFp: bool = False ) -> list: """Obtain the data for a scan and event type. Args: instanceId (str): scan instance ID eventType (str): filter by event type srcModule (str): filter by the generating module data (list): filter by the data sourceId (list): filter by the ID of the source event correlationId (str): filter by the ID of a correlation result filterFp (bool): filter false positives Returns: list: scan results Raises: TypeError: arg type was invalid IOError: database I/O failed """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None if not isinstance(eventType, str) and not isinstance(eventType, list): raise TypeError(f"eventType is {type(eventType)}; expected str() or list()") from None qry = "SELECT ROUND(c.generated) AS generated, c.data, \ s.data as 'source_data', \ c.module, c.type, c.confidence, c.visibility, c.risk, c.hash, \ c.source_event_hash, t.event_descr, t.event_type, s.scan_instance_id, \ c.false_positive as 'fp', s.false_positive as 'parent_fp' \ FROM tbl_scan_results c, tbl_scan_results s, tbl_event_types t " if correlationId: qry += ", tbl_scan_correlation_results_events ce " qry += "WHERE c.scan_instance_id = ? AND c.source_event_hash = s.hash AND \ s.scan_instance_id = c.scan_instance_id AND t.event = c.type" qvars = [instanceId] if correlationId: qry += " AND ce.event_hash = c.hash AND ce.correlation_id = ?" qvars.append(correlationId) if eventType != "ALL": if isinstance(eventType, list): qry += " AND c.type in (" + ','.join(['?'] * len(eventType)) + ")" qvars.extend(eventType) else: qry += " AND c.type = ?" qvars.append(eventType) if filterFp: qry += " AND c.false_positive <> 1" if srcModule: if isinstance(srcModule, list): qry += " AND c.module in (" + ','.join(['?'] * len(srcModule)) + ")" qvars.extend(srcModule) else: qry += " AND c.module = ?" qvars.append(srcModule) if data: if isinstance(data, list): qry += " AND c.data in (" + ','.join(['?'] * len(data)) + ")" qvars.extend(data) else: qry += " AND c.data = ?" qvars.append(data) if sourceId: if isinstance(sourceId, list): qry += " AND c.source_event_hash in (" + ','.join(['?'] * len(sourceId)) + ")" qvars.extend(sourceId) else: qry += " AND c.source_event_hash = ?" qvars.append(sourceId) qry += " ORDER BY c.data" with self.dbhLock: try: self.dbh.execute(qry, qvars) return self.dbh.fetchall() except sqlite3.Error as e: raise IOError("SQL error encountered when fetching result events") from e
Obtain the data for a scan and event type. Args: instanceId (str): scan instance ID eventType (str): filter by event type srcModule (str): filter by the generating module data (list): filter by the data sourceId (list): filter by the ID of the source event correlationId (str): filter by the ID of a correlation result filterFp (bool): filter false positives Returns: list: scan results Raises: TypeError: arg type was invalid IOError: database I/O failed
scanResultEvent
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
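A sketch of pulling results back out with scanResultEvent(); the scan ID is hypothetical and the column positions follow the SELECT above.

rows = db.scanResultEvent('EXAMPLESCANID', eventType='IP_ADDRESS', filterFp=True)
for row in rows:
    data, module, event_hash = row[1], row[3], row[8]   # c.data, c.module, c.hash
    print(module, data, event_hash)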
def scanResultEventUnique(self, instanceId: str, eventType: str = 'ALL', filterFp: bool = False) -> list: """Obtain a unique list of elements. Args: instanceId (str): scan instance ID eventType (str): filter by event type filterFp (bool): filter false positives Returns: list: unique scan results Raises: TypeError: arg type was invalid IOError: database I/O failed """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None if not isinstance(eventType, str): raise TypeError(f"eventType is {type(eventType)}; expected str()") from None qry = "SELECT DISTINCT data, type, COUNT(*) FROM tbl_scan_results \ WHERE scan_instance_id = ?" qvars = [instanceId] if eventType != "ALL": qry += " AND type = ?" qvars.append(eventType) if filterFp: qry += " AND false_positive <> 1" qry += " GROUP BY type, data ORDER BY COUNT(*)" with self.dbhLock: try: self.dbh.execute(qry, qvars) return self.dbh.fetchall() except sqlite3.Error as e: raise IOError("SQL error encountered when fetching unique result events") from e
Obtain a unique list of elements. Args: instanceId (str): scan instance ID eventType (str): filter by event type filterFp (bool): filter false positives Returns: list: unique scan results Raises: TypeError: arg type was invalid IOError: database I/O failed
scanResultEventUnique
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanLogs(self, instanceId: str, limit: int = None, fromRowId: int = 0, reverse: bool = False) -> list: """Get scan logs. Args: instanceId (str): scan instance ID limit (int): limit number of results fromRowId (int): retrieve logs starting from row ID reverse (bool): search result order Returns: list: scan logs Raises: TypeError: arg type was invalid IOError: database I/O failed """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None qry = "SELECT generated AS generated, component, \ type, message, rowid FROM tbl_scan_log WHERE scan_instance_id = ?" if fromRowId: qry += " and rowid > ?" qry += " ORDER BY generated " if reverse: qry += "ASC" else: qry += "DESC" qvars = [instanceId] if fromRowId: qvars.append(str(fromRowId)) if limit is not None: qry += " LIMIT ?" qvars.append(str(limit)) with self.dbhLock: try: self.dbh.execute(qry, qvars) return self.dbh.fetchall() except sqlite3.Error as e: raise IOError("SQL error encountered when fetching scan logs") from e
Get scan logs. Args: instanceId (str): scan instance ID limit (int): limit number of results fromRowId (int): retrieve logs starting from row ID reverse (bool): search result order Returns: list: scan logs Raises: TypeError: arg type was invalid IOError: database I/O failed
scanLogs
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanErrors(self, instanceId: str, limit: int = 0) -> list: """Get scan errors. Args: instanceId (str): scan instance ID limit (int): limit number of results Returns: list: scan errors Raises: TypeError: arg type was invalid IOError: database I/O failed """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None if not isinstance(limit, int): raise TypeError(f"limit is {type(limit)}; expected int()") from None qry = "SELECT generated AS generated, component, \ message FROM tbl_scan_log WHERE scan_instance_id = ? \ AND type = 'ERROR' ORDER BY generated DESC" qvars = [instanceId] if limit: qry += " LIMIT ?" qvars.append(str(limit)) with self.dbhLock: try: self.dbh.execute(qry, qvars) return self.dbh.fetchall() except sqlite3.Error as e: raise IOError("SQL error encountered when fetching scan errors") from e
Get scan errors. Args: instanceId (str): scan instance ID limit (int): limit number of results Returns: list: scan errors Raises: TypeError: arg type was invalid IOError: database I/O failed
scanErrors
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanInstanceDelete(self, instanceId: str) -> bool: """Delete a scan instance. Args: instanceId (str): scan instance ID Returns: bool: success Raises: TypeError: arg type was invalid IOError: database I/O failed """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None qry1 = "DELETE FROM tbl_scan_instance WHERE guid = ?" qry2 = "DELETE FROM tbl_scan_config WHERE scan_instance_id = ?" qry3 = "DELETE FROM tbl_scan_results WHERE scan_instance_id = ?" qry4 = "DELETE FROM tbl_scan_log WHERE scan_instance_id = ?" qvars = [instanceId] with self.dbhLock: try: self.dbh.execute(qry1, qvars) self.dbh.execute(qry2, qvars) self.dbh.execute(qry3, qvars) self.dbh.execute(qry4, qvars) self.conn.commit() except sqlite3.Error as e: raise IOError("SQL error encountered when deleting scan") from e return True
Delete a scan instance. Args: instanceId (str): scan instance ID Returns: bool: success Raises: TypeError: arg type was invalid IOError: database I/O failed
scanInstanceDelete
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanResultsUpdateFP(self, instanceId: str, resultHashes: list, fpFlag: int) -> bool: """Set the false positive flag for a result. Args: instanceId (str): scan instance ID resultHashes (list): list of event hashes fpFlag (int): false positive Returns: bool: success Raises: TypeError: arg type was invalid IOError: database I/O failed """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None if not isinstance(resultHashes, list): raise TypeError(f"resultHashes is {type(resultHashes)}; expected list()") from None with self.dbhLock: for resultHash in resultHashes: qry = "UPDATE tbl_scan_results SET false_positive = ? WHERE \ scan_instance_id = ? AND hash = ?" qvars = [fpFlag, instanceId, resultHash] try: self.dbh.execute(qry, qvars) except sqlite3.Error as e: raise IOError("SQL error encountered when updating false-positive") from e try: self.conn.commit() except sqlite3.Error as e: raise IOError("SQL error encountered when updating false-positive") from e return True
Set the false positive flag for a result. Args: instanceId (str): scan instance ID resultHashes (list): list of event hashes fpFlag (int): false positive Returns: bool: success Raises: TypeError: arg type was invalid IOError: database I/O failed
scanResultsUpdateFP
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
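Marking results as false positives only needs the event hashes; a minimal sketch (hash values are placeholders).

hashes = ['hypotheticalhash1', 'hypotheticalhash2']   # c.hash values from scanResultEvent()
db.scanResultsUpdateFP('EXAMPLESCANID', hashes, 1)    # 1 = flag as false positive, 0 = clear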
def configSet(self, optMap: dict = {}) -> bool: """Store the default configuration in the database. Args: optMap (dict): config options Returns: bool: success Raises: TypeError: arg type was invalid ValueError: arg value was invalid IOError: database I/O failed """ if not isinstance(optMap, dict): raise TypeError(f"optMap is {type(optMap)}; expected dict()") from None if not optMap: raise ValueError("optMap is empty") from None qry = "REPLACE INTO tbl_config (scope, opt, val) VALUES (?, ?, ?)" with self.dbhLock: for opt in list(optMap.keys()): # Module option if ":" in opt: parts = opt.split(':') qvals = [parts[0], parts[1], optMap[opt]] else: # Global option qvals = ["GLOBAL", opt, optMap[opt]] try: self.dbh.execute(qry, qvals) except sqlite3.Error as e: raise IOError("SQL error encountered when storing config, aborting") from e try: self.conn.commit() except sqlite3.Error as e: raise IOError("SQL error encountered when storing config, aborting") from e return True
Store the default configuration in the database. Args: optMap (dict): config options Returns: bool: success Raises: TypeError: arg type was invalid ValueError: arg value was invalid IOError: database I/O failed
configSet
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def configGet(self) -> dict: """Retrieve the config from the database. Returns: dict: config Raises: IOError: database I/O failed """ qry = "SELECT scope, opt, val FROM tbl_config" retval = dict() with self.dbhLock: try: self.dbh.execute(qry) for [scope, opt, val] in self.dbh.fetchall(): if scope == "GLOBAL": retval[opt] = val else: retval[f"{scope}:{opt}"] = val return retval except sqlite3.Error as e: raise IOError("SQL error encountered when fetching configuration") from e
Retrieve the config from the database. Returns: dict: config Raises: IOError: database I/O failed
configGet
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
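The config store is a simple key/value table: global options use the bare key, while module options use the "module:option" form. A round-trip sketch with hypothetical option names.

db.configSet({
    '_debug': '1',                     # global option
    'sfp_example:api_key': 'XYZ',      # module-scoped option
})
current = db.configGet()
print(current.get('sfp_example:api_key'))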
def configClear(self) -> None: """Reset the config to default. Clears the config from the database and lets the hard-coded settings in the code take effect. Raises: IOError: database I/O failed """ qry = "DELETE from tbl_config" with self.dbhLock: try: self.dbh.execute(qry) self.conn.commit() except sqlite3.Error as e: raise IOError("Unable to clear configuration from the database") from e
Reset the config to default. Clears the config from the database and lets the hard-coded settings in the code take effect. Raises: IOError: database I/O failed
configClear
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanConfigSet(self, scan_id, optMap=dict()) -> None: """Store a configuration value for a scan. Args: scan_id (int): scan instance ID optMap (dict): config options Raises: TypeError: arg type was invalid ValueError: arg value was invalid IOError: database I/O failed """ if not isinstance(optMap, dict): raise TypeError(f"optMap is {type(optMap)}; expected dict()") from None if not optMap: raise ValueError("optMap is empty") from None qry = "REPLACE INTO tbl_scan_config \ (scan_instance_id, component, opt, val) VALUES (?, ?, ?, ?)" with self.dbhLock: for opt in list(optMap.keys()): # Module option if ":" in opt: parts = opt.split(':') qvals = [scan_id, parts[0], parts[1], optMap[opt]] else: # Global option qvals = [scan_id, "GLOBAL", opt, optMap[opt]] try: self.dbh.execute(qry, qvals) except sqlite3.Error as e: raise IOError("SQL error encountered when storing config, aborting") from e try: self.conn.commit() except sqlite3.Error as e: raise IOError("SQL error encountered when storing config, aborting") from e
Store a configuration value for a scan. Args: scan_id (int): scan instance ID optMap (dict): config options Raises: TypeError: arg type was invalid ValueError: arg value was invalid IOError: database I/O failed
scanConfigSet
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanConfigGet(self, instanceId: str) -> dict: """Retrieve configuration data for a scan component. Args: instanceId (str): scan instance ID Returns: dict: configuration data Raises: TypeError: arg type was invalid IOError: database I/O failed """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None qry = "SELECT component, opt, val FROM tbl_scan_config \ WHERE scan_instance_id = ? ORDER BY component, opt" qvars = [instanceId] retval = dict() with self.dbhLock: try: self.dbh.execute(qry, qvars) for [component, opt, val] in self.dbh.fetchall(): if component == "GLOBAL": retval[opt] = val else: retval[f"{component}:{opt}"] = val return retval except sqlite3.Error as e: raise IOError("SQL error encountered when fetching configuration") from e
Retrieve configuration data for a scan component. Args: instanceId (str): scan instance ID Returns: dict: configuration data Raises: TypeError: arg type was invalid IOError: database I/O failed
scanConfigGet
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanEventStore(self, instanceId: str, sfEvent, truncateSize: int = 0) -> None: """Store an event in the database. Args: instanceId (str): scan instance ID sfEvent (SpiderFootEvent): event to be stored in the database truncateSize (int): truncate size for event data Raises: TypeError: arg type was invalid ValueError: arg value was invalid IOError: database I/O failed """ from spiderfoot import SpiderFootEvent if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None if not instanceId: raise ValueError("instanceId is empty") from None if not isinstance(sfEvent, SpiderFootEvent): raise TypeError(f"sfEvent is {type(sfEvent)}; expected SpiderFootEvent()") from None if not isinstance(sfEvent.generated, float): raise TypeError(f"sfEvent.generated is {type(sfEvent.generated)}; expected float()") from None if not sfEvent.generated: raise ValueError("sfEvent.generated is empty") from None if not isinstance(sfEvent.eventType, str): raise TypeError(f"sfEvent.eventType is {type(sfEvent.eventType,)}; expected str()") from None if not sfEvent.eventType: raise ValueError("sfEvent.eventType is empty") from None if not isinstance(sfEvent.data, str): raise TypeError(f"sfEvent.data is {type(sfEvent.data)}; expected str()") from None if not sfEvent.data: raise ValueError("sfEvent.data is empty") from None if not isinstance(sfEvent.module, str): raise TypeError(f"sfEvent.module is {type(sfEvent.module)}; expected str()") from None if not sfEvent.module and sfEvent.eventType != "ROOT": raise ValueError("sfEvent.module is empty") from None if not isinstance(sfEvent.confidence, int): raise TypeError(f"sfEvent.confidence is {type(sfEvent.confidence)}; expected int()") from None if not 0 <= sfEvent.confidence <= 100: raise ValueError(f"sfEvent.confidence value is {type(sfEvent.confidence)}; expected 0 - 100") from None if not isinstance(sfEvent.visibility, int): raise TypeError(f"sfEvent.visibility is {type(sfEvent.visibility)}; expected int()") from None if not 0 <= sfEvent.visibility <= 100: raise ValueError(f"sfEvent.visibility value is {type(sfEvent.visibility)}; expected 0 - 100") from None if not isinstance(sfEvent.risk, int): raise TypeError(f"sfEvent.risk is {type(sfEvent.risk)}; expected int()") from None if not 0 <= sfEvent.risk <= 100: raise ValueError(f"sfEvent.risk value is {type(sfEvent.risk)}; expected 0 - 100") from None if not isinstance(sfEvent.sourceEvent, SpiderFootEvent) and sfEvent.eventType != "ROOT": raise TypeError(f"sfEvent.sourceEvent is {type(sfEvent.sourceEvent)}; expected str()") from None if not isinstance(sfEvent.sourceEventHash, str): raise TypeError(f"sfEvent.sourceEventHash is {type(sfEvent.sourceEventHash)}; expected str()") from None if not sfEvent.sourceEventHash: raise ValueError("sfEvent.sourceEventHash is empty") from None storeData = sfEvent.data # truncate if required if isinstance(truncateSize, int) and truncateSize > 0: storeData = storeData[0:truncateSize] # retrieve scan results qry = "INSERT INTO tbl_scan_results \ (scan_instance_id, hash, type, generated, confidence, \ visibility, risk, module, data, source_event_hash) \ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" qvals = [instanceId, sfEvent.hash, sfEvent.eventType, sfEvent.generated, sfEvent.confidence, sfEvent.visibility, sfEvent.risk, sfEvent.module, storeData, sfEvent.sourceEventHash] with self.dbhLock: try: self.dbh.execute(qry, qvals) self.conn.commit() except sqlite3.Error as e: raise IOError(f"SQL error encountered when storing event data ({self.dbh})") from e
Store an event in the database. Args: instanceId (str): scan instance ID sfEvent (SpiderFootEvent): event to be stored in the database truncateSize (int): truncate size for event data Raises: TypeError: arg type was invalid ValueError: arg value was invalid IOError: database I/O failed
scanEventStore
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
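Storing an event requires a SpiderFootEvent (its constructor appears in the event.py records further below). A hedged sketch: the scan instance is assumed to already exist, 'sfp_example' is a hypothetical module name, and `db` is the handle from the first sketch.

from spiderfoot import SpiderFootEvent

root = SpiderFootEvent('ROOT', 'example.com', '', None)
child = SpiderFootEvent('INTERNET_NAME', 'www.example.com', 'sfp_example', root)
db.scanEventStore('EXAMPLESCANID', child)   # pass truncateSize=N to cap very large event data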
def scanInstanceList(self) -> list: """List all previously run scans. Returns: list: previously run scans Raises: IOError: database I/O failed """ # SQLite doesn't support OUTER JOINs, so we need a work-around that # does a UNION of scans with results and scans without results to # get a complete listing. qry = "SELECT i.guid, i.name, i.seed_target, ROUND(i.created/1000), \ ROUND(i.started)/1000 as started, ROUND(i.ended)/1000, i.status, COUNT(r.type) \ FROM tbl_scan_instance i, tbl_scan_results r WHERE i.guid = r.scan_instance_id \ AND r.type <> 'ROOT' GROUP BY i.guid \ UNION ALL \ SELECT i.guid, i.name, i.seed_target, ROUND(i.created/1000), \ ROUND(i.started)/1000 as started, ROUND(i.ended)/1000, i.status, '0' \ FROM tbl_scan_instance i WHERE i.guid NOT IN ( \ SELECT distinct scan_instance_id FROM tbl_scan_results WHERE type <> 'ROOT') \ ORDER BY started DESC" with self.dbhLock: try: self.dbh.execute(qry) return self.dbh.fetchall() except sqlite3.Error as e: raise IOError("SQL error encountered when fetching scan list") from e
List all previously run scans. Returns: list: previously run scans Raises: IOError: database I/O failed
scanInstanceList
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanResultHistory(self, instanceId: str) -> list: """History of data from the scan. Args: instanceId (str): scan instance ID Returns: list: scan data history Raises: TypeError: arg type was invalid IOError: database I/O failed """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None qry = "SELECT STRFTIME('%H:%M %w', generated, 'unixepoch') AS hourmin, \ type, COUNT(*) FROM tbl_scan_results \ WHERE scan_instance_id = ? GROUP BY hourmin, type" qvars = [instanceId] with self.dbhLock: try: self.dbh.execute(qry, qvars) return self.dbh.fetchall() except sqlite3.Error as e: raise IOError(f"SQL error encountered when fetching history for scan {instanceId}") from e
History of data from the scan. Args: instanceId (str): scan instance ID Returns: list: scan data history Raises: TypeError: arg type was invalid IOError: database I/O failed
scanResultHistory
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanElementSourcesDirect(self, instanceId: str, elementIdList: list) -> list: """Get the source IDs, types and data for a set of IDs. Args: instanceId (str): scan instance ID elementIdList (list): TBD Returns: list: TBD Raises: TypeError: arg type was invalid IOError: database I/O failed """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") from None if not isinstance(elementIdList, list): raise TypeError(f"elementIdList is {type(elementIdList)}; expected list()") from None hashIds = [] for hashId in elementIdList: if not hashId: continue if not hashId.isalnum(): continue hashIds.append(hashId) # the output of this needs to be aligned with scanResultEvent, # as other functions call both expecting the same output. qry = "SELECT ROUND(c.generated) AS generated, c.data, \ s.data as 'source_data', \ c.module, c.type, c.confidence, c.visibility, c.risk, c.hash, \ c.source_event_hash, t.event_descr, t.event_type, s.scan_instance_id, \ c.false_positive as 'fp', s.false_positive as 'parent_fp', \ s.type, s.module, st.event_type as 'source_entity_type' \ FROM tbl_scan_results c, tbl_scan_results s, tbl_event_types t, \ tbl_event_types st \ WHERE c.scan_instance_id = ? AND c.source_event_hash = s.hash AND \ s.scan_instance_id = c.scan_instance_id AND st.event = s.type AND \ t.event = c.type AND c.hash in ('%s')" % "','".join(hashIds) qvars = [instanceId] with self.dbhLock: try: self.dbh.execute(qry, qvars) return self.dbh.fetchall() except sqlite3.Error as e: raise IOError("SQL error encountered when getting source element IDs") from e
Get the source IDs, types and data for a set of IDs. Args: instanceId (str): scan instance ID elementIdList (list): TBD Returns: list: TBD Raises: TypeError: arg type was invalid IOError: database I/O failed
scanElementSourcesDirect
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanElementChildrenDirect(self, instanceId: str, elementIdList: list) -> list: """Get the child IDs, types and data for a set of IDs. Args: instanceId (str): scan instance ID elementIdList (list): TBD Returns: list: TBD Raises: TypeError: arg type was invalid IOError: database I/O failed """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") if not isinstance(elementIdList, list): raise TypeError(f"elementIdList is {type(elementIdList)}; expected list()") hashIds = [] for hashId in elementIdList: if not hashId: continue if not hashId.isalnum(): continue hashIds.append(hashId) # the output of this needs to be aligned with scanResultEvent, # as other functions call both expecting the same output. qry = "SELECT ROUND(c.generated) AS generated, c.data, \ s.data as 'source_data', \ c.module, c.type, c.confidence, c.visibility, c.risk, c.hash, \ c.source_event_hash, t.event_descr, t.event_type, s.scan_instance_id, \ c.false_positive as 'fp', s.false_positive as 'parent_fp' \ FROM tbl_scan_results c, tbl_scan_results s, tbl_event_types t \ WHERE c.scan_instance_id = ? AND c.source_event_hash = s.hash AND \ s.scan_instance_id = c.scan_instance_id AND \ t.event = c.type AND s.hash in ('%s')" % "','".join(hashIds) qvars = [instanceId] with self.dbhLock: try: self.dbh.execute(qry, qvars) return self.dbh.fetchall() except sqlite3.Error as e: raise IOError("SQL error encountered when getting child element IDs") from e
Get the child IDs, types and data for a set of IDs. Args: instanceId (str): scan instance ID elementIdList (list): TBD Returns: list: TBD Raises: TypeError: arg type was invalid IOError: database I/O failed
scanElementChildrenDirect
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def scanElementSourcesAll(self, instanceId: str, childData: list) -> list: """Get the full set of upstream IDs which are parents to the supplied set of IDs. Args: instanceId (str): scan instance ID childData (list): TBD Returns: list: TBD Raises: TypeError: arg type was invalid ValueError: arg value was invalid """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") if not isinstance(childData, list): raise TypeError(f"childData is {type(childData)}; expected list()") if not childData: raise ValueError("childData is empty") # Get the first round of source IDs for the leafs keepGoing = True nextIds = list() datamap = dict() pc = dict() for row in childData: # these must be unique values! parentId = row[9] childId = row[8] datamap[childId] = row if parentId in pc: if childId not in pc[parentId]: pc[parentId].append(childId) else: pc[parentId] = [childId] # parents of the leaf set if parentId not in nextIds: nextIds.append(parentId) while keepGoing: parentSet = self.scanElementSourcesDirect(instanceId, nextIds) nextIds = list() keepGoing = False for row in parentSet: parentId = row[9] childId = row[8] datamap[childId] = row if parentId in pc: if childId not in pc[parentId]: pc[parentId].append(childId) else: pc[parentId] = [childId] if parentId not in nextIds: nextIds.append(parentId) # Prevent us from looping at root if parentId != "ROOT": keepGoing = True datamap[parentId] = row return [datamap, pc]
Get the full set of upstream IDs which are parents to the supplied set of IDs. Args: instanceId (str): scan instance ID childData (list): TBD Returns: list: TBD Raises: TypeError: arg type was invalid ValueError: arg value was invalid
scanElementSourcesAll
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
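The element-source helpers chain off scanResultEvent() rows (event hash in column 8, source hash in column 9). A sketch, assuming the scan produced at least one matching event; an empty row set would raise ValueError.

rows = db.scanResultEvent('EXAMPLESCANID', eventType='INTERNET_NAME')
datamap, pc = db.scanElementSourcesAll('EXAMPLESCANID', rows)
# datamap: event hash -> full result row; pc: parent hash -> list of child hashes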
def scanElementChildrenAll(self, instanceId: str, parentIds: list) -> list: """Get the full set of downstream IDs which are children of the supplied set of IDs. Args: instanceId (str): scan instance ID parentIds (list): TBD Returns: list: TBD Raises: TypeError: arg type was invalid Note: This function is not the same as the scanElementParent* functions. This function returns only ids. """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") if not isinstance(parentIds, list): raise TypeError(f"parentIds is {type(parentIds)}; expected list()") datamap = list() keepGoing = True nextIds = list() nextSet = self.scanElementChildrenDirect(instanceId, parentIds) for row in nextSet: datamap.append(row[8]) for row in nextSet: if row[8] not in nextIds: nextIds.append(row[8]) while keepGoing: nextSet = self.scanElementChildrenDirect(instanceId, nextIds) if nextSet is None or len(nextSet) == 0: keepGoing = False break for row in nextSet: datamap.append(row[8]) nextIds = list() nextIds.append(row[8]) return datamap
Get the full set of downstream IDs which are children of the supplied set of IDs. Args: instanceId (str): scan instance ID parentIds (list): TBD Returns: list: TBD Raises: TypeError: arg type was invalid Note: This function is not the same as the scanElementParent* functions. This function returns only ids.
scanElementChildrenAll
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
def correlationResultCreate( self, instanceId: str, ruleId: str, ruleName: str, ruleDescr: str, ruleRisk: str, ruleYaml: str, correlationTitle: str, eventHashes: list ) -> str: """Create a correlation result in the database. Args: instanceId (str): scan instance ID ruleId(str): correlation rule ID ruleName(str): correlation rule name ruleDescr(str): correlation rule description ruleRisk(str): correlation rule risk level ruleYaml(str): correlation rule raw YAML correlationTitle(str): correlation title eventHashes(list): events mapped to the correlation result Raises: TypeError: arg type was invalid IOError: database I/O failed Returns: str: Correlation ID created """ if not isinstance(instanceId, str): raise TypeError(f"instanceId is {type(instanceId)}; expected str()") if not isinstance(ruleId, str): raise TypeError(f"ruleId is {type(ruleId)}; expected str()") if not isinstance(ruleName, str): raise TypeError(f"ruleName is {type(ruleName)}; expected str()") if not isinstance(ruleDescr, str): raise TypeError(f"ruleDescr is {type(ruleDescr)}; expected str()") if not isinstance(ruleRisk, str): raise TypeError(f"ruleRisk is {type(ruleRisk)}; expected str()") if not isinstance(ruleYaml, str): raise TypeError(f"ruleYaml is {type(ruleYaml)}; expected str()") if not isinstance(correlationTitle, str): raise TypeError(f"correlationTitle is {type(correlationTitle)}; expected str()") if not isinstance(eventHashes, list): raise TypeError(f"eventHashes is {type(eventHashes)}; expected list()") uniqueId = str(hashlib.md5(str(time.time() + random.SystemRandom().randint(0, 99999999)).encode('utf-8')).hexdigest()) # noqa: DUO130 qry = "INSERT INTO tbl_scan_correlation_results \ (id, scan_instance_id, title, rule_name, rule_descr, rule_risk, rule_id, rule_logic) \ VALUES (?, ?, ?, ?, ?, ?, ?, ?)" with self.dbhLock: try: self.dbh.execute(qry, ( uniqueId, instanceId, correlationTitle, ruleName, ruleDescr, ruleRisk, ruleId, ruleYaml )) self.conn.commit() except sqlite3.Error as e: raise IOError("Unable to create correlation result in database") from e # Map events to the correlation result qry = "INSERT INTO tbl_scan_correlation_results_events \ (correlation_id, event_hash) \ VALUES (?, ?)" with self.dbhLock: for eventHash in eventHashes: try: self.dbh.execute(qry, ( uniqueId, eventHash )) self.conn.commit() except sqlite3.Error as e: raise IOError("Unable to create correlation result in database") from e return uniqueId
Create a correlation result in the database. Args: instanceId (str): scan instance ID ruleId(str): correlation rule ID ruleName(str): correlation rule name ruleDescr(str): correlation rule description ruleRisk(str): correlation rule risk level ruleYaml(str): correlation rule raw YAML correlationTitle(str): correlation title eventHashes(list): events mapped to the correlation result Raises: TypeError: arg type was invalid IOError: database I/O failed Returns: str: Correlation ID created
correlationResultCreate
python
smicallef/spiderfoot
spiderfoot/db.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/db.py
MIT
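correlationResultCreate() returns the generated correlation ID; a sketch with hypothetical rule metadata and event hashes.

correlation_id = db.correlationResultCreate(
    'EXAMPLESCANID',
    'example_rule',                  # rule ID
    'Example rule',                  # rule name
    'Example rule description',      # rule description
    'INFO',                          # rule risk
    'id: example_rule',              # raw rule YAML
    'Example correlation',           # correlation title
    ['hypotheticalhash1', 'hypotheticalhash2'],
)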
def __init__(self, opts: dict) -> None: """Initialize the SQLite log handler. Args: opts (dict): SpiderFoot config options """ self.opts = opts self.dbh = None self.batch = [] if self.opts.get('_debug', False): self.batch_size = 100 else: self.batch_size = 5 self.shutdown_hook = False super().__init__()
Initialize the SQLite log handler. Args: opts (dict): SpiderFoot config options
__init__
python
smicallef/spiderfoot
spiderfoot/logger.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/logger.py
MIT
def emit(self, record: 'logging.LogRecord') -> None: """Queue a log record for batched storage in the database. Args: record (logging.LogRecord): Log event record """ if not self.shutdown_hook: atexit.register(self.logBatch) self.shutdown_hook = True scanId = getattr(record, "scanId", None) component = getattr(record, "module", None) if scanId: level = ("STATUS" if record.levelname == "INFO" else record.levelname) self.batch.append((scanId, level, record.getMessage(), component, time.time())) if len(self.batch) >= self.batch_size: self.logBatch()
Queue a log record for batched storage in the database. Args: record (logging.LogRecord): Log event record
emit
python
smicallef/spiderfoot
spiderfoot/logger.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/logger.py
MIT
def makeDbh(self) -> None: """Create the database handle.""" self.dbh = SpiderFootDb(self.opts)
Create the database handle.
makeDbh
python
smicallef/spiderfoot
spiderfoot/logger.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/logger.py
MIT
def logListenerSetup(loggingQueue, opts: dict = None) -> 'logging.handlers.QueueListener': """Create and start a SpiderFoot log listener in its own thread. This function should be called as soon as possible in the main process, or whichever process is attached to stdin/stdout. Args: loggingQueue (Queue): Queue (accepts both normal and multiprocessing queue types) Must be instantiated in the main process. opts (dict): SpiderFoot config Returns: spiderFootLogListener (logging.handlers.QueueListener): Log listener """ if opts is None: opts = dict() doLogging = opts.get("__logging", True) debug = opts.get("_debug", False) logLevel = (logging.DEBUG if debug else logging.INFO) # Log to terminal console_handler = logging.StreamHandler(sys.stderr) # Log debug messages to file log_dir = SpiderFootHelpers.logPath() debug_handler = logging.handlers.TimedRotatingFileHandler( f"{log_dir}/spiderfoot.debug.log", when="d", interval=1, backupCount=30 ) # Log error messages to file error_handler = logging.handlers.TimedRotatingFileHandler( f"{log_dir}/spiderfoot.error.log", when="d", interval=1, backupCount=30 ) # Filter by log level console_handler.addFilter(lambda x: x.levelno >= logLevel) debug_handler.addFilter(lambda x: x.levelno >= logging.DEBUG) error_handler.addFilter(lambda x: x.levelno >= logging.WARN) # Set log format log_format = logging.Formatter("%(asctime)s [%(levelname)s] %(module)s : %(message)s") debug_format = logging.Formatter("%(asctime)s [%(levelname)s] %(filename)s:%(lineno)s : %(message)s") console_handler.setFormatter(log_format) debug_handler.setFormatter(debug_format) error_handler.setFormatter(debug_format) if doLogging: handlers = [console_handler, debug_handler, error_handler] else: handlers = [] if doLogging and opts is not None: sqlite_handler = SpiderFootSqliteLogHandler(opts) sqlite_handler.setLevel(logLevel) sqlite_handler.setFormatter(log_format) handlers.append(sqlite_handler) spiderFootLogListener = QueueListener(loggingQueue, *handlers) spiderFootLogListener.start() atexit.register(stop_listener, spiderFootLogListener) return spiderFootLogListener
Create and start a SpiderFoot log listener in its own thread. This function should be called as soon as possible in the main process, or whichever process is attached to stdin/stdout. Args: loggingQueue (Queue): Queue (accepts both normal and multiprocessing queue types) Must be instantiated in the main process. opts (dict): SpiderFoot config Returns: spiderFootLogListener (logging.handlers.QueueListener): Log listener
logListenerSetup
python
smicallef/spiderfoot
spiderfoot/logger.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/logger.py
MIT
def logWorkerSetup(loggingQueue) -> 'logging.Logger': """Root SpiderFoot logger. Args: loggingQueue (Queue): queue to which log records are forwarded Returns: logging.Logger: Logger """ log = logging.getLogger("spiderfoot") # Don't do this more than once if len(log.handlers) == 0: log.setLevel(logging.DEBUG) queue_handler = QueueHandler(loggingQueue) log.addHandler(queue_handler) return log
Root SpiderFoot logger. Args: loggingQueue (Queue): queue to which log records are forwarded Returns: logging.Logger: Logger
logWorkerSetup
python
smicallef/spiderfoot
spiderfoot/logger.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/logger.py
MIT
def stop_listener(listener: 'logging.handlers.QueueListener') -> None: """Stop the log listener. Args: listener (logging.handlers.QueueListener): listener to stop """ with suppress(Exception): listener.stop()
Stop the log listener. Args: listener (logging.handlers.QueueListener): listener to stop
stop_listener
python
smicallef/spiderfoot
spiderfoot/logger.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/logger.py
MIT
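The three logger helpers above are typically wired together from the main process. A hedged sketch, assuming they are importable from spiderfoot.logger and using a multiprocessing queue; the opts values, including the database path, are hypothetical.

import multiprocessing as mp
from spiderfoot.logger import logListenerSetup, logWorkerSetup, stop_listener

loggingQueue = mp.Queue()
listener = logListenerSetup(loggingQueue, opts={'__logging': True, '_debug': False, '__database': '/tmp/spiderfoot-example.db'})
log = logWorkerSetup(loggingQueue)
log.info("logging is wired up")
stop_listener(listener)   # also registered via atexit by logListenerSetup()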
def __init__(self, eventType: str, data: str, module: str, sourceEvent: 'SpiderFootEvent') -> None: """Initialize SpiderFoot event object. Args: eventType (str): Event type, e.g. URL_FORM, RAW_DATA, etc. data (str): Event data, e.g. a URL, port number, webpage content, etc. module (str): Module from which the event originated sourceEvent (SpiderFootEvent): SpiderFootEvent event that triggered this event """ self._generated = time.time() self.data = data self.eventType = eventType self.module = module self.confidence = 100 self.visibility = 100 self.risk = 0 self.sourceEvent = sourceEvent self.__id = f"{self.eventType}{self.generated}{self.module}{random.SystemRandom().randint(0, 99999999)}"
Initialize SpiderFoot event object. Args: eventType (str): Event type, e.g. URL_FORM, RAW_DATA, etc. data (str): Event data, e.g. a URL, port number, webpage content, etc. module (str): Module from which the event originated sourceEvent (SpiderFootEvent): SpiderFootEvent event that triggered this event
__init__
python
smicallef/spiderfoot
spiderfoot/event.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/event.py
MIT
def generated(self) -> float: """Timestamp of event creation time. Returns: float: timestamp of event creation time """ return self._generated
Timestamp of event creation time. Returns: float: timestamp of event creation time
generated
python
smicallef/spiderfoot
spiderfoot/event.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/event.py
MIT
def eventType(self) -> str: """Event type. Returns: str: event type """ return self._eventType
Event type. Returns: str: event type
eventType
python
smicallef/spiderfoot
spiderfoot/event.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/event.py
MIT
def confidence(self) -> int: """How sure are we of this data's validity. Returns: int: confidence score (0 to 100). """ return self._confidence
How sure are we of this data's validity. Returns: int: confidence score (0 to 100).
confidence
python
smicallef/spiderfoot
spiderfoot/event.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/event.py
MIT
def visibility(self) -> int: """How 'visible' was this data (0 to 100). Returns: int: visibility score (0 to 100). """ return self._visibility
How 'visible' was this data (0 to 100). Returns: int: visibility score (0 to 100).
visibility
python
smicallef/spiderfoot
spiderfoot/event.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/event.py
MIT
def risk(self) -> int: """How much risk does this data represent. Returns: int: risk score (0 to 100). """ return self._risk
How much risk does this data represent. Returns: int: risk score (0 to 100).
risk
python
smicallef/spiderfoot
spiderfoot/event.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/event.py
MIT
def hash(self) -> str: """Unique SHA256 hash of the event, or "ROOT". Returns: str: unique SHA256 hash of the event, or "ROOT" """ if self.eventType == "ROOT": return "ROOT" digestStr = self.__id.encode('raw_unicode_escape') return hashlib.sha256(digestStr).hexdigest()
Unique SHA256 hash of the event, or "ROOT". Returns: str: unique SHA256 hash of the event, or "ROOT"
hash
python
smicallef/spiderfoot
spiderfoot/event.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/event.py
MIT
def eventType(self, eventType: str) -> None: """Event type. Args: eventType (str): type of data for this event Raises: TypeError: eventType type was invalid ValueError: eventType value was invalid """ if not isinstance(eventType, str): raise TypeError(f"eventType is {type(eventType)}; expected str()") if not eventType: raise ValueError("eventType is empty") self._eventType = eventType
Event type. Args: eventType (str): type of data for this event Raises: TypeError: eventType type was invalid ValueError: eventType value was invalid
eventType
python
smicallef/spiderfoot
spiderfoot/event.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/event.py
MIT
def confidence(self, confidence: int) -> None: """Event confidence. Args: confidence (int): How sure are we of this data's validity (0 to 100) Raises: TypeError: confidence type was invalid ValueError: confidence value was invalid """ if not isinstance(confidence, int): raise TypeError(f"confidence is {type(confidence)}; expected int()") if not 0 <= confidence <= 100: raise ValueError(f"confidence value is {confidence}; expected 0 - 100") self._confidence = confidence
Event confidence. Args: confidence (int): How sure are we of this data's validity (0 to 100) Raises: TypeError: confidence type was invalid ValueError: confidence value was invalid
confidence
python
smicallef/spiderfoot
spiderfoot/event.py
https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/event.py
MIT
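Finally, the setters above enforce their ranges at assignment time. A short illustrative sketch; the source event `root` is the hypothetical ROOT event from the earlier scanEventStore sketch and 'sfp_example' remains a made-up module name.

evt = SpiderFootEvent('DOMAIN_NAME', 'example.com', 'sfp_example', root)
evt.confidence = 75        # accepted: int within 0 - 100
try:
    evt.confidence = 150   # rejected by the confidence setter
except ValueError as err:
    print(err)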