code | docstring | func_name | language | repo | path | url | license
---|---|---|---|---|---|---|---
def visibility(self, visibility: int) -> None:
"""Event visibility.
Args:
visibility (int): How 'visible' was this data (0 to 100)
Raises:
TypeError: visibility type was invalid
ValueError: visibility value was invalid
"""
if not isinstance(visibility, int):
raise TypeError(f"visibility is {type(visibility)}; expected int()")
if not 0 <= visibility <= 100:
raise ValueError(f"visibility value is {visibility}; expected 0 - 100")
self._visibility = visibility | Event visibility.
Args:
visibility (int): How 'visible' was this data (0 to 100)
Raises:
TypeError: visibility type was invalid
ValueError: visibility value was invalid | visibility | python | smicallef/spiderfoot | spiderfoot/event.py | https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/event.py | MIT |
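The rows in this dump show only the setter bodies. A minimal sketch of how a validating setter like the one above is typically wired to a property (the `@property` getter and class scaffolding below are assumptions for illustration, not part of the dataset row):

```python
class Example:
    """Minimal sketch of a validating visibility property (illustrative only)."""

    def __init__(self) -> None:
        self._visibility = 100  # assumed default for the sketch

    @property
    def visibility(self) -> int:
        return self._visibility

    @visibility.setter
    def visibility(self, visibility: int) -> None:
        # Same validation pattern as the setter body above.
        if not isinstance(visibility, int):
            raise TypeError(f"visibility is {type(visibility)}; expected int()")
        if not 0 <= visibility <= 100:
            raise ValueError(f"visibility value is {visibility}; expected 0 - 100")
        self._visibility = visibility
```

Assigning, say, `Example().visibility = 150` would then raise `ValueError`, matching the behaviour of the setter body above.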
def risk(self, risk: int) -> None:
"""Event risk.
Args:
risk (int): How much risk does this data represent (0 to 100)
Raises:
TypeError: risk type was invalid
ValueError: risk value was invalid
"""
if not isinstance(risk, int):
raise TypeError(f"risk is {type(risk)}; expected int()")
if not 0 <= risk <= 100:
raise ValueError(f"risk value is {risk}; expected 0 - 100")
self._risk = risk | Event risk.
Args:
risk (int): How much risk does this data represent (0 to 100)
Raises:
TypeError: risk type was invalid
ValueError: risk value was invalid | risk | python | smicallef/spiderfoot | spiderfoot/event.py | https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/event.py | MIT |
def module(self, module: str) -> None:
"""Module which created the event.
Args:
module (str): module
Raises:
TypeError: module type was invalid
ValueError: module value was invalid
"""
if not isinstance(module, str):
raise TypeError(f"module is {type(module )}; expected str()")
if not module and self.eventType != "ROOT":
raise ValueError("module is empty")
self._module = module | Module which created the event.
Args:
module (str): module
Raises:
TypeError: module type was invalid
ValueError: module value was invalid | module | python | smicallef/spiderfoot | spiderfoot/event.py | https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/event.py | MIT |
def data(self, data: str) -> None:
"""Event data.
Args:
data (str): data
Raises:
TypeError: data type was invalid
ValueError: data value was invalid
"""
if not isinstance(data, str):
raise TypeError(f"data is {type(data)}; expected str()")
if not data:
raise ValueError(f"data is empty: '{str(data)}'")
self._data = data | Event data.
Args:
data (str): data
Raises:
TypeError: data type was invalid
ValueError: data value was invalid | data | python | smicallef/spiderfoot | spiderfoot/event.py | https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/event.py | MIT |
def sourceEvent(self, sourceEvent: 'SpiderFootEvent') -> None:
"""Source event which lead to this event.
Args:
sourceEvent (SpiderFootEvent): source event
Raises:
TypeError: sourceEvent type was invalid
"""
# "ROOT" is a special "hash" reserved for elements with no parent,
# such as targets provided via the web UI or CLI.
if self.eventType == "ROOT":
self._sourceEvent = None
self._sourceEventHash = "ROOT"
return
if not isinstance(sourceEvent, SpiderFootEvent):
raise TypeError(f"sourceEvent is {type(sourceEvent)}; expected SpiderFootEvent()")
self._sourceEvent = sourceEvent
self._sourceEventHash = self.sourceEvent.hash | Source event which led to this event.
Args:
sourceEvent (SpiderFootEvent): source event
Raises:
TypeError: sourceEvent type was invalid | sourceEvent | python | smicallef/spiderfoot | spiderfoot/event.py | https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/event.py | MIT |
def asDict(self) -> dict:
"""Event object as dictionary.
Returns:
dict: event as dictionary
"""
evtDict = {
'generated': int(self.generated),
'type': self.eventType,
'data': self.data,
'module': self.module,
'source': ''
}
if self.sourceEvent is not None and self.sourceEvent.data is not None:
evtDict['source'] = self.sourceEvent.data
return evtDict | Event object as dictionary.
Returns:
dict: event as dictionary | asDict | python | smicallef/spiderfoot | spiderfoot/event.py | https://github.com/smicallef/spiderfoot/blob/master/spiderfoot/event.py | MIT |
def parseBlacklist(self, blacklist):
"""Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from botvrij.eu
Returns:
list: list of blacklisted host names
"""
hosts = list()
if not blacklist:
return hosts
for line in blacklist.split('\n'):
if not line:
continue
if line.startswith('#'):
continue
host = line.strip().split(",")[0].lower()
# Note: Validation with sf.validHost() is too slow to use here
# if not self.sf.validHost(host, self.opts['_internettlds']):
# continue
hosts.append(host)
return hosts | Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from botvrij.eu
Returns:
list: list of blacklisted host names | parseBlacklist | python | smicallef/spiderfoot | modules/sfp_botvrij.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_botvrij.py | MIT |
def parseBlacklist(self, blacklist):
"""Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from VXVault.net
Returns:
list: list of blacklisted IP addresses and host names
"""
hosts = list()
if not blacklist:
return hosts
for line in blacklist.split('\n'):
if not line:
continue
if not line.startswith('http'):
continue
# Note: URL parsing and validation with sf.validHost() is too slow to use here
url = line.strip().lower()
if len(url.split("/")) < 3:
continue
host = url.split("/")[2]
if not host:
continue
if "." not in host and "::" not in host:
continue
hosts.append(host)
return hosts | Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from VXVault.net
Returns:
list: list of blacklisted IP addresses and host names | parseBlacklist | python | smicallef/spiderfoot | modules/sfp_vxvault.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_vxvault.py | MIT |
def queryDomainDetails(self, qry):
"""Search for hosts on a domain.
Args:
qry (str): domain name
Returns:
dict: search results
"""
headers = {
'X-API-KEY': self.opts['api_key']
}
res = self.sf.fetchUrl(
f"https://fullhunt.io/api/v1/domain/{qry}/details",
timeout=30,
headers=headers,
useragent=self.opts['_useragent']
)
return self.parseApiResponse(res) | Search for hosts on a domain.
Args:
qry (str): domain name
Returns:
dict: search results | queryDomainDetails | python | smicallef/spiderfoot | modules/sfp_fullhunt.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_fullhunt.py | MIT |
def resolveTargets(self, target, validateReverse: bool) -> list:
"""Resolve alternative names for a given target.
Args:
target (SpiderFootTarget): target object
validateReverse (bool): validate domain names resolve
Returns:
list: list of domain names and IP addresses
"""
ret = list()
if not target:
return ret
t = target.targetType
v = target.targetValue
if t in ["IP_ADDRESS", "IPV6_ADDRESS"]:
r = self.sf.resolveIP(v)
if r:
ret.extend(r)
if t == "INTERNET_NAME":
r = self.sf.resolveHost(v)
if r:
ret.extend(r)
r = self.sf.resolveHost6(v)
if r:
ret.extend(r)
if t == "NETBLOCK_OWNER":
max_netblock = self.opts['maxnetblock']
if IPNetwork(v).prefixlen < max_netblock:
self.debug(f"Network size bigger than permitted: {IPNetwork(v).prefixlen} > {max_netblock}")
return list(set(ret))
for addr in IPNetwork(v):
if self.checkForStop():
return list(set(ret))
ipaddr = str(addr)
if ipaddr.split(".")[3] in ['255', '0']:
continue
if '255' in ipaddr.split("."):
continue
ret.append(ipaddr)
# Add the reverse-resolved hostnames as aliases too
names = self.sf.resolveIP(ipaddr)
if not names:
continue
if not validateReverse:
ret.extend(names)
continue
for host in names:
chk = self.sf.resolveHost(host)
if chk and ipaddr in chk:
ret.append(host)
if t == "NETBLOCKV6_OWNER":
max_netblock = self.opts['maxv6netblock']
if IPNetwork(v).prefixlen < max_netblock:
self.debug(f"Network size bigger than permitted: {IPNetwork(v).prefixlen} > {max_netblock}")
return list(set(ret))
for addr in IPNetwork(v):
if self.checkForStop():
return list(set(ret))
ipaddr = str(addr)
ret.append(ipaddr)
# Add the reverse-resolved hostnames as aliases too
names = self.sf.resolveIP(ipaddr)
if not names:
continue
if not validateReverse:
ret.extend(names)
continue
for host in names:
chk = self.sf.resolveHost6(host)
if chk and ipaddr in chk:
ret.append(host)
return list(set(ret)) | Resolve alternative names for a given target.
Args:
target (SpiderFootTarget): target object
validateReverse (bool): validate domain names resolve
Returns:
list: list of domain names and IP addresses | resolveTargets | python | smicallef/spiderfoot | modules/sfp_dnsresolve.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_dnsresolve.py | MIT |
def queryDomain(self, qry):
"""Query a domain
Args:
qry (str): domain
Returns:
str: API response as JSON
"""
headers = {
"Accept": "application/json"
}
res = self.sf.fetchUrl(
f"https://zonefiles.io/q/{self.opts['api_key']}/{qry}",
headers=headers,
timeout=30,
useragent=self.opts['_useragent']
)
time.sleep(self.opts['delay'])
return self.parseApiResponse(res) | Query a domain
Args:
qry (str): domain
Returns:
str: API response as JSON | queryDomain | python | smicallef/spiderfoot | modules/sfp_zonefiles.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_zonefiles.py | MIT |
def queryAddr(self, qaddr):
"""Query SORBS DNS for an IPv4 address.
Args:
qaddr (str): IPv4 address.
Returns:
list: SORBS DNS entries
"""
if not self.sf.validIP(qaddr):
self.debug(f"Invalid IPv4 address {qaddr}")
return None
try:
lookup = self.reverseAddr(qaddr) + '.dnsbl.sorbs.net'
self.debug(f"Checking SORBS blacklist: {lookup}")
return self.sf.resolveHost(lookup)
except Exception as e:
self.debug(f"SORBS did not resolve {qaddr} / {lookup}: {e}")
return None | Query SORBS DNS for an IPv4 address.
Args:
qaddr (str): IPv4 address.
Returns:
list: SORBS DNS entries | queryAddr | python | smicallef/spiderfoot | modules/sfp_sorbs.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_sorbs.py | MIT |
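The DNSBL lookups in this and similar rows call a `reverseAddr()` helper that is not included in the dump. A minimal sketch of the conventional octet reversal used for DNSBL queries (the helper name and its exact behaviour in SpiderFoot are assumptions here):

```python
def reverse_addr(ipaddr: str) -> str:
    """Reverse the octets of an IPv4 address for a DNSBL-style lookup (sketch only)."""
    return ".".join(reversed(ipaddr.split(".")))


# Example: 198.51.100.7 would be checked as 7.100.51.198.dnsbl.sorbs.net
lookup = reverse_addr("198.51.100.7") + ".dnsbl.sorbs.net"
```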
def queryDnsblLevel1(self, qaddr):
"""Query UCEPROTECT DNS Level 1 for an IPv4 address.
Args:
qaddr (str): IPv4 address.
Returns:
list: UCEPROTECT DNS entries
"""
if not self.sf.validIP(qaddr):
self.debug(f"Invalid IPv4 address {qaddr}")
return None
try:
lookup = self.reverseAddr(qaddr) + '.dnsbl-1.uceprotect.net'
self.debug(f"Checking UCEPROTECT blacklist: {lookup}")
return self.sf.resolveHost(lookup)
except Exception as e:
self.debug(f"UCEPROTECT did not resolve {qaddr} / {lookup}: {e}")
return None | Query UCEPROTECT DNS Level 1 for an IPv4 address.
Args:
qaddr (str): IPv4 address.
Returns:
list: UCEPROTECT DNS entries | queryDnsblLevel1 | python | smicallef/spiderfoot | modules/sfp_uceprotect.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_uceprotect.py | MIT |
def queryDnsblLevel2(self, qaddr):
"""Query UCEPROTECT DNS Level 2 for an IPv4 address.
Args:
qaddr (str): IPv4 address.
Returns:
list: UCEPROTECT DNS entries
"""
if not self.sf.validIP(qaddr):
self.debug(f"Invalid IPv4 address {qaddr}")
return None
try:
lookup = self.reverseAddr(qaddr) + '.dnsbl-2.uceprotect.net'
self.debug(f"Checking UCEPROTECT blacklist: {lookup}")
return self.sf.resolveHost(lookup)
except Exception as e:
self.debug(f"UCEPROTECT did not resolve {qaddr} / {lookup}: {e}")
return None | Query UCEPROTECT DNS Level 2 for an IPv4 address.
Args:
qaddr (str): IPv4 address.
Returns:
list: UCEPROTECT DNS entries | queryDnsblLevel2 | python | smicallef/spiderfoot | modules/sfp_uceprotect.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_uceprotect.py | MIT |
def parseBlacklist(self, blacklist):
"""Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from AbuseIPDB
Returns:
list: list of blacklisted IP addresses
"""
ips = list()
if not blacklist:
return ips
for ip in blacklist.split('\n'):
ip = ip.strip()
if ip.startswith('#'):
continue
if not self.sf.validIP(ip) and not self.sf.validIP6(ip):
continue
ips.append(ip)
return ips | Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from AbuseIPDB
Returns:
list: list of blacklisted IP addresses | parseBlacklist | python | smicallef/spiderfoot | modules/sfp_abuseipdb.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_abuseipdb.py | MIT |
def queryIpAddress(self, ip):
"""Query API for an IPv4 or IPv6 address.
Note: Currently unused.
Args:
ip (str): IP address
Returns:
str: API response as JSON
"""
headers = {
'Key': self.opts['api_key'],
'Accept': 'application/json',
}
params = urllib.parse.urlencode({
'ipAddress': ip,
'maxAgeInDays': 30,
})
res = self.sf.fetchUrl(
f"https://api.abuseipdb.com/api/v2/check?{params}",
timeout=self.opts['_fetchtimeout'],
useragent=self.opts['_useragent'],
headers=headers
)
time.sleep(1)
if res['code'] == '429':
self.error("You are being rate-limited by AbuseIPDB")
self.errorState = True
return None
if res['code'] != "200":
self.error("Error retrieving search results from AbuseIPDB")
self.errorState = True
return None
if res['content'] is None:
self.error("Received no content from AbuseIPDB")
self.errorState = True
return None
try:
return json.loads(res['content'])
except Exception as e:
self.debug(f"Error processing JSON response: {e}")
return None | Query API for an IPv4 or IPv6 address.
Note: Currently unused.
Args:
ip (str): IP address
Returns:
str: API response as JSON | queryIpAddress | python | smicallef/spiderfoot | modules/sfp_abuseipdb.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_abuseipdb.py | MIT |
def queryNetblock(self, ip):
"""Query API for a netblock.
Note: Currently unused.
Args:
ip (str): CIDR range
Returns:
str: API response as JSON
"""
headers = {
'Key': self.opts['api_key'],
'Accept': 'application/json',
}
params = urllib.parse.urlencode({
'ipAddress': ip,
'maxAgeInDays': 30,
})
res = self.sf.fetchUrl(
f"https://api.abuseipdb.com/api/v2/check-block?{params}",
timeout=self.opts['_fetchtimeout'],
useragent=self.opts['_useragent'],
headers=headers
)
time.sleep(1)
if res['code'] == '429':
self.error("You are being rate-limited by AbuseIPDB")
self.errorState = True
return None
if res['code'] != "200":
self.error("Error retrieving search results from AbuseIPDB")
self.errorState = True
return None
if res['content'] is None:
self.error("Received no content from AbuseIPDB")
self.errorState = True
return None
try:
return json.loads(res['content'])
except Exception as e:
self.debug(f"Error processing JSON response: {e}")
return None | Query API for a netblock.
Note: Currently unused.
Args:
ip (str): CIDR range
Returns:
str: API response as JSON | queryNetblock | python | smicallef/spiderfoot | modules/sfp_abuseipdb.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_abuseipdb.py | MIT |
def parseBlacklist(self, blacklist):
"""Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from OpenPhish
Returns:
list: list of blacklisted host names
"""
hosts = list()
if not blacklist:
return hosts
for line in blacklist.split('\n'):
if not line:
continue
if not line.startswith('http'):
continue
# Note: URL parsing and validation with sf.validHost() is too slow to use here
url = line.strip().lower()
if len(url.split("/")) < 3:
continue
host = url.split("/")[2]
if not host:
continue
if "." not in host:
continue
hosts.append(host)
return hosts | Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from OpenPhish
Returns:
list: list of blacklisted host names | parseBlacklist | python | smicallef/spiderfoot | modules/sfp_openphish.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_openphish.py | MIT |
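As a quick illustration of the host extraction performed above (the input lines are made up; OpenPhish serves one URL per line):

```python
# Hypothetical feed excerpt, one URL per line.
sample = "\n".join([
    "# comment line, skipped",
    "http://phish.example.com/login",
    "https://203.0.113.10/verify/account",
    "not-a-url, skipped",
])

# Splitting on "/" and taking index 2 yields the host part of each URL.
hosts = [line.split("/")[2] for line in sample.split("\n")
         if line.startswith("http") and len(line.split("/")) >= 3]
# hosts == ["phish.example.com", "203.0.113.10"]
```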
def parseBlacklist(self, blacklist):
"""Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from AlienVault IP Reputation Database
Returns:
list: list of blacklisted IP addresses
"""
ips = list()
if not blacklist:
return ips
for ip in blacklist.split('\n'):
ip = ip.strip().split(" #")[0]
if ip.startswith('#'):
continue
if not self.sf.validIP(ip):
continue
ips.append(ip)
return ips | Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from AlienVault IP Reputation Database
Returns:
list: list of blacklisted IP addresses | parseBlacklist | python | smicallef/spiderfoot | modules/sfp_alienvaultiprep.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_alienvaultiprep.py | MIT |
def query(self, qaddr):
"""Query SURBL DNS.
Args:
qaddr (str): Host name or IPv4 address.
Returns:
list: SURBL DNS entries
"""
if self.sf.validIP(qaddr):
lookup = self.reverseAddr(qaddr) + '.multi.surbl.org'
else:
lookup = f"{qaddr}.multi.surbl.org"
self.debug(f"Checking SURBL blacklist: {lookup}")
try:
return self.sf.resolveHost(lookup)
except Exception as e:
self.debug(f"SURBL did not resolve {qaddr} / {lookup}: {e}")
return None | Query SURBL DNS.
Args:
qaddr (str): Host name or IPv4 address.
Returns:
list: SURBL DNS entries | query | python | smicallef/spiderfoot | modules/sfp_surbl.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_surbl.py | MIT |
def parseProxyList(self, proxy_list):
"""Parse plaintext open proxy list
Args:
proxy_list (str): plaintext open proxy list from multiproxy.org
Returns:
list: list of open proxy IP addresses
"""
ips = list()
if not proxy_list:
return ips
for ip in proxy_list.split('\n'):
ip = ip.strip().split(":")[0]
if ip.startswith('#'):
continue
if not self.sf.validIP(ip):
continue
ips.append(ip)
return ips | Parse plaintext open proxy list
Args:
proxy_list (str): plaintext open proxy list from multiproxy.org
Returns:
list: list of open proxy IP addresses | parseProxyList | python | smicallef/spiderfoot | modules/sfp_multiproxy.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_multiproxy.py | MIT |
def queryAddr(self, qaddr):
"""Query DroneBL DNS for an IPv4 address.
Args:
qaddr (str): IPv4 address.
Returns:
list: DroneBL DNS entries
"""
if not self.sf.validIP(qaddr):
self.debug(f"Invalid IPv4 address {qaddr}")
return None
try:
lookup = self.reverseAddr(qaddr) + '.dnsbl.dronebl.org'
self.debug(f"Checking DroneBL blacklist: {lookup}")
return self.sf.resolveHost(lookup)
except Exception as e:
self.debug(f"DroneBL did not resolve {qaddr} / {lookup}: {e}")
return None | Query DroneBL DNS for an IPv4 address.
Args:
qaddr (str): IPv4 address.
Returns:
list: DroneBL DNS entries | queryAddr | python | smicallef/spiderfoot | modules/sfp_dronebl.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_dronebl.py | MIT |
def httpHeaders(self, ip):
"""Retrieve HTTP headers for IP address
Args:
ip (str): IPv4 address
Returns:
dict: HTTP headers
"""
params = urllib.parse.urlencode({
'q': ip
})
res = self.sf.fetchUrl(
f"https://api.hackertarget.com/httpheaders/?{params}",
useragent=self.opts['_useragent'],
timeout=self.opts['_fetchtimeout']
)
if res['content'] is None:
self.error(f"Unable to fetch HTTP headers for {ip} from HackerTarget.com.")
return None
if res['code'] == '429':
self.error("You are being rate-limited by HackerTarget")
self.errorState = True
return None
if not res['content'].startswith('HTTP/'):
self.debug(f"Found no HTTP headers for {ip}")
return None
headers = dict()
for header in res['content'].splitlines():
if ': ' not in header:
continue
k = header.split(': ')[0].lower()
v = ': '.join(header.split(': ')[1:])
headers[k] = v
return headers | Retrieve HTTP headers for IP address
Args:
ip (str): IPv4 address
Returns:
dict: HTTP headers | httpHeaders | python | smicallef/spiderfoot | modules/sfp_hackertarget.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_hackertarget.py | MIT |
def zoneTransfer(self, ip):
"""Retrieve DNS zone transfer
Args:
ip (str): IPv4 address
Returns:
list: DNS zone
"""
params = urllib.parse.urlencode({
'q': ip
})
res = self.sf.fetchUrl(
f"https://api.hackertarget.com/zonetransfer/?{params}",
useragent=self.opts['_useragent'],
timeout=self.opts['_fetchtimeout']
)
if res['content'] is None:
self.error(f"Unable to fetch DNS zone for {ip} from HackerTarget.com.")
return None
if res['code'] == '429':
self.error("You are being rate-limited by HackerTarget")
self.errorState = True
return None
records = list()
for record in res['content'].splitlines():
if record.strip().startswith(';'):
continue
if record.strip() == '':
continue
records.append(record.strip())
return records | Retrieve DNS zone transfer
Args:
ip (str): IPv4 address
Returns:
list: DNS zone | zoneTransfer | python | smicallef/spiderfoot | modules/sfp_hackertarget.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_hackertarget.py | MIT |
def reverseIpLookup(self, ip):
"""Reverse lookup hosts on the same IP address
Args:
ip (str): IPv4 address
Returns:
list: (co)hosts on provided IP addresses
"""
params = urllib.parse.urlencode({
'q': ip
})
res = self.sf.fetchUrl(
f"https://api.hackertarget.com/reverseiplookup/?{params}",
useragent=self.opts['_useragent'],
timeout=self.opts['_fetchtimeout']
)
if res['content'] is None:
self.error("Unable to fetch hackertarget.com content.")
return None
if res['code'] == '429':
self.error("You are being rate-limited by HackerTarget")
self.errorState = True
return None
if "No records" in res['content']:
return None
hosts = res['content'].split('\n')
self.debug(f"Found {len(hosts)} on {ip}")
return hosts | Reverse lookup hosts on the same IP address
Args:
ip (str): IPv4 address
Returns:
list: (co)hosts on provided IP addresses | reverseIpLookup | python | smicallef/spiderfoot | modules/sfp_hackertarget.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_hackertarget.py | MIT |
def setBlocklistRules(self, blocklist):
"""Parse AdBlock Plus blocklist and set blocklist rules
Args:
blocklist (str): plaintext AdBlock Plus blocklist
"""
if not blocklist:
return
lines = blocklist.split('\n')
self.debug(f"Retrieved {len(lines)} AdBlock blocklist rules")
try:
self.rules = adblockparser.AdblockRules(lines)
except adblockparser.AdblockParsingError as e:
self.errorState = True
self.error(f"Parsing error handling AdBlock list: {e}") | Parse AdBlock Plus blocklist and set blocklist rules
Args:
blocklist (str): plaintext AdBlock Plus blocklist | setBlocklistRules | python | smicallef/spiderfoot | modules/sfp_adblock.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_adblock.py | MIT |
def parseBlacklist(self, blacklist):
"""Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from Talos Intelligence
Returns:
list: list of blacklisted IP addresses
"""
ips = list()
if not blacklist:
return ips
for ip in blacklist.split('\n'):
ip = ip.strip()
if ip.startswith('#'):
continue
if not self.sf.validIP(ip):
continue
ips.append(ip)
return ips | Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from Talos Intelligence
Returns:
list: list of blacklisted IP addresses | parseBlacklist | python | smicallef/spiderfoot | modules/sfp_talosintel.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_talosintel.py | MIT |
def query(self, qry):
"""Retrieve IP address information from Focsec.
Args:
qry (str): IPv4/IPv6 address
Returns:
dict: JSON formatted results
"""
params = urllib.parse.urlencode({
'api_key': self.opts["api_key"],
})
res = self.sf.fetchUrl(
f"https://api.focsec.com/v1/ip/{qry}?{params}",
timeout=self.opts["_fetchtimeout"],
useragent=self.opts['_useragent']
)
if not res:
self.error("No response from Focsec.")
return None
if res['code'] == "400":
self.error("Bad request.")
self.errorState = True
return None
if res['code'] == "401":
self.error("Unauthorized - Invalid API key.")
self.errorState = True
return None
if res['code'] == "402":
self.error("Unauthorized - Payment Required. Subscription or trial period expired.")
self.errorState = True
return None
if res['code'] == "404":
self.debug(f"No results for {qry}")
return None
# Future proofing - Focsec does not implement rate limiting
if res['code'] == "429":
self.error("You are being rate-limited by Focsec.")
return None
if res['code'] != "200":
self.error(f"Unexpected HTTP response code {res['code']} from Focsec.")
return None
if not res['content']:
self.debug("No results from Focsec.")
return None
try:
return json.loads(res['content'])
except Exception as e:
self.debug(f"Error processing JSON response: {e}")
return None | Retrieve IP address information from Focsec.
Args:
qry (str): IPv4/IPv6 address
Returns:
dict: JSON formatted results | query | python | smicallef/spiderfoot | modules/sfp_focsec.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_focsec.py | MIT |
def queryIp(self, ip):
"""Query API for an IPv4 or IPv6 address.
Args:
ip (str): IP address
Returns:
str: API response as JSON
"""
headers = {
'Authorization': f"Bearer {self.opts['api_key']}",
'Accept': 'application/json',
}
res = self.sf.fetchUrl(
f"https://{self.opts['api_hostname']}/v1/assess/ip/{ip}",
timeout=self.opts['_fetchtimeout'],
useragent=self.opts['_useragent'],
headers=headers
)
time.sleep(1)
if res['code'] == '400':
self.error("ThreatJammer.com rejected the IP address. Use only public IP addresses.")
return None
if res['code'] == '422':
self.error("ThreatJammer.com could not process the IP address. Check the format.")
return None
if res['code'] == '429':
self.error("You are being rate-limited by ThreatJammer.com")
self.errorState = True
return None
if res['code'] == '401':
self.error("You are not authorized by ThreatJammer.com. Check your API key.")
self.errorState = True
return None
if res['code'] != "200":
self.error("ThreatJammer.com could not process the IP address. Unknown error.")
self.errorState = True
return None
if res['content'] is None:
self.error("Received no content from ThreatJammer.com")
self.errorState = True
return None
try:
return json.loads(res['content'])
except Exception as e:
self.debug(f"Error processing JSON response: {e}")
return None | Query API for an IPv4 or IPv6 address.
Args:
ip (str): IP address
Returns:
str: API response as JSON | queryIp | python | smicallef/spiderfoot | modules/sfp_threatjammer.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_threatjammer.py | MIT |
def query(self, qry):
"""Query IP address
Args:
qry (str): IPv4/IPv6 address
Returns:
dict: JSON formatted results
"""
fraudguard_url = "https://api.fraudguard.io/ip/" + qry
api_key_account = self.opts['fraudguard_api_key_account']
if isinstance(api_key_account, str):
api_key_account = api_key_account.encode('utf-8')
api_key_password = self.opts['fraudguard_api_key_password']
if isinstance(api_key_password, str):
api_key_password = api_key_password.encode('utf-8')
token = base64.b64encode(api_key_account + ':'.encode('utf-8') + api_key_password)
headers = {
'Authorization': "Basic " + token.decode('utf-8')
}
res = self.sf.fetchUrl(
fraudguard_url,
timeout=self.opts['_fetchtimeout'],
useragent="SpiderFoot",
headers=headers
)
if res['code'] in ["400", "429", "500", "403"]:
self.error("Fraudguard.io API key seems to have been rejected or you have exceeded usage limits for the month.")
self.errorState = True
return None
if res['content'] is None:
self.info(f"No Fraudguard.io info found for {qry}")
return None
try:
return json.loads(res['content'])
except Exception as e:
self.error(f"Error processing JSON response from Fraudguard.io: {e}")
return None | Query IP address
Args:
qry (str): IPv4/IPv6 address
Returns:
dict: JSON formatted results | query | python | smicallef/spiderfoot | modules/sfp_fraudguard.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_fraudguard.py | MIT |
def parseBlacklist(self, blacklist):
"""Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from CyberCrime-Tracker.net
Returns:
list: list of blacklisted IP addresses and host names
"""
hosts = list()
if not blacklist:
return hosts
for line in blacklist.split('\n'):
if not line:
continue
if line.startswith('#'):
continue
# Note: URL parsing and validation with sf.validHost() is too slow to use here
host = line.split("/")[0]
if not host:
continue
if "." not in host:
continue
hosts.append(host.split(':')[0])
return hosts | Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from CyberCrime-Tracker.net
Returns:
list: list of blacklisted IP addresses and host names | parseBlacklist | python | smicallef/spiderfoot | modules/sfp_cybercrimetracker.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_cybercrimetracker.py | MIT |
def parseExitNodes(self, data):
"""Extract exit node IP addresses from TOR relay search results
Args:
data (str): TOR relay search results
Returns:
list: list of TOR exit IP addresses
"""
ips = list()
if not data:
return ips
try:
results = json.loads(data)
except Exception as e:
self.error(f"Error processing JSON response: {e}")
return None
relays = results.get('relays')
if not relays:
return ips
for relay in relays:
or_addresses = relay.get('or_addresses')
if or_addresses:
for ip in or_addresses:
# IPv6 addresses are wrapped in [] (For example: "[127.0.0.1]:443")
if ip.startswith("["):
ip = ip.split('[')[1].split(']')[0]
if self.sf.validIP6(ip):
ips.append(ip)
else:
ip = ip.split(':')[0]
if self.sf.validIP(ip):
ips.append(ip)
# Exit addresses are only listed in the exit_addresses array
# if the address differs from the OR address.
exit_addresses = relay.get('exit_addresses')
if exit_addresses:
for ip in exit_addresses:
if self.sf.validIP(ip) or self.sf.validIP6(ip):
ips.append(ip)
return list(set(ips)) | Extract exit node IP addresses from TOR relay search results
Args:
data (str): TOR relay search results
Returns:
list: list of TOR exit IP addresses | parseExitNodes | python | smicallef/spiderfoot | modules/sfp_torexits.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_torexits.py | MIT |
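The `or_addresses` values handled above mix plain IPv4 endpoints with bracketed IPv6 endpoints. A small standalone illustration of the two branches (addresses are made up):

```python
def strip_port(or_address: str) -> str:
    """Return the bare IP from an Onionoo-style "ip:port" entry (sketch of the logic above)."""
    if or_address.startswith("["):
        # IPv6 entries look like "[2001:db8::1]:443"
        return or_address.split("[")[1].split("]")[0]
    # IPv4 entries look like "203.0.113.5:9001"
    return or_address.split(":")[0]


assert strip_port("203.0.113.5:9001") == "203.0.113.5"
assert strip_port("[2001:db8::1]:443") == "2001:db8::1"
```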
def parseBlacklist(self, blacklist):
"""Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from blocklist.de
Returns:
list: list of blacklisted IP addresses
"""
ips = list()
if not blacklist:
return ips
for ip in blacklist.split('\n'):
ip = ip.strip()
if ip.startswith('#'):
continue
if not self.sf.validIP(ip) and not self.sf.validIP6(ip):
continue
ips.append(ip)
return ips | Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from blocklist.de
Returns:
list: list of blacklisted IP addresses | parseBlacklist | python | smicallef/spiderfoot | modules/sfp_blocklistde.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_blocklistde.py | MIT |
def tlds(self):
"""Valid OpenNIC top-level domains.
Returns:
list: OpenNIC TLDs (and peer TLDs).
"""
return [
'bbs',
'chan',
'cyb',
'dyn',
'epic',
'free',
'geek',
'glue',
'gopher',
'indy',
'libre',
'neo',
'null',
'o',
'oss',
'oz',
'parody',
'pirate',
# Peers
'bazar',
'bit',
'coin',
'emc',
'fur',
'ku',
'lib',
'te',
'ti',
'uu',
] | Valid OpenNIC top-level domains.
Returns:
list: OpenNIC TLDs (and peer TLDs). | tlds | python | smicallef/spiderfoot | modules/sfp_opennic.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_opennic.py | MIT |
def queryAddr(self, qaddr):
"""Query SpamCop DNS for an IPv4 address.
Args:
qaddr (str): IPv4 address.
Returns:
list: SpamCop DNS entries
"""
if not self.sf.validIP(qaddr):
self.debug(f"Invalid IPv4 address {qaddr}")
return None
try:
lookup = self.reverseAddr(qaddr) + '.bl.spamcop.net'
self.debug(f"Checking SpamCop blacklist: {lookup}")
return self.sf.resolveHost(lookup)
except Exception as e:
self.debug(f"SpamCop did not resolve {qaddr} / {lookup}: {e}")
return None | Query SpamCop DNS for an IPv4 address.
Args:
qaddr (str): IPv4 address.
Returns:
list: SpamCop DNS entries | queryAddr | python | smicallef/spiderfoot | modules/sfp_spamcop.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_spamcop.py | MIT |
def detectCountryFromPhone(self, srcPhoneNumber: str) -> str:
"""Lookup name of country from phone number region code.
Args:
srcPhoneNumber (str): phone number
Returns:
str: country name
"""
if not isinstance(srcPhoneNumber, str):
return None
try:
phoneNumber = phonenumbers.parse(srcPhoneNumber)
except Exception:
self.debug(f"Skipped invalid phone number: {srcPhoneNumber}")
return None
try:
countryCode = region_code_for_country_code(phoneNumber.country_code)
except Exception:
self.debug(f"Lookup of region code failed for phone number: {srcPhoneNumber}")
return None
if not countryCode:
return None
return SpiderFootHelpers.countryNameFromCountryCode(countryCode.upper()) | Lookup name of country from phone number region code.
Args:
srcPhoneNumber (str): phone number
Returns:
str: country name | detectCountryFromPhone | python | smicallef/spiderfoot | modules/sfp_countryname.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_countryname.py | MIT |
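The region lookup above chains two calls from the phonenumbers library. A minimal standalone sketch (the sample number is made up and the import path shown is an assumption):

```python
import phonenumbers
from phonenumbers.phonenumberutil import region_code_for_country_code

# parse() extracts the country calling code (44 for the sample number below),
# and region_code_for_country_code() maps it to an ISO region code ("GB").
number = phonenumbers.parse("+442083661177")
region = region_code_for_country_code(number.country_code)  # -> "GB"
```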
def detectCountryFromDomainName(self, srcDomain: str) -> str:
"""Lookup name of country from TLD of domain name.
Args:
srcDomain (str): domain
Returns:
str: country name
"""
if not isinstance(srcDomain, str):
return None
# Split domain into parts by '.'
# Country TLDs are reserved
domainParts = srcDomain.split(".")
# Search for country TLD in the domain parts - reversed
for part in domainParts[::-1]:
country_name = SpiderFootHelpers.countryNameFromTld(part)
if country_name:
return country_name
return None | Lookup name of country from TLD of domain name.
Args:
srcDomain (str): domain
Returns:
str: country name | detectCountryFromDomainName | python | smicallef/spiderfoot | modules/sfp_countryname.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_countryname.py | MIT |
def detectCountryFromIBAN(self, srcIBAN: str) -> str:
"""Detect name of country from IBAN.
Args:
srcIBAN (str): IBAN
Returns:
str: country name
"""
if not isinstance(srcIBAN, str):
return None
return SpiderFootHelpers.countryNameFromCountryCode(srcIBAN[0:2]) | Detect name of country from IBAN.
Args:
srcIBAN (str): IBAN
Returns:
str: country name | detectCountryFromIBAN | python | smicallef/spiderfoot | modules/sfp_countryname.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_countryname.py | MIT |
def detectCountryFromData(self, srcData: str) -> list:
"""Detect name of country from event data (WHOIS lookup, Geo Info, Physical Address, etc)
Args:
srcData (str): event data
Returns:
list: list of countries
"""
countries = list()
if not srcData:
return countries
# Get dictionary of country codes and country names
abbvCountryCodes = SpiderFootHelpers.countryCodes()
# Look for countrycodes and country in source data
for countryName in abbvCountryCodes.values():
if countryName.lower() not in srcData.lower():
continue
# Look for country name in source data
# Spaces are not included since "New Jersey" and others
# will get interpreted as "Jersey", etc.
matchCountries = re.findall(r"[,'\"\:\=\[\(\[\n\t\r\.] ?" + countryName + r"[,'\"\:\=\[\(\[\n\t\r\.]", srcData, re.IGNORECASE)
if matchCountries:
countries.append(countryName)
# Look for "Country: ", usually found in Whois records
matchCountries = re.findall("country: (.+?)", srcData, re.IGNORECASE)
if matchCountries:
for m in matchCountries:
m = m.strip()
if m in abbvCountryCodes:
countries.append(abbvCountryCodes[m])
if m in abbvCountryCodes.values():
countries.append(m)
return list(set(countries)) | Detect name of country from event data (WHOIS lookup, Geo Info, Physical Address, etc)
Args:
srcData (str): event data
Returns:
list: list of countries | detectCountryFromData | python | smicallef/spiderfoot | modules/sfp_countryname.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_countryname.py | MIT |
def queryAddr(self, qaddr):
"""Query Spamhaus Zen DNS for an IPv4 address.
Args:
qaddr (str): IPv4 address.
Returns:
list: Spamhaus Zen DNS entries
"""
if not self.sf.validIP(qaddr):
self.debug(f"Invalid IPv4 address {qaddr}")
return None
try:
lookup = self.reverseAddr(qaddr) + '.zen.spamhaus.org'
self.debug(f"Checking Spamhaus Zen blacklist: {lookup}")
return self.sf.resolveHost(lookup)
except Exception as e:
self.debug(f"Spamhaus Zen did not resolve {qaddr} / {lookup}: {e}")
return None | Query Spamhaus Zen DNS for an IPv4 address.
Args:
qaddr (str): IPv4 address.
Returns:
list: Spamhaus Zen DNS entries | queryAddr | python | smicallef/spiderfoot | modules/sfp_spamhaus.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_spamhaus.py | MIT |
def queryDomain(self, qry):
"""Query a domain
Args:
qry (str): domain
Returns:
str: API response as JSON
"""
params = {
"domain": qry.encode('raw_unicode_escape').decode("ascii", errors='replace')
}
headers = {
"Accept": "application/json",
'api-key': self.opts['api_key']
}
res = self.sf.fetchUrl(
'https://www.hybrid-analysis.com/api/v2/search/terms',
headers=headers,
timeout=15,
useragent="Falcon Sandbox",
postData=params
)
time.sleep(self.opts['delay'])
return self.parseApiResponse(res) | Query a domain
Args:
qry (str): domain
Returns:
str: API response as JSON | queryDomain | python | smicallef/spiderfoot | modules/sfp_hybrid_analysis.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_hybrid_analysis.py | MIT |
def queryHost(self, qry):
"""Query a host
Args:
qry (str): host
Returns:
str: API response as JSON
"""
params = {
"host": qry.encode('raw_unicode_escape').decode("ascii", errors='replace')
}
headers = {
"Accept": "application/json",
'api-key': self.opts['api_key']
}
res = self.sf.fetchUrl(
'https://www.hybrid-analysis.com/api/v2/search/terms',
headers=headers,
timeout=15,
useragent="Falcon Sandbox",
postData=params
)
time.sleep(self.opts['delay'])
return self.parseApiResponse(res) | Query a host
Args:
qry (str): host
Returns:
str: API response as JSON | queryHost | python | smicallef/spiderfoot | modules/sfp_hybrid_analysis.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_hybrid_analysis.py | MIT |
def queryHash(self, qry):
"""Query a hash
Args:
qry (str): hash
Returns:
str: API response as JSON
"""
params = {
"hash": qry.encode('raw_unicode_escape').decode("ascii", errors='replace')
}
headers = {
"Accept": "application/json",
'api-key': self.opts['api_key']
}
res = self.sf.fetchUrl(
'https://www.hybrid-analysis.com/api/v2/search/hash',
headers=headers,
timeout=15,
useragent="Falcon Sandbox",
postData=params
)
time.sleep(self.opts['delay'])
return self.parseApiResponse(res) | Query a hash
Args:
qry (str): hash
Returns:
str: API response as JSON | queryHash | python | smicallef/spiderfoot | modules/sfp_hybrid_analysis.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_hybrid_analysis.py | MIT |
def parseApiResponse(self, res: dict):
"""Parse HTTP response from API
Args:
res (dict): HTTP response from SpiderFoot.fetchUrl()
Returns:
str: API response as JSON
"""
if not res:
self.error("No response from Hybrid Analysis.")
return None
if res['code'] == '400':
self.error("Failed to retrieve content from Hybrid Analysis: Invalid request")
self.debug(f"API response: {res['content']}")
return None
# Future proofing - Hybrid Analysis does not implement rate limiting
if res['code'] == '429':
self.error("Failed to retrieve content from Hybrid Analysis: rate limit exceeded")
self.errorState = True
return None
# Catch all non-200 status codes, and presume something went wrong
if res['code'] != '200':
self.error(f"Failed to retrieve content from Hybrid Analysis: Unexpected response status {res['code']}")
self.errorState = True
return None
if res['content'] is None:
return None
try:
return json.loads(res['content'])
except Exception as e:
self.debug(f"Error processing JSON response: {e}")
return None | Parse HTTP response from API
Args:
res (dict): HTTP response from SpiderFoot.fetchUrl()
Returns:
str: API response as JSON | parseApiResponse | python | smicallef/spiderfoot | modules/sfp_hybrid_analysis.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_hybrid_analysis.py | MIT |
def parseFeodoTrackerBlacklist(self, blacklist):
"""Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from Abuse.ch Feodo Tracker
Returns:
list: list of blacklisted IP addresses
"""
ips = list()
if not blacklist:
return ips
for ip in blacklist.split('\n'):
ip = ip.strip()
if not ip:
continue
if ip.startswith('#'):
continue
if not self.sf.validIP(ip):
continue
ips.append(ip)
return ips | Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from Abuse.ch Feodo Tracker
Returns:
list: list of blacklisted IP addresses | parseFeodoTrackerBlacklist | python | smicallef/spiderfoot | modules/sfp_abusech.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_abusech.py | MIT |
def parseSslBlacklist(self, blacklist):
"""Parse plaintext blacklist
Args:
blacklist (str): CSV blacklist from Abuse.ch SSL Blacklist
Returns:
list: list of blacklisted IP addresses
"""
ips = list()
if not blacklist:
return ips
for line in blacklist.split('\n'):
line = line.strip()
if not line:
continue
if line.startswith('#'):
continue
csv = line.split(',')
if len(csv) < 2:
continue
ip = csv[1]
if not self.sf.validIP(ip):
continue
ips.append(ip)
return ips | Parse plaintext blacklist
Args:
blacklist (str): CSV blacklist from Abuse.ch SSL Blacklist
Returns:
list: list of blacklisted IP addresses | parseSslBlacklist | python | smicallef/spiderfoot | modules/sfp_abusech.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_abusech.py | MIT |
def parseUrlHausBlacklist(self, blacklist):
"""Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from Abuse.ch URL Haus
Returns:
list: list of blacklisted hosts
"""
hosts = list()
if not blacklist:
return hosts
for line in blacklist.split('\n'):
if not line:
continue
if line.startswith('#'):
continue
# Note: URL parsing and validation with sf.validHost() is too slow to use here
url = line.strip().lower()
if len(url.split("/")) < 3:
continue
host = url.split("/")[2].split(':')[0]
if not host:
continue
if "." not in host:
continue
hosts.append(host)
return hosts | Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from Abuse.ch URL Haus
Returns:
list: list of blacklisted hosts | parseUrlHausBlacklist | python | smicallef/spiderfoot | modules/sfp_abusech.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_abusech.py | MIT |
def parseBlocklist(self, blocklist):
"""Parse plaintext CoinBlocker list
Args:
blocklist (str): plaintext CoinBlocker list
Returns:
list: list of blocked host names
"""
hosts = list()
if not blocklist:
return hosts
for line in blocklist.split('\n'):
if not line:
continue
if line.startswith('#'):
continue
host = line.strip()
# Note: Validation with sf.validHost() is too slow to use here
# if not self.sf.validHost(host, self.opts['_internettlds']):
# continue
if not host:
continue
hosts.append(host.lower())
return hosts | Parse plaintext CoinBlocker list
Args:
blocklist (str): plaintext CoinBlocker list
Returns:
list: list of blocked host names | parseBlocklist | python | smicallef/spiderfoot | modules/sfp_coinblocker.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_coinblocker.py | MIT |
def query(self, qry):
"""Query IOCs
Args:
qry (str): IP address
Returns:
str: API response data as JSON
"""
params = {
'query': 'search_ioc',
'search_term': qry
}
headers = {
"Accept": "application/json",
}
res = self.sf.fetchUrl(
"https://threatfox-api.abuse.ch/api/v1/",
useragent=self.opts['_useragent'],
timeout=self.opts['_fetchtimeout'],
headers=headers,
postData=json.dumps(params)
)
time.sleep(1)
if res['content'] is None:
return None
if res['code'] == "429":
self.error("You are being rate-limited by ThreatFox.")
self.errorState = True
return None
if res['code'] != '200':
self.error(f"Unexpected reply from ThreatFox: {res['code']}")
self.errorState = True
return None
try:
json_result = json.loads(res['content'])
except Exception as e:
self.debug(f"Error processing JSON response from ThreatFox: {e}")
return None
query_status = json_result.get('query_status')
if query_status == 'no_result':
self.debug(f"No results from ThreatFox for: {qry}")
return None
if query_status != 'ok':
self.debug(f"ThreatFox query failed: {query_status}")
return None
data = json_result.get('data')
if not data:
self.debug(f"No results from ThreatFox for: {qry}")
return None
return data | Query IOCs
Args:
qry (str): IP address
Returns:
str: API response data as JSON | query | python | smicallef/spiderfoot | modules/sfp_threatfox.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_threatfox.py | MIT |
def query(self, qaddr):
"""Query Abusix Mail Intelligence DNS.
Args:
qaddr (str): Host name or IPv4 address.
Returns:
list: Abusix DNS entries
"""
if self.sf.validIP(qaddr):
lookup = f"{self.reverseIpAddress(qaddr)}.{self.opts['api_key']}.combined.mail.abusix.zone"
elif self.sf.validIP6(qaddr):
lookup = f"{self.reverseIp6Address(qaddr)}.{self.opts['api_key']}.combined.mail.abusix.zone"
else:
lookup = f"{qaddr}.{self.opts['api_key']}.combined.mail.abusix.zone"
self.debug(f"Checking Abusix Mail Intelligence blacklist: {lookup}")
try:
return self.sf.resolveHost(lookup)
except Exception as e:
self.debug(f"Abusix Mail Intelligence did not resolve {qaddr} / {lookup}: {e}")
return None | Query Abusix Mail Intelligence DNS.
Args:
qaddr (str): Host name or IPv4 address.
Returns:
list: Abusix DNS entries | query | python | smicallef/spiderfoot | modules/sfp_abusix.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_abusix.py | MIT |
def query(self, qry, limit=500, offset=0):
"""Query the Mnemonic PassiveDNS v3 API.
Args:
qry (str): domain name or IP address
limit (int): Limit the number of returned values.
offset (int): Skip the initial <offset> number of values in the resultset.
Returns:
dict: results as JSON
"""
params = urllib.parse.urlencode({
'limit': limit,
'offset': offset
})
res = self.sf.fetchUrl(
f"https://api.mnemonic.no/pdns/v3/{qry}?{params}",
timeout=self.opts['timeout'],
useragent=self.opts['_useragent']
)
# Unauthenticated users are limited to 100 requests per minute, and 1000 requests per day.
time.sleep(0.75)
if res['content'] is None:
self.info("No results found for " + qry)
return None
try:
data = json.loads(res['content'])
except Exception as e:
self.debug(f"Error processing JSON response from Mnemonic: {e}")
return None
response_code = data.get('responseCode')
if not response_code:
self.debug("Error retrieving search results.")
return None
if response_code == 402:
self.debug("Error retrieving search results: Resource limit exceeded")
self.errorState = True
return None
if response_code != 200:
self.debug(f"Error retrieving search results: {response_code}")
return None
if 'data' not in data:
self.info(f"No results found for {qry}")
return None
size = data.get('size')
count = data.get('count')
if not count or not size:
self.info(f"No results found for {qry}")
return None
self.info(f"Retrieved {size} of {count} results")
return data['data'] | Query the Mnemonic PassiveDNS v3 API.
Args:
qry (str): domain name or IP address
limit (int): Limit the number of returned values.
offset (int): Skip the initial <offset> number of values in the resultset.
Returns:
dict: results as JSON | query | python | smicallef/spiderfoot | modules/sfp_mnemonic.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_mnemonic.py | MIT |
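Given the limit/offset parameters above, a caller could page through large result sets. A hypothetical paging loop (the `module` handle and target domain are placeholders, not part of the dataset row):

```python
# Hypothetical: page through Mnemonic PassiveDNS results 500 records at a time.
records = []
offset = 0
limit = 500
while True:
    page = module.query("example.com", limit=limit, offset=offset)
    if not page:
        break
    records.extend(page)
    if len(page) < limit:  # a short page means the end of the result set
        break
    offset += limit
```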
def queryCompanyEnrichment(self, qry):
"""Enrich domain with company information.
Args:
qry (str): domain name
Returns:
dict: company information
"""
api_key = self.opts['companyenrichment_api_key']
if not api_key:
return None
params = urllib.parse.urlencode({
'api_key': api_key,
'domain': qry.encode('raw_unicode_escape').decode("ascii", errors='replace'),
})
res = self.sf.fetchUrl(
f"https://companyenrichment.abstractapi.com/v1/?{params}",
useragent=self.opts['_useragent']
)
time.sleep(1)
if not res:
self.debug("No response from AbstractAPI Company Enrichment API endpoint")
return None
return self.parseApiResponse(res) | Enrich domain with company information.
Args:
qry (str): domain name
Returns:
dict: company information | queryCompanyEnrichment | python | smicallef/spiderfoot | modules/sfp_abstractapi.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_abstractapi.py | MIT |
def queryPhoneValidation(self, qry):
"""Verify phone number and enrich with carrier and location information.
Args:
qry (str): phone number
Returns:
dict: phone number information
"""
api_key = self.opts['phonevalidation_api_key']
if not api_key:
return None
params = urllib.parse.urlencode({
'api_key': api_key,
'phone': qry.encode('raw_unicode_escape').decode("ascii", errors='replace'),
})
res = self.sf.fetchUrl(
f"https://phonevalidation.abstractapi.com/v1/?{params}",
useragent=self.opts['_useragent']
)
time.sleep(1)
if not res:
self.debug("No response from AbstractAPI Phone Validation API endpoint")
return None
return self.parseApiResponse(res) | Verify phone number and enrich with carrier and location information.
Args:
qry (str): phone number
Returns:
dict: phone number information | queryPhoneValidation | python | smicallef/spiderfoot | modules/sfp_abstractapi.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_abstractapi.py | MIT |
def queryIpGeolocation(self, qry):
"""Enrich IP address with geolocation information.
Args:
qry (str): IPv4 address
Returns:
dict: location information
"""
api_key = self.opts['ipgeolocation_api_key']
if not api_key:
return None
params = urllib.parse.urlencode({
'api_key': api_key,
'ip_address': qry.encode('raw_unicode_escape').decode("ascii", errors='replace'),
})
res = self.sf.fetchUrl(
f"https://ipgeolocation.abstractapi.com/v1/?{params}",
useragent=self.opts['_useragent']
)
time.sleep(1)
if not res:
self.debug("No response from AbstractAPI Phone Validation API endpoint")
return None
return self.parseApiResponse(res) | Enrich IP address with geolocation information.
Args:
qry (str): IPv4 address
Returns:
dict: location information | queryIpGeolocation | python | smicallef/spiderfoot | modules/sfp_abstractapi.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_abstractapi.py | MIT |
def searchLegalName(self, qry):
"""Fuzzy search for legal entity by name
Args:
qry (str): legal entity name
Returns:
dict: search results
"""
params = urllib.parse.urlencode({
'q': qry.encode('raw_unicode_escape').decode("ascii", errors='replace'),
'field': "entity.legalName"
})
headers = {
'Accept': 'application/vnd.api+json'
}
res = self.sf.fetchUrl(
f"https://api.gleif.org/api/v1/fuzzycompletions?{params}",
timeout=30,
headers=headers,
useragent=self.opts['_useragent']
)
if res['code'] == "429":
self.error("You are being rate-limited by GLEIF.")
return None
try:
results = json.loads(res['content'])
except Exception as e:
self.debug(f"Error processing JSON response: {e}")
return None
data = results.get('data')
if not data:
return None
if not len(data):
return None
return data | Fuzzy search for legal entity by name
Args:
qry (str): legal entity name
Returns:
dict: search results | searchLegalName | python | smicallef/spiderfoot | modules/sfp_gleif.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_gleif.py | MIT |
def searchAutocompletions(self, qry):
"""Search for legal entity name autocompletions
Args:
qry (str): legal entity name
Returns:
dict: search results
"""
params = urllib.parse.urlencode({
'q': qry.encode('raw_unicode_escape').decode("ascii", errors='replace'),
'field': "fulltext"
})
headers = {
'Accept': 'application/vnd.api+json'
}
res = self.sf.fetchUrl(
f"https://api.gleif.org/api/v1/autocompletions?{params}",
timeout=30,
headers=headers,
useragent=self.opts['_useragent']
)
if res['code'] == "429":
self.error("You are being rate-limited by GLEIF.")
return None
try:
results = json.loads(res['content'])
except Exception as e:
self.debug(f"Error processing JSON response: {e}")
return None
data = results.get('data')
if not data:
return None
if not len(data):
return None
return data | Search for legal entity name autocompletions
Args:
qry (str): legal entity name
Returns:
dict: search results | searchAutocompletions | python | smicallef/spiderfoot | modules/sfp_gleif.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_gleif.py | MIT |
def query(self, selector: str, qry: str) -> str:
"""Search Keybase for a domain name or username.
Args:
selector (str): query type ("usernames" | "domain")
qry (str): username
Returns:
str: Search results as JSON string
"""
if not selector:
return None
if not qry:
return None
params = {
selector: qry.encode('raw_unicode_escape').decode("ascii", errors='replace')
}
headers = {
'Accept': "application/json"
}
res = self.sf.fetchUrl(
'https://keybase.io/_/api/1.0/user/lookup.json?' + urllib.parse.urlencode(params),
headers=headers,
timeout=15,
useragent=self.opts['_useragent']
)
# In this case, the HTTP code will always be 200 when Keybase is queried;
# the actual response code is stored in the 'status' field of the response body.
if res['code'] != '200':
self.error(f"Unexpected reply from Keybase: {res['code']}")
return None
try:
content = json.loads(res['content'])
except Exception as e:
self.debug(f"Error processing JSON response: {e}")
return None
status = content.get('status')
if not status:
return None
code = status.get('code')
if code != 0:
self.error(f"Unexpected JSON response code reply from Keybase: {code}")
return None
them = content.get('them')
if not isinstance(them, list):
return None
return them | Search Keybase for a domain name or username.
Args:
selector (str): query type ("usernames" | "domain")
qry (str): username
Returns:
str: Search results as JSON string | query | python | smicallef/spiderfoot | modules/sfp_keybase.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_keybase.py | MIT |
def processUrl(self, url: str) -> dict:
"""Fetch data from a URL and obtain all links that should be followed.
Args:
url (str): URL to fetch
Returns:
dict: links identified in URL content
"""
site = self.sf.urlFQDN(url)
cookies = None
# Filter out certain file types (if user chooses to)
if list(filter(lambda ext: url.lower().split('?')[0].endswith('.' + ext.lower()), self.opts['filterfiles'])):
# self.debug(f"Ignoring URL with filtered file extension: {link}")
return None
if site in self.siteCookies:
self.debug(f"Restoring cookies for {site}: {self.siteCookies[site]}")
cookies = self.siteCookies[site]
# Fetch the contents of the supplied URL
fetched = self.sf.fetchUrl(
url,
cookies=cookies,
timeout=self.opts['_fetchtimeout'],
useragent=self.opts['_useragent'],
sizeLimit=10000000,
verify=False
)
self.fetchedPages[url] = True
if not fetched:
return None
# Track cookies a site has sent, then send them back in subsequent requests
if self.opts['usecookies'] and fetched['headers'] is not None:
if fetched['headers'].get('Set-Cookie'):
self.siteCookies[site] = fetched['headers'].get('Set-Cookie')
self.debug(f"Saving cookies for {site}: {self.siteCookies[site]}")
if url not in self.urlEvents:
# TODO: be more descriptive
self.error("Something strange happened - shouldn't get here: url not in self.urlEvents")
self.urlEvents[url] = None
# Notify modules about the content obtained
self.contentNotify(url, fetched, self.urlEvents[url])
real_url = fetched['realurl']
if real_url and real_url != url:
# self.debug(f"Redirect of {url} to {real_url}")
# Store the content for the redirect so that it isn't fetched again
self.fetchedPages[real_url] = True
# Notify modules about the new link
self.urlEvents[real_url] = self.linkNotify(real_url, self.urlEvents[url])
url = real_url # override the URL if we had a redirect
data = fetched['content']
if not data:
return None
if isinstance(data, bytes):
data = data.decode('utf-8', errors='replace')
# Extract links from the content
links = SpiderFootHelpers.extractLinksFromHtml(
url,
data,
self.getTarget().getNames()
)
if not links:
self.debug(f"No links found at {url}")
return None
# Notify modules about the links found
# Aside from the first URL, this will be the first time a new
# URL is spotted.
for link in links:
if not self.opts['reportduplicates']:
if link in self.urlEvents:
continue
# Supply the SpiderFootEvent of the parent URL as the parent
self.urlEvents[link] = self.linkNotify(link, self.urlEvents[url])
self.debug(f"Links found from parsing: {links.keys()}")
return links | Fetch data from a URL and obtain all links that should be followed.
Args:
url (str): URL to fetch
Returns:
dict: links identified in URL content | processUrl | python | smicallef/spiderfoot | modules/sfp_spider.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_spider.py | MIT |
def cleanLinks(self, links: list) -> list:
"""Clear out links that we don't want to follow.
Args:
links (list): links
Returns:
list: links suitable for spidering
"""
returnLinks = dict()
for link in links:
linkBase = SpiderFootHelpers.urlBaseUrl(link)
linkFQDN = self.sf.urlFQDN(link)
# Skip external sites (typical behaviour..)
if not self.getTarget().matches(linkFQDN):
# self.debug('Ignoring external site: ' + link)
continue
# Optionally skip sub-domain sites
if self.opts['nosubs'] and not \
self.getTarget().matches(linkFQDN, includeChildren=False):
# self.debug("Ignoring subdomain: " + link)
continue
# Skip parent domain sites
if not self.getTarget().matches(linkFQDN, includeParents=False):
# self.debug("Ignoring parent domain: " + link)
continue
# Optionally skip user directories
if self.opts['filterusers'] and '/~' in link:
# self.debug("Ignoring user folder: " + link)
continue
# If we are respecting robots.txt, filter those out too
if linkBase in self.robotsRules and self.opts['robotsonly']:
if list(filter(lambda blocked: blocked.lower() in link.lower() or blocked == '*', self.robotsRules[linkBase])):
# self.debug("Ignoring page found in robots.txt: " + link)
continue
# All tests passed, add link to be spidered
self.debug(f"Adding URL for spidering: {link}")
returnLinks[link] = links[link]
return list(returnLinks.keys()) | Clear out links that we don't want to follow.
Args:
links (list): links
Returns:
list: links suitable for spidering | cleanLinks | python | smicallef/spiderfoot | modules/sfp_spider.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_spider.py | MIT |
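A standalone sketch of the robots.txt filter used in cleanLinks(): a link is dropped when any blocked path recorded for its base URL appears in the link, or when the rule is a bare '*'. The rule list here is illustrative, not taken from a real robots.txt.

robots_rules = {"https://example.com": ["/private", "/tmp"]}

def blocked_by_robots(link: str, base: str) -> bool:
    rules = robots_rules.get(base, [])
    return any(rule == "*" or rule.lower() in link.lower() for rule in rules)

print(blocked_by_robots("https://example.com/private/page", "https://example.com"))  # True
print(blocked_by_robots("https://example.com/public", "https://example.com"))        # False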
def parseBlacklist(self, blacklist):
"""Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from phishtank.com
Returns:
list: list of blacklisted host names and associated PhishTank IDs
"""
hosts = list()
if not blacklist:
return hosts
for line in blacklist.split('\n'):
if not line:
continue
if line.startswith('#'):
continue
phish_id = line.strip().split(",")[0]
url = str(line.strip().split(",")[1]).lower()
# Note: URL parsing and validation with sf.validHost() is too slow to use here
if len(url.split("/")) < 3:
continue
host = url.split("/")[2]
if not host:
continue
if "." not in host:
continue
hosts.append([phish_id, host])
return hosts | Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from phishtank.com
Returns:
list: list of blacklisted host names and associated PhishTank IDs | parseBlacklist | python | smicallef/spiderfoot | modules/sfp_phishtank.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_phishtank.py | MIT |
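A sketch of the per-line parsing in parseBlacklist(): each feed line is treated as "phish_id,url", and the host is taken as the third '/'-separated component of the URL. The sample lines are fabricated, only shaped like the PhishTank CSV feed.

blacklist = "\n".join([
    "# comment line",
    "12345,http://malicious.example.com/login",
    "67890,https://bad.example.org/paypal/verify",
])

hosts = []
for line in blacklist.split("\n"):
    if not line or line.startswith("#"):
        continue
    phish_id = line.strip().split(",")[0]
    url = line.strip().split(",")[1].lower()
    parts = url.split("/")
    if len(parts) < 3 or "." not in parts[2]:
        continue
    hosts.append([phish_id, parts[2]])

print(hosts)  # [['12345', 'malicious.example.com'], ['67890', 'bad.example.org']]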
def searchCompany(self, qry):
"""Search for company name
Args:
qry (str): company name
Returns:
str
"""
version = '0.4'
apiparam = ""
if self.opts['api_key']:
apiparam = "&api_token=" + self.opts['api_key']
params = urllib.parse.urlencode({
'q': qry.encode('raw_unicode_escape').decode("ascii", errors='replace'),
'format': 'json',
'order': 'score',
'confidence': self.opts['confidence']
})
res = self.sf.fetchUrl(
f"https://api.opencorporates.com/v{version}/companies/search?{params}{apiparam}",
timeout=60, # High timeouts as they can sometimes take a while
useragent=self.opts['_useragent']
)
if res['code'] == "401":
self.error("Invalid OpenCorporates API key.")
return None
if res['code'] == "403":
self.error("You are being rate-limited by OpenCorporates.")
return None
try:
data = json.loads(res['content'])
except Exception as e:
self.debug(f"Error processing JSON response: {e}")
return None
if 'results' not in data:
return None
return data['results'] | Search for company name
Args:
qry (str): company name
Returns:
str | searchCompany | python | smicallef/spiderfoot | modules/sfp_opencorporates.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_opencorporates.py | MIT |
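A sketch of the query-string construction in searchCompany(), using urllib.parse.urlencode the same way the module does; the company name, confidence value and API version here are placeholders, not OpenCorporates defaults.

import urllib.parse

version = "0.4"
params = urllib.parse.urlencode({
    "q": "Example Company Ltd",  # company name being searched
    "format": "json",
    "order": "score",
    "confidence": 50,            # placeholder confidence threshold
})
print(f"https://api.opencorporates.com/v{version}/companies/search?{params}")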
def parseBlocklist(self, blocklist):
"""Parse plaintext block list
Args:
blocklist (str): plaintext Steven Black Hosts block list
Returns:
list: list of blocked host names
"""
hosts = list()
if not blocklist:
return hosts
for line in blocklist.split('\n'):
if not line:
continue
if line.startswith('#'):
continue
host = line.strip().split(" ")[1]
# Note: Validation with sf.validHost() is too slow to use here
# if not self.sf.validHost(host, self.opts['_internettlds']):
# continue
if not host:
continue
hosts.append(host.lower())
return hosts | Parse plaintext block list
Args:
blocklist (str): plaintext Steven Black Hosts block list
Returns:
list: list of blocked host names | parseBlocklist | python | smicallef/spiderfoot | modules/sfp_stevenblack_hosts.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_stevenblack_hosts.py | MIT |
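A sketch of parseBlocklist() run against a fabricated hosts-file fragment in the "0.0.0.0 hostname" layout the Steven Black list uses; the second whitespace-separated field is taken as the host and lower-cased.

blocklist = "# header\n0.0.0.0 ads.example.com\n0.0.0.0 Tracker.Example.NET\n"

hosts = []
for line in blocklist.split("\n"):
    if not line or line.startswith("#"):
        continue
    host = line.strip().split(" ")[1]
    if host:
        hosts.append(host.lower())

print(hosts)  # ['ads.example.com', 'tracker.example.net']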
def query(self, qry):
"""Search iknowwhatyoudownload.com for an IPv4/IPv6 address.
Args:
qry: IPv4/IPv6 address
Returns:
dict: JSON response containing downloaded content
"""
params = urllib.parse.urlencode({
'ip': qry,
'days': self.opts['daysback'],
'key': self.opts['api_key'],
})
res = self.sf.fetchUrl(
f"https://api.antitor.com/history/peer/?{params}",
timeout=self.opts['_fetchtimeout'],
useragent="SpiderFoot"
)
if res['code'] != "200":
self.error(f"Unexpected HTTP response code {res['code']} from iknowwhatyoudownload.com.")
return None
if res['content'] is None:
self.info(f"No results for {qry} from iknowwhatyoudownload.com")
return None
try:
data = json.loads(res['content'])
except Exception as e:
self.error(f"Error processing JSON response from iknowwhatyoudownload.com: {e}")
return None
error = data.get('error')
if error and error == "INVALID_DAYS":
self.errorState = True
self.error(f"The number of days you have configured ({self.opts['daysback']}) was not accepted. If you have the demo key, try 30 days or less.")
return None
contents = data.get('contents')
if not contents:
return None
return contents | Search iknowwhatyoudownload.com for an IPv4/IPv6 address.
Args:
qry: IPv4/IPv6 address
Returns:
dict: JSON response containing downloaded content | query | python | smicallef/spiderfoot | modules/sfp_iknowwhatyoudownload.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_iknowwhatyoudownload.py | MIT |
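A small sketch of the error handling in query() above; the response body is a fabricated example of the "error" field the code checks for, not real iknowwhatyoudownload.com output.

import json

res_content = json.dumps({"error": "INVALID_DAYS"})
data = json.loads(res_content)

if data.get("error") == "INVALID_DAYS":
    print("daysback setting rejected; demo keys are limited to around 30 days")
elif not data.get("contents"):
    print("no torrent history returned")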
def parseBlacklist(self, blacklist):
"""Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from VoIP Blacklist (VoIPBL)
Returns:
list: list of blacklisted IP addresses
"""
ips = list()
if not blacklist:
return ips
for cidr in blacklist.split('\n'):
cidr = cidr.strip()
if not cidr:
continue
if cidr.startswith('#'):
continue
try:
for ip in IPNetwork(cidr):
ips.append(str(ip))
except Exception:
continue
return ips | Parse plaintext blacklist
Args:
blacklist (str): plaintext blacklist from VoIP Blacklist (VoIPBL)
Returns:
list: list of blacklisted IP addresses | parseBlacklist | python | smicallef/spiderfoot | modules/sfp_voipbl.py | https://github.com/smicallef/spiderfoot/blob/master/modules/sfp_voipbl.py | MIT |
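A sketch of the CIDR expansion in parseBlacklist(), using the same netaddr.IPNetwork call as the module; the block below is an example range, not an entry from the VoIPBL feed.

from netaddr import IPNetwork

ips = []
for cidr in ["192.0.2.0/30", "not-a-cidr"]:
    try:
        ips.extend(str(ip) for ip in IPNetwork(cidr))
    except Exception:
        continue  # malformed entries are skipped, as in the module

print(ips)  # ['192.0.2.0', '192.0.2.1', '192.0.2.2', '192.0.2.3']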
def test_default(self):
"""
Test default(self, line)
"""
sfcli = SpiderFootCli()
io_output = io.StringIO()
sys.stdout = io_output
sfcli.default("")
sys.stdout = sys.__stdout__
output = io_output.getvalue()
self.assertIn("Unknown command", output) | Test default(self, line) | test_default | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
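The tests above capture CLI output by swapping sys.stdout for a StringIO buffer; an equivalent pattern with contextlib.redirect_stdout is sketched here, with "example output" standing in for the CLI's actual message.

import contextlib
import io

buffer = io.StringIO()
with contextlib.redirect_stdout(buffer):
    print("example output")

assert "example output" in buffer.getvalue()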
def test_default_should_ignore_comments(self):
"""
Test default(self, line)
"""
sfcli = SpiderFootCli()
io_output = io.StringIO()
sys.stdout = io_output
result = sfcli.default("# test comment")
sys.stdout = sys.__stdout__
output = io_output.getvalue()
self.assertEqual(None, result)
self.assertEqual("", output) | Test default(self, line) | test_default_should_ignore_comments | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_complete_start_should_return_a_list(self):
"""
Test complete_start(self, text, line, startidx, endidx)
"""
sfcli = SpiderFootCli()
start = sfcli.complete_start(None, None, None, None)
self.assertIsInstance(start, list)
self.assertEqual([], start) | Test complete_start(self, text, line, startidx, endidx) | test_complete_start_should_return_a_list | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_complete_find_should_return_a_list(self):
"""
Test complete_find(self, text, line, startidx, endidx)
"""
sfcli = SpiderFootCli()
find = sfcli.complete_find(None, None, None, None)
self.assertIsInstance(find, list)
self.assertEqual([], find) | Test complete_find(self, text, line, startidx, endidx) | test_complete_find_should_return_a_list | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_complete_data_should_return_a_list(self):
"""
Test complete_data(self, text, line, startidx, endidx)
"""
sfcli = SpiderFootCli()
data = sfcli.complete_data(None, None, None, None)
self.assertIsInstance(data, list)
self.assertEqual([], data) | Test complete_data(self, text, line, startidx, endidx) | test_complete_data_should_return_a_list | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_complete_default(self):
"""
Test complete_default(self, text, line, startidx, endidx)
"""
sfcli = SpiderFootCli()
default = sfcli.complete_default("", "-t -m", None, None)
self.assertIsInstance(default, list)
self.assertEqual('TBD', 'TBD')
default = sfcli.complete_default("", "-m -t", None, None)
self.assertIsInstance(default, list)
self.assertEqual('TBD', 'TBD') | Test complete_default(self, text, line, startidx, endidx) | test_complete_default | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_complete_default_invalid_text_should_return_a_string(self):
"""
Test complete_default(self, text, line, startidx, endidx)
"""
sfcli = SpiderFootCli()
default = sfcli.complete_default(None, "example line", None, None)
self.assertIsInstance(default, list)
self.assertEqual([], default) | Test complete_default(self, text, line, startidx, endidx) | test_complete_default_invalid_text_should_return_a_string | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_complete_default_invalid_line_should_return_a_string(self):
"""
Test complete_default(self, text, line, startidx, endidx)
"""
sfcli = SpiderFootCli()
default = sfcli.complete_default("example text", None, None, None)
self.assertIsInstance(default, list)
self.assertEqual([], default) | Test complete_default(self, text, line, startidx, endidx) | test_complete_default_invalid_line_should_return_a_string | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_do_debug_should_toggle_debug(self):
"""
Test do_debug(self, line)
"""
sfcli = SpiderFootCli(self.cli_default_options)
sfcli.do_debug(None)
initial_debug_state = sfcli.ownopts['cli.debug']
sfcli.do_debug(None)
new_debug_state = sfcli.ownopts['cli.debug']
self.assertNotEqual(initial_debug_state, new_debug_state) | Test do_debug(self, line) | test_do_debug_should_toggle_debug | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_do_spool_should_toggle_spool(self):
"""
Test do_spool(self, line)
"""
sfcli = SpiderFootCli()
sfcli.ownopts['cli.spool_file'] = '/dev/null'
sfcli.do_spool(None)
initial_spool_state = sfcli.ownopts['cli.spool']
sfcli.do_spool(None)
new_spool_state = sfcli.ownopts['cli.spool']
self.assertNotEqual(initial_spool_state, new_spool_state) | Test do_spool(self, line) | test_do_spool_should_toggle_spool | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_do_history_should_toggle_history_option(self):
"""
Test do_history(self, line)
"""
sfcli = SpiderFootCli(self.cli_default_options)
sfcli.do_history("0")
initial_history_state = sfcli.ownopts['cli.history']
sfcli.do_history("1")
new_history_state = sfcli.ownopts['cli.history']
self.assertNotEqual(initial_history_state, new_history_state) | Test do_history(self, line) | test_do_history_should_toggle_history_option | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_precmd_should_return_line(self):
"""
Test precmd(self, line)
"""
sfcli = SpiderFootCli()
sfcli.ownopts['cli.history'] = False
sfcli.ownopts['cli.spool'] = False
line = "example line"
precmd = sfcli.precmd(line)
self.assertEqual(line, precmd) | Test precmd(self, line) | test_precmd_should_return_line | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_precmd_should_print_line_to_history_file(self):
"""
Test precmd(self, line)
"""
sfcli = SpiderFootCli()
sfcli.ownopts['cli.history'] = True
sfcli.ownopts['cli.spool'] = False
line = "example line"
precmd = sfcli.precmd(line)
self.assertEqual(line, precmd)
self.assertEqual('TBD', 'TBD') | Test precmd(self, line) | test_precmd_should_print_line_to_history_file | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_precmd_should_print_line_to_spool_file(self):
"""
Test precmd(self, line)
"""
sfcli = SpiderFootCli()
sfcli.ownopts['cli.history'] = False
sfcli.ownopts['cli.spool'] = True
sfcli.ownopts['cli.spool_file'] = '/dev/null'
line = "example line"
precmd = sfcli.precmd(line)
self.assertEqual(line, precmd)
self.assertEqual('TBD', 'TBD') | Test precmd(self, line) | test_precmd_should_print_line_to_spool_file | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_dprint_should_print_if_debug_option_is_set(self):
"""
Test dprint(self, msg, err=False, deb=False, plain=False, color=None)
"""
sfcli = SpiderFootCli()
sfcli.ownopts['cli.debug'] = True
sfcli.ownopts['cli.spool'] = False
io_output = io.StringIO()
sys.stdout = io_output
sfcli.dprint("example output")
sys.stdout = sys.__stdout__
output = io_output.getvalue()
self.assertIn("example output", output) | Test dprint(self, msg, err=False, deb=False, plain=False, color=None) | test_dprint_should_print_if_debug_option_is_set | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_dprint_should_not_print_unless_debug_option_is_set(self):
"""
Test dprint(self, msg, err=False, deb=False, plain=False, color=None)
"""
sfcli = SpiderFootCli()
sfcli.ownopts['cli.debug'] = False
sfcli.ownopts['cli.spool'] = False
io_output = io.StringIO()
sys.stdout = io_output
sfcli.dprint("example output")
sys.stdout = sys.__stdout__
output = io_output.getvalue()
self.assertEqual("", output)
def test_ddprint_should_print_if_debug_option_is_set(self):
"""
Test ddprint(self, msg)
"""
sfcli = SpiderFootCli()
sfcli.ownopts['cli.debug'] = True
sfcli.ownopts['cli.spool'] = False
io_output = io.StringIO()
sys.stdout = io_output
sfcli.ddprint("example debug output")
sys.stdout = sys.__stdout__
output = io_output.getvalue()
self.assertIn("example debug output", output) | Test ddprint(self, msg) | test_ddprint_should_print_if_debug_option_is_set | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_ddprint_should_not_print_unless_debug_option_is_set(self):
"""
Test ddprint(self, msg)
"""
sfcli = SpiderFootCli()
sfcli.ownopts['cli.debug'] = False
sfcli.ownopts['cli.spool'] = False
io_output = io.StringIO()
sys.stdout = io_output
sfcli.ddprint("example debug output")
sys.stdout = sys.__stdout__
output = io_output.getvalue()
self.assertEqual("", output) | Test ddprint(self, msg) | test_ddprint_should_not_print_unless_debug_option_is_set | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_edprint_should_print_error_regardless_of_debug_option(self):
"""
Test edprint(self, msg)
"""
sfcli = SpiderFootCli()
sfcli.ownopts['cli.debug'] = False
sfcli.ownopts['cli.spool'] = False
io_output = io.StringIO()
sys.stdout = io_output
sfcli.edprint("example debug output")
sys.stdout = sys.__stdout__
output = io_output.getvalue()
self.assertIn("example debug output", output) | Test edprint(self, msg) | test_edprint_should_print_error_regardless_of_debug_option | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_pretty_should_return_a_string(self):
"""
Test pretty(self, data, titlemap=None)
"""
sfcli = SpiderFootCli()
invalid_types = [None, "", list(), dict()]
for invalid_type in invalid_types:
with self.subTest(invalid_type=invalid_type):
pretty = sfcli.pretty(invalid_type)
self.assertEqual("", pretty) | Test pretty(self, data, titlemap=None) | test_pretty_should_return_a_string | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_request_invalid_url_should_return_none(self):
"""
Test request(self, url, post=None)
"""
sfcli = SpiderFootCli()
invalid_types = [None, list(), dict()]
for invalid_type in invalid_types:
with self.subTest(invalid_type=invalid_type):
result = sfcli.request(invalid_type)
self.assertEqual(None, result) | Test request(self, url, post=None) | test_request_invalid_url_should_return_none | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_emptyline_should_return_none(self):
"""
Test emptyline(self)
"""
sfcli = SpiderFootCli()
emptyline = sfcli.emptyline()
self.assertEqual(None, emptyline) | Test emptyline(self) | test_emptyline_should_return_none | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_completedefault_should_return_empty_list(self):
"""
Test completedefault(self, text, line, begidx, endidx)
"""
sfcli = SpiderFootCli()
completedefault = sfcli.completedefault(None, None, None, None)
self.assertIsInstance(completedefault, list)
self.assertEqual([], completedefault) | Test completedefault(self, text, line, begidx, endidx) | test_completedefault_should_return_empty_list | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_myparseline_should_return_a_list_of_two_lists(self):
"""
Test myparseline(self, cmdline, replace=True)
"""
sfcli = SpiderFootCli()
parsed_line = sfcli.myparseline(None)
self.assertEqual(len(parsed_line), 2)
self.assertIsInstance(parsed_line, list)
self.assertIsInstance(parsed_line[0], list)
self.assertIsInstance(parsed_line[1], list)
parsed_line = sfcli.myparseline("")
self.assertEqual(len(parsed_line), 2)
self.assertIsInstance(parsed_line, list)
self.assertIsInstance(parsed_line[0], list)
self.assertIsInstance(parsed_line[1], list) | Test myparseline(self, cmdline, replace=True) | test_myparseline_should_return_a_list_of_two_lists | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_send_output(self):
"""
Test send_output(self, data, cmd, titles=None, total=True, raw=False)
"""
sfcli = SpiderFootCli()
io_output = io.StringIO()
sys.stdout = io_output
sfcli.send_output("{}", "", raw=True)
sys.stdout = sys.__stdout__
output = io_output.getvalue()
self.assertIn("Total records: 0", output)
self.assertEqual('TBD', 'TBD') | Test send_output(self, data, cmd, titles=None, total=True, raw=False) | test_send_output | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_do_query(self):
"""
Test do_query(self, line)
"""
sfcli = SpiderFootCli()
sfcli.do_query(None)
self.assertEqual('TBD', 'TBD') | Test do_query(self, line) | test_do_query | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_do_ping(self):
"""
Test do_ping(self, line)
"""
sfcli = SpiderFootCli()
sfcli.do_ping(None)
self.assertEqual('TBD', 'TBD') | Test do_ping(self, line) | test_do_ping | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_do_modules(self):
"""
Test do_modules(self, line, cacheonly=False)
"""
sfcli = SpiderFootCli()
sfcli.do_modules(None, None)
self.assertEqual('TBD', 'TBD') | Test do_modules(self, line, cacheonly=False) | test_do_modules | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_do_types(self):
"""
Test do_types(self, line, cacheonly=False)
"""
sfcli = SpiderFootCli()
sfcli.do_types(None, None)
self.assertEqual('TBD', 'TBD') | Test do_types(self, line, cacheonly=False) | test_do_types | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_do_load(self):
"""
Test do_load(self, line)
"""
sfcli = SpiderFootCli()
sfcli.do_load(None)
self.assertEqual('TBD', 'TBD') | Test do_load(self, line) | test_do_load | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_do_scaninfo(self):
"""
Test do_scaninfo(self, line)
"""
sfcli = SpiderFootCli()
sfcli.do_scaninfo(None)
self.assertEqual('TBD', 'TBD') | Test do_scaninfo(self, line) | test_do_scaninfo | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_do_scans(self):
"""
Test do_scans(self, line)
"""
sfcli = SpiderFootCli()
sfcli.do_scans(None)
self.assertEqual('TBD', 'TBD') | Test do_scans(self, line) | test_do_scans | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_do_data(self):
"""
Test do_data(self, line)
"""
sfcli = SpiderFootCli()
sfcli.do_data(None)
self.assertEqual('TBD', 'TBD') | Test do_data(self, line) | test_do_data | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_do_export(self):
"""
Test do_export(self, line)
"""
sfcli = SpiderFootCli()
sfcli.do_export(None)
self.assertEqual('TBD', 'TBD') | Test do_export(self, line) | test_do_export | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_do_logs(self):
"""
Test do_logs(self, line)
"""
sfcli = SpiderFootCli()
sfcli.do_logs(None)
self.assertEqual('TBD', 'TBD') | Test do_logs(self, line) | test_do_logs | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |
def test_do_start(self):
"""
Test do_start(self, line)
"""
sfcli = SpiderFootCli()
sfcli.do_start(None)
self.assertEqual('TBD', 'TBD') | Test do_start(self, line) | test_do_start | python | smicallef/spiderfoot | test/unit/test_spiderfootcli.py | https://github.com/smicallef/spiderfoot/blob/master/test/unit/test_spiderfootcli.py | MIT |