Skip to content

Commit

Permalink
Flake8: Resolve ANN and SIM violations (smicallef#1662)
Browse files Browse the repository at this point in the history
  • Loading branch information
bcoles committed May 2, 2022
1 parent ca1048f commit bee7c49
Show file tree
Hide file tree
Showing 17 changed files with 431 additions and 346 deletions.
20 changes: 8 additions & 12 deletions modules/sfp_dnsbrute.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,21 +65,17 @@ def setup(self, sfc, userOpts=dict()):
self.opts[opt] = userOpts[opt]

dicts_dir = f"{self.sf.myPath()}/spiderfoot/dicts/"
cslines = list()
if self.opts['commons']:
cs = open(f"{dicts_dir}/subdomains.txt", 'r')
cslines = cs.readlines()
for s in cslines:
s = s.strip()
self.sublist[s] = True
with open(f"{dicts_dir}/subdomains.txt", 'r') as f:
for s in f.readlines():
s = s.strip()
self.sublist[s] = True

ttlines = list()
if self.opts['top10000']:
tt = open(f"{dicts_dir}/subdomains-10000.txt", 'r')
ttlines = tt.readlines()
for s in ttlines:
s = s.strip()
self.sublist[s] = True
with open(f"{dicts_dir}/subdomains-10000.txt", 'r') as f:
for s in f.readlines():
s = s.strip()
self.sublist[s] = True

# What events is this module interested in for input
def watchedEvents(self):
Expand Down
13 changes: 5 additions & 8 deletions modules/sfp_reversewhois.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@ class sfp_reversewhois(SpiderFootPlugin):
def setup(self, sfc, userOpts=dict()):
self.sf = sfc
self.results = self.tempStorage()
self.errorState = False

# Clear / reset any other class member variables here
# or you risk them persisting between threads.
Expand All @@ -67,14 +68,12 @@ def producedEvents(self):
def query(self, qry):
url = f"https://reversewhois.io?searchterm={qry}"

ret = ([], [])

res = self.sf.fetchUrl(url, timeout=self.opts.get("_fetchtimeout", 30))

if res["code"] not in ["200"]:
self.error("You may have exceeded ReverseWhois usage limits.")
self.errorState = True
return ret
return ([], [])

html = BeautifulSoup(res["content"], features="lxml")
date_regex = re.compile(r'\d{4}-\d{2}-\d{2}')
Expand All @@ -95,12 +94,10 @@ def query(self, qry):
self.debug(f"Invalid row {table_row}")
continue

ret = (list(domains), list(registrars))

if not registrars and not domains:
self.info(f"No ReverseWhois info found for {qry}")

return ret
return (list(domains), list(registrars))

# Handle events sent to this module
def handleEvent(self, event):
Expand All @@ -121,7 +118,7 @@ def handleEvent(self, event):

domains, registrars = self.query(eventData)

for domain in domains:
for domain in set(domains):
# if this domain isn't the main target
if not self.getTarget().matches(domain, includeChildren=False):
e = SpiderFootEvent("AFFILIATE_INTERNET_NAME", domain, self.__name__, event)
Expand All @@ -130,7 +127,7 @@ def handleEvent(self, event):
evt = SpiderFootEvent("AFFILIATE_DOMAIN_NAME", domain, self.__name__, event)
self.notifyListeners(evt)

for registrar in registrars:
for registrar in set(registrars):
e = SpiderFootEvent("DOMAIN_REGISTRAR", registrar, self.__name__, event)
self.notifyListeners(e)

Expand Down
4 changes: 1 addition & 3 deletions modules/sfp_s3bucket.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,10 +91,9 @@ def checkSite(self, url):
def threadSites(self, siteList):
self.s3results = dict()
running = True
i = 0
t = []

for site in siteList:
for i, site in enumerate(siteList):
if self.checkForStop():
return False

Expand All @@ -103,7 +102,6 @@ def threadSites(self, siteList):
t.append(threading.Thread(name='thread_sfp_s3buckets_' + tname,
target=self.checkSite, args=(site,)))
t[i].start()
i += 1

# Block until all threads are finished
while running:
Expand Down
4 changes: 1 addition & 3 deletions modules/sfp_similar.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,10 +141,8 @@ def handleEvent(self, event):
domlist.append(c + dom)

# Search for double character domains
pos = 0
for c in dom:
for pos, c in enumerate(dom):
domlist.append(dom[0:pos] + c + c + dom[(pos + 1):len(dom)])
pos += 1

for d in domlist:
try:
Expand Down
6 changes: 2 additions & 4 deletions modules/sfp_tldsearch.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,17 +95,15 @@ def tryTld(self, target, tld):
def tryTldWrapper(self, tldList, sourceEvent):
self.tldResults = dict()
running = True
i = 0
t = []

# Spawn threads for scanning
self.info("Spawning threads to check TLDs: " + str(tldList))
for pair in tldList:
self.info(f"Spawning threads to check TLDs: {tldList}")
for i, pair in enumerate(tldList):
(domain, tld) = pair
tn = 'thread_sfp_tldsearch_' + str(random.SystemRandom().randint(0, 999999999))
t.append(threading.Thread(name=tn, target=self.tryTld, args=(domain, tld,)))
t[i].start()
i += 1

# Block until all threads are finished
while running:
Expand Down
72 changes: 44 additions & 28 deletions modules/sfp_zetalytics.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,46 +132,62 @@ def generate_subdomains_events(self, data, pevent):
if self.verify_emit_internet_name(qname, pevent):
events_generated = True

return events_generated
return events_generated # noqa R504

def generate_hostname_events(self, data, pevent):
    """Emit INTERNET_NAME events for each unique hostname found in a
    Zetalytics API response.

    Args:
        data: parsed JSON API response; expected to be a dict containing
            a "results" list of records with a "qname" hostname field
        pevent: parent SpiderFootEvent that emitted events hang off

    Returns:
        bool: True if at least one event was generated
    """
    if not isinstance(data, dict):
        return False

    results = data.get("results")
    if not isinstance(results, list):
        return False

    # Collect into a set first so duplicate hostnames are only emitted once.
    hostnames = set()
    for r in results:
        qname = r.get("qname")
        # BUG FIX: the original tested isinstance("qname", str) — a check on
        # the string literal, which is always True — rather than the value.
        # Non-string qname values are now skipped instead of emitted.
        if isinstance(qname, str):
            hostnames.add(qname)

    events_generated = False
    for hostname in hostnames:
        if self.verify_emit_internet_name(hostname, pevent):
            events_generated = True

    return events_generated  # noqa R504

def generate_email_events(self, data, pevent):
    """Emit AFFILIATE_DOMAIN_NAME events for each domain found in a
    Zetalytics email-search API response.

    Args:
        data: parsed JSON API response; expected to be a dict containing
            a "results" list of records with a "d" domain field
        pevent: parent SpiderFootEvent that emitted events hang off

    Returns:
        bool: True if at least one event was generated
    """
    # Guard clauses replace the nested isinstance checks; the old nested
    # version and this one were interleaved in the diff — keep only this one.
    if not isinstance(data, dict):
        return False

    results = data.get("results")
    if not isinstance(results, list):
        return False

    events_generated = False
    for r in results:
        domain = r.get("d")
        if isinstance(domain, str):
            self.emit("AFFILIATE_DOMAIN_NAME", domain, pevent)
            events_generated = True

    return events_generated  # noqa R504

def generate_email_domain_events(self, data, pevent):
    """Emit AFFILIATE_DOMAIN_NAME events for each domain found in a
    Zetalytics email-domain-search API response.

    NOTE(review): this is currently identical to generate_email_events —
    presumably kept separate because the two API endpoints may diverge;
    confirm before merging them.

    Args:
        data: parsed JSON API response; expected to be a dict containing
            a "results" list of records with a "d" domain field
        pevent: parent SpiderFootEvent that emitted events hang off

    Returns:
        bool: True if at least one event was generated
    """
    # Guard clauses replace the nested isinstance checks; the old nested
    # version and this one were interleaved in the diff — keep only this one.
    if not isinstance(data, dict):
        return False

    results = data.get("results")
    if not isinstance(results, list):
        return False

    events_generated = False
    for r in results:
        domain = r.get("d")
        if isinstance(domain, str):
            self.emit("AFFILIATE_DOMAIN_NAME", domain, pevent)
            events_generated = True

    return events_generated  # noqa R504

def handleEvent(self, event):
eventName = event.eventType
Expand Down
14 changes: 5 additions & 9 deletions setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -5,29 +5,25 @@ docstring-convention = google
ignore-decorators = property
select = C,E,F,W,B,B9,DAR,DUO,R,A,S,Q0,SIM,SFS
# Note: ANN and SIM tests should be reviewed and fixed instead of ignored
ignore = E501 W503 B006 E800 B904 B950 SFS301 SF01 Q000 SIM102 SIM111 SIM113 SIM114 SIM115 I D ANN
extend-ignore = E501 W503 B006 E800 B950 SFS301 SF01 Q000 SIM102 SIM113 SIM114 I D ANN
# Note: most of these should be fixed instead of ignored
per-file-ignores =
spiderfoot/event.py:A
spiderfoot/event.py:A003
spiderfoot/plugin.py:B902
spiderfoot/db.py:SFS101,B902
sf.py:SFS201,B902
sflib.py:SFS101,SIM110,SIM111,B902
sflib.py:SFS101,SIM110,B902
sfscan.py:SIM105,B902
modules/*:B902
modules/*:SIM115,B902
modules/sfp_alienvault.py:B902,C901
modules/sfp_binaryedge.py:B902,C901
modules/sfp__stor_stdout.py:SFS201
modules/sfp_bitcoin.py:SFS101
modules/sfp_reversewhois.py:R504
modules/sfp_ripe.py:SIM110,B902
modules/sfp_zetalytics.py:R504,B902

spiderfoot/correlation.py:SIM110,B902
spiderfoot/__init__.py:F401
sfcli.py:DAR,B902
sfwebui.py:A001,A002,B902
test/*:SIM117,ANN
test/*:SIM117,B904,ANN
docs/conf.py:A

[darglint]
Expand Down
13 changes: 7 additions & 6 deletions sf.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@
dbh = None


def main():
def main() -> None:
# web server config
sfWebUiConfig = {
'host': '127.0.0.1',
Expand Down Expand Up @@ -233,7 +233,7 @@ def main():
start_scan(sfConfig, sfModules, args, loggingQueue)


def start_scan(sfConfig, sfModules, args, loggingQueue):
def start_scan(sfConfig: dict, sfModules: dict, args, loggingQueue) -> None:
"""Start scan
Args:
Expand Down Expand Up @@ -452,7 +452,7 @@ def start_scan(sfConfig, sfModules, args, loggingQueue):
return


def start_web_server(sfWebUiConfig, sfConfig, loggingQueue=None):
def start_web_server(sfWebUiConfig: dict, sfConfig: dict, loggingQueue=None) -> None:
"""Start the web server so you can start looking at results
Args:
Expand Down Expand Up @@ -497,9 +497,10 @@ def start_web_server(sfWebUiConfig, sfConfig, loggingQueue=None):
log.error("Could not read passwd file. Permission denied.")
sys.exit(-1)

pw = open(passwd_file, 'r')
with open(passwd_file, 'r') as f:
passwd_data = f.readlines()

for line in pw.readlines():
for line in passwd_data:
if line.strip() == '':
continue

Expand Down Expand Up @@ -581,7 +582,7 @@ def start_web_server(sfWebUiConfig, sfConfig, loggingQueue=None):
cherrypy.quickstart(SpiderFootWebUi(sfWebUiConfig, sfConfig, loggingQueue), script_name=web_root, config=conf)


def handle_abort(signal, frame):
def handle_abort(signal, frame) -> None:
"""Handle interrupt and abort scan.
Args:
Expand Down
22 changes: 8 additions & 14 deletions sfcli.py
Original file line number Diff line number Diff line change
Expand Up @@ -248,8 +248,7 @@ def pretty(self, data, titlemap=None):
spaces = 2
# Find the maximum column sizes
for r in data:
i = 0
for c in r:
for i, c in enumerate(r):
if type(r) == list:
# we have list index
cn = str(i)
Expand All @@ -264,7 +263,6 @@ def pretty(self, data, titlemap=None):
# print(str(cn) + ", " + str(c) + ", " + str(v))
if len(v) > maxsize.get(cn, 0):
maxsize[cn] = len(v)
i += 1

# Adjust for long titles
if titlemap:
Expand All @@ -273,8 +271,7 @@ def pretty(self, data, titlemap=None):
maxsize[c] = len(titlemap.get(c, c))

# Display the column titles
i = 0
for c in cols:
for i, c in enumerate(cols):
if titlemap:
t = titlemap.get(c, c)
else:
Expand All @@ -287,19 +284,16 @@ def pretty(self, data, titlemap=None):
if sdiff > 0 and i < len(cols) - 1:
# out += " " * sdiff
out.append(" " * sdiff)
i += 1
# out += "\n"
out.append('\n')

# Then the separator
i = 0
for c in cols:
for i, c in enumerate(cols):
# out += "-" * ((maxsize[c]+spaces))
out.append("-" * ((maxsize[c] + spaces)))
if i < len(cols) - 1:
# out += "+"
out.append("+")
i += 1
# out += "\n"
out.append("\n")

Expand Down Expand Up @@ -1364,9 +1358,10 @@ def do_EOF(self, line):
# Load commands from a file
if args.e:
try:
cin = open(args.e, "r")
with open(args.e, 'r') as f:
cin = f.read()
except BaseException as e:
print("Unable to open " + args.e + ":" + " (" + str(e) + ")")
print(f"Unable to open {args.e}: ({e})")
sys.exit(-1)
else:
cin = sys.stdin
Expand All @@ -1380,9 +1375,8 @@ def do_EOF(self, line):
s.ownopts['cli.password'] = args.p
if args.P:
try:
pf = open(args.P, "r")
s.ownopts['cli.password'] = pf.readlines()[0].strip('\n')
pf.close()
with open(args.P, 'r') as f:
s.ownopts['cli.password'] = f.readlines()[0].strip('\n')
except BaseException as e:
print(f"Unable to open {args.P}: ({e})")
sys.exit(-1)
Expand Down
Loading

0 comments on commit bee7c49

Please sign in to comment.