chore: make quotes in repo consistent #1212

Merged: 1 commit, Dec 11, 2021
sherlock/sherlock.py: 68 changes (34 additions, 34 deletions)
@@ -74,19 +74,19 @@ def response_time(resp, *args, **kwargs):
         # Make sure that the time measurement hook is first, so we will not
         # track any later hook's execution time.
         try:
-            if isinstance(hooks['response'], list):
-                hooks['response'].insert(0, response_time)
-            elif isinstance(hooks['response'], tuple):
+            if isinstance(hooks["response"], list):
+                hooks["response"].insert(0, response_time)
+            elif isinstance(hooks["response"], tuple):
                 # Convert tuple to list and insert time measurement hook first.
-                hooks['response'] = list(hooks['response'])
-                hooks['response'].insert(0, response_time)
+                hooks["response"] = list(hooks["response"])
+                hooks["response"].insert(0, response_time)
             else:
                 # Must have previously contained a single hook function,
                 # so convert to list.
-                hooks['response'] = [response_time, hooks['response']]
+                hooks["response"] = [response_time, hooks["response"]]
         except KeyError:
             # No response hook was already defined, so install it ourselves.
-            hooks['response'] = [response_time]
+            hooks["response"] = [response_time]

         return super(SherlockFuturesSession, self).request(method,
                                                            url,
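Note (not part of the diff): requests accepts response hooks as a bare callable, a list, or a tuple, which is why the hunk above normalizes all three shapes before inserting the timing hook at index 0 so it runs ahead of any other hook. A minimal sketch of the same pattern, with an illustrative hook name and URL:

import requests

def log_elapsed(resp, *args, **kwargs):
    # Runs before any later hooks, so their execution time is not counted.
    print(f"{resp.url} answered in {resp.elapsed.total_seconds():.2f}s")

requests.get("https://example.com", hooks={"response": [log_elapsed]})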
@@ -209,12 +209,12 @@ def sherlock(username, site_data, query_notify,
         results_site = {}

         # Record URL of main site
-        results_site['url_main'] = net_info.get("urlMain")
+        results_site["url_main"] = net_info.get("urlMain")

         # A user agent is needed because some sites don't return the correct
         # information since they think that we are bots (Which we actually are...)
         headers = {
-            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:55.0) Gecko/20100101 Firefox/55.0',
+            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:55.0) Gecko/20100101 Firefox/55.0",
         }

         if "headers" in net_info:
@@ -228,14 +228,14 @@ def sherlock(username, site_data, query_notify,
         regex_check = net_info.get("regexCheck")
         if regex_check and re.search(regex_check, username) is None:
             # No need to do the check at the site: this user name is not allowed.
-            results_site['status'] = QueryResult(username,
+            results_site["status"] = QueryResult(username,
                                                  social_network,
                                                  url,
                                                  QueryStatus.ILLEGAL)
             results_site["url_user"] = ""
-            results_site['http_status'] = ""
-            results_site['response_text'] = ""
-            query_notify.update(results_site['status'])
+            results_site["http_status"] = ""
+            results_site["response_text"] = ""
+            query_notify.update(results_site["status"])
         else:
             # URL of user on site (if it exists)
             results_site["url_user"] = url
@@ -268,7 +268,7 @@ def sherlock(username, site_data, query_notify,
                 url_probe = interpolate_string(url_probe, username)

             if request is None:
-                if net_info["errorType"] == 'status_code':
+                if net_info["errorType"] == "status_code":
                     # In most cases when we are detecting by status code,
                     # it is not necessary to get the entire body: we can
                     # detect fine with just the HEAD response.
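Note (not part of the diff): the status_code error type can be checked with a HEAD request because HEAD returns the same status line as GET without the body. A hedged sketch of the idea, with a placeholder session and profile URL:

import requests

session = requests.Session()
url = "https://example.com/someuser"  # placeholder profile URL

# HEAD carries no body, so it is cheaper when only the status code matters.
resp = session.head(url, allow_redirects=False)
print(resp.status_code)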
@@ -436,11 +436,11 @@ def sherlock(username, site_data, query_notify,
             query_notify.update(result)

             # Save status of request
-            results_site['status'] = result
+            results_site["status"] = result

             # Save results from request
-            results_site['http_status'] = http_status
-            results_site['response_text'] = response_text
+            results_site["http_status"] = http_status
+            results_site["response_text"] = response_text

             # Add this site's results into final dictionary with all of the other results.
             results_total[social_network] = results_site
@@ -510,19 +510,19 @@ def main():
help="Create Comma-Separated Values (CSV) File."
)
parser.add_argument("--site",
action="append", metavar='SITE_NAME',
action="append", metavar="SITE_NAME",
dest="site_list", default=None,
help="Limit analysis to just the listed sites. Add multiple options to specify more than one site."
)
parser.add_argument("--proxy", "-p", metavar='PROXY_URL',
parser.add_argument("--proxy", "-p", metavar="PROXY_URL",
action="store", dest="proxy", default=None,
help="Make requests over a proxy. e.g. socks5://127.0.0.1:1080"
)
parser.add_argument("--json", "-j", metavar="JSON_FILE",
dest="json_file", default=None,
help="Load data from a JSON file or an online, valid, JSON file.")
parser.add_argument("--timeout",
action="store", metavar='TIMEOUT',
action="store", metavar="TIMEOUT",
dest="timeout", type=timeout_check, default=None,
help="Time (in seconds) to wait for response to requests. "
"Default timeout is infinity. "
@@ -542,7 +542,7 @@ def main():
help="Don't color terminal output"
)
parser.add_argument("username",
nargs='+', metavar='USERNAMES',
nargs="+", metavar="USERNAMES",
action="store",
help="One or more usernames to check with social networks."
)
@@ -598,7 +598,7 @@ def main():
     # Create object with all information about sites we are aware of.
     try:
         if args.local:
-            sites = SitesInformation(os.path.join(os.path.dirname(__file__), 'resources/data.json'))
+            sites = SitesInformation(os.path.join(os.path.dirname(__file__), "resources/data.json"))
         else:
             sites = SitesInformation(args.json_file)
     except Exception as error:
@@ -682,25 +682,25 @@ def main():

             with open(result_file, "w", newline='', encoding="utf-8") as csv_report:
                 writer = csv.writer(csv_report)
-                writer.writerow(['username',
-                                 'name',
-                                 'url_main',
-                                 'url_user',
-                                 'exists',
-                                 'http_status',
-                                 'response_time_s'
+                writer.writerow(["username",
+                                 "name",
+                                 "url_main",
+                                 "url_user",
+                                 "exists",
+                                 "http_status",
+                                 "response_time_s"
                                  ]
                                 )
                 for site in results:
-                    response_time_s = results[site]['status'].query_time
+                    response_time_s = results[site]["status"].query_time
                     if response_time_s is None:
                         response_time_s = ""
                     writer.writerow([username,
                                      site,
-                                     results[site]['url_main'],
-                                     results[site]['url_user'],
-                                     str(results[site]['status'].status),
-                                     results[site]['http_status'],
+                                     results[site]["url_main"],
+                                     results[site]["url_user"],
+                                     str(results[site]["status"].status),
+                                     results[site]["http_status"],
                                      response_time_s
                                      ]
                                     )
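Note (not part of the diff): newline='' on the unchanged context line above keeps its single quotes, since the PR only rewrites the lines it touches. The csv module documentation recommends opening files with newline='' so csv.writer controls line endings itself. A minimal standalone sketch with illustrative filenames and values:

import csv

with open("report.csv", "w", newline="", encoding="utf-8") as f:
    writer = csv.writer(f)
    writer.writerow(["username", "name", "url_main"])
    writer.writerow(["alice", "ExampleSite", "https://example.com"])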
sherlock/tests/all.py: 12 changes (6 additions, 6 deletions)
@@ -21,7 +21,7 @@ def test_detect_true_via_message(self):
         Will trigger an assert if detection mechanism did not work as expected.
         """

-        site = 'BinarySearch'
+        site = "BinarySearch"
         site_data = self.site_data_all[site]

         #Ensure that the site's detection method has not changed.
@@ -48,7 +48,7 @@ def test_detect_false_via_message(self):
         Will trigger an assert if detection mechanism did not work as expected.
         """

-        site = 'BinarySearch'
+        site = "BinarySearch"
         site_data = self.site_data_all[site]

         #Ensure that the site's detection method has not changed.
@@ -75,7 +75,7 @@ def test_detect_true_via_status_code(self):
         Will trigger an assert if detection mechanism did not work as expected.
         """

-        site = 'Pinterest'
+        site = "Pinterest"
         site_data = self.site_data_all[site]

         #Ensure that the site's detection method has not changed.
@@ -102,7 +102,7 @@ def test_detect_false_via_status_code(self):
         Will trigger an assert if detection mechanism did not work as expected.
         """

-        site = 'Pinterest'
+        site = "Pinterest"
         site_data = self.site_data_all[site]

         #Ensure that the site's detection method has not changed.
@@ -129,7 +129,7 @@ def test_detect_true_via_response_url(self):
         Will trigger an assert if detection mechanism did not work as expected.
         """

-        site = 'VK'
+        site = "VK"
         site_data = self.site_data_all[site]

         #Ensure that the site's detection method has not changed.
@@ -156,7 +156,7 @@ def test_detect_false_via_response_url(self):
         Will trigger an assert if detection mechanism did not work as expected.
         """

-        site = 'VK'
+        site = "VK"
         site_data = self.site_data_all[site]

         #Ensure that the site's detection method has not changed.
sherlock/tests/base.py: 6 changes (3 additions, 3 deletions)
@@ -68,7 +68,7 @@ def site_data_filter(self, site_list):
         should be filtered.

         Return Value:
-        Dictionary containing sub-set of site data specified by 'site_list'.
+        Dictionary containing sub-set of site data specified by "site_list".
         """

         # Create new dictionary that has filtered site data based on input.
@@ -126,7 +126,7 @@ def username_check(self, username_list, site_list, exist_check=True):
                 ):
                     if (
                         (self.skip_error_sites == True) and
-                        (result['status'].status == QueryStatus.UNKNOWN)
+                        (result["status"].status == QueryStatus.UNKNOWN)
                     ):
                         #Some error connecting to site.
                         self.skipTest(f"Skipping Username '{username}' "
@@ -135,7 +135,7 @@ def username_check(self, username_list, site_list, exist_check=True):
                                       )

                 self.assertEqual(exist_result_desired,
-                                 result['status'].status)
+                                 result["status"].status)

         return

site_list.py: 6 changes (3 additions, 3 deletions)
@@ -12,19 +12,19 @@

with open("sites.md", "w") as site_file:
data_length = len(data)
site_file.write(f'## List Of Supported Sites ({data_length} Sites In Total!)\n')
site_file.write(f"## List Of Supported Sites ({data_length} Sites In Total!)\n")

for social_network in data:
url_main = data.get(social_network).get("urlMain")
pool.append((social_network, url_main))

for social_network, url_main in pool:
site_file.write(f'1. [{social_network}]({url_main})\n')
site_file.write(f"1. [{social_network}]({url_main})\n")

sorted_json_data = json.dumps(data, indent=2, sort_keys=True)

with open("sherlock/resources/data.json", "w") as data_file:
data_file.write(sorted_json_data)
data_file.write('\n')
data_file.write("\n")

print("Finished updating supported site listing!")