-
Notifications
You must be signed in to change notification settings - Fork 0
/
BulkHeadersRetriever.py
105 lines (81 loc) · 3.75 KB
/
BulkHeadersRetriever.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
import argparse
import pandas as pd
import requests
import json
from urllib.parse import urlparse
import sys
# Disable all requests and urllib3 warnings
# (silences InsecureRequestWarning spam when --disable-ssl-verify is used;
#  side effect is global for the whole process)
requests.packages.urllib3.disable_warnings()
# Function to retrieve and return security response headers
def get_security_response_headers(url, disable_ssl_verify=False, timeout=10):
    """Fetch *url* (following redirects) and collect its security headers.

    Args:
        url: Target website; "https://" is prepended when no scheme is given.
        disable_ssl_verify: When True, skip SSL certificate verification.
        timeout: Seconds to wait for connect/read before aborting the request.
            Defaulted, so existing callers are unaffected. Without it a single
            unresponsive host would hang the whole bulk run indefinitely.

    Returns:
        (final_url, headers_dict, status_code). On a request failure the
        original url is returned together with {"Error": <message>} and None.
    """
    try:
        # Check if the URL lacks a scheme (http or https) and add "https://" if needed
        parsed_url = urlparse(url)
        if not parsed_url.scheme:
            url = "https://" + url
        # Browser-like user agent so servers don't reject the probe outright
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36"
        }
        # Determine whether to verify SSL certificates
        verify_ssl = not disable_ssl_verify
        # timeout bounds both the connect and the read phase of the request
        response = requests.get(url, allow_redirects=True, headers=headers,
                                verify=verify_ssl, timeout=timeout)
        final_url = response.url
        status_code = response.status_code
        # Print the status code
        print(f"URL: {final_url}, Status Code: {status_code}")
        # Specify the list of security headers to retrieve
        security_headers = [
            "X-Content-Type-Options",
            "X-Frame-Options",
            "Strict-Transport-Security",
            "Content-Security-Policy",
            "Referrer-Policy",
            "Permissions-Policy",
        ]
        # Capture security response headers regardless of the status code
        headers_dict = {header: response.headers.get(header, "HEADER_NOT_SET")
                        for header in security_headers}
        return final_url, headers_dict, status_code
    except requests.exceptions.RequestException as e:
        # Timeouts, DNS failures, SSL errors, etc. all end up here
        return url, {"Error": str(e)}, None
def save_to_csv(data, output_file):
    """Write *data* (a list of per-site header dicts) to *output_file* as CSV,
    one row per dict, without the DataFrame index column."""
    pd.DataFrame(data).to_csv(output_file, index=False)
def save_to_json(data, output_file):
    """Serialize *data* to *output_file* as 4-space-indented JSON."""
    serialized = json.dumps(data, indent=4)
    with open(output_file, 'w') as sink:
        sink.write(serialized)
def main():
    """CLI entry point: read one URL per line from the input file, probe each
    for security headers, and save the results as CSV or JSON."""
    parser = argparse.ArgumentParser(description="Retrieve security headers and save to a file")
    parser.add_argument("input_file", help="Path to the input text file containing one website URL per line")
    parser.add_argument("output_file", help="Path to the output file where headers will be saved")
    parser.add_argument("--format", choices=["csv", "json"], default="csv", help="Output format (csv or json, default is csv)")
    parser.add_argument("--disable-ssl-verify", action="store_true", help="Disable SSL certificate verification")
    args = parser.parse_args()
    input_file = args.input_file
    output_file = args.output_file
    output_format = args.format
    disable_ssl_verify = args.disable_ssl_verify
    try:
        with open(input_file, 'r') as file:
            # Strip and drop blank lines: a trailing newline in the input file
            # previously sent "" through, producing a bogus request to "https://".
            websites = [line.strip() for line in file if line.strip()]
        all_headers = []
        total = len(websites)
        for index, website in enumerate(websites, start=1):
            original_url = website
            modified_url, headers, status_code = get_security_response_headers(website, disable_ssl_verify)
            entry = {"Original URL": original_url, "Modified URL": modified_url, **headers, "Status Code": status_code}
            all_headers.append(entry)
            # Real-time progress counter; the previous static message never
            # changed between iterations, so it gave no feedback at all.
            sys.stdout.write(f"\rRetrieving security headers... {index}/{total}")
            sys.stdout.flush()
        print("\nSecurity headers retrieved. Saving to file...")
        if output_format == "csv":
            save_to_csv(all_headers, output_file)
        elif output_format == "json":
            save_to_json(all_headers, output_file)
        print(f"Security headers saved to '{output_file}'")
    except FileNotFoundError:
        print(f"File '{input_file}' not found.")
    except Exception as e:
        # Broad boundary handler: report and exit cleanly rather than traceback
        print(f"An error occurred: {e}")
# Run the CLI only when executed as a script, not on import
if __name__ == "__main__":
    main()