
Remove incorrectly uploaded files and correct them

Jose Rodriguez Rios 4 weeks ago
parent commit a0ff94ed2c

Scanner/networkdiscovery.py → Scanner/Host_Discovery/networkdiscovery.py


Scanner/portscanner.py → Scanner/Host_Discovery/portscanner.py


+161 -0  Scanner/Vunerability_Scanner/createTargets.py

@@ -0,0 +1,161 @@
+import csv
+import os
+from gvm.connections import TLSConnection
+from gvm.protocols.gmp import Gmp
+from gvm.transforms import EtreeTransform
+
+# Set up data directory
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+DATA_DIR = os.path.abspath(os.path.join(BASE_DIR, "..", "data"))
+os.makedirs(DATA_DIR, exist_ok=True)
+
+
+# OpenVAS Configuration
+OPENVAS_HOST = "localhost"
+OPENVAS_PORT = 9390
+USERNAME = "admin"
+PASSWORD = "admin"
+
+
+ACTIVE_HOSTS_CSV = os.path.join(DATA_DIR, "active_hosts.csv")
+TARGET_ID_CSV = os.path.join(DATA_DIR, "target_id.csv")
+
+
+def get_port_list_id(gmp):
+    """
+    Retrieve a valid port list ID.
+    """
+    port_lists = gmp.get_port_lists()
+    default_port_list_id = None
+    for port_list in port_lists.findall("port_list"):
+        name = port_list.find("name").text
+        port_list_id = port_list.get("id")
+        print(f"Port List: {name} (ID: {port_list_id})")
+        if "OpenVAS Default" in name:
+            default_port_list_id = port_list_id
+
+    # If "OpenVAS Default" is not found, use the first available port list
+    if not default_port_list_id and len(port_lists.findall("port_list")) > 0:
+        default_port_list_id = port_lists.find("port_list").get("id")
+        print(f"'OpenVAS Default' not found. Using the first available port list with ID: {default_port_list_id}")
+
+    return default_port_list_id
+
+
+def target_exists(gmp, ip):
+    """
+    Check if a target already exists for the given IP and return its target_id if it does.
+    """
+    targets = gmp.get_targets()
+    for target in targets.findall("target"):
+        if target.find("hosts").text == ip:
+            target_id = target.get("id")
+            print(f"Target for IP {ip} already exists with ID: {target_id}")
+            return target_id
+    return None
+
+
+def create_target(gmp, name, ip, port_list_id):
+    """
+    Create a target in OpenVAS.
+    """
+    response = gmp.create_target(name=name, hosts=[ip], port_list_id=port_list_id)
+    target_id = response.get("id")
+    if target_id:
+        print(f"Created target '{name}' for IP '{ip}' with ID: {target_id}")
+    else:
+        print(f"Failed to create target for IP {ip}. Check logs.")
+    return target_id
+
+
+def save_target_id_to_csv(ip, target_id):
+    """
+    Save the target ID to the target_id.csv file if it's not already there.
+    """
+    existing_data = []
+    try:
+        # Read existing data from the CSV
+        with open(TARGET_ID_CSV, "r", newline="") as csvfile:
+            reader = csv.reader(csvfile)
+            existing_data = list(reader)
+    except FileNotFoundError:
+        # If the file doesn't exist, it will be created later
+        pass
+
+    # Check if the IP is already in the CSV
+    for row in existing_data:
+        if row[0] == ip:
+            print(f"IP {ip} already recorded in {TARGET_ID_CSV}")
+            return
+
+    # Append the new target ID to the CSV
+    with open(TARGET_ID_CSV, "a", newline="") as csvfile:
+        writer = csv.writer(csvfile)
+        writer.writerow([ip, target_id])
+    print(f"Saved target ID {target_id} for IP {ip} to {TARGET_ID_CSV}")
+
+
+def read_csv_to_ip_list(csv_file):
+    """
+    Read the CSV file and extract IPs into a list.
+    """
+    ip_list = []
+    with open(csv_file, newline="") as csvfile:
+        reader = csv.DictReader(csvfile)
+        for row in reader:
+            ip_list.append(row["IP"])
+    return ip_list
+
+
+def main():
+    connection = TLSConnection(hostname=OPENVAS_HOST, port=OPENVAS_PORT)
+    with Gmp(connection=connection, transform=EtreeTransform()) as gmp:
+        # Authenticate with OpenVAS
+        gmp.authenticate(username=USERNAME, password=PASSWORD)
+        print("Authenticated with OpenVAS")
+
+        # Get a valid port list ID
+        port_list_id = get_port_list_id(gmp)
+        if not port_list_id:
+            print("Failed to retrieve a valid port list. Exiting.")
+            return
+
+        # Read IPs from the active hosts CSV
+        ip_list = read_csv_to_ip_list(ACTIVE_HOSTS_CSV)
+        if not ip_list:
+            print("No IPs found in the CSV file. Exiting.")
+            return
+
+        print(f"Found {len(ip_list)} active hosts.")
+
+        # Process each IP
+        for ip in ip_list:
+            print(f"Processing IP: {ip}")
+
+            # Check if target already exists
+            target_id = target_exists(gmp, ip)
+            if target_id:
+                # Save the existing target ID to the CSV if not already recorded
+                save_target_id_to_csv(ip, target_id)
+                continue
+
+            # Create target if it doesn't exist
+            target_name = f"Target for {ip}"
+            target_id = create_target(gmp, target_name, ip, port_list_id)
+            if target_id:
+                save_target_id_to_csv(ip, target_id)
+            else:
+                print(f"Failed to create target for IP {ip}. Continuing.")
+
+
+if __name__ == "__main__":
+    # Ensure the target_id.csv file exists with headers
+    try:
+        with open(TARGET_ID_CSV, "x", newline="") as csvfile:
+            writer = csv.writer(csvfile)
+            writer.writerow(["IP", "Target ID"])  # Write headers if the file doesn't exist
+    except FileExistsError:
+        pass  # File already exists, no need to create
+
+    main()
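createTargets.py reads data/active_hosts.csv through read_csv_to_ip_list(), which expects a header row containing an IP column. A minimal sketch of a compatible input file follows; it assumes the host-discovery stage has not yet written one, and the addresses are placeholders rather than real scan output:

import csv
import os

# Placeholder input for createTargets.py: a CSV with an "IP" header column.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
DATA_DIR = os.path.abspath(os.path.join(BASE_DIR, "..", "data"))
os.makedirs(DATA_DIR, exist_ok=True)

sample_hosts = ["192.168.1.10", "192.168.1.20"]  # illustrative addresses only

with open(os.path.join(DATA_DIR, "active_hosts.csv"), "w", newline="") as f:
    writer = csv.writer(f)
    writer.writerow(["IP"])  # header expected by read_csv_to_ip_list()
    writer.writerows([[ip] for ip in sample_hosts])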

+73 -0  Scanner/Vunerability_Scanner/generate_reports copy.py

@@ -0,0 +1,73 @@
+# generate_reports copy.py: alternative export path that shells out to gvm-script
+# instead of talking to GMP through python-gvm directly.
+import csv
+import subprocess
+import os
+from datetime import datetime
+
+REPORT_IDS_CSV = "data/report_ids.csv"
+CONSOLIDATED_CSV = "data/openvasscan.csv"
+TEMP_DIR = "temp_reports"
+
+def append_to_consolidated(temp_csv):
+    if not os.path.exists(temp_csv):
+        print(f"Warning: File {temp_csv} not found, skipping")
+        return
+
+    file_exists = os.path.isfile(CONSOLIDATED_CSV)
+
+    with open(temp_csv, 'r') as infile, open(CONSOLIDATED_CSV, 'a', newline='') as outfile:
+        reader = csv.reader(infile)
+        writer = csv.writer(outfile)
+
+        # Keep the header row only the first time the consolidated file is written
+        if file_exists:
+            next(reader, None)
+
+        for row in reader:
+            writer.writerow(row)
+
+def main():
+    os.makedirs(TEMP_DIR, exist_ok=True)
+
+    with open(REPORT_IDS_CSV, 'r') as f:
+        reader = csv.DictReader(f)
+        for row in reader:
+            report_id = row['report_id']
+            timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
+            base_name = f"report_{report_id}_{timestamp}"
+            temp_csv = os.path.join(TEMP_DIR, base_name)
+
+            cmd = [
+                'gvm-script',
+                '--gmp-username', 'admin',
+                '--gmp-password', 'admin',
+                'tls', '--hostname', '127.0.0.1', '--port', '9390',
+                'export-csv-report.gmp.py',
+                report_id,
+                temp_csv
+            ]
+
+            try:
+                result = subprocess.run(
+                    cmd,
+                    check=True,
+                    capture_output=True,
+                    text=True
+                )
+                print(f"Command output: {result.stdout}")
+
+                final_path = f"{temp_csv}.csv"
+
+                if os.path.exists(final_path):
+                    append_to_consolidated(final_path)
+                    os.remove(final_path)
+                    print(f"Processed {final_path}")
+                else:
+                    print(f"Error: Expected output file {final_path} not found")
+
+            except subprocess.CalledProcessError as e:
+                print(f"Error generating report {report_id}: {e.stderr}")
+
+    print(f"Consolidation complete. Final file: {CONSOLIDATED_CSV}")
+
+if __name__ == "__main__":
+    main()

+75 -0  Scanner/Vunerability_Scanner/generate_reports.py

@@ -0,0 +1,75 @@
+import os
+import csv
+import base64
+from gvm.connections import TLSConnection
+from gvm.protocols.gmp import Gmp
+from gvm.transforms import EtreeTransform
+
+# Set up data directory
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+DATA_DIR = os.path.abspath(os.path.join(BASE_DIR, "..", "data"))
+os.makedirs(DATA_DIR, exist_ok=True)
+
+REPORT_ID_CSV = os.path.join(DATA_DIR, "report_id.csv")
+OPENVAS_SCAN_CSV = os.path.join(DATA_DIR, "openvasscan.csv")
+
+OPENVAS_HOST = "localhost"
+OPENVAS_PORT = 9390
+USERNAME = "admin"
+PASSWORD = "admin"
+
+CSV_FORMAT_ID = "c1645568-627a-11e3-a660-406186ea4fc5"
+
+def get_report_csv_data(gmp, report_id):
+    report = gmp.get_report(report_id=report_id, report_format_id=CSV_FORMAT_ID)
+    base64_data = report.find(".//report_format").tail
+    if base64_data:
+        try:
+            decoded_data = base64.b64decode(base64_data).decode("utf-8")
+            return decoded_data
+        except Exception as e:
+            print(f"[✗] Error decoding report {report_id}: {e}")
+            return None
+    else:
+        print(f"[!] No data found in report {report_id}")
+        return None
+
+def save_individual_report(report_id, content):
+    report_path = os.path.join(DATA_DIR, f"report_{report_id}.csv")
+    with open(report_path, "w", encoding="utf-8") as f:
+        f.write(content)
+    print(f"[✓] Saved individual report to {report_path}")
+
+def append_to_aggregate(content, is_first=False):
+    mode = "w" if is_first else "a"
+    with open(OPENVAS_SCAN_CSV, mode, encoding="utf-8") as f:
+        if not is_first:
+            # Skip the header row so it is not duplicated in the aggregate file
+            content = "\n".join(content.splitlines()[1:])
+        f.write(content + "\n")
+
+def main():
+    connection = TLSConnection(hostname=OPENVAS_HOST, port=OPENVAS_PORT)
+    with Gmp(connection=connection, transform=EtreeTransform()) as gmp:
+        gmp.authenticate(username=USERNAME, password=PASSWORD)
+        print("Authenticated with OpenVAS")
+
+        first = True
+        try:
+            with open(REPORT_ID_CSV, newline="") as f:
+                reader = csv.DictReader(f)
+                for row in reader:
+                    report_id = row["Report ID"]
+                    print(f"Fetching report: {report_id}")
+                    content = get_report_csv_data(gmp, report_id)
+                    if content:
+                        # save_individual_report(report_id, content)
+                        append_to_aggregate(content, is_first=first)
+                        first = False
+        except FileNotFoundError:
+            print(f"[✗] Missing file: {REPORT_ID_CSV}")
+        except KeyError:
+            print(f"[✗] 'Report ID' column not found in {REPORT_ID_CSV}")
+
+if __name__ == "__main__":
+    main()

+73 -0  Scanner/Vunerability_Scanner/getreports.py

@@ -0,0 +1,73 @@
+import csv
+import time
+import os
+from gvm.connections import TLSConnection
+from gvm.protocols.gmp import Gmp
+from gvm.transforms import EtreeTransform
+
+# Set up data directory
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+DATA_DIR = os.path.abspath(os.path.join(BASE_DIR, "..", "data"))
+os.makedirs(DATA_DIR, exist_ok=True)
+
+TASK_ID_CSV = os.path.join(DATA_DIR, "task_id.csv")
+REPORT_ID_CSV = os.path.join(DATA_DIR, "report_id.csv")
+
+OPENVAS_HOST = "localhost"
+OPENVAS_PORT = 9390
+USERNAME = "admin"
+PASSWORD = "admin"
+
+def wait_for_task_and_get_report(gmp, task_id):
+    while True:
+        response = gmp.get_task(task_id=task_id)
+        task = response.find("task")
+
+        if task is None:
+            print(f"Task with ID {task_id} not found. Skipping.")
+            return None
+
+        status = task.find("status").text
+        progress = task.find("progress").text
+        print(f"Task {task_id} - Status: {status}, Progress: {progress}%")
+
+        if status == "Done":
+            report_elem = task.find("last_report/report")
+            if report_elem is not None:
+                report_id = report_elem.get("id")
+                print(f"[✓] Task {task_id} finished. Report ID: {report_id}")
+                return report_id
+            else:
+                print(f"[✗] Task {task_id} is done, but no report found.")
+                return None
+        time.sleep(10)
+
+def main():
+    connection = TLSConnection(hostname=OPENVAS_HOST, port=OPENVAS_PORT)
+    with Gmp(connection=connection, transform=EtreeTransform()) as gmp:
+        gmp.authenticate(username=USERNAME, password=PASSWORD)
+        print("Authenticated with OpenVAS")
+
+        try:
+            with open(TASK_ID_CSV, newline="") as csvfile:
+                reader = csv.DictReader(csvfile)
+                task_ids = [row["Task ID"] for row in reader]
+
+            with open(REPORT_ID_CSV, "w", newline="") as outfile:
+                writer = csv.writer(outfile)
+                writer.writerow(["Task ID", "Report ID"])  # Header
+
+                for task_id in task_ids:
+                    print(f"Waiting for report from Task ID: {task_id}")
+                    report_id = wait_for_task_and_get_report(gmp, task_id)
+                    if report_id:
+                        writer.writerow([task_id, report_id])
+        except FileNotFoundError:
+            print(f"File not found: {TASK_ID_CSV}. Please run taskmaker.py first.")
+        except KeyError:
+            print("Error: CSV must contain a 'Task ID' column.")
+
+if __name__ == "__main__":
+    main()

+67 -0  Scanner/Vunerability_Scanner/starttask.py

@@ -0,0 +1,67 @@
+import csv
+import os
+from gvm.connections import TLSConnection
+from gvm.protocols.gmp import Gmp
+from gvm.transforms import EtreeTransform
+
+# Set up data directory
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+DATA_DIR = os.path.abspath(os.path.join(BASE_DIR, "..", "data"))
+os.makedirs(DATA_DIR, exist_ok=True)
+
+# OpenVAS Configuration
+OPENVAS_HOST = "localhost"
+OPENVAS_PORT = 9390
+USERNAME = "admin"
+PASSWORD = "admin"
+
+# CSV file
+TASK_ID_CSV = os.path.join(DATA_DIR, "task_id.csv")
+
+
+def start_task(gmp, task_id):
+    """
+    Start a task in OpenVAS.
+    """
+    try:
+        gmp.start_task(task_id=task_id)
+        print(f"Started task with ID: {task_id}")
+    except Exception as e:
+        print(f"Failed to start task with ID {task_id}. Error: {e}")
+
+
+def read_csv_to_task_list(csv_file):
+    """
+    Read the CSV file and extract task IDs into a list.
+    """
+    task_list = []
+    with open(csv_file, newline="") as csvfile:
+        reader = csv.DictReader(csvfile)
+        for row in reader:
+            task_list.append(row["Task ID"])
+    return task_list
+
+
+def main():
+    connection = TLSConnection(hostname=OPENVAS_HOST, port=OPENVAS_PORT)
+    with Gmp(connection=connection, transform=EtreeTransform()) as gmp:
+        # Authenticate with OpenVAS
+        gmp.authenticate(username=USERNAME, password=PASSWORD)
+        print("Authenticated with OpenVAS")
+
+        # Read task IDs from the task_id.csv file
+        task_list = read_csv_to_task_list(TASK_ID_CSV)
+        if not task_list:
+            print("No task IDs found in the CSV file. Exiting.")
+            return
+
+        print(f"Found {len(task_list)} tasks to start.")
+
+        # Start each task
+        for task_id in task_list:
+            start_task(gmp, task_id)
+
+
+if __name__ == "__main__":
+    main()

+140 -0  Scanner/Vunerability_Scanner/taskmaker.py

@@ -0,0 +1,140 @@
+import csv
+import os
+from gvm.connections import TLSConnection
+from gvm.protocols.gmp import Gmp
+from gvm.transforms import EtreeTransform
+
+# OpenVAS Configuration
+OPENVAS_HOST = "localhost"
+OPENVAS_PORT = 9390
+USERNAME = "admin"
+PASSWORD = "admin"
+
+# Set up data directory
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+DATA_DIR = os.path.abspath(os.path.join(BASE_DIR, "..", "data"))
+os.makedirs(DATA_DIR, exist_ok=True)
+
+# CSV files
+TARGET_ID_CSV = os.path.join(DATA_DIR, "target_id.csv")
+TASK_ID_CSV = os.path.join(DATA_DIR, "task_id.csv")
+
+
+def get_scan_config_id(gmp):
+    """
+    Retrieve the ID of the 'Full and fast' scan configuration.
+    """
+    scan_configs = gmp.get_scan_configs()
+    for config in scan_configs.findall("config"):
+        if config.find("name").text == "Full and fast":
+            config_id = config.get("id")
+            print(f"'Full and fast' scan configuration found with ID: {config_id}")
+            return config_id
+    print("Failed to find 'Full and fast' scan configuration.")
+    return None
+
+
+def get_scanner_id(gmp):
+    """
+    Retrieve the ID of the default scanner.
+    """
+    scanners = gmp.get_scanners()
+    for scanner in scanners.findall("scanner"):
+        if "OpenVAS Default" in scanner.find("name").text:
+            scanner_id = scanner.get("id")
+            print(f"'OpenVAS Default' scanner found with ID: {scanner_id}")
+            return scanner_id
+    print("Failed to find 'OpenVAS Default' scanner.")
+    return None
+
+
+def read_csv_to_target_list(csv_file):
+    """
+    Read the CSV file and extract target IDs into a list.
+    """
+    target_list = []
+    with open(csv_file, newline="") as csvfile:
+        reader = csv.DictReader(csvfile)
+        for row in reader:
+            target_list.append(row["Target ID"])
+    return target_list
+
+
+def save_task_id_to_csv(target_id, task_id):
+    """
+    Save the task ID to the task_id.csv file.
+    """
+    with open(TASK_ID_CSV, "a", newline="") as csvfile:
+        writer = csv.writer(csvfile)
+        writer.writerow([target_id, task_id])
+    print(f"Saved task ID {task_id} for target ID {target_id} to {TASK_ID_CSV}")
+
+
+def create_task(gmp, task_name, target_id, scan_config_id, scanner_id):
+    """
+    Create a task in OpenVAS.
+    """
+    response = gmp.create_task(
+        name=task_name,
+        config_id=scan_config_id,
+        target_id=target_id,
+        scanner_id=scanner_id
+    )
+    task_id = response.get("id")
+    if task_id:
+        print(f"Created task '{task_name}' for target ID '{target_id}' with ID: {task_id}")
+    else:
+        print(f"Failed to create task for target ID {target_id}. Check logs.")
+    return task_id
+
+
+def main():
+    connection = TLSConnection(hostname=OPENVAS_HOST, port=OPENVAS_PORT)
+    with Gmp(connection=connection, transform=EtreeTransform()) as gmp:
+        # Authenticate with OpenVAS
+        gmp.authenticate(username=USERNAME, password=PASSWORD)
+        print("Authenticated with OpenVAS")
+
+        # Get the scan configuration ID
+        scan_config_id = get_scan_config_id(gmp)
+        if not scan_config_id:
+            print("Failed to retrieve a valid scan configuration. Exiting.")
+            return
+
+        # Get the scanner ID
+        scanner_id = get_scanner_id(gmp)
+        if not scanner_id:
+            print("Failed to retrieve a valid scanner. Exiting.")
+            return
+
+        # Read target IDs from the target_id.csv file
+        target_list = read_csv_to_target_list(TARGET_ID_CSV)
+        if not target_list:
+            print("No target IDs found in the CSV file. Exiting.")
+            return
+
+        print(f"Found {len(target_list)} targets to create tasks for.")
+
+        # Process each target
+        for target_id in target_list:
+            print(f"Processing target ID: {target_id}")
+
+            # Create task for the target
+            task_name = f"Task for Target {target_id}"
+            task_id = create_task(gmp, task_name, target_id, scan_config_id, scanner_id)
+            if task_id:
+                save_task_id_to_csv(target_id, task_id)
+            else:
+                print(f"Failed to create task for target ID {target_id}. Continuing.")
+
+
+if __name__ == "__main__":
+    # Ensure the task_id.csv file exists with headers
+    try:
+        with open(TASK_ID_CSV, "x", newline="") as csvfile:
+            writer = csv.writer(csvfile)
+            writer.writerow(["Target ID", "Task ID"])  # Write headers if the file doesn't exist
+    except FileExistsError:
+        pass  # File already exists, no need to create
+
+    main()
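Taken together, the scripts added under Scanner/Vunerability_Scanner imply a fixed order: createTargets.py, then taskmaker.py, then starttask.py, then getreports.py (which itself asks for taskmaker.py to be run first), and finally generate_reports.py. A hypothetical runner for that sequence is sketched below; the file names come from this commit, but the orchestration itself is illustrative and not part of the repository:

import subprocess
import sys

# Run the vulnerability-scanner stages in the order the scripts imply:
# targets -> tasks -> start -> wait for reports -> export CSV.
STAGES = [
    "createTargets.py",
    "taskmaker.py",
    "starttask.py",
    "getreports.py",
    "generate_reports.py",
]

for stage in STAGES:
    print(f"=== Running {stage} ===")
    result = subprocess.run([sys.executable, stage])
    if result.returncode != 0:
        print(f"{stage} exited with code {result.returncode}; stopping.")
        break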

+446 -0  Scanner/WebApp/webapp.py

@@ -0,0 +1,446 @@
+import json  # used to parse pattern-matching callback IDs
+import os
+from datetime import datetime as dt
+
+import dash
+from dash import dcc, html, dash_table
+from dash.dependencies import Input, Output, State, ALL
+import pandas as pd
+import plotly.express as px
+import plotly.graph_objs as go
+
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+DATA_DIR = os.path.abspath(os.path.join(BASE_DIR, "..", "data"))
+
+detailed_scan_csv = os.path.join(DATA_DIR, "detailed_scan_results.csv")
+openvas_csv = os.path.join(DATA_DIR, "openvasscan.csv")
+
+# Load and prepare the dataset
+df = pd.read_csv(detailed_scan_csv)
+vulnerability_data = pd.read_csv(openvas_csv)
+
+# Preparing grouped data
+grouped_data = vulnerability_data.groupby(['IP', 'NVT Name', 'Severity']).first().reset_index()
+grouped_data['Details'] = grouped_data.apply(lambda row: f"CVSS: {row['CVSS']}\nSeverity: {row['Severity']}\nSummary: {row['Summary']}\nSolution Type: {row['Solution Type']}", axis=1)
+
+# List of unique IPs for the dropdown
+unique_ips = vulnerability_data['IP'].unique().tolist()
+unique_ips.insert(0, 'All')
+
+# Convert Timestamp to datetime and sort
+df['Timestamp'] = pd.to_datetime(df['Timestamp'])
+df.sort_values('Timestamp', inplace=True)
+
+# Extract unique timestamps
+unique_timestamps = df['Timestamp'].unique()
+
+# Prepare data for the timeline graph, grouped by day
+df['Date'] = df['Timestamp'].dt.date
+ip_count_over_time = df.groupby('Date')['IP'].nunique().reset_index()
+ip_count_over_time.columns = ['Date', 'IP_Count']
+
+# Create the Plotly graph
+timeline_fig = px.line(ip_count_over_time, x='Date', y='IP_Count', title='Number of IPs Over Time')
+timeline_fig.update_layout(
+    xaxis_title="Date",
+    yaxis_title="IP Count"
+)
+
+# Initialize the Dash app
+app = dash.Dash(__name__)
+
+# Convert timestamps to strings for slider display
+timestamp_options = [{'label': str(ts), 'value': ts} for ts in df['Timestamp'].unique()]
+timestamp_values = [ts.value for ts in df['Timestamp']]
+
+def style_status_badge(status):
+    emoji_map = {
+        'Added': '🟩',
+        'Removed': '🟥',
+        'Still Active': '⚪'
+    }
+    return f"{emoji_map.get(status, '⬜')} {status}"
+
+
+app.layout = html.Div([
+    dcc.Tabs(id="tabs", children=[
+        dcc.Tab(label='Overview', children=[
+            html.Div([
+                dcc.RangeSlider(
+                    id='time-range-slider',
+                    min=0,
+                    max=len(unique_timestamps) - 1,
+                    value=[0, len(unique_timestamps) - 1],
+                    marks={i: {'label': str(ts)[:10]} for i, ts in enumerate(unique_timestamps)},
+                    step=1,
+                    allowCross=False
+                ),
+                dash_table.DataTable(
+                    id='table',
+                    columns=[
+                        {"name": i, "id": i, "presentation": "markdown"} if i == "Status" else {"name": i, "id": i}
+                        for i in df.columns
+                    ] + [{"name": "Status", "id": "Status", "presentation": "markdown"}],
+                    sort_action='native',
+                    filter_action='native',
+                    style_table={'overflowX': 'auto'},
+                    style_data_conditional=[{'if': {'column_id': 'Status'}, 'textAlign': 'center', 'width': '120px'}]
+                ),
+                html.Div([
+                    dcc.Graph(
+                        id='timeline-graph',
+                        figure=timeline_fig
+                    ),
+                    dcc.Graph(id='open-ports-bar-chart')
+                ], style={'display': 'flex', 'flex-direction': 'row'}),
+                html.Div([
+                    dcc.Graph(id='severity-pie-chart')
+                ], style={'display': 'flex', 'flex-direction': 'row'}),
+                html.Div([
+                    dcc.Graph(id='ip-change-bar-chart'),
+                    dash_table.DataTable(
+                        id='ip-change-table',
+                        columns=[
+                            {"name": "IP", "id": "IP"},
+                            {"name": "Status", "id": "Status"}
+                        ],
+                        sort_action='native',
+                        filter_action='native',
+                        style_table={'overflowX': 'auto'}
+                    )
+                ], style={'display': 'flex', 'flex-direction': 'row'}),
+                html.Div(id='summary-section', style={'padding': '20px'})
+            ])
+        ]),
+        dcc.Tab(label='Vulnerability Analysis', children=[
+            html.Div([
+                dcc.Dropdown(
+                    id='severity-dropdown',
+                    options=[{'label': s, 'value': s} for s in ['All', 'High', 'Medium', 'Low']],
+                    value='All'
+                ),
+                dcc.Dropdown(
+                    id='ip-dropdown',
+                    options=[{'label': ip, 'value': ip} for ip in unique_ips],
+                    value='All'
+                ),
+                dcc.Graph(id='vulnerability-treemap'),
+                html.Div(id='details-and-ip-output'),
+                html.Div(id='clicked-ip', style={'display': 'none'})
+            ])
+        ]),
+        dcc.Tab(label='Port Heatmap', children=[
+            html.Div([
+                dcc.Graph(id='ip-port-heatmap', style={'height': '700px', 'width': '100%'}),
+                html.Div([
+                    html.P("🟦 = Port is Open"),
+                    html.P("⬜ = Port is Closed"),
+                    html.P("Each row represents a Host (IP), and each column is a Port."),
+                    html.P("This heatmap shows which ports are open on each host at the selected time.")
+                ], style={
+                    'padding': '10px',
+                    'backgroundColor': '#f9f9f9',
+                    'border': '1px solid #ccc',
+                    'marginTop': '10px',
+                    'borderRadius': '5px'
+                })
+            ])
+        ])
+    ])
+])
+
+@app.callback(
+    [Output('table', 'data'),
+     Output('table', 'style_data_conditional'),
+     Output('timeline-graph', 'figure'),
+     Output('open-ports-bar-chart', 'figure'),
+     Output('severity-pie-chart', 'figure'),
+     Output('ip-port-heatmap', 'figure'),
+     Output('ip-change-bar-chart', 'figure'),
+     Output('ip-change-table', 'data'),
+     Output('summary-section', 'children')],
+    [Input('time-range-slider', 'value')]
+)
+def update_overview_tab(time_range):
+    start_index, end_index = time_range
+    start_timestamp = unique_timestamps[start_index]
+    end_timestamp = unique_timestamps[end_index]
+
+    # Filter data within the selected time range
+    filtered_df = df[(df['Timestamp'] >= start_timestamp) & (df['Timestamp'] <= end_timestamp)].copy()
+
+    # Update table
+    filtered_df_selected = filtered_df.copy()
+
+    # Determine IPs in the time range
+    all_ips = set(filtered_df['IP'])
+
+    # Get previous IP set
+    if start_index > 0:
+        prev_timestamp = unique_timestamps[start_index - 1]
+    else:
+        prev_timestamp = start_timestamp
+
+    prev_ips = set(df[df['Timestamp'] == prev_timestamp]['IP'])
+    new_ips = all_ips - prev_ips
+    removed_ips = prev_ips - all_ips
+    existing_ips = all_ips.intersection(prev_ips)
+
+    # Add dummy rows for removed IPs (with NaNs or placeholders)
+    removed_rows = pd.DataFrame({
+        "IP": list(removed_ips),
+        "Hostname": "", "MAC Address": "", "Protocol": "", "Port": "", "Name": "",
+        "State": "", "Product": "", "Version": "", "Extra Info": "",
+        "Timestamp": pd.NaT, "Date": None
+    })
+    filtered_df_selected = pd.concat([filtered_df_selected, removed_rows], ignore_index=True)
+
+    # Build status dictionary for badges
+    status_dict = {}
+    for ip in new_ips:
+        status_dict[ip] = 'Added'
+    for ip in removed_ips:
+        status_dict[ip] = 'Removed'
+    for ip in existing_ips:
+        status_dict[ip] = 'Still Active'
+
+    # Assign and badge
+    filtered_df_selected['Status'] = filtered_df_selected['IP'].map(status_dict).fillna('Unknown')
+    filtered_df_selected['Status'] = filtered_df_selected['Status'].apply(style_status_badge)
+
+    # Apply conditional formatting based on the 'Status' column
+    style = [
+        {
+            'if': {'filter_query': '{Status} = "Added"'},
+            'borderLeft': '4px solid green',
+            'backgroundColor': '#eaf7ea'  # very light green background
+        },
+        {
+            'if': {'filter_query': '{Status} = "Removed"'},
+            'borderLeft': '4px solid red',
+            'backgroundColor': '#fcebea'  # very light red background
+        },
+        {
+            'if': {'filter_query': '{Status} = "Still Active"'},
+            'borderLeft': '4px solid lightgray'
+        }
+    ]
+
+    # Update timeline graph, grouped by day
+    filtered_df['Date'] = filtered_df['Timestamp'].dt.date
+    ip_count_over_time = filtered_df.groupby('Date')['IP'].nunique().reset_index()
+    ip_count_over_time.columns = ['Date', 'IP_Count']
+    timeline_fig = px.line(ip_count_over_time, x='Date', y='IP_Count', title='Number of IPs Over Time')
+    timeline_fig.update_layout(
+        xaxis_title="Date",
+        yaxis_title="IP Count"
+    )
+
+    # Open ports bar chart
+    open_ports_count = filtered_df['Port'].value_counts().reset_index()
+    open_ports_count.columns = ['Port', 'Count']
+    open_ports_bar_chart = px.bar(open_ports_count, x='Port', y='Count', title='Distribution of Open Ports')
+    open_ports_bar_chart.update_layout(
+        xaxis_title="Port",
+        yaxis_title="Count"
+    )
+    open_ports_bar_chart.update_traces(marker_color='blue', marker_line_color='darkblue', marker_line_width=1.5, opacity=0.8)
+
+    # Severity pie chart
+    severity_count = vulnerability_data['Severity'].value_counts().reset_index()
+    severity_count.columns = ['Severity', 'Count']
+    severity_pie_chart = px.pie(severity_count, names='Severity', values='Count', title='Severity Distribution')
+
+    # IP-Port heatmap with binary open/closed values
+    # Only include ports that were actually observed, sorted numerically
+    all_ports = sorted(filtered_df['Port'].dropna().astype(int).unique().tolist())
+
+    all_ips = set(filtered_df['IP'])
+
+    heatmap_df = (
+        filtered_df[["IP", "Port"]]
+        .dropna()
+        .assign(value=1)
+        .pivot_table(index="IP", columns="Port", values="value", fill_value=0)
+    )
+    heatmap_df.columns = heatmap_df.columns.astype(int)
+    heatmap_df = heatmap_df.sort_index(axis=1)
+
+    hover_text = [
+        [f"IP: {ip}<br>Port: {port}<br>Status: {'Open' if val == 1 else 'Closed'}"
+         for port, val in zip(heatmap_df.columns, row)]
+        for ip, row in zip(heatmap_df.index, heatmap_df.values)
+    ]
+
+    # Generate heatmap
+    ip_port_heatmap = go.Figure(data=go.Heatmap(
+        z=heatmap_df.values,
+        x=heatmap_df.columns,
+        y=heatmap_df.index,
+        text=hover_text,
+        hoverinfo='text',
+        colorscale=[[0, 'white'], [1, 'darkblue']],
+        zmin=0,
+        zmax=1,
+        zsmooth=False,
+        colorbar=dict(
+            title='Port Status',
+            tickvals=[0, 1],
+            ticktext=['Closed (White)', 'Open (Blue)']
+        )
+    ))
+    ip_port_heatmap.update_layout(
+        title='Binary Heatmap - Which Ports Are Open on Which Hosts',
+        xaxis_title='Port',
+        yaxis_title='IP',
+        height=600
+    )
+
+    # Determine IPs added and removed
+    if start_index > 0:
+        prev_timestamp = unique_timestamps[start_index - 1]
+    else:
+        prev_timestamp = start_timestamp
+
+    prev_ips = set(df[df['Timestamp'] == prev_timestamp]['IP'])
+    new_ips = all_ips - prev_ips
+    removed_ips = prev_ips - all_ips
+    existing_ips = all_ips.intersection(prev_ips)
+
+    # IP change table
+    ip_change_data = []
+    for ip in new_ips:
+        ip_change_data.append({"IP": ip, "Status": "Added"})
+    for ip in removed_ips:
+        ip_change_data.append({"IP": ip, "Status": "Removed"})
+    for ip in existing_ips:
+        ip_change_data.append({"IP": ip, "Status": "Still Active"})
+
+    # IP change bar chart
+    ip_change_summary = {
+        "Added": len(new_ips),
+        "Removed": len(removed_ips),
+        "Still Active": len(existing_ips)
+    }
+    ip_change_bar_chart = px.bar(
+        x=list(ip_change_summary.keys()),
+        y=list(ip_change_summary.values()),
+        title="IP Changes Summary"
+    )
+    ip_change_bar_chart.update_layout(
+        xaxis_title="Change Type",
+        yaxis_title="Count"
+    )
+    ip_change_bar_chart.update_traces(marker_color='purple', marker_line_color='darkblue', marker_line_width=1.5, opacity=0.8)
+
+    # Summary section
+    total_unique_ips = len(df['IP'].unique())
+    total_vulnerabilities = len(vulnerability_data)
+    most_common_ports = filtered_df['Port'].value_counts().head(5).to_dict()
+    most_dangerous_vulnerability = vulnerability_data.loc[vulnerability_data['CVSS'].idxmax()]
+    most_common_vulnerability = vulnerability_data['NVT Name'].value_counts().idxmax()
+    most_common_ip = df['IP'].value_counts().idxmax()
+    average_cvss_score = vulnerability_data['CVSS'].mean()
+    ips_with_most_vulnerabilities = vulnerability_data['IP'].value_counts().head(5).to_dict()
+
+    summary_content = html.Div([
+        html.H3("Summary of Interesting Data"),
+        html.P(f"Total unique IPs: {total_unique_ips}"),
+        html.P(f"Total vulnerabilities recorded: {total_vulnerabilities}"),
+        html.P(f"Most dangerous vulnerability (highest CVSS score): {most_dangerous_vulnerability['NVT Name']} with CVSS score {most_dangerous_vulnerability['CVSS']}"),
+        html.P(f"Most common vulnerability: {most_common_vulnerability}"),
+        html.P(f"Most common IP: {most_common_ip}"),
+        html.P(f"Average CVSS score: {average_cvss_score:.2f}"),
+        html.H4("Most Common Ports:"),
+        html.Ul([html.Li(f"Port {port}: {count} times") for port, count in most_common_ports.items()]),
+        html.H4("IPs with the Most Vulnerabilities:"),
+        html.Ul([html.Li(f"IP {ip}: {count} vulnerabilities") for ip, count in ips_with_most_vulnerabilities.items()])
+    ])
+
+    return (filtered_df_selected.to_dict('records'), style, timeline_fig, open_ports_bar_chart, severity_pie_chart,
+            ip_port_heatmap, ip_change_bar_chart, ip_change_data, summary_content)
+
+@app.callback(
+    [Output('vulnerability-treemap', 'figure'),
+     Output('clicked-ip', 'children')],
+    [Input('severity-dropdown', 'value'),
+     Input('ip-dropdown', 'value'),
+     Input({'type': 'dynamic-ip', 'index': ALL}, 'n_clicks')],
+    [State({'type': 'dynamic-ip', 'index': ALL}, 'index')]
+)
+def update_treemap(selected_severity, selected_ip, n_clicks, ip_indices):
+    ctx = dash.callback_context
+    triggered_id = ctx.triggered[0]['prop_id'] if ctx.triggered else None
+    # Determine if the callback was triggered by a related IP link click
+    if ctx.triggered and 'dynamic-ip' in ctx.triggered[0]['prop_id']:
+        # Extract clicked IP from the pattern-matching component id
+        triggered_info = ctx.triggered[0]
+        button_id = triggered_info['prop_id'].split('}.')[0] + '}'
+        clicked_ip = json.loads(button_id)['index']
+    else:
+        clicked_ip = None
+
+    # Filter data based on severity, dropdown IP, or clicked related IP
+    filtered_data = grouped_data.copy()
+    filtered_data['CVSS'] = filtered_data['CVSS'].fillna(0)
+    if selected_severity != 'All':
+        filtered_data = filtered_data[filtered_data['Severity'] == selected_severity]
+    if selected_ip != 'All':
+        filtered_data = filtered_data[filtered_data['IP'] == selected_ip]
+    if clicked_ip:
+        filtered_data = filtered_data[filtered_data['IP'] == clicked_ip]
+    filtered_data = filtered_data[filtered_data['CVSS'] > 0]
+
+    fig = px.treemap(
+        filtered_data,
+        path=['IP', 'NVT Name'],
+        values='CVSS',
+        color='CVSS',
+        color_continuous_scale='reds',
+        hover_data=['Details']
+    )
+    return fig, ""  # Reset clicked-ip so a stale selection does not persist
+
+# Callback to display details and related IPs
+@app.callback(
+    Output('details-and-ip-output', 'children'),
+    [Input('vulnerability-treemap', 'clickData')]
+)
+def display_details_and_ips(clickData):
+    if clickData is not None:
+        clicked_vuln = clickData['points'][0]['label'].split('<br>')[0]
+        details = clickData['points'][0]['customdata'][0]
+        matching_ips = vulnerability_data[vulnerability_data['NVT Name'] == clicked_vuln]['IP'].unique()
+
+        return html.Div([
+            html.Pre(f'Details of Selected Vulnerability:\n{details}'),
+            html.H4("Related IPs with the same vulnerability:"),
+            html.Div([html.A(ip, href='#', id={'type': 'dynamic-ip', 'index': ip}, style={'marginRight': '10px', 'cursor': 'pointer'}) for ip in matching_ips])
+        ])
+    return 'Click on a vulnerability to see details and related IPs.'
+
+if __name__ == '__main__':
+    app.run(debug=True)
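With detailed_scan_results.csv and openvasscan.csv in place, the dashboard starts with `python webapp.py` and, by Dash's defaults, serves on http://127.0.0.1:8050. If a different bind address or port is needed, `app.run` accepts them; an illustrative variant (the host and port values are assumptions, not taken from this commit):

# Illustrative only: expose the dashboard on all interfaces and a chosen port.
app.run(host="0.0.0.0", port=8050, debug=False)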

+0 -0  Scanner/install_docker.sh


+17 -0  Scanner/openvas-docker/docker-compose.yml

@@ -0,0 +1,17 @@
+version: '3.8'
+
+services:
+  openvas:
+    image: immauss/openvas
+    container_name: openvas
+    restart: unless-stopped
+    ports:
+      - "9390:9390"   # GMP (Greenbone Management Protocol) for GVM Tools
+      - "8080:9392"   # OpenVAS web UI
+    volumes:
+      - gvm-data:/data
+    shm_size: '2gb'
+
+volumes:
+  gvm-data:
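Once the compose service above is up, the GMP endpoint the Python scripts connect to (localhost:9390) can be sanity-checked before starting a scan. A minimal sketch, illustrative only and not part of this commit:

import socket

# Reachability check for the GMP port published by the compose file above.
HOST, PORT = "localhost", 9390

try:
    with socket.create_connection((HOST, PORT), timeout=5):
        print(f"GMP port {PORT} on {HOST} is accepting connections.")
except OSError as exc:
    print(f"Could not reach {HOST}:{PORT}: {exc}")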

+0 -0  Scanner/setup_manual.sh