File size: 10,848 Bytes
f1e3e91
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
import streamlit as st
import pandas as pd
import time
import random
import folium
from folium.plugins import MarkerCluster
from streamlit_folium import st_folium
import pygeoip
from collections import deque, OrderedDict
import datetime
import plotly.graph_objs as go
from plotly.subplots import make_subplots

# Cache the GeoIP reader at module level: the original re-opened and parsed
# the GeoLiteCity.dat database on every call, and this function is invoked
# once per processed row in the main loop.
_GEOIP_READER = None

# Function to get geolocation
def get_geolocation(ip):
    """Look up the geolocation record for an IP address.

    Args:
        ip: IPv4 address string (e.g. "8.8.8.8").

    Returns:
        The pygeoip record dict (contains 'latitude'/'longitude' keys used
        by the map code below), or None when the address has no record —
        e.g. private-range addresses like the simulated 192.168.x.x IPs —
        or when the database cannot be opened.
    """
    global _GEOIP_READER
    try:
        if _GEOIP_READER is None:
            _GEOIP_READER = pygeoip.GeoIP('GeoLiteCity.dat')
        return _GEOIP_READER.record_by_addr(ip)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; lookup failures still yield None.
        return None

# Fabricate one burst of attack traffic for the demo "Simulate" button.
def simulate_ddos_attack():
    """Generate a fake DDoS burst for demonstration purposes.

    Returns:
        A tuple ``(source_ips, packet_count)`` where ``source_ips`` is a
        list of ten random 192.168.1.x addresses and ``packet_count`` is a
        single random packet total shared by every simulated flow.
    """
    attacker_ips = ["192.168.1.%d" % random.randint(1, 255) for _ in range(10)]
    packet_count = random.randint(50, 200)  # one shared packet total per burst
    return attacker_ips, packet_count

# Set up the Streamlit app.
# NOTE: Streamlit reruns this whole script top-to-bottom on every user
# interaction; persistent state therefore lives in st.session_state below.
st.title("Real-Time Network Traffic DDoS Monitor")

# Create a single stop button at the top of the app; the processing loop
# checks this flag on every row to allow the user to halt playback.
stop_button = st.button('Stop', key='stop_button')

# Statistics section: three side-by-side metric placeholders that the
# loops below overwrite in place via .metric().
st.header("Statistics")
col1, col2, col3 = st.columns(3)
with col1:
    total_packets = st.empty()
with col2:
    ddos_flows = st.empty()
with col3:
    benign_flows = st.empty()

# Divider line
st.markdown("---")

# Active Flows section
st.header("Active Flows")

# Create placeholders for the tables, graphs, and map so later updates
# replace content in place rather than appending new elements to the page.
active_flows_placeholder = st.empty()
malicious_ips_placeholder = st.empty()
graphs_placeholder = st.empty()
map_placeholder = st.empty()

# Initialize map in session state if it doesn't exist, so markers added on
# earlier reruns are preserved instead of rebuilding an empty map each time.
if 'map' not in st.session_state:
    st.session_state.map = folium.Map(location=[0, 0], zoom_start=2)
    st.session_state.marker_cluster = MarkerCluster().add_to(st.session_state.map)
    st.session_state.map_counter = 0  # gives each map render a unique widget key

m = st.session_state.map
marker_cluster = st.session_state.marker_cluster

# Display the initial map
st_folium(m, width=700, height=500, key="initial_map")

# Load data in chunks to bound memory use on large capture files.
chunk_size = 1000  # Adjust this value based on your needs
data_iterator = pd.read_csv('SSDP_Flood_output_copy.csv', chunksize=chunk_size)

# Initialize data structures (kept in session_state so they survive reruns).
if 'ip_packet_counts' not in st.session_state:
    st.session_state.ip_packet_counts = {}  # src IP -> cumulative packet count
if 'time_series_data' not in st.session_state:
    st.session_state.time_series_data = []  # (timestamp, total packet count) samples
if 'ip_packet_time_series' not in st.session_state:
    st.session_state.ip_packet_time_series = {}  # src IP -> [(timestamp, cumulative count), ...]
if 'recent_rows' not in st.session_state:
    st.session_state.recent_rows = deque(maxlen=10)  # Correctly structured as a deque; keeps only the 10 newest flows
if 'malicious_ips' not in st.session_state:
    st.session_state.malicious_ips = OrderedDict()  # insertion-ordered "set" of recent attacker IPs (capped at 10 below)

# Initialize counters
if 'total_packet_count' not in st.session_state:
    st.session_state.total_packet_count = 0
if 'ddos_flow_count' not in st.session_state:
    st.session_state.ddos_flow_count = 0
if 'benign_flow_count' not in st.session_state:
    st.session_state.benign_flow_count = 0

# Flag to track if the map needs updating (set when a marker is added,
# cleared after the map placeholder is re-rendered).
map_updated = False

# Display Simulate DDoS Attack button. Clicking it injects ten fake
# malicious flows into the dashboard state and refreshes the UI widgets.
if st.button("Simulate DDoS Attack"):
    simulated_ips, packets = simulate_ddos_attack()
    # Millisecond-precision timestamp shared by every simulated flow.
    current_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]

    for ip in simulated_ips:
        # Create a structured row to append, matching the CSV row schema
        # used by the main processing loop below.
        row = {
            'time': current_time,
            'src': ip,
            'sport': random.randint(1024, 65535),
            'dst': '192.168.0.1',  # Target IP
            'dport': 80,           # Target port
            'protocol': 'TCP',
            'packets': packets,
            'label': 1  # Simulate as malicious
        }

        # Ensure the row is appended correctly (deque drops the oldest
        # entry automatically once it holds 10 rows).
        st.session_state.recent_rows.append(row)

        # Update counters
        st.session_state.total_packet_count += packets
        st.session_state.ddos_flow_count += 1

        # Update the statistics.
        # NOTE(review): metrics and the table below are refreshed once per
        # simulated IP; refreshing once after the loop would look the same.
        total_packets.metric("Total Packets", st.session_state.total_packet_count)
        ddos_flows.metric("DDoS Flows", st.session_state.ddos_flow_count)
        benign_flows.metric("Benign Flows", st.session_state.benign_flow_count)

        # Convert recent_rows deque to DataFrame
        try:
            active_flows_df = pd.DataFrame(list(st.session_state.recent_rows))
            # Ensure DataFrame has expected columns
            if not {'time', 'src', 'sport', 'dst', 'dport', 'protocol', 'packets'}.issubset(active_flows_df.columns):
                st.error("DataFrame does not have the expected structure.")
                continue

            # Update active flows
            active_flows_placeholder.dataframe(
                active_flows_df[['time', 'src', 'sport', 'dst', 'dport', 'protocol', 'packets']], 
                height=300, 
                use_container_width=True,
                hide_index=True
            )

            # Update the map with the simulated IP.
            # NOTE(review): simulated IPs are private (192.168.x.x), so
            # get_geolocation likely returns None and no marker is added —
            # confirm whether that is intended for the demo.
            geo_info = get_geolocation(ip)
            if geo_info:
                folium.Marker(
                    location=[geo_info['latitude'], geo_info['longitude']],
                    popup=ip,
                    icon=folium.Icon(color='red', icon='info-sign')
                ).add_to(marker_cluster)

            # Update the map view
            map_updated = True

        except Exception as e:
            st.error(f"Error creating DataFrame: {str(e)}")

    # Re-render the map once per click, using a fresh widget key so
    # streamlit-folium treats it as a new component instance.
    if map_updated:
        map_placeholder.empty()
        st.session_state.map_counter += 1
        st_folium(m, width=700, height=500, key=f"map_{st.session_state.map_counter}")
        map_updated = False

# Process data in chunks: replay the capture CSV row by row, updating
# counters, the active-flows table, the malicious-IP list, the plotly
# graphs, and the folium map as each flow "arrives".
for chunk_index, chunk in enumerate(data_iterator):
    for row_index, row in chunk.iterrows():
        # stop_button was read once at the top of the script; in Streamlit
        # a click triggers a rerun, so this value is fixed for this pass.
        if stop_button:
            st.write('Stopped by user')
            break

        # Update counters (CSV is assumed to carry 'packets' and a binary
        # 'label' column where 1 == DDoS flow — TODO confirm schema).
        st.session_state.total_packet_count += row['packets']
        if row['label'] == 1:
            st.session_state.ddos_flow_count += 1
        else:
            st.session_state.benign_flow_count += 1

        # Update statistics
        total_packets.metric("Total Packets", st.session_state.total_packet_count)
        ddos_flows.metric("DDoS Flows", st.session_state.ddos_flow_count)
        benign_flows.metric("Benign Flows", st.session_state.benign_flow_count)

        # Update the time column with current time so the replayed row
        # appears to arrive "now" rather than at its capture timestamp.
        current_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]
        row['time'] = current_time

        # Update active flows table
        st.session_state.recent_rows.append(dict(row))  # Append the dictionary of the row
        
        # Convert recent_rows deque to DataFrame
        try:
            active_flows_df = pd.DataFrame(list(st.session_state.recent_rows))
            active_flows_placeholder.dataframe(
                active_flows_df[['time', 'src', 'sport', 'dst', 'dport', 'protocol', 'packets']], 
                height=300, 
                use_container_width=True,
                hide_index=True
            )

        except Exception as e:
            st.error(f"Error creating DataFrame: {str(e)}")

        # Update the malicious IPs list if the IP is malicious
        if row['label'] == 1:
            if row['src'] not in st.session_state.malicious_ips:
                st.session_state.malicious_ips[row['src']] = True
                # Keep only the 10 most recently seen attacker IPs.
                if len(st.session_state.malicious_ips) > 10:
                    st.session_state.malicious_ips.popitem(last=False)
                
                # Add new malicious IP to the map (skipped when the IP has
                # no geolocation record, e.g. private addresses).
                geo_info = get_geolocation(row['src'])
                if geo_info:
                    folium.Marker(
                        location=[geo_info['latitude'], geo_info['longitude']],
                        popup=row['src'],
                        icon=folium.Icon(color='red', icon='info-sign')
                    ).add_to(marker_cluster)
                
                # Set flag to update map
                map_updated = True
            
            # Format malicious IPs as a numbered list
            malicious_ips_text = "**Recent Malicious IPs:**\n"
            for i, ip in enumerate(st.session_state.malicious_ips.keys(), 1):
                malicious_ips_text += f"{i}. <span style='color: red;'>{ip}</span>\n"
            malicious_ips_placeholder.markdown(malicious_ips_text, unsafe_allow_html=True)
        
        # Update packet counts for the source IP
        src_ip = row['src']
        packets = row['packets']
        
        if src_ip not in st.session_state.ip_packet_counts:
            st.session_state.ip_packet_counts[src_ip] = 0
            st.session_state.ip_packet_time_series[src_ip] = []
        
        st.session_state.ip_packet_counts[src_ip] += packets
        st.session_state.ip_packet_time_series[src_ip].append((current_time, st.session_state.ip_packet_counts[src_ip]))

        # Add current total packet count to time series data
        st.session_state.time_series_data.append((current_time, st.session_state.total_packet_count))

        # Create and update the graphs.
        # NOTE(review): the entire 3-panel figure is rebuilt from scratch
        # for every row — a significant per-row cost as the series grow.
        if len(st.session_state.ip_packet_counts) > 0:
            fig = make_subplots(rows=3, cols=1, 
                                subplot_titles=("Top 10 Source IPs by Packet Count", 
                                                "Total Packet Count Over Time",
                                                "Packet Count per Source IP Over Time"))

            # Panel 1: bar chart of the 10 busiest source IPs.
            top_ips = sorted(st.session_state.ip_packet_counts.items(), key=lambda x: x[1], reverse=True)[:10]
            ips, counts = zip(*top_ips)

            fig.add_trace(go.Bar(x=ips, y=counts), row=1, col=1)

            # Panel 2: overall packet total, windowed to the last 100 samples.
            times, packet_counts = zip(*st.session_state.time_series_data[-100:])
            fig.add_trace(go.Scatter(x=times, y=packet_counts, mode='lines'), row=2, col=1)

            # Panel 3: one line per top IP, last 100 samples each.
            for ip in ips:
                ip_times, ip_counts = zip(*st.session_state.ip_packet_time_series[ip][-100:])
                fig.add_trace(go.Scatter(x=ip_times, y=ip_counts, mode='lines', name=ip), row=3, col=1)

            fig.update_layout(height=1200, showlegend=True)
            fig.update_xaxes(title_text="Source IP", row=1, col=1)
            fig.update_xaxes(title_text="Time", row=2, col=1)
            fig.update_xaxes(title_text="Time", row=3, col=1)
            fig.update_yaxes(title_text="Packet Count", row=1, col=1)
            fig.update_yaxes(title_text="Total Packet Count", row=2, col=1)
            fig.update_yaxes(title_text="Packet Count", row=3, col=1)

            graphs_placeholder.plotly_chart(fig, use_container_width=True)

        # Update the map if new points were added
        if map_updated:
            map_placeholder.empty()
            st.session_state.map_counter += 1
            st_folium(m, width=700, height=500, key=f"map_{st.session_state.map_counter}")
            map_updated = False
        
        # Throttle playback to roughly 10 rows/second.
        time.sleep(0.1)
    
    # Propagate the inner-loop stop out of the chunk iterator as well.
    if stop_button:
        break

st.write("Data processing complete")