nafees369 committed on
Commit
5de4015
·
verified ·
1 Parent(s): 0ca8180

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -17
app.py CHANGED
@@ -30,7 +30,7 @@ class NetworkAnomalyDetector:
30
  def parse_pcap_file(self, file_path):
31
  """
32
  Parse network packet file with multiple parsing strategies
33
-
34
  :param file_path: Path to the packet capture file
35
  :return: DataFrame with packet features
36
  """
@@ -66,7 +66,7 @@ class NetworkAnomalyDetector:
66
  'timestamp': getattr(packet, 'time', 0)
67
  }
68
  packet_features.append(features)
69
-
70
  if packet_features:
71
  return pd.DataFrame(packet_features)
72
  except Exception as e:
@@ -76,7 +76,7 @@ class NetworkAnomalyDetector:
76
  if pyshark is not None:
77
  try:
78
  import asyncio
79
-
80
  async def parse_with_pyshark():
81
  capture = pyshark.FileCapture(file_path)
82
  local_features = []
@@ -92,14 +92,14 @@ class NetworkAnomalyDetector:
92
  local_features.append(features)
93
  except Exception as packet_error:
94
  parsing_errors.append(f"PyShark packet parsing error: {str(packet_error)}")
95
-
96
  capture.close()
97
  return local_features
98
 
99
  # Run the async function
100
  try:
101
  packet_features = asyncio.run(parse_with_pyshark())
102
-
103
  if packet_features:
104
  return pd.DataFrame(packet_features)
105
  except Exception as async_error:
@@ -109,7 +109,7 @@ class NetworkAnomalyDetector:
109
 
110
  # Fallback: Raw file reading
111
  packet_features = read_raw_file(file_path)
112
-
113
  if packet_features:
114
  # Log parsing errors if any occurred
115
  if parsing_errors:
@@ -189,7 +189,7 @@ class NetworkAnomalyDetector:
189
  """
190
  # Parse packet file
191
  packets_df = self.parse_pcap_file(file_path)
192
-
193
  if packets_df.empty:
194
  return {
195
  'summary': {
@@ -201,30 +201,30 @@ class NetworkAnomalyDetector:
201
 
202
  # Prepare features for anomaly detection
203
  feature_columns = ['length', 'timestamp']
204
-
205
  # Handle protocol and IP as categorical features
206
  packets_df['protocol_encoded'] = pd.Categorical(packets_df['protocol']).codes
207
  packets_df['src_ip_encoded'] = pd.Categorical(packets_df['src_ip']).codes
208
  packets_df['dst_ip_encoded'] = pd.Categorical(packets_df['dst_ip']).codes
209
-
210
  feature_columns.extend(['protocol_encoded', 'src_ip_encoded', 'dst_ip_encoded'])
211
-
212
  # Prepare features for anomaly detection
213
  features = packets_df[feature_columns]
214
-
215
  # Scale features
216
  features_scaled = self.scaler.fit_transform(features)
217
-
218
  # Detect anomalies
219
  anomaly_labels = self.isolation_forest.fit_predict(features_scaled)
220
  packets_df['is_anomaly'] = anomaly_labels == -1
221
-
222
  # Create summary
223
  summary = {
224
  'total_packets': len(packets_df),
225
  'isolation_forest_anomalies': sum(packets_df['is_anomaly'])
226
  }
227
-
228
  return {
229
  'summary': summary,
230
  'packets': packets_df
@@ -269,7 +269,7 @@ def create_gradio_interface():
269
 
270
  with gr.Row():
271
  file_input = gr.File(label="Upload PCAP File", type="filepath", file_types=['.pcap', '.pkt'])
272
-
273
  analyze_button = gr.Button("Analyze Network File", variant="primary")
274
 
275
  # Outputs
@@ -279,8 +279,8 @@ def create_gradio_interface():
279
 
280
  # Event handlers
281
  analyze_button.click(
282
- fn=analyze_network_file,
283
- inputs=[file_input],
284
  outputs=[summary_output, results_dataframe, error_output]
285
  )
286
 
 
30
  def parse_pcap_file(self, file_path):
31
  """
32
  Parse network packet file with multiple parsing strategies
33
+
34
  :param file_path: Path to the packet capture file
35
  :return: DataFrame with packet features
36
  """
 
66
  'timestamp': getattr(packet, 'time', 0)
67
  }
68
  packet_features.append(features)
69
+
70
  if packet_features:
71
  return pd.DataFrame(packet_features)
72
  except Exception as e:
 
76
  if pyshark is not None:
77
  try:
78
  import asyncio
79
+
80
  async def parse_with_pyshark():
81
  capture = pyshark.FileCapture(file_path)
82
  local_features = []
 
92
  local_features.append(features)
93
  except Exception as packet_error:
94
  parsing_errors.append(f"PyShark packet parsing error: {str(packet_error)}")
95
+
96
  capture.close()
97
  return local_features
98
 
99
  # Run the async function
100
  try:
101
  packet_features = asyncio.run(parse_with_pyshark())
102
+
103
  if packet_features:
104
  return pd.DataFrame(packet_features)
105
  except Exception as async_error:
 
109
 
110
  # Fallback: Raw file reading
111
  packet_features = read_raw_file(file_path)
112
+
113
  if packet_features:
114
  # Log parsing errors if any occurred
115
  if parsing_errors:
 
189
  """
190
  # Parse packet file
191
  packets_df = self.parse_pcap_file(file_path)
192
+
193
  if packets_df.empty:
194
  return {
195
  'summary': {
 
201
 
202
  # Prepare features for anomaly detection
203
  feature_columns = ['length', 'timestamp']
204
+
205
  # Handle protocol and IP as categorical features
206
  packets_df['protocol_encoded'] = pd.Categorical(packets_df['protocol']).codes
207
  packets_df['src_ip_encoded'] = pd.Categorical(packets_df['src_ip']).codes
208
  packets_df['dst_ip_encoded'] = pd.Categorical(packets_df['dst_ip']).codes
209
+
210
  feature_columns.extend(['protocol_encoded', 'src_ip_encoded', 'dst_ip_encoded'])
211
+
212
  # Prepare features for anomaly detection
213
  features = packets_df[feature_columns]
214
+
215
  # Scale features
216
  features_scaled = self.scaler.fit_transform(features)
217
+
218
  # Detect anomalies
219
  anomaly_labels = self.isolation_forest.fit_predict(features_scaled)
220
  packets_df['is_anomaly'] = anomaly_labels == -1
221
+
222
  # Create summary
223
  summary = {
224
  'total_packets': len(packets_df),
225
  'isolation_forest_anomalies': sum(packets_df['is_anomaly'])
226
  }
227
+
228
  return {
229
  'summary': summary,
230
  'packets': packets_df
 
269
 
270
  with gr.Row():
271
  file_input = gr.File(label="Upload PCAP File", type="filepath", file_types=['.pcap', '.pkt'])
272
+
273
  analyze_button = gr.Button("Analyze Network File", variant="primary")
274
 
275
  # Outputs
 
279
 
280
  # Event handlers
281
  analyze_button.click(
282
+ fn=analyze_network_file,
283
+ inputs=[file_input],
284
  outputs=[summary_output, results_dataframe, error_output]
285
  )
286