CodebaseAi committed on
Commit
59edcea
·
1 Parent(s): c2e4dd0

Detection3

Browse files
Files changed (1) hide show
  1. routes/offline_detection.py +76 -0
routes/offline_detection.py CHANGED
@@ -6,6 +6,8 @@ from werkzeug.utils import secure_filename
6
  from datetime import datetime
7
  from fpdf import FPDF
8
  from io import BytesIO
 
 
9
 
10
  # --- IMPORT UTILS ---
11
  from utils.pcap_to_csv import convert_pcap_to_csv
@@ -59,6 +61,80 @@ def download_sample(model_type):
59
  return jsonify(success=False, message="Sample file missing"), 404
60
  return send_file(file_path, as_attachment=True)
61
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
62
  # --- ROUTE: PREDICT ---
63
  @offline_bp.route("/predict", methods=["POST"])
64
  def offline_predict():
 
6
  from datetime import datetime
7
  from fpdf import FPDF
8
  from io import BytesIO
9
+ import time
10
+ import requests
11
 
12
  # --- IMPORT UTILS ---
13
  from utils.pcap_to_csv import convert_pcap_to_csv
 
61
  return jsonify(success=False, message="Sample file missing"), 404
62
  return send_file(file_path, as_attachment=True)
63
 
64
+
65
# --- ROUTE: URL LIVE PROBE ---
@offline_bp.route("/analyze-url", methods=["POST"])
def analyze_url():
    """Probe a user-supplied URL and classify the synthetic flow with the BCC model.

    Expects a JSON body of the form ``{"url": "<target>"}``.
    Returns JSON with the model prediction, the synthetic feature row, and the
    normalized URL; 400 when no URL is provided, 500 when the probe or the
    prediction pipeline fails.
    """
    # get_json(silent=True) returns None instead of raising when the request
    # body is missing or not JSON, so a bad client gets our 400 below rather
    # than an unhandled framework error.
    payload = request.get_json(silent=True) or {}
    target_url = payload.get("url")
    if not target_url:
        return jsonify(success=False, message="No URL provided"), 400

    # Ensure URL is properly formatted: default to HTTPS when no scheme given.
    if not target_url.startswith("http"):
        target_url = "https://" + target_url

    # SECURITY NOTE(review): this fetches an arbitrary user-supplied URL from
    # the server (SSRF risk). Before public deployment, validate the host
    # against an allow-list and/or reject private/loopback address ranges.

    # 1. Start "Synthetic Capture": time the request INCLUDING the body
    #    download, so duration and payload_bytes describe the same transfer.
    start_ts = time.time()

    try:
        # Use a real user-agent to avoid being blocked by the site
        headers_ua = {'User-Agent': 'Mozilla/5.0 (NIDS-Intelligence-Probe/1.0)'}
        # Context manager guarantees the connection is released. The original
        # used stream=True but never closed the response, and stopped the
        # clock before response.content had been downloaded — so bytes_rate
        # mixed a headers-only duration with a full-body byte count.
        with requests.get(target_url, timeout=10, headers=headers_ua) as response:
            body = response.content
            header_bytes = len(str(response.headers))
        end_ts = time.time()

        # 2. Extract Metadata for Synthetic Features
        duration = end_ts - start_ts
        payload_bytes = len(body)

        # 3. Map to the model's features (BCC format). Packet counts are
        #    simulated from a typical small TCP exchange (~8-10 packets).
        synthetic_row = {
            "protocol": 6,  # TCP/HTTPS
            "src_port": 443,
            "dst_port": 443,
            "duration": duration,
            "packets_count": 10,
            "fwd_packets_count": 5,
            "bwd_packets_count": 5,
            "total_payload_bytes": payload_bytes,
            "total_header_bytes": header_bytes,
            # Guard against a zero-duration response to avoid ZeroDivisionError.
            "bytes_rate": payload_bytes / duration if duration > 0 else 0,
            "packets_rate": 10 / duration if duration > 0 else 0,
            "syn_flag_counts": 1,
            "ack_flag_counts": 1,
            "rst_flag_counts": 0,
            "fin_flag_counts": 1
        }

        # 4. Convert to a one-row DataFrame and reuse the same BCC pipeline
        #    (scaler -> model -> label encoder) as the file-upload route.
        df_url = pd.DataFrame([synthetic_row])

        model_data = load_model("bcc")
        scaler = model_data.get('scaler')
        encoder = model_data.get('encoder')
        model = model_data['model']

        # Scale and Predict
        numeric_input = df_url[BCC_FEATURES].apply(pd.to_numeric).fillna(0)
        scaled_data = scaler.transform(numeric_input.values)
        preds = model.predict(scaled_data)
        label = encoder.inverse_transform(preds)[0]

        return jsonify({
            "success": True,
            "prediction": str(label),
            "details": synthetic_row,
            "url": target_url
        })

    except Exception as e:
        # Boundary handler: surface any probe/model failure as a JSON 500.
        return jsonify(success=False, message=f"URL Probe Failed: {str(e)}"), 500
137
+
138
  # --- ROUTE: PREDICT ---
139
  @offline_bp.route("/predict", methods=["POST"])
140
  def offline_predict():