CodebaseAi committed on
Commit
978b2fe
·
1 Parent(s): fff8e55

Added model

Browse files
Files changed (2) hide show
  1. requirements.txt +1 -0
  2. routes/offline_detection.py +58 -51
requirements.txt CHANGED
@@ -4,6 +4,7 @@ flask-cors==4.0.1
4
  Flask-SocketIO==5.3.6
5
  gunicorn==22.0.0
6
  eventlet==0.35.2
 
7
  requests
8
 
9
  # Machine Learning & Data (Stable Versions)
 
4
  Flask-SocketIO==5.3.6
5
  gunicorn==22.0.0
6
  eventlet==0.35.2
7
+ Flask-Mail==0.9.1
8
  requests
9
 
10
  # Machine Learning & Data (Stable Versions)
routes/offline_detection.py CHANGED
@@ -1,14 +1,18 @@
1
  import os
2
  import pandas as pd
 
3
  from flask import Blueprint, request, jsonify, send_file
4
  from werkzeug.utils import secure_filename
5
  from datetime import datetime
6
- import joblib
7
  from fpdf import FPDF
 
 
8
  from utils.pcap_to_csv import convert_pcap_to_csv
 
9
 
10
  offline_bp = Blueprint("offline_bp", __name__)
11
 
 
12
  UPLOAD_DIR = "uploads"
13
  SAMPLE_DIR = "sample"
14
  os.makedirs(UPLOAD_DIR, exist_ok=True)
@@ -16,7 +20,6 @@ os.makedirs(SAMPLE_DIR, exist_ok=True)
16
 
17
  ALLOWED_EXT = {"csv", "pcap"}
18
 
19
- # Features
20
  BCC_FEATURES = [
21
  "proto","src_port","dst_port","flow_duration","total_fwd_pkts","total_bwd_pkts",
22
  "flags_numeric","payload_len","header_len","rate","iat","syn","ack","rst","fin"
@@ -28,36 +31,18 @@ CICIDS_FEATURES = [
28
  "Flow IAT Mean","Fwd PSH Flags","Fwd URG Flags","Fwd IAT Mean"
29
  ]
30
 
31
- # Models
32
- bcc_model = joblib.load("ml_models/realtime_model.pkl")
33
- bcc_encoder = joblib.load("ml_models/realtime_encoder.pkl")
34
- bcc_scaler = joblib.load("ml_models/realtime_scaler.pkl")
35
-
36
- cicids_model = joblib.load("ml_models/rf_pipeline.joblib")
37
-
38
-
39
  def allowed(filename):
40
  return "." in filename and filename.rsplit(".", 1)[1].lower() in ALLOWED_EXT
41
 
42
-
43
- # 📌 Sample CSV Download
44
- @offline_bp.route("/sample/<model>", methods=["GET"])
45
- def download_sample(model):
46
- file_path = None
47
- if model == "bcc":
48
- file_path = os.path.join(SAMPLE_DIR, "bcc_sample.csv")
49
- elif model == "cicids":
50
- file_path = os.path.join(SAMPLE_DIR, "cicids_sample.csv")
51
- else:
52
- return jsonify(success=False, message="Invalid model"), 400
53
-
54
  if not os.path.exists(file_path):
55
  return jsonify(success=False, message="Sample file missing"), 404
56
-
57
  return send_file(file_path, as_attachment=True)
58
 
59
-
60
- # 📌 Prediction API
61
  @offline_bp.route("/predict", methods=["POST"])
62
  def offline_predict():
63
  if "file" not in request.files:
@@ -73,43 +58,61 @@ def offline_predict():
73
  saved_path = os.path.join(UPLOAD_DIR, filename)
74
  file.save(saved_path)
75
 
76
- # PCAP Conversion
77
  if filename.lower().endswith(".pcap"):
78
- saved_path = convert_pcap_to_csv(saved_path)
79
-
80
-
81
- df = pd.read_csv(saved_path)
82
- # Prevent empty CSV prediction
83
- if df.shape[0] == 0:
84
- return jsonify(success=False, message="CSV has no data rows to analyze!"), 400
85
-
86
- expected = BCC_FEATURES if model_type == "bcc" else CICIDS_FEATURES
87
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
88
  missing = [c for c in expected if c not in df.columns]
89
  if missing:
90
- return jsonify(success=False, message=f"Missing features: {missing}")
91
-
92
- df = df[expected]
93
 
 
 
94
  if model_type == "bcc":
95
- scaled = bcc_scaler.transform(df)
96
- preds = bcc_model.predict(scaled)
97
- labels = bcc_encoder.inverse_transform(preds)
98
  else:
99
- labels = cicids_model.predict(df)
100
 
101
  df["prediction"] = labels
102
  class_counts = df["prediction"].value_counts().to_dict()
103
-
104
  results = [{"index": i, "class": lbl} for i, lbl in enumerate(labels)]
105
 
 
106
  result_file = os.path.join(UPLOAD_DIR, "last_results.csv")
107
  df.to_csv(result_file, index=False)
108
 
109
  return jsonify(success=True, classCounts=class_counts, results=results)
110
 
111
-
112
- # 📌 PDF Report Generation
113
  @offline_bp.route("/report", methods=["GET"])
114
  def offline_report():
115
  result_file = os.path.join(UPLOAD_DIR, "last_results.csv")
@@ -118,20 +121,24 @@ def offline_report():
118
 
119
  df = pd.read_csv(result_file)
120
  class_counts = df["prediction"].value_counts().to_dict()
121
-
122
  pdf_path = os.path.join(UPLOAD_DIR, "offline_report.pdf")
123
 
124
  pdf = FPDF()
125
  pdf.add_page()
126
  pdf.set_font("Arial", "B", 16)
127
- pdf.cell(0, 10, "AI-NIDS Offline Threat Analysis Report", ln=True)
 
128
 
129
  pdf.set_font("Arial", size=12)
130
- pdf.cell(0, 10, f"Generated: {datetime.now()}", ln=True)
131
  pdf.ln(5)
132
 
133
- for c, v in class_counts.items():
134
- pdf.cell(0, 8, f"{c}: {v}", ln=True)
 
 
 
 
135
 
136
  pdf.output(pdf_path)
137
  return send_file(pdf_path, as_attachment=True)
 
1
  import os
2
  import pandas as pd
3
+ import joblib
4
  from flask import Blueprint, request, jsonify, send_file
5
  from werkzeug.utils import secure_filename
6
  from datetime import datetime
 
7
  from fpdf import FPDF
8
+
9
+ # --- IMPORT UTILS ---
10
  from utils.pcap_to_csv import convert_pcap_to_csv
11
+ from utils.model_selector import load_model
12
 
13
  offline_bp = Blueprint("offline_bp", __name__)
14
 
15
+ # --- CONFIGURATION ---
16
  UPLOAD_DIR = "uploads"
17
  SAMPLE_DIR = "sample"
18
  os.makedirs(UPLOAD_DIR, exist_ok=True)
 
20
 
21
  ALLOWED_EXT = {"csv", "pcap"}
22
 
 
23
  BCC_FEATURES = [
24
  "proto","src_port","dst_port","flow_duration","total_fwd_pkts","total_bwd_pkts",
25
  "flags_numeric","payload_len","header_len","rate","iat","syn","ack","rst","fin"
 
31
  "Flow IAT Mean","Fwd PSH Flags","Fwd URG Flags","Fwd IAT Mean"
32
  ]
33
 
 
 
 
 
 
 
 
 
34
  def allowed(filename):
35
  return "." in filename and filename.rsplit(".", 1)[1].lower() in ALLOWED_EXT
36
 
37
+ # --- ROUTE: DOWNLOAD SAMPLE ---
38
+ @offline_bp.route("/sample/<model_type>", methods=["GET"])
39
+ def download_sample(model_type):
40
+ file_path = os.path.join(SAMPLE_DIR, f"{model_type}_sample.csv")
 
 
 
 
 
 
 
 
41
  if not os.path.exists(file_path):
42
  return jsonify(success=False, message="Sample file missing"), 404
 
43
  return send_file(file_path, as_attachment=True)
44
 
45
+ # --- ROUTE: PREDICT ---
 
46
  @offline_bp.route("/predict", methods=["POST"])
47
  def offline_predict():
48
  if "file" not in request.files:
 
58
  saved_path = os.path.join(UPLOAD_DIR, filename)
59
  file.save(saved_path)
60
 
61
+ # PCAP to CSV Conversion if needed
62
  if filename.lower().endswith(".pcap"):
63
+ try:
64
+ saved_path = convert_pcap_to_csv(saved_path)
65
+ except Exception as e:
66
+ return jsonify(success=False, message=f"PCAP conversion failed: {str(e)}"), 500
67
+
68
+ # Load Data
69
+ try:
70
+ df = pd.read_csv(saved_path)
71
+ if df.empty:
72
+ return jsonify(success=False, message="CSV has no data!"), 400
73
+ except Exception as e:
74
+ return jsonify(success=False, message=f"Error reading CSV: {str(e)}"), 400
75
+
76
+ # 🚀 DYNAMIC MODEL LOADING
77
+ # This prevents the "File Not Found" error at startup
78
+ try:
79
+ model_data = load_model(model_type)
80
+ model = model_data['model']
81
+
82
+ if model_type == "bcc":
83
+ encoder = model_data['encoder']
84
+ scaler = model_data['scaler']
85
+ expected = BCC_FEATURES
86
+ else:
87
+ expected = CICIDS_FEATURES
88
+ except Exception as e:
89
+ return jsonify(success=False, message=f"Model loading failed: {str(e)}"), 500
90
+
91
+ # Feature Verification
92
  missing = [c for c in expected if c not in df.columns]
93
  if missing:
94
+ return jsonify(success=False, message=f"Missing features: {missing}"), 400
 
 
95
 
96
+ # Prediction Logic
97
+ input_data = df[expected]
98
  if model_type == "bcc":
99
+ scaled = scaler.transform(input_data)
100
+ preds = model.predict(scaled)
101
+ labels = encoder.inverse_transform(preds)
102
  else:
103
+ labels = model.predict(input_data)
104
 
105
  df["prediction"] = labels
106
  class_counts = df["prediction"].value_counts().to_dict()
 
107
  results = [{"index": i, "class": lbl} for i, lbl in enumerate(labels)]
108
 
109
+ # Save results for PDF generation
110
  result_file = os.path.join(UPLOAD_DIR, "last_results.csv")
111
  df.to_csv(result_file, index=False)
112
 
113
  return jsonify(success=True, classCounts=class_counts, results=results)
114
 
115
+ # --- ROUTE: PDF REPORT ---
 
116
  @offline_bp.route("/report", methods=["GET"])
117
  def offline_report():
118
  result_file = os.path.join(UPLOAD_DIR, "last_results.csv")
 
121
 
122
  df = pd.read_csv(result_file)
123
  class_counts = df["prediction"].value_counts().to_dict()
 
124
  pdf_path = os.path.join(UPLOAD_DIR, "offline_report.pdf")
125
 
126
  pdf = FPDF()
127
  pdf.add_page()
128
  pdf.set_font("Arial", "B", 16)
129
+ pdf.cell(0, 10, "AI-NIDS Offline Threat Analysis Report", ln=True, align='C')
130
+ pdf.ln(10)
131
 
132
  pdf.set_font("Arial", size=12)
133
+ pdf.cell(0, 10, f"Date: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}", ln=True)
134
  pdf.ln(5)
135
 
136
+ pdf.set_font("Arial", "B", 12)
137
+ pdf.cell(0, 10, "Classification Summary:", ln=True)
138
+ pdf.set_font("Arial", size=12)
139
+
140
+ for cls, count in class_counts.items():
141
+ pdf.cell(0, 8, f"- {cls}: {count} occurrences", ln=True)
142
 
143
  pdf.output(pdf_path)
144
  return send_file(pdf_path, as_attachment=True)