# GRADIO APPLICATION FOR HUGGING FACE SPACES # Loads the trained CNN and scaler to provide a web interface for network anomaly prediction. import os import joblib import numpy as np import pandas as pd import tensorflow as tf import gradio as gr from tensorflow.keras.models import load_model from sklearn.preprocessing import LabelEncoder # --- Model & Scaler Configuration --- H5_MODEL_FILE = "intrusion_detector_model.h5" SCALER_FILE_NAME = "scaler.pkl" # Threshold optimized in Cell 11 for better Attack Recall PREDICTION_THRESHOLD = 0.40 FEATURE_COUNT = 40 # Pre-defined list of all feature names (41 raw features) FEATURE_NAMES = [ 'duration', 'protocol_type', 'service', 'flag', 'src_bytes', 'dst_bytes', 'land', 'wrong_fragment', 'urgent', 'hot', 'num_failed_logins', 'logged_in', 'num_compromised', 'root_shell', 'su_attempted', 'num_root', 'num_file_creations', 'num_shells', 'num_access_files', 'num_outbound_cmds', 'is_host_login', 'is_guest_login', 'count', 'srv_count', 'serror_rate', 'srv_serror_rate', 'rerror_rate', 'srv_rerror_rate', 'same_srv_rate', 'diff_srv_rate', 'srv_diff_host_rate', 'dst_host_count', 'dst_host_srv_count', 'dst_host_same_srv_rate', 'dst_host_diff_srv_rate', 'dst_host_same_src_port_rate', 'dst_host_srv_diff_host_rate', 'dst_host_serror_rate', 'dst_host_srv_serror_rate', 'dst_host_rerror_rate', 'dst_host_srv_rerror_rate' ] # List of all possible service values (Must be comprehensive for correct OHE alignment) # This list MUST match all services encountered during training the scaler. 
SERVICES = [ 'http', 'smtp', 'ftp_data', 'private', 'ecr_i', 'other', 'domain_u', 'finger', 'telnet', 'ftp', 'pop_3', 'courier', 'eco_i', 'imap4', 'domain_n', 'auth', 'time', 'shell', 'login', 'hostnames', 'ntp_service', 'echo', 'discard', 'systat', 'ctf', 'ssh', 'iso_tsap', 'whois', 'remote_job', 'sunrpc', 'rje', 'gopher', 'netbios_ssn', 'pm_srv', 'mtp', 'exec', 'klogin', 'kshell', 'daytime', 'message', 'icmp', 'netstat', 'Z39_50', 'bgp', 'nnsp', 'ctinrp', 'IRC', 'urp_i', 'pop_2', 'aol', 'rev_telnet', 'tftp_u' ] # List of all possible flag values FLAGS = [ 'SF', 'S0', 'REJ', 'RSTO', 'SH', 'S1', 'S2', 'RSTOS0', 'S3', 'OTH', 'RSTR' ] # List of all possible protocol types PROTOCOLS = ['tcp', 'udp', 'icmp'] # --- Define ALL Expected OHE Columns --- PROTOCOL_OHE = [f'protocol_type_{p}' for p in PROTOCOLS] FLAG_OHE = [f'flag_{f}' for f in FLAGS] SERVICE_OHE = [f'service_{s}' for s in SERVICES] # Numerical and binary columns that were NOT OHE encoded NUMERICAL_BINARY_COLS = [ 'duration', 'src_bytes', 'dst_bytes', 'land', 'wrong_fragment', 'urgent', 'hot', 'num_failed_logins', 'logged_in', 'num_compromised', 'root_shell', 'su_attempted', 'num_root', 'num_file_creations', 'num_shells', 'num_access_files', 'num_outbound_cmds', 'is_host_login', 'is_guest_login', 'count', 'srv_count', 'serror_rate', 'srv_serror_rate', 'rerror_rate', 'srv_rerror_rate', 'same_srv_rate', 'diff_srv_rate', 'srv_diff_host_rate', 'dst_host_count', 'dst_host_srv_count', 'dst_host_same_srv_rate', 'dst_host_diff_srv_rate', 'dst_host_same_src_port_rate', 'dst_host_srv_diff_host_rate', 'dst_host_serror_rate', 'dst_host_srv_serror_rate', 'dst_host_rerror_rate', 'dst_host_srv_rerror_rate' ] # The full, master list of all columns after OHE # The length of this list (minus the original three categorical columns) should be 40, # which is the number of features the CNN was trained on. 
# Master column layout after one-hot encoding, in (presumed) training order.
MASTER_OHE_COLUMNS = NUMERICAL_BINARY_COLS + PROTOCOL_OHE + SERVICE_OHE + FLAG_OHE

# Global artifacts, populated once at startup by load_artifacts().
model = None
scaler = None
label_encoder = None
# Label convention used during training: sigmoid output near 1.0 == anomaly.
MAPPING = {'normal': 0, 'anomaly': 1}
# Inverse of MAPPING: class index -> human-readable label.
# BUG FIX: the original decoded predictions with
# label_encoder.inverse_transform(), but LabelEncoder sorts its classes
# alphabetically ('anomaly' -> 0, 'normal' -> 1) -- the OPPOSITE of MAPPING --
# so every verdict was inverted. Decoding now goes through MAPPING directly.
INVERSE_MAPPING = {v: k for k, v in MAPPING.items()}


# --- Model Loading and Initialization ---
def load_artifacts():
    """Load the trained model, scaler and label encoder into module globals.

    Returns:
        bool: True when every artifact loaded successfully; False otherwise
        (diagnostic details are printed to the Space logs).
    """
    global model, scaler, label_encoder
    print("--- Starting Artifact Loading ---")

    # Fail fast with an explicit per-file listing when an artifact is absent.
    if not os.path.exists(SCALER_FILE_NAME) or not os.path.exists(H5_MODEL_FILE):
        print("CRITICAL ERROR: One or both files are missing in the current directory:")
        print(f"  Expected Scaler: {SCALER_FILE_NAME} (Exists: {os.path.exists(SCALER_FILE_NAME)})")
        print(f"  Expected Model: {H5_MODEL_FILE} (Exists: {os.path.exists(H5_MODEL_FILE)})")
        print("Please ensure both files are uploaded to the root of your Hugging Face Space.")
        return False

    # 1. Load Scaler
    try:
        scaler = joblib.load(SCALER_FILE_NAME)
        print(f"✓ Scaler loaded from {SCALER_FILE_NAME}")
    except Exception as e:
        print(f"Error loading scaler. Check file format or compatibility: {e}")
        return False

    # 2. Load Model (compile=False: inference only, no optimizer state needed)
    try:
        model = load_model(H5_MODEL_FILE, compile=False)
        print(f"✓ Model loaded from {H5_MODEL_FILE}")
    except Exception as e:
        print(f"Error loading model. Check Keras version compatibility: {e}")
        return False

    # 3. Initialize Label Encoder. Kept for interface compatibility, but note
    #    that its alphabetical class order disagrees with MAPPING (see
    #    INVERSE_MAPPING above), so predictions are decoded via MAPPING.
    label_encoder = LabelEncoder()
    label_encoder.fit(list(MAPPING.keys()))
    print("✓ Label Encoder initialized.")

    print("--- Artifact Loading Complete ---")
    return True


# Load artifacts on startup. On failure the globals stay None and
# predict_intrusion() returns a fatal-error message instead of crashing.
load_artifacts()


def _styled_html(message, color):
    """Wrap *message* in a styled HTML block for the gr.HTML output widget."""
    return (
        f"<div style='text-align:center; font-size:20px; font-weight:bold; "
        f"color:{color};'>{message}</div>"
    )


# --- Prediction Function ---
def predict_intrusion(*inputs):
    """Preprocess 41 raw connection features and classify the connection.

    Args:
        *inputs: feature values in FEATURE_NAMES order, as wired to the
            Gradio input widgets.

    Returns:
        tuple[str, str]: (HTML result message, attack probability as text,
        or "N/A" on any failure).
    """
    if model is None or scaler is None:
        # load_artifacts() failed at startup; see the Space logs.
        return _styled_html("FATAL ERROR: Model Not Loaded. See Logs for File Check.", "red"), "N/A"

    try:
        # 1. One-row DataFrame keyed by raw feature name.
        df = pd.DataFrame({name: [value] for name, value in zip(FEATURE_NAMES, inputs)})

        # 2. One-hot encode the categorical features.
        categorical_cols = ['protocol_type', 'service', 'flag']
        df = pd.get_dummies(df, columns=categorical_cols, prefix=categorical_cols)

        # 3. Re-align columns to the training layout. Reindex guarantees the
        #    exact column order and inserts zero-filled columns for every OHE
        #    value absent from this single input row. The raw categorical
        #    columns are not in MASTER_OHE_COLUMNS, so reindex also discards
        #    them -- no separate drop() is needed.
        df_aligned = df.reindex(columns=MASTER_OHE_COLUMNS, fill_value=0)
        print(f"Debug: DataFrame aligned with {df_aligned.shape[1]} columns before scaling.")

        # 4. Scale. This raises ValueError if the column count differs from
        #    the scaler's fitted feature set.
        data_scaled = scaler.transform(df_aligned)
        final_feature_count = data_scaled.shape[1]
        print(f"Debug: Scaler output size: {final_feature_count} features.")

        if final_feature_count != FEATURE_COUNT:
            # Column list mismatched against what the CNN was trained on.
            error_msg = (f"SCALER ERROR: Expected {FEATURE_COUNT} features for the model, "
                         f"but the scaled data has {final_feature_count} features.")
            print(f"CRITICAL: {error_msg}")
            return _styled_html(error_msg, "red"), "N/A"

        # 5. Reshape for the 1D CNN: (1 sample, FEATURE_COUNT features, 1 channel).
        X_processed = data_scaled.reshape(1, FEATURE_COUNT, 1)
        prediction_prob = model.predict(X_processed, verbose=0)[0][0]

        # 6. Apply the recall-optimized threshold (0.40).
        prediction_int = 1 if prediction_prob >= PREDICTION_THRESHOLD else 0

        # 7. Decode via MAPPING's inverse (NOT label_encoder -- see bug note
        #    at INVERSE_MAPPING).
        prediction_label = INVERSE_MAPPING[prediction_int].upper()

        # 8. Build the styled result message.
        if prediction_label == 'ANOMALY':
            html_output = _styled_html(
                f"🚨 ANOMALY DETECTED! (Confidence: {prediction_prob:.4f})", "red")
        else:
            html_output = _styled_html(
                f"🟢 Connection is NORMAL. (Confidence: {1 - prediction_prob:.4f})", "green")

        return html_output, f"{prediction_prob:.4f}"

    except Exception as e:
        # Surface any runtime failure in the UI instead of an opaque error.
        error_msg = f"RUNTIME ERROR during prediction: {type(e).__name__}: {str(e)}"
        print(f"CRITICAL: {error_msg}")
        return _styled_html(error_msg, "red"), "N/A"


# --- Gradio Interface Definition ---
# Input widgets in FEATURE_NAMES order: 3 categorical dropdowns and 38
# numeric/binary fields, with defaults describing a typical NORMAL connection.
input_components = [
    gr.Number(label='duration (float, sec)', value=0.0),
    gr.Dropdown(label='protocol_type', choices=PROTOCOLS, value='tcp'),
    gr.Dropdown(label='service', choices=SERVICES, value='http'),
    gr.Dropdown(label='flag', choices=FLAGS, value='SF'),
    gr.Number(label='src_bytes (int)', value=491),
    gr.Number(label='dst_bytes (int)', value=0),
    gr.Dropdown(label='land (binary)', choices=[0, 1], value=0),
    gr.Number(label='wrong_fragment (int)', value=0),
    gr.Number(label='urgent (int)', value=0),
    gr.Number(label='hot (int)', value=0),
    gr.Number(label='num_failed_logins (int)', value=0),
    gr.Dropdown(label='logged_in (binary)', choices=[0, 1], value=0),
    gr.Number(label='num_compromised (int)', value=0),
    gr.Dropdown(label='root_shell (binary)', choices=[0, 1], value=0),
    gr.Dropdown(label='su_attempted (binary)', choices=[0, 1], value=0),
    gr.Number(label='num_root (int)', value=0),
    gr.Number(label='num_file_creations (int)', value=0),
    gr.Number(label='num_shells (int)', value=0),
    gr.Number(label='num_access_files (int)', value=0),
    gr.Number(label='num_outbound_cmds (int)', value=0),
    gr.Dropdown(label='is_host_login (binary)', choices=[0, 1], value=0),
    gr.Dropdown(label='is_guest_login (binary)', choices=[0, 1], value=0),
    gr.Number(label='count (float)', value=2.0),
    gr.Number(label='srv_count (float)', value=2.0),
    gr.Number(label='serror_rate (float)', value=0.0),
    gr.Number(label='srv_serror_rate (float)', value=0.0),
    gr.Number(label='rerror_rate (float)', value=0.0),
    gr.Number(label='srv_rerror_rate (float)', value=0.0),
    gr.Number(label='same_srv_rate (float)', value=1.0),
    gr.Number(label='diff_srv_rate (float)', value=0.0),
    gr.Number(label='srv_diff_host_rate (float)', value=0.0),
    gr.Number(label='dst_host_count (float)', value=150.0),
    gr.Number(label='dst_host_srv_count (float)', value=25.0),
    gr.Number(label='dst_host_same_srv_rate (float)', value=0.17),
    gr.Number(label='dst_host_diff_srv_rate (float)', value=0.03),
    gr.Number(label='dst_host_same_src_port_rate (float)', value=0.17),
    gr.Number(label='dst_host_srv_diff_host_rate (float)', value=0.0),
    gr.Number(label='dst_host_serror_rate (float)', value=0.0),
    gr.Number(label='dst_host_srv_serror_rate (float)', value=0.0),
    gr.Number(label='dst_host_rerror_rate (float)', value=0.05),
    gr.Number(label='dst_host_srv_rerror_rate (float)', value=0.0),
]

# Output widgets: styled HTML verdict plus the raw attack probability.
output_components = [
    gr.HTML(label="Prediction Result"),
    gr.Label(label="Attack Probability"),
]

# Combine all into the Gradio interface.
iface = gr.Interface(
    fn=predict_intrusion,
    inputs=input_components,
    outputs=output_components,
    title="CNN Network Intrusion Detector (KDDCup'99)",
    description=(
        "Enter the 41 features of a network connection record to determine if it is "
        "a **Normal** connection or an **Anomaly (Attack)**. This model is a 1D Convolutional Neural Network (CNN) "
        f"optimized for high Attack Recall (using a prediction threshold of **{PREDICTION_THRESHOLD}**).\n"
        "Default values are set for a NORMAL FTP data connection."
    ),
    live=False,
    allow_flagging='never'
)

# Launch the interface (Hugging Face Spaces runs this automatically).
iface.launch()