DasariHarshitha commited on
Commit
b7e4816
·
verified ·
1 Parent(s): 6d8171b

Delete app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -99
app.py DELETED
@@ -1,99 +0,0 @@
1
- import streamlit as st
2
- import pandas as pd
3
- import numpy as np
4
- from sklearn.model_selection import train_test_split
5
- from sklearn.preprocessing import LabelEncoder, StandardScaler, OneHotEncoder
6
- from sklearn.compose import ColumnTransformer
7
- from tensorflow.keras.models import Sequential
8
- from tensorflow.keras.layers import Input, Dense
9
- from sklearn.metrics import classification_report, confusion_matrix
10
- import matplotlib.pyplot as plt
11
- import seaborn as sns
12
-
13
# --- Data loading --------------------------------------------------------
@st.cache_data
def load_data():
    """Load the 2015-2024 global cybersecurity threats dataset.

    Cached with ``st.cache_data`` so the CSV is read once per session
    instead of on every Streamlit rerun.
    """
    dataset = pd.read_csv("Global_Cybersecurity_Threats_2015-2024.csv")
    return dataset
17
df = load_data()
st.title("Cybersecurity Attack Type Classifier")

st.write("### Data Preview", df.head())

# --- Feature definitions -------------------------------------------------
# Target column plus the categorical / numeric feature split used below.
target = 'Attack Type'

cat_features = [
    'Country',
    'Target Industry',
    'Attack Source',
    'Security Vulnerability Type',
    'Defense Mechanism Used'
]

num_features = [
    'Year',
    'Financial Loss (in Million $)',
    'Number of Affected Users',
    'Incident Resolution Time (in Hours)'
]

# --- Preprocessing -------------------------------------------------------
X = df.drop(columns=[target])
y = df[target]

# Scale numerics, one-hot categoricals; handle_unknown='ignore' keeps
# transform() from failing on categories unseen during fit.
preprocessor = ColumnTransformer(
    transformers=[
        ('num', StandardScaler(), num_features),
        ('cat', OneHotEncoder(handle_unknown='ignore'), cat_features)
    ]
)

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=27)
# Fit the preprocessor on training data only to avoid test-set leakage.
X_train = preprocessor.fit_transform(X_train)
X_test = preprocessor.transform(X_test)

# BUG FIX: fit the label encoder on the FULL label column, not y_train.
# Fitting on y_train alone makes le.transform(y_test) raise
# "y contains previously unseen labels" whenever a class happens to land
# only in the test split.
le = LabelEncoder()
le.fit(y)
y_train = le.transform(y_train)
y_test = le.transform(y_test)

# --- Model settings (sidebar) --------------------------------------------
st.sidebar.header("Model Parameters")
epochs = st.sidebar.slider("Epochs", 5, 100, 30)
batch_size = st.sidebar.selectbox("Batch Size", [8, 16, 32, 64], index=1)

# --- Build model ----------------------------------------------------------
# Small MLP; output width = number of encoded classes, softmax +
# sparse_categorical_crossentropy matches the integer-encoded labels.
model = Sequential()
model.add(Input(shape=(X_train.shape[1],)))
model.add(Dense(16, activation='relu'))
model.add(Dense(32, activation='relu'))
model.add(Dense(len(le.classes_), activation='softmax'))
model.compile(loss='sparse_categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

# --- Train / evaluate on demand -------------------------------------------
# NOTE(review): Streamlit reruns the whole script on every interaction, so
# training results are not persisted across reruns; this mirrors the
# original behavior.
if st.button("Train Model"):
    history = model.fit(X_train, y_train, validation_split=0.2, epochs=epochs, batch_size=batch_size, verbose=0)
    st.success("Model training complete.")

    # Plot training/validation loss curves.
    st.subheader("Training History")
    fig, ax = plt.subplots()
    ax.plot(history.history['loss'], label='Training Loss')
    ax.plot(history.history['val_loss'], label='Validation Loss')
    ax.legend()
    st.pyplot(fig)

    # Evaluate on the held-out split.
    st.subheader("Evaluation")
    test_loss, test_acc = model.evaluate(X_test, y_test, verbose=0)
    st.write(f"Test Accuracy: **{test_acc:.4f}**")

    y_pred = np.argmax(model.predict(X_test), axis=1)
    # Pass explicit labels so the matrix always has one row/column per
    # known class, and label the axes with class names instead of bare
    # integer indices.
    class_ids = np.arange(len(le.classes_))
    cm = confusion_matrix(y_test, y_pred, labels=class_ids)
    st.write("### Confusion Matrix")
    fig, ax = plt.subplots()
    sns.heatmap(cm, annot=True, fmt="d", cmap="Blues", ax=ax,
                xticklabels=le.classes_, yticklabels=le.classes_)
    st.pyplot(fig)

    # ROBUSTNESS FIX: explicit labels= keeps target_names aligned even when
    # the test split / predictions do not cover every class (otherwise
    # classification_report raises a length-mismatch error);
    # zero_division=0 silences undefined-metric warnings for such classes.
    report = classification_report(y_test, y_pred, labels=class_ids,
                                   target_names=le.classes_,
                                   output_dict=True, zero_division=0)
    st.write("### Classification Report")
    st.dataframe(pd.DataFrame(report).transpose())