# signal / app.py — Streamlit app (Hugging Face Space, user: mayss14)
# NOTE(review): the original lines here were Hugging Face web-UI residue
# ("appc.py", "48036c3 verified", "raw", "history blame", "4.39 kB") pasted
# into the source; commented out so the file is valid Python.
import streamlit as st
import time
import pandas as pd
from utils.preprocess import load_and_preprocess_dataset
from model.cnn_model import CNNModel
from model.sca import CustomSCA
# --- Page configuration ---
st.set_page_config(
    page_title="Analyse CSI - Streamlit",
    layout="wide",
)

# --- Title ---
st.title("📶 Analyse CSI avec Streamlit sur Hugging Face")

# --- Sidebar: preprocessing parameters and trigger ---
with st.sidebar:
    st.header("Paramètres")
    data_path = st.text_input(
        "Chemin des données",
        value="data/Dataset/room1/1",
    )
    normalization = st.radio(
        "Type de normalisation",
        ["standard", "minmax"],
        index=0,
    )
    # Run preprocessing and cache the result in the session so it survives reruns.
    if st.button("Lancer le prétraitement", type="primary"):
        with st.spinner("Traitement en cours..."):
            try:
                t0 = time.time()
                frame = load_and_preprocess_dataset(data_path, normalization)
                st.session_state['processed_df'] = frame
                st.session_state['process_time'] = time.time() - t0
                st.success("Prétraitement réussi!")
            except Exception as e:
                st.error(f"Erreur : {str(e)}")

# --- Results: shown only once preprocessing has populated the session ---
if 'processed_df' in st.session_state:
    frame = st.session_state['processed_df']
    rows, cols = frame.shape[0], frame.shape[1]
    st.write(f"**Temps de traitement :** {st.session_state['process_time']:.2f}s")
    st.write(f"**Dimensions :** {rows} lignes, {cols} colonnes")

    data_tab, stats_tab = st.tabs(["Données", "Statistiques"])
    with data_tab:
        st.dataframe(frame)
    with stats_tab:
        st.write(frame.describe())

    # CSV export of the processed dataset.
    csv_bytes = frame.to_csv(index=False).encode('utf-8')
    st.download_button(
        "Télécharger CSV",
        csv_bytes,
        "csi_processed.csv",
        "text/csv",
    )
else:
    st.info("Configurez les paramètres et lancez le prétraitement")
import numpy as np

# ---------------------------------------------------------------------------
# CNN hyper-parameter optimisation via the Sine Cosine Algorithm (SCA).
# FIX(review): this section originally re-imported `streamlit` and pulled
# CNNModel/CustomSCA from a non-existent `models` package, shadowing the
# `model.cnn_model` / `model.sca` imports at the top of the file. The
# duplicate/conflicting imports were removed; the top-level imports are used.
# ---------------------------------------------------------------------------
st.markdown("---")
st.subheader("🔧 Optimisation du modèle CNN via SCA")

if st.button("Optimiser"):
    if 'processed_df' not in st.session_state:
        # Guard: without this the session lookup raised an opaque KeyError
        # when the user clicked "Optimiser" before running preprocessing.
        st.warning("Lancez d'abord le prétraitement des données.")
    else:
        try:
            df = st.session_state['processed_df']
            # Convention: last column is the label, all others are features.
            X_train = np.array(df.iloc[:, :-1])
            y_train = np.array(df.iloc[:, -1])
            # Reshape for the 1-D CNN: (batch, time, features).
            X_train = X_train.reshape((X_train.shape[0], X_train.shape[1], 1))

            def objective_function(solution):
                """Fitness for SCA: 1 - validation accuracy (SCA minimises).

                `solution` is a 5-vector: [filter1, filter2, filter3,
                learning_rate, dropout] within `bounds` below.
                """
                params = {
                    'filter1': int(solution[0]),
                    'filter2': int(solution[1]),
                    'filter3': int(solution[2]),
                    'learning_rate': solution[3],
                    'dropout': solution[4],
                }
                model = CNNModel(
                    input_shape=(X_train.shape[1], 1),
                    num_classes=len(np.unique(y_train)),
                )
                # Short training run (3 epochs) keeps each fitness
                # evaluation cheap during the search.
                _, val_acc = model.train(X_train, y_train, params, epochs=3)
                return 1 - val_acc

            # Search space: (min, max) per hyper-parameter.
            bounds = [
                (32, 256),       # filter1
                (64, 512),       # filter2
                (128, 1024),     # filter3
                (0.0001, 0.01),  # learning_rate
                (0.1, 0.7),      # dropout
            ]
            sca = CustomSCA(obj_func=objective_function, bounds=bounds)
            result = sca.optimize()

            st.success("Optimisation terminée ✅")
            st.write("### Meilleurs hyperparamètres trouvés :")
            st.json({
                'Filtres': [int(result['best_solution'][0]),
                            int(result['best_solution'][1]),
                            int(result['best_solution'][2])],
                'Learning Rate': result['best_solution'][3],
                'Dropout': result['best_solution'][4],
                'Précision validation': f"{1 - result['best_fitness']:.2%}",
            })
            st.line_chart({'Erreur': result['convergence_curve']})
        except Exception as e:
            st.error(f"Erreur pendant l'optimisation : {e}")