# Coach Code Python — Streamlit application (Hugging Face Spaces deployment)
import streamlit as st
import os
import sys
import subprocess
import json
import pandas as pd
import plotly.express as px
import shutil
import tempfile
from pathlib import Path

# --- CONFIGURATION ---
st.set_page_config(page_title="Coach Code Python", layout="wide")

# Pedagogical mapping: Ruff rule-code prefix letter -> French display label.
# Used by the statistics tab to group diagnostics into human-readable families.
RUFF_CAT_MAP = {
    "F": "Erreurs Logiques (F)",
    "E": "Style PEP8 (E)",
    "W": "Avertissements (W)",
    "I": "Tri des Imports (I)",
    "B": "Bugs Potentiels (B)",
    "UP": "Modernisation (UP)",
    "N": "Nommage (N)",
    "D": "Documentation (D)",
    "ANN": "Annotations de type (ANN)",
    "T": "Tests & Debug (T)",
    "A": "Built-ins (A)"
}
def remove_excessive_blank_lines(code):
    """Collapse every run of consecutive blank lines down to a single one.

    A line counts as blank when it is empty or whitespace-only. The first
    blank line of each run is kept verbatim (including its whitespace);
    the rest of the run is dropped.
    """
    kept = []
    previous_was_blank = False
    for raw_line in code.split('\n'):
        is_blank = not raw_line.strip()
        if not (is_blank and previous_was_blank):
            kept.append(raw_line)
        previous_was_blank = is_blank
    return '\n'.join(kept)
def run_ruff(paths, fix=False, disable_docs=False, compact=False, unsafe=False):
    """Run a Ruff check (and optionally auto-fix + format) over *paths*.

    Parameters:
        paths: iterable of file/directory path strings to analyse.
        fix: when True, apply Ruff's auto-fixes, then run ``ruff format``.
        disable_docs: ignore the missing-docstring rules (D1xx).
        compact: ignore blank-line / import-layout style rules so the
            corrected code stays compact.
        unsafe: also request Ruff's "unsafe" (modernisation) fixes.

    Returns:
        The list of diagnostics parsed from Ruff's JSON output, or []
        when Ruff produced no parseable output.
    """
    ignore_list = []
    if compact:
        # Blank-line / layout rules that would re-insert vertical whitespace.
        ignore_list.extend(["E302", "E303", "E305", "E301", "E401", "W391"])
    if disable_docs:
        # Missing-docstring rules.
        ignore_list.extend(["D100", "D101", "D102", "D103", "D104", "D107"])

    cmd_check = [
        sys.executable, "-m", "ruff", "check", *paths,
        "--output-format", "json",
        "--select", "ALL",
        "--isolated", "--no-cache",
    ]
    if ignore_list:
        cmd_check.extend(["--ignore", ",".join(ignore_list)])
    if fix:
        cmd_check.append("--fix")
    if unsafe:
        cmd_check.append("--unsafe-fixes")

    # A full-path argv list never needs the shell; shell=True with a list was
    # fragile on Windows and a command-injection hazard with crafted paths.
    result = subprocess.run(cmd_check, capture_output=True, text=True, encoding="utf-8")

    if fix:
        # After auto-fixing, normalise the layout with the Ruff formatter.
        cmd_format = [sys.executable, "-m", "ruff", "format", *paths, "--isolated"]
        subprocess.run(cmd_format, capture_output=True)

    try:
        return json.loads(result.stdout) if result.stdout.strip() else []
    except json.JSONDecodeError:
        # Ruff crashed or emitted non-JSON (usage error): report no findings
        # rather than breaking the UI.
        return []
def get_stats(paths):
    """Return ``(total_size_bytes, total_line_count)`` over *paths*.

    Each entry may be a single file or a directory; directories are scanned
    recursively for ``*.py`` files. Unreadable or vanished files are skipped
    (best-effort accounting for the before/after metrics).
    """
    total_size, total_lines = 0, 0
    for p in paths:
        path_obj = Path(p)
        files = [path_obj] if path_obj.is_file() else path_obj.rglob("*.py")
        for f in files:
            try:
                content = f.read_text(errors='ignore')
                total_size += f.stat().st_size
                total_lines += len(content.splitlines())
            except OSError:
                # Narrowed from a bare except: only skip genuine I/O failures,
                # never swallow KeyboardInterrupt/SystemExit.
                continue
    return total_size, total_lines
# --- SIDEBAR ---
# Upload source selection and correction options. Widget declaration order
# is the rendered order, so do not reorder these calls.
with st.sidebar:
    st.title("🛡️ Configuration")
    # Single-file mode additionally enables the before/after comparison tab.
    mode = st.radio("Source :", ["Fichier unique", "Plusieurs fichiers"], index=0)
    uploaded_files = []
    if mode == "Fichier unique":
        f = st.file_uploader("Fichier .py", type="py")
        if f: uploaded_files = [f]
    else:
        f_list = st.file_uploader("Sélectionner fichiers", type="py", accept_multiple_files=True)
        if f_list: uploaded_files = f_list
    st.subheader("🛠️ Options de correction")
    # These flags map directly onto run_ruff()'s disable_docs / compact /
    # unsafe parameters; compact also triggers remove_excessive_blank_lines.
    opt_docs = st.checkbox("Désactiver l'obligation des commentaires", value=True)
    opt_compact = st.checkbox("Garder le code compact (sans espaces excessifs)", value=True)
    opt_unsafe = st.checkbox("Activer les corrections forcées (modernisation)", value=True)
    #st.divider()
    btn_analyze = st.button("🚀 Analyser & Corriger", use_container_width=True)
# --- MAIN AREA ---
# Pipeline: save uploads to a temp workspace -> analyse (Ruff) -> auto-fix ->
# compute before/after metrics -> render stats, report and comparison tabs.
st.title("🐍 Coach Code Python")

if btn_analyze and uploaded_files:
    with tempfile.TemporaryDirectory() as temp_dir:
        temp_workspace = Path(temp_dir)
        work_paths, code_before, code_after = [], "", ""
        try:
            # Materialise the uploads as real files so Ruff can process them.
            for uploaded_file in uploaded_files:
                file_path = temp_workspace / uploaded_file.name
                file_data = uploaded_file.getvalue().decode("utf-8", errors="ignore")
                if mode == "Fichier unique":
                    code_before = file_data
                file_path.write_text(file_data, encoding="utf-8")
                work_paths.append(str(file_path))

            s_init, l_init = get_stats(work_paths)

            # First pass: collect diagnostics without modifying the files.
            errors = run_ruff(work_paths, fix=False, disable_docs=opt_docs, compact=opt_compact, unsafe=opt_unsafe)
            # Second pass: apply fixes and formatting.
            run_ruff(work_paths, fix=True, disable_docs=opt_docs, compact=opt_compact, unsafe=opt_unsafe)

            if opt_compact:
                # The formatter may reinsert blank lines; squeeze them back.
                for path in work_paths:
                    file_path = Path(path)
                    content = file_path.read_text(encoding="utf-8")
                    cleaned = remove_excessive_blank_lines(content)
                    file_path.write_text(cleaned, encoding="utf-8")

            s_after, l_after = get_stats(work_paths)
            if mode == "Fichier unique":
                code_after = Path(work_paths[0]).read_text(encoding="utf-8")

            nb_err = len(errors)
            # Naive quality score: errors per initial line, clamped to [0, 100].
            score = max(0, min(100, 100 - (nb_err / (l_init if l_init > 0 else 1)) * 100))

            # Choose metric labels according to the sign of each delta so the
            # displayed value is always non-negative.
            if l_init - l_after > 0:
                titre_lignes = "Lignes retirées"
                comptage_diff_lignes = l_init - l_after
            else:
                titre_lignes = "Lignes ajoutées"
                comptage_diff_lignes = l_after - l_init
            if (s_init - s_after) > 0:
                titre_poids = "Réduction du poids du fichier"
                diff_poids = s_init - s_after
            else:
                titre_poids = "Hausse du poids du fichier"
                diff_poids = s_after - s_init

            m1, m2, m3, m4 = st.columns(4)
            m1.metric("Qualité du Code", f"{score:.1f}/100")
            m2.metric("Points corrigés", nb_err)
            m3.metric(titre_lignes, comptage_diff_lignes)
            m4.metric(titre_poids, f"{diff_poids} octets")

            # The comparison tab only makes sense for a single file.
            tabs = st.tabs(["📊 Statistiques", "📜 Rapport", "🔍 Comparatif"] if mode == "Fichier unique" else ["📊 Statistiques", "📜 Rapport"])

            with tabs[0]:
                if nb_err > 0:
                    df = pd.DataFrame(errors)
                    # The first character of a Ruff code is its family letter,
                    # which keys into RUFF_CAT_MAP (multi-letter prefixes such
                    # as "UP"/"ANN" fall into the "Autre" bucket here).
                    df['Cat_Code'] = df['code'].str[0]
                    df['Catégorie'] = df['Cat_Code'].map(lambda x: RUFF_CAT_MAP.get(x, f"Autre ({x})"))
                    c1, c2 = st.columns(2)
                    with c1:
                        counts = df['code'].value_counts().reset_index().sort_values('count', ascending=True)
                        fig = px.bar(counts, x='count', y='code', orientation='h',
                                     title="Fréquence par code d'erreur",
                                     color='count', color_continuous_scale='Blues')
                        st.plotly_chart(fig, use_container_width=True)
                    with c2:
                        cat_counts = df['Catégorie'].value_counts().reset_index().sort_values('count', ascending=False)
                        fig2 = px.bar(cat_counts, x='count', y='Catégorie', orientation='h',
                                      title="Problèmes par famille",
                                      color='Catégorie', color_discrete_sequence=px.colors.qualitative.G10)
                        fig2.update_layout(showlegend=False, yaxis={'categoryorder': 'total ascending'})
                        st.plotly_chart(fig2, use_container_width=True)
                else:
                    st.success("✨ Félicitations ! Ruff n'a trouvé aucune erreur.")

            with tabs[1]:
                if nb_err > 0:
                    # Flatten Ruff's JSON diagnostics into a display table.
                    report_data = []
                    for err in errors:
                        filename = Path(err['filename']).name
                        line = err['location']['row']
                        col = err['location']['column']
                        report_data.append({
                            "Code": err['code'],
                            "Message": err['message'],
                            # Bug fix: the location previously hard-coded an
                            # "(unknown)" placeholder and ignored `filename`.
                            "Localisation": f"{filename} (L:{line}, C:{col})"
                        })
                    st.dataframe(pd.DataFrame(report_data), use_container_width=True)

            if mode == "Fichier unique":
                with tabs[2]:
                    col1, col2 = st.columns(2)
                    col1.subheader("Version Originale")
                    col1.code(code_before, language="python")
                    col2.subheader("Version Corrigée")
                    col2.code(code_after, language="python")
        except Exception as e:
            # Top-level boundary: surface the failure in the UI instead of
            # crashing the Streamlit script run.
            st.error(f"Erreur : {e}")

# Defensive cleanup in case Ruff ever drops a cache directory in the CWD
# (the commands run with --no-cache / --isolated, so normally it won't).
if Path(".ruff_cache").exists():
    shutil.rmtree(".ruff_cache")