Spaces:
Sleeping
Sleeping
| import os | |
| import streamlit as st | |
| import json | |
| from datetime import datetime | |
| import google.generativeai as genai | |
| from duckduckgo_search import DDGS | |
| from dotenv import load_dotenv | |
# -----------------------------------------------------------------------------
# Environment and constants configuration
# -----------------------------------------------------------------------------
load_dotenv()

# Set your API keys in a .env file or in the Streamlit Cloud secrets.
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")

# Configure the Google generative-AI client up front; the rest of the app
# only checks GOOGLE_API_KEY to decide whether calls are possible.
if GOOGLE_API_KEY:
    try:
        genai.configure(api_key=GOOGLE_API_KEY)
    except Exception as e:
        st.error(f"Erreur de configuration de l'API Google : {e}")
else:
    st.warning("La clé API Google (GOOGLE_API_KEY) n'est pas configurée. L'application ne pourra pas fonctionner.")
# -----------------------------------------------------------------------------
# Available model definitions
# -----------------------------------------------------------------------------
# Each entry: stable API id, human-readable label, and the backing provider.
AVAILABLE_MODELS = [
    {"id": "gemini-1.5-flash-latest", "name": "Gemini 1.5 Flash (Rapide et efficace)", "provider": "google"},
    {"id": "gemini-1.5-pro-latest", "name": "Gemini 1.5 Pro (Le plus performant)", "provider": "google"},
]

DEFAULT_MODEL_ID = "gemini-1.5-flash-latest"
# -----------------------------------------------------------------------------
# Session-state initialization
# -----------------------------------------------------------------------------
def initialize_session_state():
    """Seed every session-state key the app reads, so later accesses never fail."""
    defaults = {
        "selected_model_id": DEFAULT_MODEL_ID,
        "system_message": "Vous êtes KolaChatBot, un assistant IA serviable, créatif et honnête. Répondez en français.",
        "starter_message": "Bonjour ! Je suis KolaChatBot. Comment puis-je vous aider aujourd'hui ? 🤖",
        "max_response_length": 1024,
        "temperature": 0.7,
        "top_p": 0.95,
        "enable_web_search": False,
        "last_search_results": None,
    }
    for key, value in defaults.items():
        if key not in st.session_state:
            st.session_state[key] = value
    # chat_history depends on starter_message, which is guaranteed set above.
    if "chat_history" not in st.session_state:
        st.session_state.chat_history = [
            {"role": "assistant", "content": st.session_state.starter_message, "type": "text"}
        ]

initialize_session_state()
# -----------------------------------------------------------------------------
# Conversation export helpers
# -----------------------------------------------------------------------------
def format_history_to_txt(chat_history: list[dict]) -> str:
    """Render the chat history as a plain-text transcript with a dated header."""
    stamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    parts = [f"KolaChatBot Conversation - Exporté le {stamp}\n\n"]
    for msg in chat_history:
        speaker = "Utilisateur" if msg["role"] == "user" else "KolaChatBot"
        parts.append(f"--- {speaker} ---\n{msg['content']}\n\n")
    return "".join(parts)
def format_history_to_json(chat_history: list[dict]) -> str:
    """Serialize the chat history, plus an export timestamp, to pretty JSON."""
    payload = {
        "export_date": datetime.now().isoformat(),
        "conversation": chat_history,
    }
    # ensure_ascii=False keeps accented French text readable in the file.
    return json.dumps(payload, indent=2, ensure_ascii=False)
def format_history_to_md(chat_history: list[dict]) -> str:
    """Render the chat history as Markdown, one `###` section per message."""
    stamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    parts = [f"# KolaChatBot Conversation\n*Exporté le {stamp}*\n\n"]
    for msg in chat_history:
        if msg["role"] == "user":
            icon, label = "👤", "Utilisateur"
        else:
            icon, label = "🤖", "KolaChatBot"
        parts.append(f"### {icon} {label}\n\n{msg['content']}\n\n---\n\n")
    return "".join(parts)
# -----------------------------------------------------------------------------
# Core functions (web search and API call)
# -----------------------------------------------------------------------------
def perform_web_search(query: str, num_results: int = 5) -> tuple[str, list]:
    """Run a DuckDuckGo text search and return (formatted context, raw results).

    Side effect: caches the raw results in st.session_state.last_search_results
    so the UI can attach them as sources to the next assistant message.
    On failure or empty results, returns a message string and an empty list.
    """
    st.session_state.last_search_results = None
    try:
        with DDGS() as ddgs:
            hits = list(ddgs.text(keywords=query, region='fr-fr', max_results=num_results))
        if not hits:
            return "Aucun résultat de recherche trouvé.", []
        blocks = [
            f"[Source {idx}]\nTitre: {hit.get('title', 'N/A')}\nExtrait: {hit.get('body', 'N/A')}\nURL: {hit.get('href', 'N/A')}\n\n"
            for idx, hit in enumerate(hits, start=1)
        ]
        st.session_state.last_search_results = hits
        return "".join(blocks), hits
    except Exception as e:
        return f"Erreur lors de la recherche web: {e}", []
def get_gemini_response_stream(model_id: str, system_prompt: str, chat_history_for_api: list[dict], params: dict):
    """Call the Google Gemini API and yield the response as a text stream.

    Yields text fragments as they arrive. On any failure (missing key, API
    error) it yields a single error string instead of raising, so the UI can
    display the problem inline in the chat.
    """
    if not GOOGLE_API_KEY:
        yield "Erreur: La clé API Google n'est pas configurée. Veuillez l'ajouter pour continuer."
        return
    try:
        model = genai.GenerativeModel(model_id, system_instruction=system_prompt)

        # Translate the app's history into Gemini's "contents" format:
        # any non-user message maps to the 'model' role.
        contents = [
            {"role": 'user' if msg['role'] == 'user' else 'model', "parts": [msg['content']]}
            for msg in chat_history_for_api
        ]

        config = genai.types.GenerationConfig(
            max_output_tokens=params.get("max_new_tokens"),
            temperature=params.get("temperature"),
            top_p=params.get("top_p"),
        )

        # Pass the entire conversation via 'contents' and stream the reply.
        stream = model.generate_content(
            contents=contents,
            generation_config=config,
            stream=True,
        )
        for chunk in stream:
            # Skip empty/safety-filtered chunks: .text raises when no parts exist.
            if chunk.parts:
                yield chunk.text
    except Exception as e:
        yield f"Erreur lors de l'appel à l'API Google: {e}"
# -----------------------------------------------------------------------------
# Streamlit page configuration and header
# -----------------------------------------------------------------------------
st.set_page_config(page_title="KolaChatBot IA", page_icon="🤖", layout="wide")
st.title("🤖 KolaChatBot IA")

# Look up the display name of the selected model. Guard against a stale
# selected_model_id left in session state (e.g. after the model list changes):
# the original code subscripted the `next(..., None)` result unguarded and
# would crash with "'NoneType' object is not subscriptable".
selected_model_info = next((m for m in AVAILABLE_MODELS if m['id'] == st.session_state.selected_model_id), None)
model_label = selected_model_info['name'] if selected_model_info else st.session_state.selected_model_id
st.markdown(f"*Modèle actuel : **{model_label}***")
with st.sidebar:
    st.header("🛠️ Configuration")

    # --- Model selection ---------------------------------------------------
    st.subheader("🧠 Sélection du Modèle")
    model_options = {model['id']: model['name'] for model in AVAILABLE_MODELS}

    def on_model_change():
        # Switching models invalidates the running context, so restart the chat.
        st.session_state.chat_history = [{"role": "assistant", "content": st.session_state.starter_message, "type": "text"}]
        st.toast("Modèle changé. Conversation réinitialisée.")

    st.selectbox(
        "Choisir le modèle :",
        options=list(model_options.keys()),
        format_func=lambda model_id: model_options[model_id],
        key="selected_model_id",
        on_change=on_model_change,
        help="Changer de modèle démarre une nouvelle conversation."
    )
    if not GOOGLE_API_KEY:
        st.error("❌ Clé API Google manquante.")

    # --- Generation parameters --------------------------------------------
    st.subheader("⚙️ Paramètres de Génération")
    with st.expander("Ajuster les paramètres", expanded=False):
        st.slider("Max Tokens", 128, 8192, key="max_response_length", step=128)
        st.slider("Température", 0.0, 2.0, key="temperature", step=0.05)
        st.slider("Top-P", 0.0, 1.0, key="top_p", step=0.05)

    # --- Persona and welcome message ---------------------------------------
    st.subheader("👤 Personnalisation")
    st.text_area("Message Système / Personnalité", height=100, key="system_message")
    st.text_area("Message de bienvenue", height=100, key="starter_message")

    # --- Web search toggle (RAG) -------------------------------------------
    st.subheader("🌐 Recherche Web (RAG)")
    st.checkbox("Activer la recherche web", key="enable_web_search")

    # --- Conversation management -------------------------------------------
    st.subheader("🔄 Gestion")
    col1, col2 = st.columns(2)
    if col1.button("♻️ Nouvelle Conv.", use_container_width=True):
        st.session_state.chat_history = [{"role": "assistant", "content": st.session_state.starter_message, "type": "text"}]
        st.toast("Nouvelle conversation démarrée.")
        st.rerun()
    if col2.button("🗑️ Effacer", type="primary", use_container_width=True):
        st.session_state.chat_history = [{"role": "assistant", "content": st.session_state.starter_message, "type": "text"}]
        st.toast("Conversation effacée.")
        st.rerun()

    # --- Export ------------------------------------------------------------
    st.subheader("📥 Exporter")
    if len(st.session_state.chat_history) > 1:
        ts = datetime.now().strftime("%Y%m%d_%H%M")
        st.download_button("TXT", format_history_to_txt(st.session_state.chat_history), f"kolachat_{ts}.txt")
        st.download_button("JSON", format_history_to_json(st.session_state.chat_history), f"kolachat_{ts}.json")
        st.download_button("Markdown", format_history_to_md(st.session_state.chat_history), f"kolachat_{ts}.md")
    else:
        st.caption("Conversation vide.")

    st.divider()
    st.markdown("""
**Auteur :** Sidoine K. YEBADOKPO
*Expert en Analyse de Données*
📧 syebadokpo@gmail.com
📞 +229 96 91 13 46
""")
# -----------------------------------------------------------------------------
# Main chat interface
# -----------------------------------------------------------------------------
# Render the conversation so far, including any web sources cached on a message.
for message in st.session_state.chat_history:
    is_user = message["role"] == "user"
    with st.chat_message(message["role"], avatar="👤" if is_user else "🤖"):
        st.markdown(message["content"])
        sources = message.get("sources")
        if sources:
            with st.expander("Sources web consultées", expanded=False):
                for i, source in enumerate(sources):
                    st.markdown(f"**{i+1}. {source.get('title', 'Titre inconnu')}**\n"
                                f"[*Source*]({source.get('href', '#')})\n"
                                f"> {source.get('body', 'Aucun extrait.')}\n---")
# Handle a new user turn: optional RAG web search, then a streamed model reply.
if prompt := st.chat_input("Envoyer un message...", disabled=not GOOGLE_API_KEY):
    st.session_state.chat_history.append({"role": "user", "content": prompt, "type": "text"})
    with st.chat_message("user", avatar="👤"):
        st.markdown(prompt)

    with st.chat_message("assistant", avatar="🤖"):
        # Shallow copy: the list is independent but the message dicts are
        # shared with st.session_state.chat_history, so they must never be
        # mutated in place (see the RAG substitution below).
        history_for_api = st.session_state.chat_history.copy()

        if st.session_state.enable_web_search:
            with st.spinner("KolaChatBot recherche sur le web..."):
                search_context, sources = perform_web_search(prompt)
            if sources:
                rag_prompt = (
                    "En te basant STRICTEMENT sur les informations suivantes, réponds à la question. "
                    "Cite tes sources en utilisant le format [Source X] après chaque phrase concernée.\n\n"
                    f"--- CONTEXTE ---\n{search_context}\n--- FIN DU CONTEXTE ---\n\n"
                    f"Question : {prompt}"
                )
                # BUG FIX: the original did `history_for_api[-1]['content'] = rag_prompt`,
                # which mutated the dict shared with chat_history and replaced the
                # user's visible message with the internal RAG prompt on the next
                # rerun. Replace the element with a fresh dict instead.
                history_for_api[-1] = {**history_for_api[-1], "content": rag_prompt}
            else:
                st.toast("La recherche web n'a pas fourni de résultats.")

        params = {
            "max_new_tokens": st.session_state.max_response_length,
            "temperature": st.session_state.temperature,
            "top_p": st.session_state.top_p,
        }

        # write_stream renders the generator incrementally and returns the
        # full concatenated text once the stream finishes.
        response_content = st.write_stream(get_gemini_response_stream(
            st.session_state.selected_model_id,
            st.session_state.system_message,
            history_for_api,
            params
        ))

        assistant_message = {"role": "assistant", "content": response_content, "type": "text"}
        if st.session_state.get('last_search_results'):
            assistant_message["sources"] = st.session_state.last_search_results
            st.session_state.last_search_results = None
        st.session_state.chat_history.append(assistant_message)

        # Rerun so the sources expander is rendered by the history loop above.
        if "sources" in assistant_message:
            st.rerun()