Spaces:
Sleeping
Sleeping
| from langchain_google_genai import GoogleGenerativeAIEmbeddings | |
| from langchain_chroma import Chroma # Utilisation de la nouvelle importation | |
| from langchain_core.prompts import PromptTemplate | |
| from langchain_core.runnables import RunnablePassthrough | |
| from langchain_core.output_parsers import StrOutputParser | |
| from langchain_groq import ChatGroq | |
| from dotenv import load_dotenv | |
| import os | |
# Load environment variables from the local .env file.
load_dotenv()

# Fail fast at startup if a required API key is missing.
_REQUIRED_KEYS = {
    "GROQ_API_KEY": "La clé API Groq n'est pas définie dans le fichier .env",
    "GOOGLE_API_KEY": "La clé API GEMINI n'est pas définie dans le fichier .env",
}
for _env_name, _error_message in _REQUIRED_KEYS.items():
    if not os.getenv(_env_name):
        raise ValueError(_error_message)
# Initialize the Groq-hosted chat model used to generate the quizzes.
# temperature=0 keeps the quiz output deterministic and format-stable.
llm = ChatGroq(
    model="llama-3.1-8b-instant",
    temperature=0,
    api_key=os.getenv("GROQ_API_KEY")
)

# Initialize the Google embedding model used to query the vector store.
# BUG FIX: the startup validation checks GOOGLE_API_KEY, but this call
# previously read GEMINI_API_KEY — an unvalidated variable that may be
# unset, silently passing api_key=None. Read the validated variable.
embeddings = GoogleGenerativeAIEmbeddings(
    model="gemini-embedding-001",
    api_key=os.getenv("GOOGLE_API_KEY")
)
# Resolve and validate the on-disk Chroma database location.
CHROMA_PATH = os.path.abspath("./chroma_db")
if not os.path.exists(CHROMA_PATH):
    raise FileNotFoundError(f"Le dossier de la base de données Chroma est introuvable à l'emplacement : {CHROMA_PATH}")

print(f"Chargement de la base de données depuis : {CHROMA_PATH}")
try:
    vectorstore = Chroma(
        persist_directory=CHROMA_PATH,
        embedding_function=embeddings
    )
    # Sanity check: the persisted collection must contain documents.
    # NOTE(review): `_collection` is a private attribute of the Chroma
    # wrapper — confirm there is no public count API in the pinned version.
    document_count = vectorstore._collection.count()
    print(f"Nombre de documents dans la collection : {document_count}")
    if not document_count:
        raise ValueError("La base de données Chroma est vide")
except Exception as e:
    # Report the failure for the operator, then let it propagate.
    print(f"Erreur lors du chargement de la base de données : {str(e)}")
    raise
# Similarity-search retriever returning the top 5 matching chunks.
retriever = vectorstore.as_retriever(search_type="similarity", search_kwargs={"k": 5})
# Prompt template for the quiz-generation chain. {question} receives the
# user's topic and {context} the retrieved course chunks (see the chain
# construction below). The strict output format is relied on downstream,
# so the template text must not be reworded casually.
rag_template = """You are an expert quiz creator. Based on the provided course material, create a multiple-choice quiz.
GUIDELINES:
1. Generate 5 multiple-choice questions about: {question}
2. Each question should have 4 possible answers (A, B, C, D)
3. Only one answer should be correct
4. Questions should test understanding, not just recall
5. Base questions ONLY on the provided context
6. If the context doesn't contain enough information, say "Insufficient context to generate this question"
7. After each correct answer, provide a clear and concise explanation based strictly on the context
8. Do NOT use external knowledge
FORMAT STRICTLY like this:
Q1. [Question text]
A) [Option A]
B) [Option B]
C) [Option C]
D) [Option D]
Answer: [Correct letter]
Explanation: [2-4 sentence explanation referencing the context]
CONTEXT:
{context}
QUIZ:"""
# Build the PromptTemplate; input variables are inferred from {question}/{context}.
rag_prompt = PromptTemplate.from_template(rag_template)
def format_docs(docs):
    """Concatenate retrieved document texts, separated by blank lines."""
    texts = [document.page_content for document in docs]
    return "\n\n".join(texts)
# LCEL pipeline: the dict step fans the input question out in parallel —
# "context" runs the retriever and joins the hits into one string, while
# "question" passes the raw input through unchanged. The merged dict then
# fills the prompt, the LLM generates, and the parser returns plain text.
rag_chain = (
    {
        "context": retriever | format_docs,
        "question": RunnablePassthrough()
    }
    | rag_prompt
    | llm
    | StrOutputParser()
)
# Interactive CLI: stream a generated quiz for each user-supplied topic.
if __name__ == "__main__":
    try:
        while True:
            # Strip whitespace so "quit " / " quit" also exit (the original
            # compared the raw string and looped forever on padded input).
            question = input("\nEntrez votre question (ou 'quit' pour quitter) : ").strip()
            if question.lower() == 'quit':
                break
            if not question:
                # Ignore blank input instead of querying with an empty topic.
                continue
            print("\n" + "="*50)
            print(f"Recherche pour : {question}")
            print("="*50 + "\n")
            # Stream tokens as they arrive so the user sees output immediately.
            for chunk in rag_chain.stream(question):
                print(chunk, end="", flush=True)
            print()  # terminate the streamed answer with a newline
    except (KeyboardInterrupt, EOFError):
        # Ctrl+C or closed stdin (Ctrl+D / exhausted pipe): clean shutdown —
        # previously EOFError fell into the generic error branch below.
        print("\n\nArrêt du programme...")
    except Exception as e:
        print(f"\nUne erreur est survenue : {str(e)}")