Spaces:
Running
Running
| #================imports============== | |
| import uuid | |
| import requests | |
| import os | |
| os.environ["USER_AGENT"] = "RAG-App/1.0" | |
| from typing import Dict, List, Any | |
| from dotenv import load_dotenv | |
| from bs4 import BeautifulSoup | |
| from langchain_core.globals import set_llm_cache | |
| from langchain_core.caches import InMemoryCache | |
| from langchain_community.document_loaders import WebBaseLoader | |
| from langchain_text_splitters import RecursiveCharacterTextSplitter | |
| from langchain_huggingface import HuggingFaceEmbeddings | |
| from langchain_community.vectorstores import Weaviate | |
| from langchain_community.vectorstores import FAISS | |
| from langchain_groq import ChatGroq | |
| from langchain_core.prompts import ChatPromptTemplate,MessagesPlaceholder | |
| from langchain_classic.chains.combine_documents import create_stuff_documents_chain | |
| from langchain_classic.chains import create_retrieval_chain | |
| from langchain_core.runnables.history import RunnableWithMessageHistory | |
| from langchain_community.chat_message_histories import ChatMessageHistory | |
| from langchain_core.chat_history import BaseChatMessageHistory | |
# ================== CONFIG ==================
load_dotenv()                    # pull GROQ_API_KEY (and friends) from a local .env
set_llm_cache(InMemoryCache())   # memoize identical LLM calls for this process

# BUG FIX: os.environ["GROQ_API_KEY"] raised KeyError when the key was absent,
# so the diagnostic print below could never reach its else-branch. Use .get()
# so a missing key yields None and the check actually works.
api_key = os.environ.get("GROQ_API_KEY")
print("api chargée:" if api_key else "y'a probleme!!")
# ========== Load the source pages and split them into chunks ==========
# French Wikipedia pages used as the knowledge base.
source_urls = [
    "https://fr.wikipedia.org/wiki/%C3%89levage",
    "https://fr.wikipedia.org/wiki/La_P%C3%AAche",
]

# Explicit User-Agent header, mirroring the USER_AGENT env var set at the top.
http_headers = {"User-Agent": "RAG-App/1.0"}
web_loader = WebBaseLoader(source_urls, requests_kwargs={"headers": http_headers})
raw_documents = web_loader.load()

# ~1000-character windows with 200 characters of overlap between neighbours,
# so sentences cut at a boundary still appear whole in one of the two chunks.
text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
chunks = text_splitter.split_documents(raw_documents)
# ============ Embed the chunks and index them in FAISS ============
# Small, fast sentence-transformers model, run locally through HuggingFace.
embeddings = HuggingFaceEmbeddings(
    model_name="sentence-transformers/all-MiniLM-L6-v2"
)
vector_store = FAISS.from_documents(documents=chunks, embedding=embeddings)

# Retrieve the 3 most similar chunks for every query.
retriever = vector_store.as_retriever(
    search_type="similarity",
    search_kwargs={"k": 3},
)
# =============== LLM and prompt =================
# Deterministic answers (temperature 0.0), capped at 1200 completion tokens.
llm = ChatGroq(
    model="llama-3.3-70b-versatile",
    temperature=0.0,
    max_tokens=1200,
)

# System message fixes the assistant persona and injects the retrieved
# {context}; past turns are threaded in through the "chat_history" slot.
# FIX: repaired the broken French of the original prompt
# ("expert en dans le domaine de l'elevage" -> "expert dans le domaine de l'élevage").
prompt = ChatPromptTemplate.from_messages([
    ("system", """Tu es un assistant expert dans le domaine de l'élevage et la pêche. Réponds clairement.
Si tu ne connais pas, n'invente pas. Garde un ton amical.
Contexte :
{context}"""),
    MessagesPlaceholder(variable_name="chat_history"),
    ("human", "{input}"),
])
# ============= Retrieval chain =============
# "Stuff" strategy: every retrieved chunk is concatenated into {context}.
combine_docs_chain = create_stuff_documents_chain(llm, prompt)
rag_chain = create_retrieval_chain(retriever, combine_docs_chain)
import gradio as gr  # NOTE(review): mid-file import; consider moving to the top

# One ChatMessageHistory per session id, created lazily on first access.
store: Dict[str, BaseChatMessageHistory] = {}

def get_session_history(session_id: str) -> BaseChatMessageHistory:
    """Return the chat history for *session_id*, creating it on first use."""
    history = store.get(session_id)
    if history is None:
        history = ChatMessageHistory()
        store[session_id] = history
    return history
# ======== Chain with memory ===========
# Wraps the RAG chain so every invocation reads from, and appends to, the
# per-session history returned by get_session_history.
convers_chain = RunnableWithMessageHistory(
    rag_chain,
    get_session_history,
    input_messages_key="input",            # key of the user question in the input dict
    history_messages_key="chat_history",   # prompt slot where past messages are injected
    output_messages_key="answer"           # output field stored back as the AI reply
)
# ============= Chat callback ================
# Single global session: every browser tab shares one conversation memory.
SESSION_ID = str(uuid.uuid4())

def chat_fn(message, history):
    """Gradio callback: answer *message* with the conversational RAG chain.

    *history* is supplied by gr.ChatInterface but unused here — memory is
    handled server-side by RunnableWithMessageHistory.
    """
    run_config = {"configurable": {"session_id": SESSION_ID}}
    response = convers_chain.invoke({"input": message}, config=run_config)
    if "answer" in response:
        return response["answer"]
    return str(response)
# ================= Gradio UI and launch ====================
# FIX: corrected the user-facing French spelling "élévage" -> "élevage"
# (title, description and examples) and "Specialist" -> "Spécialiste".
demo = gr.ChatInterface(
    fn=chat_fn,
    title="🤖 RAG : Spécialiste en Science Animale 👌",
    description="Posez vos questions sur l'élevage et la pêche",
    examples=[
        "C'est quoi la pêche ?",
        "Explique l'élevage",
        "Quelle est la différence entre l'élevage et pêche ?",
    ],
)

demo.launch()