| # 1️⃣ Imports | |
| import pandas as pd | |
| from langchain_core.documents import Document | |
| import os | |
| from dotenv import load_dotenv | |
| from functions.data_to_vectors import create_vectorstore | |
| from functions.llm_comm import llm_communication | |
# --- Load environment variables ---
load_dotenv()

# Mirror the Groq key into the process environment only when it is actually
# set: os.environ values must be strings, so assigning the None returned by
# os.getenv() for a missing variable would raise TypeError at import time.
_groq_api_key = os.getenv("GROQ_API_KEY")
if _groq_api_key is not None:
    os.environ["GROQ_API_KEY"] = _groq_api_key
def rag(file, question):
    """Answer *question* over the rows of a CSV file using RAG.

    Reads the CSV into a DataFrame, turns each row into a Document,
    indexes them into a Chroma vector store, and runs the question
    through a retrieval chain.

    Args:
        file: Path or file-like object accepted by ``pd.read_csv``.
        question: The user's query; when falsy, no query is run.

    Returns:
        The chain's answer string, or ``None`` when *question* is falsy.
    """
    print("📊 Processing CSV File...")
    frame = pd.read_csv(file)
    print("Extracted DataFrame:")
    print(frame)
    print(f"Chat Input: {question}")

    # One Document per CSV row, with the row rendered as plain text.
    docs = []
    for _, record in frame.iterrows():
        docs.append(Document(page_content=record.to_string()))

    # NOTE(review): str(docs) hands create_vectorstore the repr of the whole
    # list as a single string — confirm it really expects that rather than
    # the list of Documents itself.
    store = create_vectorstore(str(docs), "csv_store_chroma")
    chain = llm_communication(store)

    # Guard clause: nothing to ask, so mirror the original's implicit None.
    if not question:
        return None

    result = chain.invoke({"input": question})
    print(f"🧠 {question} {result['answer']}\n")
    return result["answer"]