# HuggingFace Spaces entry-point script.
# (Removed "Spaces: / Sleeping / Sleeping" page-scrape residue that is not valid Python.)
"""Application entry point: build a RAG-backed ReAct agent and launch the UI.

Selects an LLM backend via ``MODEL`` ("openai" or "gemini"), builds a
retrieval-augmented chain over the student-handbook PDF, wires that chain
into the agent's tools, wraps the agent in a state graph, and starts the
interface from ``src.interface.ui``.
"""

from src.config.env import OPENAI_API_KEY, GOOGLE_API_KEY, SERPER_API_KEY
from src.interface.ui import create_interface
from src.state import build_graph_with_callable
from src.chains.qa_chain_openai import build_openai_rag_chain_and_llm
from src.chains.qa_chain_gemini import build_gemini_rag_chain
from src.agents.prompts import CHARACTER_PROMPT
from src.agents.tools import get_tools
from langchain.agents import create_react_agent, AgentExecutor
from langchain.memory import ConversationBufferWindowMemory

# === Choose your backend model ===
MODEL = "openai"  # Change to "gemini" to use Google Gemini
PDF_PATH = "resources/22_studenthandbook-22-23_f2.pdf"

# === Build chain based on chosen model ===
if MODEL == "openai":
    rag_chain, llm = build_openai_rag_chain_and_llm(PDF_PATH)
elif MODEL == "gemini":
    rag_chain = build_gemini_rag_chain(PDF_PATH)
    # BUG FIX: build_gemini_rag_chain() returns only the chain, so `llm` was
    # never assigned on this path and create_react_agent(llm=llm, ...) below
    # crashed with an opaque NameError. Fail fast with a clear message until
    # the Gemini builder also exposes its LLM handle.
    # TODO: have build_gemini_rag_chain return (chain, llm) like the OpenAI
    # builder, then delete this raise.
    raise NotImplementedError(
        "The 'gemini' backend does not yet provide an LLM handle for the "
        "ReAct agent; use MODEL = 'openai' or extend build_gemini_rag_chain."
    )
else:
    raise ValueError("Unsupported MODEL type. Choose 'openai' or 'gemini'.")

# === Set up tools ===
# The RAG chain is exposed to the agent as one of its callable tools.
tools = get_tools(rag_chain)

# === Build the agent ===
agent = create_react_agent(
    llm=llm,
    tools=tools,
    prompt=CHARACTER_PROMPT,
)

# Keep only the last 5 exchanges so the prompt stays bounded while the agent
# retains short-term conversational context.
memory = ConversationBufferWindowMemory(
    memory_key='chat_history',
    k=5,
    return_messages=True,
    output_key='output',
)

agent_chain = AgentExecutor(
    agent=agent,
    tools=tools,
    memory=memory,
    max_iterations=5,            # cap ReAct loops to avoid runaway tool use
    handle_parsing_errors=True,  # recover from malformed LLM action output
    verbose=True,
)

# === Build the state graph with agent ===
app = build_graph_with_callable(agent_chain.invoke)

# === Start the UI ===
create_interface(app.invoke)