import os
from pydantic import BaseModel, Field
from dotenv import load_dotenv
# NOTE(review): wildcard import — supplies router_instruction_prompt_1/2,
# summary_prompt_instructions_1, extract_prompt_instructions_2 used below;
# explicit imports would make those dependencies visible. Verify against prompts.py.
from prompts import *
from langchain_openai import ChatOpenAI
from langchain_core.prompts import ChatPromptTemplate

# Load environment variables (e.g. OPEN_ROUTER_API_KEY, read below) from a local .env file.
load_dotenv()
class RouterResponse_2(BaseModel):
    """Structured output schema for the second-stage router."""

    # Keys the router judged relevant to the incoming query.
    route: list[str] = Field(
        description="A list of keys relevant to the user's query",
    )
class ExtractorResponse_2(BaseModel):
    """Structured output schema for the second-stage extractor."""

    # Condensed text produced from the supplied context.
    information: str = Field(
        description="Condensed information based on the context provided",
    )
# Chat model routed through OpenRouter's OpenAI-compatible endpoint
# (model id "openai/gpt-4o-mini", sampling temperature 0.7).
# NOTE(review): assumes OPEN_ROUTER_API_KEY is present in the environment /
# .env — os.getenv returns None when missing, which only fails at call time; verify.
llm = ChatOpenAI(model="openai/gpt-4o-mini",temperature=0.7,base_url="https://openrouter.ai/api/v1",
api_key=os.getenv("OPEN_ROUTER_API_KEY"))
# First-stage routing prompt. router_instruction_prompt_1 (from prompts) is
# .format()-ed with the literal strings "{query}", "{previous_messages}" and
# "{format_instructions}" so the resulting text still contains those braces,
# which ChatPromptTemplate then treats as its own input variables at invoke time.
# NOTE(review): this double-templating breaks if the instruction text contains
# any other literal braces (e.g. example JSON) — confirm against prompts.py;
# ChatPromptTemplate.partial(...) would be the safer idiom.
router_prompt_1 = ChatPromptTemplate.from_messages([
    ("system", "You are a routing assistant."),
    ("user", router_instruction_prompt_1.format(query="{query}", previous_messages="{previous_messages}",
                                                format_instructions="{format_instructions}"))])
# prompt -> llm pipeline; yields the raw model message (no output parser attached).
router_chain_1= router_prompt_1 | llm
# First-stage summarisation prompt; same literal-placeholder re-templating
# pattern as the routing prompt above: "{query}", "{previous_messages}",
# "{data}" and "{format_instructions}" survive the .format() call and become
# ChatPromptTemplate input variables. NOTE(review): fragile if
# summary_prompt_instructions_1 contains other literal braces — verify in prompts.py.
summary_prompt_1 = ChatPromptTemplate.from_messages([
    ("system", "You are a Summarising assistant."),
    ("user", summary_prompt_instructions_1.format(query="{query}", previous_messages="{previous_messages}",
                                                  data="{data}",format_instructions="{format_instructions}"))])
# prompt -> llm pipeline; yields the raw model message (no output parser attached).
summary_chain_1 = summary_prompt_1 | llm
# Second-stage chains: the LLM is constrained to emit the pydantic schemas
# declared above via with_structured_output.
# NOTE(review): assumes router_instruction_prompt_2 and
# extract_prompt_instructions_2 are Runnable prompt templates (e.g.
# ChatPromptTemplate) — piping a plain str into a Runnable raises a TypeError;
# confirm their types in prompts.py.
router = router_instruction_prompt_2 | llm.with_structured_output(RouterResponse_2)
extractor = extract_prompt_instructions_2 | llm.with_structured_output(ExtractorResponse_2)