# NOTE(review): the original file began with "Spaces: / Sleeping / Sleeping" —
# Hugging Face Spaces page-status text captured in a copy-paste, not code.
# Commented out so the module is valid Python.
import os

from langchain.chains import create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI
def manabQA(retriever, question):
    """Answer *question* with retrieval-augmented generation over *retriever*.

    Builds a "stuff the documents into the prompt" QA chain on top of
    GPT-4o-mini and invokes it once with the supplied question.

    Args:
        retriever: A LangChain retriever that supplies context documents.
        question: The user's question as a string.

    Returns:
        The raw response dict from the retrieval chain — includes the
        generated ``"answer"`` plus the retrieved ``"context"`` documents.

    Raises:
        EnvironmentError: If ``OPENAI_API_KEY`` is not set in the environment.
    """
    # Fail fast with a clear message. The original line
    # `os.environ["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY")` was a
    # no-op when the key existed and raised a confusing TypeError (env
    # values must be str, not None) when it did not.
    if not os.getenv("OPENAI_API_KEY"):
        raise EnvironmentError("OPENAI_API_KEY environment variable is not set.")

    # Step 1: Initialize the GPT-4o-mini LLM; temperature=0 for
    # deterministic answers.
    llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)

    # Step 2: System prompt for QA; "{context}" is filled in by the
    # stuff-documents chain with the retrieved documents.
    system_prompt = (
        "You are a helpful assistant. Use the following context to answer the question. "
        "If you don't know the answer, say so.\n\n"
        "{context}"
    )
    prompt = ChatPromptTemplate.from_messages([
        ("system", system_prompt),
        ("human", "{input}"),
    ])

    # Step 3: Combine retrieved docs into the prompt ("stuff" strategy),
    # then wire the retriever in front of it.
    question_answer_chain = create_stuff_documents_chain(llm, prompt)
    qa_chain = create_retrieval_chain(retriever, question_answer_chain)

    # Step 4: Run the chain on the question and return its full response.
    return qa_chain.invoke({"input": question})