Spaces:
Sleeping
Sleeping
File size: 1,091 Bytes
from langchain_openai import ChatOpenAI
from langchain.chains import create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain_core.prompts import ChatPromptTemplate
def manabQA(retriever, question):
    """Answer *question* with retrieval-augmented generation over *retriever*.

    Builds a LangChain "stuff" documents chain on top of GPT-4o-mini and wires
    it to the supplied retriever, then invokes it once with the question.

    Args:
        retriever: A LangChain retriever (e.g. from a vector store) used to
            fetch context documents for the question.
        question: The user's question as a plain string.

    Returns:
        The retrieval chain's response dict (includes at least ``"input"``,
        ``"context"``, and ``"answer"`` keys per ``create_retrieval_chain``).

    Note:
        ``ChatOpenAI`` reads ``OPENAI_API_KEY`` from the environment itself.
        The previous self-assignment ``os.environ[...] = os.getenv(...)`` was
        a no-op when the key was set, raised ``TypeError`` when it was unset
        (environ values must be ``str``), and referenced ``os`` without an
        import — it has been removed.
    """
    # Step 1: Initialize the LLM; temperature=0 for deterministic answers.
    llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)

    # Step 2: System prompt with a {context} slot the stuff-chain fills in
    # with the retrieved documents; {input} carries the user's question.
    system_prompt = (
        "You are a helpful assistant. Use the following context to answer the question. "
        "If you don't know the answer, say so.\n\n"
        "{context}"
    )
    prompt = ChatPromptTemplate.from_messages([
        ("system", system_prompt),
        ("human", "{input}"),
    ])

    # Step 3: Stuff the retrieved docs into the prompt, then wrap the whole
    # thing with the retriever so one invoke() does retrieve + answer.
    question_answer_chain = create_stuff_documents_chain(llm, prompt)
    qa_chain = create_retrieval_chain(retriever, question_answer_chain)

    # Step 4: Run the chain once for this question.
    response = qa_chain.invoke({"input": question})
    return response