File size: 792 Bytes
2f81d82
0827021
b3de77b
 
2f81d82
 
b3de77b
0827021
b3de77b
 
 
0827021
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
from dotenv import load_dotenv
from langchain.chat_models import init_chat_model
from langchain_core.messages import HumanMessage

# Load .env before importing .config: config constants below presumably read
# environment variables at import time, so this ordering matters.
# override=True lets .env values win over already-set process env vars.
load_dotenv(override=True)

from .config import MODEL_NAME, MODEL_PROVIDER, PROMPT, TEMPERATURE

# Module-level chat-model singleton, shared by all calls to answer_question.
llm = init_chat_model(
    MODEL_NAME, model_provider=MODEL_PROVIDER, temperature=TEMPERATURE
)


def answer_question(query: str, contexts: list[str]) -> str:
    """
    Generate an answer to ``query`` grounded in the supplied ``contexts``.

    The module-level prompt template is rendered with both values and sent
    to the shared chat model as a single human message.

    Args:
        query: The question to be answered.
        contexts: Context passages interpolated into the prompt.

    Returns:
        The model's answer text.
    """
    rendered = PROMPT.format(contexts=contexts, query=query)
    reply = llm.invoke([HumanMessage(content=rendered)])
    return reply.content