# Repo-page scrape residue preserved as a comment (was invalid Python at module top):
# DunasAnastasiia — Initial commit (Xet) — 7c2e31a
from __future__ import annotations
from openai import OpenAI
def build_prompt(question: str, chunks: list[dict]) -> tuple[str, str]:
    """Build the (system instructions, user input) pair for a grounded QA call.

    Args:
        question: The user's question; surrounding whitespace is stripped.
        chunks: Retrieved context chunks, each a dict with ``source_id``,
            ``chunk_id``, and ``text`` keys.

    Returns:
        A ``(instructions, input_text)`` tuple: the system prompt enforcing
        context-only answers with [n] citations, and the user message
        containing the numbered context followed by the question.
    """
    instructions = (
        "You are a QA assistant. Answer ONLY using the provided context.\n"
        "If the answer is not in the context, say you don't know based on the context.\n"
        "When you use information from a chunk, cite it like [1], [2] matching the chunk numbers.\n"
        "Be concise."
    )
    # Number chunks from 1 so citations like [1], [2] line up with the headers.
    numbered_chunks = [
        f"[{idx}] source_id={chunk['source_id']} chunk_id={chunk['chunk_id']}\n{chunk['text']}\n"
        for idx, chunk in enumerate(chunks, start=1)
    ]
    context_block = "\n".join(numbered_chunks)
    input_text = f"CONTEXT:\n{context_block}\nQUESTION:\n{question.strip()}\n\nANSWER:"
    return instructions, input_text
def answer_with_provider(
    api_key: str,
    base_url: str,
    model: str,
    question: str,
    chunks: list[dict],
) -> str:
    """Answer ``question`` from ``chunks`` via an OpenAI-compatible provider.

    Works with any endpoint that speaks the Chat Completions API
    (Groq, OpenRouter, Together, etc.).

    Args:
        api_key: Provider API key.
        base_url: Provider endpoint base URL.
        model: Model identifier understood by the provider.
        question: The user's question.
        chunks: Retrieved context chunks (see :func:`build_prompt`).

    Returns:
        The model's answer text, or ``""`` when the provider returns
        no content.
    """
    instructions, input_text = build_prompt(question, chunks)
    client = OpenAI(api_key=api_key, base_url=base_url)
    completion = client.chat.completions.create(
        model=model,
        messages=[
            {"role": "system", "content": instructions},
            {"role": "user", "content": input_text},
        ],
        # Low temperature keeps answers close to the provided context.
        temperature=0.2,
    )
    content = completion.choices[0].message.content
    return content if content is not None else ""