# Provenance: uploaded to Hugging Face via huggingface_hub (revision 409c17a, verified).
"""
Infrastructure - Prompt Builder Implementation
"""
from typing import List, Optional
from app.domain.interfaces import IPromptBuilder, LLMMessage
class DefaultPromptBuilder(IPromptBuilder):
    """Prompt construction for context-grounded answering and query expansion."""

    # Fallback system instruction used when the caller supplies none.
    _RAG_SYSTEM_PROMPT = """You are a helpful corporate onboarding assistant.
Answer questions based ONLY on the provided context.
If the answer is not in the context, say "I don't have enough information to answer that question."
Always cite your sources by referencing the relevant context sections."""

    # System instruction for the query-rephrasing prompt.
    _EXPANSION_SYSTEM_PROMPT = """You are a query expansion expert.
Generate 2-3 alternative phrasings of the user's question to improve retrieval.
Return only the alternative questions, one per line."""

    def build_rag_prompt(
        self, query: str, context: List[str], system_prompt: Optional[str] = None
    ) -> List[LLMMessage]:
        """Assemble a system/user message pair grounding the answer in `context`.

        Falls back to the default onboarding-assistant system prompt when
        `system_prompt` is None.
        """
        instructions = (
            self._RAG_SYSTEM_PROMPT if system_prompt is None else system_prompt
        )
        # Label each context chunk ([Context 1], [Context 2], ...) and
        # separate the chunks with a visual divider.
        labeled_chunks = (
            f"[Context {idx}]\n{chunk}"
            for idx, chunk in enumerate(context, start=1)
        )
        joined_context = "\n\n---\n\n".join(labeled_chunks)
        question_block = f"""Context:
{joined_context}
Question: {query}
Please provide a clear, accurate answer based on the context above. Include citations."""
        return [
            LLMMessage(role="system", content=instructions),
            LLMMessage(role="user", content=question_block),
        ]

    def build_query_expansion_prompt(self, query: str) -> List[LLMMessage]:
        """Assemble a system/user message pair requesting 2-3 rephrasings of `query`."""
        request = f"""Original question: {query}
Generate alternative phrasings:"""
        return [
            LLMMessage(role="system", content=self._EXPANSION_SYSTEM_PROMPT),
            LLMMessage(role="user", content=request),
        ]