"""Builds the LangChain runnable chain: prompt -> Hugging Face chat model."""

import logging

from langchain_core.prompts import ChatPromptTemplate
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint

from src.core.config import settings
from src.prompts import SYSTEM_PROMPT

# Module-level logger per stdlib convention, instead of the root logger.
logger = logging.getLogger(__name__)


def get_chain():
    """Create the prompt -> chat-model runnable chain.

    Returns:
        A LangChain runnable that pipes the rendered ``SYSTEM_PROMPT``
        template into a ``ChatHuggingFace`` model backed by a
        ``HuggingFaceEndpoint``.

    Raises:
        ValueError: If ``settings.HF_TOKEN`` is not set.
    """
    # Fail fast on missing credentials before building anything else.
    if not settings.HF_TOKEN:
        logger.error("HF_TOKEN is not set! Check environment variables.")
        raise ValueError("HF_TOKEN is required but not set")

    prompt = ChatPromptTemplate.from_template(SYSTEM_PROMPT)

    # Base text-generation endpoint; model and sampling parameters come
    # from application settings.
    llm = HuggingFaceEndpoint(
        repo_id=settings.MODEL_NAME,
        huggingfacehub_api_token=settings.HF_TOKEN,
        temperature=settings.MODEL_TEMPERATURE,
        max_new_tokens=settings.MODEL_MAX_TOKENS,
    )

    # Wrap with ChatHuggingFace for better conversational support.
    chat_llm = ChatHuggingFace(llm=llm)

    return prompt | chat_llm