| """Groq LLM client using LangChain.""" | |
| import logging | |
| from typing import Any, List, Optional | |
| from langchain_groq import ChatGroq | |
| from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage | |
| from langchain_core.output_parsers import StrOutputParser | |
| from src.config.config import settings | |
| logger = logging.getLogger(__name__) | |
| class GroqClient: | |
| """Client for interacting with Groq LLM using LangChain.""" | |
| def __init__( | |
| self, | |
| api_key: Optional[str] = None, | |
| model_name: Optional[str] = None, | |
| temperature: float = 0, | |
| ) -> None: | |
| """Initialize Groq client. | |
| Args: | |
| api_key: Groq API key. If None, uses settings.groq_api_key. | |
| model_name: Model name. If None, uses settings.groq_model. | |
| temperature: Temperature for model generation. | |
| """ | |
| self.api_key = api_key or settings.groq_api_key | |
| self.model_name = model_name or settings.groq_model | |
| self.temperature = temperature | |
| self.max_tokens = settings.max_tokens | |
| if not self.api_key: | |
| raise ValueError("Groq API key is required. Set GROQ_API_KEY environment variable.") | |
| self.llm = ChatGroq( | |
| groq_api_key=self.api_key, | |
| model_name=self.model_name, | |
| temperature=self.temperature, | |
| max_tokens=self.max_tokens, | |
| ) | |
| self.output_parser = StrOutputParser() | |
| logger.info(f"Initialized Groq client with model: {self.model_name}") | |
| def invoke( | |
| self, | |
| prompt: str, | |
| system_message: Optional[str] = None, | |
| **kwargs: Any, | |
| ) -> str: | |
| """Invoke the LLM with a prompt. | |
| Args: | |
| prompt: User prompt. | |
| system_message: Optional system message. | |
| **kwargs: Additional arguments to pass to the LLM. | |
| Returns: | |
| Generated response as string. | |
| """ | |
| messages: List[BaseMessage] = [] | |
| if system_message: | |
| messages.append(SystemMessage(content=system_message)) | |
| messages.append(HumanMessage(content=prompt)) | |
| try: | |
| response = self.llm.invoke(messages, **kwargs) | |
| return self.output_parser.parse(response.content) | |
| except Exception as e: | |
| logger.error(f"Error invoking LLM: {str(e)}") | |
| raise | |
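

# Example usage (a minimal sketch, not part of the original module): assumes
# GROQ_API_KEY is set in the environment and that src.config.config.settings
# provides groq_api_key, groq_model, and max_tokens as referenced above.
if __name__ == "__main__":
    client = GroqClient(temperature=0.2)
    reply = client.invoke(
        "Explain what a vector database is in one sentence.",
        system_message="You are a concise technical assistant.",
    )
    print(reply)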