zazaman's picture
Add multilingual translation support with Qwen3-0.6B-GGUF and optimize for Hugging Face Spaces deployment
a2e1879
raw
history blame
1.24 kB
# llm_clients/base.py
from abc import ABC, abstractmethod
from typing import Generator, Any, Dict, List, Optional
class LlmClient(ABC):
    """Common interface that every concrete LLM client implements.

    Subclasses provide the provider-specific request logic through the two
    abstract ``_impl`` hooks; the public ``generate_content`` and
    ``generate_content_stream`` methods simply delegate to them.
    """

    def __init__(self, config: Dict[str, Any], system_prompt: str):
        # Provider configuration dictionary and the system prompt that
        # accompanies every request.
        self.config = config
        self.system_prompt = system_prompt

    def generate_content(self, prompt: str, files: Optional[List[Dict[str, Any]]] = None) -> str:
        """Return a complete, non-streaming LLM response for ``prompt``.

        The ``files`` argument is accepted for interface compatibility and
        is ignored by this default implementation.
        """
        return self._generate_content_impl(prompt)

    def generate_content_stream(self, prompt: str, files: Optional[List[Dict[str, Any]]] = None) -> Generator[Any, None, None]:
        """Return a streaming response generator for ``prompt``.

        The ``files`` argument is accepted for interface compatibility and
        is ignored by this default implementation.
        """
        return self._generate_content_stream_impl(prompt)

    @abstractmethod
    def _generate_content_impl(self, prompt: str) -> str:
        """Provider-specific non-streaming content generation."""
        ...

    @abstractmethod
    def _generate_content_stream_impl(self, prompt: str) -> Generator[Any, None, None]:
        """Provider-specific streaming content generation."""
        ...