from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Any, Dict, List, Optional, Type, Union

from pydantic import BaseModel


@dataclass
class LLMCapabilities:
    """Feature flags describing the modalities and features an LLM client supports.

    Defaults describe a plain text-in/text-out model with structured
    (schema-constrained) JSON output and no multimodal or tool support.
    """

    supports_text_in: bool = True  # accepts text prompts
    supports_text_out: bool = True  # produces text completions
    supports_structured_json: bool = True  # can emit schema-constrained JSON
    supports_images_in: bool = False  # accepts image inputs
    supports_audio_in: bool = False  # accepts audio inputs
    supports_images_out: bool = False  # can generate images
    supports_audio_out: bool = False  # can generate audio
    supports_tools: bool = False  # supports tool/function calling


class LLMClient(ABC):
    """Abstract base class for LLM clients.

    Represents the capability of calling an LLM, decoupling it from
    specific providers. Concrete subclasses implement ``generate`` and
    ``chat`` and advertise their feature set via ``capabilities``.
    """

    @property
    @abstractmethod
    def capabilities(self) -> LLMCapabilities:
        """Return the capabilities of this LLM client."""

    @abstractmethod
    async def generate(
        self,
        prompt: str,
        *,
        instruction: Optional[str] = None,
        schema: Optional[Type[BaseModel]] = None,
        temperature: Optional[float] = None,
        tools: Optional[List[Any]] = None,
        metadata: Optional[Dict[str, Any]] = None,
        name: Optional[str] = None,
    ) -> Union[str, Dict[str, Any], BaseModel]:
        """Generate a response for a single prompt.

        Args:
            prompt: The prompt text to complete.
            instruction: Optional system-level instruction.
            schema: Optional Pydantic model type; when given, implementations
                are expected to return output conforming to it.
            temperature: Optional sampling temperature override.
            tools: Optional tool definitions; presumably only meaningful when
                ``capabilities.supports_tools`` is set — verify per provider.
            metadata: Optional provider-specific request metadata.
            name: Optional logical name for the call (e.g. for tracing/logging).

        Returns:
            Plain text, a JSON-compatible dict, or a ``BaseModel`` instance,
            depending on the implementation and whether ``schema`` was given.
        """

    @abstractmethod
    async def chat(
        self,
        messages: List[Dict[str, str]],
        *,
        instruction: Optional[str] = None,
        schema: Optional[Type[BaseModel]] = None,
        temperature: Optional[float] = None,
        tools: Optional[List[Any]] = None,
        metadata: Optional[Dict[str, Any]] = None,
        name: Optional[str] = None,
    ) -> Union[str, Dict[str, Any], BaseModel]:
        """Conduct a multi-turn conversation.

        Args:
            messages: Conversation history as a list of string-to-string
                dicts (presumably ``role``/``content`` pairs — confirm
                against concrete implementations).
            instruction: Optional system-level instruction.
            schema: Optional Pydantic model type; when given, implementations
                are expected to return output conforming to it.
            temperature: Optional sampling temperature override.
            tools: Optional tool definitions; presumably only meaningful when
                ``capabilities.supports_tools`` is set — verify per provider.
            metadata: Optional provider-specific request metadata.
            name: Optional logical name for the call (e.g. for tracing/logging).

        Returns:
            Plain text, a JSON-compatible dict, or a ``BaseModel`` instance,
            depending on the implementation and whether ``schema`` was given.
        """

    async def close(self) -> None:
        """Close any underlying resources (e.g., aiohttp sessions).

        Optional hook: the default implementation is a no-op so subclasses
        without external resources need not override it.
        """