from abc import ABC, abstractmethod
from typing import Dict, Any, Optional, List, Type, Union
from pydantic import BaseModel
from dataclasses import dataclass
@dataclass
class LLMCapabilities:
    """Feature flags describing what a concrete LLM client supports.

    Defaults describe a minimal text-only model: text in/out and
    structured JSON are enabled, multimodal I/O and tool use are not.
    Flag semantics are inferred from the names and from the ``schema`` /
    ``tools`` parameters of ``LLMClient.generate``/``chat`` — confirm
    against concrete providers.
    """

    supports_text_in: bool = True   # accepts plain-text prompts
    supports_text_out: bool = True  # produces plain-text responses
    # Can honor a Pydantic ``schema`` and return structured output.
    supports_structured_json: bool = True
    supports_images_in: bool = False   # accepts image inputs
    supports_audio_in: bool = False    # accepts audio inputs
    supports_images_out: bool = False  # can produce image outputs
    supports_audio_out: bool = False   # can produce audio outputs
    supports_tools: bool = False       # supports tool/function calling
class LLMClient(ABC):
    """
    Abstract base class for LLM clients.

    Represents the capability of calling an LLM, decoupling callers from
    specific providers. Concrete subclasses implement :meth:`generate`
    (single prompt) and :meth:`chat` (multi-turn) and advertise what they
    support via :attr:`capabilities`.

    Instances may also be used as async context managers::

        async with SomeClient(...) as client:
            reply = await client.generate("hello")

    On exit, :meth:`close` is awaited so underlying resources (e.g.
    aiohttp sessions) are released even on error paths.
    """

    @property
    @abstractmethod
    def capabilities(self) -> LLMCapabilities:
        """Return the capabilities of this LLM client."""

    @abstractmethod
    async def generate(
        self,
        prompt: str,
        *,
        instruction: Optional[str] = None,
        schema: Optional[Type[BaseModel]] = None,
        temperature: Optional[float] = None,
        tools: Optional[List[Any]] = None,
        metadata: Optional[Dict[str, Any]] = None,
        name: Optional[str] = None,
    ) -> Union[str, Dict[str, Any], BaseModel]:
        """
        Generate a response for a single prompt.

        Args:
            prompt: The user prompt to send to the model.
            instruction: Optional system/instruction text.
            schema: Optional Pydantic model class; when provided,
                implementations should return structured output
                conforming to it.
            temperature: Optional sampling-temperature override.
            tools: Optional tool definitions (provider-specific shape).
            metadata: Optional request metadata (provider-specific).
            name: Optional request name, e.g. for tracing/logging.

        Returns:
            Plain text, a JSON-like dict, or a Pydantic model instance,
            depending on the request and the provider.
        """

    @abstractmethod
    async def chat(
        self,
        messages: List[Dict[str, str]],
        *,
        instruction: Optional[str] = None,
        schema: Optional[Type[BaseModel]] = None,
        temperature: Optional[float] = None,
        tools: Optional[List[Any]] = None,
        metadata: Optional[Dict[str, Any]] = None,
        name: Optional[str] = None,
    ) -> Union[str, Dict[str, Any], BaseModel]:
        """
        Conduct a multi-turn conversation.

        Args:
            messages: Conversation history; each entry is a dict of
                string keys/values (presumably role/content pairs —
                confirm against concrete providers).
            instruction: Optional system/instruction text.
            schema: Optional Pydantic model class for structured output.
            temperature: Optional sampling-temperature override.
            tools: Optional tool definitions (provider-specific shape).
            metadata: Optional request metadata (provider-specific).
            name: Optional request name, e.g. for tracing/logging.

        Returns:
            Plain text, a JSON-like dict, or a Pydantic model instance,
            depending on the request and the provider.
        """

    async def close(self) -> None:
        """
        Optional: Close any underlying resources (e.g., aiohttp sessions).

        The default implementation is a no-op; subclasses holding network
        resources should override it.
        """

    async def __aenter__(self) -> "LLMClient":
        """Enter an ``async with`` block, returning this client."""
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[Any],
    ) -> None:
        """Await :meth:`close` when leaving an ``async with`` block."""
        await self.close()
|