"""
Domain Layer - LLM Interface
Defines contract for LLM implementations.
"""
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import AsyncIterator, List, Optional
@dataclass
class LLMMessage:
    """A single message in a chat conversation with an LLM."""

    # Speaker of the message: "system", "user", or "assistant".
    role: str
    # The message text itself.
    content: str
@dataclass
class LLMResponse:
    """Result of a single (non-streaming) LLM generation call."""

    # Generated text returned by the model.
    content: str
    # Identifier of the model that produced the response.
    model: str
    # Total tokens consumed by the request/response pair.
    tokens_used: int
    # Why generation stopped (e.g. provider-specific stop reason).
    finish_reason: str
class ILLM(ABC):
    """Abstract contract every concrete LLM backend must satisfy."""

    @abstractmethod
    async def generate(
        self,
        messages: List[LLMMessage],
        temperature: float = 0.7,
        max_tokens: int = 2048,
        stream: bool = False,
    ) -> LLMResponse:
        """Produce one complete response for the given conversation.

        Args:
            messages: Ordered conversation history to send to the model.
            temperature: Sampling temperature passed to the backend.
            max_tokens: Upper bound on generated tokens.
            stream: Backend-specific streaming flag (response is still
                returned whole through this method).

        Returns:
            The generated response plus usage metadata.
        """
        ...

    @abstractmethod
    async def generate_stream(
        self,
        messages: List[LLMMessage],
        temperature: float = 0.7,
        max_tokens: int = 2048,
    ) -> AsyncIterator[str]:
        """Yield the response incrementally as text chunks.

        Args:
            messages: Ordered conversation history to send to the model.
            temperature: Sampling temperature passed to the backend.
            max_tokens: Upper bound on generated tokens.
        """
        ...

    @abstractmethod
    def get_model_name(self) -> str:
        """Return the identifier of the underlying model."""
        ...
class IPromptBuilder(ABC):
    """Abstract contract for assembling prompts sent to an LLM."""

    @abstractmethod
    def build_rag_prompt(
        self, query: str, context: List[str], system_prompt: Optional[str] = None
    ) -> List[LLMMessage]:
        """Assemble a retrieval-augmented prompt.

        Args:
            query: The user's question.
            context: Retrieved passages to ground the answer in.
            system_prompt: Optional override for the system instruction.

        Returns:
            The message list to pass to an ``ILLM`` implementation.
        """
        ...

    @abstractmethod
    def build_query_expansion_prompt(self, query: str) -> List[LLMMessage]:
        """Assemble a prompt asking the model to expand *query*.

        Args:
            query: The original user query to be expanded.

        Returns:
            The message list to pass to an ``ILLM`` implementation.
        """
        ...
|