File size: 643 Bytes
f435cf6
 
 
 
b99fc36
 
3e503c9
b99fc36
 
 
f435cf6
b99fc36
f435cf6
b99fc36
f435cf6
 
 
b99fc36
f435cf6
 
 
b99fc36
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
from pydantic import BaseModel, Field
from typing import Optional, Literal, Dict, Any


# Closed set of supported inference backends: the hosted HF Inference API,
# or running transformers in-process. Used as the type of ModelSpec.backend.
BackendType = Literal["hf_inference_api", "local_transformers"]


class ModelSpec(BaseModel):
    """Configuration for a single model: which checkpoint to use, where to
    run it, and its generation parameters.

    Defaults are expressed with explicit ``Field(default=...)`` declarations;
    this is equivalent to plain ``= value`` assignments in pydantic.
    """

    # The only required field — everything else has a working default.
    model_id: str = Field(..., description="HF model id, e.g. meta-llama/Meta-Llama-3-8B-Instruct")
    backend: BackendType = Field(default="hf_inference_api")
    # Generation parameters (presumably passed through to the backend's
    # generate call — confirm against the consuming code).
    temperature: float = Field(default=0.2)
    max_new_tokens: int = Field(default=600)
    top_p: float = Field(default=0.95)
    repetition_penalty: float = Field(default=1.05)
    # Optional system message prepended to conversations; None means unset.
    system_prompt: Optional[str] = Field(default=None)
    # Escape hatch for backend-specific options not modeled above.
    extra: Dict[str, Any] = Field(default_factory=dict)


class RunConfig(BaseModel):
    """Top-level run configuration: one ModelSpec per pipeline role.

    All three roles are required; there are no defaults.
    """

    # Each role may point at a different model/backend combination.
    analyzer: ModelSpec = Field(...)
    refactor: ModelSpec = Field(...)
    critic: ModelSpec = Field(...)