Update src/config.py
Browse files — src/config.py (+9, −7)
src/config.py
CHANGED
|
@@ -1,20 +1,22 @@
|
|
| 1 |
from pydantic import BaseModel, Field
|
| 2 |
from typing import Optional, Literal, Dict, Any
|
| 3 |
|
| 4 |
-
ProviderType = Literal["openai", "gemini", "deepseek"]
|
| 5 |
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
model_id: str = Field(..., description="Provider model id/name")
|
| 9 |
|
|
|
|
|
|
|
|
|
|
| 10 |
temperature: float = 0.2
|
| 11 |
-
max_new_tokens: int =
|
| 12 |
top_p: float = 0.95
|
| 13 |
-
|
| 14 |
system_prompt: Optional[str] = None
|
| 15 |
extra: Dict[str, Any] = Field(default_factory=dict)
|
| 16 |
|
|
|
|
| 17 |
class RunConfig(BaseModel):
|
| 18 |
analyzer: ModelSpec
|
| 19 |
refactor: ModelSpec
|
| 20 |
-
critic: ModelSpec
|
|
|
|
| 1 |
from pydantic import BaseModel, Field
|
| 2 |
from typing import Optional, Literal, Dict, Any
|
| 3 |
|
|
|
|
| 4 |
|
| 5 |
+
BackendType = Literal["hf_inference_api", "local_transformers"]
|
| 6 |
+
|
|
|
|
| 7 |
|
| 8 |
+
class ModelSpec(BaseModel):
    """Configuration for a single model endpoint.

    Bundles the model identity, which backend serves it, and the
    sampling parameters used at generation time. Unrecognized
    provider-specific options go into ``extra``.
    """

    # NOTE(review): under pydantic v2 a field named ``model_id`` may emit a
    # protected-namespace warning ("model_" prefix) — confirm pydantic version.
    model_id: str = Field(..., description="HF model id, e.g. meta-llama/Meta-Llama-3-8B-Instruct")
    # Where inference runs; defaults to the hosted HF Inference API.
    backend: BackendType = "hf_inference_api"
    # Sampling controls (conservative defaults for mostly-deterministic output).
    temperature: float = 0.2
    max_new_tokens: int = 600
    top_p: float = 0.95
    repetition_penalty: float = 1.05
    # Optional system prompt prepended to every request; None means no system message.
    system_prompt: Optional[str] = None
    # Free-form passthrough for backend-specific keyword arguments.
    extra: Dict[str, Any] = Field(default_factory=dict)
|
| 17 |
|
| 18 |
+
|
| 19 |
class RunConfig(BaseModel):
    """Top-level run configuration: one ModelSpec per pipeline role.

    The three roles visible here are the analyzer, the refactor model,
    and the critic; each may point at a different model/backend.
    """

    analyzer: ModelSpec
    refactor: ModelSpec
    critic: ModelSpec
|