Spaces:
Sleeping
Sleeping
File size: 2,920 Bytes
from enum import Enum
from typing import Optional, TypedDict, List
from pydantic import BaseModel, Field, ConfigDict
class File(BaseModel):
    """One file entry in a build plan: where the file lives and why it exists."""

    # Workspace-relative (or absolute) location of the file.
    path: str = Field(description="The path to the file to be created or modified")
    # Human-readable role of the file within the generated app.
    purpose: str = Field(description="The purpose of the file, e.g. 'main application logic', 'data processing module', etc.")
class Plan(BaseModel):
    """High-level plan for the app to be built: identity, stack, features, files.

    The Field descriptions double as schema documentation for structured LLM
    output, so their exact wording is part of the runtime contract.
    """

    name: str = Field(description="The name of app to be built")
    # NOTE(review): "oneline" below looks like a typo for "one-line"; left
    # untouched because this string feeds the structured-output schema.
    description: str = Field(description="A oneline description of the app to be built, e.g. 'A web application for managing personal finances'")
    # Free-form stack summary (languages/frameworks), not a constrained enum.
    techstack: str = Field(description="The tech stack to be used for the app, e.g. 'python', 'javascript', 'react', 'flask', etc.")
    features: list[str] = Field(description="A list of features that the app should have, e.g. 'user authentication', 'data visualization', etc.")
    # Each entry pairs a path with its purpose; see the File model above.
    files: list[File] = Field(description="A list of files to be created, each with a 'path' and 'purpose'")
class ImplementationTask(BaseModel):
    """A single unit of work: one file plus a description of what to do to it."""

    # File the task applies to (matches File.path entries from the Plan).
    filepath: str = Field(description="The path to the file to be modified")
    # Instructions detailed enough for a coding agent to act on directly.
    task_description: str = Field(description="A detailed description of the task to be performed on the file, e.g. 'add user authentication', 'implement data processing logic', etc.")
class TaskPlan(BaseModel):
    """Ordered list of implementation steps derived from a Plan."""

    implementation_steps: list[ImplementationTask] = Field(description="A list of steps to be taken to implement the task")
    # extra="allow": unknown keys in LLM output are kept instead of raising a
    # validation error — presumably to tolerate models that add fields; verify
    # against the structured-output caller.
    model_config = ConfigDict(extra="allow")
class CoderState(BaseModel):
    """Mutable progress state while working through a TaskPlan step by step."""

    task_plan: TaskPlan = Field(description="The plan for the task to be implemented")
    # Index into task_plan.implementation_steps; starts at the first step.
    current_step_idx: int = Field(0, description="The index of the current step in the implementation steps")
    # None until a file has been read or drafted for the current step.
    current_file_content: Optional[str] = Field(None, description="The content of the file currently being edited or created")
class ModelBackend(str, Enum):
    """Supported LLM providers.

    Inherits from str so members compare equal to, and serialize as, their
    plain string values (e.g. in config files and pydantic models).
    """

    GROQ = "groq"
    GEMINI = "gemini"
    OPENROUTER = "openrouter"
class AgentConfig(BaseModel):
    """Runtime configuration for the Gemini/Qwen style CLI agent."""

    # Workspace root; may be absolute or relative per the description.
    project_directory: str = Field(description="Absolute or relative path to the workspace root")
    backend: ModelBackend = Field(default=ModelBackend.GROQ, description="LLM provider to use")
    # None means "use the backend's default model".
    model: Optional[str] = Field(default=None, description="Override the default model for the backend")
    # Low default (0.2) biases toward deterministic code generation.
    temperature: float = Field(default=0.2, description="Sampling temperature for the agent responses")
    recursion_limit: int = Field(default=100, description="LangGraph recursion limit")
    auto_context: bool = Field(default=True, description="Automatically refresh project summary before each run")
class AgentGraphState(TypedDict, total=False):
    """State tracked across LangGraph executions for the CLI agent.

    total=False makes every key optional, so the state dict can be built up
    incrementally as graph nodes run.
    """

    # Workspace root the agent operates in.
    project_directory: str
    # Cached project summary; None/absent until generated.
    project_summary: Optional[str]
    # Conversation history; dicts presumably in provider message format
    # ({"role": ..., "content": ...}) — confirm against the graph nodes.
    # Changed List[dict] -> list[dict] (PEP 585) for consistency with the
    # builtin generics used everywhere else in this file.
    messages: list[dict]
    # User input not yet consumed by the graph.
    pending_user_message: Optional[str]
    # When True, regenerate the project summary before the next run.
    refresh_context: bool
|