| import logging | |
| from typing import List, TypeVar, Optional | |
| from langchain_core.messages import AnyMessage, BaseMessage, SystemMessage | |
| from pydantic import BaseModel | |
| from langchain_core.tools import BaseTool | |
| from langchain_openai import ChatOpenAI | |
# Type variable for structured-output calls; any Pydantic model subclass.
# NOTE: the name must be passed positionally — on Python 3.12+ TypeVar is
# C-implemented and `name=` as a keyword raises TypeError.
OutStruct = TypeVar("OutStruct", bound=BaseModel)
class AgentBlueprint:
    """A named agent wrapping a chat LLM with an optional system prompt and tools.

    Provides synchronous and asynchronous entry points for both plain message
    calls and structured (Pydantic-validated) output calls. All LLM invocations
    are retried once on failure (two attempts total).
    """

    def __init__(
        self,
        agent_name: str,
        *,
        system_prompt: str = "",
        tools: Optional[List[BaseTool]] = None,
        description: str = "",
        base_url: Optional[str] = None,
        llm: ChatOpenAI,
    ):
        """Configure the agent.

        Args:
            agent_name: Name stamped onto every response message (``response.name``).
            system_prompt: Optional system message prepended to every call.
            tools: Optional tools to bind to the LLM; also indexed by name
                in ``self.tools`` for later dispatch.
            description: Human-readable description, stored as
                ``self.agent_description``.
            base_url: Currently unused. NOTE(review): accepted but never read —
                confirm whether it should be forwarded to the LLM client.
            llm: The chat model to invoke (keyword-only, required).
        """
        self.agent_name = agent_name
        self.system_prompt = system_prompt
        self.llm = llm
        self.agent_description = description
        # Always define the tool registry so attribute access is safe even
        # when no tools were supplied (the original left it unset in that case,
        # causing AttributeError on later lookups).
        self.tools: dict = {}
        if tools:
            self.llm = self.llm.bind_tools(tools)
            self.tools = {tool.name: tool for tool in tools}
        self.logger = logging.getLogger(self.__class__.__name__)

    def __set_system_prompt(self, messages: List[AnyMessage]) -> List[AnyMessage]:
        """Return ``messages`` with the system prompt prepended, if one is set."""
        if self.system_prompt:
            return [SystemMessage(content=self.system_prompt)] + messages
        return messages

    def call_agent(self, messages: List[AnyMessage]) -> BaseMessage:
        """Invoke the LLM synchronously and tag the response with this agent's name."""
        response = self.llm.with_retry(stop_after_attempt=2).invoke(
            input=self.__set_system_prompt(messages)
        )
        response.name = self.agent_name
        return response

    async def acall_agent(self, messages: List[AnyMessage]) -> BaseMessage:
        """Async variant of :meth:`call_agent`."""
        response = await self.llm.with_retry(stop_after_attempt=2).ainvoke(
            input=self.__set_system_prompt(messages)
        )
        response.name = self.agent_name
        return response

    def call_agent_structured(
        self, messages: List[AnyMessage], clazz: type[OutStruct]
    ) -> OutStruct:
        """Invoke the LLM and coerce its output into ``clazz`` (a Pydantic model class)."""
        response = (
            self.llm.with_structured_output(clazz)
            .with_retry(stop_after_attempt=2)
            .invoke(input=self.__set_system_prompt(messages))
        )
        return response

    async def acall_agent_structured(
        self, messages: List[AnyMessage], clazz: type[OutStruct]
    ) -> OutStruct:
        """Async variant of :meth:`call_agent_structured`."""
        response = (
            await self.llm.with_structured_output(clazz)
            .with_retry(stop_after_attempt=2)
            .ainvoke(input=self.__set_system_prompt(messages))
        )
        return response