Spaces:
Sleeping
Sleeping
| import os | |
| from typing import Optional | |
| import openai | |
| import anthropic | |
| from duckduckgo_search import DDGS | |
class Agent:
    """Route chat messages to an OpenAI or Anthropic model; can search the web."""

    def __init__(self, base_model: str = "gpt-3.5-turbo", search_engine: str = "duckduckgo"):
        """Configure the API client for the chosen model family.

        Args:
            base_model: Model identifier; names containing "gpt" use OpenAI,
                names containing "claude" use Anthropic.
            search_engine: Web-search backend; only "duckduckgo" is implemented.
        """
        self.base_model = base_model
        self.search_engine = search_engine
        # Only set up the client that the selected model actually needs;
        # credentials come from the environment, never from code.
        if "gpt" in base_model:
            openai.api_key = os.getenv("OPENAI_API_KEY")
        elif "claude" in base_model:
            self.claude = anthropic.Anthropic(api_key=os.getenv("ANTHROPIC_API_KEY"))

    def execute(self, message: str, project_name: str) -> Optional[str]:
        """Process *message* with the configured model and return the reply text.

        Errors are returned as a printable string rather than raised, so the
        caller always gets something displayable. *project_name* is accepted
        for interface compatibility but is not used here.
        """
        try:
            if "gpt" in self.base_model:
                return self._process_with_gpt(message)
            if "claude" in self.base_model:
                return self._process_with_claude(message)
            return "Unsupported model selected"
        except Exception as e:
            return f"Error processing message: {str(e)}"

    def subsequent_execute(self, message: str, project_name: str) -> Optional[str]:
        """Follow-up turns reuse the same single-shot path as the first turn."""
        return self.execute(message, project_name)

    def _process_with_gpt(self, message: str) -> str:
        """Send a single-turn chat-completion request to OpenAI."""
        response = openai.chat.completions.create(
            model=self.base_model,
            messages=[{"role": "user", "content": message}],
        )
        return response.choices[0].message.content

    def _process_with_claude(self, message: str) -> str:
        """Send a single-turn request to the Anthropic Messages API.

        Fixes over the original: the Messages API takes plain role/content
        dicts (anthropic.Message is a *response* model and cannot be built
        this way), requires max_tokens, and should honor the configured
        model instead of a hard-coded one.
        """
        response = self.claude.messages.create(
            model=self.base_model,
            max_tokens=4096,  # required by the Messages API; generous reply cap
            messages=[{"role": "user", "content": message}],
        )
        return response.content[0].text

    def _search_web(self, query: str, num_results: int = 5) -> list:
        """Return up to *num_results* DuckDuckGo text results, or [] if the
        configured engine is unsupported."""
        if self.search_engine == "duckduckgo":
            # DDGS is a context manager; close its HTTP session when done.
            with DDGS() as ddgs:
                return list(ddgs.text(query, max_results=num_results))
        # Other search engines are not implemented yet.
        return []