# smgp/src/model.py
# Commit 16a46a4 (muhammadmaazuddin): feat: working on CSS computed styles on elements
# LLM client initialization moved from _agents.py
import os

from browser_use import ChatGoogle, ChatOpenAI as ChatOpenAIBrowserUse
from agents import OpenAIChatCompletionsModel, AsyncOpenAI
from dotenv import load_dotenv, find_dotenv

# Load variables from the nearest .env file into the process environment.
# (Duplicate `import os` removed — it was imported twice.)
_: bool = load_dotenv(find_dotenv())

# API keys pulled from the environment; any of these may be None if unset.
# deepseek_api_key = os.getenv("DEEPSEEK_API_KEY")
google_api_key = os.getenv("GOOGLE_API_KEY")
# grok_api_key = os.getenv("GROK_API_KEY")
openrouter_api_key = os.getenv("OPENROUTER_API_KEY")
openai_api_key = os.getenv("OPENAI_API_KEY")

# OpenAI-compatible base URLs for the supported providers.
DEEPSEEK_BASE_URL = "https://api.deepseek.com/v1"
GROK_BASE_URL = "https://api.x.ai/v1"
GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
OPENROUTER_BASE_URL = "https://openrouter.ai/api/v1"
# QWEN_BASE_URL = 'https://dashscope-intl.aliyuncs.com/compatible-mode/v1'

# Shared async clients. OpenRouter speaks the OpenAI wire protocol, so the
# same AsyncOpenAI class is used with a different base URL.
openrouter_client = AsyncOpenAI(base_url=OPENROUTER_BASE_URL, api_key=openrouter_api_key)
# deepseek_client = AsyncOpenAI(base_url=DEEPSEEK_BASE_URL, api_key=deepseek_api_key)
# grok_client = AsyncOpenAI(base_url=GROK_BASE_URL, api_key=grok_api_key)
# gemini_client = AsyncOpenAI(base_url=GEMINI_BASE_URL, api_key=google_api_key)
openai_client = AsyncOpenAI(api_key=openai_api_key)
# llm = ChatOpenAI(model='qwen-vl-max', api_key=api_key, base_url=base_url)
def get_model(model_name: str) -> ChatGoogle | ChatOpenAIBrowserUse | OpenAIChatCompletionsModel | str:
    """Resolve a model-name string to a configured LLM client.

    Supported forms, checked in order:
      - "openrouter:<model>"               -> OpenAIChatCompletionsModel via OpenRouter
      - "llm_browser_google"               -> browser_use ChatGoogle (Gemini)
      - "browser_agent_openrouter:<model>" -> browser_use ChatOpenAI via OpenRouter
      - names containing "/"               -> OpenAIChatCompletionsModel via OpenRouter
      - names containing "gpt"             -> OpenAIChatCompletionsModel via OpenAI
      - anything else                      -> returned unchanged (plain string)
    """
    if model_name.startswith("openrouter:"):
        # Use the text after ':' as the model name
        actual_model = model_name.split(":", 1)[1]
        return OpenAIChatCompletionsModel(model=actual_model, openai_client=openrouter_client)
    if model_name == "llm_browser_google":
        # BUG FIX: was os.getenv("google_api_key") — environment variable names
        # are case-sensitive on POSIX, so that lookup always returned None.
        # Use the key already read from GOOGLE_API_KEY at module load.
        return ChatGoogle(model="gemini-flash-latest", api_key=google_api_key)
    elif model_name.startswith("browser_agent_openrouter:"):
        actual_model = model_name.split(":", 1)[1]
        return ChatOpenAIBrowserUse(
            model=actual_model,
            base_url=OPENROUTER_BASE_URL,
            api_key=openrouter_api_key,
        )
    # Provider-prefixed names (e.g. "meta-llama/llama-3") route through OpenRouter.
    if "/" in model_name:
        return OpenAIChatCompletionsModel(model=model_name, openai_client=openrouter_client)
    # elif "deepseek" in model_name:
    #     return OpenAIChatCompletionsModel(model=model_name, openai_client=deepseek_client)
    # elif "grok" in model_name:
    #     return OpenAIChatCompletionsModel(model=model_name, openai_client=grok_client)
    elif "gpt" in model_name:
        return OpenAIChatCompletionsModel(model=model_name, openai_client=openai_client)
    else:
        # Unrecognized names are passed through for the caller to interpret.
        return model_name