| import os
|
| from typing import Optional, Dict
|
|
|
class AutoGenModelFactory:
    """Factory for creating AutoGen compatible model instances."""

    # Providers this factory knows how to configure.
    _SUPPORTED_PROVIDERS = ("openai", "google", "gemini", "groq", "ollama")

    # Default capability flags advertised for Gemini models when the caller
    # does not supply an explicit ``model_info``.
    _GEMINI_MODEL_INFO = {
        "family": "gemini",
        "vision": False,
        "function_calling": True,
        "json_output": True,
        "structured_output": True,
    }

    @staticmethod
    def get_model(provider: str = "openai",
                  model_name: str = "gpt-4o",
                  temperature: float = 0,
                  model_info: Optional[Dict] = None
                  ):
        """Return an AutoGen ``OpenAIChatCompletionClient`` for *provider*.

        Args:
            provider: One of "openai", "google"/"gemini", "groq", "ollama"
                (case-insensitive).
            model_name: Model identifier passed through to the client.
            temperature: Sampling temperature passed through to the client.
            model_info: Optional capability metadata for the model. When
                given, it is forwarded for every provider; otherwise Gemini
                gets a built-in default and Ollama an empty dict.

        Returns:
            A configured ``OpenAIChatCompletionClient`` instance.

        Raises:
            ValueError: If *provider* is not supported.
            ImportError: If the AutoGen libraries are not installed.
        """
        normalized = provider.lower()  # hoisted: was recomputed per branch

        # Fail fast on an unknown provider *before* requiring the optional
        # AutoGen dependency to be importable.
        if normalized not in AutoGenModelFactory._SUPPORTED_PROVIDERS:
            raise ValueError(f"Unsupported AutoGen provider: {provider}")

        try:
            from autogen_ext.models.openai import OpenAIChatCompletionClient
        except ImportError as e:
            raise ImportError("AutoGen libraries (autogen-agentchat, autogen-ext[openai]) are not installed.") from e

        # Bug fix: ``model_info`` used to be silently ignored for every
        # provider except Ollama. Forward it when the caller supplied one;
        # omit the kwarg entirely otherwise to preserve prior behavior.
        extra_kwargs: Dict = {}
        if model_info is not None:
            extra_kwargs["model_info"] = model_info

        if normalized == "openai":
            return OpenAIChatCompletionClient(
                model=model_name,
                api_key=os.environ.get("OPENAI_API_KEY"),
                temperature=temperature,
                **extra_kwargs,
            )

        if normalized in ("google", "gemini"):
            # Gemini is reached through its OpenAI-compatible endpoint.
            return OpenAIChatCompletionClient(
                model=model_name,
                base_url="https://generativelanguage.googleapis.com/v1beta/openai/",
                api_key=os.environ.get("GOOGLE_API_KEY"),
                temperature=temperature,
                # Caller-supplied model_info wins; fall back to a copy of the
                # built-in Gemini capability defaults.
                model_info=model_info if model_info is not None
                else dict(AutoGenModelFactory._GEMINI_MODEL_INFO),
                extra_body={"n": 1},
            )

        if normalized == "groq":
            return OpenAIChatCompletionClient(
                model=model_name,
                base_url="https://api.groq.com/openai/v1",
                api_key=os.environ.get("GROQ_API_KEY"),
                temperature=temperature,
                **extra_kwargs,
            )

        # normalized == "ollama" (guaranteed by the validation above).
        base_url = os.environ.get("OLLAMA_BASE_URL", "http://localhost:11434/v1")
        return OpenAIChatCompletionClient(
            model=model_name,
            base_url=base_url,
            # Ollama does not check the key, but the client requires one.
            api_key="ollama",
            model_info=model_info if model_info is not None else {},
            temperature=temperature,
        )
|
|
|