// PicoClaw - Ultra-lightweight personal AI agent
// Inspired by and based on nanobot: https://github.com/HKUDS/nanobot
// License: MIT
//
// Copyright (c) 2026 PicoClaw contributors
| package providers | |
| import ( | |
| "bytes" | |
| "context" | |
| "encoding/json" | |
| "fmt" | |
| "io" | |
| "net/http" | |
| "net/url" | |
| "strings" | |
| "time" | |
| "github.com/sipeed/picoclaw/pkg/auth" | |
| "github.com/sipeed/picoclaw/pkg/config" | |
| "github.com/sipeed/picoclaw/pkg/utils" | |
| ) | |
// HTTPProvider is a generic LLM provider that speaks the OpenAI-compatible
// chat-completions HTTP API. It is constructed by NewHTTPProvider and backs
// most hosted providers selected in CreateProvider (Groq, OpenRouter, Zhipu, ...).
type HTTPProvider struct {
	apiKey     string       // bearer token for the Authorization header; may be empty (e.g. local servers)
	apiBase    string       // base URL with any trailing "/" removed, e.g. "https://api.openai.com/v1"
	httpClient *http.Client // 120s-timeout client, proxy-configured or using the custom-DNS client
}
| func NewHTTPProvider(apiKey, apiBase, proxy string) *HTTPProvider { | |
| var client *http.Client | |
| if proxy != "" { | |
| client = &http.Client{ | |
| Timeout: 120 * time.Second, | |
| } | |
| proxyURL, err := url.Parse(proxy) | |
| if err == nil { | |
| client.Transport = &http.Transport{ | |
| Proxy: http.ProxyURL(proxyURL), | |
| } | |
| } | |
| } else { | |
| // Use custom DNS resolver to avoid issues with local DNS | |
| client = utils.NewHTTPClientWithGoogleDNS() | |
| client.Timeout = 120 * time.Second | |
| } | |
| return &HTTPProvider{ | |
| apiKey: apiKey, | |
| apiBase: strings.TrimRight(apiBase, "/"), | |
| httpClient: client, | |
| } | |
| } | |
| func (p *HTTPProvider) Chat(ctx context.Context, messages []Message, tools []ToolDefinition, model string, options map[string]interface{}) (*LLMResponse, error) { | |
| if p.apiBase == "" { | |
| return nil, fmt.Errorf("API base not configured") | |
| } | |
| // Strip provider prefix from model name ONLY if not using OpenRouter or specific providers that require full IDs | |
| // OpenRouter requires full model IDs (e.g., nvidia/nemotron-nano-12b-v2-vl:free) | |
| if !strings.Contains(p.apiBase, "openrouter") { | |
| // Strip provider prefix from model name (e.g., moonshot/kimi-k2.5 -> kimi-k2.5, groq/openai/gpt-oss-120b -> openai/gpt-oss-120b, ollama/qwen2.5:14b -> qwen2.5:14b) | |
| if idx := strings.Index(model, "/"); idx != -1 { | |
| prefix := model[:idx] | |
| if prefix == "moonshot" || prefix == "nvidia" || prefix == "groq" || prefix == "ollama" { | |
| model = model[idx+1:] | |
| } | |
| } | |
| } | |
| // Handle models that don't support system messages (e.g., Google Gemma 3 via OpenRouter/Google AI Studio) | |
| // Error: "Invalid request: Developer instruction is not enabled for models/gemma-3-12b-it" | |
| // We merge system prompt into the first user message | |
| if strings.Contains(strings.ToLower(model), "gemma") || strings.Contains(strings.ToLower(model), "google/") { | |
| var newMessages []Message | |
| var systemContent string | |
| for _, msg := range messages { | |
| if msg.Role == "system" { | |
| systemContent += msg.Content + "\n\n" | |
| } else { | |
| newMessages = append(newMessages, msg) | |
| } | |
| } | |
| // Prepend system content to the first user message | |
| if systemContent != "" && len(newMessages) > 0 { | |
| // Find first user message | |
| foundUser := false | |
| for i, msg := range newMessages { | |
| if msg.Role == "user" { | |
| if len(msg.MultiContent) > 0 { | |
| // Add as a new text part at the beginning | |
| textPart := ContentPart{ | |
| Type: "text", | |
| Text: systemContent, | |
| } | |
| // Prepend to MultiContent | |
| newMessages[i].MultiContent = append([]ContentPart{textPart}, msg.MultiContent...) | |
| } else { | |
| // Prepend to Content string | |
| newMessages[i].Content = systemContent + msg.Content | |
| } | |
| foundUser = true | |
| break | |
| } | |
| } | |
| // If no user message found (rare), append a new one | |
| if !foundUser { | |
| newMessages = append(newMessages, Message{ | |
| Role: "user", | |
| Content: systemContent, | |
| }) | |
| } | |
| } | |
| messages = newMessages | |
| } | |
| requestBody := map[string]interface{}{ | |
| "model": model, | |
| "messages": messages, | |
| } | |
| if len(tools) > 0 { | |
| requestBody["tools"] = tools | |
| requestBody["tool_choice"] = "auto" | |
| } | |
| if maxTokens, ok := options["max_tokens"].(int); ok { | |
| lowerModel := strings.ToLower(model) | |
| if strings.Contains(lowerModel, "glm") || strings.Contains(lowerModel, "o1") { | |
| requestBody["max_completion_tokens"] = maxTokens | |
| } else { | |
| requestBody["max_tokens"] = maxTokens | |
| } | |
| } | |
| if temperature, ok := options["temperature"].(float64); ok { | |
| lowerModel := strings.ToLower(model) | |
| // Kimi k2 models only support temperature=1 | |
| if strings.Contains(lowerModel, "kimi") && strings.Contains(lowerModel, "k2") { | |
| requestBody["temperature"] = 1.0 | |
| } else { | |
| requestBody["temperature"] = temperature | |
| } | |
| } | |
| jsonData, err := json.Marshal(requestBody) | |
| if err != nil { | |
| return nil, fmt.Errorf("failed to marshal request: %w", err) | |
| } | |
| req, err := http.NewRequestWithContext(ctx, "POST", p.apiBase+"/chat/completions", bytes.NewReader(jsonData)) | |
| if err != nil { | |
| return nil, fmt.Errorf("failed to create request: %w", err) | |
| } | |
| req.Header.Set("Content-Type", "application/json") | |
| if p.apiKey != "" { | |
| req.Header.Set("Authorization", "Bearer "+p.apiKey) | |
| } | |
| resp, err := p.httpClient.Do(req) | |
| if err != nil { | |
| return nil, fmt.Errorf("failed to send request: %w", err) | |
| } | |
| defer resp.Body.Close() | |
| body, err := io.ReadAll(resp.Body) | |
| if err != nil { | |
| return nil, fmt.Errorf("failed to read response: %w", err) | |
| } | |
| if resp.StatusCode != http.StatusOK { | |
| return nil, fmt.Errorf("API request failed:\n Status: %d\n Body: %s", resp.StatusCode, string(body)) | |
| } | |
| return p.parseResponse(body) | |
| } | |
| func (p *HTTPProvider) parseResponse(body []byte) (*LLMResponse, error) { | |
| var apiResponse struct { | |
| Choices []struct { | |
| Message struct { | |
| Content string `json:"content"` | |
| ToolCalls []struct { | |
| ID string `json:"id"` | |
| Type string `json:"type"` | |
| Function *struct { | |
| Name string `json:"name"` | |
| Arguments string `json:"arguments"` | |
| } `json:"function"` | |
| } `json:"tool_calls"` | |
| } `json:"message"` | |
| FinishReason string `json:"finish_reason"` | |
| } `json:"choices"` | |
| Usage *UsageInfo `json:"usage"` | |
| } | |
| if err := json.Unmarshal(body, &apiResponse); err != nil { | |
| return nil, fmt.Errorf("failed to unmarshal response: %w", err) | |
| } | |
| if len(apiResponse.Choices) == 0 { | |
| return &LLMResponse{ | |
| Content: "", | |
| FinishReason: "stop", | |
| }, nil | |
| } | |
| choice := apiResponse.Choices[0] | |
| toolCalls := make([]ToolCall, 0, len(choice.Message.ToolCalls)) | |
| for _, tc := range choice.Message.ToolCalls { | |
| arguments := make(map[string]interface{}) | |
| name := "" | |
| // Handle OpenAI format with nested function object | |
| if tc.Type == "function" && tc.Function != nil { | |
| name = tc.Function.Name | |
| if tc.Function.Arguments != "" { | |
| if err := json.Unmarshal([]byte(tc.Function.Arguments), &arguments); err != nil { | |
| arguments["raw"] = tc.Function.Arguments | |
| } | |
| } | |
| } else if tc.Function != nil { | |
| // Legacy format without type field | |
| name = tc.Function.Name | |
| if tc.Function.Arguments != "" { | |
| if err := json.Unmarshal([]byte(tc.Function.Arguments), &arguments); err != nil { | |
| arguments["raw"] = tc.Function.Arguments | |
| } | |
| } | |
| } | |
| toolCalls = append(toolCalls, ToolCall{ | |
| ID: tc.ID, | |
| Name: name, | |
| Arguments: arguments, | |
| }) | |
| } | |
| return &LLMResponse{ | |
| Content: choice.Message.Content, | |
| ToolCalls: toolCalls, | |
| FinishReason: choice.FinishReason, | |
| Usage: apiResponse.Usage, | |
| }, nil | |
| } | |
| func (p *HTTPProvider) GetDefaultModel() string { | |
| return "" | |
| } | |
| func createClaudeAuthProvider() (LLMProvider, error) { | |
| cred, err := auth.GetCredential("anthropic") | |
| if err != nil { | |
| return nil, fmt.Errorf("loading auth credentials: %w", err) | |
| } | |
| if cred == nil { | |
| return nil, fmt.Errorf("no credentials for anthropic. Run: picoclaw auth login --provider anthropic") | |
| } | |
| return NewClaudeProviderWithTokenSource(cred.AccessToken, createClaudeTokenSource()), nil | |
| } | |
| func createCodexAuthProvider() (LLMProvider, error) { | |
| cred, err := auth.GetCredential("openai") | |
| if err != nil { | |
| return nil, fmt.Errorf("loading auth credentials: %w", err) | |
| } | |
| if cred == nil { | |
| return nil, fmt.Errorf("no credentials for openai. Run: picoclaw auth login --provider openai") | |
| } | |
| return NewCodexProviderWithTokenSource(cred.AccessToken, cred.AccountID, createCodexTokenSource()), nil | |
| } | |
| func CreateProvider(cfg *config.Config) (LLMProvider, error) { | |
| model := cfg.Agents.Defaults.Model | |
| providerName := strings.ToLower(cfg.Agents.Defaults.Provider) | |
| var apiKey, apiBase, proxy string | |
| lowerModel := strings.ToLower(model) | |
| // First, try to use explicitly configured provider | |
| if providerName != "" { | |
| switch providerName { | |
| case "groq": | |
| if cfg.Providers.Groq.APIKey != "" { | |
| apiKey = cfg.Providers.Groq.APIKey | |
| apiBase = cfg.Providers.Groq.APIBase | |
| if apiBase == "" { | |
| apiBase = "https://api.groq.com/openai/v1" | |
| } | |
| } | |
| case "openai", "gpt": | |
| if cfg.Providers.OpenAI.APIKey != "" || cfg.Providers.OpenAI.AuthMethod != "" { | |
| if cfg.Providers.OpenAI.AuthMethod == "codex-cli" { | |
| return NewCodexProviderWithTokenSource("", "", CreateCodexCliTokenSource()), nil | |
| } | |
| if cfg.Providers.OpenAI.AuthMethod == "oauth" || cfg.Providers.OpenAI.AuthMethod == "token" { | |
| return createCodexAuthProvider() | |
| } | |
| apiKey = cfg.Providers.OpenAI.APIKey | |
| apiBase = cfg.Providers.OpenAI.APIBase | |
| if apiBase == "" { | |
| apiBase = "https://api.openai.com/v1" | |
| } | |
| } | |
| case "anthropic", "claude": | |
| if cfg.Providers.Anthropic.APIKey != "" || cfg.Providers.Anthropic.AuthMethod != "" { | |
| if cfg.Providers.Anthropic.AuthMethod == "oauth" || cfg.Providers.Anthropic.AuthMethod == "token" { | |
| return createClaudeAuthProvider() | |
| } | |
| apiKey = cfg.Providers.Anthropic.APIKey | |
| apiBase = cfg.Providers.Anthropic.APIBase | |
| if apiBase == "" { | |
| apiBase = "https://api.anthropic.com/v1" | |
| } | |
| } | |
| case "openrouter": | |
| if cfg.Providers.OpenRouter.APIKey != "" { | |
| apiKey = cfg.Providers.OpenRouter.APIKey | |
| if cfg.Providers.OpenRouter.APIBase != "" { | |
| apiBase = cfg.Providers.OpenRouter.APIBase | |
| } else { | |
| apiBase = "https://openrouter.ai/api/v1" | |
| } | |
| } | |
| case "zhipu", "glm": | |
| if cfg.Providers.Zhipu.APIKey != "" { | |
| apiKey = cfg.Providers.Zhipu.APIKey | |
| apiBase = cfg.Providers.Zhipu.APIBase | |
| if apiBase == "" { | |
| apiBase = "https://open.bigmodel.cn/api/paas/v4" | |
| } | |
| } | |
| case "gemini", "google": | |
| if cfg.Providers.Gemini.APIKey != "" { | |
| apiKey = cfg.Providers.Gemini.APIKey | |
| apiBase = cfg.Providers.Gemini.APIBase | |
| if apiBase == "" { | |
| apiBase = "https://generativelanguage.googleapis.com/v1beta" | |
| } | |
| } | |
| case "vllm": | |
| if cfg.Providers.VLLM.APIBase != "" { | |
| apiKey = cfg.Providers.VLLM.APIKey | |
| apiBase = cfg.Providers.VLLM.APIBase | |
| } | |
| case "shengsuanyun": | |
| if cfg.Providers.ShengSuanYun.APIKey != "" { | |
| apiKey = cfg.Providers.ShengSuanYun.APIKey | |
| apiBase = cfg.Providers.ShengSuanYun.APIBase | |
| if apiBase == "" { | |
| apiBase = "https://router.shengsuanyun.com/api/v1" | |
| } | |
| } | |
| case "claude-cli", "claudecode", "claude-code": | |
| workspace := cfg.WorkspacePath() | |
| if workspace == "" { | |
| workspace = "." | |
| } | |
| return NewClaudeCliProvider(workspace), nil | |
| case "codex-cli", "codex-code": | |
| workspace := cfg.WorkspacePath() | |
| if workspace == "" { | |
| workspace = "." | |
| } | |
| return NewCodexCliProvider(workspace), nil | |
| case "deepseek": | |
| if cfg.Providers.DeepSeek.APIKey != "" { | |
| apiKey = cfg.Providers.DeepSeek.APIKey | |
| apiBase = cfg.Providers.DeepSeek.APIBase | |
| if apiBase == "" { | |
| apiBase = "https://api.deepseek.com/v1" | |
| } | |
| if model != "deepseek-chat" && model != "deepseek-reasoner" { | |
| model = "deepseek-chat" | |
| } | |
| } | |
| case "stepfun": | |
| if cfg.Providers.StepFun.APIKey != "" { | |
| apiKey = cfg.Providers.StepFun.APIKey | |
| apiBase = cfg.Providers.StepFun.APIBase | |
| if apiBase == "" { | |
| apiBase = "https://api.stepfun.com/v1" | |
| } | |
| } | |
| case "github_copilot", "copilot": | |
| if cfg.Providers.GitHubCopilot.APIBase != "" { | |
| apiBase = cfg.Providers.GitHubCopilot.APIBase | |
| } else { | |
| apiBase = "localhost:4321" | |
| } | |
| return NewGitHubCopilotProvider(apiBase, cfg.Providers.GitHubCopilot.ConnectMode, model) | |
| } | |
| } | |
| // Fallback: detect provider from model name | |
| if apiKey == "" && apiBase == "" { | |
| switch { | |
| case (strings.Contains(lowerModel, "kimi") || strings.Contains(lowerModel, "moonshot") || strings.HasPrefix(model, "moonshot/")) && cfg.Providers.Moonshot.APIKey != "": | |
| apiKey = cfg.Providers.Moonshot.APIKey | |
| apiBase = cfg.Providers.Moonshot.APIBase | |
| proxy = cfg.Providers.Moonshot.Proxy | |
| if apiBase == "" { | |
| apiBase = "https://api.moonshot.cn/v1" | |
| } | |
| case strings.HasPrefix(model, "openrouter/") || strings.HasPrefix(model, "anthropic/") || strings.HasPrefix(model, "openai/") || strings.HasPrefix(model, "meta-llama/") || strings.HasPrefix(model, "deepseek/") || strings.HasPrefix(model, "google/"): | |
| apiKey = cfg.Providers.OpenRouter.APIKey | |
| proxy = cfg.Providers.OpenRouter.Proxy | |
| if cfg.Providers.OpenRouter.APIBase != "" { | |
| apiBase = cfg.Providers.OpenRouter.APIBase | |
| } else { | |
| apiBase = "https://openrouter.ai/api/v1" | |
| } | |
| case (strings.Contains(lowerModel, "claude") || strings.HasPrefix(model, "anthropic/")) && (cfg.Providers.Anthropic.APIKey != "" || cfg.Providers.Anthropic.AuthMethod != ""): | |
| if cfg.Providers.Anthropic.AuthMethod == "oauth" || cfg.Providers.Anthropic.AuthMethod == "token" { | |
| return createClaudeAuthProvider() | |
| } | |
| apiKey = cfg.Providers.Anthropic.APIKey | |
| apiBase = cfg.Providers.Anthropic.APIBase | |
| proxy = cfg.Providers.Anthropic.Proxy | |
| if apiBase == "" { | |
| apiBase = "https://api.anthropic.com/v1" | |
| } | |
| case (strings.Contains(lowerModel, "gpt") || strings.HasPrefix(model, "openai/")) && (cfg.Providers.OpenAI.APIKey != "" || cfg.Providers.OpenAI.AuthMethod != ""): | |
| if cfg.Providers.OpenAI.AuthMethod == "oauth" || cfg.Providers.OpenAI.AuthMethod == "token" { | |
| return createCodexAuthProvider() | |
| } | |
| apiKey = cfg.Providers.OpenAI.APIKey | |
| apiBase = cfg.Providers.OpenAI.APIBase | |
| proxy = cfg.Providers.OpenAI.Proxy | |
| if apiBase == "" { | |
| apiBase = "https://api.openai.com/v1" | |
| } | |
| case (strings.Contains(lowerModel, "gemini") || strings.HasPrefix(model, "google/")) && cfg.Providers.Gemini.APIKey != "": | |
| apiKey = cfg.Providers.Gemini.APIKey | |
| apiBase = cfg.Providers.Gemini.APIBase | |
| proxy = cfg.Providers.Gemini.Proxy | |
| if apiBase == "" { | |
| apiBase = "https://generativelanguage.googleapis.com/v1beta" | |
| } | |
| case (strings.Contains(lowerModel, "glm") || strings.Contains(lowerModel, "zhipu") || strings.Contains(lowerModel, "zai")) && cfg.Providers.Zhipu.APIKey != "": | |
| apiKey = cfg.Providers.Zhipu.APIKey | |
| apiBase = cfg.Providers.Zhipu.APIBase | |
| proxy = cfg.Providers.Zhipu.Proxy | |
| if apiBase == "" { | |
| apiBase = "https://open.bigmodel.cn/api/paas/v4" | |
| } | |
| case (strings.Contains(lowerModel, "groq") || strings.HasPrefix(model, "groq/")) && cfg.Providers.Groq.APIKey != "": | |
| apiKey = cfg.Providers.Groq.APIKey | |
| apiBase = cfg.Providers.Groq.APIBase | |
| proxy = cfg.Providers.Groq.Proxy | |
| if apiBase == "" { | |
| apiBase = "https://api.groq.com/openai/v1" | |
| } | |
| case (strings.Contains(lowerModel, "nvidia") || strings.HasPrefix(model, "nvidia/")) && cfg.Providers.Nvidia.APIKey != "": | |
| apiKey = cfg.Providers.Nvidia.APIKey | |
| apiBase = cfg.Providers.Nvidia.APIBase | |
| proxy = cfg.Providers.Nvidia.Proxy | |
| if apiBase == "" { | |
| apiBase = "https://integrate.api.nvidia.com/v1" | |
| } | |
| case (strings.Contains(lowerModel, "ollama") || strings.HasPrefix(model, "ollama/")) && cfg.Providers.Ollama.APIKey != "": | |
| fmt.Println("Ollama provider selected based on model name prefix") | |
| apiKey = cfg.Providers.Ollama.APIKey | |
| apiBase = cfg.Providers.Ollama.APIBase | |
| proxy = cfg.Providers.Ollama.Proxy | |
| if apiBase == "" { | |
| apiBase = "http://localhost:11434/v1" | |
| } | |
| fmt.Println("Ollama apiBase:", apiBase) | |
| case cfg.Providers.VLLM.APIBase != "": | |
| apiKey = cfg.Providers.VLLM.APIKey | |
| apiBase = cfg.Providers.VLLM.APIBase | |
| proxy = cfg.Providers.VLLM.Proxy | |
| default: | |
| if cfg.Providers.OpenRouter.APIKey != "" { | |
| apiKey = cfg.Providers.OpenRouter.APIKey | |
| proxy = cfg.Providers.OpenRouter.Proxy | |
| if cfg.Providers.OpenRouter.APIBase != "" { | |
| apiBase = cfg.Providers.OpenRouter.APIBase | |
| } else { | |
| apiBase = "https://openrouter.ai/api/v1" | |
| } | |
| } else { | |
| return nil, fmt.Errorf("no API key configured for model: %s", model) | |
| } | |
| } | |
| } | |
| if apiKey == "" && !strings.HasPrefix(model, "bedrock/") { | |
| return nil, fmt.Errorf("no API key configured for provider (model: %s)", model) | |
| } | |
| if apiBase == "" { | |
| return nil, fmt.Errorf("no API base configured for provider (model: %s)", model) | |
| } | |
| return NewHTTPProvider(apiKey, apiBase, proxy), nil | |
| } | |