| package service |
|
|
| import ( |
| "encoding/json" |
| "fmt" |
| "strings" |
|
|
| "github.com/QuantumNous/new-api/common" |
| "github.com/QuantumNous/new-api/constant" |
| "github.com/QuantumNous/new-api/dto" |
| "github.com/QuantumNous/new-api/relay/channel/openrouter" |
| relaycommon "github.com/QuantumNous/new-api/relay/common" |
| ) |
|
|
// ClaudeToOpenAIRequest converts a Claude (Anthropic Messages API) request into
// an OpenAI chat-completions request so it can be relayed to an OpenAI-compatible
// upstream. It maps sampling parameters, thinking/reasoning configuration, stop
// sequences, tools, the system prompt, and every message — including tool_use /
// tool_result content blocks, which become OpenAI tool calls and "tool" role
// messages respectively. Returns an error if reasoning marshalling or message
// content parsing fails.
func ClaudeToOpenAIRequest(claudeRequest dto.ClaudeRequest, info *relaycommon.RelayInfo) (*dto.GeneralOpenAIRequest, error) {
	// Directly-compatible scalar fields.
	openAIRequest := dto.GeneralOpenAIRequest{
		Model:       claudeRequest.Model,
		MaxTokens:   claudeRequest.MaxTokens,
		Temperature: claudeRequest.Temperature,
		TopP:        claudeRequest.TopP,
		Stream:      claudeRequest.Stream,
	}

	isOpenRouter := info.ChannelType == constant.ChannelTypeOpenRouter

	// Claude "thinking" has no direct OpenAI field: OpenRouter accepts a
	// structured reasoning payload; other channels use the "-thinking"
	// model-name suffix convention instead.
	if claudeRequest.Thinking != nil && claudeRequest.Thinking.Type == "enabled" {
		if isOpenRouter {
			reasoning := openrouter.RequestReasoning{
				MaxTokens: claudeRequest.Thinking.GetBudgetTokens(),
			}
			reasoningJSON, err := json.Marshal(reasoning)
			if err != nil {
				return nil, fmt.Errorf("failed to marshal reasoning: %w", err)
			}
			openAIRequest.Reasoning = reasoningJSON
		} else {
			// Append the suffix only when the caller's original model name asked
			// for thinking and the target model does not already carry it.
			thinkingSuffix := "-thinking"
			if strings.HasSuffix(info.OriginModelName, thinkingSuffix) &&
				!strings.HasSuffix(openAIRequest.Model, thinkingSuffix) {
				openAIRequest.Model = openAIRequest.Model + thinkingSuffix
			}
		}
	}

	// OpenAI's Stop accepts either a single string or a list of strings.
	if len(claudeRequest.StopSequences) == 1 {
		openAIRequest.Stop = claudeRequest.StopSequences[0]
	} else if len(claudeRequest.StopSequences) > 1 {
		openAIRequest.Stop = claudeRequest.StopSequences
	}

	// Claude tools -> OpenAI function tools. The conversion error is deliberately
	// ignored: on failure `tools` stays empty and no tools are forwarded.
	tools, _ := common.Any2Type[[]dto.Tool](claudeRequest.Tools)
	openAITools := make([]dto.ToolCallRequest, 0)
	for _, claudeTool := range tools {
		openAITool := dto.ToolCallRequest{
			Type: "function",
			Function: dto.FunctionRequest{
				Name:        claudeTool.Name,
				Description: claudeTool.Description,
				Parameters:  claudeTool.InputSchema,
			},
		}
		openAITools = append(openAITools, openAITool)
	}
	openAIRequest.Tools = openAITools

	openAIMessages := make([]dto.Message, 0)

	// Claude carries the system prompt in a dedicated field (string or block
	// list); OpenAI expects it as a leading "system" message.
	if claudeRequest.System != nil {
		if claudeRequest.IsStringSystem() && claudeRequest.GetStringSystem() != "" {
			openAIMessage := dto.Message{
				Role: "system",
			}
			openAIMessage.SetStringContent(claudeRequest.GetStringSystem())
			openAIMessages = append(openAIMessages, openAIMessage)
		} else {
			systems := claudeRequest.ParseSystem()
			if len(systems) > 0 {
				openAIMessage := dto.Message{
					Role: "system",
				}
				// For Claude models behind OpenRouter, preserve the block list so
				// per-block cache_control survives; otherwise flatten to a string.
				isOpenRouterClaude := isOpenRouter && strings.HasPrefix(info.UpstreamModelName, "anthropic/claude")
				if isOpenRouterClaude {
					systemMediaMessages := make([]dto.MediaContent, 0, len(systems))
					for _, system := range systems {
						message := dto.MediaContent{
							Type:         "text",
							Text:         system.GetText(),
							CacheControl: system.CacheControl,
						}
						systemMediaMessages = append(systemMediaMessages, message)
					}
					openAIMessage.SetMediaContent(systemMediaMessages)
				} else {
					systemStr := ""
					for _, system := range systems {
						if system.Text != nil {
							systemStr += *system.Text
						}
					}
					openAIMessage.SetStringContent(systemStr)
				}
				openAIMessages = append(openAIMessages, openAIMessage)
			}
		}
	}
	for _, claudeMessage := range claudeRequest.Messages {
		openAIMessage := dto.Message{
			Role: claudeMessage.Role,
		}

		if claudeMessage.IsStringContent() {
			// Plain string content maps one-to-one.
			openAIMessage.SetStringContent(claudeMessage.GetStringContent())
		} else {
			content, err := claudeMessage.ParseContent()
			if err != nil {
				return nil, err
			}
			contents := content
			var toolCalls []dto.ToolCallRequest
			mediaMessages := make([]dto.MediaContent, 0, len(contents))

			for _, mediaMsg := range contents {
				switch mediaMsg.Type {
				case "text":
					message := dto.MediaContent{
						Type:         "text",
						Text:         mediaMsg.GetText(),
						CacheControl: mediaMsg.CacheControl,
					}
					mediaMessages = append(mediaMessages, message)
				case "image":
					// Claude ships images as {media_type, base64 data}; OpenAI
					// expects a data URL.
					imageData := fmt.Sprintf("data:%s;base64,%s", mediaMsg.Source.MediaType, mediaMsg.Source.Data)

					mediaMessage := dto.MediaContent{
						Type:     "image_url",
						ImageUrl: &dto.MessageImageUrl{Url: imageData},
					}
					mediaMessages = append(mediaMessages, mediaMessage)
				case "tool_use":
					// Claude tool_use block -> OpenAI tool call; the Input object
					// is serialized to an arguments JSON string.
					toolCall := dto.ToolCallRequest{
						ID:   mediaMsg.Id,
						Type: "function",
						Function: dto.FunctionRequest{
							Name:      mediaMsg.Name,
							Arguments: toJSONString(mediaMsg.Input),
						},
					}
					toolCalls = append(toolCalls, toolCall)
				case "tool_result":
					// tool_result blocks become standalone "tool" role messages,
					// appended immediately (before the current message).
					// Recover the tool name from the originating tool_use call when
					// the block itself does not carry one.
					toolName := mediaMsg.Name
					if toolName == "" {
						toolName = claudeRequest.SearchToolNameByToolCallId(mediaMsg.ToolUseId)
					}
					oaiToolMessage := dto.Message{
						Role:       "tool",
						Name:       &toolName,
						ToolCallId: mediaMsg.ToolUseId,
					}

					if mediaMsg.IsStringContent() {
						oaiToolMessage.SetStringContent(mediaMsg.GetStringContent())
					} else {
						// Non-string tool results are serialized to JSON text; the
						// marshal error is intentionally ignored (best effort).
						mediaContents := mediaMsg.ParseMediaContent()
						encodeJson, _ := common.Marshal(mediaContents)
						oaiToolMessage.SetStringContent(string(encodeJson))
					}
					openAIMessages = append(openAIMessages, oaiToolMessage)
				}
			}

			if len(toolCalls) > 0 {
				openAIMessage.SetToolCalls(toolCalls)
			}

			// NOTE(review): when a message mixes text and tool_use blocks, the
			// accumulated text (mediaMessages) is dropped in favor of the tool
			// calls — confirm this is intentional.
			if len(mediaMessages) > 0 && len(toolCalls) == 0 {
				openAIMessage.SetMediaContent(mediaMessages)
			}
		}
		// Skip messages that ended up with neither content nor tool calls.
		if len(openAIMessage.ParseContent()) > 0 || len(openAIMessage.ToolCalls) > 0 {
			openAIMessages = append(openAIMessages, openAIMessage)
		}
	}

	openAIRequest.Messages = openAIMessages

	return &openAIRequest, nil
}
|
|
| func generateStopBlock(index int) *dto.ClaudeResponse { |
| return &dto.ClaudeResponse{ |
| Type: "content_block_stop", |
| Index: common.GetPointer[int](index), |
| } |
| } |
|
|
// StreamResponseOpenAI2Claude converts one OpenAI streaming chunk into the
// corresponding sequence of Claude SSE events (message_start,
// content_block_start, content_block_delta, content_block_stop, message_delta,
// message_stop). Conversion state — the current block index, the last block
// type, accumulated usage, and the done flag — lives in info.ClaudeConvertInfo
// and persists across chunks. Returns nil once the Claude stream has already
// been finished.
func StreamResponseOpenAI2Claude(openAIResponse *dto.ChatCompletionsStreamResponse, info *relaycommon.RelayInfo) []*dto.ClaudeResponse {
	// The Claude stream is already terminated; drop any trailing chunks.
	if info.ClaudeConvertInfo.Done {
		return nil
	}

	var claudeResponses []*dto.ClaudeResponse
	// First chunk: emit message_start plus the opening content block(s).
	if info.SendResponseCount == 1 {
		msg := &dto.ClaudeMediaMessage{
			Id:    openAIResponse.Id,
			Model: openAIResponse.Model,
			Type:  "message",
			Role:  "assistant",
			Usage: &dto.ClaudeUsage{
				// Real usage is unknown until the upstream reports it; start with
				// the estimated prompt token count.
				InputTokens:  info.GetEstimatePromptTokens(),
				OutputTokens: 0,
			},
		}
		msg.SetContent(make([]any, 0))
		claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
			Type:    "message_start",
			Message: msg,
		})

		// If the very first chunk already carries a tool call, open a tool_use
		// block at index 0 and stream its arguments.
		if openAIResponse.IsToolCall() {
			info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeTools
			var toolCall dto.ToolCallResponse
			if len(openAIResponse.Choices) > 0 && len(openAIResponse.Choices[0].Delta.ToolCalls) > 0 {
				toolCall = openAIResponse.Choices[0].Delta.ToolCalls[0]
			} else {
				// Fall back to whichever tool call the response exposes, or a
				// zero value so the block is still opened.
				first := openAIResponse.GetFirstToolCall()
				if first != nil {
					toolCall = *first
				} else {
					toolCall = dto.ToolCallResponse{}
				}
			}
			resp := &dto.ClaudeResponse{
				Type: "content_block_start",
				ContentBlock: &dto.ClaudeMediaMessage{
					Id:    toolCall.ID,
					Type:  "tool_use",
					Name:  toolCall.Function.Name,
					Input: map[string]interface{}{},
				},
			}
			resp.SetIndex(0)
			claudeResponses = append(claudeResponses, resp)

			// Stream any arguments already present in this chunk.
			if toolCall.Function.Arguments != "" {
				claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
					Index: &info.ClaudeConvertInfo.Index,
					Type:  "content_block_delta",
					Delta: &dto.ClaudeMediaMessage{
						Type:        "input_json_delta",
						PartialJson: &toolCall.Function.Arguments,
					},
				})
			}
		} else {
			// No tool call in the first chunk; text/thinking handling below.
		}
		if len(openAIResponse.Choices) > 0 {
			reasoning := openAIResponse.Choices[0].Delta.GetReasoningContent()
			content := openAIResponse.Choices[0].Delta.GetContentString()

			// Open the appropriate block type and emit the first delta; reasoning
			// takes precedence over plain text content.
			if reasoning != "" {
				claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
					Index: &info.ClaudeConvertInfo.Index,
					Type:  "content_block_start",
					ContentBlock: &dto.ClaudeMediaMessage{
						Type:     "thinking",
						Thinking: common.GetPointer[string](""),
					},
				})
				claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
					Index: &info.ClaudeConvertInfo.Index,
					Type:  "content_block_delta",
					Delta: &dto.ClaudeMediaMessage{
						Type:     "thinking_delta",
						Thinking: &reasoning,
					},
				})
				info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeThinking
			} else if content != "" {
				claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
					Index: &info.ClaudeConvertInfo.Index,
					Type:  "content_block_start",
					ContentBlock: &dto.ClaudeMediaMessage{
						Type: "text",
						Text: common.GetPointer[string](""),
					},
				})
				claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
					Index: &info.ClaudeConvertInfo.Index,
					Type:  "content_block_delta",
					Delta: &dto.ClaudeMediaMessage{
						Type: "text_delta",
						Text: common.GetPointer[string](content),
					},
				})
				info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeText
			}
		}

		// The first chunk may also be the last (single-chunk response): close the
		// block, emit usage via message_delta, and finish with message_stop.
		if len(openAIResponse.Choices) > 0 && openAIResponse.Choices[0].FinishReason != nil && *openAIResponse.Choices[0].FinishReason != "" {
			info.FinishReason = *openAIResponse.Choices[0].FinishReason
			claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
			oaiUsage := openAIResponse.Usage
			if oaiUsage == nil {
				oaiUsage = info.ClaudeConvertInfo.Usage
			}
			if oaiUsage != nil {
				claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
					Type: "message_delta",
					Usage: &dto.ClaudeUsage{
						InputTokens:              oaiUsage.PromptTokens,
						OutputTokens:             oaiUsage.CompletionTokens,
						CacheCreationInputTokens: oaiUsage.PromptTokensDetails.CachedCreationTokens,
						CacheReadInputTokens:     oaiUsage.PromptTokensDetails.CachedTokens,
					},
					Delta: &dto.ClaudeMediaMessage{
						StopReason: common.GetPointer[string](stopReasonOpenAI2Claude(info.FinishReason)),
					},
				})
			}
			claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
				Type: "message_stop",
			})
			info.ClaudeConvertInfo.Done = true
		}
		return claudeResponses
	}

	if len(openAIResponse.Choices) == 0 {
		// Chunk without choices (e.g. a usage-only chunk).
		// NOTE(review): Done is always false here — it was checked at function
		// entry and is not set on this path — so this branch appears unreachable;
		// confirm whether the termination events below were meant to fire on a
		// different condition.
		if info.ClaudeConvertInfo.Done {
			claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
			oaiUsage := info.ClaudeConvertInfo.Usage
			if oaiUsage != nil {
				claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
					Type: "message_delta",
					Usage: &dto.ClaudeUsage{
						InputTokens:              oaiUsage.PromptTokens,
						OutputTokens:             oaiUsage.CompletionTokens,
						CacheCreationInputTokens: oaiUsage.PromptTokensDetails.CachedCreationTokens,
						CacheReadInputTokens:     oaiUsage.PromptTokensDetails.CachedTokens,
					},
					Delta: &dto.ClaudeMediaMessage{
						StopReason: common.GetPointer[string](stopReasonOpenAI2Claude(info.FinishReason)),
					},
				})
			}
			claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
				Type: "message_stop",
			})
		}
		return claudeResponses
	} else {
		// Subsequent chunk with at least one choice.
		chosenChoice := openAIResponse.Choices[0]
		doneChunk := chosenChoice.FinishReason != nil && *chosenChoice.FinishReason != ""
		if doneChunk {
			info.FinishReason = *chosenChoice.FinishReason
		}

		var claudeResponse dto.ClaudeResponse
		var isEmpty bool
		claudeResponse.Type = "content_block_delta"
		if len(chosenChoice.Delta.ToolCalls) > 0 {
			toolCalls := chosenChoice.Delta.ToolCalls
			// Switching from a text/thinking block to tool calls closes the
			// current block and advances the index.
			if info.ClaudeConvertInfo.LastMessagesType != relaycommon.LastMessageTypeTools {
				claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
				info.ClaudeConvertInfo.Index++
			}
			info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeTools

			for i, toolCall := range toolCalls {
				// Prefer the upstream-provided tool-call index; otherwise lay
				// multiple calls out sequentially from the current index.
				blockIndex := info.ClaudeConvertInfo.Index
				if toolCall.Index != nil {
					blockIndex = *toolCall.Index
				} else if len(toolCalls) > 1 {
					blockIndex = info.ClaudeConvertInfo.Index + i
				}

				// Copy so each emitted event points at its own index value.
				idx := blockIndex
				// A non-empty name marks the start of a new tool_use block.
				if toolCall.Function.Name != "" {
					claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
						Index: &idx,
						Type:  "content_block_start",
						ContentBlock: &dto.ClaudeMediaMessage{
							Id:    toolCall.ID,
							Type:  "tool_use",
							Name:  toolCall.Function.Name,
							Input: map[string]interface{}{},
						},
					})
				}

				// Stream incremental JSON arguments for the block.
				if len(toolCall.Function.Arguments) > 0 {
					claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
						Index: &idx,
						Type:  "content_block_delta",
						Delta: &dto.ClaudeMediaMessage{
							Type:        "input_json_delta",
							PartialJson: &toolCall.Function.Arguments,
						},
					})
				}

				info.ClaudeConvertInfo.Index = blockIndex
			}
		} else {
			reasoning := chosenChoice.Delta.GetReasoningContent()
			textContent := chosenChoice.Delta.GetContentString()
			if reasoning != "" || textContent != "" {
				if reasoning != "" {
					// Open a thinking block on transition into reasoning output.
					if info.ClaudeConvertInfo.LastMessagesType != relaycommon.LastMessageTypeThinking {
						claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
							Index: &info.ClaudeConvertInfo.Index,
							Type:  "content_block_start",
							ContentBlock: &dto.ClaudeMediaMessage{
								Type:     "thinking",
								Thinking: common.GetPointer[string](""),
							},
						})
					}
					info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeThinking
					claudeResponse.Delta = &dto.ClaudeMediaMessage{
						Type:     "thinking_delta",
						Thinking: &reasoning,
					}
				} else {
					// Transitioning into text: close a preceding thinking/tools
					// block before opening the text block.
					if info.ClaudeConvertInfo.LastMessagesType != relaycommon.LastMessageTypeText {
						if info.ClaudeConvertInfo.LastMessagesType == relaycommon.LastMessageTypeThinking || info.ClaudeConvertInfo.LastMessagesType == relaycommon.LastMessageTypeTools {
							claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
							info.ClaudeConvertInfo.Index++
						}
						claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
							Index: &info.ClaudeConvertInfo.Index,
							Type:  "content_block_start",
							ContentBlock: &dto.ClaudeMediaMessage{
								Type: "text",
								Text: common.GetPointer[string](""),
							},
						})
					}
					info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeText
					claudeResponse.Delta = &dto.ClaudeMediaMessage{
						Type: "text_delta",
						Text: common.GetPointer[string](textContent),
					}
				}
			} else {
				// Neither reasoning nor text: nothing to emit for this delta.
				isEmpty = true
			}
		}

		claudeResponse.Index = &info.ClaudeConvertInfo.Index
		if !isEmpty && claudeResponse.Delta != nil {
			claudeResponses = append(claudeResponses, &claudeResponse)
		}

		// Finish chunk: close the open block, report usage, terminate the stream.
		if doneChunk || info.ClaudeConvertInfo.Done {
			claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
			oaiUsage := openAIResponse.Usage
			if oaiUsage == nil {
				oaiUsage = info.ClaudeConvertInfo.Usage
			}
			if oaiUsage != nil {
				claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
					Type: "message_delta",
					Usage: &dto.ClaudeUsage{
						InputTokens:              oaiUsage.PromptTokens,
						OutputTokens:             oaiUsage.CompletionTokens,
						CacheCreationInputTokens: oaiUsage.PromptTokensDetails.CachedCreationTokens,
						CacheReadInputTokens:     oaiUsage.PromptTokensDetails.CachedTokens,
					},
					Delta: &dto.ClaudeMediaMessage{
						StopReason: common.GetPointer[string](stopReasonOpenAI2Claude(info.FinishReason)),
					},
				})
			}
			claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
				Type: "message_stop",
			})
			info.ClaudeConvertInfo.Done = true
			return claudeResponses
		}
	}

	return claudeResponses
}
|
|
| func ResponseOpenAI2Claude(openAIResponse *dto.OpenAITextResponse, info *relaycommon.RelayInfo) *dto.ClaudeResponse { |
| var stopReason string |
| contents := make([]dto.ClaudeMediaMessage, 0) |
| claudeResponse := &dto.ClaudeResponse{ |
| Id: openAIResponse.Id, |
| Type: "message", |
| Role: "assistant", |
| Model: openAIResponse.Model, |
| } |
| for _, choice := range openAIResponse.Choices { |
| stopReason = stopReasonOpenAI2Claude(choice.FinishReason) |
| if choice.FinishReason == "tool_calls" { |
| for _, toolUse := range choice.Message.ParseToolCalls() { |
| claudeContent := dto.ClaudeMediaMessage{} |
| claudeContent.Type = "tool_use" |
| claudeContent.Id = toolUse.ID |
| claudeContent.Name = toolUse.Function.Name |
| var mapParams map[string]interface{} |
| if err := common.Unmarshal([]byte(toolUse.Function.Arguments), &mapParams); err == nil { |
| claudeContent.Input = mapParams |
| } else { |
| claudeContent.Input = toolUse.Function.Arguments |
| } |
| contents = append(contents, claudeContent) |
| } |
| } else { |
| claudeContent := dto.ClaudeMediaMessage{} |
| claudeContent.Type = "text" |
| claudeContent.SetText(choice.Message.StringContent()) |
| contents = append(contents, claudeContent) |
| } |
| } |
| claudeResponse.Content = contents |
| claudeResponse.StopReason = stopReason |
| claudeResponse.Usage = &dto.ClaudeUsage{ |
| InputTokens: openAIResponse.PromptTokens, |
| OutputTokens: openAIResponse.CompletionTokens, |
| } |
|
|
| return claudeResponse |
| } |
|
|
// stopReasonOpenAI2Claude maps an OpenAI finish_reason to the Claude stop_reason
// vocabulary. Unrecognized reasons pass through unchanged.
func stopReasonOpenAI2Claude(reason string) string {
	switch reason {
	case "stop":
		return "end_turn"
	case "stop_sequence":
		return "stop_sequence"
	case "length", "max_tokens":
		return "max_tokens"
	case "tool_calls":
		return "tool_use"
	}
	return reason
}
|
|
// toJSONString serializes v to compact JSON, returning "{}" when marshalling
// fails (e.g. unsupported types).
func toJSONString(v interface{}) string {
	if data, err := json.Marshal(v); err == nil {
		return string(data)
	}
	return "{}"
}
|
|
| func GeminiToOpenAIRequest(geminiRequest *dto.GeminiChatRequest, info *relaycommon.RelayInfo) (*dto.GeneralOpenAIRequest, error) { |
| openaiRequest := &dto.GeneralOpenAIRequest{ |
| Model: info.UpstreamModelName, |
| Stream: info.IsStream, |
| } |
|
|
| |
| var messages []dto.Message |
| for _, content := range geminiRequest.Contents { |
| message := dto.Message{ |
| Role: convertGeminiRoleToOpenAI(content.Role), |
| } |
|
|
| |
| var mediaContents []dto.MediaContent |
| var toolCalls []dto.ToolCallRequest |
| for _, part := range content.Parts { |
| if part.Text != "" { |
| mediaContent := dto.MediaContent{ |
| Type: "text", |
| Text: part.Text, |
| } |
| mediaContents = append(mediaContents, mediaContent) |
| } else if part.InlineData != nil { |
| mediaContent := dto.MediaContent{ |
| Type: "image_url", |
| ImageUrl: &dto.MessageImageUrl{ |
| Url: fmt.Sprintf("data:%s;base64,%s", part.InlineData.MimeType, part.InlineData.Data), |
| Detail: "auto", |
| MimeType: part.InlineData.MimeType, |
| }, |
| } |
| mediaContents = append(mediaContents, mediaContent) |
| } else if part.FileData != nil { |
| mediaContent := dto.MediaContent{ |
| Type: "image_url", |
| ImageUrl: &dto.MessageImageUrl{ |
| Url: part.FileData.FileUri, |
| Detail: "auto", |
| MimeType: part.FileData.MimeType, |
| }, |
| } |
| mediaContents = append(mediaContents, mediaContent) |
| } else if part.FunctionCall != nil { |
| |
| toolCall := dto.ToolCallRequest{ |
| ID: fmt.Sprintf("call_%d", len(toolCalls)+1), |
| Type: "function", |
| Function: dto.FunctionRequest{ |
| Name: part.FunctionCall.FunctionName, |
| Arguments: toJSONString(part.FunctionCall.Arguments), |
| }, |
| } |
| toolCalls = append(toolCalls, toolCall) |
| } else if part.FunctionResponse != nil { |
| |
| toolMessage := dto.Message{ |
| Role: "tool", |
| ToolCallId: fmt.Sprintf("call_%d", len(toolCalls)), |
| } |
| toolMessage.SetStringContent(toJSONString(part.FunctionResponse.Response)) |
| messages = append(messages, toolMessage) |
| } |
| } |
|
|
| |
| if len(toolCalls) > 0 { |
| |
| message.SetToolCalls(toolCalls) |
| } else if len(mediaContents) == 1 && mediaContents[0].Type == "text" { |
| |
| message.Content = mediaContents[0].Text |
| } else if len(mediaContents) > 0 { |
| |
| message.SetMediaContent(mediaContents) |
| } |
|
|
| |
| if len(message.ParseContent()) > 0 || len(message.ToolCalls) > 0 { |
| messages = append(messages, message) |
| } |
| } |
|
|
| openaiRequest.Messages = messages |
|
|
| if geminiRequest.GenerationConfig.Temperature != nil { |
| openaiRequest.Temperature = geminiRequest.GenerationConfig.Temperature |
| } |
| if geminiRequest.GenerationConfig.TopP > 0 { |
| openaiRequest.TopP = geminiRequest.GenerationConfig.TopP |
| } |
| if geminiRequest.GenerationConfig.TopK > 0 { |
| openaiRequest.TopK = int(geminiRequest.GenerationConfig.TopK) |
| } |
| if geminiRequest.GenerationConfig.MaxOutputTokens > 0 { |
| openaiRequest.MaxTokens = geminiRequest.GenerationConfig.MaxOutputTokens |
| } |
| |
| if len(geminiRequest.GenerationConfig.StopSequences) > 0 { |
| openaiRequest.Stop = geminiRequest.GenerationConfig.StopSequences[:4] |
| } |
| if geminiRequest.GenerationConfig.CandidateCount > 0 { |
| openaiRequest.N = geminiRequest.GenerationConfig.CandidateCount |
| } |
|
|
| |
| if len(geminiRequest.GetTools()) > 0 { |
| var tools []dto.ToolCallRequest |
| for _, tool := range geminiRequest.GetTools() { |
| if tool.FunctionDeclarations != nil { |
| |
| functionDeclarations, ok := tool.FunctionDeclarations.([]dto.FunctionRequest) |
| if ok { |
| for _, function := range functionDeclarations { |
| openAITool := dto.ToolCallRequest{ |
| Type: "function", |
| Function: dto.FunctionRequest{ |
| Name: function.Name, |
| Description: function.Description, |
| Parameters: function.Parameters, |
| }, |
| } |
| tools = append(tools, openAITool) |
| } |
| } |
| } |
| } |
| if len(tools) > 0 { |
| openaiRequest.Tools = tools |
| } |
| } |
|
|
| |
| if geminiRequest.SystemInstructions != nil { |
| |
| systemMessage := dto.Message{ |
| Role: "system", |
| Content: extractTextFromGeminiParts(geminiRequest.SystemInstructions.Parts), |
| } |
| openaiRequest.Messages = append([]dto.Message{systemMessage}, openaiRequest.Messages...) |
| } |
|
|
| return openaiRequest, nil |
| } |
|
|
// convertGeminiRoleToOpenAI maps a Gemini content role onto the OpenAI role
// vocabulary: "model" becomes "assistant", "user" and "function" map to
// themselves, and anything unrecognized is treated as user input.
func convertGeminiRoleToOpenAI(geminiRole string) string {
	if geminiRole == "model" {
		return "assistant"
	}
	if geminiRole == "user" || geminiRole == "function" {
		return geminiRole
	}
	return "user"
}
|
|
| func extractTextFromGeminiParts(parts []dto.GeminiPart) string { |
| var texts []string |
| for _, part := range parts { |
| if part.Text != "" { |
| texts = append(texts, part.Text) |
| } |
| } |
| return strings.Join(texts, "\n") |
| } |
|
|
| |
| func ResponseOpenAI2Gemini(openAIResponse *dto.OpenAITextResponse, info *relaycommon.RelayInfo) *dto.GeminiChatResponse { |
| geminiResponse := &dto.GeminiChatResponse{ |
| Candidates: make([]dto.GeminiChatCandidate, 0, len(openAIResponse.Choices)), |
| UsageMetadata: dto.GeminiUsageMetadata{ |
| PromptTokenCount: openAIResponse.PromptTokens, |
| CandidatesTokenCount: openAIResponse.CompletionTokens, |
| TotalTokenCount: openAIResponse.PromptTokens + openAIResponse.CompletionTokens, |
| }, |
| } |
|
|
| for _, choice := range openAIResponse.Choices { |
| candidate := dto.GeminiChatCandidate{ |
| Index: int64(choice.Index), |
| SafetyRatings: []dto.GeminiChatSafetyRating{}, |
| } |
|
|
| |
| var finishReason string |
| switch choice.FinishReason { |
| case "stop": |
| finishReason = "STOP" |
| case "length": |
| finishReason = "MAX_TOKENS" |
| case "content_filter": |
| finishReason = "SAFETY" |
| case "tool_calls": |
| finishReason = "STOP" |
| default: |
| finishReason = "STOP" |
| } |
| candidate.FinishReason = &finishReason |
|
|
| |
| content := dto.GeminiChatContent{ |
| Role: "model", |
| Parts: make([]dto.GeminiPart, 0), |
| } |
|
|
| |
| toolCalls := choice.Message.ParseToolCalls() |
| if len(toolCalls) > 0 { |
| for _, toolCall := range toolCalls { |
| |
| var args map[string]interface{} |
| if toolCall.Function.Arguments != "" { |
| if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args); err != nil { |
| args = map[string]interface{}{"arguments": toolCall.Function.Arguments} |
| } |
| } else { |
| args = make(map[string]interface{}) |
| } |
|
|
| part := dto.GeminiPart{ |
| FunctionCall: &dto.FunctionCall{ |
| FunctionName: toolCall.Function.Name, |
| Arguments: args, |
| }, |
| } |
| content.Parts = append(content.Parts, part) |
| } |
| } else { |
| |
| textContent := choice.Message.StringContent() |
| if textContent != "" { |
| part := dto.GeminiPart{ |
| Text: textContent, |
| } |
| content.Parts = append(content.Parts, part) |
| } |
| } |
|
|
| candidate.Content = content |
| geminiResponse.Candidates = append(geminiResponse.Candidates, candidate) |
| } |
|
|
| return geminiResponse |
| } |
|
|
| |
| func StreamResponseOpenAI2Gemini(openAIResponse *dto.ChatCompletionsStreamResponse, info *relaycommon.RelayInfo) *dto.GeminiChatResponse { |
| |
| hasContent := false |
| hasFinishReason := false |
| for _, choice := range openAIResponse.Choices { |
| if len(choice.Delta.GetContentString()) > 0 || (choice.Delta.ToolCalls != nil && len(choice.Delta.ToolCalls) > 0) { |
| hasContent = true |
| } |
| if choice.FinishReason != nil { |
| hasFinishReason = true |
| } |
| } |
|
|
| |
| if !hasContent && !hasFinishReason { |
| return nil |
| } |
|
|
| geminiResponse := &dto.GeminiChatResponse{ |
| Candidates: make([]dto.GeminiChatCandidate, 0, len(openAIResponse.Choices)), |
| UsageMetadata: dto.GeminiUsageMetadata{ |
| PromptTokenCount: info.GetEstimatePromptTokens(), |
| CandidatesTokenCount: 0, |
| TotalTokenCount: info.GetEstimatePromptTokens(), |
| }, |
| } |
|
|
| if openAIResponse.Usage != nil { |
| geminiResponse.UsageMetadata.PromptTokenCount = openAIResponse.Usage.PromptTokens |
| geminiResponse.UsageMetadata.CandidatesTokenCount = openAIResponse.Usage.CompletionTokens |
| geminiResponse.UsageMetadata.TotalTokenCount = openAIResponse.Usage.TotalTokens |
| } |
|
|
| for _, choice := range openAIResponse.Choices { |
| candidate := dto.GeminiChatCandidate{ |
| Index: int64(choice.Index), |
| SafetyRatings: []dto.GeminiChatSafetyRating{}, |
| } |
|
|
| |
| if choice.FinishReason != nil { |
| var finishReason string |
| switch *choice.FinishReason { |
| case "stop": |
| finishReason = "STOP" |
| case "length": |
| finishReason = "MAX_TOKENS" |
| case "content_filter": |
| finishReason = "SAFETY" |
| case "tool_calls": |
| finishReason = "STOP" |
| default: |
| finishReason = "STOP" |
| } |
| candidate.FinishReason = &finishReason |
| } |
|
|
| |
| content := dto.GeminiChatContent{ |
| Role: "model", |
| Parts: make([]dto.GeminiPart, 0), |
| } |
|
|
| |
| if choice.Delta.ToolCalls != nil { |
| for _, toolCall := range choice.Delta.ToolCalls { |
| |
| var args map[string]interface{} |
| if toolCall.Function.Arguments != "" { |
| if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args); err != nil { |
| args = map[string]interface{}{"arguments": toolCall.Function.Arguments} |
| } |
| } else { |
| args = make(map[string]interface{}) |
| } |
|
|
| part := dto.GeminiPart{ |
| FunctionCall: &dto.FunctionCall{ |
| FunctionName: toolCall.Function.Name, |
| Arguments: args, |
| }, |
| } |
| content.Parts = append(content.Parts, part) |
| } |
| } else { |
| |
| textContent := choice.Delta.GetContentString() |
| if textContent != "" { |
| part := dto.GeminiPart{ |
| Text: textContent, |
| } |
| content.Parts = append(content.Parts, part) |
| } |
| } |
|
|
| candidate.Content = content |
| geminiResponse.Candidates = append(geminiResponse.Candidates, candidate) |
| } |
|
|
| return geminiResponse |
| } |
|
|