File size: 3,224 Bytes
c4baeb2 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 |
package model
import (
"encoding/json"
"fmt"
"pplx2api/logger"
"time"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
)
// ChatCompletionRequest is the request body accepted by the
// OpenAI-compatible chat-completions endpoint.
type ChatCompletionRequest struct {
	Model string `json:"model"` // requested model identifier
	// Messages holds the conversation turns as raw maps (role/content etc.);
	// they are not decoded into a typed struct here.
	Messages []map[string]interface{} `json:"messages"`
	Stream   bool                     `json:"stream"` // true => client wants an SSE stream
	// Tools carries optional tool/function definitions, omitted when empty.
	Tools []map[string]interface{} `json:"tools,omitempty"`
}
// OpenAISrteamResponse is one OpenAI streaming response chunk
// (object type "chat.completion.chunk").
// NOTE(review): the name misspells "Stream"; renaming the exported type
// would break external callers, so it is kept as-is.
type OpenAISrteamResponse struct {
	ID      string         `json:"id"`      // unique chunk/completion id
	Object  string         `json:"object"`  // always "chat.completion.chunk" here
	Created int64          `json:"created"` // Unix seconds timestamp
	Model   string         `json:"model"`   // model name echoed to the client
	Choices []StreamChoice `json:"choices"` // delta choices for this chunk
}
// StreamChoice is a single choice inside a streaming response chunk.
type StreamChoice struct {
	Index int   `json:"index"` // choice position (always 0 in this package)
	Delta Delta `json:"delta"` // incremental text payload
	// Logprobs and FinishReason are emitted as null mid-stream;
	// interface{} lets them serialize as JSON null.
	Logprobs     interface{} `json:"logprobs"`
	FinishReason interface{} `json:"finish_reason"`
}
// NoStreamChoice is a single choice inside a non-streaming completion
// response; unlike StreamChoice it carries a full Message and a concrete
// finish_reason string.
type NoStreamChoice struct {
	Index        int         `json:"index"`         // choice position (always 0 in this package)
	Message      Message     `json:"message"`       // complete assistant message
	Logprobs     interface{} `json:"logprobs"`      // always null here
	FinishReason string      `json:"finish_reason"` // e.g. "stop"
}
// Delta holds the incremental text content of one streaming chunk.
type Delta struct {
	Content string `json:"content"`
}
// Message is a complete chat message in a non-streaming response.
type Message struct {
	Role    string      `json:"role"`    // "assistant" in responses built by this package
	Content string      `json:"content"` // full response text
	Refusal interface{} `json:"refusal"` // always null here
	// NOTE(review): OpenAI's API names this field "annotations" (plural) —
	// verify the singular "annotation" tag is intentional.
	Annotation []interface{} `json:"annotation"`
}
// OpenAIResponse is the complete non-streaming completion response
// (object type "chat.completion").
type OpenAIResponse struct {
	ID      string           `json:"id"`      // unique completion id
	Object  string           `json:"object"`  // always "chat.completion" here
	Created int64            `json:"created"` // Unix seconds timestamp
	Model   string           `json:"model"`   // model name echoed to the client
	Choices []NoStreamChoice `json:"choices"` // completed choices
	// Usage is never populated by this package, so token counts
	// serialize as zeros.
	Usage Usage `json:"usage"`
}
// Usage reports token accounting in the OpenAI response format.
// This package never fills it in, so all counts are zero.
type Usage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}
// ReturnOpenAIResponse writes text to the client in OpenAI format,
// dispatching to the SSE chunk writer when stream is true and to the
// plain JSON writer otherwise. It returns any serialization/write error.
func ReturnOpenAIResponse(text string, stream bool, gc *gin.Context) error {
	// Early return keeps the happy path flat; `else` after a terminating
	// return is non-idiomatic Go.
	if stream {
		return streamRespose(text, gc)
	}
	return noStreamResponse(text, gc)
}
// streamRespose writes a single OpenAI-style SSE chunk
// ("data: {json}\n\n") containing text to the client and flushes it so
// the chunk is delivered immediately. It returns any marshal or write error.
// (Name misspells "Response"; kept for compatibility with existing callers.)
func streamRespose(text string, gc *gin.Context) error {
	openAIResp := &OpenAISrteamResponse{
		ID:      uuid.New().String(),
		Object:  "chat.completion.chunk",
		Created: time.Now().Unix(),
		Model:   "claude-3-7-sonnet-20250219",
		Choices: []StreamChoice{
			{
				Index:        0,
				Delta:        Delta{Content: text},
				Logprobs:     nil,
				FinishReason: nil,
			},
		},
	}
	jsonBytes, err := json.Marshal(openAIResp)
	// BUGFIX: the error must be checked before jsonBytes is used; the
	// original appended the SSE framing to a possibly-nil slice first.
	if err != nil {
		logger.Error(fmt.Sprintf("Error marshalling JSON: %v", err))
		return err
	}
	// Frame the payload as a Server-Sent Event: "data: <json>\n\n".
	payload := make([]byte, 0, len("data: ")+len(jsonBytes)+len("\n\n"))
	payload = append(payload, "data: "...)
	payload = append(payload, jsonBytes...)
	payload = append(payload, "\n\n"...)
	// BUGFIX: the write error was previously ignored; a failed write
	// (e.g. client disconnect) should be surfaced to the caller.
	if _, err := gc.Writer.Write(payload); err != nil {
		logger.Error(fmt.Sprintf("Error writing SSE chunk: %v", err))
		return err
	}
	gc.Writer.Flush()
	return nil
}
// noStreamResponse sends text to the client as one complete
// OpenAI-format chat completion (HTTP 200, JSON body). It always
// returns nil; gin handles serialization errors internally.
func noStreamResponse(text string, gc *gin.Context) error {
	// Build the single assistant choice carried by the response.
	choice := NoStreamChoice{
		Index: 0,
		Message: Message{
			Role:    "assistant",
			Content: text,
		},
		Logprobs:     nil,
		FinishReason: "stop",
	}
	// Wrap it in the standard completion envelope; Usage stays zero.
	resp := &OpenAIResponse{
		ID:      uuid.New().String(),
		Object:  "chat.completion",
		Created: time.Now().Unix(),
		Model:   "claude-3-7-sonnet-20250219",
		Choices: []NoStreamChoice{choice},
	}
	gc.JSON(200, resp)
	return nil
}
|