Upload 9 files
Browse files- .cnb.yml +36 -0
- Dockerfile +21 -0
- go.mod +5 -0
- go.sum +2 -0
- handlers.go +287 -0
- main.go +80 -0
- models.go +121 -0
- upstream.go +388 -0
- utils.go +40 -0
.cnb.yml
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"(main)":
|
| 2 |
+
push:
|
| 3 |
+
- runner:
|
| 4 |
+
cpus: 2
|
| 5 |
+
services:
|
| 6 |
+
- docker
|
| 7 |
+
stages:
|
| 8 |
+
- name: docker login
|
| 9 |
+
script: docker login -u ${CNB_TOKEN_USER_NAME} -p "${CNB_TOKEN}" ${CNB_DOCKER_REGISTRY}
|
| 10 |
+
- name: docker build
|
| 11 |
+
script: docker build -t ${CNB_DOCKER_REGISTRY}/${CNB_REPO_SLUG_LOWERCASE}:${CNB_BRANCH} .
|
| 12 |
+
- name: tag latest if main branch
|
| 13 |
+
script: |
|
| 14 |
+
if [ "${CNB_BRANCH}" = "main" ] || [ "${CNB_BRANCH}" = "master" ]; then
|
| 15 |
+
docker tag ${CNB_DOCKER_REGISTRY}/${CNB_REPO_SLUG_LOWERCASE}:${CNB_BRANCH} ${CNB_DOCKER_REGISTRY}/${CNB_REPO_SLUG_LOWERCASE}:latest
|
| 16 |
+
fi
|
| 17 |
+
- name: docker push
|
| 18 |
+
script: |
|
| 19 |
+
docker push ${CNB_DOCKER_REGISTRY}/${CNB_REPO_SLUG_LOWERCASE}:${CNB_BRANCH}
|
| 20 |
+
if [ "${CNB_BRANCH}" = "main" ] || [ "${CNB_BRANCH}" = "master" ]; then
|
| 21 |
+
docker push ${CNB_DOCKER_REGISTRY}/${CNB_REPO_SLUG_LOWERCASE}:latest
|
| 22 |
+
fi
|
| 23 |
+
$:
|
| 24 |
+
tag_push:
|
| 25 |
+
- runner:
|
| 26 |
+
cpus: 2
|
| 27 |
+
services:
|
| 28 |
+
- docker
|
| 29 |
+
stages:
|
| 30 |
+
- name: docker login
|
| 31 |
+
script: docker login -u ${CNB_TOKEN_USER_NAME} -p "${CNB_TOKEN}" ${CNB_DOCKER_REGISTRY}
|
| 32 |
+
- name: docker build
|
| 33 |
+
script: docker build -t ${CNB_DOCKER_REGISTRY}/${CNB_REPO_SLUG_LOWERCASE}:${CNB_BRANCH} .
|
| 34 |
+
- name: docker push
|
| 35 |
+
script: |
|
| 36 |
+
docker push ${CNB_DOCKER_REGISTRY}/${CNB_REPO_SLUG_LOWERCASE}:${CNB_BRANCH}
|
Dockerfile
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# 构建 go 服务镜像
|
| 2 |
+
FROM golang:1.24.1 AS builder
|
| 3 |
+
|
| 4 |
+
ENV GOPROXY="https://mirrors.cloud.tencent.com/go/"
|
| 5 |
+
|
| 6 |
+
WORKDIR /app
|
| 7 |
+
|
| 8 |
+
COPY . .
|
| 9 |
+
|
| 10 |
+
# 构建应用
|
| 11 |
+
RUN CGO_ENABLED=0 GOOS=linux go build -o main .
|
| 12 |
+
|
| 13 |
+
FROM alpine:latest
|
| 14 |
+
|
| 15 |
+
# NOTE(review): alpine ships without tzdata; for TZ to take effect the image
# needs `RUN apk add --no-cache tzdata` — confirm before relying on local time.
ENV TZ="Asia/Shanghai"
|
| 16 |
+
WORKDIR /app
|
| 17 |
+
|
| 18 |
+
# 从构建阶段复制可执行文件
|
| 19 |
+
COPY --from=builder /app/main .
|
| 20 |
+
|
| 21 |
+
CMD ["./main"]
|
go.mod
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
module cnb.cool/0_0/learn/julep
|
| 2 |
+
|
| 3 |
+
go 1.24.1
|
| 4 |
+
|
| 5 |
+
require github.com/google/uuid v1.6.0
|
go.sum
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
| 2 |
+
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
handlers.go
ADDED
|
@@ -0,0 +1,287 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"encoding/json"
|
| 5 |
+
"errors" // Import errors package
|
| 6 |
+
"fmt"
|
| 7 |
+
"log/slog"
|
| 8 |
+
"net/http"
|
| 9 |
+
"strings"
|
| 10 |
+
"time"
|
| 11 |
+
)
|
| 12 |
+
|
| 13 |
+
// chatCompletionsHandler handles requests to the /v1/chat/completions endpoint.
|
| 14 |
+
func chatCompletionsHandler(logger *slog.Logger, w http.ResponseWriter, r *http.Request) {
|
| 15 |
+
requestID := generateUUID()
|
| 16 |
+
reqLogger := logger.With("request_id", requestID)
|
| 17 |
+
reqLogger.Info("Chat completion request received", "method", r.Method, "path", r.URL.Path)
|
| 18 |
+
|
| 19 |
+
// ... (Keep Method Check, Authentication, Decode Body, Validation as before) ...
|
| 20 |
+
// 1. Check Method
|
| 21 |
+
if r.Method != http.MethodPost {
|
| 22 |
+
writeJSONError(w, reqLogger, http.StatusMethodNotAllowed, fmt.Sprintf("Method %s not allowed", r.Method), stringPtr("invalid_request_error"), nil, nil)
|
| 23 |
+
return
|
| 24 |
+
}
|
| 25 |
+
|
| 26 |
+
// 2. Authentication (Basic Example)
|
| 27 |
+
authHeader := r.Header.Get("Authorization")
|
| 28 |
+
if authHeader == "" || !strings.HasPrefix(authHeader, "Bearer ") {
|
| 29 |
+
invalidAuthType := "missing_authorization"
|
| 30 |
+
if authHeader != "" {
|
| 31 |
+
invalidAuthType = "invalid_authorization_type"
|
| 32 |
+
}
|
| 33 |
+
writeJSONError(w, reqLogger, http.StatusUnauthorized, "Authorization header is required (e.g., 'Bearer YOUR_API_KEY')", &invalidAuthType, nil, nil)
|
| 34 |
+
return
|
| 35 |
+
}
|
| 36 |
+
reqLogger.Debug("Authorization header present") // Changed to Debug
|
| 37 |
+
|
| 38 |
+
// 3. Decode Request Body
|
| 39 |
+
var openaiRequest OpenAIRequest
|
| 40 |
+
r.Body = http.MaxBytesReader(w, r.Body, 1024*1024) // 1MB limit
|
| 41 |
+
err := json.NewDecoder(r.Body).Decode(&openaiRequest)
|
| 42 |
+
if err != nil {
|
| 43 |
+
var syntaxError *json.SyntaxError
|
| 44 |
+
var unmarshalTypeError *json.UnmarshalTypeError
|
| 45 |
+
errMsg := "Invalid JSON request body"
|
| 46 |
+
errCode := "invalid_json"
|
| 47 |
+
if errors.As(err, &syntaxError) { // Use errors.As
|
| 48 |
+
errMsg = fmt.Sprintf("Invalid JSON syntax at offset %d", syntaxError.Offset)
|
| 49 |
+
} else if errors.As(err, &unmarshalTypeError) { // Use errors.As
|
| 50 |
+
errMsg = fmt.Sprintf("Invalid type for field '%s', expected %s", unmarshalTypeError.Field, unmarshalTypeError.Type)
|
| 51 |
+
errCode = "invalid_field_type"
|
| 52 |
+
} else if err.Error() == "http: request body too large" {
|
| 53 |
+
errMsg = "Request body exceeds limit (1MB)"
|
| 54 |
+
errCode = "request_too_large"
|
| 55 |
+
writeJSONError(w, reqLogger, http.StatusRequestEntityTooLarge, errMsg, stringPtr("invalid_request_error"), &errCode, nil)
|
| 56 |
+
return
|
| 57 |
+
}
|
| 58 |
+
reqLogger.Error("Failed to decode request body", "error", err)
|
| 59 |
+
writeJSONError(w, reqLogger, http.StatusBadRequest, errMsg, stringPtr("invalid_request_error"), &errCode, nil)
|
| 60 |
+
return
|
| 61 |
+
}
|
| 62 |
+
defer r.Body.Close()
|
| 63 |
+
|
| 64 |
+
// 4. Input Validation
|
| 65 |
+
if len(openaiRequest.Messages) == 0 {
|
| 66 |
+
reqLogger.Warn("Validation failed: 'messages' field is empty")
|
| 67 |
+
param := "messages"
|
| 68 |
+
code := "missing_field"
|
| 69 |
+
writeJSONError(w, reqLogger, http.StatusBadRequest, "'messages' is a required field and must be a non-empty array", stringPtr("invalid_request_error"), &code, ¶m)
|
| 70 |
+
return
|
| 71 |
+
}
|
| 72 |
+
if openaiRequest.Model == "" {
|
| 73 |
+
reqLogger.Warn("Validation failed: 'model' field is empty")
|
| 74 |
+
param := "model"
|
| 75 |
+
code := "missing_field"
|
| 76 |
+
writeJSONError(w, reqLogger, http.StatusBadRequest, "'model' is a required field", stringPtr("invalid_request_error"), &code, ¶m)
|
| 77 |
+
return
|
| 78 |
+
}
|
| 79 |
+
reqLogger.Info("Request decoded and validated", "model", openaiRequest.Model, "stream_requested", openaiRequest.Stream)
|
| 80 |
+
|
| 81 |
+
// 5. Call Real Upstream (Julep)
|
| 82 |
+
// Pass the request context for timeout/cancellation propagation
|
| 83 |
+
// Pass request headers for Authorization etc.
|
| 84 |
+
finalOpenAIResponse, upstreamStatusCode, err := callJulepChat(r.Context(), reqLogger, r.Header, openaiRequest, requestID)
|
| 85 |
+
if err != nil {
|
| 86 |
+
reqLogger.Error("Upstream Julep call failed", "error", err, "status_code", upstreamStatusCode)
|
| 87 |
+
// Use the status code returned by callJulepChat for the client response
|
| 88 |
+
errType := "upstream_error"
|
| 89 |
+
if upstreamStatusCode == http.StatusGatewayTimeout {
|
| 90 |
+
errType = "gateway_timeout"
|
| 91 |
+
} else if upstreamStatusCode >= 400 && upstreamStatusCode < 500 {
|
| 92 |
+
errType = "invalid_request_error" // Or map specific Julep 4xx codes
|
| 93 |
+
}
|
| 94 |
+
writeJSONError(w, reqLogger, upstreamStatusCode, fmt.Sprintf("Upstream API error: %s", err.Error()), &errType, nil, nil)
|
| 95 |
+
return
|
| 96 |
+
}
|
| 97 |
+
|
| 98 |
+
// 6. Handle Client Response (using finalOpenAIResponse)
|
| 99 |
+
isStreaming := openaiRequest.Stream
|
| 100 |
+
|
| 101 |
+
if !isStreaming {
|
| 102 |
+
// 6.a. Send Non-Streaming Response
|
| 103 |
+
reqLogger.Info("Sending non-streaming response")
|
| 104 |
+
w.Header().Set("Content-Type", "application/json")
|
| 105 |
+
w.WriteHeader(http.StatusOK) // Status OK as the overall operation succeeded
|
| 106 |
+
if err := json.NewEncoder(w).Encode(finalOpenAIResponse); err != nil {
|
| 107 |
+
reqLogger.Error("Failed to encode non-streaming response", "error", err)
|
| 108 |
+
}
|
| 109 |
+
} else {
|
| 110 |
+
// 6.b. Send Simulated Streaming Response from the complete Julep data
|
| 111 |
+
reqLogger.Info("Sending simulated streaming response based on Julep result")
|
| 112 |
+
|
| 113 |
+
w.Header().Set("Content-Type", "text/event-stream")
|
| 114 |
+
w.Header().Set("Cache-Control", "no-cache")
|
| 115 |
+
w.Header().Set("Connection", "keep-alive")
|
| 116 |
+
// Optional: w.Header().Set("X-Accel-Buffering", "no")
|
| 117 |
+
|
| 118 |
+
flusher, ok := w.(http.Flusher)
|
| 119 |
+
if !ok {
|
| 120 |
+
reqLogger.Error("Streaming unsupported: ResponseWriter does not implement http.Flusher")
|
| 121 |
+
errType := "internal_server_error"
|
| 122 |
+
// It's likely too late to send a proper JSON error here if headers were already flushed implicitly.
|
| 123 |
+
// Best effort: log and potentially send plain text error before trying to write stream headers.
|
| 124 |
+
http.Error(w, "Internal Server Error: Streaming unsupported", http.StatusInternalServerError)
|
| 125 |
+
// Attempt to write JSON error anyway, might fail.
|
| 126 |
+
writeJSONError(w, reqLogger, http.StatusInternalServerError, "Streaming is not supported by the server configuration", &errType, nil, nil)
|
| 127 |
+
return
|
| 128 |
+
}
|
| 129 |
+
|
| 130 |
+
// Set status code *before* flushing or writing body
|
| 131 |
+
w.WriteHeader(http.StatusOK)
|
| 132 |
+
flusher.Flush() // Ensure headers are sent
|
| 133 |
+
|
| 134 |
+
// Stream the full response derived from Julep's data
|
| 135 |
+
err = streamFullResponseAsChunks(w, flusher, reqLogger, finalOpenAIResponse)
|
| 136 |
+
if err != nil {
|
| 137 |
+
reqLogger.Error("Error during streaming simulation", "error", err)
|
| 138 |
+
// Cannot send JSON error now. Client might see incomplete stream.
|
| 139 |
+
}
|
| 140 |
+
|
| 141 |
+
reqLogger.Info("Finished streaming response")
|
| 142 |
+
}
|
| 143 |
+
}
|
| 144 |
+
|
| 145 |
+
// streamFullResponseAsChunks takes a complete OpenAIResponse and sends it
|
| 146 |
+
// to the client as a series of SSE chunks, simulating a real stream.
|
| 147 |
+
func streamFullResponseAsChunks(w http.ResponseWriter, flusher http.Flusher, logger *slog.Logger, fullResp *OpenAIResponse) error {
|
| 148 |
+
if len(fullResp.Choices) == 0 {
|
| 149 |
+
logger.Warn("Full response has no choices to stream")
|
| 150 |
+
// Send DONE immediately if no choices
|
| 151 |
+
_, err := fmt.Fprintf(w, "data: [DONE]\n\n")
|
| 152 |
+
if err != nil {
|
| 153 |
+
return fmt.Errorf("failed to write [DONE] message: %w", err)
|
| 154 |
+
}
|
| 155 |
+
flusher.Flush()
|
| 156 |
+
return nil
|
| 157 |
+
}
|
| 158 |
+
|
| 159 |
+
choice := fullResp.Choices[0] // Assuming only one choice for simplicity
|
| 160 |
+
createdTime := fullResp.Created
|
| 161 |
+
|
| 162 |
+
// --- Send Initial Chunk (Role) ---
|
| 163 |
+
if choice.Message.Role != "" {
|
| 164 |
+
roleChunk := OpenAIChunk{
|
| 165 |
+
ID: fullResp.ID,
|
| 166 |
+
Object: "chat.completion.chunk",
|
| 167 |
+
Created: createdTime,
|
| 168 |
+
Model: fullResp.Model,
|
| 169 |
+
Choices: []OpenAIChunkChoice{
|
| 170 |
+
{
|
| 171 |
+
Index: choice.Index,
|
| 172 |
+
Delta: OpenAIDelta{Role: stringPtr(choice.Message.Role)},
|
| 173 |
+
FinishReason: nil,
|
| 174 |
+
},
|
| 175 |
+
},
|
| 176 |
+
}
|
| 177 |
+
if err := sendChunk(w, flusher, logger, roleChunk); err != nil {
|
| 178 |
+
return fmt.Errorf("failed to send role chunk: %w", err)
|
| 179 |
+
}
|
| 180 |
+
time.Sleep(10 * time.Millisecond) // Small delay
|
| 181 |
+
}
|
| 182 |
+
|
| 183 |
+
// --- Send Content Chunks ---
|
| 184 |
+
content := choice.Message.Content
|
| 185 |
+
if content != "" {
|
| 186 |
+
// Simulate streaming by breaking content into smaller parts
|
| 187 |
+
const chunkSize = 5 // Small chunk size for demonstration
|
| 188 |
+
for i := 0; i < len(content); i += chunkSize {
|
| 189 |
+
end := i + chunkSize
|
| 190 |
+
if end > len(content) {
|
| 191 |
+
end = len(content)
|
| 192 |
+
}
|
| 193 |
+
contentPiece := content[i:end]
|
| 194 |
+
|
| 195 |
+
contentChunk := OpenAIChunk{
|
| 196 |
+
ID: fullResp.ID,
|
| 197 |
+
Object: "chat.completion.chunk",
|
| 198 |
+
Created: createdTime, // Could update timestamp per chunk if desired
|
| 199 |
+
Model: fullResp.Model,
|
| 200 |
+
Choices: []OpenAIChunkChoice{
|
| 201 |
+
{
|
| 202 |
+
Index: choice.Index,
|
| 203 |
+
Delta: OpenAIDelta{Content: stringPtr(contentPiece)},
|
| 204 |
+
FinishReason: nil,
|
| 205 |
+
},
|
| 206 |
+
},
|
| 207 |
+
}
|
| 208 |
+
if err := sendChunk(w, flusher, logger, contentChunk); err != nil {
|
| 209 |
+
return fmt.Errorf("failed to send content chunk: %w", err)
|
| 210 |
+
}
|
| 211 |
+
time.Sleep(30 * time.Millisecond) // Simulate generation time between chunks
|
| 212 |
+
}
|
| 213 |
+
}
|
| 214 |
+
|
| 215 |
+
// --- Send Tool Calls Chunk (if any) ---
|
| 216 |
+
if len(choice.Message.ToolCalls) > 0 {
|
| 217 |
+
toolChunk := OpenAIChunk{
|
| 218 |
+
ID: fullResp.ID,
|
| 219 |
+
Object: "chat.completion.chunk",
|
| 220 |
+
Created: createdTime,
|
| 221 |
+
Model: fullResp.Model,
|
| 222 |
+
Choices: []OpenAIChunkChoice{
|
| 223 |
+
{
|
| 224 |
+
Index: choice.Index,
|
| 225 |
+
Delta: OpenAIDelta{ToolCalls: choice.Message.ToolCalls}, // Send all tool calls in one delta
|
| 226 |
+
FinishReason: nil,
|
| 227 |
+
},
|
| 228 |
+
},
|
| 229 |
+
}
|
| 230 |
+
if err := sendChunk(w, flusher, logger, toolChunk); err != nil {
|
| 231 |
+
return fmt.Errorf("failed to send tool_calls chunk: %w", err)
|
| 232 |
+
}
|
| 233 |
+
time.Sleep(10 * time.Millisecond) // Small delay
|
| 234 |
+
}
|
| 235 |
+
|
| 236 |
+
// --- Send Final Chunk (Finish Reason) ---
|
| 237 |
+
finalChunk := OpenAIChunk{
|
| 238 |
+
ID: fullResp.ID,
|
| 239 |
+
Object: "chat.completion.chunk",
|
| 240 |
+
Created: createdTime,
|
| 241 |
+
Model: fullResp.Model,
|
| 242 |
+
Choices: []OpenAIChunkChoice{
|
| 243 |
+
{
|
| 244 |
+
Index: choice.Index,
|
| 245 |
+
Delta: OpenAIDelta{}, // Empty delta
|
| 246 |
+
FinishReason: stringPtr(choice.FinishReason),
|
| 247 |
+
},
|
| 248 |
+
},
|
| 249 |
+
}
|
| 250 |
+
if err := sendChunk(w, flusher, logger, finalChunk); err != nil {
|
| 251 |
+
return fmt.Errorf("failed to send final chunk: %w", err)
|
| 252 |
+
}
|
| 253 |
+
|
| 254 |
+
// --- Send DONE message ---
|
| 255 |
+
logger.Debug("Sending [DONE] message")
|
| 256 |
+
_, err := fmt.Fprintf(w, "data: [DONE]\n\n")
|
| 257 |
+
if err != nil {
|
| 258 |
+
// Log error, but might not reach client if connection closed
|
| 259 |
+
logger.Error("Failed to write [DONE] message", "error", err)
|
| 260 |
+
return fmt.Errorf("failed to write [DONE] message: %w", err)
|
| 261 |
+
}
|
| 262 |
+
flusher.Flush() // Ensure DONE is sent
|
| 263 |
+
|
| 264 |
+
return nil
|
| 265 |
+
}
|
| 266 |
+
|
| 267 |
+
// sendChunk encodes the chunk to JSON and writes it in SSE format.
|
| 268 |
+
func sendChunk(w http.ResponseWriter, flusher http.Flusher, logger *slog.Logger, chunk OpenAIChunk) error {
|
| 269 |
+
chunkBytes, err := json.Marshal(chunk)
|
| 270 |
+
if err != nil {
|
| 271 |
+
logger.Error("Failed to marshal stream chunk", "error", err, "chunk_id", chunk.ID)
|
| 272 |
+
return fmt.Errorf("failed to marshal chunk: %w", err)
|
| 273 |
+
}
|
| 274 |
+
|
| 275 |
+
// Write in Server-Sent Event format: data: <json>\n\n
|
| 276 |
+
_, err = fmt.Fprintf(w, "data: %s\n\n", string(chunkBytes))
|
| 277 |
+
if err != nil {
|
| 278 |
+
// Log error, connection might be closed by client
|
| 279 |
+
logger.Error("Failed to write chunk to response writer", "error", err, "chunk_id", chunk.ID)
|
| 280 |
+
return fmt.Errorf("failed to write chunk: %w", err)
|
| 281 |
+
}
|
| 282 |
+
|
| 283 |
+
// Flush the buffer to send the chunk immediately
|
| 284 |
+
flusher.Flush()
|
| 285 |
+
logger.Debug("Sent chunk", "chunk_id", chunk.ID, "content_length", len(chunkBytes))
|
| 286 |
+
return nil
|
| 287 |
+
}
|
main.go
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"context"
|
| 5 |
+
"encoding/json"
|
| 6 |
+
"errors"
|
| 7 |
+
"log/slog"
|
| 8 |
+
"net/http"
|
| 9 |
+
"os"
|
| 10 |
+
"os/signal"
|
| 11 |
+
"syscall"
|
| 12 |
+
"time"
|
| 13 |
+
)
|
| 14 |
+
|
| 15 |
+
func main() {
|
| 16 |
+
// 1. Setup Logger
|
| 17 |
+
// Use JSON handler for structured logging
|
| 18 |
+
logger := slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{
|
| 19 |
+
Level: slog.LevelDebug, // Log debug messages and above
|
| 20 |
+
}))
|
| 21 |
+
slog.SetDefault(logger) // Set as default logger for convenience
|
| 22 |
+
|
| 23 |
+
logger.Info("Starting Stream Converter API Server...")
|
| 24 |
+
|
| 25 |
+
// 2. Setup Router (Go 1.22+ ServeMux)
|
| 26 |
+
mux := http.NewServeMux()
|
| 27 |
+
|
| 28 |
+
// Wrap handler with logger middleware (or pass logger directly)
|
| 29 |
+
chatHandler := func(w http.ResponseWriter, r *http.Request) {
|
| 30 |
+
chatCompletionsHandler(logger, w, r)
|
| 31 |
+
}
|
| 32 |
+
mux.HandleFunc("POST /v1/chat/completions", chatHandler)
|
| 33 |
+
|
| 34 |
+
// Add a simple health check endpoint
|
| 35 |
+
mux.HandleFunc("GET /health", func(w http.ResponseWriter, r *http.Request) {
|
| 36 |
+
w.Header().Set("Content-Type", "application/json")
|
| 37 |
+
w.WriteHeader(http.StatusOK)
|
| 38 |
+
json.NewEncoder(w).Encode(map[string]string{"status": "ok"})
|
| 39 |
+
logger.Debug("Health check accessed")
|
| 40 |
+
})
|
| 41 |
+
|
| 42 |
+
// 3. Configure HTTP Server
|
| 43 |
+
server := &http.Server{
|
| 44 |
+
Addr: ":8080", // Listen on port 8080
|
| 45 |
+
Handler: mux,
|
| 46 |
+
ReadTimeout: 10 * time.Second, // Example timeout values
|
| 47 |
+
WriteTimeout: 90 * time.Second, // Longer for potential streaming
|
| 48 |
+
IdleTimeout: 120 * time.Second,
|
| 49 |
+
}
|
| 50 |
+
|
| 51 |
+
// 4. Start Server in a Goroutine
|
| 52 |
+
go func() {
|
| 53 |
+
logger.Info("Server listening", "address", server.Addr)
|
| 54 |
+
if err := server.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {
|
| 55 |
+
logger.Error("Server failed to start", "error", err)
|
| 56 |
+
os.Exit(1)
|
| 57 |
+
}
|
| 58 |
+
}()
|
| 59 |
+
|
| 60 |
+
// 5. Graceful Shutdown Handling
|
| 61 |
+
quit := make(chan os.Signal, 1)
|
| 62 |
+
// signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
|
| 63 |
+
signal.Notify(quit, os.Interrupt, syscall.SIGTERM) // More portable signals
|
| 64 |
+
|
| 65 |
+
// Block until a signal is received
|
| 66 |
+
sig := <-quit
|
| 67 |
+
logger.Info("Shutdown signal received", "signal", sig.String())
|
| 68 |
+
|
| 69 |
+
// Create a context with timeout for shutdown
|
| 70 |
+
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
|
| 71 |
+
defer cancel()
|
| 72 |
+
|
| 73 |
+
// Attempt graceful shutdown
|
| 74 |
+
if err := server.Shutdown(ctx); err != nil {
|
| 75 |
+
logger.Error("Server shutdown failed", "error", err)
|
| 76 |
+
os.Exit(1)
|
| 77 |
+
}
|
| 78 |
+
|
| 79 |
+
logger.Info("Server gracefully stopped")
|
| 80 |
+
}
|
models.go
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main

// models.go defines the OpenAI-compatible wire formats used by this service:
// request bodies, non-streaming responses, streaming (SSE) chunks, and the
// standard error envelope.

// --- OpenAI Request Structures ---

// OpenAIRequest represents the incoming request body for chat completions.
type OpenAIRequest struct {
	Model            string          `json:"model"`
	Messages         []OpenAIMessage `json:"messages"`
	Stream           bool            `json:"stream"`
	MaxTokens        *int            `json:"max_tokens,omitempty"`        // pointer so "absent" is distinguishable from 0
	Temperature      *float64        `json:"temperature,omitempty"`       // pointer for optional field
	TopP             *float64        `json:"top_p,omitempty"`             // pointer for optional field
	Stop             []string        `json:"stop,omitempty"`              // NOTE(review): OpenAI also allows a bare string here; this decode only accepts an array — confirm callers
	PresencePenalty  *float64        `json:"presence_penalty,omitempty"`  // pointer for optional field
	FrequencyPenalty *float64        `json:"frequency_penalty,omitempty"` // pointer for optional field
	Tools            []OpenAITool    `json:"tools,omitempty"`
	ToolChoice       any             `json:"tool_choice,omitempty"` // can be a string or an object per the OpenAI spec
	// Add other OpenAI parameters as needed
}

// OpenAIMessage represents a single message in the chat history.
type OpenAIMessage struct {
	Role      string           `json:"role"` // "system", "user", "assistant", "tool"
	Content   string           `json:"content"`
	Name      *string          `json:"name,omitempty"`         // for tool role
	ToolCalls []OpenAIToolCall `json:"tool_calls,omitempty"`   // for assistant messages that invoke tools
	ToolCallID *string         `json:"tool_call_id,omitempty"` // for tool-role messages, links back to the call
}

// OpenAITool represents a tool definition offered to the model.
type OpenAITool struct {
	Type     string            `json:"type"` // e.g., "function"
	Function OpenAIFunctionDef `json:"function"`
}

// OpenAIFunctionDef represents the definition of a function tool.
type OpenAIFunctionDef struct {
	Name        string `json:"name"`
	Description string `json:"description,omitempty"`
	Parameters  any    `json:"parameters"` // typically a map[string]any holding a JSON Schema
}

// --- OpenAI Response Structures (Non-Streaming) ---

// OpenAIResponse represents the full response for a non-streaming chat completion.
type OpenAIResponse struct {
	ID      string         `json:"id"`
	Object  string         `json:"object"`  // "chat.completion"
	Created int64          `json:"created"` // Unix timestamp (seconds)
	Model   string         `json:"model"`
	Choices []OpenAIChoice `json:"choices"`
	Usage   *OpenAIUsage   `json:"usage,omitempty"`
}

// OpenAIChoice represents a single choice in the non-streaming response.
type OpenAIChoice struct {
	Index        int           `json:"index"`
	Message      OpenAIMessage `json:"message"`
	FinishReason string        `json:"finish_reason"` // "stop", "length", "tool_calls", "content_filter", "function_call" (legacy)
}

// OpenAIToolCall represents a tool call made by the model.
type OpenAIToolCall struct {
	ID       string         `json:"id"`
	Type     string         `json:"type"` // always "function" for now
	Function OpenAIFunction `json:"function"`
}

// OpenAIFunction represents the function call details within a tool call.
type OpenAIFunction struct {
	Name      string `json:"name"`
	Arguments string `json:"arguments"` // JSON-encoded argument string, not an object
}

// OpenAIUsage represents token usage statistics.
type OpenAIUsage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}

// --- OpenAI Response Structures (Streaming) ---

// OpenAIChunk represents a single chunk in a streaming chat completion response.
type OpenAIChunk struct {
	ID      string              `json:"id"`
	Object  string              `json:"object"` // "chat.completion.chunk"
	Created int64               `json:"created"`
	Model   string              `json:"model"`
	Choices []OpenAIChunkChoice `json:"choices"`
}

// OpenAIChunkChoice represents a choice within a streaming chunk.
type OpenAIChunkChoice struct {
	Index        int         `json:"index"`
	Delta        OpenAIDelta `json:"delta"`                   // the incremental changes in this chunk
	FinishReason *string     `json:"finish_reason,omitempty"` // pointer: present only in a choice's last chunk
}

// OpenAIDelta represents the changed fields in a streaming chunk.
// Only one of these fields is typically populated in a single chunk.
type OpenAIDelta struct {
	Role      *string          `json:"role,omitempty"`       // pointer for optional field
	Content   *string          `json:"content,omitempty"`    // pointer for optional field
	ToolCalls []OpenAIToolCall `json:"tool_calls,omitempty"` // this service sends the full array in one chunk
}

// --- Error Response ---

// ErrorResponse defines the standard JSON error envelope returned to clients.
type ErrorResponse struct {
	Error APIError `json:"error"`
}

// APIError defines the structure of the error object inside ErrorResponse.
type APIError struct {
	Message string  `json:"message"`
	Type    *string `json:"type,omitempty"`  // e.g., "invalid_request_error"
	Param   *string `json:"param,omitempty"` // e.g., "messages"
	Code    *string `json:"code,omitempty"`  // e.g., "missing_field"
}
|
upstream.go
ADDED
|
@@ -0,0 +1,388 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"bytes" // 用于创建请求体
|
| 5 |
+
"context" // 用于超时和取消
|
| 6 |
+
"encoding/json"
|
| 7 |
+
"errors"
|
| 8 |
+
"fmt"
|
| 9 |
+
"io" // 用于读取响应体
|
| 10 |
+
"log/slog"
|
| 11 |
+
"net/http" // 用于 HTTP 请求
|
| 12 |
+
"os"
|
| 13 |
+
"strings"
|
| 14 |
+
"time"
|
| 15 |
+
// 确保已导入
|
| 16 |
+
)
|
| 17 |
+
|
| 18 |
+
// JulepApiBaseURL holds the upstream Julep API base URL, read from the
// JULEP_API_BASE_URL environment variable at startup (trailing "/" stripped).
var JulepApiBaseURL string
var apiClient *http.Client // package-level HTTP client, reused across requests

const (
	defaultTimeout = 30 * time.Second // default timeout for upstream HTTP requests
	chatTimeout    = 90 * time.Second // chat requests may take considerably longer
)

// init validates the required environment configuration and builds the shared
// HTTP client. NOTE(review): exiting from init() makes the package impossible
// to import/test without the env var set; consider moving this validation into
// main(). Also note slog here uses the default logger, since main() has not
// configured the JSON handler yet.
func init() {
	JulepApiBaseURL = os.Getenv("JULEP_API_BASE_URL")
	if JulepApiBaseURL == "" {
		slog.Error("Fatal: JULEP_API_BASE_URL environment variable not set.")
		os.Exit(1)
	}
	JulepApiBaseURL = strings.TrimSuffix(JulepApiBaseURL, "/")
	slog.Info("Julep API Base URL configured", "url", JulepApiBaseURL)

	// Build a reusable HTTP client with connection pooling.
	apiClient = &http.Client{
		Timeout: defaultTimeout, // per-request default; long chat calls presumably override via context — TODO confirm in callJulepChat
		Transport: &http.Transport{
			MaxIdleConns:        100,
			MaxIdleConnsPerHost: 10,
			IdleConnTimeout:     90 * time.Second,
		},
	}
	slog.Info("HTTP client initialized")
}
|
| 47 |
+
|
| 48 |
+
// --- Julep Specific Request/Response Structs ---
// (Could live in models.go; kept here so upstream.go stays self-contained.)

// CreateAgentPayload is the request body sent when creating a Julep agent.
type CreateAgentPayload struct {
	Name  string `json:"name"`
	About string `json:"about"`
	// Add other Julep Agent fields here as needed...
}

// CreateSessionPayload is the request body sent when creating a Julep session.
type CreateSessionPayload struct {
	AgentID string `json:"agent"` // the Julep API names this field "agent"
	// Add other Julep Session fields here as needed...
}
|
| 61 |
+
|
| 62 |
+
// JulepMessage mirrors the message structure within Julep's chat
// payload/response.
type JulepMessage struct {
	Role       string          `json:"role"`
	Content    string          `json:"content"`
	Name       *string         `json:"name,omitempty"`
	ToolCallID *string         `json:"tool_call_id,omitempty"`
	ToolCalls  []JulepToolCall `json:"tool_calls,omitempty"`
}

// JulepToolCall mirrors the tool call structure within Julep's format.
type JulepToolCall struct {
	ID       string        `json:"id"`
	Type     string        `json:"type"` // e.g., "function"
	Function JulepFunction `json:"function,omitempty"`
	// Add other Julep tool call types if needed.
}

// JulepFunction mirrors the function structure within Julep's tool call.
type JulepFunction struct {
	Name      string `json:"name"`
	Arguments string `json:"arguments"` // assumes arguments arrive as a JSON-encoded string — TODO confirm
}
|
| 84 |
+
|
| 85 |
+
// JulepChatPayload represents the body sent to Julep's /chat endpoint.
type JulepChatPayload struct {
	Messages         []JulepMessage `json:"messages"`
	Model            *string        `json:"model,omitempty"` // Julep might use the model from the agent/session instead
	Stream           bool           `json:"stream"`          // always set false by this proxy; Julep does not stream
	MaxTokens        *int           `json:"max_tokens,omitempty"`
	Temperature      *float64       `json:"temperature,omitempty"`
	TopP             *float64       `json:"top_p,omitempty"`
	Stop             []string       `json:"stop,omitempty"`
	PresencePenalty  *float64       `json:"presence_penalty,omitempty"`
	FrequencyPenalty *float64       `json:"frequency_penalty,omitempty"`
	Tools            []OpenAITool   `json:"tools,omitempty"`       // assumes Julep accepts the OpenAI tool format directly — TODO confirm
	ToolChoice       any            `json:"tool_choice,omitempty"` // assumes OpenAI-compatible tool_choice format — TODO confirm
	// Add other Julep-specific parameters if needed.
}

// JulepChatResponse represents the non-streaming response from Julep's /chat
// endpoint.
type JulepChatResponse struct {
	ID        string        `json:"id"` // Julep's own response ID (may differ from the session ID)
	CreatedAt time.Time     `json:"created_at"`
	Choices   []JulepChoice `json:"choices"`
	Usage     *JulepUsage   `json:"usage,omitempty"`
	// Add other fields from the Julep response as needed.
}

// JulepChoice is one completion choice within a JulepChatResponse.
type JulepChoice struct {
	Index        int          `json:"index"`
	Message      JulepMessage `json:"message"`
	FinishReason string       `json:"finish_reason"`
}

// JulepUsage carries token accounting for a Julep chat completion.
type JulepUsage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}
|
| 121 |
+
|
| 122 |
+
// --- Conversion Functions ---
|
| 123 |
+
|
| 124 |
+
// convertOpenaiToJulep converts OpenAI request payload to Julep chat payload
|
| 125 |
+
func convertOpenaiToJulep(openaiReq OpenAIRequest) JulepChatPayload {
|
| 126 |
+
julepMessages := make([]JulepMessage, len(openaiReq.Messages))
|
| 127 |
+
for i, msg := range openaiReq.Messages {
|
| 128 |
+
julepToolCalls := make([]JulepToolCall, len(msg.ToolCalls))
|
| 129 |
+
for j, tc := range msg.ToolCalls {
|
| 130 |
+
julepToolCalls[j] = JulepToolCall{
|
| 131 |
+
ID: tc.ID,
|
| 132 |
+
Type: tc.Type,
|
| 133 |
+
Function: JulepFunction{ // Assuming only function type for now
|
| 134 |
+
Name: tc.Function.Name,
|
| 135 |
+
Arguments: tc.Function.Arguments,
|
| 136 |
+
},
|
| 137 |
+
}
|
| 138 |
+
}
|
| 139 |
+
julepMessages[i] = JulepMessage{
|
| 140 |
+
Role: msg.Role,
|
| 141 |
+
Content: msg.Content,
|
| 142 |
+
Name: msg.Name,
|
| 143 |
+
ToolCallID: msg.ToolCallID,
|
| 144 |
+
ToolCalls: julepToolCalls,
|
| 145 |
+
}
|
| 146 |
+
}
|
| 147 |
+
|
| 148 |
+
payload := JulepChatPayload{
|
| 149 |
+
Messages: julepMessages,
|
| 150 |
+
Model: &openaiReq.Model, // Pass model if Julep expects it here
|
| 151 |
+
Stream: false, // Force false as Julep doesn't support streaming response
|
| 152 |
+
MaxTokens: openaiReq.MaxTokens,
|
| 153 |
+
Temperature: openaiReq.Temperature,
|
| 154 |
+
TopP: openaiReq.TopP,
|
| 155 |
+
Stop: openaiReq.Stop,
|
| 156 |
+
PresencePenalty: openaiReq.PresencePenalty,
|
| 157 |
+
FrequencyPenalty: openaiReq.FrequencyPenalty,
|
| 158 |
+
Tools: openaiReq.Tools,
|
| 159 |
+
ToolChoice: openaiReq.ToolChoice,
|
| 160 |
+
}
|
| 161 |
+
// Clean up nil model pointer if model string is empty
|
| 162 |
+
if openaiReq.Model == "" {
|
| 163 |
+
payload.Model = nil
|
| 164 |
+
}
|
| 165 |
+
return payload
|
| 166 |
+
}
|
| 167 |
+
|
| 168 |
+
// convertJulepToOpenai converts Julep chat response to OpenAI response format
|
| 169 |
+
// Takes sessionID to use it as the OpenAI response ID, as per JS example.
|
| 170 |
+
func convertJulepToOpenai(julepResp *JulepChatResponse, modelName string, sessionID string) *OpenAIResponse {
|
| 171 |
+
openaiChoices := make([]OpenAIChoice, len(julepResp.Choices))
|
| 172 |
+
for i, choice := range julepResp.Choices {
|
| 173 |
+
openaiToolCalls := make([]OpenAIToolCall, len(choice.Message.ToolCalls))
|
| 174 |
+
for j, tc := range choice.Message.ToolCalls {
|
| 175 |
+
openaiToolCalls[j] = OpenAIToolCall{
|
| 176 |
+
ID: tc.ID,
|
| 177 |
+
Type: tc.Type,
|
| 178 |
+
Function: OpenAIFunction{
|
| 179 |
+
Name: tc.Function.Name,
|
| 180 |
+
Arguments: tc.Function.Arguments,
|
| 181 |
+
},
|
| 182 |
+
}
|
| 183 |
+
}
|
| 184 |
+
openaiChoices[i] = OpenAIChoice{
|
| 185 |
+
Index: choice.Index,
|
| 186 |
+
Message: OpenAIMessage{
|
| 187 |
+
Role: choice.Message.Role,
|
| 188 |
+
Content: choice.Message.Content,
|
| 189 |
+
ToolCalls: openaiToolCalls,
|
| 190 |
+
},
|
| 191 |
+
FinishReason: choice.FinishReason,
|
| 192 |
+
}
|
| 193 |
+
}
|
| 194 |
+
|
| 195 |
+
var openaiUsage *OpenAIUsage
|
| 196 |
+
if julepResp.Usage != nil {
|
| 197 |
+
openaiUsage = &OpenAIUsage{
|
| 198 |
+
PromptTokens: julepResp.Usage.PromptTokens,
|
| 199 |
+
CompletionTokens: julepResp.Usage.CompletionTokens,
|
| 200 |
+
TotalTokens: julepResp.Usage.TotalTokens,
|
| 201 |
+
}
|
| 202 |
+
}
|
| 203 |
+
|
| 204 |
+
return &OpenAIResponse{
|
| 205 |
+
ID: sessionID, // Use the generated Session ID as OpenAI ID
|
| 206 |
+
Object: "chat.completion",
|
| 207 |
+
Created: julepResp.CreatedAt.Unix(),
|
| 208 |
+
Model: modelName, // Use the model requested by the client
|
| 209 |
+
Choices: openaiChoices,
|
| 210 |
+
Usage: openaiUsage,
|
| 211 |
+
}
|
| 212 |
+
}
|
| 213 |
+
|
| 214 |
+
// --- API Call Functions ---
|
| 215 |
+
|
| 216 |
+
// makeJulepRequest performs the actual HTTP request to a Julep endpoint.
|
| 217 |
+
// It handles request creation, sending, and basic response/error handling.
|
| 218 |
+
func makeJulepRequest(ctx context.Context, logger *slog.Logger, method, url string, headers http.Header, requestBody any, responseTarget any, reqID string) (int, error) {
|
| 219 |
+
logAttrs := []any{"request_id", reqID, "method", method, "url", url}
|
| 220 |
+
logger.Debug("Making Julep API request...", logAttrs...)
|
| 221 |
+
|
| 222 |
+
var reqBodyReader io.Reader
|
| 223 |
+
if requestBody != nil {
|
| 224 |
+
jsonBody, err := json.Marshal(requestBody)
|
| 225 |
+
if err != nil {
|
| 226 |
+
logger.Error("Failed to marshal Julep request body", append(logAttrs, "error", err)...)
|
| 227 |
+
return 0, fmt.Errorf("failed to marshal request body: %w", err)
|
| 228 |
+
}
|
| 229 |
+
reqBodyReader = bytes.NewBuffer(jsonBody)
|
| 230 |
+
logAttrs = append(logAttrs, "body_size", len(jsonBody)) // Log body size
|
| 231 |
+
}
|
| 232 |
+
|
| 233 |
+
httpReq, err := http.NewRequestWithContext(ctx, method, url, reqBodyReader)
|
| 234 |
+
if err != nil {
|
| 235 |
+
logger.Error("Failed to create Julep HTTP request", append(logAttrs, "error", err)...)
|
| 236 |
+
return 0, fmt.Errorf("failed to create HTTP request: %w", err)
|
| 237 |
+
}
|
| 238 |
+
|
| 239 |
+
// Copy essential headers (Authorization, Content-Type if body exists)
|
| 240 |
+
// Avoid copying Host, Content-Length etc.
|
| 241 |
+
if auth := headers.Get("Authorization"); auth != "" {
|
| 242 |
+
httpReq.Header.Set("Authorization", auth)
|
| 243 |
+
}
|
| 244 |
+
// Only set Content-Type if we have a body
|
| 245 |
+
if requestBody != nil {
|
| 246 |
+
httpReq.Header.Set("Content-Type", "application/json")
|
| 247 |
+
}
|
| 248 |
+
// Add other necessary headers if Julep requires them
|
| 249 |
+
|
| 250 |
+
startTime := time.Now()
|
| 251 |
+
httpResp, err := apiClient.Do(httpReq)
|
| 252 |
+
duration := time.Since(startTime)
|
| 253 |
+
logAttrs = append(logAttrs, "duration_ms", duration.Milliseconds())
|
| 254 |
+
|
| 255 |
+
if err != nil {
|
| 256 |
+
// Handle context deadline exceeded specifically
|
| 257 |
+
if errors.Is(err, context.DeadlineExceeded) {
|
| 258 |
+
logger.Error("Julep API request timed out", append(logAttrs, "error", err)...)
|
| 259 |
+
return http.StatusGatewayTimeout, fmt.Errorf("request to %s timed out: %w", url, err)
|
| 260 |
+
}
|
| 261 |
+
logger.Error("Julep API request failed", append(logAttrs, "error", err)...)
|
| 262 |
+
return 0, fmt.Errorf("failed to send request to %s: %w", url, err)
|
| 263 |
+
}
|
| 264 |
+
defer httpResp.Body.Close()
|
| 265 |
+
|
| 266 |
+
logAttrs = append(logAttrs, "status_code", httpResp.StatusCode)
|
| 267 |
+
|
| 268 |
+
// Read the body regardless of status code for potential error messages
|
| 269 |
+
respBodyBytes, readErr := io.ReadAll(httpResp.Body)
|
| 270 |
+
if readErr != nil {
|
| 271 |
+
logger.Warn("Failed to read Julep response body", append(logAttrs, "read_error", readErr)...)
|
| 272 |
+
// Continue processing status code error if possible
|
| 273 |
+
} else {
|
| 274 |
+
logAttrs = append(logAttrs, "response_size", len(respBodyBytes))
|
| 275 |
+
// Log trimmed response body for debugging (be careful with sensitive data)
|
| 276 |
+
// logger.Debug("Julep response body", append(logAttrs, "body", string(respBodyBytes))...)
|
| 277 |
+
}
|
| 278 |
+
|
| 279 |
+
// Check for non-successful status codes
|
| 280 |
+
if httpResp.StatusCode < 200 || httpResp.StatusCode >= 300 {
|
| 281 |
+
errMsg := fmt.Sprintf("Julep API returned error status %d", httpResp.StatusCode)
|
| 282 |
+
if len(respBodyBytes) > 0 {
|
| 283 |
+
errMsg = fmt.Sprintf("%s: %s", errMsg, string(respBodyBytes))
|
| 284 |
+
}
|
| 285 |
+
logger.Error("Julep API request returned non-2xx status", logAttrs...)
|
| 286 |
+
// Return the status code and an error containing the message
|
| 287 |
+
return httpResp.StatusCode, fmt.Errorf(errMsg)
|
| 288 |
+
}
|
| 289 |
+
|
| 290 |
+
// If successful and a response target is provided, decode the body
|
| 291 |
+
if responseTarget != nil && len(respBodyBytes) > 0 {
|
| 292 |
+
if err := json.Unmarshal(respBodyBytes, responseTarget); err != nil {
|
| 293 |
+
logger.Error("Failed to unmarshal Julep response body", append(logAttrs, "error", err, "body_preview", string(respBodyBytes[:min(len(respBodyBytes), 100)]))...)
|
| 294 |
+
return httpResp.StatusCode, fmt.Errorf("failed to decode Julep response: %w", err)
|
| 295 |
+
}
|
| 296 |
+
logger.Debug("Successfully decoded Julep response", logAttrs...)
|
| 297 |
+
} else {
|
| 298 |
+
logger.Debug("Julep request successful, no response body expected or decoded.", logAttrs...)
|
| 299 |
+
}
|
| 300 |
+
|
| 301 |
+
return httpResp.StatusCode, nil
|
| 302 |
+
}
|
| 303 |
+
|
| 304 |
+
// callJulepChat orchestrates the calls to Julep: create agent, create session, then chat.
|
| 305 |
+
func callJulepChat(ctx context.Context, logger *slog.Logger, headers http.Header, openaiReq OpenAIRequest, requestID string) (*OpenAIResponse, int, error) {
|
| 306 |
+
reqLogger := logger.With("request_id", requestID)
|
| 307 |
+
|
| 308 |
+
// --- 1. Create Agent ---
|
| 309 |
+
agentID := generateUUID()
|
| 310 |
+
agentURL := fmt.Sprintf("%s/agents/%s", JulepApiBaseURL, agentID)
|
| 311 |
+
agentPayload := CreateAgentPayload{
|
| 312 |
+
Name: fmt.Sprintf("temp-agent-%s", agentID),
|
| 313 |
+
About: "Temporary agent created for a chat session via proxy.",
|
| 314 |
+
}
|
| 315 |
+
reqLogger.Info("Creating temporary Julep agent", "agent_id", agentID)
|
| 316 |
+
statusCode, err := makeJulepRequest(ctx, reqLogger, http.MethodPost, agentURL, headers, agentPayload, nil, requestID) // No response body needed for agent creation? Adjust if needed.
|
| 317 |
+
if err != nil {
|
| 318 |
+
reqLogger.Error("Failed to create Julep agent", "error", err, "status_code", statusCode)
|
| 319 |
+
// Map status code for client response
|
| 320 |
+
if statusCode == 0 || statusCode >= 500 {
|
| 321 |
+
return nil, http.StatusBadGateway, fmt.Errorf("failed to initialize session (agent creation failed): %w", err)
|
| 322 |
+
}
|
| 323 |
+
return nil, statusCode, fmt.Errorf("failed to create agent: %w", err) // Propagate client-side errors if needed
|
| 324 |
+
}
|
| 325 |
+
reqLogger.Info("Julep agent created successfully", "agent_id", agentID)
|
| 326 |
+
|
| 327 |
+
// --- 2. Create Session ---
|
| 328 |
+
sessionID := generateUUID() // Julep uses UUID in path, so generate one here
|
| 329 |
+
sessionURL := fmt.Sprintf("%s/sessions/%s", JulepApiBaseURL, sessionID)
|
| 330 |
+
sessionPayload := CreateSessionPayload{
|
| 331 |
+
AgentID: agentID, // Link to the created agent
|
| 332 |
+
}
|
| 333 |
+
reqLogger.Info("Creating temporary Julep session", "session_id", sessionID, "linked_agent_id", agentID)
|
| 334 |
+
statusCode, err = makeJulepRequest(ctx, reqLogger, http.MethodPost, sessionURL, headers, sessionPayload, nil, requestID) // No response body needed? Adjust if needed.
|
| 335 |
+
if err != nil {
|
| 336 |
+
reqLogger.Error("Failed to create Julep session", "error", err, "status_code", statusCode)
|
| 337 |
+
// Maybe cleanup agent here if session fails? Omitted for simplicity.
|
| 338 |
+
if statusCode == 0 || statusCode >= 500 {
|
| 339 |
+
return nil, http.StatusBadGateway, fmt.Errorf("failed to initialize session (session creation failed): %w", err)
|
| 340 |
+
}
|
| 341 |
+
return nil, statusCode, fmt.Errorf("failed to create session: %w", err)
|
| 342 |
+
}
|
| 343 |
+
reqLogger.Info("Julep session created successfully", "session_id", sessionID)
|
| 344 |
+
|
| 345 |
+
// --- 3. Call Chat Endpoint ---
|
| 346 |
+
chatURL := fmt.Sprintf("%s/sessions/%s/chat", JulepApiBaseURL, sessionID)
|
| 347 |
+
julepPayload := convertOpenaiToJulep(openaiReq)
|
| 348 |
+
reqLogger.Info("Calling Julep chat endpoint", "url", chatURL)
|
| 349 |
+
|
| 350 |
+
// Use a longer timeout context specifically for the chat call if needed
|
| 351 |
+
chatCtx := ctx // Use original context by default
|
| 352 |
+
if _, ok := ctx.Deadline(); !ok { // If no deadline set on original context, apply chat timeout
|
| 353 |
+
var cancel context.CancelFunc
|
| 354 |
+
chatCtx, cancel = context.WithTimeout(context.Background(), chatTimeout)
|
| 355 |
+
defer cancel()
|
| 356 |
+
reqLogger.Debug("Applying specific timeout for chat request", "timeout", chatTimeout)
|
| 357 |
+
}
|
| 358 |
+
|
| 359 |
+
var julepResponse JulepChatResponse
|
| 360 |
+
statusCode, err = makeJulepRequest(chatCtx, reqLogger, http.MethodPost, chatURL, headers, julepPayload, &julepResponse, requestID)
|
| 361 |
+
if err != nil {
|
| 362 |
+
reqLogger.Error("Julep chat request failed", "error", err, "status_code", statusCode)
|
| 363 |
+
// Map Julep error status codes to appropriate client responses
|
| 364 |
+
if statusCode == 0 || statusCode >= 500 || statusCode == http.StatusGatewayTimeout || statusCode == http.StatusServiceUnavailable {
|
| 365 |
+
return nil, http.StatusBadGateway, fmt.Errorf("upstream API error during chat: %w", err)
|
| 366 |
+
}
|
| 367 |
+
// Propagate other errors (e.g., 4xx from Julep)
|
| 368 |
+
return nil, statusCode, fmt.Errorf("julep chat API error: %w", err)
|
| 369 |
+
}
|
| 370 |
+
reqLogger.Info("Julep chat request successful")
|
| 371 |
+
|
| 372 |
+
// --- 4. Convert Julep Response to OpenAI Response ---
|
| 373 |
+
openaiResponse := convertJulepToOpenai(&julepResponse, openaiReq.Model, sessionID)
|
| 374 |
+
|
| 375 |
+
// Optional: Consider deleting the temporary agent/session here
|
| 376 |
+
// reqLogger.Info("Skipping temporary agent/session cleanup for now.")
|
| 377 |
+
|
| 378 |
+
// Return the converted response, final status (OK), and no error
|
| 379 |
+
return openaiResponse, http.StatusOK, nil
|
| 380 |
+
}
|
| 381 |
+
|
| 382 |
+
// min returns the smaller of two ints; used for clamping byte-slice
// previews in log output.
func min(a, b int) int {
	if a > b {
		return b
	}
	return a
}
|
utils.go
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
package main
|
| 2 |
+
|
| 3 |
+
import (
|
| 4 |
+
"encoding/json"
|
| 5 |
+
"log/slog"
|
| 6 |
+
"net/http"
|
| 7 |
+
|
| 8 |
+
"github.com/google/uuid"
|
| 9 |
+
)
|
| 10 |
+
|
| 11 |
+
// writeJSONError sends a standard JSON error response.
|
| 12 |
+
func writeJSONError(w http.ResponseWriter, logger *slog.Logger, statusCode int, message string, errType *string, errCode *string, param *string) {
|
| 13 |
+
w.Header().Set("Content-Type", "application/json")
|
| 14 |
+
w.WriteHeader(statusCode)
|
| 15 |
+
resp := ErrorResponse{
|
| 16 |
+
Error: APIError{
|
| 17 |
+
Message: message,
|
| 18 |
+
Type: errType,
|
| 19 |
+
Code: errCode,
|
| 20 |
+
Param: param,
|
| 21 |
+
},
|
| 22 |
+
}
|
| 23 |
+
if err := json.NewEncoder(w).Encode(resp); err != nil {
|
| 24 |
+
// If encoding fails, log it but we can't send another response
|
| 25 |
+
logger.Error("Failed to encode error response", "error", err)
|
| 26 |
+
}
|
| 27 |
+
}
|
| 28 |
+
|
| 29 |
+
// generateUUID creates a new UUID string.
|
| 30 |
+
func generateUUID() string {
|
| 31 |
+
return uuid.NewString()
|
| 32 |
+
}
|
| 33 |
+
|
| 34 |
+
// stringPtr returns a pointer to s, or nil when s is empty so that
// omitempty-style optional fields stay unset.
func stringPtr(s string) *string {
	if s != "" {
		return &s
	}
	return nil
}
|