KaThaNg committed on
Commit
7ee65e2
·
verified ·
1 Parent(s): 999cf16

Update convert.go

Browse files
Files changed (1) hide show
  1. convert.go +30 -41
convert.go CHANGED
@@ -24,14 +24,11 @@ func estimateTokens(text string) int {
24
  func calculateInputTokensFromClaudeRequest(claudeReq *ClaudeRequest) int {
25
  totalChars := 0
26
 
27
- // Process system prompt
28
  if len(claudeReq.System) > 0 {
29
- // Try unmarshaling as string first
30
  var systemStr string
31
  if err := json.Unmarshal(claudeReq.System, &systemStr); err == nil {
32
  totalChars += len(systemStr)
33
  } else {
34
- // Try unmarshaling as list of blocks
35
  var systemBlocks []ClaudeContentBlock
36
  if err := json.Unmarshal(claudeReq.System, &systemBlocks); err == nil {
37
  for _, block := range systemBlocks {
@@ -40,19 +37,17 @@ func calculateInputTokensFromClaudeRequest(claudeReq *ClaudeRequest) int {
40
  }
41
  }
42
  } else {
 
43
  log.Printf("WARN: Could not parse system prompt format: %s", string(claudeReq.System))
44
  }
45
  }
46
  }
47
 
48
- // Process messages
49
  for _, msg := range claudeReq.Messages {
50
- // Try unmarshaling as string first
51
  var contentStr string
52
  if err := json.Unmarshal(msg.Content, &contentStr); err == nil {
53
  totalChars += len(contentStr)
54
  } else {
55
- // Try unmarshaling as list of blocks
56
  var contentBlocks []ClaudeContentBlock
57
  if err := json.Unmarshal(msg.Content, &contentBlocks); err == nil {
58
  for _, block := range contentBlocks {
@@ -61,13 +56,15 @@ func calculateInputTokensFromClaudeRequest(claudeReq *ClaudeRequest) int {
61
  }
62
  }
63
  } else {
 
64
  log.Printf("WARN: Could not parse message content format for role %s: %s", msg.Role, string(msg.Content))
65
  }
66
  }
67
  }
68
 
69
- estimated := estimateTokens(fmt.Sprintf("%d", totalChars)) // Pass total chars as string to estimate
70
- log.Printf("DEBUG: Estimated input characters: %d, Estimated input tokens: %d", totalChars, estimated)
 
71
  return estimated
72
  }
73
 
@@ -75,15 +72,12 @@ func calculateInputTokensFromClaudeRequest(claudeReq *ClaudeRequest) int {
75
  func convertClaudeRequestToOpenAI(claudeReq *ClaudeRequest) (*OpenAIRequest, error) {
76
  openAIMessages := []OpenAIMessage{}
77
 
78
- // --- Handle System Prompt ---
79
  if len(claudeReq.System) > 0 {
80
  systemContent := ""
81
  var systemStr string
82
- // Try simple string first
83
  if err := json.Unmarshal(claudeReq.System, &systemStr); err == nil {
84
  systemContent = systemStr
85
  } else {
86
- // Try list of blocks
87
  var systemBlocks []ClaudeContentBlock
88
  if err := json.Unmarshal(claudeReq.System, &systemBlocks); err == nil {
89
  var parts []string
@@ -94,8 +88,8 @@ func convertClaudeRequestToOpenAI(claudeReq *ClaudeRequest) (*OpenAIRequest, err
94
  }
95
  systemContent = strings.Join(parts, "\n")
96
  } else {
 
97
  log.Printf("WARN: Could not parse system prompt format for conversion: %s", string(claudeReq.System))
98
- // Decide how to handle - skip system prompt or return error? Skipping for now.
99
  }
100
  }
101
  if systemContent != "" {
@@ -103,20 +97,18 @@ func convertClaudeRequestToOpenAI(claudeReq *ClaudeRequest) (*OpenAIRequest, err
103
  }
104
  }
105
 
106
- // --- Handle Messages ---
107
  for _, msg := range claudeReq.Messages {
108
  if msg.Role != "user" && msg.Role != "assistant" {
 
109
  log.Printf("WARN: Skipping message with unsupported role: %s", msg.Role)
110
  continue
111
  }
112
 
113
  messageContent := ""
114
  var contentStr string
115
- // Try simple string first
116
  if err := json.Unmarshal(msg.Content, &contentStr); err == nil {
117
  messageContent = contentStr
118
  } else {
119
- // Try list of blocks
120
  var contentBlocks []ClaudeContentBlock
121
  if err := json.Unmarshal(msg.Content, &contentBlocks); err == nil {
122
  var parts []string
@@ -124,20 +116,22 @@ func convertClaudeRequestToOpenAI(claudeReq *ClaudeRequest) (*OpenAIRequest, err
124
  if block.Type == "text" {
125
  parts = append(parts, block.Text)
126
  } else {
 
127
  log.Printf("WARN: Skipping non-text content block type '%s' in message for role %s", block.Type, msg.Role)
128
  }
129
  }
130
  messageContent = strings.Join(parts, "\n")
131
  } else {
 
132
  log.Printf("WARN: Could not parse message content format for role %s during conversion: %s", msg.Role, string(msg.Content))
133
- // Skip message if content parsing fails
134
  continue
135
  }
136
  }
137
 
138
- if messageContent != "" || msg.Role == "assistant" { // Allow empty assistant messages if needed? Check OpenAI spec. Usually needs content.
139
  openAIMessages = append(openAIMessages, OpenAIMessage{Role: msg.Role, Content: messageContent})
140
  } else {
 
141
  log.Printf("WARN: Skipping message for role %s with no valid text content after parsing.", msg.Role)
142
  }
143
  }
@@ -146,28 +140,25 @@ func convertClaudeRequestToOpenAI(claudeReq *ClaudeRequest) (*OpenAIRequest, err
146
  return nil, errors.New("conversion resulted in no valid messages for OpenAI request")
147
  }
148
 
149
- // --- Construct OpenAI Request ---
150
  openAIReq := &OpenAIRequest{
151
- Model: claudeReq.Model, // Use the model specified in Claude request
152
  Messages: openAIMessages,
153
  Stream: claudeReq.Stream,
154
  MaxTokens: claudeReq.MaxTokens,
155
  Temperature: claudeReq.Temperature,
156
  TopP: claudeReq.TopP,
157
- TopK: claudeReq.TopK, // <<<<< GÁN GIÁ TRỊ TopK TỪ YÊU CẦU CLAUDE
158
- // Stop sequences mapping
159
  Stop: claudeReq.StopSequences,
160
  }
161
 
162
- // Default model if not provided
163
  if openAIReq.Model == "" {
164
- openAIReq.Model = "gpt-3.5-turbo" // Or get from config
165
  }
166
 
167
- // Ghi log nếu TopK được gửi đi (để tiện debug)
168
- if openAIReq.TopK != nil {
169
- log.Printf("INFO: Attempting to send non-standard 'top_k': %d to upstream OpenAI endpoint.", *openAIReq.TopK)
170
- }
171
 
172
  return openAIReq, nil
173
  }
@@ -175,7 +166,7 @@ func convertClaudeRequestToOpenAI(claudeReq *ClaudeRequest) (*OpenAIRequest, err
175
  // mapOpenAIFinishReasonToClaude maps OpenAI finish reason to Claude stop reason
176
  func mapOpenAIFinishReasonToClaude(openAIFinishReason *string) string {
177
  if openAIFinishReason == nil {
178
- return "end_turn" // Default if nil
179
  }
180
  reason := *openAIFinishReason
181
  switch reason {
@@ -186,10 +177,11 @@ func mapOpenAIFinishReasonToClaude(openAIFinishReason *string) string {
186
  case "function_call", "tool_calls":
187
  return "tool_use"
188
  case "content_filter":
189
- return "stop_sequence" // Or maybe map to an error type?
190
  default:
 
191
  log.Printf("WARN: Unknown OpenAI finish reason '%s', mapping to 'end_turn'", reason)
192
- return "end_turn" // Default for unknown reasons
193
  }
194
  }
195
 
@@ -202,41 +194,38 @@ func convertOpenAIResponseToClaude(openAIResp *OpenAIResponse, claudeRequestID s
202
  choice := openAIResp.Choices[0]
203
  claudeStopReason := mapOpenAIFinishReasonToClaude(choice.FinishReason)
204
 
205
- // --- Prepare Usage ---
206
  claudeUsage := ClaudeUsage{}
207
  if openAIResp.Usage != nil {
208
  claudeUsage.InputTokens = openAIResp.Usage.PromptTokens
209
  claudeUsage.OutputTokens = openAIResp.Usage.CompletionTokens
210
  } else {
 
211
  log.Printf("WARN: [%s] Usage data missing in non-streaming OpenAI response", claudeRequestID)
212
- // Potentially estimate usage here if critical, otherwise leave as zero
213
  }
214
 
215
- // --- Prepare Content ---
216
  claudeContent := []ClaudeContentBlock{
217
  {
218
  Type: "text",
219
- Text: choice.Message.Content, // Assuming message content is always text
220
  },
221
  }
222
 
223
- // --- Construct Claude Response ---
224
  claudeResp := &ClaudeResponse{
225
- ID: openAIResp.ID, // Use OpenAI's response ID
226
  Type: "message",
227
- Role: "assistant", // Assuming OpenAI response role is assistant
228
  Content: claudeContent,
229
- Model: openAIResp.Model, // Use the model OpenAI reported
230
  StopReason: claudeStopReason,
231
- StopSequence: nil, // Typically null
232
  Usage: claudeUsage,
233
  }
234
 
235
- // Use original request ID if OpenAI ID is missing (shouldn't happen often)
236
  if claudeResp.ID == "" {
 
237
  log.Printf("WARN: OpenAI response ID missing, using original request ID: %s", claudeRequestID)
238
  claudeResp.ID = claudeRequestID
239
  }
240
 
241
  return claudeResp, nil
242
- }
 
24
  func calculateInputTokensFromClaudeRequest(claudeReq *ClaudeRequest) int {
25
  totalChars := 0
26
 
 
27
  if len(claudeReq.System) > 0 {
 
28
  var systemStr string
29
  if err := json.Unmarshal(claudeReq.System, &systemStr); err == nil {
30
  totalChars += len(systemStr)
31
  } else {
 
32
  var systemBlocks []ClaudeContentBlock
33
  if err := json.Unmarshal(claudeReq.System, &systemBlocks); err == nil {
34
  for _, block := range systemBlocks {
 
37
  }
38
  }
39
  } else {
40
+ // Giữ lại: Cảnh báo về việc không parse được system prompt
41
  log.Printf("WARN: Could not parse system prompt format: %s", string(claudeReq.System))
42
  }
43
  }
44
  }
45
 
 
46
  for _, msg := range claudeReq.Messages {
 
47
  var contentStr string
48
  if err := json.Unmarshal(msg.Content, &contentStr); err == nil {
49
  totalChars += len(contentStr)
50
  } else {
 
51
  var contentBlocks []ClaudeContentBlock
52
  if err := json.Unmarshal(msg.Content, &contentBlocks); err == nil {
53
  for _, block := range contentBlocks {
 
56
  }
57
  }
58
  } else {
59
+ // Giữ lại: Cảnh báo về việc không parse được message content
60
  log.Printf("WARN: Could not parse message content format for role %s: %s", msg.Role, string(msg.Content))
61
  }
62
  }
63
  }
64
 
65
+ estimated := estimateTokens(fmt.Sprintf("%d", totalChars))
66
+ // Vô hiệu hóa: Log DEBUG không cần thiết cho hoạt động bình thường
67
+ // log.Printf("DEBUG: Estimated input characters: %d, Estimated input tokens: %d", totalChars, estimated)
68
  return estimated
69
  }
70
 
 
72
  func convertClaudeRequestToOpenAI(claudeReq *ClaudeRequest) (*OpenAIRequest, error) {
73
  openAIMessages := []OpenAIMessage{}
74
 
 
75
  if len(claudeReq.System) > 0 {
76
  systemContent := ""
77
  var systemStr string
 
78
  if err := json.Unmarshal(claudeReq.System, &systemStr); err == nil {
79
  systemContent = systemStr
80
  } else {
 
81
  var systemBlocks []ClaudeContentBlock
82
  if err := json.Unmarshal(claudeReq.System, &systemBlocks); err == nil {
83
  var parts []string
 
88
  }
89
  systemContent = strings.Join(parts, "\n")
90
  } else {
91
+ // Giữ lại: Cảnh báo
92
  log.Printf("WARN: Could not parse system prompt format for conversion: %s", string(claudeReq.System))
 
93
  }
94
  }
95
  if systemContent != "" {
 
97
  }
98
  }
99
 
 
100
  for _, msg := range claudeReq.Messages {
101
  if msg.Role != "user" && msg.Role != "assistant" {
102
+ // Giữ lại: Cảnh báo
103
  log.Printf("WARN: Skipping message with unsupported role: %s", msg.Role)
104
  continue
105
  }
106
 
107
  messageContent := ""
108
  var contentStr string
 
109
  if err := json.Unmarshal(msg.Content, &contentStr); err == nil {
110
  messageContent = contentStr
111
  } else {
 
112
  var contentBlocks []ClaudeContentBlock
113
  if err := json.Unmarshal(msg.Content, &contentBlocks); err == nil {
114
  var parts []string
 
116
  if block.Type == "text" {
117
  parts = append(parts, block.Text)
118
  } else {
119
+ // Giữ lại: Cảnh báo
120
  log.Printf("WARN: Skipping non-text content block type '%s' in message for role %s", block.Type, msg.Role)
121
  }
122
  }
123
  messageContent = strings.Join(parts, "\n")
124
  } else {
125
+ // Giữ lại: Cảnh báo
126
  log.Printf("WARN: Could not parse message content format for role %s during conversion: %s", msg.Role, string(msg.Content))
 
127
  continue
128
  }
129
  }
130
 
131
+ if messageContent != "" || msg.Role == "assistant" {
132
  openAIMessages = append(openAIMessages, OpenAIMessage{Role: msg.Role, Content: messageContent})
133
  } else {
134
+ // Giữ lại: Cảnh báo
135
  log.Printf("WARN: Skipping message for role %s with no valid text content after parsing.", msg.Role)
136
  }
137
  }
 
140
  return nil, errors.New("conversion resulted in no valid messages for OpenAI request")
141
  }
142
 
 
143
  openAIReq := &OpenAIRequest{
144
+ Model: claudeReq.Model,
145
  Messages: openAIMessages,
146
  Stream: claudeReq.Stream,
147
  MaxTokens: claudeReq.MaxTokens,
148
  Temperature: claudeReq.Temperature,
149
  TopP: claudeReq.TopP,
150
+ TopK: claudeReq.TopK,
 
151
  Stop: claudeReq.StopSequences,
152
  }
153
 
 
154
  if openAIReq.Model == "" {
155
+ openAIReq.Model = "gpt-3.5-turbo"
156
  }
157
 
158
+ // Vô hiệu hóa: Log INFO này có thể hữu ích khi debug TopK, nhưng không cần thiết cho hoạt động bình thường
159
+ // if openAIReq.TopK != nil {
160
+ // log.Printf("INFO: Attempting to send non-standard 'top_k': %d to upstream OpenAI endpoint.", *openAIReq.TopK)
161
+ // }
162
 
163
  return openAIReq, nil
164
  }
 
166
  // mapOpenAIFinishReasonToClaude maps OpenAI finish reason to Claude stop reason
167
  func mapOpenAIFinishReasonToClaude(openAIFinishReason *string) string {
168
  if openAIFinishReason == nil {
169
+ return "end_turn"
170
  }
171
  reason := *openAIFinishReason
172
  switch reason {
 
177
  case "function_call", "tool_calls":
178
  return "tool_use"
179
  case "content_filter":
180
+ return "stop_sequence"
181
  default:
182
+ // Giữ lại: Cảnh báo về lý do không xác định
183
  log.Printf("WARN: Unknown OpenAI finish reason '%s', mapping to 'end_turn'", reason)
184
+ return "end_turn"
185
  }
186
  }
187
 
 
194
  choice := openAIResp.Choices[0]
195
  claudeStopReason := mapOpenAIFinishReasonToClaude(choice.FinishReason)
196
 
 
197
  claudeUsage := ClaudeUsage{}
198
  if openAIResp.Usage != nil {
199
  claudeUsage.InputTokens = openAIResp.Usage.PromptTokens
200
  claudeUsage.OutputTokens = openAIResp.Usage.CompletionTokens
201
  } else {
202
+ // Giữ lại: Cảnh báo quan trọng
203
  log.Printf("WARN: [%s] Usage data missing in non-streaming OpenAI response", claudeRequestID)
 
204
  }
205
 
 
206
  claudeContent := []ClaudeContentBlock{
207
  {
208
  Type: "text",
209
+ Text: choice.Message.Content,
210
  },
211
  }
212
 
 
213
  claudeResp := &ClaudeResponse{
214
+ ID: openAIResp.ID,
215
  Type: "message",
216
+ Role: "assistant",
217
  Content: claudeContent,
218
+ Model: openAIResp.Model,
219
  StopReason: claudeStopReason,
220
+ StopSequence: nil,
221
  Usage: claudeUsage,
222
  }
223
 
 
224
  if claudeResp.ID == "" {
225
+ // Giữ lại: Cảnh báo
226
  log.Printf("WARN: OpenAI response ID missing, using original request ID: %s", claudeRequestID)
227
  claudeResp.ID = claudeRequestID
228
  }
229
 
230
  return claudeResp, nil
231
+ }