|
|
import fetch from 'node-fetch'
|
|
|
import { getModelById, getEndpointByType } from '../../configs/config.js'
|
|
|
import { transformToAnthropic, getAnthropicHeaders } from '../../transformers/requests/anthropic.js'
|
|
|
import { transformToOpenAI, getOpenAIHeaders } from '../../transformers/requests/openai.js'
|
|
|
import { transformToCommon, getCommonHeaders } from '../../transformers/requests/common.js'
|
|
|
import { AnthropicResponseTransformer } from '../../transformers/responses/anthropic.js'
|
|
|
import { OpenAIResponseTransformer } from '../../transformers/responses/openai.js'
|
|
|
import { getNextProxyAgent } from '../../managers/proxy.js'
|
|
|
import { getAuthHeader } from '../utils/auth.js'
|
|
|
import { convertResponseToChatCompletion } from '../utils/converter.js'
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Claude models that reject requests specifying both `temperature` and `top_p`
// at the same time; when both arrive we keep `temperature` and drop `top_p`.
const TOP_P_EXCLUSIVE_MODELS = new Set([
  'claude-sonnet-4-5-20250929',
  'claude-haiku-4-5-20251001',
  'claude-opus-4-1-20250805',
  'claude-opus-4-5-20251101',
])

/**
 * POST /v1/chat/completions handler.
 *
 * Accepts an OpenAI-style chat-completion request, resolves the target model
 * and endpoint from config, transforms the payload/headers for the endpoint
 * type (`anthropic` | `openai` | `common`), forwards it upstream (optionally
 * through a rotating proxy agent), and relays the response back to the client
 * either as an SSE stream or as a JSON body.
 *
 * @param {import('express').Request} req  - incoming client request (body is the OpenAI-format payload)
 * @param {import('express').Response} res - client response to write into
 */
export async function handleChatCompletions(req, res) {
  try {
    const openaiRequest = req.body
    const modelId = openaiRequest.model

    if (!modelId) {
      return res.status(400).json({ error: '需要提供 model 参数' })
    }

    const model = getModelById(modelId)
    if (!model) {
      return res.status(404).json({ error: `未找到模型 ${modelId}` })
    }

    const endpoint = getEndpointByType(model.type)
    if (!endpoint) {
      return res.status(500).json({ error: `未找到端点类型 ${model.type}` })
    }

    const authHeader = getAuthHeader(req)
    if (!authHeader) {
      return res.status(401).json({
        error: '未提供认证信息',
        message: '请在请求头中提供 Authorization 或 x-api-key'
      })
    }

    // Some Claude models reject both sampling knobs at once; prefer temperature.
    // Use null-checks (not truthiness) so temperature: 0 / top_p: 0 still count
    // as "provided" — the old `a && b` form silently skipped this for 0 values.
    if (
      TOP_P_EXCLUSIVE_MODELS.has(modelId) &&
      openaiRequest.temperature != null &&
      openaiRequest.top_p != null
    ) {
      delete openaiRequest.top_p
    }

    const isStreaming = openaiRequest.stream === true
    const prepared = prepareUpstreamRequest(model, openaiRequest, authHeader, req.headers, isStreaming)
    if (!prepared) {
      return res.status(500).json({ error: `未知的端点类型: ${model.type}` })
    }
    const { transformedRequest, headers } = prepared

    const fetchOptions = {
      method: 'POST',
      headers,
      body: JSON.stringify(transformedRequest)
    }

    // Optional per-request proxy rotation.
    const proxyAgentInfo = getNextProxyAgent(endpoint.base_url)
    if (proxyAgentInfo?.agent) {
      fetchOptions.agent = proxyAgentInfo.agent
    }

    const response = await fetch(endpoint.base_url, fetchOptions)

    if (!response.ok) {
      const errorText = await response.text()
      console.error(`端点错误: ${response.status}`, errorText)
      return res.status(response.status).json({
        error: `端点返回 ${response.status}`,
        details: errorText
      })
    }

    // Decide on the transformed payload's stream flag (transformers may alter it).
    if (transformedRequest.stream === true) {
      await streamUpstreamResponse(model, modelId, response, res)
    } else {
      await sendJsonUpstreamResponse(model, response, res)
    }
  } catch (error) {
    console.error('/v1/chat/completions 错误:', error)
    // If streaming already began, the status line and headers are on the wire;
    // calling res.status().json() here would throw ERR_HTTP_HEADERS_SENT.
    // Terminate the connection instead.
    if (res.headersSent) {
      res.end()
      return
    }
    res.status(500).json({
      error: '内部服务器错误',
      message: error.message
    })
  }
}

/**
 * Build the upstream request body and headers for the endpoint type.
 * Returns `null` for an unrecognized type so the caller can 500.
 *
 * @param {object} model          - model config entry (uses `model.type`)
 * @param {object} openaiRequest  - OpenAI-format request body
 * @param {string} authHeader     - resolved client credential
 * @param {object} clientHeaders  - raw incoming request headers
 * @param {boolean} isStreaming   - whether the client asked for SSE
 * @returns {{ transformedRequest: object, headers: object } | null}
 */
function prepareUpstreamRequest(model, openaiRequest, authHeader, clientHeaders, isStreaming) {
  if (model.type === 'anthropic') {
    return {
      transformedRequest: transformToAnthropic(openaiRequest),
      headers: getAnthropicHeaders(authHeader, clientHeaders, isStreaming, openaiRequest.model)
    }
  }
  if (model.type === 'openai') {
    return {
      transformedRequest: transformToOpenAI(openaiRequest),
      headers: getOpenAIHeaders(authHeader, clientHeaders)
    }
  }
  if (model.type === 'common') {
    return {
      transformedRequest: transformToCommon(openaiRequest),
      headers: getCommonHeaders(authHeader, clientHeaders)
    }
  }
  return null
}

/**
 * Relay a streaming upstream response to the client as SSE.
 * `common` endpoints are passed through verbatim; anthropic/openai bodies are
 * run through the matching response transformer to produce OpenAI-format chunks.
 * Always ends the response, even on a mid-stream error.
 */
async function streamUpstreamResponse(model, modelId, response, res) {
  res.setHeader('Content-Type', 'text/event-stream')
  res.setHeader('Cache-Control', 'no-cache')
  res.setHeader('Connection', 'keep-alive')

  try {
    if (model.type === 'common') {
      // Already OpenAI-compatible SSE — pipe bytes straight through.
      for await (const chunk of response.body) {
        res.write(chunk)
      }
    } else {
      // Only 'anthropic' and 'openai' reach here (others rejected earlier).
      const transformer = model.type === 'anthropic'
        ? new AnthropicResponseTransformer(modelId, `chatcmpl-${Date.now()}`)
        : new OpenAIResponseTransformer(modelId, `chatcmpl-${Date.now()}`)
      for await (const chunk of transformer.transformStream(response.body)) {
        res.write(chunk)
      }
    }
  } catch (streamError) {
    console.error('流错误:', streamError)
  } finally {
    res.end()
  }
}

/**
 * Relay a non-streaming upstream response as a single JSON body.
 * For `openai` endpoints, conversion to chat.completion format is best-effort:
 * on failure the raw upstream payload is returned unchanged.
 */
async function sendJsonUpstreamResponse(model, response, res) {
  const data = await response.json()
  if (model.type === 'openai') {
    try {
      res.json(convertResponseToChatCompletion(data))
    } catch (e) {
      // Conversion failed — fall back to the untouched upstream payload.
      res.json(data)
    }
  } else {
    res.json(data)
  }
}
|
|
|
|
|
|
|