// fdsrsr/index.js — "Update index.js" (commit 08d83f2, verified)
import express from 'express';
import fetch from 'node-fetch';
import dotenv from 'dotenv';
// Load .env into process.env before any configuration below is read.
dotenv.config();
// ================= Configuration =================
// Port the proxy listens on (default 3000).
const PORT = process.env.PORT || 3000;
// Key that clients must present in their Authorization header.
const PROXY_AUTH_KEY = process.env.PROXY_AUTH_KEY;
// Upstream OpenAI-compatible chat-completions endpoint.
const UPSTREAM_URL = process.env.UPSTREAM_URL || 'https://llm-gateway.assemblyai.com/v1/chat/completions';
// Read and parse the upstream key list from the environment.
// NOTE: the original comment documented comma-separated keys while the code
// only split on ';' — accept both separators so either format works.
// Entries are trimmed and empty items dropped.
const parseKeyList = (raw) => raw
  .split(/[;,]/)
  .map((k) => k.trim())
  .filter((k) => k.length > 0);
const UPSTREAM_KEYS = parseKeyList(process.env.UPSTREAM_KEYS || '');
// Fail fast when required configuration is missing.
if (!PROXY_AUTH_KEY) {
console.error('Error: 请在环境变量中配置 PROXY_AUTH_KEY');
process.exit(1);
}
if (UPSTREAM_KEYS.length === 0) {
console.error('Error: 请在环境变量 UPSTREAM_KEYS 中至少配置一个 Key');
process.exit(1);
}
// ==============================================
// Round-robin cursor into UPSTREAM_KEYS; always stays in [0, length).
let keyIndex = 0;

/**
 * Pick the next upstream API key using round-robin rotation.
 * @returns {string} the key at the current cursor position
 */
const getNextKey = () => {
  const selected = UPSTREAM_KEYS[keyIndex];
  keyIndex = keyIndex + 1 >= UPSTREAM_KEYS.length ? 0 : keyIndex + 1;
  return selected;
};
const app = express();
// Parse JSON request bodies; limit raised to 50mb so large prompts don't fail.
app.use(express.json({ limit: '50mb' }));
/**
 * Middleware: validate the client's proxy key.
 *
 * Expects `Authorization: Bearer <PROXY_AUTH_KEY>`; a bare token with no
 * Bearer prefix is also accepted (matching the original behavior of
 * stripping the prefix when present). Responds 401 when the header is
 * missing, 403 when the token does not match PROXY_AUTH_KEY, otherwise
 * passes control to the next handler.
 *
 * NOTE(review): the comparison below is not constant-time; a hardened
 * deployment should consider crypto.timingSafeEqual.
 */
const authMiddleware = (req, res, next) => {
  const authHeader = req.headers['authorization'];
  if (!authHeader) {
    return res.status(401).json({ error: 'Missing Authorization header' });
  }
  // Strip only a leading (case-insensitive) "Bearer" scheme prefix.
  // The previous implementation used replace('Bearer ', ''), which would
  // also mangle a token that merely *contained* the substring "Bearer ".
  const schemeMatch = /^Bearer\s+/i.exec(authHeader);
  const token = (schemeMatch ? authHeader.slice(schemeMatch[0].length) : authHeader).trim();
  if (token !== PROXY_AUTH_KEY) {
    return res.status(403).json({ error: 'Invalid Proxy Authorization Key' });
  }
  next();
};
/**
 * Normalize an OpenAI-style message list for the upstream gateway:
 *  1. Re-map role: system -> user.
 *  2. Keep only textual content (image_url and other part types dropped).
 *  3. Merge consecutive messages with the same role, joined by '\n'.
 *  4. Emit plain { role, content } objects with string content.
 *
 * Fixes over the original: null/non-object message entries no longer crash
 * (`msg.role` on null threw), null parts in a content array no longer crash
 * (`item.type` on null threw), and text parts missing a string `.text` no
 * longer inject stray blank lines via `join`.
 *
 * @param {Array<{role: string, content: (string|Array<object>)}>} messages
 * @returns {Array<{role: string, content: string}>} normalized messages
 */
const processMessages = (messages) => {
  if (!Array.isArray(messages)) return [];

  // Extract the plain-text portion of a message's content, or '' if none.
  const extractText = (content) => {
    if (typeof content === 'string') return content;
    if (!Array.isArray(content)) return '';
    return content
      .filter((part) => part && part.type === 'text' && typeof part.text === 'string')
      .map((part) => part.text)
      .join('\n');
  };

  const processedMessages = [];
  let currentMessage = null;

  for (const msg of messages) {
    // Guard: skip null/garbage entries instead of crashing on msg.role.
    if (!msg || typeof msg !== 'object') continue;

    // 1. Convert role: system -> user.
    const role = msg.role === 'system' ? 'user' : msg.role;

    // 2. Extract text-only content (images filtered out).
    const textContent = extractText(msg.content);
    if (!textContent) continue; // nothing textual survived — drop the message

    // 3. Merge consecutive same-role messages.
    if (currentMessage && currentMessage.role === role) {
      currentMessage.textBuffer += '\n' + textContent;
    } else {
      if (currentMessage) {
        processedMessages.push({ role: currentMessage.role, content: currentMessage.textBuffer });
      }
      currentMessage = { role, textBuffer: textContent };
    }
  }

  // Flush the final pending message block.
  if (currentMessage) {
    processedMessages.push({ role: currentMessage.role, content: currentMessage.textBuffer });
  }
  return processedMessages;
};
/**
 * POST /v1/chat/completions — proxies an OpenAI-style chat request to the
 * upstream gateway using a round-robin upstream key. Supports both
 * streaming (SSE pass-through) and plain JSON responses.
 */
app.post('/v1/chat/completions', authMiddleware, async (req, res) => {
try {
const { model, messages, stream = false, ...restBody } = req.body;
// Normalize the message list (system->user, text only, merged roles).
const newMessages = processMessages(messages);
// Rotate to the next upstream key.
const currentKey = getNextKey();
// getNextKey() already advanced keyIndex, so this reconstructs the index
// of the key that was just handed out.
console.log(`[Request] Model: ${model}, Using Key Index: ${keyIndex === 0 ? UPSTREAM_KEYS.length - 1 : keyIndex - 1}`);
// Build the upstream request body. `model`, `messages`, and `stream` were
// destructured out above, so restBody can only add passthrough parameters,
// not override these fields.
const upstreamBody = {
model: model,
messages: newMessages,
stream: stream,
...restBody
};
// Forward the request upstream with the rotated key.
const response = await fetch(UPSTREAM_URL, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${currentKey}`
},
body: JSON.stringify(upstreamBody)
});
if (!response.ok) {
const errText = await response.text();
console.error('[Upstream Error]', response.status, errText);
return res.status(response.status).send(errText);
}
// Streaming (SSE) response: pipe upstream chunks straight to the client.
if (stream) {
res.setHeader('Content-Type', 'text/event-stream');
res.setHeader('Cache-Control', 'no-cache');
res.setHeader('Connection', 'keep-alive');
if (response.body) {
// node-fetch exposes the body as an async-iterable Node.js stream.
for await (const chunk of response.body) {
res.write(chunk);
}
res.end();
}
} else {
// Non-streaming: relay the upstream JSON as-is.
const data = await response.json();
res.json(data);
}
} catch (error) {
console.error('[Server Error]', error);
res.status(500).json({ error: 'Internal Server Error', details: error.message });
}
});
/**
 * GET /v1/models — serve a static, OpenAI-compatible model catalog.
 * The entries all share one shape, so they are generated from a small
 * builder instead of seventeen hand-written literals.
 */
app.get('/v1/models', authMiddleware, (req, res) => {
  // Build one catalog entry in the OpenAI-compatible model-list shape.
  const makeModel = (id, contextLength, maxCompletionTokens) => ({
    "id": id,
    "name": id,
    "description": "",
    "top_provider": {
      "is_moderated": false,
      "context_length": contextLength,
      "max_completion_tokens": maxCompletionTokens
    },
    "context_length": contextLength,
    "supported_parameters": [],
    "default_parameters": {
      "temperature": null,
      "top_p": null,
      "frequency_penalty": null
    }
  });

  const catalog = [
    makeModel("claude-3-haiku-20240307", 200000, 4096),
    makeModel("claude-3-5-haiku-20241022", 200000, 8192),
    makeModel("claude-sonnet-4-20250514", 200000, 64000),
    makeModel("claude-sonnet-4-5-20250929", 200000, 64000),
    makeModel("claude-haiku-4-5-20251001", 200000, 64000),
    makeModel("claude-opus-4-20250514", 200000, 32768),
    makeModel("gpt-oss-20b", 131072, 131072),
    makeModel("gpt-oss-120b", 131072, 131072),
    makeModel("gpt-5", 400000, 128000),
    makeModel("gpt-5-nano", 400000, 128000),
    makeModel("gpt-5-mini", 400000, 128000),
    makeModel("gpt-4.1", 1047576, 32768),
    makeModel("chatgpt-4o-latest", 128000, 16384),
    makeModel("gemini-2.5-flash-lite", 1048576, 65535),
    makeModel("gemini-2.5-flash", 1048576, 65535),
    makeModel("gemini-2.5-pro", 200000, 65535),
    makeModel("gemini-3-pro-preview", 1048575, 65535)
  ];

  res.json({ object: "list", data: catalog });
});
// Start the HTTP server.
app.listen(PORT, () => {
console.log(`Proxy Server running on port ${PORT}`);
console.log(`Loaded ${UPSTREAM_KEYS.length} upstream keys.`);
});