简单兼容一下sd的接口格式,方便工具调用
Browse files- src/api/client.js +26 -0
- src/server/index.js +72 -1
src/api/client.js
CHANGED
|
@@ -339,6 +339,32 @@ export async function generateAssistantResponseNoStream(requestBody, token) {
|
|
| 339 |
return { content, toolCalls, usage: usageData };
|
| 340 |
}
|
| 341 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 342 |
export function closeRequester() {
|
| 343 |
if (requester) requester.close();
|
| 344 |
}
|
|
|
|
| 339 |
return { content, toolCalls, usage: usageData };
|
| 340 |
}
|
| 341 |
|
| 342 |
+
/**
 * Generate images through the upstream non-stream endpoint and return them in a
 * Stable-Diffusion-friendly shape: an array of base64-encoded image strings.
 *
 * @param {object} requestBody - Fully built upstream request payload.
 * @param {string} token - Auth token used to build the request headers.
 * @returns {Promise<string[]>} base64 image data pulled from the response parts;
 *   empty array when the response carries no inline image data.
 */
export async function generateImageForSD(requestBody, token) {
  const headers = buildHeaders(token);
  let data;

  try {
    if (useAxios) {
      data = (await axios(buildAxiosConfig(config.api.noStreamUrl, headers, requestBody))).data;
    } else {
      const response = await requester.antigravity_fetch(config.api.noStreamUrl, buildRequesterConfig(headers, requestBody));
      if (response.status !== 200) {
        const errorBody = await response.text();
        // NOTE(review): plain-object throw kept as-is for compatibility with
        // handleApiError, which presumably inspects `.status` — confirm before
        // converting to an Error subclass.
        throw { status: response.status, message: errorBody };
      }
      data = await response.json();
    }
  } catch (error) {
    await handleApiError(error, token);
  }

  // Fix: if handleApiError swallows the error instead of rethrowing, `data`
  // is still undefined; optional-chain from `data` itself so we return []
  // rather than crash with a TypeError on `data.response`.
  const parts = data?.response?.candidates?.[0]?.content?.parts ?? [];
  const images = parts.filter((p) => p.inlineData).map((p) => p.inlineData.data);

  return images;
}
|
| 367 |
+
|
| 368 |
// Shut down the shared requester connection, if one was ever created.
export function closeRequester() {
  if (requester) {
    requester.close();
  }
}
|
src/server/index.js
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
import express from 'express';
|
| 2 |
import path from 'path';
|
| 3 |
import { fileURLToPath } from 'url';
|
| 4 |
-
import { generateAssistantResponse, generateAssistantResponseNoStream, getAvailableModels, closeRequester } from '../api/client.js';
|
| 5 |
import { generateRequestBody } from '../utils/utils.js';
|
| 6 |
import logger from '../utils/logger.js';
|
| 7 |
import config from '../config/config.js';
|
|
@@ -98,6 +98,76 @@ app.get('/v1/models', async (req, res) => {
|
|
| 98 |
}
|
| 99 |
});
|
| 100 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 101 |
|
| 102 |
|
| 103 |
app.post('/v1/chat/completions', async (req, res) => {
|
|
@@ -133,6 +203,7 @@ app.post('/v1/chat/completions', async (req, res) => {
|
|
| 133 |
setStreamHeaders(res);
|
| 134 |
|
| 135 |
if (isImageModel) {
|
|
|
|
| 136 |
const { content, usage } = await generateAssistantResponseNoStream(requestBody, token);
|
| 137 |
writeStreamData(res, createStreamChunk(id, created, model, { content }));
|
| 138 |
writeStreamData(res, { ...createStreamChunk(id, created, model, {}, 'stop'), usage });
|
|
|
|
| 1 |
import express from 'express';
|
| 2 |
import path from 'path';
|
| 3 |
import { fileURLToPath } from 'url';
|
| 4 |
+
import { generateAssistantResponse, generateAssistantResponseNoStream, getAvailableModels, generateImageForSD, closeRequester } from '../api/client.js';
|
| 5 |
import { generateRequestBody } from '../utils/utils.js';
|
| 6 |
import logger from '../utils/logger.js';
|
| 7 |
import config from '../config/config.js';
|
|
|
|
| 98 |
}
|
| 99 |
});
|
| 100 |
|
| 101 |
+
// ==================== Stable Diffusion API ====================
|
| 102 |
+
|
| 103 |
+
// SD-webui compatibility: expose image-capable upstream models in the
// `/sdapi/v1/sd-models` response format (title/model_name/hash/...).
app.get('/sdapi/v1/sd-models', async (req, res) => {
  try {
    const models = await getAvailableModels();

    const imageModels = [];
    for (const m of models.data) {
      // Only models whose id marks them as image generators.
      if (!m.id.includes('-image')) continue;
      imageModels.push({
        title: m.id,
        model_name: m.id,
        hash: null,
        sha256: null,
        filename: m.id,
        config: null,
      });
    }

    res.json(imageModels);
  } catch (error) {
    logger.error('获取SD模型列表失败:', error.message);
    res.status(500).json({ error: error.message });
  }
});
|
| 122 |
+
|
| 123 |
+
// SD-webui compatibility: accept a txt2img request and answer with the
// standard `{ images, parameters, info }` payload (base64 images).
app.post('/sdapi/v1/txt2img', async (req, res) => {
  const { prompt, negative_prompt, steps, cfg_scale, width, height, seed, sampler_name } = req.body;

  try {
    if (!prompt) {
      return res.status(400).json({ error: 'prompt is required' });
    }

    const token = await tokenManager.getToken();
    if (!token) {
      throw new Error('没有可用的token');
    }

    // Generalization: honor the SD-webui convention of overriding the
    // checkpoint per-request; fall back to the original hard-coded model.
    const model = req.body.override_settings?.sd_model_checkpoint || 'gemini-3-pro-image';
    const messages = [{ role: 'user', content: prompt }];
    const requestBody = generateRequestBody(messages, model, {}, null, token);

    // Image generation takes no system prompt or tools; strip them and mark
    // the request type so the upstream treats it as image_gen.
    requestBody.request.generationConfig = { candidateCount: 1 };
    requestBody.requestType = 'image_gen';
    delete requestBody.request.systemInstruction;
    delete requestBody.request.tools;
    delete requestBody.request.toolConfig;

    const images = await generateImageForSD(requestBody, token);

    // Fix: also guard against an undefined/null result (e.g. the client's
    // error handler swallowed a failure), not just an empty array.
    if (!images || images.length === 0) {
      throw new Error('未生成图片');
    }

    res.json({
      images,
      parameters: { prompt, negative_prompt, steps, cfg_scale, width, height, seed, sampler_name },
      info: JSON.stringify({ prompt, seed: seed ?? -1 }),
    });
  } catch (error) {
    logger.error('SD生图失败:', error.message);
    res.status(500).json({ error: error.message });
  }
});
|
| 162 |
+
|
| 163 |
+
// SD-webui compatibility: static options payload so UI tools can read the
// "active checkpoint" without a real SD backend.
app.get('/sdapi/v1/options', (req, res) => {
  const options = {
    sd_model_checkpoint: 'gemini-3-pro-image',
    sd_vae: 'auto',
    CLIP_stop_at_last_layers: 1,
  };
  res.json(options);
});
|
| 170 |
+
|
| 171 |
|
| 172 |
|
| 173 |
app.post('/v1/chat/completions', async (req, res) => {
|
|
|
|
| 203 |
setStreamHeaders(res);
|
| 204 |
|
| 205 |
if (isImageModel) {
|
| 206 |
+
//console.log(JSON.stringify(requestBody,null,2));
|
| 207 |
const { content, usage } = await generateAssistantResponseNoStream(requestBody, token);
|
| 208 |
writeStreamData(res, createStreamChunk(id, created, model, { content }));
|
| 209 |
writeStreamData(res, { ...createStreamChunk(id, created, model, {}, 'stop'), usage });
|