File size: 14,372 Bytes
4cb1301
 
f973658
feee929
0fadeb3
c801c74
 
4bf0fdd
c801c74
4bf0fdd
 
 
 
 
 
 
a0b676c
f973658
2e9246b
f973658
2e9246b
f973658
0fadeb3
 
 
4bf0fdd
0fadeb3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
a6d2f09
 
0fadeb3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2e9246b
 
 
f973658
 
 
2e9246b
 
f973658
 
10feaec
4bf0fdd
0fadeb3
4bf0fdd
 
873f10f
0fadeb3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2e9246b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0937160
2e9246b
 
c801c74
2e9246b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
687d134
0937160
687d134
2e9246b
0937160
2e9246b
 
0937160
2e9246b
 
407d303
2e9246b
 
feee929
30e53c1
2e9246b
a0b676c
 
 
 
 
 
 
0fadeb3
701685c
 
0fadeb3
 
 
 
701685c
 
0fadeb3
 
ec6e4a1
 
 
 
 
 
ddd87f2
0fadeb3
 
 
 
 
2e9246b
0fadeb3
 
 
 
2e9246b
 
0fadeb3
ddd87f2
 
 
701685c
ddd87f2
 
 
0fadeb3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ddd87f2
 
 
0fadeb3
 
 
701685c
f973658
 
873f10f
 
 
 
ec6e4a1
 
 
 
 
 
873f10f
 
 
 
 
 
 
 
 
 
 
 
 
f973658
0fadeb3
 
 
 
 
 
 
f973658
0fadeb3
 
 
 
 
f973658
2e9246b
873f10f
 
 
 
 
 
 
 
 
 
 
 
 
f973658
873f10f
 
 
 
 
 
2e9246b
0fadeb3
2e9246b
873f10f
0fadeb3
2e9246b
873f10f
 
 
 
 
 
 
 
 
 
 
 
 
f973658
 
0fadeb3
 
 
 
 
 
 
3c5c286
 
873f10f
 
 
 
 
 
 
 
 
 
3c5c286
873f10f
 
 
3c5c286
 
feee929
407d303
2e9246b
 
701685c
2e9246b
 
ec6e4a1
 
 
 
 
 
2e9246b
 
 
 
ddd87f2
2e9246b
 
407d303
2e9246b
 
407d303
3c5c286
2e9246b
 
407d303
0fadeb3
a0b676c
407d303
feee929
407d303
 
 
0fadeb3
 
a0b676c
 
 
407d303
 
 
a0b676c
 
 
 
 
ddd87f2
feee929
 
 
10feaec
 
407d303
3c5c286
 
 
 
 
 
 
 
a0b676c
 
 
 
 
 
 
 
 
 
 
 
 
 
feee929
 
ddd87f2
feee929
a0b676c
ddd87f2
 
a0b676c
10feaec
 
e6f591e
 
 
 
 
 
 
ec6e4a1
 
 
 
 
 
e6f591e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f973658
2e9246b
10feaec
0fadeb3
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
import tokenManager from '../auth/token_manager.js';
import config from '../config/config.js';
import AntigravityRequester from '../AntigravityRequester.js';
import { saveBase64Image } from '../utils/imageStorage.js';
import logger from '../utils/logger.js';
import memoryManager, { MemoryPressure } from '../utils/memoryManager.js';
import { httpRequest, httpStreamRequest } from '../utils/httpClient.js';
import { MODEL_LIST_CACHE_TTL } from '../constants/index.js';
import { createApiError } from '../utils/errors.js';
import {
  getLineBuffer,
  releaseLineBuffer,
  parseAndEmitStreamChunk,
  convertToToolCall,
  registerStreamMemoryCleanup
} from './stream_parser.js';
import { setReasoningSignature, setToolSignature } from '../utils/thoughtSignatureCache.js';

// HTTP client selection state: prefer the native AntigravityRequester; fall back
// to the axios-based helpers when it is unavailable (decided in the init block below).
let requester = null;
let useAxios = false;

// ==================== Model-list cache (adaptive TTL) ====================
// The cache lifetime shrinks as memory pressure rises, so stale data is
// dropped sooner when the process is under load.
const getModelCacheTTL = () => {
  const configured = config.cache?.modelListTTL || MODEL_LIST_CACHE_TTL;
  switch (memoryManager.currentPressure) {
    case MemoryPressure.CRITICAL:
      return Math.min(configured, 5 * 60 * 1000);   // cap at 5 minutes
    case MemoryPressure.HIGH:
      return Math.min(configured, 15 * 60 * 1000);  // cap at 15 minutes
    default:
      return configured;
  }
};

// Cached model-list response and the timestamp at which it was stored.
let modelListCache = null;
let modelListCacheTime = 0;

// Default model IDs, used as a fallback when the upstream /models request fails
// and merged into successful responses so these IDs are always advertised.
const DEFAULT_MODELS = [
  'claude-opus-4-5',
  'claude-opus-4-5-thinking',
  'claude-sonnet-4-5-thinking',
  'claude-sonnet-4-5',
  'gemini-3-pro-high',
  'gemini-2.5-flash-lite',
  'gemini-3-pro-image',
  'gemini-3-pro-image-4K',
  'gemini-3-pro-image-2K',
  'gemini-2.5-flash-thinking',
  'gemini-2.5-pro',
  'gemini-2.5-flash',
  'gemini-3-pro-low',
  'chat_20706',
  'rev19-uic3-1p',
  'gpt-oss-120b-medium',
  'chat_23310'
];

// Build an OpenAI-style model-list response from the built-in defaults
// (served when the upstream API cannot be queried).
function getDefaultModelList() {
  const createdAt = Math.floor(Date.now() / 1000);
  const data = DEFAULT_MODELS.map((modelId) => ({
    id: modelId,
    object: 'model',
    created: createdAt,
    owned_by: 'google'
  }));
  return { object: 'list', data };
}

// Choose the HTTP client once at module load: honour the explicit config flag,
// otherwise try the native requester and degrade to axios if construction fails.
if (config.useNativeAxios === true) {
  useAxios = true;
} else {
  try {
    requester = new AntigravityRequester();
  } catch (error) {
    console.warn('AntigravityRequester 初始化失败,降级使用 axios:', error.message);
    useAxios = true;
  }
}

// Register memory-pressure cleanup for the object pools and the model-list cache.
function registerMemoryCleanup() {
  // The stream-parser module trims its own object pools.
  registerStreamMemoryCleanup();

  memoryManager.registerCleanup((pressure) => {
    const elevated = pressure === MemoryPressure.HIGH || pressure === MemoryPressure.CRITICAL;

    // Under HIGH or CRITICAL pressure, evict the model cache once it has expired.
    if (elevated && modelListCache && (Date.now() - modelListCacheTime) > getModelCacheTTL()) {
      modelListCache = null;
      modelListCacheTime = 0;
      logger.info('已清理过期模型列表缓存');
    }

    // Under CRITICAL pressure, evict the cache even while it is still fresh.
    if (pressure === MemoryPressure.CRITICAL && modelListCache) {
      modelListCache = null;
      modelListCacheTime = 0;
      logger.info('紧急清理模型列表缓存');
    }
  });
}

// Install the cleanup callbacks at module load.
registerMemoryCleanup();

// ==================== Helpers ====================

// Assemble the standard upstream request headers for the given OAuth token.
function buildHeaders(token) {
  const headers = {};
  headers['Host'] = config.api.host;
  headers['User-Agent'] = config.api.userAgent;
  headers['Authorization'] = `Bearer ${token.access_token}`;
  headers['Content-Type'] = 'application/json';
  headers['Accept-Encoding'] = 'gzip';
  return headers;
}

// Build the options object consumed by AntigravityRequester.
// A non-null body is serialized to JSON; null means "send no body".
function buildRequesterConfig(headers, body = null) {
  const options = {
    method: 'POST',
    headers,
    timeout_ms: config.timeout,
    proxy: config.proxy
  };
  return body === null ? options : { ...options, body: JSON.stringify(body) };
}


// Unified error handling: normalize an upstream failure into a thrown ApiError.
// Always throws, never returns. `error` may come from the axios helpers
// (error.response) or from AntigravityRequester (a plain { status, message }
// rejection); both shapes are probed below.
async function handleApiError(error, token) {
  const status = error.response?.status || error.status || error.statusCode || 500;
  let errorBody = error.message;

  // Streaming axios responses deliver the error payload as a readable stream;
  // drain it fully before reporting.
  if (error.response?.data?.readable) {
    const chunks = [];
    for await (const chunk of error.response.data) {
      chunks.push(chunk);
    }
    errorBody = Buffer.concat(chunks).toString();
  } else if (typeof error.response?.data === 'object') {
    errorBody = JSON.stringify(error.response.data, null, 2);
  } else if (error.response?.data) {
    errorBody = error.response.data;
  }

  if (status === 403) {
    // "The caller does not..." indicates a per-request limit (e.g. context too
    // large), not a revoked account — keep the token usable in that case.
    if (JSON.stringify(errorBody).includes("The caller does not")){
      throw createApiError(`超出模型最大上下文。错误详情: ${errorBody}`, status, errorBody);
    }
    // Any other 403 means the account lacks access; disable it so the pool skips it.
    tokenManager.disableCurrentToken(token);
    throw createApiError(`该账号没有使用权限,已自动禁用。错误详情: ${errorBody}`, status, errorBody);
  }

  throw createApiError(`API请求失败 (${status}): ${errorBody}`, status, errorBody);
}


// ==================== Exported functions ====================

/**
 * Stream a chat completion from the upstream API, emitting parsed events
 * through `callback` as they arrive.
 *
 * Fix: the pooled line buffer was previously released twice when a stream
 * ended with a non-200 status (or errored after 'end') — once in the stream
 * completion handler and again in the catch block — which could push the same
 * buffer into the object pool twice. Release is now idempotent.
 *
 * @param {object} requestBody - Upstream payload (model, request.sessionId, ...).
 * @param {object} token - Token record; token.access_token is sent as a Bearer header.
 * @param {Function} callback - Receives each parsed stream event.
 * @throws ApiError (via handleApiError) on any transport or upstream failure.
 */
export async function generateAssistantResponse(requestBody, token, callback) {

  const headers = buildHeaders(token);
  // Per-request parse state: caches the reasoning signature for reuse across
  // stream fragments and carries session/model info for the global signature cache.
  const state = {
    toolCalls: [],
    reasoningSignature: null,
    sessionId: requestBody.request?.sessionId,
    model: requestBody.model
  };
  const lineBuffer = getLineBuffer(); // borrowed from the object pool

  // Guard so the buffer is returned to the pool exactly once, regardless of
  // which of the end/error paths runs (or if both run).
  let bufferReleased = false;
  const releaseBufferOnce = () => {
    if (!bufferReleased) {
      bufferReleased = true;
      releaseLineBuffer(lineBuffer);
    }
  };

  const processChunk = (chunk) => {
    const lines = lineBuffer.append(chunk);
    for (let i = 0; i < lines.length; i++) {
      parseAndEmitStreamChunk(lines[i], state, callback);
    }
  };

  try {
    if (useAxios) {
      const response = await httpStreamRequest({
        method: 'POST',
        url: config.api.url,
        headers,
        data: requestBody
      });

      // Decode buffers explicitly to avoid implicit toString allocations downstream.
      response.data.on('data', chunk => {
        processChunk(typeof chunk === 'string' ? chunk : chunk.toString('utf8'));
      });

      await new Promise((resolve, reject) => {
        response.data.on('end', () => {
          releaseBufferOnce();
          resolve();
        });
        response.data.on('error', reject);
      });
    } else {
      const streamResponse = requester.antigravity_fetchStream(config.api.url, buildRequesterConfig(headers, requestBody));
      let errorBody = '';
      let statusCode = null;

      await new Promise((resolve, reject) => {
        streamResponse
          .onStart(({ status }) => { statusCode = status; })
          .onData((chunk) => {
            // On a non-200 status the stream carries an error payload, not SSE data.
            if (statusCode !== 200) {
              errorBody += chunk;
            } else {
              processChunk(chunk);
            }
          })
          .onEnd(() => {
            releaseBufferOnce();
            if (statusCode !== 200) {
              // Preserve the { status, message } shape handleApiError expects.
              reject({ status: statusCode, message: errorBody });
            } else {
              resolve();
            }
          })
          .onError(reject);
      });
    }
  } catch (error) {
    releaseBufferOnce(); // no-op if a stream handler already returned the buffer
    await handleApiError(error, token);
  }
}

// Internal helper: fetch the raw model catalogue from the upstream API.
// Throws (via handleApiError) on any failure.
async function fetchRawModels(headers, token) {
  try {
    if (!useAxios) {
      const response = await requester.antigravity_fetch(config.api.modelsUrl, buildRequesterConfig(headers, {}));
      if (response.status !== 200) {
        throw { status: response.status, message: await response.text() };
      }
      return await response.json();
    }
    const response = await httpRequest({
      method: 'POST',
      url: config.api.modelsUrl,
      headers,
      data: {}
    });
    return response.data;
  } catch (error) {
    await handleApiError(error, token);
  }
}

/**
 * Return the OpenAI-style model list, served from cache while fresh.
 *
 * Fix: the original comment promised a fallback to the default model list when
 * the upstream request failed, but fetchRawModels always throws through
 * handleApiError, so the `if (!data)` fallback was unreachable and callers got
 * an exception instead. Failures are now caught and the default list is
 * returned, matching the documented intent.
 *
 * @returns {Promise<{object: string, data: Array<object>}>}
 */
export async function getAvailableModels() {
  // Serve from cache while it is within the (pressure-dependent) TTL.
  const now = Date.now();
  const ttl = getModelCacheTTL();
  if (modelListCache && (now - modelListCacheTime) < ttl) {
    return modelListCache;
  }

  const token = await tokenManager.getToken();
  if (!token) {
    // No usable token: fall back to the built-in defaults (not cached).
    logger.warn('没有可用的 token,返回默认模型列表');
    return getDefaultModelList();
  }

  const headers = buildHeaders(token);
  let data;
  try {
    data = await fetchRawModels(headers, token);
  } catch (error) {
    // Upstream failure: degrade to the default list instead of surfacing the error.
    logger.warn(`获取模型列表失败,返回默认模型列表: ${error.message}`);
    return getDefaultModelList();
  }
  if (!data) {
    // Defensive: fetchRawModels should either return data or throw.
    return getDefaultModelList();
  }

  const created = Math.floor(Date.now() / 1000);
  const modelList = Object.keys(data.models || {}).map(id => ({
    id,
    object: 'model',
    created,
    owned_by: 'google'
  }));

  // Merge in any default models the API response did not include.
  const existingIds = new Set(modelList.map(m => m.id));
  for (const defaultModel of DEFAULT_MODELS) {
    if (!existingIds.has(defaultModel)) {
      modelList.push({
        id: defaultModel,
        object: 'model',
        created,
        owned_by: 'google'
      });
    }
  }

  const result = {
    object: 'list',
    data: modelList
  };

  // Refresh the cache and log the effective TTL.
  modelListCache = result;
  modelListCacheTime = now;
  logger.info(`模型列表已缓存 (有效期: ${ttl / 1000}秒, 模型数量: ${modelList.length})`);

  return result;
}

// Drop the cached model list so the next getAvailableModels() call refetches it.
export function clearModelListCache() {
  modelListCacheTime = 0;
  modelListCache = null;
  logger.info('模型列表缓存已清除');
}

// Fetch per-model quota info for a token.
// Returns { modelId: { r: remainingFraction, t: resetTime } } for models that
// report quota data; {} when no data could be fetched.
export async function getModelsWithQuotas(token) {
  const data = await fetchRawModels(buildHeaders(token), token);
  if (!data) return {};

  const quotas = {};
  for (const [modelId, modelData] of Object.entries(data.models || {})) {
    const info = modelData.quotaInfo;
    if (info) {
      quotas[modelId] = { r: info.remainingFraction, t: info.resetTime };
    }
  }
  return quotas;
}

/**
 * Non-streaming chat completion: POST the request, then parse the response
 * parts into text, chain-of-thought, tool calls, and images.
 *
 * @param {object} requestBody - Upstream payload (model, request.sessionId, ...).
 * @param {object} token - Token record used for auth headers.
 * @returns {Promise<{content: string, reasoningContent: ?string,
 *   reasoningSignature: ?string, toolCalls: Array, usage: ?object}>}
 * @throws ApiError (via handleApiError) on any transport or upstream failure.
 */
export async function generateAssistantResponseNoStream(requestBody, token) {
  
  const headers = buildHeaders(token);
  let data;
  
  try {
    if (useAxios) {
      data = (await httpRequest({
        method: 'POST',
        url: config.api.noStreamUrl,
        headers,
        data: requestBody
      })).data;
    } else {
      const response = await requester.antigravity_fetch(config.api.noStreamUrl, buildRequesterConfig(headers, requestBody));
      if (response.status !== 200) {
        const errorBody = await response.text();
        throw { status: response.status, message: errorBody };
      }
      data = await response.json();
    }
  } catch (error) {
    await handleApiError(error, token);
  }
  //console.log(JSON.stringify(data));
  // Parse the response content from the first candidate's parts.
  const parts = data.response?.candidates?.[0]?.content?.parts || [];
  let content = '';
  let reasoningContent = '';
  let reasoningSignature = null;
  const toolCalls = [];
  const imageUrls = [];
  
  for (const part of parts) {
    if (part.thought === true) {
      // Chain-of-thought content — exposed as DeepSeek-style reasoning_content.
      reasoningContent += part.text || '';
      // Keep only the first thought signature encountered.
      if (part.thoughtSignature && !reasoningSignature) {
        reasoningSignature = part.thoughtSignature;
      }
    } else if (part.text !== undefined) {
      content += part.text;
    } else if (part.functionCall) {
      const toolCall = convertToToolCall(part.functionCall, requestBody.request?.sessionId, requestBody.model);
      if (part.thoughtSignature) {
        toolCall.thoughtSignature = part.thoughtSignature;
      }
      toolCalls.push(toolCall);
    } else if (part.inlineData) {
      // Persist the image locally and record its URL.
      const imageUrl = saveBase64Image(part.inlineData.data, part.inlineData.mimeType);
      imageUrls.push(imageUrl);
    }
  }
  
  // Extract token-usage statistics when the upstream reports them.
  const usage = data.response?.usageMetadata;
  const usageData = usage ? {
    prompt_tokens: usage.promptTokenCount || 0,
    completion_tokens: usage.candidatesTokenCount || 0,
    total_tokens: usage.totalTokenCount || 0
  } : null;
  
  // Write fresh signatures into the global cache (keyed by sessionId + model)
  // so later requests can fall back to them.
  const sessionId = requestBody.request?.sessionId;
  const model = requestBody.model;
  if (sessionId && model) {
    if (reasoningSignature) {
      setReasoningSignature(sessionId, model, reasoningSignature);
    }
    // Tool signature: the first tool call carrying a thoughtSignature seeds the cache.
    const toolSig = toolCalls.find(tc => tc.thoughtSignature)?.thoughtSignature;
    if (toolSig) {
      setToolSignature(sessionId, model, toolSig);
    }
  }

  // Image-generation models: render the images as markdown links.
  if (imageUrls.length > 0) {
    let markdown = content ? content + '\n\n' : '';
    markdown += imageUrls.map(url => `![image](${url})`).join('\n\n');
    return { content: markdown, reasoningContent: reasoningContent || null, reasoningSignature, toolCalls, usage: usageData };
  }
  
  return { content, reasoningContent: reasoningContent || null, reasoningSignature, toolCalls, usage: usageData };
}

// Request an image generation (SD-compatible path) and return the raw base64
// payload of every inline image in the first candidate.
export async function generateImageForSD(requestBody, token) {
  const headers = buildHeaders(token);
  let data;

  try {
    if (!useAxios) {
      const response = await requester.antigravity_fetch(config.api.noStreamUrl, buildRequesterConfig(headers, requestBody));
      if (response.status !== 200) {
        throw { status: response.status, message: await response.text() };
      }
      data = await response.json();
    } else {
      const response = await httpRequest({
        method: 'POST',
        url: config.api.noStreamUrl,
        headers,
        data: requestBody
      });
      data = response.data;
    }
  } catch (error) {
    await handleApiError(error, token);
  }

  const parts = data.response?.candidates?.[0]?.content?.parts || [];
  const images = [];
  for (const part of parts) {
    if (part.inlineData) {
      images.push(part.inlineData.data);
    }
  }
  return images;
}

// Shut down the native requester if one was created at module load.
export function closeRequester() {
  if (requester !== null) {
    requester.close();
  }
}

// Export the memory-cleanup registration hook for external callers.
export { registerMemoryCleanup };