liuw15 commited on
Commit
f973658
·
1 Parent(s): c3b917d

Reapply "添加指纹请求器,使用useNativeFetch: false启用,并且添加回退机制"

Browse files
config.json CHANGED
@@ -19,5 +19,6 @@
19
  "maxRequestSize": "50mb",
20
  "apiKey": "sk-text"
21
  },
 
22
  "systemInstruction": "你是聊天机器人,名字叫萌萌,如同名字这般,你的性格是软软糯糯萌萌哒的,专门为用户提供聊天和情绪价值,协助进行小说创作或者角色扮演"
23
  }
 
19
  "maxRequestSize": "50mb",
20
  "apiKey": "sk-text"
21
  },
22
+ "useNativeFetch": false,
23
  "systemInstruction": "你是聊天机器人,名字叫萌萌,如同名字这般,你的性格是软软糯糯萌萌哒的,专门为用户提供聊天和情绪价值,协助进行小说创作或者角色扮演"
24
  }
src/AntigravityRequester.js ADDED
@@ -0,0 +1,282 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { spawn } from 'child_process';
2
+ import os from 'os';
3
+ import path from 'path';
4
+ import { fileURLToPath } from 'url';
5
+ import fs from 'fs';
6
+
7
+ const __dirname = path.dirname(fileURLToPath(import.meta.url));
8
+
9
class antigravityRequester {
  /**
   * Client for the bundled "antigravity_requester" fingerprint binary.
   * Speaks newline-delimited JSON over the child's stdin/stdout: each
   * request is one JSON line in; each response (or stream chunk) is one
   * JSON line out, correlated by a generated `id`.
   *
   * @param {object} [options]
   * @param {string} [options.binPath] - Directory containing the platform
   *   binaries; defaults to `<module dir>/bin`.
   * @param {string} [options.executablePath] - Explicit binary path; when
   *   given, platform detection (and the chmod attempt) is skipped.
   */
  constructor(options = {}) {
    this.binPath = options.binPath;
    this.executablePath = options.executablePath || this._getExecutablePath();
    this.proc = null;                    // lazily spawned child process
    this.requestId = 0;                  // monotonic counter for request ids
    this.pendingRequests = new Map();    // id -> {resolve, reject} | {streamResponse}
    this.buffer = '';                    // partial stdout line carried between 'data' events
    this.writeQueue = Promise.resolve(); // serializes writes to the child's stdin
  }

  /**
   * Resolves the bundled binary for the current platform/arch and, on
   * non-Windows systems, tries to make it executable (best effort).
   *
   * @returns {string} Path to the requester binary.
   * @throws {Error} When the platform/arch combination is unsupported.
   */
  _getExecutablePath() {
    const platform = os.platform();
    const arch = os.arch();

    let filename;
    if (platform === 'win32' && arch === 'x64') {
      filename = 'antigravity_requester_windows_amd64.exe';
    } else if (platform === 'android' && arch === 'arm64') {
      filename = 'antigravity_requester_android_arm64';
    } else if (platform === 'linux' && arch === 'x64') {
      filename = 'antigravity_requester_linux_amd64';
    } else {
      throw new Error(`Unsupported platform/architecture: ${platform}/${arch}. Only supports: windows/x64, android/arm64, linux/x64`);
    }

    const binPath = this.binPath || path.join(__dirname, 'bin');
    const requester_execPath = path.join(binPath, filename);
    // Set executable permissions (non-Windows platforms); failures are
    // non-fatal because the binary may already be executable.
    if (platform !== 'win32') {
      try {
        fs.chmodSync(requester_execPath, 0o755);
      } catch (error) {
        console.warn(`Could not set executable permissions: ${error.message}`);
      }
    }
    return requester_execPath;
  }

  /**
   * Spawns the requester subprocess on first use and wires up stdout
   * parsing, stderr logging and close-time cleanup. Safe to call repeatedly.
   */
  _ensureProcess() {
    if (this.proc) return;

    this.proc = spawn(this.executablePath, [], {
      stdio: ['pipe', 'pipe', 'pipe']
    });

    if (this.proc.stdin.setDefaultEncoding) {
      this.proc.stdin.setDefaultEncoding('utf8');
    }

    this.proc.stdout.on('data', (data) => {
      // Accumulate and split on newlines; the last element may be a partial
      // line, so it is carried over to the next 'data' event.
      this.buffer += data.toString();
      const lines = this.buffer.split('\n');
      this.buffer = lines.pop();

      for (const line of lines) {
        if (!line.trim()) continue;
        try {
          const response = JSON.parse(line);
          const pending = this.pendingRequests.get(response.id);
          if (!pending) continue;

          if (pending.streamResponse) {
            // Streaming request: forward every chunk; 'end'/'error' are terminal.
            pending.streamResponse._handleChunk(response);
            if (response.type === 'end' || response.type === 'error') {
              this.pendingRequests.delete(response.id);
            }
          } else {
            // One-shot request: settle the stored promise.
            this.pendingRequests.delete(response.id);
            if (response.ok) {
              pending.resolve(new antigravityResponse(response));
            } else {
              pending.reject(new Error(response.error || 'Request failed'));
            }
          }
        } catch (e) {
          console.error('Failed to parse response:', e);
        }
      }
    });

    this.proc.stderr.on('data', (data) => {
      console.error('antigravityRequester stderr:', data.toString());
    });

    this.proc.on('close', () => {
      // The subprocess died: fail every in-flight request so callers
      // never hang waiting for a response that cannot arrive.
      this.proc = null;
      for (const [, pending] of this.pendingRequests) {
        if (pending.reject) {
          pending.reject(new Error('Process closed'));
        } else if (pending.streamResponse) {
          // Route through the stream's error path so pending text()
          // promises reject too, not only an onError callback.
          pending.streamResponse._handleChunk({ type: 'error', error: 'Process closed' });
        }
      }
      this.pendingRequests.clear();
    });
  }

  /**
   * Performs a one-shot (non-streaming) HTTP request through the subprocess.
   *
   * @param {string} url - Target URL.
   * @param {object} [options] - Fetch-like options (`method`, `headers`,
   *   `body`, `timeout` ms, `proxy`); extra keys are passed through to the
   *   binary's protocol verbatim.
   * @returns {Promise<antigravityResponse>} Resolves with the completed response.
   */
  async antigravity_fetch(url, options = {}) {
    this._ensureProcess();

    const id = `req-${++this.requestId}`;
    const request = {
      url,
      method: options.method || 'GET',
      headers: options.headers,
      body: options.body,
      timeout_ms: options.timeout || 30000,
      proxy: options.proxy,
      response_format: 'text',
      ...options,
      // Set last so a stray `id` key in options cannot clobber the
      // request/response correlation id.
      id
    };

    return new Promise((resolve, reject) => {
      this.pendingRequests.set(id, { resolve, reject });
      this._writeRequest(request);
    });
  }

  /**
   * Performs a streaming HTTP request through the subprocess.
   *
   * @param {string} url - Target URL.
   * @param {object} [options] - Same options as antigravity_fetch().
   * @returns {StreamResponse} Handle for registering chunk callbacks.
   */
  antigravity_fetchStream(url, options = {}) {
    this._ensureProcess();

    const id = `req-${++this.requestId}`;
    const request = {
      url,
      method: options.method || 'GET',
      headers: options.headers,
      body: options.body,
      timeout_ms: options.timeout || 30000,
      proxy: options.proxy,
      ...options,
      // Set last so options cannot clobber the correlation id or disable
      // streaming while a StreamResponse is still handed to the caller.
      stream: true,
      id
    };

    const streamResponse = new StreamResponse(id);
    this.pendingRequests.set(id, { streamResponse });
    this._writeRequest(request);

    return streamResponse;
  }

  /**
   * Queues one request line onto the child's stdin, honouring backpressure.
   * On failure the matching pending request is rejected (previously it was
   * only logged, leaving the caller's promise hanging forever).
   */
  _writeRequest(request) {
    this.writeQueue = this.writeQueue.then(() => {
      return new Promise((resolve, reject) => {
        if (!this.proc || !this.proc.stdin.writable) {
          reject(new Error('Requester process is not running'));
          return;
        }
        const data = JSON.stringify(request) + '\n';
        const onError = (err) => reject(err);
        this.proc.stdin.once('error', onError);
        const canWrite = this.proc.stdin.write(data);
        if (canWrite) {
          // Remove the error listener so it does not accumulate per write.
          this.proc.stdin.removeListener('error', onError);
          resolve();
        } else {
          // Kernel buffer full: wait for 'drain' before the next write.
          this.proc.stdin.once('drain', () => {
            this.proc?.stdin.removeListener('error', onError);
            resolve();
          });
        }
      });
    }).catch((err) => {
      console.error('Write request failed:', err);
      // Fail the associated request instead of leaving it pending forever.
      const pending = this.pendingRequests.get(request.id);
      if (!pending) return;
      this.pendingRequests.delete(request.id);
      if (pending.reject) {
        pending.reject(err);
      } else if (pending.streamResponse) {
        pending.streamResponse._handleChunk({ type: 'error', error: err.message });
      }
    });
  }

  /**
   * Closes the subprocess's stdin, letting it exit; the 'close' handler
   * installed in _ensureProcess() fails any still-pending requests.
   */
  close() {
    if (this.proc) {
      this.proc.stdin.end();
      this.proc = null;
    }
  }
}
178
+
179
class StreamResponse {
  /**
   * Incremental view of one streamed request. Protocol messages arrive via
   * _handleChunk() and are fanned out to whichever callbacks the caller
   * registered with onStart/onData/onEnd/onError; text() resolves with the
   * concatenated body once the stream ends.
   *
   * @param {string} id - Correlation id of the request this stream belongs to.
   */
  constructor(id) {
    this.id = id;
    this.status = null;
    this.statusText = null;
    this.headers = null;
    this.chunks = [];
    this._onStart = null;
    this._onData = null;
    this._onEnd = null;
    this._onError = null;
    this._ended = false;
    this._error = null;
    this._textPromiseResolve = null;
    this._textPromiseReject = null;
  }

  /**
   * Dispatches one protocol message ('start' | 'data' | 'end' | 'error')
   * from the subprocess into state updates and registered callbacks.
   */
  _handleChunk(message) {
    switch (message.type) {
      case 'start': {
        this.status = message.status;
        this.headers = new Map(Object.entries(message.headers || {}));
        this._onStart?.({ status: message.status, headers: this.headers });
        break;
      }
      case 'data': {
        // NOTE(review): base64 chunks are decoded to utf8 individually; a
        // multibyte character split across chunk boundaries could be
        // mangled — assumes the producer never splits mid-character.
        let piece = message.data;
        if (message.encoding === 'base64') {
          piece = Buffer.from(message.data, 'base64').toString('utf8');
        }
        this.chunks.push(piece);
        this._onData?.(piece);
        break;
      }
      case 'end': {
        this._ended = true;
        this._textPromiseResolve?.(this.chunks.join(''));
        this._onEnd?.();
        break;
      }
      case 'error': {
        this._ended = true;
        this._error = new Error(message.error);
        this._textPromiseReject?.(this._error);
        this._onError?.(this._error);
        break;
      }
    }
  }

  /** Registers a callback for the response status/headers. Chainable. */
  onStart(callback) {
    this._onStart = callback;
    return this;
  }

  /** Registers a callback invoked with each decoded body chunk. Chainable. */
  onData(callback) {
    this._onData = callback;
    return this;
  }

  /** Registers a callback invoked when the stream finishes. Chainable. */
  onEnd(callback) {
    this._onEnd = callback;
    return this;
  }

  /** Registers a callback invoked on stream failure. Chainable. */
  onError(callback) {
    this._onError = callback;
    return this;
  }

  /**
   * Entire body as a string: immediate if the stream already ended
   * (throwing any recorded error), otherwise settled when it does.
   */
  async text() {
    if (!this._ended) {
      return new Promise((resolve, reject) => {
        this._textPromiseResolve = resolve;
        this._textPromiseReject = reject;
      });
    }
    if (this._error) throw this._error;
    return this.chunks.join('');
  }
}
250
+
251
class antigravityResponse {
  /**
   * Fetch-like wrapper around one completed (non-streaming) response
   * object emitted by the requester subprocess.
   *
   * @param {object} response - Raw protocol object (`ok`, `status`,
   *   `status_text`, `url`, `headers`, `redirected`, `body`,
   *   `body_encoding`).
   */
  constructor(response) {
    this._response = response;
    this.ok = response.ok;
    this.status = response.status;
    this.statusText = response.status_text;
    this.url = response.url;
    this.headers = new Map(Object.entries(response.headers || {}));
    this.redirected = response.redirected;
  }

  /** Body decoded to a UTF-8 string. */
  async text() {
    const { body, body_encoding } = this._response;
    return body_encoding === 'base64'
      ? Buffer.from(body, 'base64').toString('utf8')
      : body;
  }

  /** Body parsed as JSON. @throws {SyntaxError} on malformed JSON. */
  async json() {
    return JSON.parse(await this.text());
  }

  /** Body as a raw Buffer. */
  async buffer() {
    const { body, body_encoding } = this._response;
    return Buffer.from(body, body_encoding === 'base64' ? 'base64' : 'utf8');
  }
}
281
+
282
+ export default antigravityRequester;
src/api/client.js CHANGED
@@ -1,6 +1,21 @@
1
  import tokenManager from '../auth/token_manager.js';
2
  import config from '../config/config.js';
3
  import { generateToolCallId } from '../utils/idGenerator.js';
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4
 
5
  export async function generateAssistantResponse(requestBody, callback) {
6
  const token = await tokenManager.getToken();
@@ -10,8 +25,114 @@ export async function generateAssistantResponse(requestBody, callback) {
10
  }
11
 
12
  const url = config.api.url;
 
 
 
 
 
 
 
13
 
14
- const response = await fetch(url, {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
15
  method: 'POST',
16
  headers: {
17
  'Host': config.api.host,
@@ -20,31 +141,53 @@ export async function generateAssistantResponse(requestBody, callback) {
20
  'Content-Type': 'application/json',
21
  'Accept-Encoding': 'gzip'
22
  },
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
23
  body: JSON.stringify(requestBody)
24
  });
25
 
 
 
 
 
 
26
  if (!response.ok) {
27
- const errorText = await response.text();
28
- if (response.status === 403) {
29
- tokenManager.disableCurrentToken(token);
30
- throw new Error(`该账号没有使用权限,已自动禁用。错误详情: ${errorText}`);
31
- }
32
- throw new Error(`API请求失败 (${response.status}): ${errorText}`);
33
  }
34
 
35
- const reader = response.body.getReader();
36
- const decoder = new TextDecoder();
37
  let thinkingStarted = false;
38
  let toolCalls = [];
 
 
 
39
 
40
  while (true) {
41
  const { done, value } = await reader.read();
42
  if (done) break;
43
-
44
- const chunk = decoder.decode(value);
45
- const lines = chunk.split('\n').filter(line => line.startsWith('data: '));
46
-
47
- for (const line of lines) {
 
48
  const jsonStr = line.slice(6);
49
  try {
50
  const data = JSON.parse(jsonStr);
@@ -75,8 +218,7 @@ export async function generateAssistantResponse(requestBody, callback) {
75
  }
76
  }
77
  }
78
-
79
- // 当遇到 finishReason 时,发送所有收集的工具调用
80
  if (data.response?.candidates?.[0]?.finishReason && toolCalls.length > 0) {
81
  if (thinkingStarted) {
82
  callback({ type: 'thinking', content: '\n</think>\n' });
@@ -92,34 +234,8 @@ export async function generateAssistantResponse(requestBody, callback) {
92
  }
93
  }
94
 
95
- export async function getAvailableModels() {
96
- const token = await tokenManager.getToken();
97
-
98
- if (!token) {
99
- throw new Error('没有可用的token,请运行 npm run login 获取token');
100
  }
101
-
102
- const response = await fetch(config.api.modelsUrl, {
103
- method: 'POST',
104
- headers: {
105
- 'Host': config.api.host,
106
- 'User-Agent': config.api.userAgent,
107
- 'Authorization': `Bearer ${token.access_token}`,
108
- 'Content-Type': 'application/json',
109
- 'Accept-Encoding': 'gzip'
110
- },
111
- body: JSON.stringify({})
112
- });
113
-
114
- const data = await response.json();
115
-
116
- return {
117
- object: 'list',
118
- data: Object.keys(data.models).map(id => ({
119
- id,
120
- object: 'model',
121
- created: Math.floor(Date.now() / 1000),
122
- owned_by: 'google'
123
- }))
124
- };
125
  }
 
1
  import tokenManager from '../auth/token_manager.js';
2
  import config from '../config/config.js';
3
  import { generateToolCallId } from '../utils/idGenerator.js';
4
import AntigravityRequester from '../AntigravityRequester.js';

// Decide at module load whether to route requests through the fingerprint
// requester subprocess or the runtime's native fetch. `useNativeFetch: true`
// in config forces native fetch; otherwise we try to start the requester
// and fall back to native fetch if its binary cannot be initialized here.
let requester = null;
let useNativeFetch = config.useNativeFetch === true;

if (!useNativeFetch) {
  try {
    requester = new AntigravityRequester();
  } catch (error) {
    console.warn('AntigravityRequester initialization failed, falling back to native fetch:', error.message);
    useNativeFetch = true;
  }
}
19
 
20
  export async function generateAssistantResponse(requestBody, callback) {
21
  const token = await tokenManager.getToken();
 
25
  }
26
 
27
  const url = config.api.url;
28
+ const headers = {
29
+ 'Host': config.api.host,
30
+ 'User-Agent': config.api.userAgent,
31
+ 'Authorization': `Bearer ${token.access_token}`,
32
+ 'Content-Type': 'application/json',
33
+ 'Accept-Encoding': 'gzip'
34
+ };
35
 
36
+ if (useNativeFetch) {
37
+ return await generateWithNativeFetch(url, headers, requestBody, callback, token);
38
+ }
39
+
40
+ const streamResponse = requester.antigravity_fetchStream(url, {
41
+ method: 'POST',
42
+ headers,
43
+ body: JSON.stringify(requestBody)
44
+ });
45
+
46
+ let thinkingStarted = false;
47
+ let toolCalls = [];
48
+ let buffer = '';
49
+ let errorBody = '';
50
+ let statusCode = null;
51
+
52
+ await new Promise((resolve, reject) => {
53
+ streamResponse
54
+ .onStart(({ status }) => {
55
+ statusCode = status;
56
+ if (status === 403) {
57
+ tokenManager.disableCurrentToken(token);
58
+ }
59
+ })
60
+ .onData((chunk) => {
61
+ if (statusCode !== 200) {
62
+ errorBody += chunk;
63
+ return;
64
+ }
65
+
66
+ buffer += chunk;
67
+ const lines = buffer.split('\n');
68
+ buffer = lines.pop();
69
+
70
+ for (const line of lines.filter(l => l.startsWith('data: '))) {
71
+ const jsonStr = line.slice(6);
72
+ try {
73
+ const data = JSON.parse(jsonStr);
74
+ const parts = data.response?.candidates?.[0]?.content?.parts;
75
+ if (parts) {
76
+ for (const part of parts) {
77
+ if (part.thought === true) {
78
+ if (!thinkingStarted) {
79
+ callback({ type: 'thinking', content: '<think>\n' });
80
+ thinkingStarted = true;
81
+ }
82
+ callback({ type: 'thinking', content: part.text || '' });
83
+ } else if (part.text !== undefined) {
84
+ if (thinkingStarted) {
85
+ callback({ type: 'thinking', content: '\n</think>\n' });
86
+ thinkingStarted = false;
87
+ }
88
+ callback({ type: 'text', content: part.text });
89
+ } else if (part.functionCall) {
90
+ toolCalls.push({
91
+ id: part.functionCall.id || generateToolCallId(),
92
+ type: 'function',
93
+ function: {
94
+ name: part.functionCall.name,
95
+ arguments: JSON.stringify(part.functionCall.args)
96
+ }
97
+ });
98
+ }
99
+ }
100
+ }
101
+
102
+ if (data.response?.candidates?.[0]?.finishReason && toolCalls.length > 0) {
103
+ if (thinkingStarted) {
104
+ callback({ type: 'thinking', content: '\n</think>\n' });
105
+ thinkingStarted = false;
106
+ }
107
+ callback({ type: 'tool_calls', tool_calls: toolCalls });
108
+ toolCalls = [];
109
+ }
110
+ } catch (e) {
111
+ // 忽略解析错误
112
+ }
113
+ }
114
+ })
115
+ .onEnd(() => {
116
+ if (statusCode === 403) {
117
+ reject(new Error(`该账号没有使用权限,已自动禁用。错误详情: ${errorBody}`));
118
+ } else if (statusCode !== 200) {
119
+ reject(new Error(`API请求失败 (${statusCode}): ${errorBody}`));
120
+ } else {
121
+ resolve();
122
+ }
123
+ })
124
+ .onError(reject);
125
+ });
126
+ }
127
+
128
+ export async function getAvailableModels() {
129
+ const token = await tokenManager.getToken();
130
+
131
+ if (!token) {
132
+ throw new Error('没有可用的token,请运行 npm run login 获取token');
133
+ }
134
+
135
+ const response = await fetch(config.api.modelsUrl, {
136
  method: 'POST',
137
  headers: {
138
  'Host': config.api.host,
 
141
  'Content-Type': 'application/json',
142
  'Accept-Encoding': 'gzip'
143
  },
144
+ body: JSON.stringify({})
145
+ });
146
+
147
+ const data = await response.json();
148
+
149
+ return {
150
+ object: 'list',
151
+ data: Object.keys(data.models).map(id => ({
152
+ id,
153
+ object: 'model',
154
+ created: Math.floor(Date.now() / 1000),
155
+ owned_by: 'google'
156
+ }))
157
+ };
158
+ }
159
+
160
+ async function generateWithNativeFetch(url, headers, requestBody, callback, token) {
161
+ const response = await fetch(url, {
162
+ method: 'POST',
163
+ headers,
164
  body: JSON.stringify(requestBody)
165
  });
166
 
167
+ if (response.status === 403) {
168
+ tokenManager.disableCurrentToken(token);
169
+ throw new Error('该账号没有使用权限,已自动禁用');
170
+ }
171
+
172
  if (!response.ok) {
173
+ throw new Error(`API请求失败 (${response.status})`);
 
 
 
 
 
174
  }
175
 
 
 
176
  let thinkingStarted = false;
177
  let toolCalls = [];
178
+ const reader = response.body.getReader();
179
+ const decoder = new TextDecoder();
180
+ let buffer = '';
181
 
182
  while (true) {
183
  const { done, value } = await reader.read();
184
  if (done) break;
185
+
186
+ buffer += decoder.decode(value, { stream: true });
187
+ const lines = buffer.split('\n');
188
+ buffer = lines.pop();
189
+
190
+ for (const line of lines.filter(l => l.startsWith('data: '))) {
191
  const jsonStr = line.slice(6);
192
  try {
193
  const data = JSON.parse(jsonStr);
 
218
  }
219
  }
220
  }
221
+
 
222
  if (data.response?.candidates?.[0]?.finishReason && toolCalls.length > 0) {
223
  if (thinkingStarted) {
224
  callback({ type: 'thinking', content: '\n</think>\n' });
 
234
  }
235
  }
236
 
237
+ export function closeRequester() {
238
+ if (requester) {
239
+ requester.close();
 
 
240
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
241
  }
src/bin/antigravity_requester_android_arm64 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:39d953841d55d4673f77fb2a225d76bc202569a3726ed476a7daaa218e3d4bd6
3
+ size 8192353
src/bin/antigravity_requester_linux_amd64 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9f362ec3859dc5d189f67de7d7426ab88e9c84819697dca2e3e7ac3d6b1a0846
3
+ size 7930040
src/bin/antigravity_requester_windows_amd64.exe ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a755b4a7cb2bdadd278c5701c13e3d9bcd03d0051df66f6b5a4745e81f229a77
3
+ size 8096768
src/config/config.js CHANGED
@@ -11,6 +11,7 @@ const defaultConfig = {
11
  },
12
  defaults: { temperature: 1, top_p: 0.85, top_k: 50, max_tokens: 8096 },
13
  security: { maxRequestSize: '50mb', apiKey: null },
 
14
  systemInstruction: '你是聊天机器人,专门为用户提供聊天和情绪价值,协助进行小说创作或者角色扮演,也可以提供数学或者代码上的建议'
15
  };
16
 
 
11
  },
12
  defaults: { temperature: 1, top_p: 0.85, top_k: 50, max_tokens: 8096 },
13
  security: { maxRequestSize: '50mb', apiKey: null },
14
+ useNativeFetch: false,
15
  systemInstruction: '你是聊天机器人,专门为用户提供聊天和情绪价值,协助进行小说创作或者角色扮演,也可以提供数学或者代码上的建议'
16
  };
17
 
src/server/index.js CHANGED
@@ -1,5 +1,5 @@
1
  import express from 'express';
2
- import { generateAssistantResponse, getAvailableModels } from '../api/client.js';
3
  import { generateRequestBody } from '../utils/utils.js';
4
  import logger from '../utils/logger.js';
5
  import config from '../config/config.js';
@@ -177,6 +177,7 @@ server.on('error', (error) => {
177
 
178
  const shutdown = () => {
179
  logger.info('正在关闭服务器...');
 
180
  server.close(() => {
181
  logger.info('服务器已关闭');
182
  process.exit(0);
 
1
  import express from 'express';
2
+ import { generateAssistantResponse, getAvailableModels, closeRequester } from '../api/client.js';
3
  import { generateRequestBody } from '../utils/utils.js';
4
  import logger from '../utils/logger.js';
5
  import config from '../config/config.js';
 
177
 
178
  const shutdown = () => {
179
  logger.info('正在关闭服务器...');
180
+ closeRequester();
181
  server.close(() => {
182
  logger.info('服务器已关闭');
183
  process.exit(0);