GeminiBot committed on
Commit
950df70
·
1 Parent(s): 3727a06

Implement connection heartbeats and safer 500ms request gap

Browse files
Files changed (2) hide show
  1. src/duckai.ts +1 -1
  2. src/server.ts +36 -23
src/duckai.ts CHANGED
@@ -7,7 +7,7 @@ import UserAgent from "user-agents";
7
  let activeRequests = 0;
8
  const MAX_CONCURRENT = 50; // Сколько всего запросов может "висеть" одновременно
9
  let lastRequestStartTime = 0;
10
- const MIN_GAP_MS = 300; // ЖЕСТКИЙ ИНТЕРВАЛ: 3.3 запроса в секунду (безопасный порог для одного IP)
11
 
12
  export class DuckAI {
13
 
 
7
  let activeRequests = 0;
8
  const MAX_CONCURRENT = 50; // Сколько всего запросов может "висеть" одновременно
9
  let lastRequestStartTime = 0;
10
+ const MIN_GAP_MS = 500; // ЖЕСТКИЙ ИНТЕРВАЛ: 2 запроса в секунду (МАКСИМАЛЬНАЯ БЕЗОПАСНОСТЬ)
11
 
12
  export class DuckAI {
13
 
src/server.ts CHANGED
@@ -84,30 +84,43 @@ const server = createServer(async (req: IncomingMessage, res: ServerResponse) =>
84
  return sendJSON(200, { status: "online", model: "distributed-v1" });
85
  }
86
 
87
- // Chat Completions
88
- if (url.pathname === "/v1/chat/completions" && method === "POST") {
89
- let body = "";
90
- req.on("data", (chunk) => { body += chunk; });
91
- req.on("end", async () => {
92
- try {
93
- // Log full request for debugging
94
- log(`INCOMING REQUEST:\n${body}`);
95
-
96
- const jsonBody = JSON.parse(body);
97
- const completion = await openAIService.createChatCompletion(jsonBody);
98
-
99
- // Log success summary
100
- log(`SUCCESS: Sent response for ${jsonBody.model}`);
101
-
102
- sendJSON(200, completion);
103
- } catch (error: any) {
104
- log(`ERROR: ${error.message}\nSTACK: ${error.stack}`);
105
- sendJSON(500, { error: error.message, details: error.stack });
106
- }
107
- });
108
- return;
109
- }
110
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
111
  // === DIAGNOSTIC ENDPOINT ===
112
  if (url.pathname === "/v1/diagnose" && method === "GET") {
113
  log("RUNNING SYSTEM DIAGNOSTICS...");
 
84
  return sendJSON(200, { status: "online", model: "distributed-v1" });
85
  }
86
 
87
+ // Chat Completions
88
+ if (url.pathname === "/v1/chat/completions" && method === "POST") {
89
+ let body = "";
90
+ req.on("data", (chunk) => { body += chunk; });
91
+ req.on("end", async () => {
92
+ // Устанавливаем заголовки сразу, чтобы начать стрим пустых байтов (Heartbeat)
93
+ res.writeHead(200, {
94
+ "Content-Type": "application/json",
95
+ ...corsHeaders,
96
+ "X-Content-Type-Options": "nosniff"
97
+ });
 
 
 
 
 
 
 
 
 
 
 
 
98
 
99
+ // Запускаем "перекличку" (Heartbeat)
100
+ const heartbeat = setInterval(() => {
101
+ res.write("\n"); // Шлем невидимый байт для поддержания связи
102
+ }, 5000);
103
+
104
+ try {
105
+ log(`INCOMING REQUEST (Size: ${body.length})`);
106
+
107
+ const jsonBody = JSON.parse(body);
108
+ const completion = await openAIService.createChatCompletion(jsonBody);
109
+
110
+ clearInterval(heartbeat);
111
+ log(`SUCCESS: Sent response for ${jsonBody.model}`);
112
+
113
+ res.write(JSON.stringify(completion));
114
+ res.end();
115
+ } catch (error: any) {
116
+ clearInterval(heartbeat);
117
+ log(`ERROR: ${error.message}`);
118
+ res.write(JSON.stringify({ error: error.message, details: error.stack }));
119
+ res.end();
120
+ }
121
+ });
122
+ return;
123
+ }
124
  // === DIAGNOSTIC ENDPOINT ===
125
  if (url.pathname === "/v1/diagnose" && method === "GET") {
126
  log("RUNNING SYSTEM DIAGNOSTICS...");