magicboris committed on
Commit
d31db84
·
verified ·
1 Parent(s): 04b83cf

Update server.js

Browse files
Files changed (1) hide show
  1. server.js +73 -110
server.js CHANGED
@@ -1,48 +1,53 @@
1
- /**
2
- * Helix ↔︎ OpenAI 代理(全路径 + 流式过滤)
3
- * 2025‑04‑19
4
- */
5
- addEventListener('fetch', e => e.respondWith(router(e.request)));
6
-
7
- async function router(req) {
8
- const url = new URL(req.url);
9
- const p = url.pathname.replace(/\/+$/, '');
10
- const m = req.method;
11
-
12
- if (m === 'OPTIONS') return cors(new Response(null, { status: 204 }));
13
-
14
- if ((p === '/models' || p === '/v1/models') && m === 'GET')
15
- return cors(modelsList());
16
-
17
- if (
18
- (p === '/chat/completions' || p === '/v1/chat/completions') &&
19
- m === 'POST'
20
- )
21
- return cors(await chatProxy(req));
22
-
23
- return cors(json({ error: 'Not Found', path: p }, 404));
24
- }
25
-
26
- /* ---------- 模型列表 ---------- */
27
- function modelsList() {
28
- return json({
29
  object: 'list',
30
  data: [
31
- { id: 'gpt-4o', object: 'model', created: 0, owned_by: 'helix' },
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
32
  { id: 'gpt-3.5-turbo', object: 'model', created: 0, owned_by: 'helix' }
33
  ]
34
  });
35
- }
36
-
37
- /* ---------- 聊天 ---------- */
38
- async function chatProxy(req) {
39
- let body;
40
- try {
41
- body = await req.json();
42
- } catch {
43
- return json({ error: 'Bad JSON' }, 400);
44
- }
45
 
 
 
46
  const {
47
  model,
48
  messages = [],
@@ -52,10 +57,11 @@ async function chatProxy(req) {
52
  presence_penalty,
53
  frequency_penalty,
54
  ...rest
55
- } = body;
56
 
 
57
  const historyText = messages
58
- .map(m => (m.role === 'user' ? 'user:' : 'AI:') + m.content)
59
  .join('\n');
60
 
61
  const helixPayload = {
@@ -64,7 +70,10 @@ async function chatProxy(req) {
64
  provider: getProvider(model),
65
  model,
66
  messages: [
67
- { role: 'user', content: { content_type: 'text', parts: [historyText] } }
 
 
 
68
  ],
69
  temperature,
70
  top_p,
@@ -73,22 +82,20 @@ async function chatProxy(req) {
73
  ...rest
74
  };
75
 
76
- const helixRes = await fetch(
77
- 'https://app.tryhelix.ai/api/v1/sessions/chat',
78
- {
79
- method: 'POST',
80
- headers: {
81
- 'Content-Type': 'application/json',
82
- Authorization: req.headers.get('Authorization') || ''
83
- },
84
- body: JSON.stringify(helixPayload)
85
- }
86
- );
87
 
88
  if (!stream) {
89
- const data = await helixRes.json();
90
- const reply = data?.choices?.[0]?.message?.content ?? '';
91
- const openai = {
92
  id: `chatcmpl-proxy-${data.id ?? Date.now()}`,
93
  object: 'chat.completion',
94
  created: Math.floor(Date.now() / 1000),
@@ -100,66 +107,22 @@ async function chatProxy(req) {
100
  finish_reason: 'stop'
101
  }
102
  ]
103
- };
104
- return json(openai, helixRes.status);
105
  }
106
 
107
- /* ---- 流式过滤 ---- */
108
- const filteredStream = helixRes.body
109
- .pipeThrough(new TextDecoderStream())
110
- .pipeThrough(sseLineSplitter())
111
- .pipeThrough(new TransformStream({
112
- transform(line, controller) {
113
- if (!line.startsWith('data:')) { controller.enqueue(line); return; }
114
- const payload = line.slice(5).trim();
115
- if (payload === '[DONE]') { controller.enqueue(line); return; }
116
- try {
117
- const obj = JSON.parse(payload);
118
- if (obj.choices === null || (Array.isArray(obj.choices) && obj.choices.length === 0))
119
- return;
120
- } catch { /* ignore */ }
121
- controller.enqueue(line);
122
- }
123
- }))
124
- .pipeThrough(new TextEncoderStream());
125
-
126
- return new Response(filteredStream, {
127
- status: helixRes.status,
128
- headers: { 'Content-Type': 'text/event-stream' }
129
- });
130
- }
131
 
132
- /* ---------- 工具 ---------- */
133
  function getProvider(m) {
134
  if (/^gpt-[34]|^gpt-3\.5/.test(m)) return 'openai';
135
  if (/^(llama|phi|aya|gemma|deepseek|qwen)/.test(m)) return 'helix';
136
  return 'togetherai';
137
  }
138
- const json = (o, s = 200) =>
139
- new Response(JSON.stringify(o), {
140
- status: s,
141
- headers: { 'Content-Type': 'application/json' }
142
- });
143
-
144
- function cors(res) {
145
- const h = new Headers(res.headers);
146
- h.set('Access-Control-Allow-Origin', '*');
147
- h.set('Access-Control-Allow-Methods', 'GET, POST, OPTIONS');
148
- h.set('Access-Control-Allow-Headers', 'Content-Type, Authorization');
149
- return new Response(res.body, { ...res, headers: h });
150
- }
151
 
152
- function sseLineSplitter() {
153
- let buf = '';
154
- return new TransformStream({
155
- transform(chunk, controller) {
156
- buf += chunk;
157
- const lines = buf.split('\n');
158
- buf = lines.pop();
159
- for (const l of lines) controller.enqueue(l + '\n');
160
- },
161
- flush(controller) {
162
- if (buf) controller.enqueue(buf);
163
- }
164
- });
165
- }
 
1
+ import express from 'express';
2
+ import cors from 'cors';
3
+ import fetch from 'node-fetch';
4
+
5
+ const app = express();
6
+
7
+ // CORS
8
+ app.use(cors({
9
+ origin: '*',
10
+ methods: ['GET', 'POST', 'OPTIONS'],
11
+ allowedHeaders: ['Content-Type', 'Authorization']
12
+ }));
13
+ app.use(express.json());
14
+ app.options('*', (req, res) => res.sendStatus(204));
15
+
16
+ // Список моделей
17
+ app.get(['/models', '/v1/models'], (req, res) => {
18
+ res.json({
 
 
 
 
 
 
 
 
 
 
19
  object: 'list',
20
  data: [
21
+ // OpenAI GPT‑4.1 / 4.5
22
+ { id: 'gpt-4.1', object: 'model', created: 0, owned_by: 'helix' },
23
+ { id: 'gpt-4.1-2025-04-14', object: 'model', created: 0, owned_by: 'helix' },
24
+ { id: 'gpt-4.1-mini', object: 'model', created: 0, owned_by: 'helix' },
25
+ { id: 'gpt-4.1-mini-2025-04-14', object: 'model', created: 0, owned_by: 'helix' },
26
+ { id: 'gpt-4.1-nano', object: 'model', created: 0, owned_by: 'helix' },
27
+ { id: 'gpt-4.1-nano-2025-04-14', object: 'model', created: 0, owned_by: 'helix' },
28
+ { id: 'gpt-4.5-preview', object: 'model', created: 0, owned_by: 'helix' },
29
+ { id: 'gpt-4.5-preview-2025-02-27', object: 'model', created: 0, owned_by: 'helix' },
30
+
31
+ // OpenAI — GPT‑4o
32
+ { id: 'gpt-4o', object: 'model', created: 0, owned_by: 'helix' },
33
+ { id: 'gpt-4o-2024-05-13', object: 'model', created: 0, owned_by: 'helix' },
34
+ { id: 'gpt-4o-2024-08-06', object: 'model', created: 0, owned_by: 'helix' },
35
+ { id: 'gpt-4o-2024-11-20', object: 'model', created: 0, owned_by: 'helix' },
36
+ { id: 'gpt-4o-mini', object: 'model', created: 0, owned_by: 'helix' },
37
+ { id: 'gpt-4o-mini-2024-07-18', object: 'model', created: 0, owned_by: 'helix' },
38
+ { id: 'gpt-4o-search-preview', object: 'model', created: 0, owned_by: 'helix' },
39
+ { id: 'gpt-4o-search-preview-2025-03-11', object: 'model', created: 0, owned_by: 'helix' },
40
+ { id: 'gpt-4o-mini-search-preview', object: 'model', created: 0, owned_by: 'helix' },
41
+ { id: 'gpt-4o-mini-search-preview-2025-03-11', object: 'model', created: 0, owned_by: 'helix' },
42
+
43
+ // Helix — GPT‑3.5 Turbo
44
  { id: 'gpt-3.5-turbo', object: 'model', created: 0, owned_by: 'helix' }
45
  ]
46
  });
47
+ });
 
 
 
 
 
 
 
 
 
48
 
49
+ // Chat completions proxy
50
+ app.post(['/chat/completions', '/v1/chat/completions'], async (req, res) => {
51
  const {
52
  model,
53
  messages = [],
 
57
  presence_penalty,
58
  frequency_penalty,
59
  ...rest
60
+ } = req.body;
61
 
62
+ // Role prefixes changed from "用户:" / "AI:" to English equivalents
63
  const historyText = messages
64
+ .map(m => (m.role === 'user' ? 'User: ' : 'Assistant: ') + m.content)
65
  .join('\n');
66
 
67
  const helixPayload = {
 
70
  provider: getProvider(model),
71
  model,
72
  messages: [
73
+ {
74
+ role: 'user',
75
+ content: { content_type: 'text', parts: [historyText] }
76
+ }
77
  ],
78
  temperature,
79
  top_p,
 
82
  ...rest
83
  };
84
 
85
+ // Forward the request to Helix
86
+ const helixRes = await fetch('https://app.tryhelix.ai/api/v1/sessions/chat', {
87
+ method: 'POST',
88
+ headers: {
89
+ 'Content-Type': 'application/json',
90
+ Authorization: req.header('authorization') || ''
91
+ },
92
+ body: JSON.stringify(helixPayload)
93
+ });
 
 
94
 
95
  if (!stream) {
96
+ const data = await helixRes.json();
97
+ const reply = data?.choices?.[0]?.message?.content ?? '';
98
+ return res.status(helixRes.status).json({
99
  id: `chatcmpl-proxy-${data.id ?? Date.now()}`,
100
  object: 'chat.completion',
101
  created: Math.floor(Date.now() / 1000),
 
107
  finish_reason: 'stop'
108
  }
109
  ]
110
+ });
 
111
  }
112
 
113
+ // If streaming was requested, pipe the SSE body straight through
114
+ res.status(helixRes.status);
115
+ res.set('Content-Type', 'text/event-stream');
116
+ helixRes.body.pipe(res);
117
+ });
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
118
 
 
119
  function getProvider(m) {
120
  if (/^gpt-[34]|^gpt-3\.5/.test(m)) return 'openai';
121
  if (/^(llama|phi|aya|gemma|deepseek|qwen)/.test(m)) return 'helix';
122
  return 'togetherai';
123
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
124
 
125
+ const PORT = process.env.PORT || 7860;
126
+ app.listen(PORT, () => {
127
+ console.log(`🚀 Server listening on port ${PORT}`);
128
+ });