import express from 'express'
import cors from 'cors'
import fetch from 'node-fetch'

const app = express()

// Helix API key from the environment; when empty, the caller's own
// Authorization header is forwarded instead (see authHeader below).
const HELIX_API_KEY = process.env.HELIX_API_KEY || ''

// CORS: allow any origin so the proxy is callable from browsers.
app.use(cors({
  origin: '*',
  methods: ['GET', 'POST', 'OPTIONS'],
  allowedHeaders: ['Content-Type', 'Authorization'],
}))
app.use(express.json())
app.options('*', (req, res) => res.sendStatus(204))

// Model ids advertised by GET /models — all proxied through Helix,
// so every entry reports owned_by: 'helix'.
const MODEL_IDS = [
  // OpenAI — GPT-4.1 / 4.5
  'gpt-4.1', 'gpt-4.1-2025-04-14',
  'gpt-4.1-mini', 'gpt-4.1-mini-2025-04-14',
  'gpt-4.1-nano', 'gpt-4.1-nano-2025-04-14',
  'gpt-4.5-preview', 'gpt-4.5-preview-2025-02-27',
  // OpenAI — GPT-4o
  'gpt-4o', 'gpt-4o-2024-05-13', 'gpt-4o-2024-08-06', 'gpt-4o-2024-11-20',
  'gpt-4o-mini', 'gpt-4o-mini-2024-07-18',
  'gpt-4o-search-preview', 'gpt-4o-search-preview-2025-03-11',
  'gpt-4o-mini-search-preview', 'gpt-4o-mini-search-preview-2025-03-11',
  // Helix — GPT-3.5 Turbo
  'gpt-3.5-turbo',
]

// OpenAI-compatible model listing.
app.get(['/models', '/v1/models'], (req, res) => {
  res.json({
    object: 'list',
    data: MODEL_IDS.map((id) => ({
      id,
      object: 'model',
      created: 0,
      owned_by: 'helix',
    })),
  })
})

// Proxy for /chat/completions → the Helix sessions API.
app.post(['/chat/completions', '/v1/chat/completions'], async (req, res) => {
  const {
    model,
    messages = [],
    stream = false,
    temperature,
    top_p,
    presence_penalty,
    frequency_penalty,
    ...rest
  } = req.body

  // Flatten the chat history into one text block with "User:" /
  // "Assistant:" role prefixes — Helix expects a single text part.
  const historyText = messages
    .map((m) => (m.role === 'user' ? 'User: ' : 'Assistant: ') + m.content)
    .join('\n')

  // Payload in the shape the Helix sessions endpoint expects.
  const helixPayload = {
    type: 'text',
    stream,
    provider: getProvider(model),
    model,
    messages: [
      {
        role: 'user',
        content: { content_type: 'text', parts: [historyText] },
      },
    ],
    temperature,
    top_p,
    presence_penalty,
    frequency_penalty,
    ...rest,
  }

  // Prefer the server-side key; otherwise forward the caller's header.
  const authHeader = HELIX_API_KEY
    ? `Bearer ${HELIX_API_KEY}`
    : (req.header('authorization') || '')

  try {
    const helixRes = await fetch(
      'https://app.tryhelix.ai/api/v1/sessions/chat',
      {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          Authorization: authHeader,
        },
        body: JSON.stringify(helixPayload),
      }
    )

    if (!stream) {
      // Non-streaming: re-wrap the Helix reply as an OpenAI-style
      // chat.completion object.
      const data = await helixRes.json()
      const reply = data?.choices?.[0]?.message?.content ?? ''
      return res.status(helixRes.status).json({
        id: `chatcmpl-proxy-${data.id ?? Date.now()}`,
        object: 'chat.completion',
        created: Math.floor(Date.now() / 1000),
        model,
        choices: [
          {
            index: 0,
            message: { role: 'assistant', content: reply },
            finish_reason: 'stop',
          },
        ],
      })
    }

    // Streaming: pass the Helix SSE body through untouched.
    res.status(helixRes.status)
    res.set('Content-Type', 'text/event-stream')
    helixRes.body.pipe(res)
  } catch (err) {
    // Express 4 does not forward rejected async-handler promises, so
    // without this catch a network/JSON failure would leave the client
    // request hanging and log an unhandled rejection.
    console.error('Helix proxy error:', err)
    if (!res.headersSent) {
      res.status(502).json({
        error: { message: 'Upstream request failed', type: 'proxy_error' },
      })
    } else {
      res.end()
    }
  }
})

/**
 * Map a model id to the Helix provider slug.
 * @param {string} modelId - model name from the request body
 * @returns {string} 'openai', 'helix', or 'togetherai'
 */
function getProvider(modelId) {
  // ^gpt-[34] already covers every gpt-3.5* id, so the original extra
  // alternative /^gpt-3\.5/ was redundant — same matches either way.
  if (/^gpt-[34]/.test(modelId)) return 'openai'
  if (/^(llama|phi|aya|gemma|deepseek|qwen)/.test(modelId)) return 'helix'
  return 'togetherai'
}

const PORT = process.env.PORT || 7860
app.listen(PORT, () => {
  console.log(`🚀 Server listening on port ${PORT}`)
})