Update server.js
Browse files
server.js
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
|
|
| 1 |
const express = require('express');
|
| 2 |
const rateLimit = require('express-rate-limit');
|
| 3 |
-
const
|
| 4 |
|
| 5 |
const app = express();
|
| 6 |
app.use(express.json());
|
|
@@ -8,8 +9,6 @@ app.use(express.json());
|
|
| 8 |
// Доверие к одному прокси (например, Heroku)
|
| 9 |
app.set('trust proxy', 1);
|
| 10 |
|
| 11 |
-
const openai_keys = process.env.OPENAI_KEY.split(',');
|
| 12 |
-
|
| 13 |
function getRandomApiKey() {
|
| 14 |
const randomIndex = Math.floor(Math.random() * openai_keys.length);
|
| 15 |
return openai_keys[randomIndex];
|
|
@@ -35,50 +34,35 @@ app.post('/update', async (req, res) => {
|
|
| 35 |
});
|
| 36 |
|
| 37 |
async function sendRequest(prompt, prs) {
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
try {
|
| 42 |
-
const response = await axios.post(firstUrl, {
|
| 43 |
-
messages: [{'role': 'system', 'content': prs}, {'role': 'user', 'content': prompt}],
|
| 44 |
-
max_tokens: 1200,
|
| 45 |
-
temperature: 0.19,
|
| 46 |
-
}, {
|
| 47 |
-
headers: {
|
| 48 |
-
'Content-Type': 'application/json',
|
| 49 |
-
},
|
| 50 |
-
});
|
| 51 |
-
|
| 52 |
-
if (response.data.choices && response.data.choices.length > 0 && response.data.choices[0].message) {
|
| 53 |
-
return response.data.choices[0].message.content.trim();
|
| 54 |
-
} else {
|
| 55 |
-
throw new Error("Ошибка прочтения на первой ссылке");
|
| 56 |
-
}
|
| 57 |
-
} catch (error) {
|
| 58 |
-
console.error("Первая попытка не удалась:", error);
|
| 59 |
-
// Попытка с другой ссылкой
|
| 60 |
try {
|
| 61 |
-
const
|
| 62 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 63 |
max_tokens: 1200,
|
| 64 |
-
temperature: 0.19,
|
| 65 |
-
}, {
|
| 66 |
-
headers: {
|
| 67 |
-
'Content-Type': 'application/json',
|
| 68 |
-
},
|
| 69 |
});
|
| 70 |
-
|
| 71 |
-
if (
|
| 72 |
-
return
|
| 73 |
} else {
|
| 74 |
-
throw new Error("Ошибка прочтения
|
| 75 |
}
|
| 76 |
-
} catch (
|
| 77 |
-
console.error("
|
| 78 |
throw new Error("Ошибка при генерации");
|
| 79 |
}
|
| 80 |
}
|
| 81 |
-
}
|
| 82 |
|
| 83 |
app.post('/pl', async (req, res) => {
|
| 84 |
const prompt = req.body.prompt;
|
|
@@ -98,7 +82,7 @@ app.post('/pl', async (req, res) => {
|
|
| 98 |
const content = await sendRequest(prompt, prs);
|
| 99 |
res.json({ content });
|
| 100 |
} catch (error) {
|
| 101 |
-
|
| 102 |
}
|
| 103 |
});
|
| 104 |
|
|
@@ -128,4 +112,4 @@ app.post('/plbeta', async (req, res) => {
|
|
| 128 |
const port = 7860;
|
| 129 |
app.listen(port, () => {
|
| 130 |
console.log(`API сервер запущен на порту ${port}`);
|
| 131 |
-
});
|
|
|
|
| 1 |
+
require('dotenv').config();
|
| 2 |
const express = require('express');
|
| 3 |
const rateLimit = require('express-rate-limit');
|
| 4 |
+
const { HfInference } = require('@huggingface/inference');
|
| 5 |
|
| 6 |
const app = express();
|
| 7 |
app.use(express.json());
|
|
|
|
| 9 |
// Доверие к одному прокси (например, Heroku)
|
| 10 |
app.set('trust proxy', 1);
|
| 11 |
|
|
|
|
|
|
|
| 12 |
/**
 * Picks one API key at random from the comma-separated list in the environment.
 *
 * NOTE(review): this commit removed the module-level
 * `const openai_keys = process.env.OPENAI_KEY.split(',')` declaration while
 * this function still referenced `openai_keys`, making every call throw a
 * ReferenceError. The key list is now read here directly.
 * TODO confirm the intended env var — the code was migrated to Hugging Face
 * inference, so this may need to become e.g. `HF_TOKEN`.
 *
 * @returns {string} One key from the list (empty string if the var is unset).
 */
function getRandomApiKey() {
  const keys = (process.env.OPENAI_KEY ?? '').split(',');
  const randomIndex = Math.floor(Math.random() * keys.length);
  return keys[randomIndex];
}
|
|
|
|
| 34 |
});
|
| 35 |
|
| 36 |
/**
 * Sends a chat-completion request to the Hugging Face Inference API.
 *
 * @param {string} prompt - The user's message.
 * @param {string} prs - The system prompt (persona/instructions).
 * @returns {Promise<string>} The trimmed assistant reply text.
 * @throws {Error} "Ошибка при генерации" when the request fails or the
 *   response carries no usable message; the underlying failure is attached
 *   as `cause`.
 */
async function sendRequest(prompt, prs) {
  const hfApiKey = getRandomApiKey();
  const client = new HfInference(hfApiKey);

  try {
    const chatCompletion = await client.chatCompletion({
      model: "Qwen/Qwen2.5-72B-Instruct",
      messages: [
        { role: "system", content: prs },
        { role: "user", content: prompt },
      ],
      max_tokens: 1200,
    });

    // Optional chaining replaces the long manual null-check chain.
    const message = chatCompletion?.choices?.[0]?.message;
    if (message) {
      return message.content.trim();
    }
    throw new Error("Ошибка прочтения ответа");
  } catch (error) {
    console.error("Ошибка при обращении к Hugging Face:", error);
    // Preserve the original failure for upstream debugging instead of
    // discarding it (the previous rethrow lost the cause and stack).
    throw new Error("Ошибка при генерации", { cause: error });
  }
}
|
|
|
|
| 66 |
|
| 67 |
app.post('/pl', async (req, res) => {
|
| 68 |
const prompt = req.body.prompt;
|
|
|
|
| 82 |
const content = await sendRequest(prompt, prs);
|
| 83 |
res.json({ content });
|
| 84 |
} catch (error) {
|
| 85 |
+
res.json({ content: `{"error":"", "title":"Ошибка", "text":"Произошла ошибка на сервере. (${error.message})", "okb":"Ок", "oklink":"", "cancelable":"true"}` });
|
| 86 |
}
|
| 87 |
});
|
| 88 |
|
|
|
|
| 112 |
// Start the HTTP API server on the fixed service port.
const port = 7860;
const announce = () => {
  console.log(`API сервер запущен на порту ${port}`);
};
app.listen(port, announce);