caiosilva1221 committed on
Commit
ffa81b7
·
verified ·
1 Parent(s): b285579

Update server.js

Browse files
Files changed (1) hide show
  1. server.js +26 -378
server.js CHANGED
@@ -1,439 +1,87 @@
 
 
1
  import express from "express";
2
  import path from "path";
3
  import { fileURLToPath } from "url";
4
  import dotenv from "dotenv";
5
- import cookieParser from "cookie-parser";
6
- import {
7
- createRepo,
8
- uploadFiles,
9
- whoAmI,
10
- spaceInfo,
11
- fileExists,
12
- } from "@huggingface/hub";
13
- import { InferenceClient } from "@huggingface/inference";
14
  import bodyParser from "body-parser";
 
15
 
16
- import checkUser from "./middlewares/checkUser.js";
17
- import { PROVIDERS } from "./utils/providers.js";
18
- import { COLORS } from "./utils/colors.js";
19
-
20
- // Load environment variables from .env file
21
  dotenv.config();
22
 
23
  const app = express();
24
-
25
- const ipAddresses = new Map();
26
-
27
  const __filename = fileURLToPath(import.meta.url);
28
  const __dirname = path.dirname(__filename);
29
-
30
  const PORT = process.env.APP_PORT || 3000;
31
- const REDIRECT_URI =
32
- process.env.REDIRECT_URI || `http://localhost:${PORT}/auth/login`;
33
  const MODEL_ID = "deepseek-ai/DeepSeek-V3-0324";
34
- const MAX_REQUESTS_PER_IP = 2;
35
 
36
- app.use(cookieParser());
37
  app.use(bodyParser.json());
38
  app.use(express.static(path.join(__dirname, "dist")));
39
 
40
- const getPTag = (repoId) => {
41
- return `<p style="border-radius: 8px; text-align: center; font-size: 12px; color: #fff; margin-top: 16px;position: fixed; left: 8px; bottom: 8px; z-index: 10; background: rgba(0, 0, 0, 0.8); padding: 4px 8px;">Made with <img src="https://enzostvs-deepsite.hf.space/logo.svg" alt="DeepSite Logo" style="width: 16px; height: 16px; vertical-align: middle;display:inline-block;margin-right:3px;filter:brightness(0) invert(1);"><a href="https://enzostvs-deepsite.hf.space" style="color: #fff;text-decoration: underline;" target="_blank" >DeepSite</a> - 🧬 <a href="https://enzostvs-deepsite.hf.space?remix=${repoId}" style="color: #fff;text-decoration: underline;" target="_blank" >Remix</a></p>`;
42
- };
43
-
44
- app.get("/api/login", (_req, res) => {
45
- const redirectUrl = `https://huggingface.co/oauth/authorize?client_id=${process.env.OAUTH_CLIENT_ID}&redirect_uri=${REDIRECT_URI}&response_type=code&scope=openid%20profile%20write-repos%20manage-repos%20inference-api&prompt=consent&state=1234567890`;
46
- res.status(200).send({
47
- ok: true,
48
- redirectUrl,
49
- });
50
- });
51
- app.get("/auth/login", async (req, res) => {
52
- const { code } = req.query;
53
-
54
- if (!code) {
55
- return res.redirect(302, "/");
56
- }
57
- const Authorization = `Basic ${Buffer.from(
58
- `${process.env.OAUTH_CLIENT_ID}:${process.env.OAUTH_CLIENT_SECRET}`
59
- ).toString("base64")}`;
60
-
61
- const request_auth = await fetch("https://huggingface.co/oauth/token", {
62
- method: "POST",
63
- headers: {
64
- "Content-Type": "application/x-www-form-urlencoded",
65
- Authorization,
66
- },
67
- body: new URLSearchParams({
68
- grant_type: "authorization_code",
69
- code: code,
70
- redirect_uri: REDIRECT_URI,
71
- }),
72
- });
73
-
74
- const response = await request_auth.json();
75
-
76
- if (!response.access_token) {
77
- return res.redirect(302, "/");
78
- }
79
-
80
- res.cookie("hf_token", response.access_token, {
81
- httpOnly: false,
82
- secure: true,
83
- sameSite: "none",
84
- maxAge: 30 * 24 * 60 * 60 * 1000,
85
- });
86
-
87
- return res.redirect(302, "/");
88
- });
89
- app.get("/auth/logout", (req, res) => {
90
- res.clearCookie("hf_token", {
91
- httpOnly: false,
92
- secure: true,
93
- sameSite: "none",
94
- });
95
- return res.redirect(302, "/");
96
- });
97
-
98
- app.get("/api/@me", checkUser, async (req, res) => {
99
- let { hf_token } = req.cookies;
100
-
101
- if (process.env.HF_TOKEN && process.env.HF_TOKEN !== "") {
102
- return res.send({
103
- preferred_username: "local-use",
104
- isLocalUse: true,
105
- });
106
- }
107
-
108
- try {
109
- const request_user = await fetch("https://huggingface.co/oauth/userinfo", {
110
- headers: {
111
- Authorization: `Bearer ${hf_token}`,
112
- },
113
- });
114
-
115
- const user = await request_user.json();
116
- res.send(user);
117
- } catch (err) {
118
- res.clearCookie("hf_token", {
119
- httpOnly: false,
120
- secure: true,
121
- sameSite: "none",
122
- });
123
- res.status(401).send({
124
- ok: false,
125
- message: err.message,
126
- });
127
- }
128
- });
129
-
130
- app.post("/api/deploy", checkUser, async (req, res) => {
131
- const { html, title, path, prompts } = req.body;
132
- if (!html || (!path && !title)) {
133
- return res.status(400).send({
134
- ok: false,
135
- message: "Missing required fields",
136
- });
137
- }
138
-
139
- let { hf_token } = req.cookies;
140
- if (process.env.HF_TOKEN && process.env.HF_TOKEN !== "") {
141
- hf_token = process.env.HF_TOKEN;
142
- }
143
-
144
- try {
145
- const repo = {
146
- type: "space",
147
- name: path ?? "",
148
- };
149
-
150
- let readme;
151
- let newHtml = html;
152
-
153
- if (!path || path === "") {
154
- const { name: username } = await whoAmI({ accessToken: hf_token });
155
- const newTitle = title
156
- .toLowerCase()
157
- .replace(/[^a-z0-9]+/g, "-")
158
- .split("-")
159
- .filter(Boolean)
160
- .join("-")
161
- .slice(0, 96);
162
-
163
- const repoId = `${username}/${newTitle}`;
164
- repo.name = repoId;
165
-
166
- await createRepo({
167
- repo,
168
- accessToken: hf_token,
169
- });
170
- const colorFrom = COLORS[Math.floor(Math.random() * COLORS.length)];
171
- const colorTo = COLORS[Math.floor(Math.random() * COLORS.length)];
172
- readme = `---
173
- title: ${newTitle}
174
- emoji: 🐳
175
- colorFrom: ${colorFrom}
176
- colorTo: ${colorTo}
177
- sdk: static
178
- pinned: false
179
- tags:
180
- - deepsite
181
- ---
182
-
183
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference`;
184
- }
185
-
186
- newHtml = html.replace(/<\/body>/, `${getPTag(repo.name)}</body>`);
187
- const file = new Blob([newHtml], { type: "text/html" });
188
- file.name = "index.html"; // Add name property to the Blob
189
-
190
- // create prompt.txt file with all the prompts used, split by new line
191
- const newPrompts = ``.concat(prompts.map((prompt) => prompt).join("\n"));
192
- const promptFile = new Blob([newPrompts], { type: "text/plain" });
193
- promptFile.name = "prompts.txt"; // Add name property to the Blob
194
-
195
- const files = [file, promptFile];
196
- if (readme) {
197
- const readmeFile = new Blob([readme], { type: "text/markdown" });
198
- readmeFile.name = "README.md"; // Add name property to the Blob
199
- files.push(readmeFile);
200
- }
201
- await uploadFiles({
202
- repo,
203
- files,
204
- accessToken: hf_token,
205
- });
206
- return res.status(200).send({ ok: true, path: repo.name });
207
- } catch (err) {
208
- return res.status(500).send({
209
- ok: false,
210
- message: err.message,
211
- });
212
- }
213
- });
214
-
215
  app.post("/api/ask-ai", async (req, res) => {
216
- const { prompt, html, previousPrompt, provider } = req.body;
217
  if (!prompt) {
218
- return res.status(400).send({
219
- ok: false,
220
- message: "Missing required fields",
221
- });
222
  }
223
 
224
- let { hf_token } = req.cookies;
225
- let token = hf_token;
226
 
227
- if (process.env.HF_TOKEN && process.env.HF_TOKEN !== "") {
228
- token = process.env.HF_TOKEN;
229
- }
230
-
231
- const ip =
232
- req.headers["x-forwarded-for"]?.split(",")[0].trim() ||
233
- req.headers["x-real-ip"] ||
234
- req.socket.remoteAddress ||
235
- req.ip ||
236
- "0.0.0.0";
237
-
238
- if (!token) {
239
- ipAddresses.set(ip, (ipAddresses.get(ip) || 0) + 1);
240
- if (ipAddresses.get(ip) > MAX_REQUESTS_PER_IP) {
241
- return res.status(429).send({
242
- ok: false,
243
- openLogin: true,
244
- message: "Log In to continue using the service",
245
- });
246
- }
247
-
248
- token = process.env.DEFAULT_HF_TOKEN;
249
- }
250
-
251
- // Set up response headers for streaming
252
  res.setHeader("Content-Type", "text/plain");
253
  res.setHeader("Cache-Control", "no-cache");
254
  res.setHeader("Connection", "keep-alive");
255
 
256
- const client = new InferenceClient(token);
257
  let completeResponse = "";
258
 
259
- let TOKENS_USED = prompt?.length;
260
- if (previousPrompt) TOKENS_USED += previousPrompt.length;
261
- if (html) TOKENS_USED += html.length;
262
-
263
- const DEFAULT_PROVIDER = PROVIDERS.novita;
264
- const selectedProvider =
265
- provider === "auto"
266
- ? DEFAULT_PROVIDER
267
- : PROVIDERS[provider] ?? DEFAULT_PROVIDER;
268
-
269
- if (provider !== "auto" && TOKENS_USED >= selectedProvider.max_tokens) {
270
- return res.status(400).send({
271
- ok: false,
272
- openSelectProvider: true,
273
- message: `Context is too long. ${selectedProvider.name} allow ${selectedProvider.max_tokens} max tokens.`,
274
- });
275
- }
276
-
277
  try {
278
  const chatCompletion = client.chatCompletionStream({
279
  model: MODEL_ID,
280
- provider: selectedProvider.id,
281
  messages: [
282
  {
283
  role: "system",
284
- content: `ONLY USE HTML, CSS AND JAVASCRIPT. If you want to use ICON make sure to import the library first. Try to create the best UI possible by using only HTML, CSS and JAVASCRIPT. Use as much as you can TailwindCSS for the CSS, if you can't do something with TailwindCSS, then use custom CSS (make sure to import <script src="https://cdn.tailwindcss.com"></script> in the head). Also, try to ellaborate as much as you can, to create something unique. ALWAYS GIVE THE RESPONSE INTO A SINGLE HTML FILE`,
 
285
  },
286
  ...(previousPrompt
287
- ? [
288
- {
289
- role: "user",
290
- content: previousPrompt,
291
- },
292
- ]
293
  : []),
294
  ...(html
295
- ? [
296
- {
297
- role: "assistant",
298
- content: `The current code is: ${html}.`,
299
- },
300
- ]
301
  : []),
302
- {
303
- role: "user",
304
- content: prompt,
305
- },
306
  ],
307
- ...(selectedProvider.id !== "sambanova"
308
- ? {
309
- max_tokens: selectedProvider.max_tokens,
310
- }
311
- : {}),
312
  });
313
 
314
  while (true) {
315
  const { done, value } = await chatCompletion.next();
316
- if (done) {
317
- break;
318
- }
319
  const chunk = value.choices[0]?.delta?.content;
320
  if (chunk) {
321
- if (provider !== "sambanova") {
322
- res.write(chunk);
323
- completeResponse += chunk;
324
 
325
- if (completeResponse.includes("</html>")) {
326
- break;
327
- }
328
- } else {
329
- let newChunk = chunk;
330
- if (chunk.includes("</html>")) {
331
- // Replace everything after the last </html> tag with an empty string
332
- newChunk = newChunk.replace(/<\/html>[\s\S]*/, "</html>");
333
- }
334
- completeResponse += newChunk;
335
- res.write(newChunk);
336
- if (newChunk.includes("</html>")) {
337
- break;
338
- }
339
- }
340
  }
341
  }
342
- // End the response stream
343
- res.end();
344
- } catch (error) {
345
- if (error.message.includes("exceeded your monthly included credits")) {
346
- return res.status(402).send({
347
- ok: false,
348
- openProModal: true,
349
- message: error.message,
350
- });
351
- }
352
- if (!res.headersSent) {
353
- res.status(500).send({
354
- ok: false,
355
- message:
356
- error.message || "An error occurred while processing your request.",
357
- });
358
- } else {
359
- // Otherwise end the stream
360
- res.end();
361
- }
362
- }
363
- });
364
-
365
- app.get("/api/remix/:username/:repo", async (req, res) => {
366
- const { username, repo } = req.params;
367
- const { hf_token } = req.cookies;
368
-
369
- let token = hf_token || process.env.DEFAULT_HF_TOKEN;
370
-
371
- if (process.env.HF_TOKEN && process.env.HF_TOKEN !== "") {
372
- token = process.env.HF_TOKEN;
373
- }
374
-
375
- const repoId = `${username}/${repo}`;
376
-
377
- const url = `https://huggingface.co/spaces/${repoId}/raw/main/index.html`;
378
- try {
379
- const space = await spaceInfo({
380
- name: repoId,
381
- accessToken: token,
382
- additionalFields: ["author"],
383
- });
384
 
385
- if (!space || space.sdk !== "static" || space.private) {
386
- return res.status(404).send({
387
- ok: false,
388
- message: "Space not found",
389
- });
390
- }
391
-
392
- const response = await fetch(url);
393
- if (!response.ok) {
394
- return res.status(404).send({
395
- ok: false,
396
- message: "Space not found",
397
- });
398
- }
399
- let html = await response.text();
400
- // remove the last p tag including this url https://enzostvs-deepsite.hf.space
401
- html = html.replace(getPTag(repoId), "");
402
-
403
- let user = null;
404
-
405
- if (token) {
406
- const request_user = await fetch(
407
- "https://huggingface.co/oauth/userinfo",
408
- {
409
- headers: {
410
- Authorization: `Bearer ${hf_token}`,
411
- },
412
- }
413
- )
414
- .then((res) => res.json())
415
- .catch(() => null);
416
-
417
- user = request_user;
418
- }
419
-
420
- res.status(200).send({
421
- ok: true,
422
- html,
423
- isOwner: space.author === user?.preferred_username,
424
- path: repoId,
425
- });
426
  } catch (error) {
427
- return res.status(500).send({
428
  ok: false,
429
- message: error.message,
430
  });
431
  }
432
  });
 
433
  app.get("*", (_req, res) => {
434
  res.sendFile(path.join(__dirname, "dist", "index.html"));
435
  });
436
 
437
  app.listen(PORT, () => {
438
- console.log(`Server is running on port ${PORT}`);
439
  });
 
1
+ // server.js adaptado para remover login e usar OpenRouter
2
+
3
  import express from "express";
4
  import path from "path";
5
  import { fileURLToPath } from "url";
6
  import dotenv from "dotenv";
 
 
 
 
 
 
 
 
 
7
  import bodyParser from "body-parser";
8
+ import { InferenceClient } from "@huggingface/inference";
9
 
10
+ // Load env vars
 
 
 
 
11
  dotenv.config();
12
 
13
  const app = express();
 
 
 
14
  const __filename = fileURLToPath(import.meta.url);
15
  const __dirname = path.dirname(__filename);
 
16
  const PORT = process.env.APP_PORT || 3000;
 
 
17
  const MODEL_ID = "deepseek-ai/DeepSeek-V3-0324";
 
18
 
 
19
  app.use(bodyParser.json());
20
  app.use(express.static(path.join(__dirname, "dist")));
21
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22
  app.post("/api/ask-ai", async (req, res) => {
23
+ const { prompt, html, previousPrompt } = req.body;
24
  if (!prompt) {
25
+ return res.status(400).send({ ok: false, message: "Missing prompt" });
 
 
 
26
  }
27
 
28
+ // Token fixo da OpenRouter
29
+ const OPENROUTER_API_KEY = "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";
30
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
  res.setHeader("Content-Type", "text/plain");
32
  res.setHeader("Cache-Control", "no-cache");
33
  res.setHeader("Connection", "keep-alive");
34
 
35
+ const client = new InferenceClient(OPENROUTER_API_KEY);
36
  let completeResponse = "";
37
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38
  try {
39
  const chatCompletion = client.chatCompletionStream({
40
  model: MODEL_ID,
41
+ provider: "openrouter",
42
  messages: [
43
  {
44
  role: "system",
45
+ content:
46
+ "ONLY USE HTML, CSS AND JAVASCRIPT. Use TailwindCSS whenever possible. Return the full code as a single HTML file.",
47
  },
48
  ...(previousPrompt
49
+ ? [{ role: "user", content: previousPrompt }]
 
 
 
 
 
50
  : []),
51
  ...(html
52
+ ? [{ role: "assistant", content: `The current code is: ${html}.` }]
 
 
 
 
 
53
  : []),
54
+ { role: "user", content: prompt },
 
 
 
55
  ],
56
+ max_tokens: 4000,
 
 
 
 
57
  });
58
 
59
  while (true) {
60
  const { done, value } = await chatCompletion.next();
61
+ if (done) break;
62
+
 
63
  const chunk = value.choices[0]?.delta?.content;
64
  if (chunk) {
65
+ completeResponse += chunk;
66
+ res.write(chunk);
 
67
 
68
+ if (completeResponse.includes("</html>")) break;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
69
  }
70
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
71
 
72
+ res.end();
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
73
  } catch (error) {
74
+ res.status(500).send({
75
  ok: false,
76
+ message: error.message || "AI request failed",
77
  });
78
  }
79
  });
80
+
81
  app.get("*", (_req, res) => {
82
  res.sendFile(path.join(__dirname, "dist", "index.html"));
83
  });
84
 
85
  app.listen(PORT, () => {
86
+ console.log(`Server running on port ${PORT}`);
87
  });