Spaces:
Running
Running
Update app.js
Browse files
app.js
CHANGED
|
@@ -1,28 +1,23 @@
|
|
| 1 |
import express from 'express';
|
| 2 |
-
import cors from 'cors';
|
| 3 |
import dotenv from 'dotenv';
|
| 4 |
-
import path from 'path';
|
| 5 |
import OpenAI from "openai";
|
| 6 |
import { BedrockRuntimeClient, ConverseCommand } from "@aws-sdk/client-bedrock-runtime";
|
|
|
|
| 7 |
|
| 8 |
dotenv.config();
|
| 9 |
-
|
| 10 |
const app = express();
|
| 11 |
const PORT = process.env.PORT || 7860;
|
| 12 |
|
| 13 |
-
|
| 14 |
-
app.use(cors()); // 2. Enable CORS for all origins
|
| 15 |
app.use(express.json());
|
| 16 |
|
| 17 |
-
//
|
| 18 |
-
app.use((req, res, next) => {
|
| 19 |
-
console.log(`\x1b[36m[TRAFFIC] ${req.method} ${req.url}\x1b[0m`);
|
| 20 |
-
next();
|
| 21 |
-
});
|
| 22 |
-
|
| 23 |
-
// --- CLIENTS ---
|
| 24 |
const bedrockClient = new BedrockRuntimeClient({
|
| 25 |
-
region:
|
|
|
|
|
|
|
|
|
|
| 26 |
credentials: {
|
| 27 |
accessKeyId: "AKIARRXB77NRTRLNWEBT",
|
| 28 |
secretAccessKey: "VC74ji226XOLVOH3U1lgP2nfaPirD9+HNgROp7AB",
|
|
@@ -36,98 +31,71 @@ const azureOpenAI = new OpenAI({
|
|
| 36 |
defaultHeaders: { "api-key": "7U3m9NRkE38ThSWTr92hMgQ4hDCUFI9MAnFNrCgRL7MhdvckfTXwJQQJ99CBACHYHv6XJ3w3AAAAACOGV22P" }
|
| 37 |
});
|
| 38 |
|
| 39 |
-
// --- ROUTES ---
|
| 40 |
app.post('/api/generate', async (req, res) => {
|
| 41 |
const { model, prompt } = req.body;
|
| 42 |
-
console.log(`
|
| 43 |
-
|
| 44 |
try {
|
| 45 |
-
let result;
|
| 46 |
if (model === "claude") {
|
| 47 |
const command = new ConverseCommand({
|
|
|
|
|
|
|
| 48 |
modelId: "arn:aws:bedrock:us-east-1:106774395747:inference-profile/global.anthropic.claude-sonnet-4-6",
|
| 49 |
messages: [{ role: "user", content: [{ text: prompt }] }],
|
| 50 |
-
inferenceConfig: { maxTokens:
|
| 51 |
-
additionalModelRequestFields: {
|
|
|
|
|
|
|
|
|
|
| 52 |
});
|
|
|
|
| 53 |
const response = await bedrockClient.send(command);
|
| 54 |
-
|
|
|
|
| 55 |
} else {
|
| 56 |
const response = await azureOpenAI.chat.completions.create({
|
| 57 |
model: "gpt-5-mini",
|
| 58 |
messages: [{ role: "user", content: prompt }],
|
| 59 |
reasoning_effort: "high"
|
| 60 |
});
|
| 61 |
-
|
| 62 |
}
|
| 63 |
-
res.json({ success: true, data: result });
|
| 64 |
} catch (err) {
|
| 65 |
-
|
| 66 |
-
|
|
|
|
|
|
|
| 67 |
}
|
| 68 |
});
|
| 69 |
|
| 70 |
app.get('/', (req, res) => {
|
| 71 |
res.send(`
|
| 72 |
-
<
|
| 73 |
-
<
|
| 74 |
-
<
|
| 75 |
-
|
| 76 |
-
|
| 77 |
-
|
| 78 |
-
|
| 79 |
-
|
| 80 |
-
|
| 81 |
-
|
| 82 |
-
|
| 83 |
-
<
|
| 84 |
-
|
| 85 |
-
|
| 86 |
-
|
| 87 |
-
|
| 88 |
-
|
| 89 |
-
|
| 90 |
-
|
| 91 |
-
|
| 92 |
-
|
| 93 |
-
|
| 94 |
-
</div>
|
| 95 |
-
<script>
|
| 96 |
-
async function run() {
|
| 97 |
-
const out = document.getElementById('output');
|
| 98 |
-
out.innerText = "Thinking...";
|
| 99 |
-
try {
|
| 100 |
-
// Using relative path to ensure it stays within the Space's routing
|
| 101 |
-
const res = await fetch('./api/generate', {
|
| 102 |
-
method: 'POST',
|
| 103 |
-
headers: { 'Content-Type': 'application/json' },
|
| 104 |
-
body: JSON.stringify({
|
| 105 |
-
model: document.getElementById('m').value,
|
| 106 |
-
prompt: document.getElementById('p').value
|
| 107 |
-
})
|
| 108 |
-
});
|
| 109 |
-
|
| 110 |
-
const contentType = res.headers.get("content-type");
|
| 111 |
-
if (!contentType || !contentType.includes("application/json")) {
|
| 112 |
-
const html = await res.text();
|
| 113 |
-
out.innerHTML = "<span style='color:red'>Error: Server returned HTML (404/500). Check terminal.</span>";
|
| 114 |
-
console.error("Received HTML instead of JSON", html);
|
| 115 |
-
return;
|
| 116 |
}
|
| 117 |
-
|
| 118 |
-
|
| 119 |
-
out.innerText = data.success ? data.data : "Error: " + data.error;
|
| 120 |
-
} catch (e) {
|
| 121 |
-
out.innerText = "Fetch Error: " + e.message;
|
| 122 |
-
}
|
| 123 |
-
}
|
| 124 |
-
</script>
|
| 125 |
-
</body>
|
| 126 |
-
</html>
|
| 127 |
`);
|
| 128 |
});
|
| 129 |
|
| 130 |
-
|
| 131 |
-
app.listen(PORT, '0.0.0.0', () => {
|
| 132 |
-
console.log(`\x1b[32m[SERVER] Running at http://0.0.0.0:${PORT}\x1b[0m`);
|
| 133 |
-
});
|
|
|
|
| 1 |
import express from 'express';
|
| 2 |
+
import cors from 'cors';
|
| 3 |
import dotenv from 'dotenv';
|
|
|
|
| 4 |
import OpenAI from "openai";
|
| 5 |
import { BedrockRuntimeClient, ConverseCommand } from "@aws-sdk/client-bedrock-runtime";
|
| 6 |
+
import { NodeHttpHandler } from "@smithy/node-http-handler"; // Import this
|
| 7 |
|
| 8 |
// Load .env variables before anything reads process.env.
dotenv.config();

const app = express();
// Hugging Face Spaces expects the app on 7860 unless PORT is provided.
const PORT = process.env.PORT || 7860;

// Global middleware: allow cross-origin calls and parse JSON bodies.
app.use(cors());
app.use(express.json());
|
| 14 |
|
| 15 |
+
// --- FIX: USE HTTP/1.1 TO PREVENT RECURSION CRASH ---
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 16 |
const bedrockClient = new BedrockRuntimeClient({
|
| 17 |
+
region: "us-east-1",
|
| 18 |
+
requestHandler: new NodeHttpHandler({
|
| 19 |
+
http2Handler: undefined, // Disabling HTTP/2 stops the stack overflow bug
|
| 20 |
+
}),
|
| 21 |
credentials: {
|
| 22 |
accessKeyId: "AKIARRXB77NRTRLNWEBT",
|
| 23 |
secretAccessKey: "VC74ji226XOLVOH3U1lgP2nfaPirD9+HNgROp7AB",
|
|
|
|
| 31 |
defaultHeaders: { "api-key": "7U3m9NRkE38ThSWTr92hMgQ4hDCUFI9MAnFNrCgRL7MhdvckfTXwJQQJ99CBACHYHv6XJ3w3AAAAACOGV22P" }
|
| 32 |
});
|
| 33 |
|
|
|
|
| 34 |
// POST /api/generate — dispatches a prompt to either AWS Bedrock (model === "claude")
// or Azure OpenAI (anything else) and replies with { success, data } JSON.
// Expects body: { model: string, prompt: string }.
app.post('/api/generate', async (req, res) => {
  // FIX: guard against a missing/non-JSON body — destructuring undefined would throw
  // before the try/catch below even existed.
  const { model, prompt } = req.body ?? {};
  console.log(`[TRAFFIC] Request for ${model}`);

  // Reject malformed requests up front with a 400 instead of a confusing 500.
  if (typeof prompt !== 'string' || prompt.length === 0) {
    return res.status(400).json({ success: false, error: 'Missing "prompt" in request body.' });
  }

  try {
    if (model === "claude") {
      const command = new ConverseCommand({
        // TRY THIS: If the long ARN fails, use the short ID below
        // modelId: "us.anthropic.claude-3-7-sonnet-20250219-v1:0",
        modelId: "arn:aws:bedrock:us-east-1:106774395747:inference-profile/global.anthropic.claude-sonnet-4-6",
        messages: [{ role: "user", content: [{ text: prompt }] }],
        inferenceConfig: { maxTokens: 2000, temperature: 1 },
        additionalModelRequestFields: {
          thinking: { type: "adaptive" },
          output_config: { effort: "high" }
        }
      });

      const response = await bedrockClient.send(command);
      // FIX: a Converse response may contain no text block (e.g. stop on tool use);
      // optional chaining avoids a TypeError that previously masked the real reply.
      const text = response.output?.message?.content?.find((b) => b.text)?.text ?? "";
      res.json({ success: true, data: text });
    } else {
      const response = await azureOpenAI.chat.completions.create({
        model: "gpt-5-mini",
        messages: [{ role: "user", content: prompt }],
        reasoning_effort: "high"
      });
      res.json({ success: true, data: response.choices[0].message.content });
    }
  } catch (err) {
    // FIX: `model` may be undefined or a non-string, so the original
    // `model.toUpperCase()` could throw *inside* the catch block, crashing the
    // handler and hiding the actual upstream error.
    const label = typeof model === 'string' ? model.toUpperCase() : 'UNKNOWN';
    console.error(`❌ [${label} ERROR]:`, err.name, err.message);
    res.status(500).json({ success: false, error: `${err.name}: ${err.message}` });
  }
});
|
| 70 |
|
| 71 |
// GET / — serves the single-page UI: a prompt box, a model selector, and an
// output area whose client-side run() calls POST /api/generate.
app.get('/', (req, res) => {
  res.send(`
    <body style="background:#111;color:#eee;font-family:sans-serif;padding:50px;">
      <div style="max-width:500px;margin:auto;background:#222;padding:20px;border-radius:10px;">
        <h2>Hollowpad Battle Arena</h2>
        <textarea id="p" style="width:100%;height:100px;"></textarea><br>
        <select id="m" style="width:100%;margin:10px 0;padding:10px;">
          <option value="gpt">GPT-5 Mini</option>
          <option value="claude">Claude Sonnet 4.6</option>
        </select><br>
        <button onclick="run()" style="width:100%;padding:10px;background:#0078d4;color:white;border:none;">Run Battle</button>
        <div id="out" style="margin-top:20px;white-space:pre-wrap;color:#ccc;"></div>
      </div>
      <script>
        async function run() {
          const out = document.getElementById('out');
          out.innerText = "Processing...";
          // FIX: without this try/catch a network or JSON-parse failure left the
          // UI stuck on "Processing..." with an unhandled promise rejection.
          try {
            const res = await fetch('/api/generate', {
              method: 'POST',
              headers: {'Content-Type': 'application/json'},
              body: JSON.stringify({ model: document.getElementById('m').value, prompt: document.getElementById('p').value })
            });
            const data = await res.json();
            out.innerText = data.success ? data.data : "ERROR: " + data.error;
          } catch (e) {
            out.innerText = "Fetch Error: " + e.message;
          }
        }
      </script>
    </body>
  `);
});
|
| 100 |
|
| 101 |
+
// Bind to 0.0.0.0 so the server is reachable from outside the container.
app.listen(PORT, '0.0.0.0', () => {
  console.log(`Server live on port ${PORT}`);
});
|
|
|
|
|
|
|
|
|