Spaces:
Running
Running
Update app.js
Browse files
app.js
CHANGED
|
@@ -47,7 +47,8 @@ import { NodeHttpHandler } from "@smithy/node-http-handler";
|
|
| 47 |
// ── CONFIG ──────────────────────────────────────────────────────────────────
|
| 48 |
const PORT = parseInt(process.env.PORT || "7860");
|
| 49 |
const AWS_REGION = process.env.AWS_REGION || "us-east-1";
|
| 50 |
-
const BEDROCK_MODEL_ID = "arn:aws:bedrock:us-east-1:106774395747:inference-profile/global.anthropic.claude-sonnet-4-6";
|
|
|
|
| 51 |
|
| 52 |
const DATASET_PATH = process.env.DATASET_PATH || "axl_dataset.json";
|
| 53 |
const DEFAULT_RPS = parseFloat(process.env.DEFAULT_RPS || "3");
|
|
@@ -1793,13 +1794,31 @@ OUTPUT THE JSON ARRAY NOW:`;
|
|
| 1793 |
}
|
| 1794 |
|
| 1795 |
async function callBedrock(prompt) {
|
|
|
|
|
|
|
| 1796 |
const command = new ConverseCommand({
|
| 1797 |
modelId: BEDROCK_MODEL_ID,
|
| 1798 |
messages: [{ role: "user", content:[{ text: prompt }] }],
|
| 1799 |
inferenceConfig: {
|
| 1800 |
maxTokens: 10000,
|
| 1801 |
temperature: 0.9
|
| 1802 |
-
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1803 |
});
|
| 1804 |
|
| 1805 |
const t0 = Date.now();
|
|
|
|
// ── CONFIG ──────────────────────────────────────────────────────────────────
// Runtime configuration; every value can be overridden via environment variables.

// HTTP listen port (default 7860, the conventional Hugging Face Spaces port).
// Radix 10 is passed explicitly so values like "010" are never misparsed.
const PORT = Number.parseInt(process.env.PORT || "7860", 10);

// AWS region used for the Bedrock runtime client.
const AWS_REGION = process.env.AWS_REGION || "us-east-1";

// Bedrock inference-profile ARN for the model (currently Claude Haiku 4.5).
const BEDROCK_MODEL_ID = "arn:aws:bedrock:us-east-1:106774395747:inference-profile/global.anthropic.claude-haiku-4-5-20251001-v1:0";
// Previous profile, kept for easy switching:
//"arn:aws:bedrock:us-east-1:106774395747:inference-profile/global.anthropic.claude-sonnet-4-6";

// Path of the local dataset file read at startup.
const DATASET_PATH = process.env.DATASET_PATH || "axl_dataset.json";

// Default request rate (requests per second) when the caller does not specify one.
const DEFAULT_RPS = Number.parseFloat(process.env.DEFAULT_RPS || "3");
|
|
|
|
| 1794 |
}
|
| 1795 |
|
| 1796 |
async function callBedrock(prompt) {
|
| 1797 |
+
const model = BEDROCK_MODEL_ID;
|
| 1798 |
+
|
| 1799 |
const command = new ConverseCommand({
|
| 1800 |
modelId: BEDROCK_MODEL_ID,
|
| 1801 |
messages: [{ role: "user", content:[{ text: prompt }] }],
|
| 1802 |
inferenceConfig: {
|
| 1803 |
maxTokens: 10000,
|
| 1804 |
temperature: 0.9
|
| 1805 |
+
},
|
| 1806 |
+
additionalModelRequestFields: (function() {
|
| 1807 |
+
if (model.includes("haiku")) {
|
| 1808 |
+
return {
|
| 1809 |
+
reasoning_config: {
|
| 1810 |
+
type: "enabled",
|
| 1811 |
+
budget_tokens: 2048
|
| 1812 |
+
}
|
| 1813 |
+
};
|
| 1814 |
+
} else if (model.includes("claude")) {
|
| 1815 |
+
return {
|
| 1816 |
+
// thinking: { type: "adaptive" },
|
| 1817 |
+
output_config: { effort: "high" }
|
| 1818 |
+
};
|
| 1819 |
+
}
|
| 1820 |
+
return undefined;
|
| 1821 |
+
})()
|
| 1822 |
});
|
| 1823 |
|
| 1824 |
const t0 = Date.now();
|