import { BedrockRuntimeClient, ConverseCommand, ConverseStreamCommand } from "@aws-sdk/client-bedrock-runtime";
import { NodeHttpHandler } from "@smithy/node-http-handler";
// Shared Bedrock Runtime client, pinned to us-east-1 (the inference-profile
// ARNs used below are region-specific).
// NOTE(review): an explicit NodeHttpHandler is supplied — presumably to force
// the HTTP/1.1 handler over the SDK's default selection; confirm intent.
// Removed `http2Handler: undefined`: it is not a recognized NodeHttpHandler
// option and was a silent no-op.
const bedrockClient = new BedrockRuntimeClient({
  region: "us-east-1",
  requestHandler: new NodeHttpHandler(),
});
/**
 * Maps a short model alias to its Bedrock inference-profile ARN.
 * Any unrecognized alias falls back to the Claude Sonnet profile.
 *
 * @param {string} model - Alias such as "maverick" or "haiku".
 * @returns {string} Bedrock inference-profile ARN.
 */
function getModelId(model) {
  switch (model) {
    case "maverick":
      return "arn:aws:bedrock:us-east-1:106774395747:inference-profile/us.meta.llama4-maverick-17b-instruct-v1:0";
    case "haiku":
      return "arn:aws:bedrock:us-east-1:106774395747:inference-profile/global.anthropic.claude-haiku-4-5-20251001-v1:0";
    default:
      return "arn:aws:bedrock:us-east-1:106774395747:inference-profile/global.anthropic.claude-sonnet-4-6";
  }
}
/**
 * Runs a single (non-streaming) Converse call against Bedrock.
 *
 * @param {Object} opts
 * @param {string} opts.model - Model alias resolved via getModelId.
 * @param {string} opts.prompt - User prompt text.
 * @param {string} [opts.system_prompt] - Optional system prompt override.
 * @param {string[]} [opts.images] - Base64 image strings, optionally carrying a data-URI prefix.
 * @returns {Promise<{success: boolean, data?: string, error?: string}>}
 */
export const generateCompletion = async ({ model, prompt, system_prompt, images }) => {
  try {
    const contentBlock = [];
    // AWS SDK v3 requires Uint8Array for image bytes; a plain Node Buffer
    // triggers a "Missing Keys" validation error from the Converse API.
    if (Array.isArray(images)) {
      for (const imgStr of images) {
        if (typeof imgStr !== "string") continue;
        // Derive the real format from the data-URI prefix instead of
        // hard-coding 'jpeg' — PNG/GIF/WebP uploads were being mislabeled.
        // 'jpg' is normalized to 'jpeg'; no prefix falls back to 'jpeg'.
        const match = imgStr.match(/^data:image\/(\w+);base64,/);
        const detected = match ? match[1].toLowerCase() : "jpeg";
        const format = detected === "jpg" ? "jpeg" : detected;
        const b64 = imgStr.replace(/^data:image\/\w+;base64,/, "");
        contentBlock.push({
          image: {
            format,
            source: { bytes: new Uint8Array(Buffer.from(b64, "base64")) },
          },
        });
      }
    }
    // The text prompt always goes last in the content array.
    contentBlock.push({ text: prompt });
    const command = new ConverseCommand({
      modelId: getModelId(model),
      system: [{ text: system_prompt || "You are an elite video metadata extractor." }],
      messages: [{ role: "user", content: contentBlock }],
      inferenceConfig: { maxTokens: 4000, temperature: 0.7 },
    });
    const response = await bedrockClient.send(command);
    // Optional chaining: a malformed/empty response yields `data: undefined`
    // instead of an unrelated TypeError swallowed by the catch below.
    const text = response.output?.message?.content?.find((b) => b.text)?.text;
    return { success: true, data: text };
  } catch (error) {
    console.error("❌ AI Engine Error:", error.message);
    // Surface AWS-level metadata (request id, HTTP status) for validation failures.
    if (error.$metadata) {
      console.error("AWS Meta Details:", error.$metadata);
    }
    return { success: false, error: error.message };
  }
};
// Streaming variant — builds the same content blocks, then pipes text deltas to `res`.
/**
 * Streams a Converse response from Bedrock, writing text deltas to `res`.
 *
 * @param {Object} opts
 * @param {string} opts.model - Model alias resolved via getModelId.
 * @param {string} opts.prompt - User prompt text.
 * @param {string} [opts.system_prompt] - Optional system prompt override.
 * @param {string[]} [opts.images] - Base64 image strings, optionally carrying a data-URI prefix.
 * @param {Object} opts.res - Writable HTTP response; receives raw text chunks and is always ended here.
 * @returns {Promise<void>}
 */
export const streamCompletion = async ({ model, prompt, system_prompt, images, res }) => {
  try {
    const contentBlock = [];
    // Image bytes must be Uint8Array (not Buffer) for the AWS SDK v3 Converse API.
    if (Array.isArray(images)) {
      for (const imgStr of images) {
        if (typeof imgStr !== "string") continue;
        // Derive the actual format from the data-URI prefix; hard-coding
        // 'jpeg' mislabeled PNG/GIF/WebP uploads. 'jpg' normalizes to 'jpeg'.
        const match = imgStr.match(/^data:image\/(\w+);base64,/);
        const detected = match ? match[1].toLowerCase() : "jpeg";
        const format = detected === "jpg" ? "jpeg" : detected;
        const b64 = imgStr.replace(/^data:image\/\w+;base64,/, "");
        contentBlock.push({
          image: { format, source: { bytes: new Uint8Array(Buffer.from(b64, "base64")) } },
        });
      }
    }
    // Text prompt goes last in the content array.
    contentBlock.push({ text: prompt });
    const command = new ConverseStreamCommand({
      modelId: getModelId(model),
      system: [{ text: system_prompt || "You are a professional assistant." }],
      messages: [{ role: "user", content: contentBlock }],
      inferenceConfig: { maxTokens: 4000, temperature: 0.7 },
    });
    const response = await bedrockClient.send(command);
    for await (const chunk of response.stream) {
      const delta = chunk.contentBlockDelta?.delta?.text;
      if (delta) {
        res.write(delta);
      }
    }
    res.end();
  } catch (error) {
    console.error("❌ Stream Error:", error.message);
    // The client already holds an open 200 stream, so report errors inline.
    res.write(`\n\nERROR: ${error.message}`);
    res.end();
  }
};