import {
  BedrockRuntimeClient,
  ConverseCommand,
  ConverseStreamCommand,
} from "@aws-sdk/client-bedrock-runtime";
import { NodeHttpHandler } from "@smithy/node-http-handler";

// Single shared Bedrock client, pinned to the HTTP/1.1 NodeHttpHandler.
// NOTE(review): the original passed `{ http2Handler: undefined }` to the
// constructor — that is not a recognized NodeHttpHandler option and was
// silently ignored, so it has been dropped. Behavior is unchanged.
const bedrockClient = new BedrockRuntimeClient({
  region: "us-east-1",
  requestHandler: new NodeHttpHandler(),
});

// Shared generation parameters for both the blocking and streaming paths.
const INFERENCE_CONFIG = Object.freeze({ maxTokens: 4000, temperature: 0.7 });

/**
 * Map a short model alias to its Bedrock inference-profile ARN.
 * Any unknown alias falls back to the Sonnet profile.
 *
 * @param {string} model - "maverick" | "haiku" | anything else (Sonnet fallback).
 * @returns {string} Bedrock inference-profile ARN.
 */
function getModelId(model) {
  switch (model) {
    case "maverick":
      return "arn:aws:bedrock:us-east-1:106774395747:inference-profile/us.meta.llama4-maverick-17b-instruct-v1:0";
    case "haiku":
      return "arn:aws:bedrock:us-east-1:106774395747:inference-profile/global.anthropic.claude-haiku-4-5-20251001-v1:0";
    default:
      return "arn:aws:bedrock:us-east-1:106774395747:inference-profile/global.anthropic.claude-sonnet-4-6";
  }
}

/**
 * Build the Converse API `content` array: image blocks first, text prompt last.
 *
 * AWS SDK v3 requires image bytes as a Uint8Array — a plain Node Buffer
 * triggers a "Missing Keys" validation error, hence the explicit wrap.
 *
 * The image format is detected from a `data:image/...;base64,` prefix when
 * one is present (the original hard-coded "jpeg", mislabeling PNG/WebP
 * uploads); bare base64 strings still default to "jpeg".
 *
 * @param {string} prompt - user text, always appended as the last block.
 * @param {string[]} [images] - base64 strings, with or without a data-URI prefix.
 * @returns {Array<object>} Converse content blocks.
 */
function buildContentBlocks(prompt, images) {
  const content = [];
  if (Array.isArray(images)) {
    for (const imgStr of images) {
      if (typeof imgStr !== "string") continue;
      const prefix = imgStr.match(/^data:image\/(\w+);base64,/);
      // Normalize "jpg" → "jpeg" (the only name Bedrock accepts for JPEG).
      const format =
        prefix === null ? "jpeg" : prefix[1] === "jpg" ? "jpeg" : prefix[1];
      const b64 = prefix === null ? imgStr : imgStr.slice(prefix[0].length);
      content.push({
        image: {
          format,
          // Uint8Array, not Buffer — see the function doc above.
          source: { bytes: new Uint8Array(Buffer.from(b64, "base64")) },
        },
      });
    }
  }
  // Always put the text prompt at the end of the content array.
  content.push({ text: prompt });
  return content;
}

/**
 * One-shot (non-streaming) completion via the Bedrock Converse API.
 *
 * @param {object} args
 * @param {string} args.model - alias passed to getModelId().
 * @param {string} args.prompt - user text.
 * @param {string} [args.system_prompt] - optional system prompt override.
 * @param {string[]} [args.images] - optional base64 image strings.
 * @returns {Promise<{success: true, data: string|undefined} | {success: false, error: string}>}
 *   Never throws; errors are logged and returned as `{ success: false }`.
 */
export const generateCompletion = async ({ model, prompt, system_prompt, images }) => {
  try {
    const command = new ConverseCommand({
      modelId: getModelId(model),
      system: [{ text: system_prompt || "You are an elite video metadata extractor." }],
      messages: [{ role: "user", content: buildContentBlocks(prompt, images) }],
      inferenceConfig: INFERENCE_CONFIG,
    });
    const response = await bedrockClient.send(command);
    // Optional-chain the whole path: a malformed response should surface as
    // `data: undefined`, not a TypeError escaping the try/catch contract.
    const text = response.output?.message?.content?.find((b) => b.text)?.text;
    return { success: true, data: text };
  } catch (error) {
    console.error("❌ AI Engine Error:", error.message);
    // Log deep AWS validation issues if they happen.
    if (error.$metadata) {
      console.error("AWS Meta Details:", error.$metadata);
    }
    return { success: false, error: error.message };
  }
};

/**
 * Streaming completion via the Bedrock ConverseStream API; text deltas are
 * written to `res` as they arrive and the response is ended when done.
 *
 * @param {object} args
 * @param {string} args.model - alias passed to getModelId().
 * @param {string} args.prompt - user text.
 * @param {string} [args.system_prompt] - optional system prompt override.
 * @param {string[]} [args.images] - optional base64 image strings.
 * @param {import('http').ServerResponse} args.res - writable HTTP response.
 * @returns {Promise<void>} Never throws; on error an ERROR line is written
 *   to the stream and the response is ended.
 */
export const streamCompletion = async ({ model, prompt, system_prompt, images, res }) => {
  try {
    const command = new ConverseStreamCommand({
      modelId: getModelId(model),
      system: [{ text: system_prompt || "You are a professional assistant." }],
      messages: [{ role: "user", content: buildContentBlocks(prompt, images) }],
      inferenceConfig: INFERENCE_CONFIG,
    });
    const response = await bedrockClient.send(command);
    for await (const chunk of response.stream) {
      const delta = chunk.contentBlockDelta?.delta?.text;
      if (delta) {
        res.write(delta);
      }
    }
    res.end();
  } catch (error) {
    console.error("❌ Stream Error:", error.message);
    res.write(`\n\nERROR: ${error.message}`);
    res.end();
  }
};