everydaycats commited on
Commit
8ef18c1
·
verified ·
1 Parent(s): f2553eb

Update ai_engine.js

Browse files
Files changed (1) hide show
  1. ai_engine.js +144 -8
ai_engine.js CHANGED
@@ -1,4 +1,144 @@
 
 
 
 
1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2
  import { BedrockRuntimeClient, ConverseCommand, ConverseStreamCommand } from "@aws-sdk/client-bedrock-runtime";
3
  import { NodeHttpHandler } from "@smithy/node-http-handler";
4
 
@@ -31,13 +171,7 @@ export const generateCompletion = async ({ model, prompt, system_prompt, images
31
  let contentBlock = [{ text: prompt }];
32
 
33
  if (images && images.length > 0) {
34
- /* const imageBlocks = images.map(imgStr => {
35
- const base64Data = imgStr.replace(/^data:image\/\w+;base64,/, "");
36
- return {
37
- image: { format: 'png', source: { bytes: Buffer.from(base64Data, 'base64') } }
38
- };
39
- });
40
- */
41
  const imageBlocks = images.map(imgStr => {
42
  // 1. Extract the actual base64 data
43
  const base64Data = imgStr.replace(/^data:image\/\w+;base64,/, "");
@@ -119,4 +253,6 @@ export const streamCompletion = async ({ model, prompt, system_prompt, images, r
119
  res.write(`__USAGE__${JSON.stringify({ totalTokenCount })}`);
120
  res.end();
121
 
122
- };
 
 
 
1
import { BedrockRuntimeClient, ConverseCommand, ConverseStreamCommand } from "@aws-sdk/client-bedrock-runtime";
import { NodeHttpHandler } from "@smithy/node-http-handler";

// Default system prompt used when the caller does not supply one.
const CLAUDE_SYSTEM_PROMPT = "You are a pro. Provide elite, high-level technical responses.";

// Shared Bedrock client for all completions in this module.
// NodeHttpHandler is the SDK's HTTP/1.1 handler; the previous config passed
// `http2Handler: undefined`, which is not a recognized NodeHttpHandler option
// (valid ones are connectionTimeout/requestTimeout/httpAgent/httpsAgent) and
// was a silent no-op, so it has been removed.
const bedrockClient = new BedrockRuntimeClient({
  region: "us-east-1",
  requestHandler: new NodeHttpHandler({}),
});
12
+
13
/**
 * Maps a short model alias ("haiku", "maverick", "claude") to its Bedrock
 * inference-profile ARN. Any unknown alias falls back to the Claude Sonnet
 * profile, matching the original switch's default branch.
 * @param {string} modelName - short model alias
 * @returns {string} full inference-profile ARN
 */
function getBedrockModelId(modelName) {
  const ARN_PREFIX = "arn:aws:bedrock:us-east-1:106774395747:inference-profile/";
  const profiles = {
    haiku: "global.anthropic.claude-haiku-4-5-20251001-v1:0",
    maverick: "us.meta.llama4-maverick-17b-instruct-v1:0",
    claude: "global.anthropic.claude-sonnet-4-6",
  };
  return ARN_PREFIX + (profiles[modelName] ?? profiles.claude);
}
24
+
25
/**
 * Converts data-URI audio strings into Bedrock Converse `audio` content blocks.
 *
 * FIX: the format was previously detected with `includes('audio/wav')` etc.
 * over the ENTIRE string — but the base64 alphabet contains '/', digits and
 * letters, so a large payload can coincidentally contain "audio/wav" and get
 * mislabeled (and scanning a multi-MB string four times is wasteful). The
 * subtype is now parsed from the data-URI header only.
 *
 * @param {string[]} audios - data-URI strings ("data:audio/<fmt>;base64,...")
 * @returns {Array<{audio: {format: string, source: {bytes: Buffer}}}>}
 */
const processAudioBlocks = (audios) => {
  const SUPPORTED_FORMATS = new Set(['mp3', 'wav', 'ogg', 'flac', 'amr']);
  return audios.map(audioStr => {
    // Parse the MIME subtype from the header; strings without a data-URI
    // prefix, or with an unsupported subtype (e.g. audio/mpeg), fall back
    // to 'mp3' exactly as before.
    const header = audioStr.match(/^data:audio\/(\w+);base64,/);
    const subtype = header?.[1]?.toLowerCase();
    const detectedFormat = subtype && SUPPORTED_FORMATS.has(subtype) ? subtype : 'mp3';

    const base64Data = audioStr.replace(/^data:audio\/\w+;base64,/, "");
    return {
      audio: {
        format: detectedFormat,
        source: { bytes: Buffer.from(base64Data, 'base64') }
      }
    };
  });
};
44
+
45
/**
 * One-shot (non-streaming) completion through the Bedrock Converse API.
 *
 * @param {object}   args
 * @param {string}   args.model         - model alias ("haiku" | "maverick" | "claude")
 * @param {string}   args.prompt        - user prompt text
 * @param {string}   [args.system_prompt] - system prompt; defaults to CLAUDE_SYSTEM_PROMPT
 * @param {string[]} [args.images]      - image data URIs
 * @param {string[]} [args.audios]      - audio data URIs
 * @returns {Promise<{success: true, data: string|undefined, usage: {totalTokenCount: number}}>}
 */
export const generateCompletion = async ({ model, prompt, system_prompt, images, audios }) => {
  const modelId = getBedrockModelId(model);
  const isHaiku = model.includes("haiku");
  const isClaude = model.includes("claude");
  const isMaverick = model.includes("maverick");

  // Substring-based format sniff over the data URI; later checks win,
  // default is jpeg (same precedence as before).
  const sniffImageFormat = (imgStr) => {
    let format = 'jpeg';
    if (imgStr.includes('image/png')) format = 'png';
    if (imgStr.includes('image/webp')) format = 'webp';
    if (imgStr.includes('image/gif')) format = 'gif';
    return format;
  };

  const imageBlocks = (images && images.length > 0)
    ? images.map((imgStr) => ({
        image: {
          format: sniffImageFormat(imgStr),
          source: { bytes: Buffer.from(imgStr.replace(/^data:image\/\w+;base64,/, ""), 'base64') }
        }
      }))
    : [];

  const audioBlocks = (audios && audios.length > 0) ? processAudioBlocks(audios) : [];

  // Message order: audio blocks first, then images, then the text prompt
  // (audio was prepended after images in the original assembly).
  const content = [...audioBlocks, ...imageBlocks, { text: prompt }];

  // Haiku gets reasoning enabled; Claude (Sonnet) gets high-effort output.
  let extraRequestFields;
  if (isHaiku) {
    extraRequestFields = { reasoning_config: { type: "enabled", budget_tokens: 2048 } };
  } else if (isClaude) {
    extraRequestFields = { output_config: { effort: "high" } };
  }

  const command = new ConverseCommand({
    modelId,
    system: [{ text: system_prompt || CLAUDE_SYSTEM_PROMPT }],
    messages: [{ role: "user", content }],
    inferenceConfig: {
      maxTokens: isHaiku ? 32000 : 4000,
      temperature: 1
    },
    performanceConfig: isMaverick ? { latency: "standard" } : undefined,
    additionalModelRequestFields: extraRequestFields
  });

  const response = await bedrockClient.send(command);

  const answerText = response.output.message.content.find((part) => part.text)?.text;
  const usage = response.usage;
  const totalTokenCount = usage ? usage.inputTokens + usage.outputTokens : 0;

  return { success: true, data: answerText, usage: { totalTokenCount } };
};
96
+
97
/**
 * Streaming completion through the Bedrock ConverseStream API.
 * Writes directly to `res` (a Node/Express writable response):
 *   - reasoning deltas are prefixed with "__THINK__"
 *   - answer text is written raw
 *   - a final "__USAGE__{...}" record is written before res.end()
 *
 * @param {object}   args
 * @param {string}   args.model         - model alias ("haiku" | "maverick" | "claude")
 * @param {string}   args.prompt        - user prompt text
 * @param {string}   [args.system_prompt] - system prompt; defaults to CLAUDE_SYSTEM_PROMPT
 * @param {string[]} [args.images]      - image data URIs
 * @param {string[]} [args.audios]      - audio data URIs
 * @param {object}   args.res           - writable HTTP response
 */
export const streamCompletion = async ({ model, prompt, system_prompt, images, audios, res }) => {
  let totalTokenCount = 0;
  const bedrockModelId = getBedrockModelId(model);
  let contentBlock = [{ text: prompt }];

  // Process Images.
  if (images && images.length > 0) {
    const imageBlocks = images.map(imgStr => {
      const base64Data = imgStr.replace(/^data:image\/\w+;base64,/, "");
      // FIX: the format was hard-coded to 'png' here while generateCompletion
      // detects it — a jpeg/webp/gif upload was sent mislabeled on the
      // streaming path. Use the same detection (default jpeg, later wins).
      let detectedFormat = 'jpeg';
      if (imgStr.includes('image/png')) detectedFormat = 'png';
      if (imgStr.includes('image/webp')) detectedFormat = 'webp';
      if (imgStr.includes('image/gif')) detectedFormat = 'gif';
      return {
        image: {
          format: detectedFormat,
          source: { bytes: Buffer.from(base64Data, 'base64') }
        }
      };
    });
    contentBlock = [...imageBlocks, ...contentBlock];
  }

  // Process Audio (prepended before images, as in generateCompletion).
  if (audios && audios.length > 0) {
    const audioBlocks = processAudioBlocks(audios);
    contentBlock = [...audioBlocks, ...contentBlock];
  }

  const command = new ConverseStreamCommand({
    modelId: bedrockModelId,
    system: [{ text: system_prompt || CLAUDE_SYSTEM_PROMPT }],
    messages: [{ role: "user", content: contentBlock }],
    inferenceConfig: { maxTokens: 48000, temperature: 1 },
    additionalModelRequestFields: model.includes("claude") ? { output_config: { effort: "high" } } : undefined
  });

  const response = await bedrockClient.send(command);
  for await (const chunk of response.stream) {
    if (chunk.contentBlockDelta) {
      const delta = chunk.contentBlockDelta.delta;
      if (delta.reasoningContent && delta.reasoningContent.text) {
        res.write(`__THINK__${delta.reasoningContent.text}`);
      } else if (delta.text) {
        res.write(delta.text);
      }
    }
    // Usage metadata arrives in a trailing chunk; keep the latest totals.
    if (chunk.metadata && chunk.metadata.usage) {
      totalTokenCount = (chunk.metadata.usage.inputTokens || 0) + (chunk.metadata.usage.outputTokens || 0);
    }
  }

  res.write(`__USAGE__${JSON.stringify({ totalTokenCount })}`);
  res.end();
};
139
+
140
+
141
+ /*
142
  import { BedrockRuntimeClient, ConverseCommand, ConverseStreamCommand } from "@aws-sdk/client-bedrock-runtime";
143
  import { NodeHttpHandler } from "@smithy/node-http-handler";
144
 
 
171
  let contentBlock = [{ text: prompt }];
172
 
173
  if (images && images.length > 0) {
174
+
 
 
 
 
 
 
175
  const imageBlocks = images.map(imgStr => {
176
  // 1. Extract the actual base64 data
177
  const base64Data = imgStr.replace(/^data:image\/\w+;base64,/, "");
 
253
  res.write(`__USAGE__${JSON.stringify({ totalTokenCount })}`);
254
  res.end();
255
 
256
+ };
257
+
258
+ */