Pepguy committed on
Commit
e2c761a
·
verified ·
1 Parent(s): c894814

Update app.js

Browse files
Files changed (1) hide show
  1. app.js +73 -17
app.js CHANGED
@@ -10,14 +10,17 @@ const app = express();
10
  const PORT = process.env.PORT || 7860;
11
 
12
  app.use(cors());
13
- app.use(express.json({ limit: '50mb' }));
14
 
 
15
  const CLAUDE_SYSTEM_PROMPT = "You are a pro. Provide elite, high-level technical responses.";
16
  const GPT_SYSTEM_PROMPT = "You are a worker. Be concise, efficient, and get the job done.";
17
 
18
  const bedrockClient = new BedrockRuntimeClient({
19
  region: "us-east-1" ,
20
- requestHandler: new NodeHttpHandler({ http2Handler: undefined })
 
 
21
  });
22
 
23
  const azureOpenAI = new OpenAI({
@@ -28,7 +31,9 @@ const azureOpenAI = new OpenAI({
28
  });
29
 
30
  app.post('/api/generate', async (req, res) => {
31
- const { model, prompt, system_prompt } = req.body;
 
 
32
  try {
33
  if (model === "claude") {
34
  const command = new ConverseCommand({
@@ -36,26 +41,37 @@ app.post('/api/generate', async (req, res) => {
36
  system: [{ text: system_prompt || CLAUDE_SYSTEM_PROMPT }],
37
  messages: [{ role: "user", content: [{ text: prompt }] }],
38
  inferenceConfig: { maxTokens: 48000, temperature: 1 },
39
- additionalModelRequestFields: { thinking: { type: "adaptive" }, output_config: { effort: "high" } }
 
 
 
40
  });
 
41
  const response = await bedrockClient.send(command);
42
  const text = response.output.message.content.find(b => b.text)?.text;
43
  res.json({ success: true, data: text });
44
  } else {
45
  const response = await azureOpenAI.chat.completions.create({
46
  model: "gpt-5-mini",
47
- messages: [{ role: "system", content: system_prompt || GPT_SYSTEM_PROMPT }, { role: "user", content: prompt }],
 
 
 
48
  reasoning_effort: "high"
49
  });
50
  res.json({ success: true, data: response.choices[0].message.content });
51
  }
52
  } catch (err) {
53
- res.status(500).json({ success: false, error: err.message });
 
54
  }
55
  });
56
 
 
57
  app.post('/api/stream', async (req, res) => {
58
- const { model, prompt, system_prompt, images } = req.body;
 
 
59
  res.setHeader('Content-Type', 'text/plain; charset=utf-8');
60
  res.setHeader('Transfer-Encoding', 'chunked');
61
  res.setHeader('X-Accel-Buffering', 'no');
@@ -63,56 +79,96 @@ app.post('/api/stream', async (req, res) => {
63
 
64
  try {
65
  if (model === "claude") {
 
66
  let contentBlock = [{ text: prompt }];
 
67
  if (images && images.length > 0) {
 
 
68
  const imageBlocks = images.map(imgStr => {
69
  const base64Data = imgStr.replace(/^data:image\/\w+;base64,/, "");
70
- return { image: { format: 'png', source: { bytes: Buffer.from(base64Data, 'base64') } } };
 
 
 
 
 
71
  });
72
  contentBlock = [...imageBlocks, ...contentBlock];
73
  }
 
74
  const command = new ConverseStreamCommand({
75
  modelId: "arn:aws:bedrock:us-east-1:106774395747:inference-profile/global.anthropic.claude-sonnet-4-6",
76
  system: [{ text: system_prompt || CLAUDE_SYSTEM_PROMPT }],
77
  messages: [{ role: "user", content: contentBlock }],
78
  inferenceConfig: { maxTokens: 48000, temperature: 1 },
79
- additionalModelRequestFields: { thinking: { type: "adaptive" }, output_config: { effort: "high" } }
 
 
 
80
  });
 
81
  const response = await bedrockClient.send(command);
 
82
  for await (const chunk of response.stream) {
83
  if (chunk.contentBlockDelta) {
84
  const delta = chunk.contentBlockDelta.delta;
85
- if (delta.reasoningContent?.text) res.write(`__THINK__${delta.reasoningContent.text}`);
86
- else if (delta.text) res.write(delta.text);
 
 
 
87
  }
88
  }
89
  res.end();
 
90
  } else {
91
- let messagesPayload = [{ role: "system", content: system_prompt || GPT_SYSTEM_PROMPT }];
 
 
 
 
 
92
  if (images && images.length > 0) {
93
- let userContent = [{ type: "text", text: prompt }];
94
- images.forEach(img => userContent.push({ type: "image_url", image_url: { url: img } }));
 
 
 
 
 
 
95
  messagesPayload.push({ role: "user", content: userContent });
96
  } else {
97
  messagesPayload.push({ role: "user", content: prompt });
98
  }
 
99
  const stream = await azureOpenAI.chat.completions.create({
100
  model: "gpt-5-mini",
101
  messages: messagesPayload,
102
  reasoning_effort: "high",
103
  stream: true,
104
  });
 
105
  for await (const chunk of stream) {
106
  const delta = chunk.choices[0]?.delta;
107
- if (delta?.reasoning_content) res.write(`__THINK__${delta.reasoning_content}`);
108
- else if (delta?.content) res.write(delta.content);
 
 
 
109
  }
110
  res.end();
111
  }
112
  } catch (err) {
 
113
  res.write(`ERROR: ${err.message}`);
114
  res.end();
115
  }
116
  });
117
 
118
- app.listen(PORT, '0.0.0.0', () => console.log(`Main AI Agents live on port ${PORT}`));
 
 
 
 
 
// Server port: honor the platform-provided PORT, fall back to 7860 locally.
const PORT = process.env.PORT || 7860;

// Global middleware: CORS for cross-origin callers, JSON body parsing with a
// raised limit so base64-encoded images fit in request bodies.
app.use(cors());
app.use(express.json({ limit: '50mb' })); // Increased limit for images

// --- SYSTEM PROMPT DEFINITIONS ---
// Default personas used when the caller does not supply `system_prompt`.
const CLAUDE_SYSTEM_PROMPT = "You are a pro. Provide elite, high-level technical responses.";
const GPT_SYSTEM_PROMPT = "You are a worker. Be concise, efficient, and get the job done.";

// Bedrock runtime client used for the Claude code paths below.
// NOTE(review): `http2Handler: undefined` does not look like a documented
// NodeHttpHandler option and is likely a no-op — confirm against the
// @smithy/node-http-handler API before removing it.
const bedrockClient = new BedrockRuntimeClient({
  region: "us-east-1",
  requestHandler: new NodeHttpHandler({
    http2Handler: undefined,
  }),
});
26
  const azureOpenAI = new OpenAI({
 
31
  });
32
 
33
  app.post('/api/generate', async (req, res) => {
34
+ const { model, prompt, system_prompt} = req.body;
35
+ console.log(`[TRAFFIC] Request for ${model}`);
36
+
37
  try {
38
  if (model === "claude") {
39
  const command = new ConverseCommand({
 
41
  system: [{ text: system_prompt || CLAUDE_SYSTEM_PROMPT }],
42
  messages: [{ role: "user", content: [{ text: prompt }] }],
43
  inferenceConfig: { maxTokens: 48000, temperature: 1 },
44
+ additionalModelRequestFields: {
45
+ thinking: { type: "adaptive" },
46
+ output_config: { effort: "high" }
47
+ }
48
  });
49
+
50
  const response = await bedrockClient.send(command);
51
  const text = response.output.message.content.find(b => b.text)?.text;
52
  res.json({ success: true, data: text });
53
  } else {
54
  const response = await azureOpenAI.chat.completions.create({
55
  model: "gpt-5-mini",
56
+ messages: [
57
+ { role: "system", content: system_prompt || GPT_SYSTEM_PROMPT },
58
+ { role: "user", content: prompt }
59
+ ],
60
  reasoning_effort: "high"
61
  });
62
  res.json({ success: true, data: response.choices[0].message.content });
63
  }
64
  } catch (err) {
65
+ console.error(`❌ [${model.toUpperCase()} ERROR]:`, err.name, err.message);
66
+ res.status(500).json({ success: false, error: `${err.name}: ${err.message}` });
67
  }
68
  });
69
 
70
+ // --- STREAMING ENDPOINT WITH IMAGE SUPPORT ---
71
  app.post('/api/stream', async (req, res) => {
72
+ const { model, prompt, system_prompt, images } = req.body; // Expect images array (base64)
73
+ console.log(`[STREAM] Request for ${model} ${images?.length ? 'with images' : ''}`);
74
+
75
  res.setHeader('Content-Type', 'text/plain; charset=utf-8');
76
  res.setHeader('Transfer-Encoding', 'chunked');
77
  res.setHeader('X-Accel-Buffering', 'no');
 
79
 
80
  try {
81
  if (model === "claude") {
82
+ // Construct Content Block for Claude
83
  let contentBlock = [{ text: prompt }];
84
+
85
  if (images && images.length > 0) {
86
+ // Prepend images to content block
87
+ // Bedrock expects pure base64 without data prefix
88
  const imageBlocks = images.map(imgStr => {
89
  const base64Data = imgStr.replace(/^data:image\/\w+;base64,/, "");
90
+ return {
91
+ image: {
92
+ format: 'png', // Assuming PNG or generic byte handling
93
+ source: { bytes: Buffer.from(base64Data, 'base64') }
94
+ }
95
+ };
96
  });
97
  contentBlock = [...imageBlocks, ...contentBlock];
98
  }
99
+
100
  const command = new ConverseStreamCommand({
101
  modelId: "arn:aws:bedrock:us-east-1:106774395747:inference-profile/global.anthropic.claude-sonnet-4-6",
102
  system: [{ text: system_prompt || CLAUDE_SYSTEM_PROMPT }],
103
  messages: [{ role: "user", content: contentBlock }],
104
  inferenceConfig: { maxTokens: 48000, temperature: 1 },
105
+ additionalModelRequestFields: {
106
+ thinking: { type: "adaptive" },
107
+ output_config: { effort: "high" }
108
+ }
109
  });
110
+
111
  const response = await bedrockClient.send(command);
112
+
113
  for await (const chunk of response.stream) {
114
  if (chunk.contentBlockDelta) {
115
  const delta = chunk.contentBlockDelta.delta;
116
+ if (delta.reasoningContent && delta.reasoningContent.text) {
117
+ res.write(`__THINK__${delta.reasoningContent.text}`);
118
+ } else if (delta.text) {
119
+ res.write(delta.text);
120
+ }
121
  }
122
  }
123
  res.end();
124
+
125
  } else {
126
+ // Construct Content Block for OpenAI
127
+ let messagesPayload = [
128
+ { role: "system", content: system_prompt || GPT_SYSTEM_PROMPT }
129
+ ];
130
+
131
+ let userContent = [];
132
  if (images && images.length > 0) {
133
+ // OpenAI supports mixed content array
134
+ userContent.push({ type: "text", text: prompt });
135
+ images.forEach(imgStr => {
136
+ userContent.push({
137
+ type: "image_url",
138
+ image_url: { url: imgStr } // OpenAI accepts data URI directly
139
+ });
140
+ });
141
  messagesPayload.push({ role: "user", content: userContent });
142
  } else {
143
  messagesPayload.push({ role: "user", content: prompt });
144
  }
145
+
146
  const stream = await azureOpenAI.chat.completions.create({
147
  model: "gpt-5-mini",
148
  messages: messagesPayload,
149
  reasoning_effort: "high",
150
  stream: true,
151
  });
152
+
153
  for await (const chunk of stream) {
154
  const delta = chunk.choices[0]?.delta;
155
+ if (delta?.reasoning_content) {
156
+ res.write(`__THINK__${delta.reasoning_content}`);
157
+ } else if (delta?.content) {
158
+ res.write(delta.content);
159
+ }
160
  }
161
  res.end();
162
  }
163
  } catch (err) {
164
+ console.error(`❌ [STREAM ERROR]:`, err.message);
165
  res.write(`ERROR: ${err.message}`);
166
  res.end();
167
  }
168
  });
169
 
170
// NOTE: a second `app.post('/api/stream', ...)` stub (replying `{ success: true }`)
// was registered here. Express dispatches a request to the FIRST matching route,
// so the stub could never run — it was unreachable dead code shadowed by the real
// streaming handler above, and has been removed.

// Start the HTTP server, binding 0.0.0.0 so the container/host can reach it.
app.listen(PORT, '0.0.0.0', () => console.log(`Main AI Agent live on port ${PORT}`));