Fred808 committed on
Commit
d51d586
Β·
verified Β·
1 Parent(s): c810aba

Update webhook.js

Browse files
Files changed (1) hide show
  1. webhook.js +50 -82
webhook.js CHANGED
@@ -1,9 +1,8 @@
1
- // restaurantBot.js
2
-
3
- const express = require("express");
4
- const axios = require("axios");
5
- const { Configuration, OpenAI } = require("openai");
6
- require("dotenv").config();
7
 
8
  const router = express.Router();
9
 
@@ -17,7 +16,6 @@ const {
17
  NVIDIA_API_KEY, // Your NVIDIA secret key for LLM fallback
18
  } = process.env;
19
 
20
- // Validate Environment Variables
21
  if (
22
  !META_ACCESS_TOKEN ||
23
  !META_PHONE_NUMBER_ID ||
@@ -31,57 +29,42 @@ if (
31
  }
32
 
33
  // -----------------------------------------------------------------------------
34
- // Set up the OpenAI client for NVIDIA LLM fallback (if needed)
35
  // -----------------------------------------------------------------------------
36
- const llmConfig = new Configuration({
37
  apiKey: NVIDIA_API_KEY,
38
- // Use NVIDIA's integration endpoint:
39
- basePath: "https://integrate.api.nvidia.com/v1",
40
  });
41
- const llmOpenai = new OpenAIApi(llmConfig);
42
 
43
  /**
44
- * Call the LLM fallback directly using the OpenAI API configured for NVIDIA.
45
- * This function returns a Promise that resolves to the complete streamed response.
46
  */
47
  async function callLLMFallback(message) {
48
- return new Promise(async (resolve, reject) => {
49
- try {
50
- const response = await llmOpenai.createChatCompletion(
51
- {
52
- model: "meta/llama-3.1-405b-instruct",
53
- messages: [{ role: "user", content: message }],
54
- temperature: 0.2,
55
- top_p: 0.7,
56
- max_tokens: 1024,
57
- stream: true,
58
- },
59
- { responseType: "stream" }
60
- );
61
-
62
- let fullText = "";
63
- response.data.on("data", (chunk) => {
64
- fullText += chunk.toString();
65
- });
66
- response.data.on("end", () => {
67
- resolve(fullText);
68
- });
69
- response.data.on("error", (err) => {
70
- console.error("Error in LLM fallback stream:", err);
71
- reject(err);
72
- });
73
- } catch (error) {
74
- console.error("Error calling LLM fallback:", error);
75
- reject(error);
76
  }
77
- });
 
 
 
 
78
  }
79
 
80
  // -----------------------------------------------------------------------------
81
  // Helper Functions for calling Python API endpoints
82
  // -----------------------------------------------------------------------------
83
-
84
- // Forward a generic chat request to the Python chatbot endpoint and format response.
85
  async function getPythonChatResponse(userId, message) {
86
  try {
87
  console.log("Forwarding chat request to Python API...");
@@ -90,19 +73,14 @@ async function getPythonChatResponse(userId, message) {
90
  message: message,
91
  });
92
 
93
- // If the response data is an object, format it into a friendly string.
94
  if (typeof response.data === "object" && response.data !== null) {
95
  let result = response.data.response || "";
96
-
97
- // If there's a menu array, format it nicely.
98
  if (Array.isArray(response.data.menu)) {
99
  result += "\n\n*Menu:*\n";
100
  response.data.menu.forEach((item) => {
101
  result += `β€’ *${item.name}* - ${item.description} - ₦${item.price}\n`;
102
  });
103
  }
104
-
105
- // Append any follow-up text.
106
  if (response.data.follow_up) {
107
  result += "\n" + response.data.follow_up;
108
  }
@@ -115,7 +93,6 @@ async function getPythonChatResponse(userId, message) {
115
  }
116
  }
117
 
118
- // Retrieve the chat history for the given user from the Python API.
119
  async function getPythonChatHistory(userId) {
120
  try {
121
  console.log("Requesting chat history from Python API...");
@@ -127,7 +104,6 @@ async function getPythonChatHistory(userId) {
127
  }
128
  }
129
 
130
- // Retrieve order details from the Python API.
131
  async function getPythonOrderDetails(orderId) {
132
  try {
133
  console.log("Requesting order details from Python API...");
@@ -139,7 +115,6 @@ async function getPythonOrderDetails(orderId) {
139
  }
140
  }
141
 
142
- // Retrieve user profile from the Python API.
143
  async function getPythonUserProfile(userId) {
144
  try {
145
  console.log("Requesting user profile from Python API...");
@@ -151,7 +126,6 @@ async function getPythonUserProfile(userId) {
151
  }
152
  }
153
 
154
- // Retrieve analytics from the Python API.
155
  async function getPythonAnalytics() {
156
  try {
157
  console.log("Requesting analytics from Python API...");
@@ -166,7 +140,7 @@ async function getPythonAnalytics() {
166
  // -----------------------------------------------------------------------------
167
  // Utility: Log chat history locally (for debugging purposes)
168
  // -----------------------------------------------------------------------------
169
- const chatHistories = {}; // For debugging: { userId: [ {timestamp, direction, message}, ... ] }
170
  function logChat(userId, direction, message) {
171
  if (!chatHistories[userId]) {
172
  chatHistories[userId] = [];
@@ -205,34 +179,27 @@ router.post("/", async (req, res) => {
205
 
206
  if (body.object === "whatsapp_business_account") {
207
  const changes = body.entry[0]?.changes[0]?.value;
208
-
209
- // Handle incoming messages
210
  if (changes?.messages) {
211
  const messageObj = changes.messages[0];
212
  const { from, text } = messageObj;
213
  const userMessage = text?.body;
214
  console.log(`Message from ${from}: ${userMessage}`);
215
-
216
- // Log the incoming message locally for debugging
217
  logChat(from, "inbound", userMessage);
218
-
219
- // Generate a response based on the user's message
220
  const botResponse = await generateResponse(userMessage, from);
221
  console.log("Bot response:", botResponse);
222
  logChat(from, "outbound", botResponse);
223
-
224
- // Send the response back to the user via WhatsApp API.
225
  await sendMessage(from, botResponse);
226
  return res.status(200).send("EVENT_RECEIVED");
227
  }
228
-
229
- // Handle status updates (e.g., delivery receipts)
230
  if (changes?.statuses) {
231
  console.log("Status update received:", JSON.stringify(changes.statuses, null, 2));
232
  return res.status(200).send("EVENT_RECEIVED");
233
  }
234
  }
235
-
236
  console.log("No valid content found in webhook.");
237
  res.sendStatus(404);
238
  } catch (error) {
@@ -241,53 +208,54 @@ router.post("/", async (req, res) => {
241
  }
242
  });
243
 
 
 
 
 
244
  // -----------------------------------------------------------------------------
245
  // Enhanced Response Generator
246
  // -----------------------------------------------------------------------------
247
  async function generateResponse(message, from) {
248
  const lowerMessage = message.toLowerCase();
249
 
250
- // 1. Welcome and Main Menu
251
  if (lowerMessage === "hi" || lowerMessage === "hello" || lowerMessage.includes("welcome")) {
252
- return `πŸ‘‹ Hi there! Welcome to *Angelo Foods*! πŸ”πŸ•\nPlease choose an option:\n1️⃣ *View Menu*\n2️⃣ *Place an Order*\n3️⃣ *Payment Status*\n4️⃣ *My Profile*\n5️⃣ *Chat History*\n6️⃣ *Help & Support*\nSimply type the number or the option name.`;
253
  }
254
 
255
- // 2. Menu Display
256
  if (lowerMessage === "1" || (lowerMessage.includes("menu") && !lowerMessage.includes("order"))) {
257
- // Display the menu (with images and details)
258
- const menuResponse = await getPythonChatResponse(from, "menu");
259
- return menuResponse;
260
  }
261
 
262
- // 3. Order Flow – Triggered by multiple keywords
263
  if (
264
  lowerMessage === "2" ||
265
  lowerMessage.includes("order") ||
266
  lowerMessage.includes("buy") ||
267
  lowerMessage.includes("food")
268
  ) {
269
- // Check if the message exactly matches one of the dish names from the menu.
270
  let selectedDish = null;
271
- for (let item of menu_items) {
272
  if (lowerMessage.includes(item.name.toLowerCase())) {
273
  selectedDish = item.name;
274
  break;
275
  }
276
  }
277
- // If a dish is selected, pass it along as part of the order command.
278
  if (selectedDish) {
 
279
  return await getPythonChatResponse(from, `order: ${selectedDish}`);
280
  }
281
- // Otherwise, if user simply says "order", "buy", or "food", delegate to the order flow in Python API.
282
  return await getPythonChatResponse(from, "order");
283
  }
284
 
285
- // 4. Payment Status
286
  if (lowerMessage === "3" || lowerMessage.includes("payment")) {
287
  return await getPythonChatResponse(from, "payment");
288
  }
289
 
290
- // 5. User Profile
291
  if (lowerMessage === "4" || lowerMessage.includes("profile")) {
292
  const profile = await getPythonUserProfile(from);
293
  if (profile) {
@@ -297,7 +265,7 @@ async function generateResponse(message, from) {
297
  }
298
  }
299
 
300
- // 6. Chat History
301
  if (lowerMessage === "5" || lowerMessage.includes("chat history")) {
302
  const history = await getPythonChatHistory(from);
303
  if (history && history.length > 0) {
@@ -311,7 +279,7 @@ async function generateResponse(message, from) {
311
  }
312
  }
313
 
314
- // 7. Help & Support
315
  if (lowerMessage === "6" || lowerMessage.includes("help") || lowerMessage.includes("support")) {
316
  return `❓ *How can I help you?*\nType one of the following:\n- *Order Status* (e.g., "order tracking ORD-123456789")\n- *Contact Support*\n- *Main Menu* to see all options again.`;
317
  }
@@ -383,4 +351,4 @@ async function sendProactiveGreeting(userId) {
383
  return greeting;
384
  }
385
 
386
- module.exports = router;
 
1
+ import express from "express";
2
+ import axios from "axios";
3
+ import OpenAI from "openai";
4
+ import dotenv from "dotenv";
5
+ dotenv.config();
 
6
 
7
  const router = express.Router();
8
 
 
16
  NVIDIA_API_KEY, // Your NVIDIA secret key for LLM fallback
17
  } = process.env;
18
 
 
19
  if (
20
  !META_ACCESS_TOKEN ||
21
  !META_PHONE_NUMBER_ID ||
 
29
  }
30
 
31
  // -----------------------------------------------------------------------------
32
+ // Set up the OpenAI client for NVIDIA LLM fallback using the new format
33
  // -----------------------------------------------------------------------------
34
+ const openaiLLM = new OpenAI({
35
  apiKey: NVIDIA_API_KEY,
36
+ baseURL: "https://integrate.api.nvidia.com/v1",
 
37
  });
 
38
 
39
  /**
40
+ * Call the LLM fallback using the new syntax.
41
+ * This function streams the response and returns the full text.
42
  */
43
  async function callLLMFallback(message) {
44
+ try {
45
+ const completion = await openaiLLM.chat.completions.create({
46
+ model: "meta/llama-3.1-405b-instruct",
47
+ messages: [{ role: "user", content: message }],
48
+ temperature: 0.2,
49
+ top_p: 0.7,
50
+ max_tokens: 1024,
51
+ stream: true,
52
+ });
53
+
54
+ let fullText = "";
55
+ for await (const chunk of completion) {
56
+ fullText += chunk.choices[0]?.delta?.content || "";
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
57
  }
58
+ return fullText;
59
+ } catch (err) {
60
+ console.error("LLM fallback error:", err);
61
+ throw err;
62
+ }
63
  }
64
 
65
  // -----------------------------------------------------------------------------
66
  // Helper Functions for calling Python API endpoints
67
  // -----------------------------------------------------------------------------
 
 
68
  async function getPythonChatResponse(userId, message) {
69
  try {
70
  console.log("Forwarding chat request to Python API...");
 
73
  message: message,
74
  });
75
 
 
76
  if (typeof response.data === "object" && response.data !== null) {
77
  let result = response.data.response || "";
 
 
78
  if (Array.isArray(response.data.menu)) {
79
  result += "\n\n*Menu:*\n";
80
  response.data.menu.forEach((item) => {
81
  result += `β€’ *${item.name}* - ${item.description} - ₦${item.price}\n`;
82
  });
83
  }
 
 
84
  if (response.data.follow_up) {
85
  result += "\n" + response.data.follow_up;
86
  }
 
93
  }
94
  }
95
 
 
96
  async function getPythonChatHistory(userId) {
97
  try {
98
  console.log("Requesting chat history from Python API...");
 
104
  }
105
  }
106
 
 
107
  async function getPythonOrderDetails(orderId) {
108
  try {
109
  console.log("Requesting order details from Python API...");
 
115
  }
116
  }
117
 
 
118
  async function getPythonUserProfile(userId) {
119
  try {
120
  console.log("Requesting user profile from Python API...");
 
126
  }
127
  }
128
 
 
129
  async function getPythonAnalytics() {
130
  try {
131
  console.log("Requesting analytics from Python API...");
 
140
  // -----------------------------------------------------------------------------
141
  // Utility: Log chat history locally (for debugging purposes)
142
  // -----------------------------------------------------------------------------
143
+ const chatHistories = {};
144
  function logChat(userId, direction, message) {
145
  if (!chatHistories[userId]) {
146
  chatHistories[userId] = [];
 
179
 
180
  if (body.object === "whatsapp_business_account") {
181
  const changes = body.entry[0]?.changes[0]?.value;
 
 
182
  if (changes?.messages) {
183
  const messageObj = changes.messages[0];
184
  const { from, text } = messageObj;
185
  const userMessage = text?.body;
186
  console.log(`Message from ${from}: ${userMessage}`);
 
 
187
  logChat(from, "inbound", userMessage);
188
+
189
+ // Generate response
190
  const botResponse = await generateResponse(userMessage, from);
191
  console.log("Bot response:", botResponse);
192
  logChat(from, "outbound", botResponse);
193
+
194
+ // Send the response via WhatsApp API.
195
  await sendMessage(from, botResponse);
196
  return res.status(200).send("EVENT_RECEIVED");
197
  }
 
 
198
  if (changes?.statuses) {
199
  console.log("Status update received:", JSON.stringify(changes.statuses, null, 2));
200
  return res.status(200).send("EVENT_RECEIVED");
201
  }
202
  }
 
203
  console.log("No valid content found in webhook.");
204
  res.sendStatus(404);
205
  } catch (error) {
 
208
  }
209
  });
210
 
211
+ // -----------------------------------------------------------------------------
212
+ // Order Flow is delegated to the Python API entirely, so no local order flow
213
+ // -----------------------------------------------------------------------------
214
+
215
  // -----------------------------------------------------------------------------
216
  // Enhanced Response Generator
217
  // -----------------------------------------------------------------------------
218
  async function generateResponse(message, from) {
219
  const lowerMessage = message.toLowerCase();
220
 
221
+ // 1. Welcome/Main Menu
222
  if (lowerMessage === "hi" || lowerMessage === "hello" || lowerMessage.includes("welcome")) {
223
+ return `πŸ‘‹ Hi there! Welcome to *FoodieBot*! πŸ”πŸ•\nPlease choose an option:\n1️⃣ *View Menu*\n2️⃣ *Place an Order*\n3️⃣ *Payment Status*\n4️⃣ *My Profile*\n5️⃣ *Chat History*\n6️⃣ *Help & Support*\nSimply type the number or the option name.`;
224
  }
225
 
226
+ // 2. Menu Display – Option "1" or any message that includes "menu" (but not "order")
227
  if (lowerMessage === "1" || (lowerMessage.includes("menu") && !lowerMessage.includes("order"))) {
228
+ return await getPythonChatResponse(from, "menu");
 
 
229
  }
230
 
231
+ // 3. Order Flow – Trigger on multiple keywords: "2", "order", "buy", "food"
232
  if (
233
  lowerMessage === "2" ||
234
  lowerMessage.includes("order") ||
235
  lowerMessage.includes("buy") ||
236
  lowerMessage.includes("food")
237
  ) {
238
+ // Check if the user's message includes a dish name from the menu.
239
  let selectedDish = null;
240
+ for (let item of menu_items) { // Assumes menu_items is defined as an array of dish objects.
241
  if (lowerMessage.includes(item.name.toLowerCase())) {
242
  selectedDish = item.name;
243
  break;
244
  }
245
  }
 
246
  if (selectedDish) {
247
+ // Prepend the dish name to the order command so the Python API can handle it.
248
  return await getPythonChatResponse(from, `order: ${selectedDish}`);
249
  }
 
250
  return await getPythonChatResponse(from, "order");
251
  }
252
 
253
+ // 4. Payment Status – Option "3" or keyword "payment"
254
  if (lowerMessage === "3" || lowerMessage.includes("payment")) {
255
  return await getPythonChatResponse(from, "payment");
256
  }
257
 
258
+ // 5. User Profile – Option "4" or keyword "profile"
259
  if (lowerMessage === "4" || lowerMessage.includes("profile")) {
260
  const profile = await getPythonUserProfile(from);
261
  if (profile) {
 
265
  }
266
  }
267
 
268
+ // 6. Chat History – Option "5" or keyword "chat history"
269
  if (lowerMessage === "5" || lowerMessage.includes("chat history")) {
270
  const history = await getPythonChatHistory(from);
271
  if (history && history.length > 0) {
 
279
  }
280
  }
281
 
282
+ // 7. Help & Support – Option "6" or keyword "help" / "support"
283
  if (lowerMessage === "6" || lowerMessage.includes("help") || lowerMessage.includes("support")) {
284
  return `❓ *How can I help you?*\nType one of the following:\n- *Order Status* (e.g., "order tracking ORD-123456789")\n- *Contact Support*\n- *Main Menu* to see all options again.`;
285
  }
 
351
  return greeting;
352
  }
353
 
354
+ export default router;