Ayush239 committed on
Commit
f0a0781
·
verified ·
1 Parent(s): e7e5eff

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +41 -31
app.py CHANGED
@@ -1,6 +1,6 @@
1
  # app.py — NVIDIA NIM + Tool Calling + Gradio Chatbot
2
- # Model: meta/llama3-8b-instruct (supports OpenAI-style tools)
3
- # Works on HuggingFace Spaces with your nvapi-... key.
4
 
5
  import os
6
  import json
@@ -12,7 +12,7 @@ import gradio as gr
12
# ===============================
# ENVIRONMENT CONFIG
# ===============================
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")  # Your nvapi-XXXX key
BASE_URL = "https://integrate.api.nvidia.com/v1"  # NVIDIA NIM OpenAI-compatible endpoint
MODEL = "meta/llama3-8b-instruct"  # NIM model id used for every chat call
18
 
@@ -47,14 +47,19 @@ def push(text):
47
  # TOOL IMPLEMENTATIONS
48
  # ===============================
49
def record_user_details(email, name="Name not provided", notes="not provided"):
    """Record a captured lead (notify via push) and echo the details back.

    Returns a dict the tool-calling loop serializes into the 'tool' message.
    """
    details = {"status": "ok", "email": email, "name": name, "notes": notes}
    push(f"New lead → {name} | {email} | Notes: {notes}")
    return details
52
 
53
def record_unknown_question(question):
    """Record a question the assistant could not answer (notify via push)."""
    result = {"status": "ok", "question": question}
    push(f"Unknown question recorded: {question}")
    return result
56
 
57
 
 
 
 
 
 
58
  # ===============================
59
  # TOOL JSON DEFINITIONS
60
  # ===============================
@@ -63,27 +68,27 @@ tools = [
63
  "type": "function",
64
  "function": {
65
  "name": "record_user_details",
66
- "description": "Record user interest and their email.",
67
  "parameters": {
68
  "type": "object",
69
  "properties": {
70
  "email": {"type": "string"},
71
  "name": {"type": "string"},
72
- "notes": {"type": "string"}
73
  },
74
  "required": ["email"]
75
  }
76
- },
77
  },
78
  {
79
  "type": "function",
80
  "function": {
81
  "name": "record_unknown_question",
82
- "description": "Record any question the assistant could not answer.",
83
  "parameters": {
84
  "type": "object",
85
  "properties": {
86
- "question": {"type": "string"}
87
  },
88
  "required": ["question"]
89
  }
@@ -91,9 +96,6 @@ tools = [
91
  }
92
  ]
93
 
94
# Make both tool functions resolvable through globals() by name — the
# dispatcher looks them up with globals().get(<tool name>).
globals().update(
    record_user_details=record_user_details,
    record_unknown_question=record_unknown_question,
)
96
-
97
 
98
  # ===============================
99
  # MAIN ASSISTANT CLASS
@@ -104,11 +106,11 @@ class Me:
104
  self.summary = ""
105
  self.linkedin_text = ""
106
 
107
- # Load summary text
108
  if os.path.exists("me/summary.txt"):
109
  self.summary = open("me/summary.txt", "r", encoding="utf-8").read()
110
 
111
- # Load LinkedIn PDF (optional)
112
  pdf_path = "me/Ayush_linkdin.pdf"
113
  if os.path.exists(pdf_path):
114
  text = []
@@ -121,21 +123,21 @@ class Me:
121
 
122
def system_prompt(self):
    """Build the system message: persona instructions plus profile context.

    Interpolates self.name, self.summary and self.linkedin_text into the
    prompt template.
    """
    return f"""
You are acting as {self.name}. Answer questions professionally about Ayush's skills,
career, background, and experience.

If you DON'T know something → call the tool:
- record_unknown_question

If the user shows interest ask for an email and call:
- record_user_details

Be polite, confident, friendly and helpful.

### Summary:
{self.summary}

### LinkedIn Data:
{self.linkedin_text}
"""
141
 
@@ -145,12 +147,17 @@ Be polite, confident, friendly and helpful.
145
  def chat(self, message, history):
146
  messages = [{"role": "system", "content": self.system_prompt()}]
147
 
148
- # Convert history from Gradio into chat API format
149
- for user_msg, bot_msg in history:
150
- if user_msg:
151
- messages.append({"role": "user", "content": user_msg})
152
- if bot_msg:
153
- messages.append({"role": "assistant", "content": bot_msg})
 
 
 
 
 
154
 
155
  messages.append({"role": "user", "content": message})
156
 
@@ -169,6 +176,8 @@ Be polite, confident, friendly and helpful.
169
 
170
  # ---- TOOL CALL ----
171
  if finish == "tool_calls":
 
 
172
  for tool_call in msg.tool_calls:
173
  func = tool_call.function
174
  name = func.name
@@ -177,13 +186,14 @@ Be polite, confident, friendly and helpful.
177
  tool_fn = globals().get(name)
178
  result = tool_fn(**args)
179
 
180
- messages.append(msg.dict()) # append tool call request
181
  messages.append({
182
  "role": "tool",
183
  "tool_call_id": tool_call.id,
184
  "content": json.dumps(result)
185
  })
186
- continue # loop again and let model respond
 
 
187
 
188
  # ---- NORMAL RESPONSE ----
189
  return msg.content
 
1
  # app.py — NVIDIA NIM + Tool Calling + Gradio Chatbot
2
+ # Model: meta/llama3-8b-instruct (supports OpenAI-style tools)
3
+ # Works with your nvapi-xxxx key on HuggingFace Spaces.
4
 
5
  import os
6
  import json
 
12
# ===============================
# ENVIRONMENT CONFIG
# ===============================
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")  # your nvapi-xxxx key
BASE_URL = "https://integrate.api.nvidia.com/v1"  # NVIDIA NIM OpenAI-compatible endpoint
MODEL = "meta/llama3-8b-instruct"  # NIM model id used for every chat call
18
 
 
47
  # TOOL IMPLEMENTATIONS
48
  # ===============================
49
def record_user_details(email, name="Name not provided", notes="not provided"):
    """Record a captured lead (notify via push) and echo the details back.

    Returns a dict the tool-calling loop serializes into the 'tool' message.
    """
    details = {"status": "ok", "email": email, "name": name, "notes": notes}
    push(f"Lead captured → {name} | {email} | Notes: {notes}")
    return details
52
 
53
def record_unknown_question(question):
    """Record a question the assistant could not answer (notify via push)."""
    result = {"status": "ok", "question": question}
    push(f"Unknown question: {question}")
    return result
56
 
57
 
58
# Register tools globally so the dispatcher can resolve them by name via
# globals().get(<tool name>). They are already module-level functions, so
# this is kept for explicitness.
for _tool in (record_user_details, record_unknown_question):
    globals()[_tool.__name__] = _tool
61
+
62
+
63
  # ===============================
64
  # TOOL JSON DEFINITIONS
65
  # ===============================
 
68
  "type": "function",
69
  "function": {
70
  "name": "record_user_details",
71
+ "description": "Record user's interest and email.",
72
  "parameters": {
73
  "type": "object",
74
  "properties": {
75
  "email": {"type": "string"},
76
  "name": {"type": "string"},
77
+ "notes": {"type": "string"},
78
  },
79
  "required": ["email"]
80
  }
81
+ }
82
  },
83
  {
84
  "type": "function",
85
  "function": {
86
  "name": "record_unknown_question",
87
+ "description": "Record any question the assistant cannot answer.",
88
  "parameters": {
89
  "type": "object",
90
  "properties": {
91
+ "question": {"type": "string"},
92
  },
93
  "required": ["question"]
94
  }
 
96
  }
97
  ]
98
 
 
 
 
99
 
100
  # ===============================
101
  # MAIN ASSISTANT CLASS
 
106
  self.summary = ""
107
  self.linkedin_text = ""
108
 
109
+ # Load summary file
110
  if os.path.exists("me/summary.txt"):
111
  self.summary = open("me/summary.txt", "r", encoding="utf-8").read()
112
 
113
+ # Load PDF if exists
114
  pdf_path = "me/Ayush_linkdin.pdf"
115
  if os.path.exists(pdf_path):
116
  text = []
 
123
 
124
def system_prompt(self):
    """Build the system message: persona instructions plus profile context.

    Interpolates self.name, self.summary and self.linkedin_text into the
    prompt template.
    """
    return f"""
You are acting as {self.name}. You answer questions about his background,
skills, experience, portfolio, and career.

If you DO NOT know something:
Use the tool: record_unknown_question

If the user is interested in working with Ayush:
Ask for their email and call: record_user_details

Be friendly, confident, and professional.

### Summary:
{self.summary}

### LinkedIn Extract:
{self.linkedin_text}
"""
143
 
 
147
  def chat(self, message, history):
148
  messages = [{"role": "system", "content": self.system_prompt()}]
149
 
150
+ # FIXED: Handle both tuple-history & dict-history
151
+ for item in history:
152
+ if isinstance(item, (list, tuple)) and len(item) == 2:
153
+ user_msg, bot_msg = item
154
+ if user_msg:
155
+ messages.append({"role": "user", "content": user_msg})
156
+ if bot_msg:
157
+ messages.append({"role": "assistant", "content": bot_msg})
158
+
159
+ elif isinstance(item, dict) and "role" in item and "content" in item:
160
+ messages.append({"role": item["role"], "content": item["content"]})
161
 
162
  messages.append({"role": "user", "content": message})
163
 
 
176
 
177
  # ---- TOOL CALL ----
178
  if finish == "tool_calls":
179
+ messages.append(msg.dict())
180
+
181
  for tool_call in msg.tool_calls:
182
  func = tool_call.function
183
  name = func.name
 
186
  tool_fn = globals().get(name)
187
  result = tool_fn(**args)
188
 
 
189
  messages.append({
190
  "role": "tool",
191
  "tool_call_id": tool_call.id,
192
  "content": json.dumps(result)
193
  })
194
+
195
+ # Let model continue after tool execution
196
+ continue
197
 
198
  # ---- NORMAL RESPONSE ----
199
  return msg.content