ProximileAdmin committed on
Commit
f5de2df
·
verified ·
1 Parent(s): 9dc0abf

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +28 -18
app.py CHANGED
@@ -70,19 +70,27 @@ class ToolBase:
70
  ):
71
  self.json_name = programmatic_name
72
  self.json_description = passive_voice_description_of_function
 
 
 
 
 
 
 
 
 
 
 
73
  self.json_definition_of_function = {
74
  "type": "function",
75
- "function": {
76
- "name": self.json_name,
77
- "description": self.json_description,
78
- "parameters": {
79
- "type": "object",
80
- "properties": input_params,
81
- "required": required_params,
82
- }
83
- }
84
  }
 
85
  self.system_prompt_paragraph = active_voice_description_of_capability + " " + prescriptive_conditional
 
86
  def actual_function(self, **kwargs):
87
  raise NotImplementedError("Subclasses must implement this method.")
88
 
@@ -132,7 +140,7 @@ def search_arxiv_papers(
132
  'summary': entry.summary,
133
  'published': entry.published,
134
  'link': entry.link,
135
- 'primary_category': entry.tags[0]['term']
136
  }
137
  papers.append(paper)
138
 
@@ -166,12 +174,12 @@ class ArxivSearchTool(ToolBase):
166
  "max_results": {
167
  "type": "integer",
168
  "description": "Maximum number of results to return (default: 5)",
169
- "optional": True
170
  },
171
  "sort_by": {
172
  "type": "string",
173
  "description": "Sort criteria (e.g., 'relevance', 'lastUpdatedDate', 'submittedDate')",
174
- "optional": True
175
  }
176
  },
177
  required_params=["query"],
@@ -352,7 +360,7 @@ class WeatherAPITool(ToolBase):
352
  # Instance of the weather tool.
353
  weather_tool = WeatherAPITool()
354
 
355
- tool_objects_list = [arxiv_tool, nih_ref_snp_tool,weather_tool]
356
  system_prompt = build_sys_prompt(tool_objects_list)
357
  functions_dict = {t.json_name: t.actual_function for t in tool_objects_list}
358
 
@@ -410,7 +418,7 @@ def check_assistant_response_for_tool_calls(response):
410
  """Check if the LLM response contains a function call."""
411
  response = response.split(FUNCTION_EOT_STRING)[0].split(EOT_STRING)[0]
412
  for tool_name in functions_dict.keys():
413
- if f"\"{tool_name}\"" in response and "{" in response:
414
  response = "{" + "{".join(response.split("{")[1:])
415
  for _ in range(10):
416
  response = "}".join(response.split("}")[:-1]) + "}"
@@ -524,10 +532,12 @@ def user_conversation(user_message, chat_history, full_history):
524
  llm = LLM(max_model_len=32000)
525
 
526
  lgs("STARTING NEW CHAT")
 
 
527
  with gr.Blocks() as demo:
528
- gr.Markdown(f"<h2>Weather/Arxiv/SNP Multi-tool Calling Bot</h2>")
529
  chat_state = gr.State([])
530
- chatbot = gr.Chatbot(label="Chat with the multi-tool bot")
531
  user_input = gr.Textbox(
532
  lines=1,
533
  placeholder="Type your message here...",
@@ -560,5 +570,5 @@ with gr.Blocks() as demo:
560
  outputs=[user_input, chatbot, chat_state],
561
  queue=False
562
  )
563
- demo.launch()
564
- share_url = demo.share_url
 
70
  ):
71
  self.json_name = programmatic_name
72
  self.json_description = passive_voice_description_of_function
73
+
74
+ # Fix: Explicitly set additionalProperties to avoid Gradio JSON schema parsing issue
75
+ parameters_schema = {
76
+ "type": "object",
77
+ "properties": input_params,
78
+ "additionalProperties": False # Explicitly set to False
79
+ }
80
+
81
+ if required_params:
82
+ parameters_schema["required"] = required_params
83
+
84
  self.json_definition_of_function = {
85
  "type": "function",
86
+ "function": {
87
+ "name": self.json_name,
88
+ "description": self.json_description,
89
+ "parameters": parameters_schema
 
 
 
 
 
90
  }
91
+ }
92
  self.system_prompt_paragraph = active_voice_description_of_capability + " " + prescriptive_conditional
93
+
94
  def actual_function(self, **kwargs):
95
  raise NotImplementedError("Subclasses must implement this method.")
96
 
 
140
  'summary': entry.summary,
141
  'published': entry.published,
142
  'link': entry.link,
143
+ 'primary_category': entry.tags[0]['term'] if entry.tags else 'Unknown'
144
  }
145
  papers.append(paper)
146
 
 
174
  "max_results": {
175
  "type": "integer",
176
  "description": "Maximum number of results to return (default: 5)",
177
+ "default": 5
178
  },
179
  "sort_by": {
180
  "type": "string",
181
  "description": "Sort criteria (e.g., 'relevance', 'lastUpdatedDate', 'submittedDate')",
182
+ "default": "relevance"
183
  }
184
  },
185
  required_params=["query"],
 
360
  # Instance of the weather tool.
361
  weather_tool = WeatherAPITool()
362
 
363
+ tool_objects_list = [arxiv_tool, nih_ref_snp_tool, weather_tool]
364
  system_prompt = build_sys_prompt(tool_objects_list)
365
  functions_dict = {t.json_name: t.actual_function for t in tool_objects_list}
366
 
 
418
  """Check if the LLM response contains a function call."""
419
  response = response.split(FUNCTION_EOT_STRING)[0].split(EOT_STRING)[0]
420
  for tool_name in functions_dict.keys():
421
+ if f'"{tool_name}"' in response and "{" in response:
422
  response = "{" + "{".join(response.split("{")[1:])
423
  for _ in range(10):
424
  response = "}".join(response.split("}")[:-1]) + "}"
 
532
  llm = LLM(max_model_len=32000)
533
 
534
  lgs("STARTING NEW CHAT")
535
+
536
+ # Alternative fix: Use a simpler Gradio interface that doesn't trigger the JSON schema parsing issue
537
  with gr.Blocks() as demo:
538
+ gr.Markdown("<h2>Weather/Arxiv/SNP Multi-tool Calling Bot</h2>")
539
  chat_state = gr.State([])
540
+ chatbot = gr.Chatbot(label="Chat with the multi-tool bot", type="messages")
541
  user_input = gr.Textbox(
542
  lines=1,
543
  placeholder="Type your message here...",
 
570
  outputs=[user_input, chatbot, chat_state],
571
  queue=False
572
  )
573
+
574
+ demo.launch()