Update app.py
Browse files
app.py
CHANGED
|
@@ -207,11 +207,21 @@ def Chat_Message(history, messages1):
|
|
| 207 |
|
| 208 |
message=HumanMessage(content=history[-1][0])
|
| 209 |
messages1.append(message)
|
| 210 |
-
response = chat_model.invoke(messages1)
|
| 211 |
-
messages1.append(AIMessage(content=response.content))
|
| 212 |
|
| 213 |
if len(messages1) >= 8:
|
| 214 |
messages1 = messages1[-8:]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 215 |
|
| 216 |
history[-1][1] = ""
|
| 217 |
for character in response.content:
|
|
@@ -245,7 +255,16 @@ def Internet_Search(history, messages2):
|
|
| 245 |
if len(messages2) >= 4:
|
| 246 |
messages2 = messages2[-4:]
|
| 247 |
|
| 248 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 249 |
messages2.append(AIMessage(content=response.content))
|
| 250 |
|
| 251 |
history[-1][1] = ""
|
|
@@ -272,7 +291,16 @@ def Chart_Generator(history, messages3):
|
|
| 272 |
if len(messages3) >= 6:
|
| 273 |
messages3 = messages3[-6:]
|
| 274 |
|
| 275 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 276 |
messages3.append(AIMessage(content=response.content))
|
| 277 |
|
| 278 |
combined_content = f'{image_html}<br>{response.content}'
|
|
@@ -286,7 +314,16 @@ def Chart_Generator(history, messages3):
|
|
| 286 |
if len(messages3) >= 6:
|
| 287 |
messages3 = messages3[-6:]
|
| 288 |
|
| 289 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 290 |
messages3.append(AIMessage(content=response.content))
|
| 291 |
|
| 292 |
combined_content=response.content
|
|
@@ -334,7 +371,16 @@ def Link_Scratch(history, messages4):
|
|
| 334 |
if len(messages4) >= 2:
|
| 335 |
messages4 = messages4[-2:]
|
| 336 |
|
| 337 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 338 |
messages4.append(AIMessage(content=response.content))
|
| 339 |
|
| 340 |
response_message = response.content
|
|
@@ -392,7 +438,17 @@ def File_Interact(history, filepath, messages5):
|
|
| 392 |
if len(messages5) >= 1:
|
| 393 |
messages5 = messages5[-1:]
|
| 394 |
|
| 395 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 396 |
messages5.append(AIMessage(content=response.content))
|
| 397 |
|
| 398 |
response_message = response.content
|
|
@@ -430,7 +486,16 @@ def Explore_WebSite(history, messages6):
|
|
| 430 |
if len(messages6) >= 2:
|
| 431 |
messages6 = messages6[-2:]
|
| 432 |
|
| 433 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 434 |
messages6.append(AIMessage(content=response.content))
|
| 435 |
|
| 436 |
history[-1][1] = ""
|
|
|
|
| 207 |
|
| 208 |
message=HumanMessage(content=history[-1][0])
|
| 209 |
messages1.append(message)
|
|
|
|
|
|
|
| 210 |
|
| 211 |
if len(messages1) >= 8:
|
| 212 |
messages1 = messages1[-8:]
|
| 213 |
+
|
| 214 |
+
try:
|
| 215 |
+
response = chat_model.invoke(messages1)
|
| 216 |
+
except Exception as e:
|
| 217 |
+
error_message = str(e)
|
| 218 |
+
index = error_message.find("Input validation error:")
|
| 219 |
+
if index != -1:
|
| 220 |
+
raise gr.Error(error_message[index:].strip()) from e
|
| 221 |
+
else:
|
| 222 |
+
raise gr.Error("Error occurred during response") from e
|
| 223 |
+
|
| 224 |
+
messages1.append(AIMessage(content=response.content))
|
| 225 |
|
| 226 |
history[-1][1] = ""
|
| 227 |
for character in response.content:
|
|
|
|
| 255 |
if len(messages2) >= 4:
|
| 256 |
messages2 = messages2[-4:]
|
| 257 |
|
| 258 |
+
try:
|
| 259 |
+
response = chat_model.invoke(messages2)
|
| 260 |
+
except Exception as e:
|
| 261 |
+
error_message = str(e)
|
| 262 |
+
index = error_message.find("Input validation error:")
|
| 263 |
+
if index != -1:
|
| 264 |
+
raise gr.Error(error_message[index:].strip()) from e
|
| 265 |
+
else:
|
| 266 |
+
raise gr.Error("Error occurred during response") from e
|
| 267 |
+
|
| 268 |
messages2.append(AIMessage(content=response.content))
|
| 269 |
|
| 270 |
history[-1][1] = ""
|
|
|
|
| 291 |
if len(messages3) >= 6:
|
| 292 |
messages3 = messages3[-6:]
|
| 293 |
|
| 294 |
+
try:
|
| 295 |
+
response = chat_model.invoke(messages3)
|
| 296 |
+
except Exception as e:
|
| 297 |
+
error_message = str(e)
|
| 298 |
+
index = error_message.find("Input validation error:")
|
| 299 |
+
if index != -1:
|
| 300 |
+
raise gr.Error(error_message[index:].strip()) from e
|
| 301 |
+
else:
|
| 302 |
+
raise gr.Error("Error occurred during response") from e
|
| 303 |
+
|
| 304 |
messages3.append(AIMessage(content=response.content))
|
| 305 |
|
| 306 |
combined_content = f'{image_html}<br>{response.content}'
|
|
|
|
| 314 |
if len(messages3) >= 6:
|
| 315 |
messages3 = messages3[-6:]
|
| 316 |
|
| 317 |
+
try:
|
| 318 |
+
response = chat_model.invoke(messages3)
|
| 319 |
+
except Exception as e:
|
| 320 |
+
error_message = str(e)
|
| 321 |
+
index = error_message.find("Input validation error:")
|
| 322 |
+
if index != -1:
|
| 323 |
+
raise gr.Error(error_message[index:].strip()) from e
|
| 324 |
+
else:
|
| 325 |
+
raise gr.Error("Error occurred during response") from e
|
| 326 |
+
|
| 327 |
messages3.append(AIMessage(content=response.content))
|
| 328 |
|
| 329 |
combined_content=response.content
|
|
|
|
| 371 |
if len(messages4) >= 2:
|
| 372 |
messages4 = messages4[-2:]
|
| 373 |
|
| 374 |
+
try:
|
| 375 |
+
response = chat_model.invoke(messages4)
|
| 376 |
+
except Exception as e:
|
| 377 |
+
error_message = str(e)
|
| 378 |
+
index = error_message.find("Input validation error:")
|
| 379 |
+
if index != -1:
|
| 380 |
+
raise gr.Error(error_message[index:].strip()) from e
|
| 381 |
+
else:
|
| 382 |
+
raise gr.Error("Error occurred during response") from e
|
| 383 |
+
|
| 384 |
messages4.append(AIMessage(content=response.content))
|
| 385 |
|
| 386 |
response_message = response.content
|
|
|
|
| 438 |
if len(messages5) >= 1:
|
| 439 |
messages5 = messages5[-1:]
|
| 440 |
|
| 441 |
+
try:
|
| 442 |
+
response = chat_model.invoke(messages5)
|
| 443 |
+
except Exception as e:
|
| 444 |
+
error_message = str(e)
|
| 445 |
+
index = error_message.find("Input validation error:")
|
| 446 |
+
if index != -1:
|
| 447 |
+
raise gr.Error(error_message[index:].strip()) from e
|
| 448 |
+
else:
|
| 449 |
+
raise gr.Error("Error occurred during response") from e
|
| 450 |
+
|
| 451 |
+
|
| 452 |
messages5.append(AIMessage(content=response.content))
|
| 453 |
|
| 454 |
response_message = response.content
|
|
|
|
| 486 |
if len(messages6) >= 2:
|
| 487 |
messages6 = messages6[-2:]
|
| 488 |
|
| 489 |
+
try:
|
| 490 |
+
response = chat_model.invoke(messages6)
|
| 491 |
+
except Exception as e:
|
| 492 |
+
error_message = str(e)
|
| 493 |
+
index = error_message.find("Input validation error:")
|
| 494 |
+
if index != -1:
|
| 495 |
+
raise gr.Error(error_message[index:].strip()) from e
|
| 496 |
+
else:
|
| 497 |
+
raise gr.Error("Error occurred during response") from e
|
| 498 |
+
|
| 499 |
messages6.append(AIMessage(content=response.content))
|
| 500 |
|
| 501 |
history[-1][1] = ""
|