Spaces:
Runtime error
Runtime error
Commit ·
eb004cb
1
Parent(s): 416c750
removed some unnecessary logs
Browse files
app.py
CHANGED
|
@@ -77,7 +77,7 @@ async def chat_fn(user_input: str, history: dict, input_graph_state: dict, uuid:
|
|
| 77 |
ordered properly. GraphProcessingState.messages is used as history instead
|
| 78 |
"""
|
| 79 |
try:
|
| 80 |
-
logger.info(f"Prompt: {prompt}")
|
| 81 |
input_graph_state["tools_enabled"] = {
|
| 82 |
"download_website_text": download_website_text_enabled,
|
| 83 |
"tavily_search_results_json": search_enabled,
|
|
@@ -402,7 +402,7 @@ def update_3d_models_on_load():
|
|
| 402 |
"""
|
| 403 |
Gets the latest 3D model to display and updates the examples radio list on app load.
|
| 404 |
"""
|
| 405 |
-
print("\n
|
| 406 |
sorted_files = get_sorted_3d_model_files()
|
| 407 |
|
| 408 |
latest_model = str(sorted_files[0]) if sorted_files else None
|
|
@@ -498,7 +498,7 @@ def update_generated_image_on_state_change(state: dict):
|
|
| 498 |
# return gr.skip()
|
| 499 |
|
| 500 |
def update_build_plan_display(state: dict):
|
| 501 |
-
print('\nπ
|
| 502 |
"""
|
| 503 |
Searches the message history for the build plan and updates the Markdown display.
|
| 504 |
"""
|
|
@@ -528,7 +528,7 @@ def update_build_plan_display(state: dict):
|
|
| 528 |
|
| 529 |
print(f"π Found and displaying Build Plan.")
|
| 530 |
return gr.update(value=content) # Update the Markdown component
|
| 531 |
-
print('
|
| 532 |
|
| 533 |
return gr.skip() # Return skip if no plan is found
|
| 534 |
|
|
@@ -620,7 +620,7 @@ function toggleSidebarVisibility() {
|
|
| 620 |
"""
|
| 621 |
|
| 622 |
if __name__ == "__main__":
|
| 623 |
-
logger.info("
|
| 624 |
with gr.Blocks(title="DIYO is here",fill_height=True, css=CSS, elem_id="main-app") as demo:
|
| 625 |
is_new_user_for_greeting = gr.State(True)
|
| 626 |
chatbot_message_storage = gr.State([])
|
|
@@ -1055,10 +1055,10 @@ if __name__ == "__main__":
|
|
| 1055 |
existing_chat_history = []
|
| 1056 |
updated_chat_history = [greeting_entry] + existing_chat_history
|
| 1057 |
updated_is_new_user_flag = False
|
| 1058 |
-
logger.info("
|
| 1059 |
return updated_chat_history, updated_is_new_user_flag
|
| 1060 |
else:
|
| 1061 |
-
logger.info("
|
| 1062 |
return existing_chat_history, False
|
| 1063 |
|
| 1064 |
@demo.load(inputs=[chatbot_message_storage], outputs=[chat_interface.chatbot_value])
|
|
|
|
| 77 |
ordered properly. GraphProcessingState.messages is used as history instead
|
| 78 |
"""
|
| 79 |
try:
|
| 80 |
+
# logger.info(f"Prompt: {prompt}")
|
| 81 |
input_graph_state["tools_enabled"] = {
|
| 82 |
"download_website_text": download_website_text_enabled,
|
| 83 |
"tavily_search_results_json": search_enabled,
|
|
|
|
| 402 |
"""
|
| 403 |
Gets the latest 3D model to display and updates the examples radio list on app load.
|
| 404 |
"""
|
| 405 |
+
print("\nπ±π± Loading generated 3d models")
|
| 406 |
sorted_files = get_sorted_3d_model_files()
|
| 407 |
|
| 408 |
latest_model = str(sorted_files[0]) if sorted_files else None
|
|
|
|
| 498 |
# return gr.skip()
|
| 499 |
|
| 500 |
def update_build_plan_display(state: dict):
|
| 501 |
+
print('\nπ Loading build plan')
|
| 502 |
"""
|
| 503 |
Searches the message history for the build plan and updates the Markdown display.
|
| 504 |
"""
|
|
|
|
| 528 |
|
| 529 |
print(f"π Found and displaying Build Plan.")
|
| 530 |
return gr.update(value=content) # Update the Markdown component
|
| 531 |
+
print('π Build plan is not generated yet')
|
| 532 |
|
| 533 |
return gr.skip() # Return skip if no plan is found
|
| 534 |
|
|
|
|
| 620 |
"""
|
| 621 |
|
| 622 |
if __name__ == "__main__":
|
| 623 |
+
logger.info("Loading Interface")
|
| 624 |
with gr.Blocks(title="DIYO is here",fill_height=True, css=CSS, elem_id="main-app") as demo:
|
| 625 |
is_new_user_for_greeting = gr.State(True)
|
| 626 |
chatbot_message_storage = gr.State([])
|
|
|
|
| 1055 |
existing_chat_history = []
|
| 1056 |
updated_chat_history = [greeting_entry] + existing_chat_history
|
| 1057 |
updated_is_new_user_flag = False
|
| 1058 |
+
logger.info("starting new chat")
|
| 1059 |
return updated_chat_history, updated_is_new_user_flag
|
| 1060 |
else:
|
| 1061 |
+
logger.info("loading existing chat")
|
| 1062 |
return existing_chat_history, False
|
| 1063 |
|
| 1064 |
@demo.load(inputs=[chatbot_message_storage], outputs=[chat_interface.chatbot_value])
|
graph.py
CHANGED
|
@@ -275,9 +275,6 @@ async def guidance_node(state: GraphProcessingState, config=None):
|
|
| 275 |
|
| 276 |
# Identify completed and incomplete stages
|
| 277 |
incomplete = [stage for stage in stage_order if not getattr(state, f"{stage}_complete", False)]
|
| 278 |
-
|
| 279 |
-
for item in incomplete:
|
| 280 |
-
print("stage item",item)
|
| 281 |
|
| 282 |
# Determine the next stage
|
| 283 |
if not incomplete:
|
|
@@ -327,7 +324,7 @@ def guidance_routing(state: GraphProcessingState) -> str:
|
|
| 327 |
elif next_stage == "product_searching":
|
| 328 |
return "END"
|
| 329 |
elif next_stage == "purchasing":
|
| 330 |
-
print('\n
|
| 331 |
|
| 332 |
print(f"Prompt: {state.prompt}")
|
| 333 |
print(f"Prompt: {state.prompt}")
|
|
@@ -360,6 +357,7 @@ def guidance_routing(state: GraphProcessingState) -> str:
|
|
| 360 |
return "END"
|
| 361 |
elif next_stage == "end":
|
| 362 |
print('\n graph was forced to move to end because some error please analyze the state')
|
|
|
|
| 363 |
for message in state.messages:
|
| 364 |
print(f'\ncomplete message', message)
|
| 365 |
if isinstance(message, HumanMessage):
|
|
@@ -535,7 +533,7 @@ async def brainstorming_node(state: GraphProcessingState, config=None):
|
|
| 535 |
*openai_messages # history you've already converted
|
| 536 |
]
|
| 537 |
|
| 538 |
-
print('open ai formatted', openai_messages_with_prompt[-1])
|
| 539 |
|
| 540 |
for msg in openai_messages_with_prompt:
|
| 541 |
print(msg)
|
|
|
|
| 275 |
|
| 276 |
# Identify completed and incomplete stages
|
| 277 |
incomplete = [stage for stage in stage_order if not getattr(state, f"{stage}_complete", False)]
|
|
|
|
|
|
|
|
|
|
| 278 |
|
| 279 |
# Determine the next stage
|
| 280 |
if not incomplete:
|
|
|
|
| 324 |
elif next_stage == "product_searching":
|
| 325 |
return "END"
|
| 326 |
elif next_stage == "purchasing":
|
| 327 |
+
print('\n Exit status 0 )')
|
| 328 |
|
| 329 |
print(f"Prompt: {state.prompt}")
|
| 330 |
print(f"Prompt: {state.prompt}")
|
|
|
|
| 357 |
return "END"
|
| 358 |
elif next_stage == "end":
|
| 359 |
print('\n graph was forced to move to end because some error please analyze the state')
|
| 360 |
+
print('\n Exit status 1 )')
|
| 361 |
for message in state.messages:
|
| 362 |
print(f'\ncomplete message', message)
|
| 363 |
if isinstance(message, HumanMessage):
|
|
|
|
| 533 |
*openai_messages # history you've already converted
|
| 534 |
]
|
| 535 |
|
| 536 |
+
# print('open ai formatted', openai_messages_with_prompt[-1])
|
| 537 |
|
| 538 |
for msg in openai_messages_with_prompt:
|
| 539 |
print(msg)
|