Spaces — status: Runtime error

Commit: "Update Gradio_UI.py"
Files changed: Gradio_UI.py (+4 lines, -3 lines)

Gradio_UI.py  [CHANGED]
@@ -142,8 +142,9 @@ def stream_to_gradio(

Old version (removed lines marked with `-`):

 142      for step_log in agent.run(task, stream=True, reset=reset_agent_memory, additional_args=additional_args):
 143          # Track tokens if model provides them
 144          if hasattr(agent.model, "last_input_token_count"):
 145 -            total_input_tokens += agent.model.last_input_token_count
 146 -            total_output_tokens += agent.model.last_output_token_count
 147          if isinstance(step_log, ActionStep):
 148              step_log.input_token_count = agent.model.last_input_token_count
 149              step_log.output_token_count = agent.model.last_output_token_count

(Note: the content of the two removed lines 145-146 was lost in extraction; it is reconstructed here from the added lines in the new version, which differ only by the `or 0` fallback.)
@@ -293,4 +294,4 @@ class GradioUI:

Old version (removed line marked with `-`):

 293      demo.launch(debug=True, share=True, **kwargs)
 294
 295
 296 -__all__ = ["stream_to_gradio", "GradioUI"]
New version (added lines marked with `+`):

 142      for step_log in agent.run(task, stream=True, reset=reset_agent_memory, additional_args=additional_args):
 143          # Track tokens if model provides them
 144          if hasattr(agent.model, "last_input_token_count"):
 145 +            # Ensure that None is handled by defaulting to 0
 146 +            total_input_tokens += agent.model.last_input_token_count or 0
 147 +            total_output_tokens += agent.model.last_output_token_count or 0
 148          if isinstance(step_log, ActionStep):
 149              step_log.input_token_count = agent.model.last_input_token_count
 150              step_log.output_token_count = agent.model.last_output_token_count
New version (added line marked with `+`):

 294      demo.launch(debug=True, share=True, **kwargs)
 295
 296
 297 +__all__ = ["stream_to_gradio", "GradioUI"]