Spaces:
Sleeping
Sleeping
from smolagents import ToolCallingAgent, ActionStep, CodeAgent, DuckDuckGoSearchTool, LiteLLMModel
import os
import time

# import litellm
# litellm._turn_on_debug()  # Enable litellm debug logging when troubleshooting.

# Propagate the Sambanova API key into the variable litellm expects.
# os.getenv returns None when the variable is missing, and assigning None
# into os.environ raises an opaque TypeError -- fail early with a clear
# message instead.
_sambanova_token = os.getenv('sambanova_token')
if _sambanova_token is None:
    raise RuntimeError("Environment variable 'sambanova_token' is not set")
os.environ['SAMBANOVA_API_KEY'] = _sambanova_token

# Candidate model ids:
#   Meta-Llama-3.1-405B-Instruct-8k
#   Meta-Llama-3.1-70B-Instruct-8k
# Create the model with an explicit provider prefix ("sambanova/").
model = LiteLLMModel(
    model_id="sambanova/Meta-Llama-3.1-405B-Instruct-8k",
    max_tokens=2096,
    temperature=0.5,
    # Sambanova's OpenAI-compatible endpoint, matching the working raw-requests example.
    api_base="https://api.sambanova.ai/v1",
)
def my_paused(step_log: ActionStep, agent: ToolCallingAgent) -> None:
    """Step callback: pause for 10 seconds after each agent step.

    Used to throttle the agent loop (e.g. to stay under provider rate
    limits). The arguments are supplied by the agent framework and are
    not inspected here.
    """
    print('Paused 10sec.')
    time.sleep(10.0)
# Build the tool-calling agent: a web-search tool plus the pause callback
# that throttles each step.
agent = ToolCallingAgent(
    tools=[DuckDuckGoSearchTool()],
    model=model,
    step_callbacks=[my_paused],
)
# NOTE: when launched through Streamlit, the responses are converted to JSON automatically.
# Alternative: a CodeAgent with the same search tool.
# agent = CodeAgent(
#     tools=[DuckDuckGoSearchTool()],
#     model=model,
# )
# Run the agent. A broad `except Exception` is acceptable here because this
# is the script's top-level boundary, but the original handler printed only
# str(e), discarding the exception type and traceback -- report both so
# failures are diagnosable.
try:
    result = agent.run("Search for the best music recommendations for a party at the Wayne's mansion.")
    print("\n++++\nResult:\n")
    print(result)
except Exception as e:
    import traceback
    print("\n++++\nError occurred:\n")  # plain string: the original f-string had no placeholders
    print(f"{type(e).__name__}: {e}")
    traceback.print_exc()