zasharepw77 committed on
Commit
7f2a321
·
1 Parent(s): 8ac20e7

пустой без smallfgenta

Browse files
Files changed (1) hide show
  1. app.py +9 -20
app.py CHANGED
@@ -1,30 +1,19 @@
1
- from smolagents import ToolCallingAgent, DuckDuckGoSearchTool, LiteLLMModel
2
- import os
3
  import litellm
4
  litellm._turn_on_debug() # Enable debugging for litellm
5
 
6
  # Set the API key for Sambanova
7
  os.environ['SAMBANOVA_API_KEY'] = os.getenv('sambanova_token')
8
 
9
- # Create the model with explicit provider specification
10
- model = LiteLLMModel(
11
- model_id="sambanova/Qwen2.5-Coder-32B-Instruct", # Explicitly specify the sambanova provider
12
- max_tokens=2096,
13
- temperature=0.1,
14
- api_base="https://api.sambanova.ai/v1/chat/completions", # Specify the base URL for Sambanova, as in your working requests example
15
- )
16
-
17
- # Create the agent
18
- agent = ToolCallingAgent(
19
- tools=[DuckDuckGoSearchTool()],
20
- model=model
21
- )
22
-
23
- # Run the agent with error handling
24
  try:
25
- result = agent.run("Search for the best music recommendations for a party at the Wayne's mansion.")
26
- print("\n++++\nResult:\n")
27
- print(result)
 
 
 
 
 
28
  except Exception as e:
29
  print(f"\n++++\nError occurred:\n")
30
  print(f"{e}")
 
 
 
# app.py — smoke-test a Sambanova-hosted model by calling litellm directly
# (the smolagents agent wrapper was removed in this commit).

import os

import litellm

litellm._turn_on_debug()  # Enable litellm's debug logging for troubleshooting

# Set the API key for Sambanova under the variable name litellm expects.
# BUG FIX: `import os` was dropped in this commit while os.environ/os.getenv
# are still used below, which raised NameError at startup — re-added above.
# Also guard against a missing secret: assigning None into os.environ raises
# an opaque TypeError, so fail with a clear message instead.
_token = os.getenv('sambanova_token')
if _token is None:
    raise RuntimeError("Environment variable 'sambanova_token' is not set")
os.environ['SAMBANOVA_API_KEY'] = _token

# Test the completion directly with litellm
try:
    response = litellm.completion(
        model="sambanova/Qwen2.5-Coder-32B-Instruct",
        messages=[{"role": "user", "content": "Tell me about yourself."}],
        max_tokens=100,
        api_base="https://api.sambanova.ai/v1",  # Base URL for Sambanova
    )
    print("\n++++\nResponse from litellm:\n")
    print(response)
except Exception as e:
    # Broad catch is acceptable here: this is a top-level smoke-test script
    # and any failure (auth, network, model name) should be printed, not crash.
    print("\n++++\nError occurred:\n")
    print(f"{e}")