# Hugging Face Space app (page metadata removed: Space status "Running", file size 1,654 bytes)
"""Alfred: a smolagents CodeAgent served through a Gradio UI.

Wires up web-search, weather, Hub-stats and guest-info tools, picks
whichever HF inference model class this smolagents version exposes,
logs in to the Hugging Face Hub, and launches the agent UI.
"""
import gradio as gr
import random
import os
from huggingface_hub import login
from smolagents import GradioUI, CodeAgent  # , HfApiModel

# The HF inference model class was renamed across smolagents releases;
# try the known names from oldest to newest.
try:
    from smolagents.models import HfApiModel as HFInferenceModel
except ImportError:
    try:
        from smolagents.models import InferenceAPIModel as HFInferenceModel
    except ImportError:
        try:
            from smolagents.models import InferenceClientModel as HFInferenceModel
        except ImportError:
            HFInferenceModel = None

# Import our custom tools from their modules
from tools import WeatherInfoTool, HubStatsTool  # , DuckDuckGoSearchTool
from websearch import CustomDuckDuckGoSearchTool as DuckDuckGoSearchTool
from retriever import load_guest_dataset

if HFInferenceModel is None:
    # Fail early with a clear message instead of the opaque
    # "TypeError: 'NoneType' object is not callable" at model construction.
    raise ImportError(
        "No HF inference model class found in smolagents.models; "
        "tried HfApiModel, InferenceAPIModel, and InferenceClientModel."
    )

# Initialize the Hugging Face model.
# Only log in when a token is actually configured — huggingface_hub.login(None)
# would try to prompt interactively, which fails in a headless Space.
hf_token = os.getenv("HF_TOKEN")
if hf_token:
    login(hf_token)
hf_model = "Qwen/Qwen2.5-Coder-32B-Instruct"  # HfApiModel default model
model = HFInferenceModel(token=hf_token)

# Initialize the tools.
search_tool = DuckDuckGoSearchTool()    # web search
weather_info_tool = WeatherInfoTool()   # weather lookups
hub_stats_tool = HubStatsTool()         # HF Hub statistics
# Load the guest dataset and initialize the guest info tool.
guest_info_tool = load_guest_dataset()

# Create Alfred with all the tools
alfred = CodeAgent(
    # tools=[guest_info_tool, weather_info_tool, hub_stats_tool, search_tool],
    tools=[guest_info_tool],
    model=model,
    add_base_tools=True,  # Add any additional base tools
    planning_interval=3,  # Enable planning every 3 steps
)

if __name__ == "__main__":
    GradioUI(alfred).launch()