Upload folder using huggingface_hub
Browse files- .env +1 -0
- README.md +65 -0
- agent_config.py +38 -0
- main.py +21 -0
- requirements.txt +6 -0
- tools/__init__.py +1 -0
- tools/excel_analysis_tool.py +49 -0
- tools/wiki_tool.py +44 -0
.env
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
HF_API_KEY=<your_huggingface_api_key>
|
README.md
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Hugging Face Agent Project
|
| 2 |
+
|
| 3 |
+
This project implements a Hugging Face agent using the smolagents framework. The agent is designed to handle various tasks by utilizing custom tools and models.
|
| 4 |
+
|
| 5 |
+
## Project Structure
|
| 6 |
+
|
| 7 |
+
```
|
| 8 |
+
huggingface-agent-project
|
| 9 |
+
├── main.py # Entry point for the agent
|
| 10 |
+
├── agent_config.py # Configuration and model loading
|
| 11 |
+
├── tools # Directory for custom tools
|
| 12 |
+
│ ├── __init__.py # Package initializer for tools
|
| 13 |
+
│ ├── wiki_tool.py # Tool for fetching Wikipedia summaries
|
| 14 |
+
│ ├── web_search_tool.py # (not present in this commit: web search uses smolagents' built-in DuckDuckGoSearchTool)
|
| 15 |
+
│ ├── excel_analysis_tool.py # Tool for analyzing Excel files
|
| 16 |
+
├── requirements.txt # Project dependencies
|
| 17 |
+
└── README.md # Project documentation
|
| 18 |
+
```
|
| 19 |
+
|
| 20 |
+
## Setup Instructions
|
| 21 |
+
|
| 22 |
+
1. Clone the repository:
|
| 23 |
+
```
|
| 24 |
+
git clone <repository-url>
|
| 25 |
+
cd huggingface-agent-project
|
| 26 |
+
```
|
| 27 |
+
|
| 28 |
+
2. Install the required dependencies:
|
| 29 |
+
```
|
| 30 |
+
pip install -r requirements.txt
|
| 31 |
+
```
|
| 32 |
+
|
| 33 |
+
## Usage
|
| 34 |
+
|
| 35 |
+
To run the agent, execute the following command:
|
| 36 |
+
```
|
| 37 |
+
python main.py
|
| 38 |
+
```
|
| 39 |
+
|
| 40 |
+
## Agent Capabilities
|
| 41 |
+
|
| 42 |
+
The agent is capable of performing various tasks by leveraging the following tools:
|
| 43 |
+
|
| 44 |
+
### Tools
|
| 45 |
+
- **WikiTool**: Fetches summaries of topics from Wikipedia, handling disambiguation and page errors gracefully.
|
| 46 |
+
- **WebSearchTool**: Performs web searches using DuckDuckGo and retrieves the first result's snippet.
|
| 47 |
+
- **ExcelAnalysisTool**: Loads an Excel file and sums sales from items labeled as "food", excluding drinks. Returns the total sales in USD with 2 decimal places.
|
| 48 |
+
|
| 49 |
+
### Models
|
| 50 |
+
- The agent uses the Hugging Face Inference API via `HfApiModel` to interact with models such as `DeepSeek-R1` or `DeepSeek-Coder`.
|
| 51 |
+
|
| 52 |
+
### Example Workflow
|
| 53 |
+
1. The agent receives a request.
|
| 54 |
+
2. Based on the request, it selects the appropriate tool (e.g., WikiTool for Wikipedia lookups).
|
| 55 |
+
3. The tool processes the request and returns the result.
|
| 56 |
+
|
| 57 |
+
## Contributing
|
| 58 |
+
|
| 59 |
+
Contributions are welcome! Please submit a pull request or open an issue for any enhancements or bug fixes.
|
| 60 |
+
|
| 61 |
+
## Future Enhancements
|
| 62 |
+
|
| 63 |
+
- Deployment to Hugging Face Spaces.
|
| 64 |
+
- Adding more tools for advanced functionalities.
|
| 65 |
+
- Improving error handling and logging.
|
agent_config.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel
from tools.wiki_tool import WikiTool
from tools.excel_analysis_tool import ExcelAnalysisTool
import os

def create_agent():
    """
    Create and configure the CodeAgent with the model and tool set.

    Returns:
        CodeAgent: The configured agent instance.

    Raises:
        RuntimeError: If the HF_API_KEY environment variable is not set.
    """
    # Load API key from environment variable (more secure than hard-coding it)
    api_key = os.getenv("HF_API_KEY")
    if not api_key:
        raise RuntimeError("Missing Hugging Face API key. Set HF_API_KEY environment variable.")

    # Initialize the LLM model. HfApiModel takes the credential as `token`,
    # not `api_key` — the original kwarg would raise a TypeError at startup.
    model = HfApiModel(model_id="deepseek-ai/deepseek-coder-6.7b-instruct", token=api_key)

    # Initialize tools
    # NOTE(review): CodeAgent expects smolagents Tool instances; WikiTool and
    # ExcelAnalysisTool are plain classes with name/description/__call__ —
    # confirm they subclass smolagents.Tool (or are wrapped with @tool).
    wiki_tool = WikiTool()                     # Custom Wikipedia wrapper
    web_search_tool = DuckDuckGoSearchTool()   # Built-in smolagents web search tool
    excel_analysis_tool = ExcelAnalysisTool()  # Custom Excel processing tool

    # Create and return the agent
    agent = CodeAgent(
        model=model,
        tools=[wiki_tool, web_search_tool, excel_analysis_tool],
        max_steps=8,
        verbosity_level=2,
    )
    return agent

# Example usage
if __name__ == "__main__":
    agent = create_agent()
    # smolagents stores tools as a name -> tool mapping; iterating it yields
    # plain name strings, so listing the keys replaces the original
    # `[tool.name for tool in agent.tools]`, which failed on str.name.
    print("Agent created with tools:", list(agent.tools))
|
main.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import gradio as gr
from agent_config import create_agent

# Build the agent once at startup so every request reuses the same instance.
agent = create_agent()

def ask_agent(prompt: str) -> str:
    """
    Run the agent on a user prompt and return its answer as text.

    Args:
        prompt: The user's question or instruction.

    Returns:
        str: The agent's answer, or an error description if the run failed.
    """
    try:
        # smolagents agents are invoked with .run(); there is no .chat()
        # method, so the original call raised AttributeError on every request
        # and the handler below returned "Error: ..." instead of an answer.
        return str(agent.run(prompt))
    except Exception as e:
        # Surface the failure in the UI rather than crashing the server.
        return f"Error: {e}"

iface = gr.Interface(
    fn=ask_agent,
    inputs=gr.Textbox(lines=2, placeholder="Ask the agent something..."),
    outputs="text",
    title="Agent with smolagents + DeepSeek",
    description="This agent uses smolagents, DeepSeek LLM, and custom tools to answer factual and data questions. ",
)

if __name__ == "__main__":
    iface.launch()
|
requirements.txt
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
gradio
|
| 2 |
+
smolagents
|
| 3 |
+
wikipedia
|
| 4 |
+
pandas
|
| 5 |
+
openpyxl
|
| 6 |
+
huggingface_hub
|
tools/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# This file is intentionally left blank.
|
tools/excel_analysis_tool.py
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
from huggingface_hub import hf_hub_download
|
| 3 |
+
|
| 4 |
+
class ExcelAnalysisTool:
    """Tool that downloads a GAIA dataset workbook and totals 'food' sales."""

    def __init__(self):
        # Metadata the agent framework uses to present this tool.
        self.name = "ExcelAnalysisTool"
        self.description = (
            "Loads an Excel file from the GAIA dataset on Hugging Face and calculates "
            "the total sales for items labeled as 'food', excluding drinks. "
            "Provide input as a string with the filename, e.g., 'sales_data.xlsx'."
        )
        # Hugging Face dataset repository the workbooks are fetched from.
        self.repo_id = "gaia-benchmark/GAIA"

    def __call__(self, filename: str) -> str:
        """
        Download and process the Excel file.

        Args:
            filename: Name of the Excel file within the dataset repo
                (e.g., 'sales_data.xlsx').

        Returns:
            Total food sales formatted in USD, or an error message.
        """
        try:
            # Pull the workbook from the Hub dataset repository.
            excel_path = hf_hub_download(
                repo_id=self.repo_id,
                filename=filename,
                repo_type="dataset",
            )

            frame = pd.read_excel(excel_path)

            # Select rows whose category is 'food' and whose item is not 'drinks'
            # (comparisons are case-insensitive).
            is_food = frame['category'].str.lower() == 'food'
            not_drinks = frame['item'].str.lower() != 'drinks'
            total = frame[is_food & not_drinks]['sales'].sum()
            return f"Total sales for food items: ${total:.2f}"

        except FileNotFoundError:
            return "Error: The specified file was not found."
        except KeyError as e:
            return f"Error: Missing expected column in the Excel file: {str(e)}"
        except Exception as e:
            return f"An unexpected error occurred: {str(e)}"
|
tools/wiki_tool.py
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import wikipedia
|
| 2 |
+
|
| 3 |
+
class WikiTool:
    """Wikipedia lookup tool: topic summaries and historical-country checks."""

    def __init__(self):
        # Metadata the agent framework uses to present this tool.
        self.name = "WikiTool"
        self.description = (
            "Performs Wikipedia lookups. Supports actions: 'summary' and 'is_historical_country'.\n"
            "Usage:\n"
            "- {'action': 'summary', 'topic': 'France'}\n"
            "- {'action': 'is_historical_country', 'country_name': 'Yugoslavia'}"
        )

    def fetch_summary(self, topic: str) -> str:
        """Return a three-sentence Wikipedia summary of *topic*, or an error string."""
        try:
            return wikipedia.summary(topic, sentences=3)
        except wikipedia.DisambiguationError as e:
            # Ambiguous title: report the first few candidate pages.
            return f"Disambiguation error: The topic '{topic}' is ambiguous. Suggestions: {e.options[:5]}"
        except wikipedia.PageError:
            return f"Page error: The topic '{topic}' does not exist on Wikipedia."
        except Exception as e:
            return f"An unexpected error occurred: {str(e)}"

    def is_historical_country(self, country_name: str) -> bool:
        """Heuristically decide whether *country_name* refers to a defunct state."""
        indicators = (
            "former country", "no longer exists", "historical country",
            "was a country", "defunct", "dissolved", "existed until",
            "disestablished", "merged into",
        )
        try:
            text = wikipedia.summary(country_name, sentences=2).lower()
        except Exception:
            # Any lookup failure is treated as "not historical".
            return False
        return any(marker in text for marker in indicators)

    def __call__(self, args: dict):
        """Dispatch on args['action'] to the matching lookup method."""
        action = args.get("action")
        if action == "summary":
            return self.fetch_summary(args.get("topic", ""))
        if action == "is_historical_country":
            return self.is_historical_country(args.get("country_name", ""))
        return "Error: Unknown action. Use 'summary' or 'is_historical_country'."
|