# mobile_expert/app.py — Hugging Face Space entry point (author: sarath2803, commit c4b92f5)
from smolagents import ToolCallingAgent, tool
from transformers import AutoModelForCausalLM, AutoTokenizer
from bs4 import BeautifulSoup
import requests
import torch
import gradio as gr
# 🛠️ TOOLS
@tool
def scrape_phones(budget: str, use_case: str) -> str:
    """
    Search for smartphones under a given budget and use-case.

    Args:
        budget (str): The maximum budget in INR. Example: "15000"
        use_case (str): The intended usage like "gaming", "camera", "battery", etc.

    Returns:
        str: Up to five recommended phone names as a newline-separated string,
            "No phones found." when nothing matched, or an error description —
            tools must return text, never raise.
    """
    query = f"best phones under {budget} for {use_case} site:smartprix.com"
    headers = {"User-Agent": "Mozilla/5.0"}
    try:
        # Let requests URL-encode the query via params= instead of interpolating
        # it raw into the URL, and bound the request so a hung connection
        # cannot stall the agent indefinitely.
        response = requests.get(
            "https://www.google.com/search",
            params={"q": query},
            headers=headers,
            timeout=10,
        )
        response.raise_for_status()
        soup = BeautifulSoup(response.text, "html.parser")
        # Search-result titles are rendered as <h3>; keep only phone-related ones.
        results = [h3.get_text() for h3 in soup.select("h3") if "phone" in h3.get_text().lower()]
        return "\n".join(results[:5]) if results else "No phones found."
    except Exception as e:
        # Swallow and report: the agent consumes this string as the tool output.
        return f"Error occurred: {e}"
@tool
def compare_phones(phone_names: list[str]) -> str:
    """
    Compare the given phone models against a small built-in spec table.

    Args:
        phone_names (list[str]): Phone model names to look up.

    Returns:
        str: One line per requested phone in the form "<name>: <specs>";
            models missing from the table are reported as "Specs not available".
    """
    # Hard-coded stand-in for a real specification database.
    specs_by_model = {
        "Redmi Note 13 Pro": "Good display, solid battery, mid gaming",
        "iQOO Z9": "Fast processor, great gaming, average camera",
        "Realme Narzo 70 Pro": "Excellent camera, AMOLED, decent performance",
    }
    lines = []
    for name in phone_names:
        specs = specs_by_model.get(name, "Specs not available")
        lines.append(f"{name}: {specs}")
    return "\n".join(lines)
# 🤖 MODEL + AGENT SETUP
class MobileAdvisorAgent:
    """Loads a local LLM once and exposes a tool-calling agent for phone advice."""

    def __init__(self):
        print("📱 MobileAdvisorAgent initialized")
        self.system_prompt = """
        You are a mobile phone expert. Use the tools and return the best phone based on user budget and use-case.
        """
        model_id = "HuggingFaceH4/zephyr-7b-beta"
        # float16 + device_map="auto" lets accelerate place the 7B model on GPU.
        model = AutoModelForCausalLM.from_pretrained(
            model_id, torch_dtype=torch.float16, device_map="auto"
        )
        # NOTE(review): recent smolagents versions expect a smolagents Model
        # wrapper (e.g. TransformersModel) rather than a raw transformers model,
        # and configure the system prompt through prompt templates — verify
        # against the pinned smolagents version. The previously loaded tokenizer
        # was never used and has been removed.
        self.agent = ToolCallingAgent(
            tools=[scrape_phones, compare_phones],
            model=model,
            system_prompt=self.system_prompt,
        )

    def __call__(self, question: str, context: str = "") -> str:
        """
        Run the agent on a user question.

        Args:
            question (str): The user's query (budget, use-case, etc.).
            context (str): Optional extra context prepended to the prompt.

        Returns:
            str: The agent's stripped answer, or an apology string on failure —
                this method never raises.
        """
        print(f"Agent received question: {question[:50]}...")
        full_prompt = f"{self.system_prompt}\n\nContext: {context}\n\nQuestion: {question.strip()}"
        try:
            # agent.run may return a non-string final answer object; coerce to
            # str so the .strip() calls below cannot raise AttributeError.
            answer = str(self.agent.run(full_prompt))
        except Exception as e:
            print(f"Error: {e}")
            answer = f"Sorry, something went wrong: {e}"
        print(f"Answer: {answer.strip()}")
        return answer.strip()
# ✅ Create an instance of the agent (loads the model once at import time)
agent_instance = MobileAdvisorAgent()
# 🎛️ GRADIO UI
def chat(user_input: str) -> str:
    """Gradio callback: forward the user's message to the agent and return its reply."""
    return agent_instance(user_input)
# Simple single-textbox UI; launch() starts the blocking web server.
gr.Interface(fn=chat, inputs="text", outputs="text", title="📱 Mobile Advisor AI").launch()