from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
import datetime
import requests
import pytz
import yaml
from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI
from sentence_transformers import SentenceTransformer, util
import wikipedia
def _get_embedding_model():
    """Lazily load and cache the sentence-embedding model.

    Loading SentenceTransformer weights is expensive; the original code
    re-loaded the model on every tool call. The instance is memoized on the
    function object so repeated calls reuse the same model.
    """
    model = getattr(_get_embedding_model, "_model", None)
    if model is None:
        model = SentenceTransformer('all-MiniLM-L6-v2')
        _get_embedding_model._model = model
    return model


@tool
def find_unexpected_analogy(topic: str) -> str:
    """A tool that Finds an unexpected but relevant analogy for a given topic
    Args:
        topic: A string that provides text for analogy search
    """
    try:
        # Get a short summary from Wikipedia to justify the analogy in the answer.
        summary = wikipedia.summary(topic, sentences=2)
        # Pretrained model for embeddings (semantic similarity search), cached
        # across calls instead of being reloaded each time.
        model = _get_embedding_model()
        topic_embedding = model.encode(topic, convert_to_tensor=True)
        # Potential analogy candidates (could be expanded)
        candidate_topics = ["ant colonies", "internet routing", "biological ecosystems", "black holes", "swarm intelligence", "musical composition"]
        candidate_embeddings = model.encode(candidate_topics, convert_to_tensor=True)
        # Pick the candidate with the LOWEST cosine similarity: the most
        # semantically distant — i.e. most "unexpected" — analogy.
        similarities = util.pytorch_cos_sim(topic_embedding, candidate_embeddings)
        best_index = similarities.argmin().item()  # Find the most *distant* but related topic
        analogy = candidate_topics[best_index]
        return f"For '{topic}', an unexpected analogy is '{analogy}'. Why? {summary}"
    except Exception as e:
        return f"Error generating analogy: {str(e)}"
@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.
    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').
    """
    try:
        # Resolve the zone and take "now" in it, formatted down to seconds.
        now = datetime.datetime.now(pytz.timezone(timezone))
        local_time = now.strftime("%Y-%m-%d %H:%M:%S")
        return f"The current local time in {timezone} is: {local_time}"
    except Exception as e:
        # Unknown zone names (and anything else) come back as an error string
        # rather than raising, so the agent can read the failure.
        return f"Error fetching time for timezone '{timezone}': {str(e)}"
# Tool the agent must call to deliver its final response (required by CodeAgent).
final_answer = FinalAnswerTool()

# Hosted inference model that backs the agent's code generation.
model = HfApiModel(
    max_tokens=2096,
    temperature=0.5,
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
    custom_role_conversions=None,
)

# Import tool from Hub
# NOTE(review): loaded but never passed to the agent's tools list below —
# confirm whether it should be added.
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

# Prompt templates driving the agent's system/plan messages.
# Explicit encoding avoids platform-dependent default decoding of the YAML file.
with open("prompts.yaml", 'r', encoding="utf-8") as stream:
    prompt_templates = yaml.safe_load(stream)

agent = CodeAgent(
    model=model,
    tools=[final_answer, get_current_time_in_timezone, find_unexpected_analogy],  # add your tools here (don't remove final_answer)
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates
)

# Launch the chat UI (blocking call; serves the Space).
GradioUI(agent).launch()