# AINewsNode: fetch recent AI news via Tavily and summarize it with an LLM.
from tavily import TavilyClient
from langchain_core.prompts import ChatPromptTemplate
import os
class AINewsNode:
    """LangGraph node that fetches recent AI news via Tavily and summarizes it with an LLM.

    The node exposes three graph steps:
      1. ``fetch_news``     - search Tavily for recent AI news articles.
      2. ``summarize_news`` - turn the articles into a markdown summary via the LLM.
      3. ``save_result``    - write the summary to ``./AINews/<frequency>_summary.md``.
    """

    # Tavily `time_range` codes and look-back windows, keyed by the
    # user-requested frequency. Class-level so they are built once, not per call.
    _TIME_RANGE_MAP = {'daily': 'd', 'weekly': 'w', 'monthly': 'm', 'year': 'y'}
    _DAYS_MAP = {'daily': 1, 'weekly': 7, 'monthly': 30, 'year': 366}

    def __init__(self, llm):
        """
        Initialize the AI News node.

        Args:
            llm: A language model exposing ``invoke(messages)`` (e.g. a
                LangChain chat model), used to generate the summaries.
        """
        # Tavily client used for the news search; reads its API key from the
        # environment (TAVILY_API_KEY) by default.
        self.tavily = TavilyClient()
        # LLM used later for summarization.
        self.llm = llm
        # Cross-step scratch state: 'frequency', 'news_data', 'summary', 'filename'.
        self.state = {}

    # ----------------------------------------------------------------------
    def fetch_news(self, state: dict) -> dict:
        """
        Fetch AI-related news articles using the Tavily API.

        Args:
            state (dict): Current graph state. ``state['messages'][0].content``
                must hold the requested frequency: 'daily', 'weekly',
                'monthly', or 'year'.

        Returns:
            dict: The same ``state`` with ``'news_data'`` set to the list of
            Tavily result dicts (empty list if the search returned none).

        Raises:
            KeyError: If the frequency is not one of the supported values.
        """
        # Normalize the user input; .strip() tolerates stray whitespace.
        frequency = state['messages'][0].content.strip().lower()
        self.state['frequency'] = frequency

        # Search Tavily for the latest AI news within the requested window.
        response = self.tavily.search(
            query="Top Artificial Intelligence (AI) technology news globally",
            topic="news",
            time_range=self._TIME_RANGE_MAP[frequency],  # How far back to look
            include_answer="advanced",                   # Request detailed information
            max_results=20,                              # Limit number of news items
            days=self._DAYS_MAP[frequency],              # Number of days to consider
            # include_domains=["techcrunch.com", "venturebeat.com/ai", ...]  # (Optional) restrict sources
        )

        # Store the fetched results in both the graph state and local scratch state.
        state['news_data'] = response.get('results', [])
        self.state['news_data'] = state['news_data']
        return state

    # ----------------------------------------------------------------------
    def summarize_news(self, state: dict) -> dict:
        """
        Summarize the fetched AI news articles using the provided LLM.

        Args:
            state (dict): Current graph state; ``'news_data'`` is read from it
                (falling back to this node's scratch state if absent).

        Returns:
            dict: The same ``state`` with ``'summary'`` set to the LLM's
            markdown summary.
        """
        # Prefer the graph state; fall back to the scratch copy written by fetch_news.
        news_items = state.get('news_data', self.state.get('news_data', []))

        # Markdown structure the LLM should produce.
        prompt_template = ChatPromptTemplate.from_messages([
            ("system", """Summarize AI news articles into markdown format. For each item include:
- Date in **YYYY-MM-DD** format (IST timezone)
- A concise summary of the news
- Sorted by latest date first
- Include the source URL as a link
Use this format:
### [Date]
- [Summary](URL)"""),
            ("user", "Articles:\n{articles}")
        ])

        # Flatten each article into "Content / URL / Date" text for the prompt.
        articles_str = "\n\n".join([
            f"Content: {item.get('content', '')}\nURL: {item.get('url', '')}\nDate: {item.get('published_date', '')}"
            for item in news_items
        ])

        # BUGFIX: use format_messages() so the LLM receives proper role-tagged
        # system/user messages; .format() would collapse them into one string.
        response = self.llm.invoke(prompt_template.format_messages(articles=articles_str))

        # Save the summary to both states; return the graph state (consistent
        # with fetch_news) rather than leaking the internal scratch dict.
        state['summary'] = response.content
        self.state['summary'] = state['summary']
        return state

    # ----------------------------------------------------------------------
    def save_result(self, state):
        """
        Save the summarized AI news to a markdown (.md) file.

        Args:
            state (dict): Current graph state (unused; frequency and summary
                are read from this node's scratch state).

        Returns:
            dict: This node's state with ``'filename'`` set to the path of the
            written summary file (e.g. ``./AINews/daily_summary.md``).
        """
        frequency = self.state['frequency']
        summary = self.state['summary']

        filename = f"./AINews/{frequency}_summary.md"
        os.makedirs(os.path.dirname(filename), exist_ok=True)

        # BUGFIX: write UTF-8 explicitly — the platform default encoding can
        # fail on non-ASCII characters in the summary (e.g. on Windows).
        with open(filename, 'w', encoding='utf-8') as f:
            f.write(f"# {frequency.capitalize()} AI News Summary\n\n")
            f.write(summary)

        self.state['filename'] = filename
        return self.state