Spaces:
Sleeping
Sleeping
| from tavily import TavilyClient | |
| from langchain_core.prompts import ChatPromptTemplate | |
class AINewsNode:
    """Graph node that fetches recent AI news via Tavily and summarizes it with an LLM.

    Designed for a LangGraph-style pipeline: both methods take and return the
    shared ``state`` dict, adding keys as they go (``news_data``, ``frequency``,
    ``summary``).
    """

    # Frequency keyword -> Tavily `time_range` code, and -> lookback window in
    # days. Kept as class constants so both methods share one definition.
    _TIME_RANGE_MAP = {'daily': 'd', 'weekly': 'w', 'monthly': 'm', 'year': 'y'}
    _DAYS_MAP = {'daily': 1, 'weekly': 7, 'monthly': 30, 'year': 366}

    def __init__(self, llm):
        """
        Initialize the node with an LLM client.

        Args:
            llm: A LangChain-compatible chat model exposing ``invoke``.

        Note:
            ``TavilyClient()`` reads its API key from the environment
            (``TAVILY_API_KEY``) — no key is passed here.
        """
        self.tavily = TavilyClient()
        self.llm = llm

    def fetch_news(self, state: dict) -> dict:
        """
        Fetch AI news for the frequency mentioned in the first user message.

        Args:
            state (dict): Pipeline state; ``state['messages'][0].content`` is
                expected to mention a frequency keyword
                (daily / weekly / monthly / year).

        Returns:
            dict: The same state with ``news_data`` (list of Tavily result
            dicts, possibly empty) and ``frequency`` (the detected keyword)
            added.
        """
        message = state['messages'][0].content.lower()
        # FIX: the original indexed the maps with the *entire* message, so any
        # input that wasn't exactly one of the four keywords raised KeyError.
        # Detect the first known keyword inside the message and fall back to
        # 'daily' when none is present.
        frequency = next(
            (key for key in self._TIME_RANGE_MAP if key in message),
            'daily',
        )
        response = self.tavily.search(
            query="Top Artificial Intelligence (AI) technology news globally",
            topic="news",
            time_range=self._TIME_RANGE_MAP[frequency],
            include_answer="advanced",
            max_results=20,
            days=self._DAYS_MAP[frequency],
        )
        # Tavily returns {'results': [...]} on success; default to [] so a
        # malformed response doesn't break downstream summarization.
        state['news_data'] = response.get('results', [])
        state['frequency'] = frequency
        return state

    def summarize_news(self, state: dict) -> dict:
        """
        Summarize the fetched news into markdown using the LLM.

        Args:
            state (dict): Pipeline state containing ``news_data`` (list of
                article dicts with ``content``/``url``/``published_date``)
                and ``frequency``.

        Returns:
            dict: The same state with ``summary`` added — a markdown string
            headed by the capitalized frequency.
        """
        news_items = state.get('news_data', [])
        frequency = state.get('frequency', 'daily')

        prompt_template = ChatPromptTemplate.from_messages([
            ("system", """Summarize AI news articles into markdown format. For each item include:
            - Date in **YYYY-MM-DD** format in IST timezone
            - Concise sentences summary from latest news
            - Sort news by date wise (latest first)
            - Source URL as link
            Use format:
            ### [Date]
            - [Summary](URL)"""),
            ("user", "Articles:\n{articles}")
        ])

        # Flatten the article dicts into one prompt-friendly text blob;
        # missing fields degrade to empty strings rather than raising.
        articles_str = "\n\n".join([
            f"Content: {item.get('content', '')}\nURL: {item.get('url', '')}\nDate: {item.get('published_date', '')}"
            for item in news_items
        ])

        response = self.llm.invoke(prompt_template.format(articles=articles_str))

        # Create formatted summary with header
        formatted_summary = f"# {frequency.capitalize()} AI News Summary\n\n{response.content}"
        state['summary'] = formatted_summary
        return state