import os
import re
from urllib.parse import urlparse

import requests
from bs4 import BeautifulSoup
from dotenv import load_dotenv
from langchain_community.document_loaders import WebBaseLoader
from openai import OpenAI
from tenacity import retry, stop_after_attempt, wait_exponential

# Load environment variables
load_dotenv()

# Initialize API clients
BRAVE_API_KEY = os.getenv("BRAVE_API_KEY")
BRAVE_SEARCH_URL = "https://api.search.brave.com/res/v1/news/search"
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
openai_client = OpenAI(api_key=OPENAI_API_KEY)


@retry(stop=stop_after_attempt(3), wait=wait_exponential(multiplier=1, min=4, max=10))
def clean_content(content):
    """Strip page chrome from fetched page content and return the main text."""
    soup = BeautifulSoup(content, 'html.parser')

    # Remove unwanted elements
    for element in soup(['header', 'footer', 'nav', 'aside', 'menu']):
        element.decompose()

    # Prefer an explicit main-content container; fall back to <body>, then the whole page
    main_content = soup.find('main') or soup.find('article') or soup.find('div', class_='content')
    if main_content:
        text = main_content.get_text()
    else:
        body = soup.find('body')
        text = body.get_text() if body else soup.get_text()

    # Collapse runs of whitespace and newlines into single spaces
    text = re.sub(r'\s+', ' ', text).strip()

    if not text:
        raise ValueError("No content extracted after cleaning")
    return text


def summarize_content(content, max_tokens=4000):
    """Summarize page text with OpenAI, preserving facts and figures."""
    summarization_prompt = f"""Summarize the following content, preserving important details, facts, and figures. This summary will be used for research and news purposes, so accuracy and comprehensiveness are crucial. Keep the summary within approximately {max_tokens} tokens.

Content to summarize:
{content}

Summary:"""

    try:
        response = openai_client.chat.completions.create(
            model="gpt-4o-mini",
            messages=[
                {"role": "system", "content": "You are an expert summarizer, capable of condensing information while retaining crucial details."},
                {"role": "user", "content": summarization_prompt},
            ],
            max_tokens=max_tokens,
        )
        summary = response.choices[0].message.content
        if not summary.strip():
            raise ValueError("Empty summary received from OpenAI")
        return summary
    except Exception as e:
        raise ValueError(f"Error in OpenAI API call: {str(e)}") from e


def perform_web_search(query, num_results=2):
    """Query the Brave News Search API and return up to num_results hits."""
    headers = {
        "Accept": "application/json",
        "Accept-Encoding": "gzip",
        "X-Subscription-Token": BRAVE_API_KEY,
    }
    params = {
        "q": query,
        "count": num_results,
        "country": "IN",
        "result_filter": "news",
    }

    try:
        response = requests.get(BRAVE_SEARCH_URL, headers=headers, params=params)
        response.raise_for_status()
        results = response.json()
        print("Raw search results:")
        print(results)
        print("\n" + "-" * 50 + "\n")

        search_results = []
        for result in results.get('results', []):
            url = result.get('url', '')
            search_results.append({
                'url': url,
                'thumbnail': result.get('thumbnail', {}).get('src', ''),
                'title': result.get('title', ''),
                'hostname': urlparse(url).netloc,
            })

        if not search_results:
            raise ValueError("No results found in the search results")

        print("Fetched results:")
        for result in search_results[:num_results]:
            print(f"URL: {result['url']}")
            print(f"Thumbnail: {result['thumbnail']}")
            print(f"Title: {result['title']}")
            print(f"Hostname: {result['hostname']}")
            print("-" * 30)
        print("\n" + "-" * 50 + "\n")

        return search_results[:num_results]
    except Exception as e:
        print(f"Error in perform_web_search: {str(e)}")
        raise


def load_web_content(urls):
    """Fetch each URL, clean the page text, and return one summary per page."""
    loader = WebBaseLoader(urls)
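    # Note (an assumption about WebBaseLoader's defaults): load() returns one
    # Document per URL, with the fetched page's extracted text in
    # doc.page_content rather than the full raw HTML.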
    documents = loader.load()

    summarized_contents = []
    for i, doc in enumerate(documents):
        try:
            cleaned_content = clean_content(doc.page_content)
            print(f"Cleaned content for URL {i+1}:")
            print(cleaned_content[:500] + "..." if len(cleaned_content) > 500 else cleaned_content)
            print("\n" + "-" * 50 + "\n")

            # clean_content already collapsed newlines and tabs; truncate to
            # bound the size of the summarization request
            cleaned_content = cleaned_content[:1000]

            summarized_content = summarize_content(cleaned_content)
            summarized_contents.append(summarized_content)
            print(f"Summarized content for URL {i+1}:")
            print(summarized_content)
            print("\n" + "-" * 50 + "\n")
        except Exception as e:
            print(f"Error processing content for URL {i+1}: {repr(e)}")
            print(f"URL: {urls[i]}")
            print("Skipping this URL and continuing with the next one.")

    if not summarized_contents:
        raise ValueError("No content could be processed")
    return summarized_contents


@retry(stop=stop_after_attempt(3), wait=wait_exponential(multiplier=1, min=4, max=10))
def generate_detailed_explanation(query, context):
    """Answer the query with OpenAI, grounded in the summarized context."""
    prompt = f"""Based on the following summarized context, provide a good and easy to understand explanation of the topic. Make sure to incorporate all relevant details, facts, and figures from the context.

Here's the topic: "{query}".

Use this context to answer the above query:
{context}

Important: Don't mention that you are answering based on the context. Just start with the main response. Avoid phrases like 'Based on the context provided, ...' etc.

Explanation:"""

    try:
        response = openai_client.chat.completions.create(
            model="gpt-4o-mini",
            messages=[
                {"role": "system", "content": "You are a knowledgeable assistant that provides good and easy to understand explanations on various topics, incorporating all relevant information from the given context."},
                {"role": "user", "content": prompt},
            ],
            max_tokens=4096,  # Adjust as needed
        )
        explanation = response.choices[0].message.content
        if not explanation.strip():
            raise ValueError("Empty explanation received from OpenAI")
        return explanation
    except Exception as e:
        print(f"Error in generate_detailed_explanation: {str(e)}")
        raise


def main():
    query = input("Enter the topic you want to learn about: ")

    search_results = perform_web_search(query)
    print("Search results:", search_results, '\n')
    print('-' * 50)

    # perform_web_search returns result dicts; WebBaseLoader needs plain URL strings
    urls = [result['url'] for result in search_results]
    web_content = load_web_content(urls)
    print("Summarized web content: ", web_content, '\n')
    print('-' * 50)

    # Join the per-page summaries into a single context block for the final answer
    context = "\n\n".join(web_content)
    detailed_explanation = generate_detailed_explanation(query, context)
    print(f"Detailed Explanation:\n\n{detailed_explanation}")


if __name__ == "__main__":
    main()
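# Setup sketch: the imports above correspond to these PyPI distributions
# (pin versions as needed):
#
#   pip install python-dotenv requests beautifulsoup4 tenacity openai langchain-community
#
# The script expects a .env file alongside it with the two keys read via
# os.getenv above:
#
#   BRAVE_API_KEY=<your Brave Search API key>
#   OPENAI_API_KEY=<your OpenAI API key>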