Spaces:
Paused
Paused
| import gradio as gr | |
| import requests | |
| import os | |
| from duckduckgo_search import DDGS | |
| from transformers import pipeline | |
# Web-search client, created once at module level and reused across requests.
ddgs = DDGS()

# Language model used to synthesize answers from the search results.
model_id = "meta-llama/Meta-Llama-3-8B-Instruct"  # You can change this to any causal LM you prefer
try:
    answerer = pipeline("text-generation", model=model_id, max_length=512)
except Exception:
    # Fallback to a smaller model if the primary one fails to load (e.g.
    # gated weights, missing auth token, or insufficient memory).
    # NOTE: flan-t5 is a seq2seq model, so it must be loaded under the
    # "text2text-generation" task — the "text-generation" task used for the
    # primary model is rejected by transformers for T5-family checkpoints.
    answerer = pipeline("text2text-generation", model="google/flan-t5-base", max_length=512)
def search_web(query, num_results=5):
    """Run a DuckDuckGo text search and return the hits as a list of dicts."""
    try:
        hits = ddgs.text(query, max_results=num_results)
        return list(hits)
    except Exception as exc:
        # Surface the failure as a single pseudo-result so downstream
        # formatting code can render it without special-casing errors.
        return [{"title": f"Error searching: {str(exc)}", "body": "", "href": ""}]
def format_search_results(results):
    """Format search results into a readable Markdown block.

    Args:
        results: Iterable of dicts with optional "title", "body", and
            "href" keys (as returned by ``search_web``).

    Returns:
        A Markdown string: a header followed by one numbered, linked
        entry per result.
    """
    # Collect parts and join once — repeated ``+=`` on a string is
    # quadratic in the number of results.
    parts = ["### Search Results:\n\n"]
    for i, result in enumerate(results, 1):
        title = result.get("title", "No title")
        body = result.get("body", "No description")
        href = result.get("href", "No link")
        parts.append(f"**{i}. {title}**\n{body}\n[Link]({href})\n\n")
    return "".join(parts)
def generate_answer(query, search_results):
    """Generate an answer to *query* grounded in *search_results*.

    Args:
        query: The user's question.
        search_results: List of result dicts (see ``search_web``).

    Returns:
        The model's answer as a string, or an error message on failure.
    """
    context = format_search_results(search_results)
    prompt = f"""You are DeepSearch, a helpful AI assistant with web search capabilities.
Based on the following search results, please answer the user's question: "{query}"
{context}
Please provide a comprehensive answer using the information from the search results. If the search results don't contain relevant information, say so and provide your best answer based on your knowledge.
Answer:"""
    try:
        # max_new_tokens bounds only the generated continuation. The previous
        # max_length=800 counted the prompt too, so a long search context
        # could leave no room for the answer (or error out entirely).
        response = answerer(prompt, max_new_tokens=512, do_sample=True, temperature=0.7)[0]['generated_text']
        # Causal LMs echo the prompt before the continuation: strip it when
        # present. Splitting on "Answer:" alone would truncate the reply if
        # the model happened to emit that marker again in its answer.
        if response.startswith(prompt):
            answer = response[len(prompt):].strip()
        else:
            answer = response.split("Answer:")[-1].strip()
        return answer
    except Exception as e:
        return f"Error generating answer: {str(e)}"
def deep_search(message, history):
    """Chat handler: search the web for *message*, then answer from the hits.

    *history* is supplied by gr.ChatInterface and is intentionally unused.
    """
    hits = search_web(message)

    # Guard clause: genuinely empty result set (search failures come back as
    # a one-item error pseudo-result, so they fall through to the model).
    if not hits:
        return "I couldn't find any information on that topic. Please try a different question."

    return generate_answer(message, hits)
# Create the Gradio chat UI: ChatInterface wires deep_search(message, history)
# into a standard chat layout with clickable example prompts.
demo = gr.ChatInterface(
    fn=deep_search,
    title="DeepSearch Agent",
    description="Ask me anything! I'll search the web using DuckDuckGo and provide an answer based on the search results.",
    examples=[
        "What is the capital of France?",
        "How does photosynthesis work?",
        "What are the latest developments in AI?",
        "Who won the last World Cup?",
        "What is the recipe for chocolate chip cookies?"
    ],
    theme="soft"
)

# Start the web server only when run as a script, not on import.
if __name__ == "__main__":
    demo.launch()