# DeepSearch Agent — Hugging Face Space (DuckDuckGo search + LLM answering)
import gradio as gr
import requests
import os
from duckduckgo_search import DDGS
from transformers import pipeline
# Initialize the DuckDuckGo search client
ddgs = DDGS()

# Initialize the language model for answering
model_id = "meta-llama/Meta-Llama-3-8B-Instruct" # You can change this to any model you prefer
try:
    answerer = pipeline("text-generation", model=model_id, max_length=512)
except Exception:
    # Fallback to a smaller model if the primary one fails (e.g. gated model,
    # missing auth token, or not enough memory).
    # NOTE: flan-t5 is a seq2seq (encoder-decoder) model, so it must be loaded
    # with the "text2text-generation" task — "text-generation" (causal LM)
    # raises for T5-family checkpoints.
    answerer = pipeline("text2text-generation", model="google/flan-t5-base", max_length=512)
def search_web(query, num_results=5):
    """Run a DuckDuckGo text search and return up to *num_results* hits.

    Each hit is a dict with ``title``, ``body`` and ``href`` keys. On any
    failure a single pseudo-result describing the error is returned so the
    downstream formatting code still has something to render.
    """
    try:
        hits = ddgs.text(query, max_results=num_results)
        return list(hits)
    except Exception as exc:
        return [{"title": f"Error searching: {str(exc)}", "body": "", "href": ""}]
def format_search_results(results):
    """Render a list of search-result dicts as a markdown string.

    Missing keys fall back to placeholder text so partial results never
    break formatting.
    """
    chunks = ["### Search Results:\n\n"]
    for idx, hit in enumerate(results, start=1):
        chunks.append(
            f"**{idx}. {hit.get('title', 'No title')}**\n"
            f"{hit.get('body', 'No description')}\n"
            f"[Link]({hit.get('href', 'No link')})\n\n"
        )
    return "".join(chunks)
def generate_answer(query, search_results):
    """Ask the LLM to answer *query* using *search_results* as context.

    Returns the generated answer text, or an error message string if the
    model call fails.
    """
    context = format_search_results(search_results)
    prompt = f"""You are DeepSearch, a helpful AI assistant with web search capabilities.
Based on the following search results, please answer the user's question: "{query}"
{context}
Please provide a comprehensive answer using the information from the search results. If the search results don't contain relevant information, say so and provide your best answer based on your knowledge.
Answer:"""
    try:
        outputs = answerer(prompt, max_length=800, do_sample=True, temperature=0.7)
        generated = outputs[0]['generated_text']
        # Causal LMs echo the prompt; keep only the text after the final
        # "Answer:" marker.
        return generated.split("Answer:")[-1].strip()
    except Exception as exc:
        return f"Error generating answer: {str(exc)}"
def deep_search(message, history):
    """Chat handler for gr.ChatInterface: search the web, then answer.

    *history* is supplied by Gradio but is not used here.
    """
    results = search_web(message)
    # Guard clause: nothing found, nothing to ground an answer in.
    if not results:
        return "I couldn't find any information on that topic. Please try a different question."
    return generate_answer(message, results)
# --- Gradio UI wiring ---------------------------------------------------
# ChatInterface invokes deep_search(message, history) once per user turn.
demo = gr.ChatInterface(
    fn=deep_search,
    theme="soft",
    title="DeepSearch Agent",
    description="Ask me anything! I'll search the web using DuckDuckGo and provide an answer based on the search results.",
    examples=[
        "What is the capital of France?",
        "How does photosynthesis work?",
        "What are the latest developments in AI?",
        "Who won the last World Cup?",
        "What is the recipe for chocolate chip cookies?",
    ],
)

# Launch the server only when run as a script (not when imported).
if __name__ == "__main__":
    demo.launch()
|