|
|
import os |
|
|
import requests |
|
|
import json |
|
|
import google.generativeai as genai |
|
|
|
|
|
|
|
|
# Serper API key used for the search endpoint in search_articles().
# NOTE(review): this reads the env var 'X_API_KEY' (same spelling as the
# request header) rather than e.g. 'SERPER_API_KEY' — confirm deployments
# actually export it under this name.
SERPER_API_KEY = os.getenv('X_API_KEY')

# Gemini API key for the google-generativeai client.
GEMINI_API_KEY = os.getenv('GEMINI_API_KEY')

# Configure the Gemini client at import time (module-level side effect).
genai.configure(api_key=GEMINI_API_KEY)

# Shared Gemini model instance used by generate_answer().
model = genai.GenerativeModel('gemini-1.5-flash')
|
|
|
|
|
def search_articles(query: str):
    """
    Search for articles related to *query* via the Serper API.

    Parameters:
        query: the search query string.

    Returns:
        The raw JSON response body as a string (callers parse it with
        json.loads, e.g. fetch_article_content()).

    Raises:
        requests.HTTPError: on a non-2xx response from the API.
        requests.Timeout: if the API does not answer within the timeout.
    """
    url = "https://google.serper.dev/search"
    payload = json.dumps({"q": query})
    headers = {
        'X-API-KEY': SERPER_API_KEY,
        'Content-Type': 'application/json',
    }
    # Timeout so a stalled API call cannot hang the process indefinitely;
    # raise_for_status so an error body is never silently handed to the
    # JSON parser downstream.
    response = requests.post(url, headers=headers, data=payload, timeout=30)
    response.raise_for_status()
    return response.text
|
|
|
|
|
|
|
|
def fetch_article_content(articles):
    """
    Extract headings and snippet text from a Serper search response.

    Note: despite the historical name, this does not fetch any URLs; it
    parses titles and snippets already present in the search response.

    Parameters:
        articles: raw JSON string as returned by search_articles().

    Returns:
        A newline-joined string of titles and snippets drawn from the
        'answerBox', 'organic', and 'peopleAlsoAsk' sections, stripped of
        surrounding whitespace. Missing sections are skipped.

    Raises:
        json.JSONDecodeError: if *articles* is not valid JSON.
    """
    data = json.loads(articles)
    parts = []

    def _collect(item):
        # Each Serper result object exposes optional 'title'/'snippet' keys;
        # keep title before snippet to match the rendered order.
        for key in ('title', 'snippet'):
            if key in item:
                parts.append(item[key])

    if 'answerBox' in data:
        _collect(data['answerBox'])
    for result in data.get('organic', []):
        _collect(result)
    for question in data.get('peopleAlsoAsk', []):
        _collect(question)

    return "\n".join(parts).strip()
|
|
|
|
|
|
|
|
def generate_answer(content, query):
    """
    Generate a contextual answer to *query* using the module-level Gemini
    model (gemini-1.5-flash), grounded in the supplied *content*.

    Parameters:
        content: concatenated search-result text used as grounding context.
        query: the user's original question.

    Returns:
        The generated answer text.
    """
    # Single-turn generation: context and query travel together in one prompt.
    system_prompt = f"""You are a helpful assistant. Use the following context to answer the user's query. If the context doesn't contain relevant information, say so.\n
Below is the context : \n
{content}\n
Below is the user query:
{query}\n
Based on the user query above and the context given provide with highly accurate response for the user query . You should cover all points , each and every small concrete detail's in the context.
"""
    response = model.generate_content(system_prompt)
    return response.text
|
|
|
|
|
|