# WebQueryBot / app.py
# NOTE(review): the lines below were Hugging Face Space page chrome
# ("Update app.py", commit bf2dd99, raw/history/blame, 1.72 kB) captured
# by the export; commented out so the file parses as Python.
# app.py
import os
from dotenv import load_dotenv
from tavily import TavilyClient
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
# 1. Load environment variables from .env file (if present)
load_dotenv()
# 2. Set API keys from environment
# NOTE(review): neither key is validated here — a missing variable yields
# None and only fails later inside the client constructors / first API call.
GOOGLE_API_KEY = os.environ.get("GOOGLE_API_KEY")
TAVILY_API_KEY = os.environ.get("TAVILY_API_KEY")
# 3. Initialize Gemini LLM
# Gemini 1.5 Flash via the LangChain Google GenAI wrapper; the key is
# passed explicitly rather than read from the environment by the library.
llm = ChatGoogleGenerativeAI(
    model="models/gemini-1.5-flash",
    google_api_key=GOOGLE_API_KEY
)
# 4. Tavily Client
# Used below to fetch/extract readable text from a user-supplied URL.
tavily_client = TavilyClient(api_key=TAVILY_API_KEY)
def extract_website_text(url):
    """Fetch and return the readable text content of *url* via Tavily.

    Parameters
    ----------
    url : str
        The web page to extract.

    Returns
    -------
    str
        The extracted page text, or a fallback message when extraction
        fails or returns nothing.
    """
    # BUG FIX: TavilyClient.extract() returns a dict of the form
    # {"results": [{"url": ..., "raw_content": ...}], "failed_results": [...]},
    # not a bare list — the original `result[0]` / `"text" in result[0]`
    # lookup could never succeed. Guard each level of the lookup and keep a
    # fallback to a list-shaped response for older client versions.
    result = tavily_client.extract(urls=url)
    if isinstance(result, dict):
        entries = result.get("results") or []
    else:  # defensive: tolerate a list-shaped response
        entries = result or []
    if entries:
        first = entries[0]
        # Prefer the documented "raw_content" key; fall back to "text".
        text = first.get("raw_content") or first.get("text")
        if text:
            return text
    return "Could not extract content from the URL."
# 5. Prompt Template
# Pairs the scraped page text with the user's question for the LLM.
_PROMPT_TEXT = """
You are an intelligent assistant. Based on the following website content:
{website_content}
Answer the following question:
{question}
"""
prompt = PromptTemplate(
    template=_PROMPT_TEXT,
    input_variables=["website_content", "question"],
)
# 6. LLM QA Chain
# Binds the prompt to the Gemini LLM; invoked with a dict of the prompt's
# input variables, and its result is read via the "text" key below.
qa_chain = LLMChain(llm=llm, prompt=prompt)
# 7. Main Chat Function
def ask_from_website(url, question):
    """Extract *url*'s content, ask *question* about it, and print the answer.

    Side effects only: progress and the final answer go to stdout.
    """
    print(f"\n🔗 Extracting content from: {url}")
    page_text = extract_website_text(url)

    print(f"\n💬 Asking: {question}")
    result = qa_chain.invoke(
        {"website_content": page_text, "question": question}
    )

    print("\n✅ Answer:")
    print(result["text"])
# 8. Run
if __name__ == "__main__":
    # Minimal interactive CLI: prompt for a URL and a question, then
    # extract the page and answer via the QA chain.
    url = input("Enter the website URL: ")
    question = input("What do you want to ask about this website? ")
    ask_from_website(url, question)