# ChronoQuery / main.py
# Uploaded by Himanshu2003 (commit 5e7327d, "Upload 2 files")
import os
import sys

from langchain_community.retrievers import WikipediaRetriever
from langchain_community.tools import DuckDuckGoSearchRun
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import RunnableParallel, RunnablePassthrough, RunnableLambda

import google.generativeai as genai
def concat_data(docs: list, search_results: str) -> str:
    """
    Combine Wikipedia documents and web search results into one string.

    Args:
        docs: Retrieved documents; each must expose a ``page_content`` string
            (LangChain ``Document`` objects do).
        search_results: Raw text returned by the web search tool.

    Returns:
        All sources concatenated, each preceded by a blank-line separator
        (identical layout to repeated ``data += "\\n\\n" + part``).
    """
    # str.join builds the result in one pass instead of the quadratic
    # repeated-concatenation loop the original used.
    parts = [doc.page_content for doc in docs]
    parts.append(search_results)
    return "\n\n" + "\n\n".join(parts)
# Configure Gemini.
# SECURITY: the original hard-coded an API key in source control; that key
# must be considered leaked and revoked. Read the key from the environment
# instead (set GOOGLE_API_KEY before running).
genai.configure(api_key=os.environ.get("GOOGLE_API_KEY", ""))

# Load Gemini model
gemini_model = genai.GenerativeModel("gemini-2.5-flash")

# Wrap Gemini in a LangChain Runnable so it composes with the | operator.
# The lambda coerces non-str inputs (e.g. a PromptValue from a template)
# to str before calling the SDK, and returns only the response text.
model = RunnableLambda(
    lambda x: gemini_model.generate_content(x if isinstance(x, str) else str(x)).text
)
# Prompt templates

# Final-answer prompt: receives the merged Wikipedia + web-search context
# and the user's original question.
main_template = PromptTemplate(
    template=(
        "You are a historical assistant. Based on the following context, "
        "answer the user's question or summarize the topic if it's not a question.\n\n"
        "Context:\n{context}\n\n"
        "User's question:\n{question}\n"
    ),
    input_variables=["context", "question"]
)

# Topic-extraction prompt: reduces a free-form query to a concise topic
# string that is fed to the Wikipedia retriever.
wiki_template = PromptTemplate(
    template=(
        "You are an expert at identifying the core topic of a user's historical question.\n"
        "Extract and return only the specific topic or event (no explanation).\n\n"
        "Query: {query}\nOutput:"
    ),
    input_variables=["query"]
)
# Components
parser = StrOutputParser()
search_tool = DuckDuckGoSearchRun()
# Pulls up to 4 English Wikipedia articles for the extracted topic.
retriever = WikipediaRetriever(top_k_results=4, lang="en")

# Topic chain → extract core topic (query string → topic string)
topic_chain = wiki_template | model | parser

# Retrieve from Wikipedia: the extracted topic string drives the retriever,
# which returns a list of Document objects.
wiki_chain = topic_chain | retriever

# Web search chain: runs the raw query through DuckDuckGo, returns text.
search_chain = RunnableLambda(lambda x: search_tool.run(x))

# Combine data sources: Wikipedia retrieval and web search run in parallel
# on the same input, then both results are merged into one context string.
data_chain = RunnableParallel({
    "docs": wiki_chain,
    "search_results": search_chain
}) | RunnableLambda(lambda x: concat_data(x["docs"], x["search_results"]))

# Final reasoning + answer generation.
# "question" passes the original user query through unchanged while
# "context" carries the merged source text; both feed the answer prompt.
final_chain = RunnableParallel({
    "context": data_chain,
    "question": RunnablePassthrough()
}) | main_template | model | parser
if __name__ == "__main__":
    # Allow an optional query as the first CLI argument; fall back to the
    # original demo query so existing no-argument invocations still work.
    query = sys.argv[1] if len(sys.argv) > 1 else "wars between china and india"
    output = final_chain.invoke(query)
    print("\n🧠 Final Answer:\n", output)