File size: 2,556 Bytes
5e7327d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
import os

import google.generativeai as genai
from langchain_community.retrievers import WikipediaRetriever
from langchain_community.tools import DuckDuckGoSearchRun
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import RunnableParallel, RunnablePassthrough, RunnableLambda

def concat_data(docs: list, search_results: str) -> str:
    """Combine Wikipedia documents and web search results into one string.

    Args:
        docs: Retrieved documents exposing a ``page_content`` attribute
            (e.g. LangChain ``Document`` objects).
        search_results: Raw text returned by the web search tool.

    Returns:
        A single string containing each document's content followed by the
        search results, each piece preceded by a blank-line separator.
    """
    # str.join builds the result in one linear pass instead of the original
    # quadratic `+=` loop; the leading "" reproduces the original output's
    # leading "\n\n" separator exactly.
    parts = [doc.page_content for doc in docs]
    return "\n\n".join(["", *parts, search_results])

# Configure Gemini.
# SECURITY: the API key was previously hard-coded in source. Read it from the
# environment instead so the secret never lands in version control. The key
# that was committed here should be considered leaked and must be rotated.
genai.configure(api_key=os.environ["GOOGLE_API_KEY"])

# Load Gemini model
gemini_model = genai.GenerativeModel("gemini-2.5-flash")

# Wrap Gemini in a LangChain Runnable. The lambda accepts either a rendered
# prompt string or a PromptValue (coerced via str) and returns the plain
# response text so downstream parsers see a string.
model = RunnableLambda(
    lambda x: gemini_model.generate_content(x if isinstance(x, str) else str(x)).text
)

# Prompt templates.
# `from_template` infers the input variables ({context}/{question}, {query})
# from the placeholders, so they need not be listed explicitly.
main_template = PromptTemplate.from_template(
    "You are a historical assistant. Based on the following context, "
    "answer the user's question or summarize the topic if it's not a question.\n\n"
    "Context:\n{context}\n\n"
    "User's question:\n{question}\n"
)

wiki_template = PromptTemplate.from_template(
    "You are an expert at identifying the core topic of a user's historical question.\n"
    "Extract and return only the specific topic or event (no explanation).\n\n"
    "Query: {query}\nOutput:"
)

# Shared components.
parser = StrOutputParser()           # pull the plain string out of the LLM output
search_tool = DuckDuckGoSearchRun()  # live web search
retriever = WikipediaRetriever(top_k_results=4, lang="en")

# Step 1: distil the user's free-form query down to a core topic string.
topic_chain = wiki_template | model | parser

# Step 2: feed that topic into the Wikipedia retriever.
wiki_chain = topic_chain | retriever

# Step 3: run the raw query through DuckDuckGo. Passing the bound method
# directly is equivalent to wrapping it in `lambda x: search_tool.run(x)`.
search_chain = RunnableLambda(search_tool.run)

# Merge both sources into a single context string. The parallel step yields
# {"docs": ..., "search_results": ...}; those keys match concat_data's
# parameter names, so the dict can be splatted straight into the helper.
data_chain = RunnableParallel({
    "docs": wiki_chain,
    "search_results": search_chain,
}) | RunnableLambda(lambda bundle: concat_data(**bundle))

# Final step: assemble the full prompt (gathered context + the original
# question), ask Gemini, and parse the response down to plain text.
final_chain = (
    RunnableParallel({"context": data_chain, "question": RunnablePassthrough()})
    | main_template
    | model
    | parser
)

if __name__ == "__main__":
    # Demo run: any free-form historical question works here.
    user_query = "wars between china and india"
    answer = final_chain.invoke(user_query)
    print("\n🧠 Final Answer:\n", answer)