Himanshu2003 committed on
Commit
5e7327d
·
verified ·
1 Parent(s): c972911

Upload 2 files

Browse files
Files changed (2) hide show
  1. main.py +78 -0
  2. requirements.txt +8 -0
main.py ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langchain_community.retrievers import WikipediaRetriever
2
+ from langchain_core.prompts import PromptTemplate
3
+ from langchain_core.output_parsers import StrOutputParser
4
+ from langchain_community.tools import DuckDuckGoSearchRun
5
+ from langchain_core.runnables import RunnableParallel, RunnablePassthrough, RunnableLambda
6
+ import google.generativeai as genai
7
+
8
def concat_data(docs: list, search_results: str) -> str:
    """Combine Wikipedia documents and web-search results into one context string.

    Args:
        docs: Retrieved documents; each must expose a ``page_content`` string
            attribute (e.g. LangChain ``Document`` objects).
        search_results: Raw text returned by the web-search tool.

    Returns:
        Every document body followed by the search results, each chunk
        prefixed with a blank-line separator (``"\\n\\n"``), concatenated
        in order. Handles an empty ``docs`` list (returns just the
        separator + search results).
    """
    # Build the pieces and join once: repeated `+=` on a str re-copies the
    # accumulator each iteration (potentially quadratic); "".join is linear.
    parts = ["\n\n" + doc.page_content for doc in docs]
    parts.append("\n\n" + search_results)
    return "".join(parts)
import os

# --- Gemini configuration ---------------------------------------------------
# SECURITY FIX: the original hard-coded an API key directly in source (it is
# now in git history and must be revoked). Read it from the environment
# instead, and fail fast with an actionable message if it is missing.
_api_key = os.environ.get("GOOGLE_API_KEY")
if not _api_key:
    raise RuntimeError(
        "GOOGLE_API_KEY environment variable is not set. "
        "Export your Gemini API key before running this script."
    )
genai.configure(api_key=_api_key)

# Load Gemini model
gemini_model = genai.GenerativeModel("gemini-2.5-flash")

# Wrap Gemini in a LangChain Runnable. The lambda accepts either a plain
# string or a PromptValue (coerced via str()) and returns the response text,
# so the model composes with `|` like any other Runnable.
model = RunnableLambda(
    lambda x: gemini_model.generate_content(x if isinstance(x, str) else str(x)).text
)

# Prompt templates
main_template = PromptTemplate(
    template=(
        "You are a historical assistant. Based on the following context, "
        "answer the user's question or summarize the topic if it's not a question.\n\n"
        "Context:\n{context}\n\n"
        "User's question:\n{question}\n"
    ),
    input_variables=["context", "question"]
)

wiki_template = PromptTemplate(
    template=(
        "You are an expert at identifying the core topic of a user's historical question.\n"
        "Extract and return only the specific topic or event (no explanation).\n\n"
        "Query: {query}\nOutput:"
    ),
    input_variables=["query"]
)

# Components
parser = StrOutputParser()
search_tool = DuckDuckGoSearchRun()
retriever = WikipediaRetriever(top_k_results=4, lang="en")

# Topic chain: query -> extracted core topic (plain string).
topic_chain = wiki_template | model | parser

# Wikipedia chain: extracted topic -> list of retrieved Documents.
wiki_chain = topic_chain | retriever

# Web search chain: raw query string -> DuckDuckGo result text.
search_chain = RunnableLambda(lambda x: search_tool.run(x))

# Run Wikipedia retrieval and web search in parallel, then merge both
# sources into a single context string via concat_data.
data_chain = RunnableParallel({
    "docs": wiki_chain,
    "search_results": search_chain
}) | RunnableLambda(lambda x: concat_data(x["docs"], x["search_results"]))

# Final chain: build {context, question} in parallel (question is the
# untouched user query via RunnablePassthrough), render the main prompt,
# call Gemini, and parse to a string.
final_chain = RunnableParallel({
    "context": data_chain,
    "question": RunnablePassthrough()
}) | main_template | model | parser

if __name__ == "__main__":
    query = "wars between china and india"
    output = final_chain.invoke(query)
    print("\n🧠 Final Answer:\n", output)
requirements.txt ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ langchain==1.0.2
2
+ langchain-core==1.0.1
3
+ langchain-community==0.4.1
4
+ langchain-google-genai==0.0.1
5
+ google-generativeai==0.3.2
6
+ wikipedia==1.4.0
7
+ duckduckgo-search==8.1.1
8
+ streamlit==1.50.0