Update app.py
Browse files
app.py
CHANGED
|
@@ -6,8 +6,7 @@ from langchain.chains import LLMChain
|
|
| 6 |
from langchain.prompts import PromptTemplate
|
| 7 |
from langchain_community.utilities import WikipediaAPIWrapper
|
| 8 |
from langchain_community.utilities.duckduckgo_search import DuckDuckGoSearchAPIWrapper
|
| 9 |
-
from langchain.agents
|
| 10 |
-
from langchain.agents import Tool, initialize_agent
|
| 11 |
from langchain.callbacks import StreamlitCallbackHandler
|
| 12 |
|
| 13 |
# Load .env
|
|
@@ -43,30 +42,37 @@ web_search_tool = Tool(
|
|
| 43 |
description="Perform a live web search via DuckDuckGo."
|
| 44 |
)
|
| 45 |
|
| 46 |
-
# Prompt template
|
| 47 |
prompt = """
|
| 48 |
You are a knowledgeable assistant. Answer {question} using your internal knowledge.
|
| 49 |
-
If you’re unsure,
|
| 50 |
"""
|
| 51 |
prompt_template = PromptTemplate(input_variables=["question"], template=prompt)
|
| 52 |
chain = LLMChain(llm=llm, prompt=prompt_template)
|
| 53 |
|
| 54 |
-
#
|
| 55 |
def get_answer(query: str) -> str:
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
|
|
|
| 61 |
return wikipedia_wrapper.run(query)
|
| 62 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 63 |
|
| 64 |
# Conversation history
|
| 65 |
if "messages" not in st.session_state:
|
| 66 |
st.session_state["messages"] = [
|
| 67 |
-
{"role": "assistant", "content": "Hi
|
| 68 |
]
|
| 69 |
|
|
|
|
| 70 |
for msg in st.session_state["messages"]:
|
| 71 |
st.chat_message(msg["role"]).write(msg["content"])
|
| 72 |
|
|
@@ -82,4 +88,4 @@ if st.button("Find my answer"):
|
|
| 82 |
st.session_state["messages"].append({"role": "assistant", "content": answer})
|
| 83 |
st.chat_message("assistant").write(answer)
|
| 84 |
else:
|
| 85 |
-
st.warning("Please enter a question.")
|
|
|
|
| 6 |
from langchain.prompts import PromptTemplate
|
| 7 |
from langchain_community.utilities import WikipediaAPIWrapper
|
| 8 |
from langchain_community.utilities.duckduckgo_search import DuckDuckGoSearchAPIWrapper
|
| 9 |
+
from langchain.agents import Tool
|
|
|
|
| 10 |
from langchain.callbacks import StreamlitCallbackHandler
|
| 11 |
|
| 12 |
# Load .env
|
|
|
|
| 42 |
description="Perform a live web search via DuckDuckGo."
|
| 43 |
)
|
| 44 |
|
| 45 |
+
# Prompt template for the LLM.
# The template deliberately instructs the model to emit a sentinel phrase
# ("I don't know" / "Outdated") when unsure — get_answer() scans the reply
# for those phrases to decide whether to fall back to Wikipedia.
prompt = """
You are a knowledgeable assistant. Answer {question} using your internal knowledge.
If you’re unsure, say "I don't know" or "Outdated".
"""
prompt_template = PromptTemplate(input_variables=["question"], template=prompt)
# Module-level chain reused by get_answer() for every non-recency query.
chain = LLMChain(llm=llm, prompt=prompt_template)
|
| 52 |
|
| 53 |
+
# Enhanced fallback logic:
def get_answer(query: str) -> str:
    """Answer *query* by routing between web search, the LLM, and Wikipedia.

    Routing:
      1. Recency-flavoured queries ("recent", "latest", "today", ...) go to a
         live DuckDuckGo search first; an empty or trivially short result
         falls back to Wikipedia.
      2. Everything else is answered by the LLM chain.
      3. If the LLM defers ("i don't know" / "outdated" / "not sure"),
         fall back to Wikipedia.

    Args:
        query: The user's question, as free text.

    Returns:
        The answer text from whichever source handled the query.
    """
    ql = query.lower()
    # 1) If it's asking for recent info, always web-search first.
    if any(k in ql for k in ["recent", "latest", "today", "current", "2025"]):
        ddg_ans = ddg_wrapper.run(query)
        # Guard against empty or trivially short search snippets.
        if ddg_ans and len(ddg_ans) > 20:
            return ddg_ans
        # Recency query but the search came back thin: use Wikipedia instead.
        # This return must live INSIDE the recency branch — at function-body
        # level it would make the LLM path below unreachable dead code.
        return wikipedia_wrapper.run(query)
    # 2) Otherwise, use the LLM.
    lm_ans = chain.run({"question": query}).strip()
    # 3) If the LLM defers (sentinel phrases from the prompt), fall back
    #    to Wikipedia.
    if any(flag in lm_ans.lower() for flag in ["i don't know", "outdated", "not sure"]):
        return wikipedia_wrapper.run(query)
    return lm_ans
|
| 68 |
|
| 69 |
# Conversation history
# Seed the chat history exactly once: st.session_state persists across
# Streamlit script reruns, so the greeting is not duplicated on each rerun.
if "messages" not in st.session_state:
    st.session_state["messages"] = [
        {"role": "assistant", "content": "Hi! Ask me anything—I'll fetch the latest data for recent questions."}
    ]
|
| 74 |
|
| 75 |
+
# Display history
# Re-render the full conversation on every script rerun, newest last.
for msg in st.session_state["messages"]:
    st.chat_message(msg["role"]).write(msg["content"])
|
| 78 |
|
|
|
|
| 88 |
st.session_state["messages"].append({"role": "assistant", "content": answer})
|
| 89 |
st.chat_message("assistant").write(answer)
|
| 90 |
else:
|
| 91 |
+
st.warning("Please enter a question.")
|