Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
|
@@ -6,26 +6,16 @@ from langchain.prompts import ChatPromptTemplate
|
|
| 6 |
from langchain_groq import ChatGroq
|
| 7 |
from langchain_community.embeddings import HuggingFaceEmbeddings
|
| 8 |
|
| 9 |
-
# --- Load Environment Variables ---
|
| 10 |
-
# This line loads the variables from a .env file during local development.
|
| 11 |
-
# On Hugging Face Spaces, secrets are set in the repository settings.
|
| 12 |
load_dotenv()
|
| 13 |
|
| 14 |
-
# --- App Configuration ---
|
| 15 |
st.set_page_config(
|
| 16 |
page_title="AgriChat",
|
| 17 |
page_icon="🌱",
|
| 18 |
layout="centered"
|
| 19 |
)
|
| 20 |
|
| 21 |
-
# --- Model and Embeddings Setup ---
|
| 22 |
@st.cache_resource
|
| 23 |
def load_resources():
|
| 24 |
-
"""
|
| 25 |
-
Load the language model, embeddings, and vector store.
|
| 26 |
-
Using st.cache_resource ensures these heavy resources are loaded only once.
|
| 27 |
-
"""
|
| 28 |
-
# Initialize HuggingFace embeddings for vectorization
|
| 29 |
model_name = "sentence-transformers/all-mpnet-base-v2"
|
| 30 |
try:
|
| 31 |
hf_embeddings = HuggingFaceEmbeddings(
|
|
@@ -37,15 +27,12 @@ def load_resources():
|
|
| 37 |
st.error(f"Failed to load embeddings model. Error: {e}")
|
| 38 |
return None, None, None
|
| 39 |
|
| 40 |
-
# Load the FAISS vector store from the local directory
|
| 41 |
-
# Ensure the 'faiss_index_datamodel' directory is in the same folder as this script
|
| 42 |
try:
|
| 43 |
vectorstore = FAISS.load_local("faiss_index_datamodel", hf_embeddings, allow_dangerous_deserialization=True)
|
| 44 |
except Exception as e:
|
| 45 |
st.error(f"Failed to load the vector store. Make sure 'faiss_index_datamodel' is present. Error: {e}")
|
| 46 |
return hf_embeddings, None, None
|
| 47 |
|
| 48 |
-
# Initialize the Groq LLM, fetching the API key from environment variables
|
| 49 |
groq_api_key = os.getenv("GROQ_API_KEY")
|
| 50 |
if not groq_api_key:
|
| 51 |
st.error("GROQ_API_KEY not found. Please add it to your repository secrets on Hugging Face Spaces.")
|
|
@@ -58,11 +45,8 @@ def load_resources():
|
|
| 58 |
)
|
| 59 |
return hf_embeddings, vectorstore, llm
|
| 60 |
|
| 61 |
-
# Load the resources and handle potential loading errors
|
| 62 |
hf_embeddings, vectorstore, llm = load_resources()
|
| 63 |
|
| 64 |
-
# --- Prompt Template ---
|
| 65 |
-
# This template guides the LLM on how to structure its response.
|
| 66 |
prompt_template = ChatPromptTemplate.from_template(
|
| 67 |
"""You are a knowledgeable and helpful agricultural expert. Your task is to provide clear, concise, and accurate answers to questions on various agricultural topics.
|
| 68 |
**Guidelines:**
|
|
@@ -87,54 +71,39 @@ prompt_template = ChatPromptTemplate.from_template(
|
|
| 87 |
**Answer:**"""
|
| 88 |
)
|
| 89 |
|
| 90 |
-
# Create the processing chain only if all components loaded successfully
|
| 91 |
if llm and prompt_template:
|
| 92 |
chain = prompt_template | llm
|
| 93 |
else:
|
| 94 |
chain = None
|
| 95 |
|
| 96 |
-
# --- Streamlit UI ---
|
| 97 |
st.title("🌱 AgriChat")
|
| 98 |
st.markdown("Your AI assistant for agricultural questions, powered by Llama 3.1.")
|
| 99 |
|
| 100 |
-
# Initialize chat history in session state
|
| 101 |
if "messages" not in st.session_state:
|
| 102 |
st.session_state.messages = []
|
| 103 |
|
| 104 |
-
# Display chat messages from history on app rerun
|
| 105 |
for message in st.session_state.messages:
|
| 106 |
with st.chat_message(message["role"]):
|
| 107 |
st.markdown(message["content"])
|
| 108 |
|
| 109 |
-
# Main chat input and response logic
|
| 110 |
if prompt := st.chat_input("Ask your agricultural question:"):
|
| 111 |
-
# Ensure the app is fully loaded before processing input
|
| 112 |
if not chain or not vectorstore:
|
| 113 |
st.warning("The application is not fully initialized. Please check for error messages above and ensure your API key is set correctly.")
|
| 114 |
else:
|
| 115 |
-
# Add user message to chat history and display it
|
| 116 |
st.session_state.messages.append({"role": "user", "content": prompt})
|
| 117 |
with st.chat_message("user"):
|
| 118 |
st.markdown(prompt)
|
| 119 |
|
| 120 |
-
# Process the prompt and display the assistant's response
|
| 121 |
with st.chat_message("assistant"):
|
| 122 |
with st.spinner("Finding the best answer..."):
|
| 123 |
-
# Retrieve relevant documents from the vector store
|
| 124 |
relevant_documents = vectorstore.similarity_search_with_score(prompt, k=4)
|
| 125 |
context = "\n\n".join([doc[0].page_content for doc in relevant_documents])
|
| 126 |
-
|
| 127 |
-
# Invoke the LLM chain to get the response
|
| 128 |
response = chain.invoke({"context": context, "question": prompt})
|
| 129 |
response_content = response.content
|
| 130 |
-
|
| 131 |
st.markdown(response_content)
|
| 132 |
|
| 133 |
-
# Add assistant response to chat history
|
| 134 |
st.session_state.messages.append({"role": "assistant", "content": response_content})
|
| 135 |
|
| 136 |
-
|
| 137 |
-
# --- Sidebar Instructions ---
|
| 138 |
st.sidebar.header("How to Use")
|
| 139 |
st.sidebar.markdown("""
|
| 140 |
- This app is designed to answer your farming and agriculture-related questions.
|
|
|
|
| 6 |
from langchain_groq import ChatGroq
|
| 7 |
from langchain_community.embeddings import HuggingFaceEmbeddings
|
| 8 |
|
|
|
|
|
|
|
|
|
|
| 9 |
# Load variables from a local .env file during development; on Hugging Face
# Spaces the same values arrive via repository secrets instead.
load_dotenv()

# --- Streamlit page configuration ---
st.set_page_config(
    page_title="AgriChat",
    page_icon="🌱",
    layout="centered",
)
|
| 16 |
|
|
|
|
| 17 |
@st.cache_resource
|
| 18 |
def load_resources():
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 19 |
model_name = "sentence-transformers/all-mpnet-base-v2"
|
| 20 |
try:
|
| 21 |
hf_embeddings = HuggingFaceEmbeddings(
|
|
|
|
| 27 |
st.error(f"Failed to load embeddings model. Error: {e}")
|
| 28 |
return None, None, None
|
| 29 |
|
|
|
|
|
|
|
| 30 |
try:
|
| 31 |
vectorstore = FAISS.load_local("faiss_index_datamodel", hf_embeddings, allow_dangerous_deserialization=True)
|
| 32 |
except Exception as e:
|
| 33 |
st.error(f"Failed to load the vector store. Make sure 'faiss_index_datamodel' is present. Error: {e}")
|
| 34 |
return hf_embeddings, None, None
|
| 35 |
|
|
|
|
| 36 |
groq_api_key = os.getenv("GROQ_API_KEY")
|
| 37 |
if not groq_api_key:
|
| 38 |
st.error("GROQ_API_KEY not found. Please add it to your repository secrets on Hugging Face Spaces.")
|
|
|
|
| 45 |
)
|
| 46 |
return hf_embeddings, vectorstore, llm
|
| 47 |
|
|
|
|
| 48 |
# Any of these may be None if loading failed; downstream UI code checks first.
hf_embeddings, vectorstore, llm = load_resources()
|
| 49 |
|
|
|
|
|
|
|
| 50 |
prompt_template = ChatPromptTemplate.from_template(
|
| 51 |
"""You are a knowledgeable and helpful agricultural expert. Your task is to provide clear, concise, and accurate answers to questions on various agricultural topics.
|
| 52 |
**Guidelines:**
|
|
|
|
| 71 |
**Answer:**"""
|
| 72 |
)
|
| 73 |
|
|
|
|
| 74 |
# Compose the prompt→LLM chain only when both components initialized; a None
# chain tells the chat handler to warn the user instead of answering.
chain = (prompt_template | llm) if llm and prompt_template else None
|
| 78 |
|
|
|
|
| 79 |
st.title("🌱 AgriChat")
|
| 80 |
st.markdown("Your AI assistant for agricultural questions, powered by Llama 3.1.")
|
| 81 |
|
|
|
|
| 82 |
if "messages" not in st.session_state:
|
| 83 |
st.session_state.messages = []
|
| 84 |
|
|
|
|
| 85 |
for message in st.session_state.messages:
|
| 86 |
with st.chat_message(message["role"]):
|
| 87 |
st.markdown(message["content"])
|
| 88 |
|
|
|
|
| 89 |
# --- Main chat loop: take a question, retrieve context, answer with the LLM ---
if prompt := st.chat_input("Ask your agricultural question:"):
    # chain/vectorstore are None when load_resources() failed earlier.
    if not chain or not vectorstore:
        st.warning("The application is not fully initialized. Please check for error messages above and ensure your API key is set correctly.")
    else:
        # Record and echo the user's message.
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)

        with st.chat_message("assistant"):
            with st.spinner("Finding the best answer..."):
                # Retrieve the top matching chunks. The scores were previously
                # fetched via similarity_search_with_score and then discarded,
                # so the plain similarity_search call is the right tool here.
                relevant_documents = vectorstore.similarity_search(prompt, k=4)
                context = "\n\n".join(doc.page_content for doc in relevant_documents)

                # Guard the remote LLM call: API/rate-limit/network errors
                # should surface in the UI, not crash the rerun.
                try:
                    response = chain.invoke({"context": context, "question": prompt})
                    response_content = response.content
                except Exception as e:
                    st.error(f"Failed to generate a response. Error: {e}")
                    response_content = None

            if response_content:
                st.markdown(response_content)
                # Persist the answer so it re-renders on future reruns.
                st.session_state.messages.append({"role": "assistant", "content": response_content})
|
| 106 |
|
|
|
|
|
|
|
| 107 |
st.sidebar.header("How to Use")
|
| 108 |
st.sidebar.markdown("""
|
| 109 |
- This app is designed to answer your farming and agriculture-related questions.
|