udituen commited on
Commit
7ff4c08
·
1 Parent(s): c1456bc

update model response

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +22 -4
src/streamlit_app.py CHANGED
@@ -18,6 +18,20 @@ import os
18
  # os.environ["HF_HOME"] = HF_CACHE_PATH
19
 
20
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21
  # Initialize embeddings & documents
22
  @st.cache_resource
23
  def load_retriever():
@@ -39,10 +53,14 @@ def load_llm():
39
  # Setup RAG Chain
40
  @st.cache_resource
41
  def setup_qa():
 
42
  retriever = load_retriever()
43
  llm = load_llm()
44
- qa_chain = RetrievalQA.from_chain_type(llm=llm, retriever=retriever)
45
- return qa_chain
 
 
 
46
 
47
 
48
  # Streamlit App UI
@@ -53,5 +71,5 @@ query = st.text_input("Ask a question related to agriculture:")
53
  if query:
54
  qa = setup_qa()
55
  with st.spinner("Thinking..."):
56
- result = qa.run(query)
57
- st.success(result)
 
18
  # os.environ["HF_HOME"] = HF_CACHE_PATH
19
 
20
 
21
+ system_prompt = (
22
+ "You are an agricultural research assistant. "
23
+ "Use the given context to answer the question. "
24
+ "If you don't know the answer, say you don't know. "
25
+ "Context: {context}"
26
+ )
27
+
28
+ prompt = ChatPromptTemplate.from_messages(
29
+ [
30
+ ("system", system_prompt),
31
+ ("human", "{input}"),
32
+ ]
33
+ )
34
+
35
  # Initialize embeddings & documents
36
  @st.cache_resource
37
  def load_retriever():
 
53
  # Setup RAG Chain
54
  @st.cache_resource
55
  def setup_qa():
56
+
57
  retriever = load_retriever()
58
  llm = load_llm()
59
+ question_answer_chain = create_stuff_documents_chain(llm,prompt)
60
+ chain = create_retrieval_chain(retriever, question_answer_chain)
61
+
62
+ # qa_chain = RetrievalQA.from_chain_type(llm=llm, retriever=retriever)
63
+ return chain
64
 
65
 
66
  # Streamlit App UI
 
71
  if query:
72
  qa = setup_qa()
73
  with st.spinner("Thinking..."):
74
+ result = qa.invoke({"input": query})
75
+ st.success(result['answer'])