Update app/llm.py
app/llm.py (+3 -6)
@@ -22,7 +22,7 @@ from langchain_core.prompts import PromptTemplate
 from langchain import hub
 from langchain_core.runnables import RunnablePassthrough, RunnablePick
 
-
+rag_prompt_llama = hub.pull("rlm/rag-prompt-llama")
 rag_prompt.messages
 
 class RagChat:
@@ -38,8 +38,9 @@ class RagChat:
         return vectorstore
 
     def chat(self, question):
+        retriever = vectorstore.as_retriever()
         chain = (
-
+            {"context": retriever | format_docs, "question": RunnablePassthrough()}
             | rag_prompt_llama
             | llm
             | StrOutputParser()
@@ -50,10 +51,6 @@
         docs = vectorstore.similarity_search(question)
         return docs
 
-
-
-
-
 
 class GenModel(BaseModel):
     question: str
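Taken together, the hunks point chat at a retriever-backed LCEL pipeline: pull the rlm/rag-prompt-llama prompt once at import time, build a retriever from the vectorstore, and pipe the context and question through the prompt, the model, and a string parser. Below is a minimal, self-contained sketch of that flow; format_docs, the standalone chat signature, and the final invoke call are assumptions inferred from identifiers the diff references, not code confirmed by the commit:

from langchain import hub
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough

# Pulled once at module import, as in the diff's new line 25.
rag_prompt_llama = hub.pull("rlm/rag-prompt-llama")

def format_docs(docs):
    # Assumed helper: the chain pipes the retriever into format_docs,
    # so it presumably joins the retrieved Documents into one string.
    return "\n\n".join(doc.page_content for doc in docs)

def chat(question, vectorstore, llm):
    # Mirrors RagChat.chat after this commit: retrieve, format, prompt, parse.
    retriever = vectorstore.as_retriever()
    chain = (
        {"context": retriever | format_docs, "question": RunnablePassthrough()}
        | rag_prompt_llama
        | llm
        | StrOutputParser()
    )
    return chain.invoke(question)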