Update my_memory_logic.py
Browse files- my_memory_logic.py +28 -5
my_memory_logic.py
CHANGED
|
@@ -1,21 +1,44 @@
|
|
| 1 |
# my_memory_logic.py
|
|
|
|
| 2 |
from langchain.memory import ConversationBufferMemory
|
| 3 |
-
from langchain.chat_models import ChatOpenAI
|
| 4 |
from langchain.chains import LLMChain
|
| 5 |
-
from langchain.prompts.chat import
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 6 |
|
|
|
|
| 7 |
memory = ConversationBufferMemory(return_messages=True)
|
| 8 |
|
|
|
|
| 9 |
restatement_system_prompt = (
|
| 10 |
"Given a chat history and the latest user question "
|
| 11 |
-
"which might reference context in the chat history,
|
|
|
|
|
|
|
|
|
|
| 12 |
)
|
| 13 |
|
|
|
|
| 14 |
restatement_prompt = ChatPromptTemplate.from_messages([
|
| 15 |
SystemMessagePromptTemplate.from_template(restatement_system_prompt),
|
| 16 |
MessagesPlaceholder(variable_name="chat_history"),
|
| 17 |
HumanMessagePromptTemplate.from_template("{input}")
|
| 18 |
])
|
| 19 |
|
| 20 |
-
|
| 21 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
# my_memory_logic.py
|
| 2 |
+
import os
|
| 3 |
from langchain.memory import ConversationBufferMemory
|
|
|
|
| 4 |
from langchain.chains import LLMChain
|
| 5 |
+
from langchain.prompts.chat import (
|
| 6 |
+
ChatPromptTemplate,
|
| 7 |
+
SystemMessagePromptTemplate,
|
| 8 |
+
MessagesPlaceholder,
|
| 9 |
+
HumanMessagePromptTemplate,
|
| 10 |
+
)
|
| 11 |
+
# Import ChatGroq from the langchain_groq package
|
| 12 |
+
from langchain_groq import ChatGroq
|
| 13 |
|
| 14 |
+
# 1) Conversation memory: keeps the full message history so the
#    restatement step can see prior turns (return_messages=True yields
#    message objects rather than a flattened string).
memory = ConversationBufferMemory(return_messages=True)

# 2) System instruction for question rewriting: turn the latest user
#    question into a standalone one. Rewrite only — never answer.
restatement_system_prompt = (
    "Given a chat history and the latest user question "
    "which might reference context in the chat history, "
    "formulate a standalone question that can be understood "
    "without the chat history. Do NOT answer the question, "
    "just reformulate it if needed; otherwise return it as is."
)

# 3) Prompt layout: system instruction first, then the prior chat
#    turns, then the latest user input.
_restatement_messages = [
    SystemMessagePromptTemplate.from_template(restatement_system_prompt),
    MessagesPlaceholder(variable_name="chat_history"),
    HumanMessagePromptTemplate.from_template("{input}"),
]
restatement_prompt = ChatPromptTemplate.from_messages(_restatement_messages)
|
| 32 |
|
| 33 |
+
# 4) Initialize the Groq-hosted LLM used for question restatement.
#    Requires GROQ_API_KEY in the environment; os.environ[...] fails
#    fast with a KeyError at import time if the key is missing.
restatement_llm = ChatGroq(
    model="llama3-70b-8192",  # fix: comma was missing here — SyntaxError in committed code
    groq_api_key=os.environ["GROQ_API_KEY"],
)

# 5) Chain that pairs the restatement prompt with the Groq LLM.
#    Invoked with {"input": ..., "chat_history": ...} per the prompt above.
restatement_chain = LLMChain(
    llm=restatement_llm,
    prompt=restatement_prompt,
)
|