Update app.py
app.py CHANGED
@@ -73,8 +73,9 @@ prompt = PromptTemplate(
     input_variables=["chat_history", "human_input", "context"], template=template
 )
 
+model_name = "gpt-3.5-turbo"
 memory = ConversationBufferWindowMemory(memory_key="chat_history", input_key="human_input",k=3)
-chain = load_qa_chain( OpenAI(), chain_type="stuff", memory=memory, prompt=prompt)
+chain = load_qa_chain( OpenAI(model_name=model_name, temperature=0), chain_type="stuff", memory=memory, prompt=prompt)
 
 """# Demo
 
@@ -111,10 +112,7 @@ def vote(tmp, index_state, data: gr.LikeData):
     write_to_file(file_name, value_new + ';' + find_previous_question(value_new))
 
 def find_previous_question(answer_string):
-    # Split the chat string into lines
     lines = chain.memory.buffer.split('\n')
-
-    # Initialize variables to keep track of the last question and the current question
     last_question = None
     current_question = None
 
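
For reference, the first hunk pins the model used by the QA chain: a new model_name constant ("gpt-3.5-turbo") is passed to OpenAI(...) together with temperature=0. Below is a minimal sketch of how these pieces fit together under the legacy LangChain API; the template string, the example document, and the question are placeholders and not part of this commit.

# Minimal sketch of the chain set up in this commit (legacy LangChain API).
# The template, document, and question are placeholders, not the app's own.
from langchain.llms import OpenAI
from langchain.prompts import PromptTemplate
from langchain.memory import ConversationBufferWindowMemory
from langchain.chains.question_answering import load_qa_chain
from langchain.docstore.document import Document

# Placeholder prompt; app.py defines its own template earlier in the file.
template = """Use the context to answer the question.

{context}

{chat_history}
Human: {human_input}
Assistant:"""

prompt = PromptTemplate(
    input_variables=["chat_history", "human_input", "context"], template=template
)

model_name = "gpt-3.5-turbo"
# Keep only the last k=3 exchanges in {chat_history}.
memory = ConversationBufferWindowMemory(
    memory_key="chat_history", input_key="human_input", k=3
)
# temperature=0 for deterministic answers; "stuff" concatenates the
# supplied documents into {context}.
chain = load_qa_chain(
    OpenAI(model_name=model_name, temperature=0),
    chain_type="stuff",
    memory=memory,
    prompt=prompt,
)

# Example call with a made-up document.
docs = [Document(page_content="Gradio lets you build ML demos in Python.")]
result = chain(
    {"input_documents": docs, "human_input": "What is Gradio?"},
    return_only_outputs=True,
)
print(result["output_text"])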
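
The second hunk only drops two explanatory comments from find_previous_question, the helper that vote uses to recover, from the chain's window memory, the question that preceded a voted-on answer. The lookup loop itself is outside this diff; the following is one plausible completion, assuming the buffer is a plain-text transcript with LangChain's default "Human:" / "AI:" prefixes. It is a hypothetical reconstruction, not the file's actual code.

# Sketch of one possible body for find_previous_question; the loop below
# is an assumption, since the diff only shows the first few lines.
def find_previous_question(answer_string):
    lines = chain.memory.buffer.split('\n')

    last_question = None
    current_question = None

    for line in lines:
        if line.startswith("Human:"):
            # Track the most recent user question seen in the transcript.
            current_question = line[len("Human:"):].strip()
        elif line.startswith("AI:") and answer_string in line:
            # The voted-on answer follows this question.
            last_question = current_question

    return last_question or ""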