Flutra committed on
Commit
c975a90
·
1 Parent(s): 29d89e4

streaming and memory management

Browse files
Files changed (1) hide show
  1. app.py +14 -6
app.py CHANGED
@@ -84,17 +84,26 @@ Technical answer (following the exact structure above):""")
84
 
85
  def predict(message, history):
86
  """
87
- Process each message with streaming
88
  """
 
 
 
 
 
 
 
 
89
  token_queue = Queue()
90
  stream_handler = StreamHandler(token_queue)
91
 
92
- # Create new QA chain for each conversation to ensure fresh memory
93
- qa_chain = create_qa_chain(stream_handler)
 
94
 
95
  # Function to process the message and add to queue
96
  def get_response():
97
- result = qa_chain({"question": message})
98
  # Add sources to queue
99
  sources = "\n\nSources:\n"
100
  seen_components = set()
@@ -135,7 +144,7 @@ with gr.Blocks() as demo:
135
  chatbot = gr.ChatInterface(
136
  predict,
137
  title="Apple Music API Documentation Assistant",
138
- description="Ask questions about the Apple Music API documentation.",
139
  examples=[
140
  "How to search for songs on Apple Music API?",
141
  "What are the required parameters for searching songs?",
@@ -145,4 +154,3 @@ with gr.Blocks() as demo:
145
 
146
  if __name__ == "__main__":
147
  demo.queue().launch()
148
-
 
84
 
85
  def predict(message, history):
86
  """
87
+ Process each message with streaming and handle quit commands
88
  """
89
+ # Check for quit command
90
+ if message.lower() in ['quit', 'exit', 'bye']:
91
+ # Clear the QA chain if it exists
92
+ if hasattr(predict, 'qa_chain'):
93
+ predict.qa_chain.memory.clear()
94
+ delattr(predict, 'qa_chain')
95
+ return "Chat history cleared. Goodbye!"
96
+
97
  token_queue = Queue()
98
  stream_handler = StreamHandler(token_queue)
99
 
100
+ # Create or get QA chain
101
+ if not hasattr(predict, 'qa_chain'):
102
+ predict.qa_chain = create_qa_chain(stream_handler)
103
 
104
  # Function to process the message and add to queue
105
  def get_response():
106
+ result = predict.qa_chain({"question": message})
107
  # Add sources to queue
108
  sources = "\n\nSources:\n"
109
  seen_components = set()
 
144
  chatbot = gr.ChatInterface(
145
  predict,
146
  title="Apple Music API Documentation Assistant",
147
+ description="Ask questions about the Apple Music API documentation. Type 'quit', 'exit', or 'bye' to clear chat history.",
148
  examples=[
149
  "How to search for songs on Apple Music API?",
150
  "What are the required parameters for searching songs?",
 
154
 
155
  if __name__ == "__main__":
156
  demo.queue().launch()