kdevoe committed on
Commit
e8c2ecc
·
verified ·
1 Parent(s): 0e4ab50

Adding scrolling conversation history like ChatGPT

Browse files
Files changed (1) hide show
  1. app.py +13 -10
app.py CHANGED
@@ -1,7 +1,6 @@
1
  import gradio as gr
2
  from transformers import T5Tokenizer, T5ForConditionalGeneration
3
  from langchain.memory import ConversationBufferMemory
4
- from langchain.prompts import PromptTemplate
5
 
6
  # Load the tokenizer and model for flan-t5
7
  tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-base")
@@ -30,17 +29,21 @@ def chat_with_flan(input_text):
30
  # Update the memory with the user input and model response
31
  memory.save_context({"input": input_text}, {"output": response})
32
 
33
- return response
34
 
35
- # Set up the Gradio interface
36
- interface = gr.Interface(
37
- fn=chat_with_flan,
38
- inputs=gr.Textbox(label="Chat with FLAN-T5"),
39
- outputs=gr.Textbox(label="FLAN-T5's Response"),
40
- title="FLAN-T5 Chatbot with Memory",
41
- description="This is a simple chatbot powered by the FLAN-T5 model with conversational memory, using LangChain.",
42
- )
 
 
 
43
 
44
  # Launch the Gradio app
45
  interface.launch()
46
 
 
 
1
  import gradio as gr
2
  from transformers import T5Tokenizer, T5ForConditionalGeneration
3
  from langchain.memory import ConversationBufferMemory
 
4
 
5
  # Load the tokenizer and model for flan-t5
6
  tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-base")
 
29
  # Update the memory with the user input and model response
30
  memory.save_context({"input": input_text}, {"output": response})
31
 
32
+ return conversation_history + f"\nUser: {input_text}\nAssistant: {response}"
33
 
34
# Build the Gradio UI: conversation transcript on top, text entry underneath.
with gr.Blocks() as interface:
    chatbot_output = gr.Textbox(
        label="Conversation",
        lines=15,
        placeholder="Chat history will appear here...",
        interactive=False,
    )
    user_input = gr.Textbox(
        label="Your Input",
        placeholder="Type your message here...",
        lines=2,
    )

    def update_chat(input_text, chat_history):
        # Delegate to the model wrapper, which returns the full transcript;
        # the second return value clears the input box after submission.
        # NOTE(review): chat_history is supplied by the inputs= wiring below
        # but is not consumed here — the wrapper tracks history itself.
        return chat_with_flan(input_text), ""

    # Pressing Enter in the input box refreshes the transcript and empties the box.
    user_input.submit(update_chat, inputs=[user_input, chatbot_output], outputs=[chatbot_output, user_input])

# Launch the Gradio app
interface.launch()
48
 
49
+