Jia0603 committed on
Commit
a2fb7fb
·
verified ·
1 Parent(s): 2723569

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -50,7 +50,7 @@ def clean_reply(text):
50
 
51
  return lines[0]
52
 
53
- def chat_with_model(user_message, chat_history, max_new_tokens=60, temperature=0.8, top_p=0.9):
54
 
55
  if chat_history is None:
56
  chat_history = []
@@ -90,8 +90,8 @@ with gr.Blocks() as demo:
90
  chat = gr.Chatbot(elem_id="chatbot", label="Conversation")
91
  msg = gr.Textbox(label="Your message")
92
  send = gr.Button("Send")
93
- max_tokens = gr.Slider(50, 256, value=60, label="max_new_tokens")
94
- temp = gr.Slider(0.8, 1.5, value=0.8, label="temperature")
95
  top_p = gr.Slider(0.1, 1.0, value=0.9, label="top_p")
96
 
97
  with gr.Column(scale=1):
@@ -103,6 +103,6 @@ with gr.Blocks() as demo:
103
  fn=chat_with_model,
104
  inputs=[msg, state, max_tokens, temp, top_p],
105
  outputs=[msg, chat, state]
106
- )
107
 
108
  demo.launch()
 
50
 
51
  return lines[0]
52
 
53
+ def chat_with_model(user_message, chat_history, max_new_tokens=256, temperature=0.8, top_p=0.9):
54
 
55
  if chat_history is None:
56
  chat_history = []
 
90
  chat = gr.Chatbot(elem_id="chatbot", label="Conversation")
91
  msg = gr.Textbox(label="Your message")
92
  send = gr.Button("Send")
93
+ max_tokens = gr.Slider(50, 512, value=256, label="max_new_tokens")
94
+ temp = gr.Slider(0.6, 1.5, value=0.8, label="temperature")
95
  top_p = gr.Slider(0.1, 1.0, value=0.9, label="top_p")
96
 
97
  with gr.Column(scale=1):
 
103
  fn=chat_with_model,
104
  inputs=[msg, state, max_tokens, temp, top_p],
105
  outputs=[msg, chat, state]
106
+ )
107
 
108
  demo.launch()