Raymond committed on
Commit
bb64cd3
·
1 Parent(s): 8235b28

ui tweaks

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -42,8 +42,8 @@ def generate_llm(prompt, max_tokens = 512, analyze_probs = False):
42
  return [output, str(token_count / (time.time() - start_time)) + "tok/s " + str(token_count) + " tokens generated.", probtext]
43
 
44
  demo = gr.Interface(generate_llm,
45
- inputs=[gr.TextArea(placeholder = "In the midst of chaos."), gr.Number(value = 512, maximum = 2048, minimum = 1, step = 1, label = "Max tokens"), gr.Checkbox(label = "Show probs, 10x slower")],
46
  outputs=[gr.TextArea(label = "Output"), gr.Text(placeholder = "tok/s and other stats", label = "Stats"), gr.TextArea(label = "Probability stats")])
47
 
48
  if __name__ == "__main__":
49
- demo.launch(share = True)
 
42
  return [output, str(token_count / (time.time() - start_time)) + "tok/s " + str(token_count) + " tokens generated.", probtext]
43
 
44
  demo = gr.Interface(generate_llm,
45
+ inputs=[gr.TextArea(placeholder = "In the midst of chaos.", value = "Once upon a time"), gr.Number(value = 512, maximum = 2048, minimum = 1, step = 1, label = "Max tokens"), gr.Checkbox(label = "Show probs, 10x slower if run on gpu")],
46
  outputs=[gr.TextArea(label = "Output"), gr.Text(placeholder = "tok/s and other stats", label = "Stats"), gr.TextArea(label = "Probability stats")])
47
 
48
  if __name__ == "__main__":
49
+ demo.launch(share = False)