mrpoons-studio committed on
Commit
502c3ee
·
verified ·
1 Parent(s): 0861101

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -7
app.py CHANGED
@@ -66,9 +66,12 @@ conversation.append({
66
  input_ids = tokenizer.apply_chat_template(conversation, return_tensors = "pt")
67
  if input_ids.shape[1] > MAX_INPUT_TOKEN_LENGTH:
68
  input_ids = input_ids[:, -MAX_INPUT_TOKEN_LENGTH:]
69
- gr.Warning(f"Trimmed input from conversation as it was longer than {
 
 
70
  MAX_INPUT_TOKEN_LENGTH
71
- } tokens.")
 
72
  input_ids = input_ids.to(model.device)
73
 
74
  streamer = TextIteratorStreamer(tokenizer, timeout = 10.0, skip_prompt = True, skip_special_tokens = True)
@@ -107,11 +110,11 @@ chat_interface = gr.ChatInterface(
107
  value = DEFAULT_MAX_NEW_TOKENS,
108
  ),
109
  gr.Slider(
110
- label="Temperature",
111
- minimum=0,
112
- maximum=4.0,
113
- step=0.01,
114
- value=0,
115
  ),
116
  gr.Slider(
117
  label = "Top-p (nucleus sampling)",
 
66
  input_ids = tokenizer.apply_chat_template(conversation, return_tensors = "pt")
67
  if input_ids.shape[1] > MAX_INPUT_TOKEN_LENGTH:
68
  input_ids = input_ids[:, -MAX_INPUT_TOKEN_LENGTH:]
69
+ gr.Warning((
70
+ f"Trimmed input from conversation as it was longer than "
71
+ f" {
72
  MAX_INPUT_TOKEN_LENGTH
73
+ } tokens."
74
+ ))
75
  input_ids = input_ids.to(model.device)
76
 
77
  streamer = TextIteratorStreamer(tokenizer, timeout = 10.0, skip_prompt = True, skip_special_tokens = True)
 
110
  value = DEFAULT_MAX_NEW_TOKENS,
111
  ),
112
  gr.Slider(
113
+ label = "Temperature",
114
+ minimum = 0,
115
+ maximum = 4.0,
116
+ step = 0.01,
117
+ value = 0,
118
  ),
119
  gr.Slider(
120
  label = "Top-p (nucleus sampling)",