c committed on
Commit
674e23c
·
verified ·
1 Parent(s): 3a7bd62

Update app.py

Browse files
Files changed (1) [hide] [show]
  1. app.py +3 -0
app.py CHANGED
@@ -24,6 +24,7 @@ def predict(message, history):
24
  for item in history_transformer_format])
25
 
26
  model_inputs = tokenizer([messages], return_tensors="pt").to("cpu")
 
27
  streamer = TextIteratorStreamer(tokenizer, timeout=10., skip_prompt=True, skip_special_tokens=True)
28
  generate_kwargs = dict(
29
  model_inputs,
@@ -37,10 +38,12 @@ def predict(message, history):
37
  stopping_criteria=StoppingCriteriaList([stop])
38
  )
39
  t = Thread(target=model.generate, kwargs=generate_kwargs)
 
40
  t.start()
41
 
42
  partial_message = ""
43
  for new_token in streamer:
 
44
  if new_token != '<':
45
  partial_message += new_token
46
  yield partial_message
 
24
  for item in history_transformer_format])
25
 
26
  model_inputs = tokenizer([messages], return_tensors="pt").to("cpu")
27
+ print(model_inputs)
28
  streamer = TextIteratorStreamer(tokenizer, timeout=10., skip_prompt=True, skip_special_tokens=True)
29
  generate_kwargs = dict(
30
  model_inputs,
 
38
  stopping_criteria=StoppingCriteriaList([stop])
39
  )
40
  t = Thread(target=model.generate, kwargs=generate_kwargs)
41
+ print(t)
42
  t.start()
43
 
44
  partial_message = ""
45
  for new_token in streamer:
46
+ print(new_token)
47
  if new_token != '<':
48
  partial_message += new_token
49
  yield partial_message