Spaces:
Sleeping
Update LLMPipeline.py
Browse files — LLMPipeline.py: +1 −1
LLMPipeline.py
CHANGED
|
@@ -109,5 +109,5 @@ def generate_image_prompt(summary):
|
|
| 109 |
input_text=tokenizer.apply_chat_template(messages, tokenize=False)
|
| 110 |
inputs = tokenizer.encode(input_text, return_tensors="pt").to(device)
|
| 111 |
outputs = model.generate(inputs, max_new_tokens=100, temperature=0.2, top_p=0.9, do_sample=True)
|
| 112 |
-
return tokenizer.decode(outputs[0])
|
| 113 |
|
|
|
|
| 109 |
input_text=tokenizer.apply_chat_template(messages, tokenize=False)
|
| 110 |
inputs = tokenizer.encode(input_text, return_tensors="pt").to(device)
|
| 111 |
outputs = model.generate(inputs, max_new_tokens=100, temperature=0.2, top_p=0.9, do_sample=True)
|
| 112 |
+
return tokenizer.decode(outputs[0])
|
| 113 |
|