Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
|
@@ -10,9 +10,13 @@ upsampler = pipeline("text-generation", repo_id, torch_dtype=torch.bfloat16)
|
|
| 10 |
@spaces.GPU
|
| 11 |
def upsample(prompt):
|
| 12 |
with torch.no_grad(), amp.autocast("cuda", dtype=torch.bfloat16):
|
| 13 |
-
return upsampler(f"Upsample the short caption to a long caption:")[0]["generated_text"]
|
| 14 |
|
| 15 |
demo = gr.Interface(
|
|
|
|
|
|
|
|
|
|
|
|
|
| 16 |
fn=upsample,
|
| 17 |
inputs=gr.Text(
|
| 18 |
label="Prompt",
|
|
|
|
| 10 |
@spaces.GPU
|
| 11 |
def upsample(prompt):
|
| 12 |
with torch.no_grad(), amp.autocast("cuda", dtype=torch.bfloat16):
|
| 13 |
+
return upsampler(f"Upsample the short caption to a long caption: {prompt}")[0]["generated_text"]
|
| 14 |
|
| 15 |
demo = gr.Interface(
|
| 16 |
+
title="NVIDIA Cosmos 🌌 Prompt Upsampler",
|
| 17 |
+
description="""Upsample prompts using NVIDIA's 12B Cosmos model, based on Mistral NeMo 12B. This space uses the HuggingFace Transformers version at bfloat16 precision.
|
| 18 |
+
|
| 19 |
+
[[cosmos]](https://huggingface.co/nvidia/Cosmos-1.0-Prompt-Upsampler-12B-Text2World) [[transformers]](https://huggingface.co/appmana/Cosmos-1.0-Prompt-Upsampler-12B-Text2World-hf) [[gguf]](https://huggingface.co/mradermacher/Cosmos-1.0-Prompt-Upsampler-12B-Text2World-hf-GGUF)""",
|
| 20 |
fn=upsample,
|
| 21 |
inputs=gr.Text(
|
| 22 |
label="Prompt",
|