Spaces:
Runtime error
upscaled examples a bit
Browse files
app.py
CHANGED
|
@@ -267,7 +267,7 @@ def run_main_app(semantic_concept, word, script, font_selected, prompt_suffix, n
|
|
| 267 |
# training loop
|
| 268 |
t_range = tqdm(range(num_iter))
|
| 269 |
gif_frames = []
|
| 270 |
-
skip = …  [removed line truncated during page extraction — original value not recoverable; replaced by `skip = 10` in this commit]
|
| 271 |
for step in t_range:
|
| 272 |
optim.zero_grad()
|
| 273 |
|
|
@@ -289,7 +289,7 @@ def run_main_app(semantic_concept, word, script, font_selected, prompt_suffix, n
|
|
| 289 |
|
| 290 |
if step % skip == 0:
|
| 291 |
img_tensor = x.detach().cpu()
|
| 292 |
-
img_tensor = torch.nn.functional.interpolate(img_tensor, size=(…))  [removed line truncated during page extraction — original `size` not recoverable; replaced by `size=(300, 300)` in this commit]
|
| 293 |
img_tensor = img_tensor.permute(0, 2, 3, 1).squeeze(0)
|
| 294 |
gif_frames += [img_tensor.numpy()]
|
| 295 |
|
|
|
|
| 267 |
# training loop
|
| 268 |
t_range = tqdm(range(num_iter))
|
| 269 |
gif_frames = []
|
| 270 |
+
skip = 10
|
| 271 |
for step in t_range:
|
| 272 |
optim.zero_grad()
|
| 273 |
|
|
|
|
| 289 |
|
| 290 |
if step % skip == 0:
|
| 291 |
img_tensor = x.detach().cpu()
|
| 292 |
+
img_tensor = torch.nn.functional.interpolate(img_tensor, size=(300, 300), mode='bilinear', align_corners=False)
|
| 293 |
img_tensor = img_tensor.permute(0, 2, 3, 1).squeeze(0)
|
| 294 |
gif_frames += [img_tensor.numpy()]
|
| 295 |
|