Spaces:
Runtime error
Runtime error
Restore zeros
Browse files
app.py
CHANGED
|
@@ -455,8 +455,6 @@ def worker(input_image, image_position, prompts, n_prompt, seed, resolution, tot
|
|
| 455 |
clean_latent_indices = torch.cat([clean_latent_1x_indices, clean_latent_indices_start], dim=1)
|
| 456 |
|
| 457 |
def post_process(forward, generated_latents, total_generated_latent_frames, history_latents, high_vram, transformer, gpu, vae, history_pixels, latent_window_size, enable_preview, section_index, total_latent_sections, outputs_folder, mp4_crf, stream):
|
| 458 |
-
if int(history_latents.shape[2]) > total_generated_latent_frames:
|
| 459 |
-
history_latents = history_latents[:, :, -total_generated_latent_frames:, :, :] if forward else history_latents[:, :, :total_generated_latent_frames, :, :]
|
| 460 |
total_generated_latent_frames += int(generated_latents.shape[2])
|
| 461 |
history_latents = torch.cat([history_latents, generated_latents.to(history_latents)] if forward else [generated_latents.to(history_latents), history_latents], dim=2)
|
| 462 |
|
|
@@ -465,15 +463,18 @@ def worker(input_image, image_position, prompts, n_prompt, seed, resolution, tot
|
|
| 465 |
load_model_as_complete(vae, target_device=gpu)
|
| 466 |
|
| 467 |
if history_pixels is None:
|
| 468 |
-
|
|
|
|
| 469 |
else:
|
| 470 |
section_latent_frames = latent_window_size * 2
|
| 471 |
overlapped_frames = latent_window_size * 4 - 3
|
| 472 |
|
| 473 |
if forward:
|
| 474 |
-
|
|
|
|
| 475 |
else:
|
| 476 |
-
|
|
|
|
| 477 |
|
| 478 |
if not high_vram:
|
| 479 |
unload_complete_models(text_encoder, text_encoder_2, image_encoder, vae, transformer)
|
|
@@ -557,6 +558,11 @@ def worker(input_image, image_position, prompts, n_prompt, seed, resolution, tot
|
|
| 557 |
else:
|
| 558 |
clean_latent_indices_start, clean_latent_4x_indices, clean_latent_2x_indices, clean_latent_1x_indices, latent_indices = indices.split([1, 16, 2, 1, latent_window_size], dim=1)
|
| 559 |
clean_latent_indices = torch.cat([clean_latent_indices_start, clean_latent_1x_indices], dim=1)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 560 |
|
| 561 |
forward = True
|
| 562 |
section_index = first_section_index
|
|
@@ -1084,7 +1090,7 @@ title_html = """
|
|
| 1084 |
js = """
|
| 1085 |
function createGradioAnimation() {
|
| 1086 |
window.addEventListener("beforeunload", function(e) {
|
| 1087 |
-
if (document.getElementById('end-button') && !document.getElementById('end-button').disabled) {
|
| 1088 |
var confirmationMessage = 'A process is still running. '
|
| 1089 |
+ 'If you leave before saving, your changes will be lost.';
|
| 1090 |
|
|
|
|
| 455 |
clean_latent_indices = torch.cat([clean_latent_1x_indices, clean_latent_indices_start], dim=1)
|
| 456 |
|
| 457 |
def post_process(forward, generated_latents, total_generated_latent_frames, history_latents, high_vram, transformer, gpu, vae, history_pixels, latent_window_size, enable_preview, section_index, total_latent_sections, outputs_folder, mp4_crf, stream):
|
|
|
|
|
|
|
| 458 |
total_generated_latent_frames += int(generated_latents.shape[2])
|
| 459 |
history_latents = torch.cat([history_latents, generated_latents.to(history_latents)] if forward else [generated_latents.to(history_latents), history_latents], dim=2)
|
| 460 |
|
|
|
|
| 463 |
load_model_as_complete(vae, target_device=gpu)
|
| 464 |
|
| 465 |
if history_pixels is None:
|
| 466 |
+
real_history_latents = history_latents[:, :, -total_generated_latent_frames:, :, :] if forward else history_latents[:, :, :total_generated_latent_frames, :, :]
|
| 467 |
+
history_pixels = vae_decode(real_history_latents, vae).cpu()
|
| 468 |
else:
|
| 469 |
section_latent_frames = latent_window_size * 2
|
| 470 |
overlapped_frames = latent_window_size * 4 - 3
|
| 471 |
|
| 472 |
if forward:
|
| 473 |
+
real_history_latents = history_latents[:, :, -min(section_latent_frames, total_generated_latent_frames):, :, :]
|
| 474 |
+
history_pixels = soft_append_bcthw(history_pixels, vae_decode(real_history_latents, vae).cpu(), overlapped_frames)
|
| 475 |
else:
|
| 476 |
+
real_history_latents = history_latents[:, :, :min(section_latent_frames, total_generated_latent_frames), :, :]
|
| 477 |
+
history_pixels = soft_append_bcthw(vae_decode(real_history_latents, vae).cpu(), history_pixels, overlapped_frames)
|
| 478 |
|
| 479 |
if not high_vram:
|
| 480 |
unload_complete_models(text_encoder, text_encoder_2, image_encoder, vae, transformer)
|
|
|
|
| 558 |
else:
|
| 559 |
clean_latent_indices_start, clean_latent_4x_indices, clean_latent_2x_indices, clean_latent_1x_indices, latent_indices = indices.split([1, 16, 2, 1, latent_window_size], dim=1)
|
| 560 |
clean_latent_indices = torch.cat([clean_latent_indices_start, clean_latent_1x_indices], dim=1)
|
| 561 |
+
|
| 562 |
+
real_history_latents = history_latents[:, :, :total_generated_latent_frames, :, :]
|
| 563 |
+
zero_latents = history_latents[:, :, total_generated_latent_frames:, :, :]
|
| 564 |
+
history_latents = torch.cat([zero_latents, real_history_latents], dim=2)
|
| 565 |
+
real_history_latents = zero_latents = None
|
| 566 |
|
| 567 |
forward = True
|
| 568 |
section_index = first_section_index
|
|
|
|
| 1090 |
js = """
|
| 1091 |
function createGradioAnimation() {
|
| 1092 |
window.addEventListener("beforeunload", function(e) {
|
| 1093 |
+
if (document.getElementById('end-button') && !document.getElementById('end-button').disabled) {
|
| 1094 |
var confirmationMessage = 'A process is still running. '
|
| 1095 |
+ 'If you leave before saving, your changes will be lost.';
|
| 1096 |
|