text
stringlengths 0
284
|
|---|
p.denoising_strength = first_denoise
|
p.control_net_input_image = p.control_net_input_image.resize((initial_width, p.height))
|
frames.append(p.control_net_input_image)
|
if append_interrogation != "None":
|
p.prompt = original_prompt + ", " if original_prompt != "" else ""
|
if append_interrogation == "CLIP":
|
p.prompt += shared.interrogator.interrogate(p.init_images[0])
|
elif append_interrogation == "DeepBooru":
|
p.prompt += deepbooru.model.tag(p.init_images[0])
|
state.job = f"Iteration {i + 1}/{loops}, batch {n + 1}/{batch_count}"
|
processed = processing.process_images(p)
|
if initial_seed is None:
|
initial_seed = processed.seed
|
initial_info = processed.info
|
init_img = processed.images[0]
|
if(i > 0):
|
init_img = init_img.crop((initial_width, 0, initial_width*2, p.height))
|
if third_frame_image != "None":
|
if third_frame_image == "FirstGen" and i == 0:
|
third_image = init_img
|
third_image_index = 0
|
elif third_frame_image == "GuideImg" and i == 0:
|
third_image = original_init_image[0]
|
third_image_index = 0
|
elif third_frame_image == "Historical":
|
third_image = processed.images[0].crop((0, 0, initial_width, p.height))
|
third_image_index = (i-1)
|
p.init_images = [init_img]
|
if(freeze_seed):
|
p.seed = processed.seed
|
else:
|
p.seed = processed.seed + 1
|
history.append(init_img)
|
if opts.samples_save:
|
images.save_image(init_img, p.outpath_samples, "Frame", p.seed, p.prompt, opts.grid_format, info=info, short_filename=not opts.grid_extended_filename, grid=True, p=p)
|
frames.append(processed.images[0])
|
grid = images.image_grid(history, rows=1)
|
if opts.grid_save:
|
images.save_image(grid, p.outpath_grids, "grid", initial_seed, p.prompt, opts.grid_format, info=info, short_filename=not opts.grid_extended_filename, grid=True, p=p)
|
grids.append(grid)
|
# all_images += history + frames
|
all_images += history
|
p.seed = p.seed+1
|
if opts.return_grid:
|
all_images = grids + all_images
|
processed = Processed(p, all_images, initial_seed, initial_info)
|
return processed
|
Negative prompt:
|
Steps: 32, Sampler: UniPC, CFG scale: 7.5, Size: 540x960, Model hash: d3976436e9, Denoising strength: 0.5, Hires upscale: 2, Hires steps: 10, Hires upscaler: 4x-UltraSharp, Clip skip: 2
|
import math
|
import os
|
import sys
|
import traceback
|
import modules.scripts as scripts
|
import gradio as gr
|
from modules.processing import Processed, process_images
|
class Script(scripts.Script):
    """WebUI script that re-runs generation *n* times, randomizing the seed each run.

    Fixes over the original:
    - Collects the images from EVERY run (the original overwrote ``image``
      each iteration, returning only the last run and discarding the rest).
    - Reports the seed of the last completed run instead of the ``-1``
      placeholder that was assigned just before returning.
    - ``n == 0`` no longer raises ``UnboundLocalError`` on the unbound result.
    """

    def title(self):
        # Name shown in the WebUI "Script" dropdown.
        return "Run n times"

    def ui(self, is_img2img):
        # A single textbox for the repeat count; the returned list becomes
        # the extra positional args passed to run().
        n = gr.Textbox(label="n")
        return [n]

    def run(self, p, n):
        """Generate images ``int(n)`` times with a fresh random seed per run.

        p: the StableDiffusionProcessing object prepared by the WebUI.
        n: repeat count as entered in the textbox (string); must parse as int,
           otherwise ``ValueError`` propagates — same as the original behavior.

        Returns a ``Processed`` aggregating all images from all runs.
        """
        count = int(n)

        all_images = []
        last_proc = None
        for _ in range(count):
            p.seed = -1  # -1 tells the pipeline to draw a fresh random seed
            last_proc = process_images(p)
            all_images.extend(last_proc.images)

        if last_proc is None:
            # count == 0: nothing was generated; return an empty result
            # instead of crashing on an unbound local.
            return Processed(p, [], p.seed, "")

        return Processed(p, all_images, last_proc.seed, last_proc.info)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.