|
|
|
|
|
|
|
|
|
|
|
|
|
|
from torch import Tensor
|
|
|
from PIL import Image, ImageOps, ImageDraw, ImageFont
|
|
|
from PIL.PngImagePlugin import PngInfo
|
|
|
import numpy as np
|
|
|
import torch
|
|
|
|
|
|
import ast
|
|
|
from pathlib import Path
|
|
|
from importlib import import_module
|
|
|
import os
|
|
|
import sys
|
|
|
import copy
|
|
|
import subprocess
|
|
|
import json
|
|
|
import psutil
|
|
|
|
|
|
from comfy_extras.nodes_align_your_steps import AlignYourStepsScheduler
|
|
|
|
|
|
|
|
|
my_dir = os.path.dirname(os.path.abspath(__file__))
|
|
|
custom_nodes_dir = os.path.abspath(os.path.join(my_dir, '..'))
|
|
|
comfy_dir = os.path.abspath(os.path.join(my_dir, '..', '..'))
|
|
|
|
|
|
|
|
|
font_path = os.path.join(my_dir, 'arial.ttf')
|
|
|
|
|
|
|
|
|
sys.path.append(comfy_dir)
|
|
|
from nodes import LatentUpscaleBy, KSampler, KSamplerAdvanced, VAEDecode, VAEDecodeTiled, VAEEncode, VAEEncodeTiled, \
|
|
|
ImageScaleBy, CLIPSetLastLayer, CLIPTextEncode, ControlNetLoader, ControlNetApply, ControlNetApplyAdvanced, \
|
|
|
PreviewImage, MAX_RESOLUTION
|
|
|
from comfy_extras.nodes_upscale_model import UpscaleModelLoader, ImageUpscaleWithModel
|
|
|
from comfy_extras.nodes_clip_sdxl import CLIPTextEncodeSDXL, CLIPTextEncodeSDXLRefiner
|
|
|
import comfy.sample
|
|
|
import comfy.samplers
|
|
|
import comfy.sd
|
|
|
import comfy.utils
|
|
|
import comfy.latent_formats
|
|
|
sys.path.remove(comfy_dir)
|
|
|
|
|
|
|
|
|
sys.path.append(my_dir)
|
|
|
from tsc_utils import *
|
|
|
from .py import smZ_cfg_denoiser
|
|
|
from .py import smZ_rng_source
|
|
|
from .py import cg_mixed_seed_noise
|
|
|
from .py import city96_latent_upscaler
|
|
|
from .py import ttl_nn_latent_upscaler
|
|
|
from .py import bnk_tiled_samplers
|
|
|
from .py import bnk_adv_encode
|
|
|
sys.path.remove(my_dir)
|
|
|
|
|
|
from comfy import samplers
|
|
|
|
|
|
sys.path.append(custom_nodes_dir)
|
|
|
|
|
|
|
|
|
# Extra CFG added on top of the base cfg for the SDXL refiner pass
# (currently 0, i.e. the refiner runs at the same CFG as the base pass).
REFINER_CFG_OFFSET = 0

# Scheduler choices exposed by these nodes: ComfyUI's built-in schedulers
# plus the Align-Your-Steps presets handled specially at sampling time.
SCHEDULER_NAMES = samplers.SCHEDULER_NAMES + ["AYS SD1", "AYS SDXL", "AYS SVD"]
SCHEDULERS = samplers.KSampler.SCHEDULERS + ["AYS SD1", "AYS SDXL", "AYS SVD"]
|
|
|
|
|
|
|
|
|
|
|
|
def encode_prompts(positive_prompt, negative_prompt, token_normalization, weight_interpretation, clip, clip_skip,
                   refiner_clip, refiner_clip_skip, ascore, is_sdxl, empty_latent_width, empty_latent_height,
                   return_type="both"):
    """Encode positive/negative prompts for the base model and, optionally, the SDXL refiner.

    Applies clip-skip before encoding. The refiner branch only runs when
    is_sdxl is truthy and refiner_clip / refiner_clip_skip / ascore are all
    provided; ascore is an (aesthetic+ , aesthetic-) pair attached to the
    refiner conditionings.

    Returns, depending on return_type:
      "base"    -> (positive, negative, clip)
      "refiner" -> (refiner_positive, refiner_negative, refiner_clip)
      "both"    -> all six of the above
    (any other value falls through and returns None, as before).
    """
    base_pos = base_neg = refn_pos = refn_neg = None

    want_base = return_type in ("base", "both")
    want_refiner = return_type in ("refiner", "both")

    if want_base:
        # Clip-skip first, then advanced (token-weighted) encoding.
        clip = CLIPSetLastLayer().set_last_layer(clip, clip_skip)[0]
        encoder = bnk_adv_encode.AdvancedCLIPTextEncode()
        base_pos = encoder.encode(clip, positive_prompt, token_normalization, weight_interpretation)[0]
        base_neg = encoder.encode(clip, negative_prompt, token_normalization, weight_interpretation)[0]

    if want_refiner and is_sdxl and refiner_clip and refiner_clip_skip and ascore:
        refiner_clip = CLIPSetLastLayer().set_last_layer(refiner_clip, refiner_clip_skip)[0]
        encoder = bnk_adv_encode.AdvancedCLIPTextEncode()
        sdxlr = bnk_adv_encode.AddCLIPSDXLRParams()

        # Encode, then attach SDXL refiner params (size + aesthetic score).
        refn_pos = encoder.encode(refiner_clip, positive_prompt, token_normalization, weight_interpretation)[0]
        refn_pos = sdxlr.encode(refn_pos, empty_latent_width, empty_latent_height, ascore[0])[0]

        refn_neg = encoder.encode(refiner_clip, negative_prompt, token_normalization, weight_interpretation)[0]
        refn_neg = sdxlr.encode(refn_neg, empty_latent_width, empty_latent_height, ascore[1])[0]

    if return_type == "base":
        return base_pos, base_neg, clip
    if return_type == "refiner":
        return refn_pos, refn_neg, refiner_clip
    if return_type == "both":
        return base_pos, base_neg, clip, refn_pos, refn_neg, refiner_clip
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_EfficientLoader:
    """All-in-one loader node: checkpoint + optional LoRAs + VAE + clip-skip +
    prompt encoding + optional ControlNet stack + empty latent batch.

    Also serves as the implementation for the SDXL variant via
    loader_type="sdxl" (see TSC_EfficientLoaderSDXL), in which case clip_skip
    arrives as a (base, refiner) pair and a refiner checkpoint/ascore may be
    supplied.
    """

    @classmethod
    def INPUT_TYPES(cls):
        return {"required": { "ckpt_name": (folder_paths.get_filename_list("checkpoints"),),
                              "vae_name": (["Baked VAE"] + folder_paths.get_filename_list("vae"),),
                              "clip_skip": ("INT", {"default": -1, "min": -24, "max": -1, "step": 1}),
                              "lora_name": (["None"] + folder_paths.get_filename_list("loras"),),
                              "lora_model_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
                              "lora_clip_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
                              "positive": ("STRING", {"default": "CLIP_POSITIVE","multiline": True}),
                              "negative": ("STRING", {"default": "CLIP_NEGATIVE", "multiline": True}),
                              "token_normalization": (["none", "mean", "length", "length+mean"],),
                              "weight_interpretation": (["comfy", "A1111", "compel", "comfy++", "down_weight"],),
                              "empty_latent_width": ("INT", {"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 64}),
                              "empty_latent_height": ("INT", {"default": 512, "min": 64, "max": MAX_RESOLUTION, "step": 64}),
                              "batch_size": ("INT", {"default": 1, "min": 1, "max": 262144})},
                "optional": {"lora_stack": ("LORA_STACK", ),
                             "cnet_stack": ("CONTROL_NET_STACK",)},
                "hidden": { "prompt": "PROMPT",
                            "my_unique_id": "UNIQUE_ID",},
                }

    RETURN_TYPES = ("MODEL", "CONDITIONING", "CONDITIONING", "LATENT", "VAE", "CLIP", "DEPENDENCIES",)
    RETURN_NAMES = ("MODEL", "CONDITIONING+", "CONDITIONING-", "LATENT", "VAE", "CLIP", "DEPENDENCIES", )
    FUNCTION = "efficientloader"
    CATEGORY = "Efficiency Nodes/Loaders"

    def efficientloader(self, ckpt_name, vae_name, clip_skip, lora_name, lora_model_strength, lora_clip_strength,
                        positive, negative, token_normalization, weight_interpretation, empty_latent_width,
                        empty_latent_height, batch_size, lora_stack=None, cnet_stack=None, refiner_name="None",
                        ascore=None, prompt=None, my_unique_id=None, loader_type="regular"):

        # Drop cached globals belonging to nodes no longer present in the prompt.
        globals_cleanup(prompt)

        # Empty latent batch; SD latent space is 1/8 the pixel resolution with 4 channels.
        latent = torch.zeros([batch_size, 4, empty_latent_height // 8, empty_latent_width // 8]).cpu()

        # Per-object cache sizes for this loader (from tsc_utils).
        vae_cache, ckpt_cache, lora_cache, refn_cache = get_cache_numbers("Efficient Loader")

        if lora_name != "None" or lora_stack:
            # Collect the single-widget LoRA plus any stacked LoRAs into one list.
            lora_params = []
            if lora_name != "None":
                lora_params.append((lora_name, lora_model_strength, lora_clip_strength))
            if lora_stack:
                lora_params.extend(lora_stack)

            # load_lora loads the checkpoint and applies the LoRA chain (cached).
            model, clip = load_lora(lora_params, ckpt_name, my_unique_id, cache=lora_cache, ckpt_cache=ckpt_cache, cache_overwrite=True)

            if vae_name == "Baked VAE":
                # load_lora does not return a VAE; fetch the checkpoint's baked one.
                vae = get_bvae_by_ckpt_name(ckpt_name)
        else:
            model, clip, vae = load_checkpoint(ckpt_name, my_unique_id, cache=ckpt_cache, cache_overwrite=True)
            lora_params = None

        if refiner_name != "None":
            refiner_model, refiner_clip, _ = load_checkpoint(refiner_name, my_unique_id, output_vae=False,
                                                             cache=refn_cache, cache_overwrite=True, ckpt_type="refn")
        else:
            refiner_model = refiner_clip = None

        # For SDXL the clip_skip argument is a (base, refiner) pair; split it here.
        refiner_clip_skip = clip_skip[1] if loader_type == "sdxl" else None
        clip_skip = clip_skip[0] if loader_type == "sdxl" else clip_skip

        # Encode base (and, for SDXL, refiner) prompt conditionings.
        positive_encoded, negative_encoded, clip, refiner_positive_encoded, refiner_negative_encoded, refiner_clip = \
            encode_prompts(positive, negative, token_normalization, weight_interpretation, clip, clip_skip,
                           refiner_clip, refiner_clip_skip, ascore, loader_type == "sdxl",
                           empty_latent_width, empty_latent_height)

        # Apply any stacked ControlNets to the freshly-encoded conditionings.
        if cnet_stack:
            controlnet_conditioning = TSC_Apply_ControlNet_Stack().apply_cnet_stack(positive_encoded, negative_encoded, cnet_stack)
            positive_encoded, negative_encoded = controlnet_conditioning[0], controlnet_conditioning[1]

        # A user-selected VAE overrides the checkpoint's baked VAE.
        if vae_name != "Baked VAE":
            vae = load_vae(vae_name, my_unique_id, cache=vae_cache, cache_overwrite=True)

        # Everything downstream scripts (e.g. XY Plot) need to re-load / re-encode.
        dependencies = (vae_name, ckpt_name, clip, clip_skip, refiner_name, refiner_clip, refiner_clip_skip,
                        positive, negative, token_normalization, weight_interpretation, ascore,
                        empty_latent_width, empty_latent_height, lora_params, cnet_stack)

        print_loaded_objects_entries(my_unique_id, prompt)

        if loader_type == "regular":
            return (model, positive_encoded, negative_encoded, {"samples":latent}, vae, clip, dependencies,)
        elif loader_type == "sdxl":
            # First output is the packed SDXL_TUPLE consumed by KSampler SDXL (Eff.).
            return ((model, clip, positive_encoded, negative_encoded, refiner_model, refiner_clip,
                     refiner_positive_encoded, refiner_negative_encoded), {"samples":latent}, vae, dependencies,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_EfficientLoaderSDXL(TSC_EfficientLoader):
    """SDXL flavour of the Efficient Loader: base + refiner checkpoints,
    per-model clip-skip, and aesthetic scores. Delegates all actual work to
    TSC_EfficientLoader.efficientloader with loader_type="sdxl".
    """

    @classmethod
    def INPUT_TYPES(cls):
        return {"required": { "base_ckpt_name": (folder_paths.get_filename_list("checkpoints"),),
                              "base_clip_skip": ("INT", {"default": -2, "min": -24, "max": -1, "step": 1}),
                              "refiner_ckpt_name": (["None"] + folder_paths.get_filename_list("checkpoints"),),
                              "refiner_clip_skip": ("INT", {"default": -2, "min": -24, "max": -1, "step": 1}),
                              "positive_ascore": ("FLOAT", {"default": 6.0, "min": 0.0, "max": 1000.0, "step": 0.01}),
                              "negative_ascore": ("FLOAT", {"default": 2.0, "min": 0.0, "max": 1000.0, "step": 0.01}),
                              "vae_name": (["Baked VAE"] + folder_paths.get_filename_list("vae"),),
                              "positive": ("STRING", {"default": "CLIP_POSITIVE", "multiline": True}),
                              "negative": ("STRING", {"default": "CLIP_NEGATIVE", "multiline": True}),
                              "token_normalization": (["none", "mean", "length", "length+mean"],),
                              "weight_interpretation": (["comfy", "A1111", "compel", "comfy++", "down_weight"],),
                              "empty_latent_width": ("INT", {"default": 1024, "min": 64, "max": MAX_RESOLUTION, "step": 64}),
                              "empty_latent_height": ("INT", {"default": 1024, "min": 64, "max": MAX_RESOLUTION, "step": 64}),
                              "batch_size": ("INT", {"default": 1, "min": 1, "max": 64})},
                "optional": {"lora_stack": ("LORA_STACK", ), "cnet_stack": ("CONTROL_NET_STACK",),},
                "hidden": { "prompt": "PROMPT", "my_unique_id": "UNIQUE_ID",},
                }

    RETURN_TYPES = ("SDXL_TUPLE", "LATENT", "VAE", "DEPENDENCIES",)
    RETURN_NAMES = ("SDXL_TUPLE", "LATENT", "VAE", "DEPENDENCIES", )
    FUNCTION = "efficientloaderSDXL"
    CATEGORY = "Efficiency Nodes/Loaders"

    def efficientloaderSDXL(self, base_ckpt_name, base_clip_skip, refiner_ckpt_name, refiner_clip_skip, positive_ascore,
                            negative_ascore, vae_name, positive, negative, token_normalization, weight_interpretation,
                            empty_latent_width, empty_latent_height, batch_size, lora_stack=None, cnet_stack=None,
                            prompt=None, my_unique_id=None):
        # Base/refiner clip-skips travel as a pair; the parent splits them when loader_type="sdxl".
        clip_skip = (base_clip_skip, refiner_clip_skip)
        # This node exposes no single-LoRA widgets; LoRAs arrive only via lora_stack.
        lora_name = "None"
        lora_model_strength = lora_clip_strength = 0
        return super().efficientloader(base_ckpt_name, vae_name, clip_skip, lora_name, lora_model_strength, lora_clip_strength,
                         positive, negative, token_normalization, weight_interpretation, empty_latent_width, empty_latent_height,
                         batch_size, lora_stack=lora_stack, cnet_stack=cnet_stack, refiner_name=refiner_ckpt_name,
                         ascore=(positive_ascore, negative_ascore), prompt=prompt, my_unique_id=my_unique_id, loader_type="sdxl")
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_Unpack_SDXL_Tuple:
    """Split an SDXL_TUPLE into its eight base/refiner components."""

    @classmethod
    def INPUT_TYPES(cls):
        return {"required": {"sdxl_tuple": ("SDXL_TUPLE",)},}

    RETURN_TYPES = ("MODEL", "CLIP", "CONDITIONING", "CONDITIONING", "MODEL", "CLIP", "CONDITIONING", "CONDITIONING",)
    RETURN_NAMES = ("BASE_MODEL", "BASE_CLIP", "BASE_CONDITIONING+", "BASE_CONDITIONING-",
                    "REFINER_MODEL", "REFINER_CLIP", "REFINER_CONDITIONING+", "REFINER_CONDITIONING-",)
    FUNCTION = "unpack_sdxl_tuple"
    CATEGORY = "Efficiency Nodes/Misc"

    def unpack_sdxl_tuple(self, sdxl_tuple):
        # Fan the first eight entries out as individual node outputs.
        base_model, base_clip, base_pos, base_neg, refn_model, refn_clip, refn_pos, refn_neg = sdxl_tuple[:8]
        return (base_model, base_clip, base_pos, base_neg,
                refn_model, refn_clip, refn_pos, refn_neg,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_Pack_SDXL_Tuple:
    """Bundle base and refiner model/clip/conditioning into one SDXL_TUPLE."""

    @classmethod
    def INPUT_TYPES(cls):
        required = {"base_model": ("MODEL",),
                    "base_clip": ("CLIP",),
                    "base_positive": ("CONDITIONING",),
                    "base_negative": ("CONDITIONING",),
                    "refiner_model": ("MODEL",),
                    "refiner_clip": ("CLIP",),
                    "refiner_positive": ("CONDITIONING",),
                    "refiner_negative": ("CONDITIONING",)}
        return {"required": required}

    RETURN_TYPES = ("SDXL_TUPLE",)
    RETURN_NAMES = ("SDXL_TUPLE",)
    FUNCTION = "pack_sdxl_tuple"
    CATEGORY = "Efficiency Nodes/Misc"

    def pack_sdxl_tuple(self, base_model, base_clip, base_positive, base_negative,
                        refiner_model, refiner_clip, refiner_positive, refiner_negative):
        # Order matches what TSC_Unpack_SDXL_Tuple and the SDXL KSampler expect.
        sdxl_tuple = (base_model, base_clip, base_positive, base_negative,
                      refiner_model, refiner_clip, refiner_positive, refiner_negative)
        return (sdxl_tuple,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_LoRA_Stacker:
    """Builds a LORA_STACK: a list of (lora_name, model_strength, clip_strength)
    tuples, optionally appended to an incoming stack.

    "simple" mode uses one weight for both model and clip strengths;
    "advanced" mode uses separate model/clip strengths.
    """
    modes = ["simple", "advanced"]

    @classmethod
    def INPUT_TYPES(cls):
        loras = ["None"] + folder_paths.get_filename_list("loras")

        inputs = {
            "required": {
                "input_mode": (cls.modes,),
                "lora_count": ("INT", {"default": 3, "min": 0, "max": 50, "step": 1}),
            }
        }

        # Bug fix: lora_count goes up to 50, so 50 widget slots are required.
        # range(1, 50) only produced 49, leaving lora_name_50 / *_str_50 undefined.
        for i in range(1, 51):
            inputs["required"][f"lora_name_{i}"] = (loras,)
            inputs["required"][f"lora_wt_{i}"] = ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01})
            inputs["required"][f"model_str_{i}"] = ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01})
            inputs["required"][f"clip_str_{i}"] = ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01})

        inputs["optional"] = {
            "lora_stack": ("LORA_STACK",)
        }
        return inputs

    RETURN_TYPES = ("LORA_STACK",)
    RETURN_NAMES = ("LORA_STACK",)
    FUNCTION = "lora_stacker"
    CATEGORY = "Efficiency Nodes/Stackers"

    def lora_stacker(self, input_mode, lora_count, lora_stack=None, **kwargs):
        """Collect the first `lora_count` slots into a LORA_STACK list.

        Slots set to "None" are skipped; a missing kwarg resolves to None via
        kwargs.get and is skipped as well (previously a missing slot produced
        a bogus (None, ...) entry, since None != "None").
        """
        loras = [kwargs.get(f"lora_name_{i}") for i in range(1, lora_count + 1)]

        if input_mode == "simple":
            # One weight drives both model and clip strength.
            weights = [kwargs.get(f"lora_wt_{i}") for i in range(1, lora_count + 1)]
            loras = [(lora_name, lora_weight, lora_weight) for lora_name, lora_weight in zip(loras, weights)
                     if lora_name not in (None, "None")]
        else:
            model_strs = [kwargs.get(f"model_str_{i}") for i in range(1, lora_count + 1)]
            clip_strs = [kwargs.get(f"clip_str_{i}") for i in range(1, lora_count + 1)]
            loras = [(lora_name, model_str, clip_str) for lora_name, model_str, clip_str in
                     zip(loras, model_strs, clip_strs) if lora_name not in (None, "None")]

        # Prepend-style chaining: incoming stack entries are appended after this node's.
        if lora_stack is not None:
            loras.extend([l for l in lora_stack if l[0] != "None"])

        return (loras,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_Control_Net_Stacker:
    """Appends one ControlNet entry (control_net, image, strength,
    start_percent, end_percent) to a CONTROL_NET_STACK list."""

    @classmethod
    def INPUT_TYPES(cls):
        return {"required": {"control_net": ("CONTROL_NET",),
                             "image": ("IMAGE",),
                             "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}),
                             "start_percent": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.001}),
                             "end_percent": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.001})},
                "optional": {"cnet_stack": ("CONTROL_NET_STACK",)},
                }

    RETURN_TYPES = ("CONTROL_NET_STACK",)
    RETURN_NAMES = ("CNET_STACK",)
    FUNCTION = "control_net_stacker"
    CATEGORY = "Efficiency Nodes/Stackers"

    def control_net_stacker(self, control_net, image, strength, start_percent, end_percent, cnet_stack=None):
        """Return a new stack with this node's entry appended.

        Bug fix: the previous implementation extended the incoming list in
        place. Because the same list object can be wired into several nodes
        (and cached between executions), in-place mutation made entries
        accumulate across runs; copying first keeps the input untouched.
        """
        cnet_stack = [] if cnet_stack is None else list(cnet_stack)
        cnet_stack.append((control_net, image, strength, start_percent, end_percent))
        return (cnet_stack,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_Apply_ControlNet_Stack:
    """Applies every ControlNet entry of a CONTROL_NET_STACK to a
    positive/negative conditioning pair, chaining them in order."""

    @classmethod
    def INPUT_TYPES(cls):
        return {"required": {"positive": ("CONDITIONING",),
                             "negative": ("CONDITIONING",)},
                "optional": {"cnet_stack": ("CONTROL_NET_STACK",)}
                }

    RETURN_TYPES = ("CONDITIONING","CONDITIONING",)
    RETURN_NAMES = ("CONDITIONING+","CONDITIONING-",)
    FUNCTION = "apply_cnet_stack"
    CATEGORY = "Efficiency Nodes/Stackers"

    def apply_cnet_stack(self, positive, negative, cnet_stack=None):
        # No stack connected: pass the conditionings through untouched.
        if cnet_stack is None:
            return (positive, negative)

        # Apply each ControlNet in sequence; every application feeds the next.
        for control_net, image, strength, start_percent, end_percent in cnet_stack:
            applied = ControlNetApplyAdvanced().apply_controlnet(positive, negative, control_net, image,
                                                                 strength, start_percent, end_percent)
            positive, negative = applied[0], applied[1]

        return (positive, negative, )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_KSampler:
|
|
|
empty_image = pil2tensor(Image.new('RGBA', (1, 1), (0, 0, 0, 0)))
|
|
|
|
|
|
    @classmethod
    def INPUT_TYPES(cls):
        # Standard KSampler inputs plus the Efficiency extras:
        #   preview_method - per-node live-preview override (applied during sampling only),
        #   vae_decode     - whether/how to decode the latent ("true (tiled)" uses tiled decode),
        #   optional_vae   - VAE needed for decoding / scripts,
        #   script         - SCRIPT dict from script nodes (xyplot, hiresfix, tile, anim, noise).
        return {"required":
                    {"model": ("MODEL",),
                     "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}),
                     "steps": ("INT", {"default": 20, "min": 1, "max": 10000}),
                     "cfg": ("FLOAT", {"default": 7.0, "min": 0.0, "max": 100.0}),
                     "sampler_name": (comfy.samplers.KSampler.SAMPLERS,),
                     "scheduler": (SCHEDULERS,),
                     "positive": ("CONDITIONING",),
                     "negative": ("CONDITIONING",),
                     "latent_image": ("LATENT",),
                     "denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}),
                     "preview_method": (["auto", "latent2rgb", "taesd", "vae_decoded_only", "none"],),
                     "vae_decode": (["true", "true (tiled)", "false"],),
                     },
                "optional": { "optional_vae": ("VAE",),
                              "script": ("SCRIPT",),},
                "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID",},
                }
|
|
|
|
|
|
RETURN_TYPES = ("MODEL", "CONDITIONING", "CONDITIONING", "LATENT", "VAE", "IMAGE", )
|
|
|
RETURN_NAMES = ("MODEL", "CONDITIONING+", "CONDITIONING-", "LATENT", "VAE", "IMAGE", )
|
|
|
OUTPUT_NODE = True
|
|
|
FUNCTION = "sample"
|
|
|
CATEGORY = "Efficiency Nodes/Sampling"
|
|
|
|
|
|
def sample(self, model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image,
|
|
|
preview_method, vae_decode, denoise=1.0, prompt=None, extra_pnginfo=None, my_unique_id=None,
|
|
|
optional_vae=(None,), script=None, add_noise=None, start_at_step=None, end_at_step=None,
|
|
|
return_with_leftover_noise=None, sampler_type="regular"):
|
|
|
|
|
|
|
|
|
vae = optional_vae
|
|
|
|
|
|
|
|
|
if vae == (None,) and vae_decode != "false":
|
|
|
print(f"{warning('KSampler(Efficient) Warning:')} No vae input detected, proceeding as if vae_decode was false.\n")
|
|
|
vae_decode = "false"
|
|
|
|
|
|
|
|
|
|
|
|
if sampler_type == "sdxl":
|
|
|
sdxl_tuple = model
|
|
|
model, _, positive, negative, refiner_model, _, refiner_positive, refiner_negative = sdxl_tuple
|
|
|
else:
|
|
|
refiner_model = refiner_positive = refiner_negative = None
|
|
|
|
|
|
|
|
|
def keys_exist_in_script(*keys):
|
|
|
return any(key in script for key in keys) if script else False
|
|
|
|
|
|
|
|
|
def vae_decode_latent(vae, samples, vae_decode):
|
|
|
return VAEDecodeTiled().decode(vae,samples,320)[0] if "tiled" in vae_decode else VAEDecode().decode(vae,samples)[0]
|
|
|
|
|
|
def vae_encode_image(vae, pixels, vae_decode):
|
|
|
return VAEEncodeTiled().encode(vae,pixels,320)[0] if "tiled" in vae_decode else VAEEncode().encode(vae,pixels)[0]
|
|
|
|
|
|
|
|
|
def process_latent_image(model, seed, steps, cfg, sampler_name, scheduler, positive, negative, latent_image,
|
|
|
denoise, sampler_type, add_noise, start_at_step, end_at_step, return_with_leftover_noise,
|
|
|
refiner_model, refiner_positive, refiner_negative, vae, vae_decode, preview_method):
|
|
|
|
|
|
|
|
|
original_calculation = comfy.samplers.calculate_sigmas
|
|
|
original_KSampler_SCHEDULERS = comfy.samplers.KSampler.SCHEDULERS
|
|
|
previous_preview_method = global_preview_method()
|
|
|
original_prepare_noise = comfy.sample.prepare_noise
|
|
|
original_KSampler = comfy.samplers.KSampler
|
|
|
original_model_str = str(model)
|
|
|
|
|
|
|
|
|
def calculate_sigmas(model_sampling, scheduler_name: str, steps):
|
|
|
if scheduler_name.startswith("AYS"):
|
|
|
return AlignYourStepsScheduler().get_sigmas(scheduler_name.split(" ")[1], steps, denoise=1.0)[0]
|
|
|
return original_calculation(model_sampling, scheduler_name, steps)
|
|
|
|
|
|
comfy.samplers.KSampler.SCHEDULERS = SCHEDULERS
|
|
|
comfy.samplers.calculate_sigmas = calculate_sigmas
|
|
|
|
|
|
|
|
|
samples = images = gifs = preview = cnet_imgs = None
|
|
|
|
|
|
try:
|
|
|
|
|
|
set_preview_method(preview_method)
|
|
|
|
|
|
|
|
|
|
|
|
if keys_exist_in_script("noise"):
|
|
|
rng_source, cfg_denoiser, add_seed_noise, m_seed, m_weight = script["noise"]
|
|
|
smZ_rng_source.rng_rand_source(rng_source)
|
|
|
if cfg_denoiser:
|
|
|
comfy.samplers.KSampler = smZ_cfg_denoiser.SDKSampler
|
|
|
if add_seed_noise:
|
|
|
comfy.sample.prepare_noise = cg_mixed_seed_noise.get_mixed_noise_function(comfy.sample.prepare_noise, m_seed, m_weight)
|
|
|
else:
|
|
|
m_seed = m_weight = None
|
|
|
else:
|
|
|
rng_source = cfg_denoiser = add_seed_noise = m_seed = m_weight = None
|
|
|
|
|
|
|
|
|
|
|
|
if keys_exist_in_script("anim"):
|
|
|
if preview_method != "none":
|
|
|
set_preview_method("none")
|
|
|
print(f"{warning('KSampler(Efficient) Warning:')} Live preview disabled for animatediff generations.")
|
|
|
motion_model, beta_schedule, context_options, frame_rate, loop_count, format, pingpong, save_image = script["anim"]
|
|
|
model = AnimateDiffLoaderWithContext().load_mm_and_inject_params(model, motion_model, beta_schedule, context_options)[0]
|
|
|
|
|
|
|
|
|
|
|
|
latent_image_hash = tensor_to_hash(latent_image["samples"])
|
|
|
positive_hash = tensor_to_hash(positive[0][0])
|
|
|
negative_hash = tensor_to_hash(negative[0][0])
|
|
|
refiner_positive_hash = tensor_to_hash(refiner_positive[0][0]) if refiner_positive is not None else None
|
|
|
refiner_negative_hash = tensor_to_hash(refiner_negative[0][0]) if refiner_negative is not None else None
|
|
|
|
|
|
|
|
|
model_identifier = [original_model_str, motion_model, beta_schedule, context_options] if keys_exist_in_script("anim")\
|
|
|
else [original_model_str]
|
|
|
|
|
|
parameters = [model_identifier] + [seed, steps, cfg, sampler_name, scheduler, positive_hash, negative_hash,
|
|
|
latent_image_hash, denoise, sampler_type, add_noise, start_at_step,
|
|
|
end_at_step, return_with_leftover_noise, refiner_model, refiner_positive_hash,
|
|
|
refiner_negative_hash, rng_source, cfg_denoiser, add_seed_noise, m_seed, m_weight]
|
|
|
|
|
|
|
|
|
parameters = [str(item) if not isinstance(item, type(latent_image_hash)) else item for item in parameters]
|
|
|
|
|
|
|
|
|
samples = load_ksampler_results("latent", my_unique_id, parameters)
|
|
|
|
|
|
if samples is None:
|
|
|
store_ksampler_results("image", my_unique_id, None)
|
|
|
store_ksampler_results("cnet_img", my_unique_id, None)
|
|
|
|
|
|
if samples is not None:
|
|
|
images = load_ksampler_results("image", my_unique_id)
|
|
|
cnet_imgs = True
|
|
|
|
|
|
|
|
|
elif sampler_type == "regular":
|
|
|
samples = KSampler().sample(model, seed, steps, cfg, sampler_name, scheduler, positive, negative,
|
|
|
latent_image, denoise=denoise)[0] if denoise>0 else latent_image
|
|
|
|
|
|
elif sampler_type == "advanced":
|
|
|
samples = KSamplerAdvanced().sample(model, add_noise, seed, steps, cfg, sampler_name, scheduler,
|
|
|
positive, negative, latent_image, start_at_step, end_at_step,
|
|
|
return_with_leftover_noise, denoise=1.0)[0]
|
|
|
|
|
|
elif sampler_type == "sdxl":
|
|
|
|
|
|
if end_at_step == -1:
|
|
|
end_at_step = steps
|
|
|
|
|
|
|
|
|
add_noise = return_with_leftover_noise = "enable"
|
|
|
samples = KSamplerAdvanced().sample(model, add_noise, seed, steps, cfg, sampler_name, scheduler,
|
|
|
positive, negative, latent_image, start_at_step, end_at_step,
|
|
|
return_with_leftover_noise, denoise=1.0)[0]
|
|
|
|
|
|
|
|
|
if refiner_model and end_at_step < steps:
|
|
|
add_noise = return_with_leftover_noise = "disable"
|
|
|
samples = KSamplerAdvanced().sample(refiner_model, add_noise, seed, steps, cfg + REFINER_CFG_OFFSET,
|
|
|
sampler_name, scheduler, refiner_positive, refiner_negative,
|
|
|
samples, end_at_step, steps,
|
|
|
return_with_leftover_noise, denoise=1.0)[0]
|
|
|
|
|
|
|
|
|
if not any(keys_exist_in_script(key) for key in ["xyplot"]):
|
|
|
store_ksampler_results("latent", my_unique_id, samples, parameters)
|
|
|
|
|
|
|
|
|
|
|
|
if keys_exist_in_script("hiresfix"):
|
|
|
|
|
|
upscale_type, latent_upscaler, upscale_by, use_same_seed, hires_seed, hires_steps, hires_denoise,\
|
|
|
iterations, hires_control_net, hires_cnet_strength, preprocessor, preprocessor_imgs, \
|
|
|
latent_upscale_function, latent_upscale_model, pixel_upscale_model = script["hiresfix"]
|
|
|
|
|
|
|
|
|
hires_seed = seed if use_same_seed else hires_seed
|
|
|
|
|
|
|
|
|
if latent_upscale_model is None:
|
|
|
latent_upscale_model = model
|
|
|
elif keys_exist_in_script("anim"):
|
|
|
latent_upscale_model = \
|
|
|
AnimateDiffLoaderWithContext().load_mm_and_inject_params(latent_upscale_model, motion_model,
|
|
|
beta_schedule, context_options)[0]
|
|
|
|
|
|
|
|
|
if hires_control_net is not None:
|
|
|
|
|
|
if cnet_imgs is True:
|
|
|
cnet_imgs = load_ksampler_results("cnet_img", my_unique_id, [preprocessor])
|
|
|
|
|
|
if cnet_imgs is None:
|
|
|
if images is None:
|
|
|
images = vae_decode_latent(vae, samples, vae_decode)
|
|
|
store_ksampler_results("image", my_unique_id, images)
|
|
|
cnet_imgs = AIO_Preprocessor().execute(preprocessor, images)[0]
|
|
|
store_ksampler_results("cnet_img", my_unique_id, cnet_imgs, [preprocessor])
|
|
|
positive = ControlNetApply().apply_controlnet(positive, hires_control_net, cnet_imgs, hires_cnet_strength)[0]
|
|
|
|
|
|
|
|
|
if upscale_type == "latent":
|
|
|
for _ in range(iterations):
|
|
|
upscaled_latent_image = latent_upscale_function().upscale(samples, latent_upscaler, upscale_by)[0]
|
|
|
samples = KSampler().sample(latent_upscale_model, hires_seed, hires_steps, cfg, sampler_name, scheduler,
|
|
|
positive, negative, upscaled_latent_image, denoise=hires_denoise)[0]
|
|
|
images = None
|
|
|
elif upscale_type == "pixel":
|
|
|
if images is None:
|
|
|
images = vae_decode_latent(vae, samples, vae_decode)
|
|
|
store_ksampler_results("image", my_unique_id, images)
|
|
|
images = ImageUpscaleWithModel().upscale(pixel_upscale_model, images)[0]
|
|
|
images = ImageScaleBy().upscale(images, "nearest-exact", upscale_by/4)[0]
|
|
|
elif upscale_type == "both":
|
|
|
for _ in range(iterations):
|
|
|
if images is None:
|
|
|
images = vae_decode_latent(vae, samples, vae_decode)
|
|
|
store_ksampler_results("image", my_unique_id, images)
|
|
|
images = ImageUpscaleWithModel().upscale(pixel_upscale_model, images)[0]
|
|
|
images = ImageScaleBy().upscale(images, "nearest-exact", upscale_by/4)[0]
|
|
|
|
|
|
samples = vae_encode_image(vae, images, vae_decode)
|
|
|
upscaled_latent_image = latent_upscale_function().upscale(samples, latent_upscaler, 1)[0]
|
|
|
samples = KSampler().sample(latent_upscale_model, hires_seed, hires_steps, cfg, sampler_name, scheduler,
|
|
|
positive, negative, upscaled_latent_image, denoise=hires_denoise)[0]
|
|
|
images = None
|
|
|
|
|
|
|
|
|
|
|
|
if keys_exist_in_script("tile"):
|
|
|
|
|
|
upscale_by, tile_size, tiling_strategy, tiling_steps, tile_seed, tiled_denoise,\
|
|
|
tile_controlnet, strength = script["tile"]
|
|
|
|
|
|
|
|
|
if images is None:
|
|
|
images = vae_decode_latent(vae, samples, vae_decode)
|
|
|
if not any(keys_exist_in_script(key) for key in ["xyplot", "hiresfix"]):
|
|
|
store_ksampler_results("image", my_unique_id, images)
|
|
|
|
|
|
|
|
|
upscaled_image = ImageScaleBy().upscale(images, "nearest-exact", upscale_by)[0]
|
|
|
upscaled_latent = vae_encode_image(vae, upscaled_image, vae_decode)
|
|
|
|
|
|
|
|
|
if tile_controlnet is not None:
|
|
|
positive = ControlNetApply().apply_controlnet(positive, tile_controlnet, upscaled_image, 1)[0]
|
|
|
|
|
|
|
|
|
TSampler = bnk_tiled_samplers.TiledKSampler
|
|
|
samples = TSampler().sample(model, tile_seed, tile_size, tile_size, tiling_strategy, tiling_steps, cfg,
|
|
|
sampler_name, scheduler, positive, negative, upscaled_latent,
|
|
|
denoise=tiled_denoise)[0]
|
|
|
images = None
|
|
|
|
|
|
|
|
|
|
|
|
if keys_exist_in_script("anim"):
|
|
|
if images is None:
|
|
|
images = vae_decode_latent(vae, samples, vae_decode)
|
|
|
if not any(keys_exist_in_script(key) for key in ["xyplot", "hiresfix", "tile"]):
|
|
|
store_ksampler_results("image", my_unique_id, images)
|
|
|
gifs = AnimateDiffCombine().generate_gif(images, frame_rate, loop_count, format=format,
|
|
|
pingpong=pingpong, save_image=save_image, prompt=prompt, extra_pnginfo=extra_pnginfo)["ui"]["gifs"]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
if "true" in vae_decode:
|
|
|
if images is None:
|
|
|
images = vae_decode_latent(vae, samples, vae_decode)
|
|
|
|
|
|
if all(not keys_exist_in_script(key) for key in ["xyplot", "hiresfix", "tile", "anim"]):
|
|
|
store_ksampler_results("image", my_unique_id, images)
|
|
|
|
|
|
|
|
|
if cnet_imgs is not None and not True:
|
|
|
if preprocessor_imgs and upscale_type == "latent":
|
|
|
if keys_exist_in_script("xyplot"):
|
|
|
print(
|
|
|
f"{warning('HighRes-Fix Warning:')} Preprocessor images auto-disabled when XY Plotting.")
|
|
|
else:
|
|
|
|
|
|
if images.shape[1:3] != cnet_imgs.shape[1:3]:
|
|
|
cnet_imgs = quick_resize(cnet_imgs, images.shape)
|
|
|
images = torch.cat([images, cnet_imgs], dim=0)
|
|
|
|
|
|
|
|
|
if keys_exist_in_script("anim"):
|
|
|
preview = {"gifs": gifs, "images": list()}
|
|
|
elif preview_method == "none" or (preview_method == "vae_decoded_only" and vae_decode == "false"):
|
|
|
preview = {"images": list()}
|
|
|
elif images is not None:
|
|
|
preview = PreviewImage().save_images(images, prompt=prompt, extra_pnginfo=extra_pnginfo)["ui"]
|
|
|
|
|
|
|
|
|
if images is None and vae_decode == "false":
|
|
|
images = TSC_KSampler.empty_image
|
|
|
|
|
|
finally:
|
|
|
|
|
|
set_preview_method(previous_preview_method)
|
|
|
comfy.samplers.KSampler = original_KSampler
|
|
|
comfy.sample.prepare_noise = original_prepare_noise
|
|
|
comfy.samplers.calculate_sigmas = original_calculation
|
|
|
comfy.samplers.KSampler.SCHEDULERS = original_KSampler_SCHEDULERS
|
|
|
|
|
|
return samples, images, gifs, preview
|
|
|
|
|
|
|
|
|
|
|
|
globals_cleanup(prompt)
|
|
|
|
|
|
|
|
|
|
|
|
if not keys_exist_in_script("xyplot"):
|
|
|
|
|
|
|
|
|
samples, images, gifs, preview = process_latent_image(model, seed, steps, cfg, sampler_name, scheduler,
|
|
|
positive, negative, latent_image, denoise, sampler_type, add_noise,
|
|
|
start_at_step, end_at_step, return_with_leftover_noise, refiner_model,
|
|
|
refiner_positive, refiner_negative, vae, vae_decode, preview_method)
|
|
|
|
|
|
if sampler_type == "sdxl":
|
|
|
result = (sdxl_tuple, samples, vae, images,)
|
|
|
else:
|
|
|
result = (model, positive, negative, samples, vae, images,)
|
|
|
|
|
|
if preview is None:
|
|
|
return {"result": result}
|
|
|
else:
|
|
|
return {"ui": preview, "result": result}
|
|
|
|
|
|
|
|
|
|
|
|
elif keys_exist_in_script("xyplot"):
|
|
|
|
|
|
|
|
|
if vae == (None,):
|
|
|
print(f"{error('KSampler(Efficient) Error:')} VAE input must be connected in order to use the XY Plot script.")
|
|
|
|
|
|
return {"ui": {"images": list()},
|
|
|
"result": (model, positive, negative, latent_image, vae, TSC_KSampler.empty_image,)}
|
|
|
|
|
|
|
|
|
if "true" not in vae_decode:
|
|
|
print(f"{warning('KSampler(Efficient) Warning:')} VAE decoding must be set to \'true\'"
|
|
|
" for the XY Plot script, proceeding as if \'true\'.\n")
|
|
|
|
|
|
|
|
|
|
|
|
vae_name = None
|
|
|
ckpt_name = None
|
|
|
clip = None
|
|
|
clip_skip = None
|
|
|
refiner_name = None
|
|
|
refiner_clip = None
|
|
|
refiner_clip_skip = None
|
|
|
positive_prompt = None
|
|
|
negative_prompt = None
|
|
|
ascore = None
|
|
|
empty_latent_width = None
|
|
|
empty_latent_height = None
|
|
|
lora_stack = None
|
|
|
cnet_stack = None
|
|
|
|
|
|
|
|
|
samples_tensors = torch.split(latent_image['samples'], 1, dim=0)
|
|
|
|
|
|
|
|
|
if 'noise_mask' in latent_image:
|
|
|
noise_mask_tensors = torch.split(latent_image['noise_mask'], 1, dim=0)
|
|
|
latent_tensors = [{'samples': img, 'noise_mask': mask} for img, mask in
|
|
|
zip(samples_tensors, noise_mask_tensors)]
|
|
|
else:
|
|
|
latent_tensors = [{'samples': img} for img in samples_tensors]
|
|
|
|
|
|
|
|
|
latent_image = latent_tensors[0]
|
|
|
|
|
|
|
|
|
X_type, X_value, Y_type, Y_value, grid_spacing, Y_label_orientation, cache_models, xyplot_as_output_image,\
|
|
|
xyplot_id, dependencies = script["xyplot"]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
samplers = {
|
|
|
"regular": {
|
|
|
"disallowed": ["AddNoise", "ReturnNoise", "StartStep", "EndStep", "RefineStep",
|
|
|
"Refiner", "Refiner On/Off", "AScore+", "AScore-"],
|
|
|
"name": "KSampler (Efficient)"
|
|
|
},
|
|
|
"advanced": {
|
|
|
"disallowed": ["RefineStep", "Denoise", "RefineStep", "Refiner", "Refiner On/Off",
|
|
|
"AScore+", "AScore-"],
|
|
|
"name": "KSampler Adv. (Efficient)"
|
|
|
},
|
|
|
"sdxl": {
|
|
|
"disallowed": ["AddNoise", "EndStep", "Denoise"],
|
|
|
"name": "KSampler SDXL (Eff.)"
|
|
|
}
|
|
|
}
|
|
|
|
|
|
|
|
|
def get_ksampler_details(sampler_type):
|
|
|
return samplers.get(sampler_type, {"disallowed": [], "name": ""})
|
|
|
|
|
|
def suggest_ksampler(X_type, Y_type, current_sampler):
|
|
|
for sampler, details in samplers.items():
|
|
|
if sampler != current_sampler and X_type not in details["disallowed"] and Y_type not in details["disallowed"]:
|
|
|
return details["name"]
|
|
|
return "a different KSampler"
|
|
|
|
|
|
|
|
|
details = get_ksampler_details(sampler_type)
|
|
|
disallowed_XY_types = details["disallowed"]
|
|
|
ksampler_name = details["name"]
|
|
|
|
|
|
if X_type in disallowed_XY_types or Y_type in disallowed_XY_types:
|
|
|
error_prefix = f"{error(f'{ksampler_name} Error:')}"
|
|
|
|
|
|
failed_type = []
|
|
|
if X_type in disallowed_XY_types:
|
|
|
failed_type.append(f"X_type: '{X_type}'")
|
|
|
if Y_type in disallowed_XY_types:
|
|
|
failed_type.append(f"Y_type: '{Y_type}'")
|
|
|
|
|
|
suggested_ksampler = suggest_ksampler(X_type, Y_type, sampler_type)
|
|
|
|
|
|
print(f"{error_prefix} Invalid value for {' and '.join(failed_type)}. "
|
|
|
f"Use {suggested_ksampler} for this XY Plot type."
|
|
|
f"\nDisallowed XY_types for this KSampler are: {', '.join(disallowed_XY_types)}.")
|
|
|
|
|
|
return {"ui": {"images": list()},
|
|
|
"result": (model, positive, negative, latent_image, vae, TSC_KSampler.empty_image,)}
|
|
|
|
|
|
|
|
|
|
|
|
if dependencies is not None:
|
|
|
vae_name, ckpt_name, clip, clip_skip, refiner_name, refiner_clip, refiner_clip_skip,\
|
|
|
positive_prompt, negative_prompt, token_normalization, weight_interpretation, ascore,\
|
|
|
empty_latent_width, empty_latent_height, lora_stack, cnet_stack = dependencies
|
|
|
|
|
|
|
|
|
|
|
|
            def process_xy_for_print(value, replacement, type_):
                """Reformat an X/Y axis value list for the console printout.

                Strips directory paths from file-based entries, offsets batch seeds,
                and substitutes `replacement` for None placeholders. Purely for
                display; the real X/Y values are processed later.
                NOTE: `seed` is read from the enclosing scope.
                """
                # Batch seeds are stored as offsets; show the absolute seed values.
                if type_ == "Seeds++ Batch" and isinstance(value, list):
                    return [v + seed for v in value]
                # Scheduler values arrive as (scheduler, fallback) pairs; show the first.
                elif type_ == "Scheduler" and isinstance(value, tuple):
                    return value[0]
                # Show VAE file names without their directory component.
                elif type_ == "VAE" and isinstance(value, list):
                    return [os.path.basename(v) for v in value]
                # Checkpoint/Refiner entries are (path, clip_skip, vae) triples.
                # NOTE(review): the middle ternary branch (`v[2] is None`) is
                # unreachable — the first condition already catches it, and both
                # surrounding branches produce the same result; looks like dead code.
                elif (type_ == "Checkpoint" or type_ == "Refiner") and isinstance(value, list):
                    return [(os.path.basename(v[0]),) + v[1:] if v[1] is None or v[2] is None
                            else (os.path.basename(v[0]), v[1]) if v[2] is None
                            else (os.path.basename(v[0]),) + v[1:] for v in value]
                # LoRA stacks: show the first entry, with "..." when more follow.
                elif type_ == "LoRA" and isinstance(value, list):
                    return [[(os.path.basename(v[0][0]),) + v[0][1:], "..."] if len(v) > 1
                            else [(os.path.basename(v[0][0]),) + v[0][1:]] for v in value]
                # LoRA Batch: only the file name of each stack's first LoRA.
                elif type_ == "LoRA Batch" and isinstance(value, list):
                    return [os.path.basename(v[0][0]) for v in value if v and isinstance(v[0], tuple) and v[0][0]]
                # Weight sweeps: show the model-weight component of each first entry.
                elif (type_ == "LoRA Wt" or type_ == "LoRA MStr") and isinstance(value, list):
                    return [v[0][1] for v in value if v and isinstance(v[0], tuple)]
                # Clip-strength sweep: show the clip-weight component.
                elif type_ == "LoRA CStr" and isinstance(value, list):
                    return [v[0][2] for v in value if v and isinstance(v[0], tuple)]
                # ControlNet sweeps: entries are tuples where index 2/3/4 hold
                # strength / start% / end% respectively.
                elif type_ == "ControlNetStrength" and isinstance(value, list):
                    return [round(inner_list[0][2], 3) for inner_list in value]
                elif type_ == "ControlNetStart%" and isinstance(value, list):
                    return [round(inner_list[0][3], 3) for inner_list in value]
                elif type_ == "ControlNetEnd%" and isinstance(value, list):
                    return [round(inner_list[0][4], 3) for inner_list in value]
                # Generic tuple: fill None slots with the replacement value.
                elif isinstance(value, tuple):
                    return tuple(replacement if v is None else v for v in value)
                # Scalar: substitute only when the value itself is None.
                else:
                    return replacement if value is None else value
|
|
|
|
|
|
|
|
|
replacement_X = scheduler if X_type == 'Sampler' else clip_skip if X_type == 'Checkpoint' else None
|
|
|
replacement_Y = scheduler if Y_type == 'Sampler' else clip_skip if Y_type == 'Checkpoint' else None
|
|
|
|
|
|
|
|
|
X_value_processed = process_xy_for_print(X_value, replacement_X, X_type)
|
|
|
Y_value_processed = process_xy_for_print(Y_value, replacement_Y, Y_type)
|
|
|
|
|
|
print(info("-" * 40))
|
|
|
print(info('XY Plot Script Inputs:'))
|
|
|
print(info(f"(X) {X_type}:"))
|
|
|
for item in X_value_processed:
|
|
|
print(info(f" {item}"))
|
|
|
print(info(f"(Y) {Y_type}:"))
|
|
|
for item in Y_value_processed:
|
|
|
print(info(f" {item}"))
|
|
|
print(info("-" * 40))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
if cache_models == "False":
|
|
|
vae_cache = ckpt_cache = lora_cache = refn_cache = 1
|
|
|
else:
|
|
|
|
|
|
vae_cache, ckpt_cache, lora_cache, refn_cache = get_cache_numbers("XY Plot")
|
|
|
|
|
|
cache = (vae_cache, ckpt_cache, lora_cache, refn_cache)
|
|
|
|
|
|
|
|
|
X_value = [v + seed for v in X_value] if "Seeds++ Batch" == X_type else X_value
|
|
|
Y_value = [v + seed for v in Y_value] if "Seeds++ Batch" == Y_type else Y_value
|
|
|
|
|
|
|
|
|
positive_prompt = (positive_prompt, positive_prompt)
|
|
|
negative_prompt = (negative_prompt, negative_prompt)
|
|
|
|
|
|
|
|
|
if "LoRA" in X_type or "LoRA" in Y_type:
|
|
|
lora_stack = None
|
|
|
|
|
|
|
|
|
|
|
|
cn_1, cn_2, cn_3 = None, None, None
|
|
|
|
|
|
if "ControlNet" in X_type:
|
|
|
cn_1, cn_2, cn_3 = X_value[0][0][2], X_value[0][0][3], X_value[0][0][4]
|
|
|
|
|
|
elif "ControlNet" in Y_type:
|
|
|
cn_1, cn_2, cn_3 = Y_value[0][0][2], Y_value[0][0][3], Y_value[0][0][4]
|
|
|
|
|
|
if "ControlNetStrength" in X_type or "ControlNetStrength" in Y_type:
|
|
|
cn_1 = None
|
|
|
if "ControlNetStart%" in X_type or "ControlNetStart%" in Y_type:
|
|
|
cn_2 = None
|
|
|
if "ControlNetEnd%" in X_type or "ControlNetEnd%" in Y_type:
|
|
|
cn_3 = None
|
|
|
|
|
|
cnet_stack = (cnet_stack, (cn_1, cn_2, cn_3))
|
|
|
|
|
|
|
|
|
priority = [
|
|
|
"Checkpoint",
|
|
|
"Refiner",
|
|
|
"LoRA",
|
|
|
"VAE",
|
|
|
]
|
|
|
conditioners = {
|
|
|
"Positive Prompt S/R",
|
|
|
"Negative Prompt S/R",
|
|
|
"AScore+",
|
|
|
"AScore-",
|
|
|
"Clip Skip",
|
|
|
"Clip Skip (Refiner)",
|
|
|
"ControlNetStrength",
|
|
|
"ControlNetStart%",
|
|
|
"ControlNetEnd%"
|
|
|
}
|
|
|
|
|
|
x_priority = priority.index(X_type) if X_type in priority else 999
|
|
|
y_priority = priority.index(Y_type) if Y_type in priority else 999
|
|
|
|
|
|
|
|
|
are_both_conditioners = X_type in conditioners and Y_type in conditioners
|
|
|
|
|
|
|
|
|
is_special_case = (
|
|
|
(X_type == "Refiner On/Off" and Y_type in ["RefineStep", "Steps"]) or
|
|
|
(X_type == "Nothing" and Y_type != "Nothing")
|
|
|
)
|
|
|
|
|
|
|
|
|
flip_xy = (y_priority < x_priority and not are_both_conditioners) or is_special_case
|
|
|
|
|
|
|
|
|
if flip_xy:
|
|
|
X_type, Y_type = Y_type, X_type
|
|
|
X_value, Y_value = Y_value, X_value
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
dict_map = {"VAE": [], "Checkpoint": [], "LoRA": [], "Refiner": []}
|
|
|
|
|
|
|
|
|
type_value_pairs = [(X_type, X_value.copy()), (Y_type, Y_value.copy())]
|
|
|
|
|
|
|
|
|
for t, v in type_value_pairs:
|
|
|
if t in dict_map:
|
|
|
|
|
|
if t == "LoRA":
|
|
|
dict_map[t] = [item for sublist in v for item in sublist]
|
|
|
else:
|
|
|
dict_map[t] = v
|
|
|
|
|
|
vae_dict = dict_map.get("VAE", [])
|
|
|
|
|
|
|
|
|
if dict_map.get("Checkpoint", []):
|
|
|
ckpt_dict = [t[0] for t in dict_map["Checkpoint"]]
|
|
|
for t in dict_map["Checkpoint"]:
|
|
|
if t[2] is not None and t[2] != "Baked VAE":
|
|
|
vae_dict.append(t[2])
|
|
|
else:
|
|
|
ckpt_dict = []
|
|
|
|
|
|
lora_dict = [[t,] for t in dict_map.get("LoRA", [])] if dict_map.get("LoRA", []) else []
|
|
|
|
|
|
|
|
|
if dict_map.get("Refiner", []):
|
|
|
refn_dict = [t[0] for t in dict_map["Refiner"]]
|
|
|
else:
|
|
|
refn_dict = []
|
|
|
|
|
|
|
|
|
if ckpt_dict and lora_dict:
|
|
|
lora_dict = [(lora_stack, ckpt) for ckpt in ckpt_dict for lora_stack in lora_dict]
|
|
|
|
|
|
elif lora_dict:
|
|
|
lora_dict = [(lora_stack, ckpt_name) for lora_stack in lora_dict]
|
|
|
|
|
|
|
|
|
if X_type == "Checkpoint":
|
|
|
lora_dict = []
|
|
|
refn_dict = []
|
|
|
elif X_type == "Refiner":
|
|
|
ckpt_dict = []
|
|
|
lora_dict = []
|
|
|
elif X_type == "LoRA":
|
|
|
ckpt_dict = []
|
|
|
refn_dict = []
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
clear_cache_by_exception(xyplot_id, vae_dict=vae_dict, ckpt_dict=ckpt_dict, lora_dict=lora_dict, refn_dict=refn_dict)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def define_variable(var_type, var, add_noise, seed, steps, start_at_step, end_at_step,
|
|
|
return_with_leftover_noise, cfg, sampler_name, scheduler, denoise, vae_name, ckpt_name,
|
|
|
clip_skip, refiner_name, refiner_clip_skip, positive_prompt, negative_prompt, ascore,
|
|
|
lora_stack, cnet_stack, var_label, num_label):
|
|
|
|
|
|
|
|
|
max_label_len = 42
|
|
|
|
|
|
|
|
|
if var_type == "AddNoise":
|
|
|
add_noise = var
|
|
|
text = f"AddNoise: {add_noise}"
|
|
|
|
|
|
|
|
|
elif var_type == "Seeds++ Batch":
|
|
|
seed = var
|
|
|
text = f"Seed: {seed}"
|
|
|
|
|
|
|
|
|
elif var_type == "Steps":
|
|
|
steps = var
|
|
|
text = f"Steps: {steps}"
|
|
|
|
|
|
|
|
|
elif var_type == "StartStep":
|
|
|
start_at_step = var
|
|
|
text = f"StartStep: {start_at_step}"
|
|
|
|
|
|
|
|
|
elif var_type == "EndStep":
|
|
|
end_at_step = var
|
|
|
text = f"EndStep: {end_at_step}"
|
|
|
|
|
|
|
|
|
elif var_type == "RefineStep":
|
|
|
end_at_step = var
|
|
|
text = f"RefineStep: {end_at_step}"
|
|
|
|
|
|
|
|
|
elif var_type == "ReturnNoise":
|
|
|
return_with_leftover_noise = var
|
|
|
text = f"ReturnNoise: {return_with_leftover_noise}"
|
|
|
|
|
|
|
|
|
elif var_type == "CFG Scale":
|
|
|
cfg = var
|
|
|
text = f"CFG: {round(cfg,2)}"
|
|
|
|
|
|
|
|
|
elif var_type == "Sampler":
|
|
|
sampler_name = var[0]
|
|
|
if var[1] == "":
|
|
|
text = f"{sampler_name}"
|
|
|
else:
|
|
|
if var[1] != None:
|
|
|
scheduler = (var[1], scheduler[1])
|
|
|
else:
|
|
|
scheduler = (scheduler[1], scheduler[1])
|
|
|
text = f"{sampler_name} ({scheduler[0]})"
|
|
|
text = text.replace("ancestral", "a").replace("uniform", "u").replace("exponential","exp")
|
|
|
|
|
|
|
|
|
elif var_type == "Scheduler":
|
|
|
if len(var) == 2:
|
|
|
scheduler = (var[0], scheduler[1])
|
|
|
text = f"{sampler_name} ({scheduler[0]})"
|
|
|
else:
|
|
|
scheduler = (var, scheduler[1])
|
|
|
text = f"{scheduler[0]}"
|
|
|
text = text.replace("ancestral", "a").replace("uniform", "u").replace("exponential","exp")
|
|
|
|
|
|
|
|
|
elif var_type == "Denoise":
|
|
|
denoise = var
|
|
|
text = f"Denoise: {round(denoise, 2)}"
|
|
|
|
|
|
|
|
|
elif var_type == "VAE":
|
|
|
vae_name = var
|
|
|
vae_filename = os.path.splitext(os.path.basename(vae_name))[0]
|
|
|
text = f"VAE: {vae_filename}"
|
|
|
|
|
|
|
|
|
elif var_type == "Positive Prompt S/R":
|
|
|
search_txt, replace_txt = var
|
|
|
if replace_txt != None:
|
|
|
|
|
|
if positive_prompt[2] is not None:
|
|
|
positive_prompt = (positive_prompt[2].replace(search_txt, replace_txt, 1), positive_prompt[1], positive_prompt[2])
|
|
|
else:
|
|
|
positive_prompt = (positive_prompt[1].replace(search_txt, replace_txt, 1), positive_prompt[1], positive_prompt[1].replace(search_txt, replace_txt, 1))
|
|
|
else:
|
|
|
if positive_prompt[2] is not None:
|
|
|
positive_prompt = (positive_prompt[2], positive_prompt[1], positive_prompt[2])
|
|
|
else:
|
|
|
positive_prompt = (positive_prompt[1], positive_prompt[1], positive_prompt[1])
|
|
|
replace_txt = search_txt
|
|
|
text = f"{replace_txt}"
|
|
|
|
|
|
|
|
|
elif var_type == "Negative Prompt S/R":
|
|
|
search_txt, replace_txt = var
|
|
|
if replace_txt != None:
|
|
|
|
|
|
if negative_prompt[2] is not None:
|
|
|
negative_prompt = (negative_prompt[2].replace(search_txt, replace_txt, 1), negative_prompt[1], negative_prompt[2])
|
|
|
else:
|
|
|
negative_prompt = (negative_prompt[1].replace(search_txt, replace_txt, 1), negative_prompt[1], negative_prompt[1].replace(search_txt, replace_txt, 1))
|
|
|
else:
|
|
|
if negative_prompt[2] is not None:
|
|
|
negative_prompt = (negative_prompt[2], negative_prompt[1], negative_prompt[2])
|
|
|
else:
|
|
|
negative_prompt = (negative_prompt[1], negative_prompt[1], negative_prompt[1])
|
|
|
replace_txt = search_txt
|
|
|
text = f"(-) {replace_txt}"
|
|
|
|
|
|
|
|
|
elif var_type == "AScore+":
|
|
|
ascore = (var,ascore[1])
|
|
|
text = f"+AScore: {ascore[0]}"
|
|
|
|
|
|
|
|
|
elif var_type == "AScore-":
|
|
|
ascore = (ascore[0],var)
|
|
|
text = f"-AScore: {ascore[1]}"
|
|
|
|
|
|
|
|
|
elif var_type == "Checkpoint":
|
|
|
ckpt_name = var[0]
|
|
|
if var[1] == None:
|
|
|
clip_skip = (clip_skip[1],clip_skip[1])
|
|
|
else:
|
|
|
clip_skip = (var[1],clip_skip[1])
|
|
|
if var[2] != None:
|
|
|
vae_name = var[2]
|
|
|
ckpt_filename = os.path.splitext(os.path.basename(ckpt_name))[0]
|
|
|
text = f"{ckpt_filename}"
|
|
|
|
|
|
|
|
|
elif var_type == "Refiner":
|
|
|
refiner_name = var[0]
|
|
|
if var[1] == None:
|
|
|
refiner_clip_skip = (refiner_clip_skip[1],refiner_clip_skip[1])
|
|
|
else:
|
|
|
refiner_clip_skip = (var[1],refiner_clip_skip[1])
|
|
|
ckpt_filename = os.path.splitext(os.path.basename(refiner_name))[0]
|
|
|
text = f"{ckpt_filename}"
|
|
|
|
|
|
|
|
|
elif var_type == "Refiner On/Off":
|
|
|
end_at_step = int(var * steps)
|
|
|
text = f"Refiner: {'On' if var < 1 else 'Off'}"
|
|
|
|
|
|
elif var_type == "Clip Skip":
|
|
|
clip_skip = (var, clip_skip[1])
|
|
|
text = f"ClipSkip ({clip_skip[0]})"
|
|
|
|
|
|
elif var_type == "Clip Skip (Refiner)":
|
|
|
refiner_clip_skip = (var, refiner_clip_skip[1])
|
|
|
text = f"RefClipSkip ({refiner_clip_skip[0]})"
|
|
|
|
|
|
elif "LoRA" in var_type:
|
|
|
if not lora_stack:
|
|
|
lora_stack = var.copy()
|
|
|
else:
|
|
|
|
|
|
lora_stack[0] = tuple(v if v is not None else lora_stack[0][i] for i, v in enumerate(var[0]))
|
|
|
|
|
|
max_label_len = 50 + (12 * (len(lora_stack) - 1))
|
|
|
lora_name, lora_model_wt, lora_clip_wt = lora_stack[0]
|
|
|
lora_filename = os.path.splitext(os.path.basename(lora_name))[0]
|
|
|
|
|
|
if var_type == "LoRA":
|
|
|
if len(lora_stack) == 1:
|
|
|
lora_model_wt = format(float(lora_model_wt), ".2f").rstrip('0').rstrip('.')
|
|
|
lora_clip_wt = format(float(lora_clip_wt), ".2f").rstrip('0').rstrip('.')
|
|
|
lora_filename = lora_filename[:max_label_len - len(f"LoRA: ({lora_model_wt})")]
|
|
|
if lora_model_wt == lora_clip_wt:
|
|
|
text = f"LoRA: {lora_filename}({lora_model_wt})"
|
|
|
else:
|
|
|
text = f"LoRA: {lora_filename}({lora_model_wt},{lora_clip_wt})"
|
|
|
elif len(lora_stack) > 1:
|
|
|
lora_filenames = [os.path.splitext(os.path.basename(lora_name))[0] for lora_name, _, _ in
|
|
|
lora_stack]
|
|
|
lora_details = [(format(float(lora_model_wt), ".2f").rstrip('0').rstrip('.'),
|
|
|
format(float(lora_clip_wt), ".2f").rstrip('0').rstrip('.')) for
|
|
|
_, lora_model_wt, lora_clip_wt in lora_stack]
|
|
|
non_name_length = sum(
|
|
|
len(f"({lora_details[i][0]},{lora_details[i][1]})") + 2 for i in range(len(lora_stack)))
|
|
|
available_space = max_label_len - non_name_length
|
|
|
max_name_length = available_space // len(lora_stack)
|
|
|
lora_filenames = [filename[:max_name_length] for filename in lora_filenames]
|
|
|
text_elements = [
|
|
|
f"{lora_filename}({lora_details[i][0]})" if lora_details[i][0] == lora_details[i][1]
|
|
|
else f"{lora_filename}({lora_details[i][0]},{lora_details[i][1]})" for i, lora_filename in
|
|
|
enumerate(lora_filenames)]
|
|
|
text = " ".join(text_elements)
|
|
|
|
|
|
elif var_type == "LoRA Batch":
|
|
|
text = f"LoRA: {lora_filename}"
|
|
|
|
|
|
elif var_type == "LoRA Wt":
|
|
|
lora_model_wt = format(float(lora_model_wt), ".2f").rstrip('0').rstrip('.')
|
|
|
text = f"LoRA Wt: {lora_model_wt}"
|
|
|
|
|
|
elif var_type == "LoRA MStr":
|
|
|
lora_model_wt = format(float(lora_model_wt), ".2f").rstrip('0').rstrip('.')
|
|
|
text = f"LoRA Mstr: {lora_model_wt}"
|
|
|
|
|
|
elif var_type == "LoRA CStr":
|
|
|
lora_clip_wt = format(float(lora_clip_wt), ".2f").rstrip('0').rstrip('.')
|
|
|
text = f"LoRA Cstr: {lora_clip_wt}"
|
|
|
|
|
|
elif var_type in ["ControlNetStrength", "ControlNetStart%", "ControlNetEnd%"]:
|
|
|
if "Strength" in var_type:
|
|
|
entry_index = 2
|
|
|
elif "Start%" in var_type:
|
|
|
entry_index = 3
|
|
|
elif "End%" in var_type:
|
|
|
entry_index = 4
|
|
|
|
|
|
|
|
|
if cnet_stack[0] is None:
|
|
|
cnet_stack = (var, cnet_stack[1])
|
|
|
else:
|
|
|
|
|
|
entry_from_var = var[0][entry_index]
|
|
|
|
|
|
|
|
|
first_cn_entry = list(cnet_stack[0][0])
|
|
|
|
|
|
|
|
|
first_cn_entry[entry_index] = entry_from_var
|
|
|
|
|
|
|
|
|
for i, value in enumerate(cnet_stack[1][-3:]):
|
|
|
if value is not None:
|
|
|
first_cn_entry[i + 2] = value
|
|
|
|
|
|
|
|
|
updated_first_entry = tuple(first_cn_entry)
|
|
|
|
|
|
|
|
|
updated_cnet_stack_0 = [updated_first_entry] + list(cnet_stack[0][1:])
|
|
|
|
|
|
|
|
|
cnet_stack = (updated_cnet_stack_0, cnet_stack[1])
|
|
|
|
|
|
|
|
|
text = f'{var_type}: {round(cnet_stack[0][0][entry_index], 3)}'
|
|
|
|
|
|
elif var_type == "XY_Capsule":
|
|
|
text = var.getLabel()
|
|
|
|
|
|
else:
|
|
|
text=""
|
|
|
|
|
|
def truncate_texts(texts, num_label, max_label_len):
|
|
|
truncate_length = max(min(max(len(text) for text in texts), max_label_len), 24)
|
|
|
|
|
|
return [text if len(text) <= truncate_length else text[:truncate_length] + "..." for text in
|
|
|
texts]
|
|
|
|
|
|
|
|
|
if len(var_label) < num_label:
|
|
|
var_label.append(text)
|
|
|
|
|
|
|
|
|
if len(var_label) == num_label and (var_type == "VAE" or var_type == "Checkpoint"
|
|
|
or var_type == "Refiner" or "LoRA" in var_type):
|
|
|
var_label = truncate_texts(var_label, num_label, max_label_len)
|
|
|
|
|
|
|
|
|
return add_noise, seed, steps, start_at_step, end_at_step, return_with_leftover_noise, cfg,\
|
|
|
sampler_name, scheduler, denoise, vae_name, ckpt_name, clip_skip, \
|
|
|
refiner_name, refiner_clip_skip, positive_prompt, negative_prompt, ascore,\
|
|
|
lora_stack, cnet_stack, var_label
|
|
|
|
|
|
|
|
|
|
|
|
            def define_model(model, clip, clip_skip, refiner_model, refiner_clip, refiner_clip_skip,
                             ckpt_name, refiner_name, positive, negative, refiner_positive, refiner_negative,
                             positive_prompt, negative_prompt, ascore, vae, vae_name, lora_stack, cnet_stack, index,
                             types, xyplot_id, cache, sampler_type, empty_latent_width, empty_latent_height):
                """(Re)load the model/clip/vae and re-encode prompts as required by the
                current X/Y plot point.

                `index` is the position along the inner (Y) axis; `index == 0` checks
                restrict X-axis reloads to once per row. `cache` is the
                (vae, ckpt, lora, refiner) cache-size tuple built earlier.
                NOTE: `load_checkpoint`, `load_lora`, `encode_prompts`, `load_vae`,
                `get_bvae_by_ckpt_name`, `token_normalization` and
                `weight_interpretation` come from the enclosing scope.
                """

                # Track whether base / refiner prompts must be re-encoded this point.
                encode = False
                encode_refiner = False

                X_type, Y_type = types

                # --- Base model loading -------------------------------------------
                # Checkpoint on X (and LoRA not on Y): reload once per row.
                if (X_type == "Checkpoint" and index == 0 and Y_type != "LoRA"):
                    if lora_stack is None:
                        model, clip, _ = load_checkpoint(ckpt_name, xyplot_id, cache=cache[1])
                    else:
                        # A fixed LoRA stack rides on top of the swept checkpoint;
                        # only the checkpoint is cached (cache=None for LoRA).
                        model, clip = load_lora(lora_stack, ckpt_name, xyplot_id,
                                                cache=None, ckpt_cache=cache[1])
                    encode = True

                elif (X_type == "LoRA"):
                    # LoRA sweep on X: reload on every point, caching LoRAs.
                    model, clip = load_lora(lora_stack, ckpt_name, xyplot_id, cache=cache[2])
                    encode = True
                elif Y_type == "LoRA":
                    model, clip = load_lora(lora_stack, ckpt_name, xyplot_id,
                                            cache=None, ckpt_cache=cache[1])
                    encode = True
                elif X_type == "LoRA Batch" or X_type == "LoRA Wt" or X_type == "LoRA MStr" or X_type == "LoRA CStr":
                    # LoRA parameter sweeps: cache=0 forces a fresh load each time.
                    model, clip = load_lora(lora_stack, ckpt_name, xyplot_id, cache=0)
                    encode = True

                # --- Refiner loading ----------------------------------------------
                if (X_type == "Refiner" and index == 0) or Y_type == "Refiner":
                    refiner_model, refiner_clip, _ = \
                        load_checkpoint(refiner_name, xyplot_id, output_vae=False, cache=cache[3], ckpt_type="refn")
                    encode_refiner = True

                # Axis types that invalidate the base conditioning.
                encode_types = ["Positive Prompt S/R", "Negative Prompt S/R", "Clip Skip", "ControlNetStrength",
                                "ControlNetStart%", "ControlNetEnd%", "XY_Capsule"]
                if (X_type in encode_types and index == 0) or Y_type in encode_types:
                    encode = True

                # Axis types that invalidate the refiner conditioning.
                encode_refiner_types = ["Positive Prompt S/R", "Negative Prompt S/R", "AScore+", "AScore-",
                                        "Clip Skip (Refiner)", "XY_Capsule"]
                if (X_type in encode_refiner_types and index == 0) or Y_type in encode_refiner_types:
                    encode_refiner = True

                # --- Conditioning ---------------------------------------------------
                if encode == True:
                    positive, negative, clip = \
                        encode_prompts(positive_prompt, negative_prompt, token_normalization, weight_interpretation,
                                       clip, clip_skip, refiner_clip, refiner_clip_skip, ascore, sampler_type == "sdxl",
                                       empty_latent_width, empty_latent_height, return_type="base")
                    # Re-apply the ControlNet stack on top of the fresh conditioning.
                    if cnet_stack:
                        controlnet_conditioning = TSC_Apply_ControlNet_Stack().apply_cnet_stack(positive, negative, cnet_stack)
                        positive, negative = controlnet_conditioning[0], controlnet_conditioning[1]

                if encode_refiner == True:
                    refiner_positive, refiner_negative, refiner_clip = \
                        encode_prompts(positive_prompt, negative_prompt, token_normalization, weight_interpretation,
                                       clip, clip_skip, refiner_clip, refiner_clip_skip, ascore, sampler_type == "sdxl",
                                       empty_latent_width, empty_latent_height, return_type="refiner")

                # --- VAE loading ----------------------------------------------------
                # VAE sweep, or a checkpoint sweep that carries its own VAE override.
                if (X_type == "VAE" and index == 0) or Y_type == "VAE":
                    vae = get_bvae_by_ckpt_name(ckpt_name) if vae_name == "Baked VAE" \
                        else load_vae(vae_name, xyplot_id, cache=cache[0])
                elif X_type == "Checkpoint" and index == 0 and vae_name:
                    vae = get_bvae_by_ckpt_name(ckpt_name) if vae_name == "Baked VAE" \
                        else load_vae(vae_name, xyplot_id, cache=cache[0])

                return model, positive, negative, refiner_model, refiner_positive, refiner_negative, vae
|
|
|
|
|
|
|
|
|
|
|
|
            def process_values(model, refiner_model, add_noise, seed, steps, start_at_step, end_at_step,
                               return_with_leftover_noise, cfg, sampler_name, scheduler, positive, negative,
                               refiner_positive, refiner_negative, latent_image, denoise, vae, vae_decode,
                               sampler_type, latent_list=[], image_tensor_list=[], image_pil_list=[], xy_capsule=None):
                """Sample (or fetch a cached capsule result for) one plot point and
                append it to the running result lists.

                WARNING(review): the mutable default arguments are *deliberately*
                exploited as cross-call accumulators — the X/Y loops below call this
                without passing the lists, so results pile up in the shared defaults
                across the whole grid. Do not "fix" this to `None` defaults without
                also changing every call site.
                NOTE: `clip`, `preview_method`, `process_latent_image`,
                `vae_decode_latent` and `tensor2pil` come from the enclosing scope.
                """

                # An XY_Capsule may already hold a (image, latent) result for this point.
                capsule_result = None
                if xy_capsule is not None:
                    capsule_result = xy_capsule.get_result(model, clip, vae)
                    if capsule_result is not None:
                        image, latent = capsule_result
                        latent_list.append(latent)

                if capsule_result is None:
                    # No cached result: actually sample this plot point.
                    samples, images, _, _ = process_latent_image(model, seed, steps, cfg, sampler_name, scheduler, positive, negative,
                                                                latent_image, denoise, sampler_type, add_noise, start_at_step,
                                                                end_at_step, return_with_leftover_noise, refiner_model,
                                                                refiner_positive, refiner_negative, vae, vae_decode, preview_method)

                    latent_list.append(samples)

                    # Decode only if the sampler didn't already return images.
                    image = images if images is not None else vae_decode_latent(vae, samples, vae_decode)

                    if xy_capsule is not None:
                        # Store the freshly computed result back into the capsule.
                        xy_capsule.set_result(image, samples)

                image_tensor_list.append(image)

                # Keep a PIL copy for assembling the plot grid later.
                image_pil_list.append(tensor2pil(image))

                return latent_list, image_tensor_list, image_pil_list
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
X_label = []
|
|
|
Y_label = []
|
|
|
|
|
|
|
|
|
scheduler = (scheduler, scheduler)
|
|
|
|
|
|
|
|
|
clip_skip = (clip_skip, clip_skip)
|
|
|
refiner_clip_skip = (refiner_clip_skip, refiner_clip_skip)
|
|
|
|
|
|
|
|
|
types = (X_type, Y_type)
|
|
|
|
|
|
|
|
|
def clone_or_none(*originals):
|
|
|
cloned_items = []
|
|
|
for original in originals:
|
|
|
try:
|
|
|
cloned_items.append(original.clone())
|
|
|
except (AttributeError, TypeError):
|
|
|
|
|
|
cloned_items.append(original)
|
|
|
return cloned_items
|
|
|
original_model, original_clip, original_positive, original_negative,\
|
|
|
original_refiner_model, original_refiner_clip, original_refiner_positive, original_refiner_negative =\
|
|
|
clone_or_none(model, clip, positive, negative, refiner_model, refiner_clip, refiner_positive, refiner_negative)
|
|
|
|
|
|
|
|
|
for X_index, X in enumerate(X_value):
|
|
|
|
|
|
|
|
|
positive_prompt = (positive_prompt[0], positive_prompt[1], None)
|
|
|
negative_prompt = (negative_prompt[0], negative_prompt[1], None)
|
|
|
|
|
|
|
|
|
add_noise, seed, steps, start_at_step, end_at_step, return_with_leftover_noise, cfg,\
|
|
|
sampler_name, scheduler, denoise, vae_name, ckpt_name, clip_skip,\
|
|
|
refiner_name, refiner_clip_skip, positive_prompt, negative_prompt, ascore,\
|
|
|
lora_stack, cnet_stack, X_label = \
|
|
|
define_variable(X_type, X, add_noise, seed, steps, start_at_step, end_at_step,
|
|
|
return_with_leftover_noise, cfg, sampler_name, scheduler, denoise, vae_name,
|
|
|
ckpt_name, clip_skip, refiner_name, refiner_clip_skip, positive_prompt,
|
|
|
negative_prompt, ascore, lora_stack, cnet_stack, X_label, len(X_value))
|
|
|
|
|
|
if X_type != "Nothing" and Y_type == "Nothing":
|
|
|
if X_type == "XY_Capsule":
|
|
|
model, clip, refiner_model, refiner_clip = \
|
|
|
clone_or_none(original_model, original_clip, original_refiner_model, original_refiner_clip)
|
|
|
model, clip, vae = X.pre_define_model(model, clip, vae)
|
|
|
|
|
|
|
|
|
model, positive, negative, refiner_model, refiner_positive, refiner_negative, vae = \
|
|
|
define_model(model, clip, clip_skip[0], refiner_model, refiner_clip, refiner_clip_skip[0],
|
|
|
ckpt_name, refiner_name, positive, negative, refiner_positive, refiner_negative,
|
|
|
positive_prompt[0], negative_prompt[0], ascore, vae, vae_name, lora_stack, cnet_stack[0],
|
|
|
0, types, xyplot_id, cache, sampler_type, empty_latent_width, empty_latent_height)
|
|
|
|
|
|
xy_capsule = None
|
|
|
if X_type == "XY_Capsule":
|
|
|
xy_capsule = X
|
|
|
|
|
|
|
|
|
latent_list, image_tensor_list, image_pil_list = \
|
|
|
process_values(model, refiner_model, add_noise, seed, steps, start_at_step, end_at_step,
|
|
|
return_with_leftover_noise, cfg, sampler_name, scheduler[0], positive, negative,
|
|
|
refiner_positive, refiner_negative, latent_image, denoise, vae, vae_decode, sampler_type, xy_capsule=xy_capsule)
|
|
|
|
|
|
elif X_type != "Nothing" and Y_type != "Nothing":
|
|
|
for Y_index, Y in enumerate(Y_value):
|
|
|
|
|
|
if Y_type == "XY_Capsule" or X_type == "XY_Capsule":
|
|
|
model, clip, refiner_model, refiner_clip = \
|
|
|
clone_or_none(original_model, original_clip, original_refiner_model, original_refiner_clip)
|
|
|
|
|
|
if Y_type == "XY_Capsule" and X_type == "XY_Capsule":
|
|
|
Y.set_x_capsule(X)
|
|
|
|
|
|
|
|
|
add_noise, seed, steps, start_at_step, end_at_step, return_with_leftover_noise, cfg,\
|
|
|
sampler_name, scheduler, denoise, vae_name, ckpt_name, clip_skip,\
|
|
|
refiner_name, refiner_clip_skip, positive_prompt, negative_prompt, ascore,\
|
|
|
lora_stack, cnet_stack, Y_label = \
|
|
|
define_variable(Y_type, Y, add_noise, seed, steps, start_at_step, end_at_step,
|
|
|
return_with_leftover_noise, cfg, sampler_name, scheduler, denoise, vae_name,
|
|
|
ckpt_name, clip_skip, refiner_name, refiner_clip_skip, positive_prompt,
|
|
|
negative_prompt, ascore, lora_stack, cnet_stack, Y_label, len(Y_value))
|
|
|
|
|
|
if Y_type == "XY_Capsule":
|
|
|
model, clip, vae = Y.pre_define_model(model, clip, vae)
|
|
|
elif X_type == "XY_Capsule":
|
|
|
model, clip, vae = X.pre_define_model(model, clip, vae)
|
|
|
|
|
|
|
|
|
model, positive, negative, refiner_model, refiner_positive, refiner_negative, vae = \
|
|
|
define_model(model, clip, clip_skip[0], refiner_model, refiner_clip, refiner_clip_skip[0],
|
|
|
ckpt_name, refiner_name, positive, negative, refiner_positive, refiner_negative,
|
|
|
positive_prompt[0], negative_prompt[0], ascore, vae, vae_name, lora_stack, cnet_stack[0],
|
|
|
Y_index, types, xyplot_id, cache, sampler_type, empty_latent_width,
|
|
|
empty_latent_height)
|
|
|
|
|
|
|
|
|
xy_capsule = None
|
|
|
if Y_type == "XY_Capsule":
|
|
|
xy_capsule = Y
|
|
|
|
|
|
latent_list, image_tensor_list, image_pil_list = \
|
|
|
process_values(model, refiner_model, add_noise, seed, steps, start_at_step, end_at_step,
|
|
|
return_with_leftover_noise, cfg, sampler_name, scheduler[0],
|
|
|
positive, negative, refiner_positive, refiner_negative, latent_image,
|
|
|
denoise, vae, vae_decode, sampler_type, xy_capsule=xy_capsule)
|
|
|
|
|
|
|
|
|
if cache_models == "False":
|
|
|
clear_cache_by_exception(xyplot_id, vae_dict=[], ckpt_dict=[], lora_dict=[], refn_dict=[])
|
|
|
else:
|
|
|
|
|
|
if X_type == "Checkpoint":
|
|
|
clear_cache_by_exception(xyplot_id, lora_dict=[], refn_dict=[])
|
|
|
elif X_type == "Refiner":
|
|
|
clear_cache_by_exception(xyplot_id, ckpt_dict=[], lora_dict=[])
|
|
|
elif X_type == "LoRA":
|
|
|
clear_cache_by_exception(xyplot_id, ckpt_dict=[], refn_dict=[])
|
|
|
|
|
|
|
|
|
|
|
|
def print_plot_variables(X_type, Y_type, X_value, Y_value, add_noise, seed, steps, start_at_step, end_at_step,
|
|
|
return_with_leftover_noise, cfg, sampler_name, scheduler, denoise, vae_name, ckpt_name,
|
|
|
clip_skip, refiner_name, refiner_clip_skip, ascore, lora_stack, cnet_stack, sampler_type,
|
|
|
num_rows, num_cols, i_height, i_width):
|
|
|
|
|
|
print("-" * 40)
|
|
|
print(f"{xyplot_message('XY Plot Results:')}")
|
|
|
|
|
|
                def get_vae_name(X_type, Y_type, X_value, Y_value, vae_name):
                    """Format the VAE name(s) in play for the results printout.

                    Axis values override the single `vae_name`; file names are
                    shown without directory or extension, one per line when a
                    whole axis of VAEs was plotted.
                    """
                    if X_type == "VAE":
                        vae_name = "\n      ".join(map(lambda x: os.path.splitext(os.path.basename(str(x)))[0], X_value))
                    elif Y_type == "VAE":
                        vae_name = "\n      ".join(map(lambda y: os.path.splitext(os.path.basename(str(y)))[0], Y_value))
                    elif vae_name:
                        vae_name = os.path.splitext(os.path.basename(str(vae_name)))[0]
                    else:
                        # No VAE involved at all.
                        vae_name = ""
                    return vae_name
|
|
|
|
|
|
def get_clip_skip(X_type, Y_type, X_value, Y_value, cskip, mode):
|
|
|
clip_type = "Clip Skip" if mode == "ckpt" else "Clip Skip (Refiner)"
|
|
|
if X_type == clip_type:
|
|
|
cskip = ", ".join(map(str, X_value))
|
|
|
elif Y_type == clip_type:
|
|
|
cskip = ", ".join(map(str, Y_value))
|
|
|
elif cskip[1] != None:
|
|
|
cskip = cskip[1]
|
|
|
else:
|
|
|
cskip = ""
|
|
|
return cskip
|
|
|
|
|
|
                def get_checkpoint_name(X_type, Y_type, X_value, Y_value, ckpt_name, clip_skip, mode, vae_name=None):
                    """Format checkpoint/refiner name(s) plus associated clip-skip
                    (and, in "ckpt" mode, VAE) values for the results printout.

                    Returns (name, clip_skip, vae_name) in "ckpt" mode, otherwise
                    (name, clip_skip).
                    """

                    # Strip the directory from a single fixed checkpoint name.
                    if ckpt_name is not None:
                        ckpt_name = os.path.basename(ckpt_name)

                    primary_type = "Checkpoint" if mode == "ckpt" else "Refiner"
                    clip_type = "Clip Skip" if mode == "ckpt" else "Clip Skip (Refiner)"

                    # Work out which axis (if any) sweeps the checkpoint/refiner.
                    # NOTE(review): `ckpt_type` is assigned but never read below.
                    if X_type == primary_type:
                        ckpt_type, ckpt_value = X_type, X_value.copy()
                        othr_type, othr_value = Y_type, Y_value.copy()
                    elif Y_type == primary_type:
                        ckpt_type, ckpt_value = Y_type, Y_value.copy()
                        othr_type, othr_value = X_type, X_value.copy()
                    else:
                        # Neither axis sweeps the checkpoint: report the fixed name
                        # with whatever clip-skip / VAE formatting applies.
                        clip_skip = get_clip_skip(X_type, Y_type, X_value, Y_value, clip_skip, mode)
                        if mode == "ckpt":
                            if vae_name:
                                vae_name = get_vae_name(X_type, Y_type, X_value, Y_value, vae_name)
                            return ckpt_name, clip_skip, vae_name
                        else:
                            return ckpt_name, clip_skip

                    # The other axis may sweep clip skip; otherwise a per-checkpoint
                    # clip-skip override (entry index 1) suppresses the shared value.
                    if othr_type == clip_type:
                        clip_skip = ", ".join(map(str, othr_value))
                    elif ckpt_value[0][1] != None:
                        clip_skip = None

                    # Same logic for the VAE (entry index 2), base mode only.
                    if mode == "ckpt":
                        if othr_type == "VAE":
                            vae_name = get_vae_name(X_type, Y_type, X_value, Y_value, vae_name)
                        elif ckpt_value[0][2] != None:
                            vae_name = None
|
|
|
def format_name(v, _type):
|
|
|
base = os.path.basename(v[0])
|
|
|
if _type == clip_type and v[1] is not None:
|
|
|
return base
|
|
|
elif _type == "VAE" and v[1] is not None and v[2] is not None:
|
|
|
return f"{base}({v[1]})"
|
|
|
elif v[1] is not None and v[2] is not None:
|
|
|
return f"{base}({v[1]}) + vae:{v[2]}"
|
|
|
elif v[1] is not None:
|
|
|
return f"{base}({v[1]})"
|
|
|
else:
|
|
|
return base
|
|
|
|
|
|
ckpt_name = "\n ".join([format_name(v, othr_type) for v in ckpt_value])
|
|
|
if mode == "ckpt":
|
|
|
return ckpt_name, clip_skip, vae_name
|
|
|
else:
|
|
|
return ckpt_name, clip_skip
|
|
|
|
|
|
def get_lora_name(X_type, Y_type, X_value, Y_value, lora_stack=None):
    """Resolve the LoRA-related labels for the XY-plot console printout.

    Args:
        X_type / Y_type: axis type tags.
        X_value / Y_value: axis value lists; for LoRA axes each entry is a
            sublist of (lora_path, model_strength, clip_strength) tuples —
            assumed from the [0][0][i] indexing below; TODO confirm against
            the LoRA Plot node.
        lora_stack: optional list of (name, model_wt, clip_wt) tuples used as
            the label when neither axis plots LoRA values.

    Returns:
        (lora_name, lora_wt, lora_model_str, lora_clip_str); each element is
        None when not applicable.
    """
    lora_name = lora_wt = lora_model_str = lora_clip_str = None

    # Axis tags produced by the LoRA / LoRA Plot nodes.
    lora_types = ["LoRA", "LoRA Batch", "LoRA Wt", "LoRA MStr", "LoRA CStr"]

    def _apply_axis(axis_type, axis_value, name, wt, mstr, cstr):
        """Fold one axis' contribution into the four labels.

        Mirrors the original per-branch logic exactly: "LoRA" resets the
        strength labels; the other types fill in only still-unset fields
        from the first entry of the first sublist.
        """
        value = get_lora_sublist_name(axis_type, axis_value)
        if axis_type == "LoRA":
            # Full LoRA axis: names carry the strengths, so standalone
            # strength labels are cleared.
            name, mstr, cstr = value, None, None
        elif axis_type == "LoRA Batch":
            name = value
            mstr = axis_value[0][0][1] if mstr is None else mstr
            cstr = axis_value[0][0][2] if cstr is None else cstr
        elif axis_type == "LoRA MStr":
            name = os.path.basename(axis_value[0][0][0]) if name is None else name
            mstr = value
            cstr = axis_value[0][0][2] if cstr is None else cstr
        elif axis_type == "LoRA CStr":
            name = os.path.basename(axis_value[0][0][0]) if name is None else name
            mstr = axis_value[0][0][1] if mstr is None else mstr
            cstr = value
        elif axis_type == "LoRA Wt":
            name = os.path.basename(axis_value[0][0][0]) if name is None else name
            wt = value
        return name, wt, mstr, cstr

    if X_type not in lora_types and Y_type not in lora_types:
        # Neither axis plots LoRA values: summarize the fixed stack (if any)
        # as "[name(model_wt,clip_wt), ...]".
        if lora_stack:
            names_list = []
            for name, model_wt, clip_wt in lora_stack:
                base_name = os.path.splitext(os.path.basename(name))[0]
                names_list.append(f"{base_name}({round(model_wt, 3)},{round(clip_wt, 3)})")
            lora_name = f"[{', '.join(names_list)}]"
    else:
        # Apply X first, then Y, so Y's values take precedence where the
        # original code let them overwrite (same ordering as before).
        if X_type in lora_types:
            lora_name, lora_wt, lora_model_str, lora_clip_str = _apply_axis(
                X_type, X_value, lora_name, lora_wt, lora_model_str, lora_clip_str)
        if Y_type in lora_types:
            lora_name, lora_wt, lora_model_str, lora_clip_str = _apply_axis(
                Y_type, Y_value, lora_name, lora_wt, lora_model_str, lora_clip_str)

    return lora_name, lora_wt, lora_model_str, lora_clip_str
|
|
|
|
|
|
def get_lora_sublist_name(lora_type, lora_value):
    """Build the display string for one LoRA plot axis.

    "LoRA"/"LoRA Batch" render one line per sublist of (path, m_str, c_str)
    tuples; the strength variants render the relevant rounded number from the
    first tuple of each sublist. Unknown types yield "".
    """
    if lora_type in ("LoRA", "LoRA Batch"):
        rows = []
        for sublist in lora_value:
            entries = []
            for entry in sublist:
                stem = os.path.splitext(os.path.basename(str(entry[0])))[0]
                if lora_type == "LoRA":
                    # Full axis: include both strengths next to the name.
                    entries.append(f"{stem}({round(entry[1], 3)},{round(entry[2], 3)})")
                else:
                    # Batch axis: name only.
                    entries.append(stem)
            rows.append(", ".join(entries))
        return "\n ".join(rows)

    # Strength variants differ only in which tuple slot they display.
    index_by_type = {"LoRA MStr": 1, "LoRA CStr": 2, "LoRA Wt": 1}
    if lora_type in index_by_type:
        slot = index_by_type[lora_type]
        return ", ".join(str(round(entry[0][slot], 3)) for entry in lora_value)

    return ""
|
|
|
|
|
|
|
|
|
ckpt_name, clip_skip, vae_name = get_checkpoint_name(X_type, Y_type, X_value, Y_value, ckpt_name, clip_skip, "ckpt", vae_name)
|
|
|
lora_name, lora_wt, lora_model_str, lora_clip_str = get_lora_name(X_type, Y_type, X_value, Y_value, lora_stack)
|
|
|
refiner_name, refiner_clip_skip = get_checkpoint_name(X_type, Y_type, X_value, Y_value, refiner_name, refiner_clip_skip, "refn")
|
|
|
|
|
|
|
|
|
add_noise = ", ".join(map(str, X_value)) if X_type == "AddNoise" else ", ".join(
|
|
|
map(str, Y_value)) if Y_type == "AddNoise" else add_noise
|
|
|
|
|
|
|
|
|
seed = "\n ".join(map(str, X_value)) if X_type == "Seeds++ Batch" else "\n ".join(
|
|
|
map(str, Y_value)) if Y_type == "Seeds++ Batch" else seed
|
|
|
|
|
|
|
|
|
steps = ", ".join(map(str, X_value)) if X_type == "Steps" else ", ".join(
|
|
|
map(str, Y_value)) if Y_type == "Steps" else steps
|
|
|
|
|
|
|
|
|
start_at_step = ", ".join(map(str, X_value)) if X_type == "StartStep" else ", ".join(
|
|
|
map(str, Y_value)) if Y_type == "StartStep" else start_at_step
|
|
|
|
|
|
|
|
|
end_at_step = ", ".join(map(str, X_value)) if X_type in ["EndStep", "RefineStep"] else ", ".join(
|
|
|
map(str, Y_value)) if Y_type in ["EndStep", "RefineStep"] else end_at_step
|
|
|
|
|
|
|
|
|
return_with_leftover_noise = ", ".join(map(str, X_value)) if X_type == "ReturnNoise" else ", ".join(
|
|
|
map(str, Y_value)) if Y_type == "ReturnNoise" else return_with_leftover_noise
|
|
|
|
|
|
|
|
|
cfg = ", ".join(map(str, X_value)) if X_type == "CFG Scale" else ", ".join(
|
|
|
map(str, Y_value)) if Y_type == "CFG Scale" else round(cfg,3)
|
|
|
|
|
|
|
|
|
if X_type == "Sampler":
|
|
|
if Y_type == "Scheduler":
|
|
|
sampler_name = ", ".join([f"{x[0]}" for x in X_value])
|
|
|
scheduler = ", ".join([f"{y}" for y in Y_value])
|
|
|
else:
|
|
|
sampler_name = ", ".join([f"{x[0]}({x[1] if x[1] != '' and x[1] is not None else scheduler[1]})" for x in X_value])
|
|
|
scheduler = "_"
|
|
|
elif Y_type == "Sampler":
|
|
|
if X_type == "Scheduler":
|
|
|
sampler_name = ", ".join([f"{y[0]}" for y in Y_value])
|
|
|
scheduler = ", ".join([f"{x}" for x in X_value])
|
|
|
else:
|
|
|
sampler_name = ", ".join([f"{y[0]}({y[1] if y[1] != '' and y[1] is not None else scheduler[1]})" for y in Y_value])
|
|
|
scheduler = "_"
|
|
|
else:
|
|
|
scheduler = ", ".join([str(x[0]) if isinstance(x, tuple) else str(x) for x in X_value]) if X_type == "Scheduler" else \
|
|
|
", ".join([str(y[0]) if isinstance(y, tuple) else str(y) for y in Y_value]) if Y_type == "Scheduler" else scheduler[0]
|
|
|
|
|
|
|
|
|
denoise = ", ".join(map(str, X_value)) if X_type == "Denoise" else ", ".join(
|
|
|
map(str, Y_value)) if Y_type == "Denoise" else round(denoise,3)
|
|
|
|
|
|
|
|
|
if ascore is None:
|
|
|
pos_ascore = neg_ascore = None
|
|
|
else:
|
|
|
|
|
|
pos_ascore = (", ".join(map(str, X_value)) if X_type == "Ascore+"
|
|
|
else ", ".join(map(str, Y_value)) if Y_type == "Ascore+" else round(ascore[0],3))
|
|
|
|
|
|
neg_ascore = (", ".join(map(str, X_value)) if X_type == "Ascore-"
|
|
|
else ", ".join(map(str, Y_value)) if Y_type == "Ascore-" else round(ascore[1],3))
|
|
|
|
|
|
|
|
|
print(f"(X) {X_type}")
|
|
|
print(f"(Y) {Y_type}")
|
|
|
print(f"img_count: {len(X_value)*len(Y_value)}")
|
|
|
print(f"img_dims: {i_height} x {i_width}")
|
|
|
print(f"plot_dim: {num_cols} x {num_rows}")
|
|
|
print(f"ckpt: {ckpt_name if ckpt_name is not None else ''}")
|
|
|
if clip_skip:
|
|
|
print(f"clip_skip: {clip_skip}")
|
|
|
if sampler_type == "sdxl":
|
|
|
if refiner_clip_skip == "_":
|
|
|
print(f"refiner(clipskip): {refiner_name if refiner_name is not None else ''}")
|
|
|
else:
|
|
|
print(f"refiner: {refiner_name if refiner_name is not None else ''}")
|
|
|
print(f"refiner_clip_skip: {refiner_clip_skip if refiner_clip_skip is not None else ''}")
|
|
|
print(f"+ascore: {pos_ascore if pos_ascore is not None else ''}")
|
|
|
print(f"-ascore: {neg_ascore if neg_ascore is not None else ''}")
|
|
|
if lora_name:
|
|
|
print(f"lora: {lora_name}")
|
|
|
if lora_wt:
|
|
|
print(f"lora_wt: {lora_wt}")
|
|
|
if lora_model_str:
|
|
|
print(f"lora_mstr: {lora_model_str}")
|
|
|
if lora_clip_str:
|
|
|
print(f"lora_cstr: {lora_clip_str}")
|
|
|
if vae_name:
|
|
|
print(f"vae: {vae_name}")
|
|
|
if sampler_type == "advanced":
|
|
|
print(f"add_noise: {add_noise}")
|
|
|
print(f"seed: {seed}")
|
|
|
print(f"steps: {steps}")
|
|
|
if sampler_type == "advanced":
|
|
|
print(f"start_at_step: {start_at_step}")
|
|
|
print(f"end_at_step: {end_at_step}")
|
|
|
print(f"return_noise: {return_with_leftover_noise}")
|
|
|
if sampler_type == "sdxl":
|
|
|
print(f"start_at_step: {start_at_step}")
|
|
|
if X_type == "Refiner On/Off":
|
|
|
print(f"refine_at_percent: {X_value[0]}")
|
|
|
elif Y_type == "Refiner On/Off":
|
|
|
print(f"refine_at_percent: {Y_value[0]}")
|
|
|
else:
|
|
|
print(f"refine_at_step: {end_at_step}")
|
|
|
print(f"cfg: {cfg}")
|
|
|
if scheduler == "_":
|
|
|
print(f"sampler(scheduler): {sampler_name}")
|
|
|
else:
|
|
|
print(f"sampler: {sampler_name}")
|
|
|
print(f"scheduler: {scheduler}")
|
|
|
if sampler_type == "regular":
|
|
|
print(f"denoise: {denoise}")
|
|
|
|
|
|
if X_type == "Positive Prompt S/R" or Y_type == "Positive Prompt S/R":
|
|
|
positive_prompt = ", ".join([str(x[0]) if i == 0 else str(x[1]) for i, x in enumerate(
|
|
|
X_value)]) if X_type == "Positive Prompt S/R" else ", ".join(
|
|
|
[str(y[0]) if i == 0 else str(y[1]) for i, y in
|
|
|
enumerate(Y_value)]) if Y_type == "Positive Prompt S/R" else positive_prompt
|
|
|
print(f"+prompt_s/r: {positive_prompt}")
|
|
|
|
|
|
if X_type == "Negative Prompt S/R" or Y_type == "Negative Prompt S/R":
|
|
|
negative_prompt = ", ".join([str(x[0]) if i == 0 else str(x[1]) for i, x in enumerate(
|
|
|
X_value)]) if X_type == "Negative Prompt S/R" else ", ".join(
|
|
|
[str(y[0]) if i == 0 else str(y[1]) for i, y in
|
|
|
enumerate(Y_value)]) if Y_type == "Negative Prompt S/R" else negative_prompt
|
|
|
print(f"-prompt_s/r: {negative_prompt}")
|
|
|
|
|
|
if "ControlNet" in X_type or "ControlNet" in Y_type:
|
|
|
cnet_strength, cnet_start_pct, cnet_end_pct = cnet_stack[1]
|
|
|
|
|
|
if "ControlNet" in X_type:
|
|
|
if "Strength" in X_type:
|
|
|
cnet_strength = [str(round(inner_list[0][2], 3)) for inner_list in X_value if
|
|
|
isinstance(inner_list, list) and
|
|
|
inner_list and isinstance(inner_list[0], tuple) and len(inner_list[0]) >= 3]
|
|
|
if "Start%" in X_type:
|
|
|
cnet_start_pct = [str(round(inner_list[0][3], 3)) for inner_list in X_value if
|
|
|
isinstance(inner_list, list) and
|
|
|
inner_list and isinstance(inner_list[0], tuple) and len(inner_list[0]) >= 3]
|
|
|
if "End%" in X_type:
|
|
|
cnet_end_pct = [str(round(inner_list[0][4], 3)) for inner_list in X_value if
|
|
|
isinstance(inner_list, list) and
|
|
|
inner_list and isinstance(inner_list[0], tuple) and len(inner_list[0]) >= 3]
|
|
|
if "ControlNet" in Y_type:
|
|
|
if "Strength" in Y_type:
|
|
|
cnet_strength = [str(round(inner_list[0][2], 3)) for inner_list in Y_value if
|
|
|
isinstance(inner_list, list) and
|
|
|
inner_list and isinstance(inner_list[0], tuple) and len(
|
|
|
inner_list[0]) >= 3]
|
|
|
if "Start%" in Y_type:
|
|
|
cnet_start_pct = [str(round(inner_list[0][3], 3)) for inner_list in Y_value if
|
|
|
isinstance(inner_list, list) and
|
|
|
inner_list and isinstance(inner_list[0], tuple) and len(
|
|
|
inner_list[0]) >= 3]
|
|
|
if "End%" in Y_type:
|
|
|
cnet_end_pct = [str(round(inner_list[0][4], 3)) for inner_list in Y_value if
|
|
|
isinstance(inner_list, list) and
|
|
|
inner_list and isinstance(inner_list[0], tuple) and len(
|
|
|
inner_list[0]) >= 3]
|
|
|
|
|
|
if "ControlNet" in X_type or "ControlNet" in Y_type:
|
|
|
print(f"cnet_strength: {', '.join(cnet_strength) if isinstance(cnet_strength, list) else cnet_strength}")
|
|
|
print(f"cnet_start%: {', '.join(cnet_start_pct) if isinstance(cnet_start_pct, list) else cnet_start_pct}")
|
|
|
print(f"cnet_end%: {', '.join(cnet_end_pct) if isinstance(cnet_end_pct, list) else cnet_end_pct}")
|
|
|
|
|
|
|
|
|
def adjusted_font_size(text, initial_font_size, i_width):
    """Return a font size at which *text* fits within ~90% of *i_width* pixels.

    Measures the text with the module-level arial font; if it already fits,
    the initial size is kept, otherwise the size is scaled down proportionally
    (with a 0.9 safety factor).
    """
    font = ImageFont.truetype(str(Path(font_path)), initial_font_size)
    rendered_width = font.getlength(text)

    # Already fits within 90% of the target width: keep the requested size.
    if rendered_width <= (i_width * 0.9):
        return initial_font_size

    # Scale down proportionally, leaving a 10% margin.
    return int(initial_font_size * (i_width / rendered_width) * 0.9)
|
|
|
|
|
|
|
|
|
|
|
|
def rearrange_list_A(arr, num_cols, num_rows):
    """Reorder a flat column-major grid of num_cols x num_rows items into
    row-major order (element at column j, row i sits at index j*num_rows+i)."""
    return [arr[col * num_rows + row]
            for row in range(num_rows)
            for col in range(num_cols)]
|
|
|
|
|
|
def rearrange_list_B(arr, num_rows, num_cols):
    """Copy the first num_rows*num_cols items of arr in row-major order.

    Since the input is indexed row-major already (i*num_cols + j), this is
    effectively a prefix copy; kept as the structural mirror of
    rearrange_list_A.
    """
    return [arr[row * num_cols + col]
            for row in range(num_rows)
            for col in range(num_cols)]
|
|
|
|
|
|
|
|
|
num_rows = max(len(Y_value) if Y_value is not None else 0, 1)
|
|
|
num_cols = max(len(X_value) if X_value is not None else 0, 1)
|
|
|
|
|
|
|
|
|
if flip_xy == True:
|
|
|
X_type, Y_type = Y_type, X_type
|
|
|
X_value, Y_value = Y_value, X_value
|
|
|
X_label, Y_label = Y_label, X_label
|
|
|
num_rows, num_cols = num_cols, num_rows
|
|
|
image_pil_list = rearrange_list_A(image_pil_list, num_rows, num_cols)
|
|
|
else:
|
|
|
image_pil_list = rearrange_list_B(image_pil_list, num_rows, num_cols)
|
|
|
image_tensor_list = rearrange_list_A(image_tensor_list, num_cols, num_rows)
|
|
|
latent_list = rearrange_list_A(latent_list, num_cols, num_rows)
|
|
|
|
|
|
|
|
|
i_height, i_width = image_tensor_list[0].shape[1], image_tensor_list[0].shape[2]
|
|
|
|
|
|
|
|
|
print_plot_variables(X_type, Y_type, X_value, Y_value, add_noise, seed, steps, start_at_step, end_at_step,
|
|
|
return_with_leftover_noise, cfg, sampler_name, scheduler, denoise, vae_name, ckpt_name,
|
|
|
clip_skip, refiner_name, refiner_clip_skip, ascore, lora_stack, cnet_stack,
|
|
|
sampler_type, num_rows, num_cols, i_height, i_width)
|
|
|
|
|
|
|
|
|
keys = latent_list[0].keys()
|
|
|
result = {}
|
|
|
for key in keys:
|
|
|
tensors = [d[key] for d in latent_list]
|
|
|
result[key] = torch.cat(tensors, dim=0)
|
|
|
latent_list = result
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
border_size_top = i_width // 15
|
|
|
|
|
|
|
|
|
if len(Y_label) > 0:
|
|
|
Y_label_longest = max(len(s) for s in Y_label)
|
|
|
else:
|
|
|
|
|
|
Y_label_longest = 0
|
|
|
|
|
|
Y_label_scale = min(Y_label_longest + 4,24) / 24
|
|
|
|
|
|
if Y_label_orientation == "Vertical":
|
|
|
border_size_left = border_size_top
|
|
|
else:
|
|
|
|
|
|
border_size_left = min(i_width, i_height) + int(0.2 * abs(i_width - i_height))
|
|
|
border_size_left = int(border_size_left * Y_label_scale)
|
|
|
|
|
|
|
|
|
if Y_type == "Nothing":
|
|
|
bg_width = num_cols * i_width + (num_cols - 1) * grid_spacing
|
|
|
x_offset_initial = 0
|
|
|
else:
|
|
|
if Y_label_orientation == "Vertical":
|
|
|
bg_width = num_cols * i_width + (num_cols - 1) * grid_spacing + 3 * border_size_left
|
|
|
x_offset_initial = border_size_left * 3
|
|
|
else:
|
|
|
bg_width = num_cols * i_width + (num_cols - 1) * grid_spacing + border_size_left
|
|
|
x_offset_initial = border_size_left
|
|
|
|
|
|
|
|
|
if X_type == "Nothing":
|
|
|
bg_height = num_rows * i_height + (num_rows - 1) * grid_spacing
|
|
|
y_offset = 0
|
|
|
else:
|
|
|
bg_height = num_rows * i_height + (num_rows - 1) * grid_spacing + 3 * border_size_top
|
|
|
y_offset = border_size_top * 3
|
|
|
|
|
|
|
|
|
background = Image.new('RGBA', (int(bg_width), int(bg_height)), color=(255, 255, 255, 255))
|
|
|
|
|
|
for row in range(num_rows):
|
|
|
|
|
|
|
|
|
x_offset = x_offset_initial
|
|
|
|
|
|
for col in range(num_cols):
|
|
|
|
|
|
index = col * num_rows + row
|
|
|
img = image_pil_list[index]
|
|
|
|
|
|
|
|
|
background.paste(img, (x_offset, y_offset))
|
|
|
|
|
|
if row == 0 and X_type != "Nothing":
|
|
|
|
|
|
text = X_label[col]
|
|
|
|
|
|
|
|
|
initial_font_size = int(48 * img.width / 512)
|
|
|
font_size = adjusted_font_size(text, initial_font_size, img.width)
|
|
|
label_height = int(font_size*1.5)
|
|
|
|
|
|
|
|
|
label_bg = Image.new('RGBA', (img.width, label_height), color=(255, 255, 255, 0))
|
|
|
d = ImageDraw.Draw(label_bg)
|
|
|
|
|
|
|
|
|
font = ImageFont.truetype(str(Path(font_path)), font_size)
|
|
|
|
|
|
|
|
|
_, _, text_width, text_height = d.textbbox([0,0], text, font=font)
|
|
|
text_x = (img.width - text_width) // 2
|
|
|
text_y = (label_height - text_height) // 2
|
|
|
|
|
|
|
|
|
d.text((text_x, text_y), text, fill='black', font=font)
|
|
|
|
|
|
|
|
|
available_space = y_offset - label_height
|
|
|
|
|
|
|
|
|
label_y = available_space // 2
|
|
|
|
|
|
|
|
|
background.alpha_composite(label_bg, (x_offset, label_y))
|
|
|
|
|
|
if col == 0 and Y_type != "Nothing":
|
|
|
|
|
|
text = Y_label[row]
|
|
|
|
|
|
|
|
|
if Y_label_orientation == "Vertical":
|
|
|
initial_font_size = int(48 * i_width / 512)
|
|
|
font_size = adjusted_font_size(text, initial_font_size, i_width)
|
|
|
else:
|
|
|
initial_font_size = int(48 * (border_size_left/Y_label_scale) / 512)
|
|
|
font_size = adjusted_font_size(text, initial_font_size, int(border_size_left/Y_label_scale))
|
|
|
|
|
|
|
|
|
label_bg = Image.new('RGBA', (img.height, int(font_size*1.2)), color=(255, 255, 255, 0))
|
|
|
d = ImageDraw.Draw(label_bg)
|
|
|
|
|
|
|
|
|
font = ImageFont.truetype(str(Path(font_path)), font_size)
|
|
|
|
|
|
|
|
|
_, _, text_width, text_height = d.textbbox([0,0], text, font=font)
|
|
|
text_x = (img.height - text_width) // 2
|
|
|
text_y = (font_size - text_height) // 2
|
|
|
|
|
|
|
|
|
d.text((text_x, text_y), text, fill='black', font=font)
|
|
|
|
|
|
|
|
|
if Y_label_orientation == "Vertical":
|
|
|
label_bg = label_bg.rotate(90, expand=True)
|
|
|
|
|
|
|
|
|
available_space = x_offset - label_bg.width
|
|
|
|
|
|
|
|
|
label_x = available_space // 2
|
|
|
|
|
|
|
|
|
if Y_label_orientation == "Vertical":
|
|
|
label_y = y_offset + (img.height - label_bg.height) // 2
|
|
|
else:
|
|
|
label_y = y_offset + img.height - (img.height - label_bg.height) // 2
|
|
|
|
|
|
|
|
|
background.alpha_composite(label_bg, (label_x, label_y))
|
|
|
|
|
|
|
|
|
x_offset += img.width + grid_spacing
|
|
|
|
|
|
|
|
|
y_offset += img.height + grid_spacing
|
|
|
|
|
|
xy_plot_image = pil2tensor(background)
|
|
|
|
|
|
|
|
|
preview_images = PreviewImage().save_images(xy_plot_image)["ui"]["images"]
|
|
|
|
|
|
|
|
|
output_images = torch.stack([tensor.squeeze() for tensor in image_tensor_list])
|
|
|
|
|
|
|
|
|
if xyplot_as_output_image == True:
|
|
|
output_images = xy_plot_image
|
|
|
|
|
|
|
|
|
if cache_models == "True":
|
|
|
print_loaded_objects_entries(xyplot_id, prompt)
|
|
|
|
|
|
print("-" * 40)
|
|
|
|
|
|
if sampler_type == "sdxl":
|
|
|
sdxl_tuple = original_model, original_clip, original_positive, original_negative,\
|
|
|
original_refiner_model, original_refiner_clip, original_refiner_positive, original_refiner_negative
|
|
|
result = (sdxl_tuple, latent_list, optional_vae, output_images,)
|
|
|
else:
|
|
|
result = (original_model, original_positive, original_negative, latent_list, optional_vae, output_images,)
|
|
|
return {"ui": {"images": preview_images}, "result": result}
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_KSamplerAdvanced(TSC_KSampler):
    """KSampler Adv. (Efficient): advanced variant of TSC_KSampler.

    Adds explicit control of noise injection, start/end steps and
    leftover-noise return; sampling itself is delegated to
    TSC_KSampler.sample with sampler_type="advanced".
    """

    @classmethod
    def INPUT_TYPES(cls):
        # ComfyUI node schema: required sockets/widgets, optional inputs, and
        # hidden values injected by the frontend.
        return {"required":
                    {"model": ("MODEL",),
                     "add_noise": (["enable", "disable"],),
                     "noise_seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}),
                     "steps": ("INT", {"default": 20, "min": 1, "max": 10000}),
                     "cfg": ("FLOAT", {"default": 7.0, "min": 0.0, "max": 100.0}),
                     "sampler_name": (comfy.samplers.KSampler.SAMPLERS,),
                     "scheduler": (SCHEDULERS,),
                     "positive": ("CONDITIONING",),
                     "negative": ("CONDITIONING",),
                     "latent_image": ("LATENT",),
                     "start_at_step": ("INT", {"default": 0, "min": 0, "max": 10000}),
                     "end_at_step": ("INT", {"default": 10000, "min": 0, "max": 10000}),
                     "return_with_leftover_noise": (["disable", "enable"],),
                     "preview_method": (["auto", "latent2rgb", "taesd", "none"],),
                     "vae_decode": (["true", "true (tiled)", "false", "output only", "output only (tiled)"],),
                     },
                "optional": {"optional_vae": ("VAE",),
                             "script": ("SCRIPT",), },
                "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID", },
                }

    RETURN_TYPES = ("MODEL", "CONDITIONING", "CONDITIONING", "LATENT", "VAE", "IMAGE",)
    RETURN_NAMES = ("MODEL", "CONDITIONING+", "CONDITIONING-", "LATENT", "VAE", "IMAGE",)
    OUTPUT_NODE = True
    FUNCTION = "sample_adv"
    CATEGORY = "Efficiency Nodes/Sampling"

    def sample_adv(self, model, add_noise, noise_seed, steps, cfg, sampler_name, scheduler, positive, negative,
                   latent_image, start_at_step, end_at_step, return_with_leftover_noise, preview_method, vae_decode,
                   prompt=None, extra_pnginfo=None, my_unique_id=None, optional_vae=(None,), script=None):
        """Delegate to TSC_KSampler.sample with sampler_type="advanced".

        denoise is fixed at 1.0 — the advanced sampler controls the denoised
        range via start_at_step/end_at_step instead.
        """
        return super().sample(model, noise_seed, steps, cfg, sampler_name, scheduler, positive, negative,
                              latent_image, preview_method, vae_decode, denoise=1.0, prompt=prompt, extra_pnginfo=extra_pnginfo, my_unique_id=my_unique_id,
                              optional_vae=optional_vae, script=script, add_noise=add_noise, start_at_step=start_at_step,end_at_step=end_at_step,
                              return_with_leftover_noise=return_with_leftover_noise,sampler_type="advanced")
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_KSamplerSDXL(TSC_KSampler):
    """KSampler SDXL (Eff.): SDXL base+refiner variant of TSC_KSampler.

    Takes the bundled SDXL_TUPLE (base/refiner models, clips and conditioning)
    and hands off to TSC_KSampler.sample with sampler_type="sdxl"; the
    refine_at_step input is forwarded as end_at_step.
    """

    @classmethod
    def INPUT_TYPES(cls):
        # ComfyUI node schema: required sockets/widgets, optional inputs, and
        # hidden values injected by the frontend.
        return {"required":
                    {"sdxl_tuple": ("SDXL_TUPLE",),
                     "noise_seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}),
                     "steps": ("INT", {"default": 20, "min": 1, "max": 10000}),
                     "cfg": ("FLOAT", {"default": 7.0, "min": 0.0, "max": 100.0}),
                     "sampler_name": (comfy.samplers.KSampler.SAMPLERS,),
                     "scheduler": (SCHEDULERS,),
                     "latent_image": ("LATENT",),
                     "start_at_step": ("INT", {"default": 0, "min": 0, "max": 10000}),
                     "refine_at_step": ("INT", {"default": -1, "min": -1, "max": 10000}),
                     "preview_method": (["auto", "latent2rgb", "taesd", "none"],),
                     "vae_decode": (["true", "true (tiled)", "false", "output only", "output only (tiled)"],),
                     },
                "optional": {"optional_vae": ("VAE",),
                             "script": ("SCRIPT",),},
                "hidden": {"prompt": "PROMPT", "extra_pnginfo": "EXTRA_PNGINFO", "my_unique_id": "UNIQUE_ID",},
                }

    RETURN_TYPES = ("SDXL_TUPLE", "LATENT", "VAE", "IMAGE",)
    RETURN_NAMES = ("SDXL_TUPLE", "LATENT", "VAE", "IMAGE",)
    OUTPUT_NODE = True
    FUNCTION = "sample_sdxl"
    CATEGORY = "Efficiency Nodes/Sampling"

    def sample_sdxl(self, sdxl_tuple, noise_seed, steps, cfg, sampler_name, scheduler, latent_image,
                    start_at_step, refine_at_step, preview_method, vae_decode, prompt=None, extra_pnginfo=None,
                    my_unique_id=None, optional_vae=(None,), refiner_extras=None, script=None):
        """Delegate to TSC_KSampler.sample with sampler_type="sdxl".

        The sdxl_tuple rides in the "model" slot and refiner_extras in the
        "positive" slot of the base signature; negative conditioning comes
        from the tuple, so the standalone negative argument is None.
        """
        negative = None
        return super().sample(sdxl_tuple, noise_seed, steps, cfg, sampler_name, scheduler,
                              refiner_extras, negative, latent_image, preview_method, vae_decode, denoise=1.0,
                              prompt=prompt, extra_pnginfo=extra_pnginfo, my_unique_id=my_unique_id, optional_vae=optional_vae,
                              script=script, add_noise=None, start_at_step=start_at_step, end_at_step=refine_at_step,
                              return_with_leftover_noise=None,sampler_type="sdxl")
|
|
|
|
|
|
|
|
|
|
|
|
# Maximum and default number of entries allowed on one XY-plot axis.
XYPLOT_LIM = 50
XYPLOT_DEF = 3

# File extensions accepted when batch-scanning folders for models.
CKPT_EXTENSIONS = LORA_EXTENSIONS = ['.safetensors', '.ckpt']
VAE_EXTENSIONS = ['.safetensors', '.ckpt', '.pt']

try:
    # Placeholder path shown in batch nodes: "<filesystem root>example_folder".
    xy_batch_default_path = os.path.abspath(os.sep) + "example_folder"
except Exception:
    # Fall back to an empty default if the root path cannot be resolved.
    xy_batch_default_path = ""
|
|
|
|
|
|
def generate_floats(batch_count, first_float, last_float):
    """Return batch_count floats evenly spaced from first_float to last_float,
    each rounded to 3 decimal places; [first_float] for a count of 1, []
    otherwise."""
    if batch_count <= 0:
        return []
    if batch_count == 1:
        return [first_float]
    step = (last_float - first_float) / (batch_count - 1)
    return [round(first_float + i * step, 3) for i in range(batch_count)]
|
|
|
|
|
|
def generate_ints(batch_count, first_int, last_int):
    """Return evenly spaced ints from first_int to last_int, deduplicated and
    ascending (truncation can collapse neighbours); [first_int] for a count
    of 1, [] otherwise."""
    if batch_count > 1:
        step = (last_int - first_int) / (batch_count - 1)
        raw = (int(first_int + i * step) for i in range(batch_count))
    elif batch_count == 1:
        raw = [first_int]
    else:
        raw = []
    # Deduplicate and sort in one pass.
    return sorted(set(raw))
|
|
|
|
|
|
def get_batch_files(directory_path, valid_extensions, include_subdirs=False):
    """List files under directory_path whose names end with one of
    valid_extensions.

    With include_subdirs the tree is walked recursively; otherwise only the
    top level is scanned. Listing errors are printed and an empty (or
    partial, when recursing) list is returned — best-effort by design.
    """
    matches = []
    suffixes = tuple(valid_extensions)

    try:
        if include_subdirs:
            # Recursive scan.
            for root, _dirs, files in os.walk(directory_path):
                matches.extend(os.path.join(root, name)
                               for name in files if name.endswith(suffixes))
        else:
            # Top level only; directories are filtered out explicitly.
            matches = [os.path.join(directory_path, name)
                       for name in os.listdir(directory_path)
                       if os.path.isfile(os.path.join(directory_path, name))
                       and name.endswith(suffixes)]
    except Exception as e:
        print(f"Error while listing files in {directory_path}: {e}")

    return matches
|
|
|
|
|
|
def print_xy_values(xy_type, xy_value, xy_name):
    """Log a short console summary of one XY-plot axis (type and entries)."""
    summary = (
        "===== XY Value Returns =====",
        f"{xy_name} Values:",
        f"- Type: {xy_type}",
        f"- Entries: {xy_value}",
        "============================",
    )
    for line in summary:
        print(line)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot:
    """XY Plot script node.

    Validates the X and Y axis selections, normalizes a few axis pairings,
    and packs everything (plus grid/label options) into the SCRIPT dict that
    the Efficient KSampler nodes consume under the "xyplot" key.
    """

    @classmethod
    def INPUT_TYPES(cls):
        # ComfyUI node schema; X/Y are tuples of (type_tag, value_list)
        # produced by the TSC_XYplot_* input nodes.
        return {"required": {
                    "grid_spacing": ("INT", {"default": 0, "min": 0, "max": 500, "step": 5}),
                    "XY_flip": (["False","True"],),
                    "Y_label_orientation": (["Horizontal", "Vertical"],),
                    "cache_models": (["True", "False"],),
                    "ksampler_output_image": (["Images","Plot"],),},
                "optional": {
                    "dependencies": ("DEPENDENCIES", ),
                    "X": ("XY", ),
                    "Y": ("XY", ),},
                "hidden": {"my_unique_id": "UNIQUE_ID"},
        }

    RETURN_TYPES = ("SCRIPT",)
    RETURN_NAMES = ("SCRIPT",)
    FUNCTION = "XYplot"
    CATEGORY = "Efficiency Nodes/Scripts"

    def XYplot(self, grid_spacing, XY_flip, Y_label_orientation, cache_models, ksampler_output_image, my_unique_id,
               dependencies=None, X=None, Y=None):

        # Unpack axis tuples; an unconnected axis becomes ("Nothing", [""]).
        if X != None:
            X_type, X_value = X
        else:
            X_type = "Nothing"
            X_value = [""]
        if Y != None:
            Y_type, Y_value = Y
        else:
            Y_type = "Nothing"
            Y_value = [""]

        # Identical axis types are invalid (except capsules and prompt S/R);
        # two "Nothing" axes just return None without an error message.
        if X_type != "XY_Capsule" and (X_type == Y_type) and X_type not in ["Positive Prompt S/R", "Negative Prompt S/R"]:
            if X_type != "Nothing":
                print(f"{error('XY Plot Error:')} X and Y input types must be different.")
            return (None,)

        # Plot types that need the dependencies bundle (model/prompt re-encoding).
        # NOTE(review): "AScore+"/"AScore-" here differ in case from the
        # "Ascore+"/"Ascore-" tags checked in print_plot_variables — confirm
        # which spelling the XY input nodes actually emit.
        encode_types = {
            "Checkpoint", "Refiner",
            "LoRA", "LoRA Batch", "LoRA Wt", "LoRA MStr", "LoRA CStr",
            "Positive Prompt S/R", "Negative Prompt S/R",
            "AScore+", "AScore-",
            "Clip Skip", "Clip Skip (Refiner)",
            "ControlNetStrength", "ControlNetStart%", "ControlNetEnd%"
        }

        if X_type in encode_types or Y_type in encode_types:
            if dependencies is None:
                print(f"{error('XY Plot Error:')} The dependencies input must be connected for certain plot types.")
                return (None,)

        # The LoRA Plot sub-axes must drive both X and Y together.
        lora_types = {"LoRA Batch", "LoRA Wt", "LoRA MStr", "LoRA CStr"}
        if (X_type in lora_types and Y_type not in lora_types) or (Y_type in lora_types and X_type not in lora_types):
            print(
                f"{error('XY Plot Error:')} Both X and Y must be connected to use the 'LoRA Plot' node.")
            return (None,)

        # Sampler vs Scheduler on opposite axes: blank out the sampler's own
        # embedded scheduler so the axis values stay independent.
        if X_type == "Sampler" and Y_type == "Scheduler":
            X_value = [(x[0], "") for x in X_value]
        elif Y_type == "Sampler" and X_type == "Scheduler":
            Y_value = [(y[0], "") for y in Y_value]

        # A standalone Scheduler axis is normalized to (name, None) tuples.
        if X_type == "Scheduler" and Y_type != "Sampler":
            X_value = [(x, None) for x in X_value]
        if Y_type == "Scheduler" and X_type != "Sampler":
            Y_value = [(y, None) for y in Y_value]

        # Checkpoint vs VAE on opposite axes: drop the checkpoint's embedded
        # VAE slot so the VAE axis controls it.
        if X_type == "Checkpoint" and Y_type == "VAE":
            X_value = [(t[0], t[1], None) for t in X_value]
        elif Y_type == "VAE" and X_type == "Checkpoint":
            Y_value = [(t[0], t[1], None) for t in Y_value]

        # Swap the axes if requested.
        if XY_flip == "True":
            X_type, Y_type = Y_type, X_type
            X_value, Y_value = Y_value, X_value

        # "Plot" makes the KSampler emit the composited grid as its IMAGE output.
        xyplot_as_output_image = ksampler_output_image == "Plot"

        script = {"xyplot": (X_type, X_value, Y_type, Y_value, grid_spacing, Y_label_orientation, cache_models,
                             xyplot_as_output_image, my_unique_id, dependencies)}

        return (script,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_SeedsBatch:
    """XY input node: yields the "Seeds++ Batch" axis, whose values are the
    batch indices 0..batch_count-1."""

    @classmethod
    def INPUT_TYPES(cls):
        return {"required": {
                    "batch_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM}),},
                }

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, batch_count):
        # A zero count disables the axis entirely.
        if not batch_count:
            return (None,)
        return (("Seeds++ Batch", [*range(batch_count)]),)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_AddReturnNoise:
    """XY input node: plots "enable"/"disable" for either the add_noise or the
    return_with_leftover_noise sampler toggle."""

    # UI option -> internal XY type tag (lookup raises KeyError on anything else).
    _TAG_BY_OPTION = {
        "add_noise": "AddNoise",
        "return_with_leftover_noise": "ReturnNoise",
    }

    @classmethod
    def INPUT_TYPES(cls):
        return {"required": {
            "XY_type": (["add_noise", "return_with_leftover_noise"],)}
        }

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, XY_type):
        return ((TSC_XYplot_AddReturnNoise._TAG_BY_OPTION[XY_type], ["enable", "disable"]),)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_Steps:
    """XY input node: plots one of the step-count parameters (steps,
    start_at_step, end_at_step or refine_at_step) over an evenly spaced
    integer range."""

    parameters = ["steps","start_at_step", "end_at_step", "refine_at_step"]

    @classmethod
    def INPUT_TYPES(cls):
        # ComfyUI node schema: one first/last pair per selectable parameter;
        # only the pair matching target_parameter is used.
        return {
            "required": {
                "target_parameter": (cls.parameters,),
                "batch_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM}),
                "first_step": ("INT", {"default": 10, "min": 1, "max": 10000}),
                "last_step": ("INT", {"default": 20, "min": 1, "max": 10000}),
                "first_start_step": ("INT", {"default": 0, "min": 0, "max": 10000}),
                "last_start_step": ("INT", {"default": 10, "min": 0, "max": 10000}),
                "first_end_step": ("INT", {"default": 10, "min": 0, "max": 10000}),
                "last_end_step": ("INT", {"default": 20, "min": 0, "max": 10000}),
                "first_refine_step": ("INT", {"default": 10, "min": 0, "max": 10000}),
                "last_refine_step": ("INT", {"default": 20, "min": 0, "max": 10000}),
            }
        }

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, target_parameter, batch_count, first_step, last_step, first_start_step, last_start_step,
                 first_end_step, last_end_step, first_refine_step, last_refine_step):
        """Return ((xy_type, values),) for the chosen parameter, or (None,)
        when the generated range is empty (batch_count == 0)."""
        # Map each selectable parameter to its XY type tag and value range.
        ranges = {
            "steps": ("Steps", first_step, last_step),
            "start_at_step": ("StartStep", first_start_step, last_start_step),
            "end_at_step": ("EndStep", first_end_step, last_end_step),
            "refine_at_step": ("RefineStep", first_refine_step, last_refine_step),
        }
        try:
            xy_type, xy_first, xy_last = ranges[target_parameter]
        except KeyError:
            # Previously an unrecognized value crashed with UnboundLocalError
            # on xy_type; fail with a clear message instead.
            raise ValueError(f"Unknown target_parameter: {target_parameter!r}")

        xy_value = generate_ints(batch_count, xy_first, xy_last)
        return ((xy_type, xy_value),) if xy_value else (None,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_CFG:
    """XY input node: plots evenly spaced CFG-scale values between first_cfg
    and last_cfg."""

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "batch_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM}),
                "first_cfg": ("FLOAT", {"default": 7.0, "min": 0.0, "max": 100.0}),
                "last_cfg": ("FLOAT", {"default": 9.0, "min": 0.0, "max": 100.0}),
            }
        }

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, batch_count, first_cfg, last_cfg):
        # Empty range (batch_count == 0) disables the axis.
        values = generate_floats(batch_count, first_cfg, last_cfg)
        return (("CFG Scale", values),) if values else (None,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_Sampler_Scheduler:
    """XY Plot input node sweeping samplers, schedulers, or paired combinations."""

    parameters = ["sampler", "scheduler", "sampler & scheduler"]

    @classmethod
    def INPUT_TYPES(cls):
        sampler_choices = ["None"] + comfy.samplers.KSampler.SAMPLERS
        scheduler_choices = ["None"] + SCHEDULERS
        required = {
            "target_parameter": (cls.parameters,),
            "input_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM, "step": 1}),
        }
        # One sampler and one scheduler dropdown per possible plot slot.
        for i in range(1, XYPLOT_LIM + 1):
            required[f"sampler_{i}"] = (sampler_choices,)
            required[f"scheduler_{i}"] = (scheduler_choices,)
        return {"required": required}

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, target_parameter, input_count, **kwargs):
        """Collect the selected sampler/scheduler values, skipping "None" entries."""
        picked_samplers = [kwargs.get(f"sampler_{i}") for i in range(1, input_count + 1)]
        picked_schedulers = [kwargs.get(f"scheduler_{i}") for i in range(1, input_count + 1)]

        if target_parameter == "scheduler":
            xy_type = "Scheduler"
            xy_value = [sched for sched in picked_schedulers if sched != "None"]
        elif target_parameter == "sampler":
            # Sampler-only sweeps carry None as the scheduler placeholder.
            xy_type = "Sampler"
            xy_value = [(smp, None) for smp in picked_samplers if smp != "None"]
        else:
            # Paired mode: a "None" scheduler falls back to the default (None).
            xy_type = "Sampler"
            xy_value = [(smp, None if sched == "None" else sched)
                        for smp, sched in zip(picked_samplers, picked_schedulers) if smp != "None"]

        return ((xy_type, xy_value),) if xy_value else (None,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_Denoise:
    """XY Plot input node sweeping the sampler denoise amount."""

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "batch_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM}),
                "first_denoise": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1.0, "step": 0.01}),
                "last_denoise": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 1.0, "step": 0.01}),
            }
        }

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, batch_count, first_denoise, last_denoise):
        """Return evenly spaced denoise values between first_denoise and last_denoise."""
        values = generate_floats(batch_count, first_denoise, last_denoise)
        return (("Denoise", values),) if values else (None,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_VAE:
    """XY Plot input node that sweeps VAEs, chosen by name or discovered from a folder."""

    modes = ["VAE Names", "VAE Batch"]

    @classmethod
    def INPUT_TYPES(cls):
        vae_choices = ["None", "Baked VAE"] + folder_paths.get_filename_list("vae")

        required = {
            "input_mode": (cls.modes,),
            "batch_path": ("STRING", {"default": xy_batch_default_path, "multiline": False}),
            "subdirectories": ("BOOLEAN", {"default": False}),
            "batch_sort": (["ascending", "descending"],),
            "batch_max": ("INT", {"default": -1, "min": -1, "max": XYPLOT_LIM, "step": 1}),
            "vae_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM, "step": 1}),
        }
        # One VAE dropdown per possible plot slot.
        for i in range(1, XYPLOT_LIM + 1):
            required[f"vae_name_{i}"] = (vae_choices,)
        return {"required": required}

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, input_mode, batch_path, subdirectories, batch_sort, batch_max, vae_count, **kwargs):
        """Collect the VAE names for the XY sweep from widgets or from a disk batch."""
        xy_type = "VAE"

        if "Batch" not in input_mode:
            # Widget mode: keep every non-"None" selection, in slot order.
            selected = (kwargs.get(f"vae_name_{i}") for i in range(1, vae_count + 1))
            xy_value = [name for name in selected if name != "None"]
        else:
            if batch_max == 0:
                return (None,)
            try:
                found = get_batch_files(batch_path, VAE_EXTENSIONS, include_subdirs=subdirectories)
                if not found:
                    print(f"{error('XY Plot Error:')} No VAE files found.")
                    return (None,)

                # Order the discovered files as requested.
                found.sort(reverse=(batch_sort == "descending"))

                xy_value = list(found)
                if batch_max != -1:
                    xy_value = xy_value[:batch_max]
            except Exception as e:
                print(f"{error('XY Plot Error:')} {e}")
                return (None,)

        return ((xy_type, xy_value),) if xy_value else (None,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_PromptSR:
    """XY Plot input node that performs prompt search/replace sweeps."""

    @classmethod
    def INPUT_TYPES(cls):
        required = {
            "target_prompt": (["positive", "negative"],),
            "search_txt": ("STRING", {"default": "", "multiline": False}),
            "replace_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM-1}),
        }
        # One replacement field per slot (one fewer than the plot limit,
        # since the unmodified prompt occupies the first slot).
        for i in range(1, XYPLOT_LIM):
            required[f"replace_{i}"] = ("STRING", {"default": "", "multiline": False})
        return {"required": required}

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, target_prompt, search_txt, replace_count, **kwargs):
        """Return (search, replacement) pairs; the first entry keeps the original text."""
        if not search_txt:
            return (None,)

        xy_type = "Positive Prompt S/R" if target_prompt == "positive" else "Negative Prompt S/R"

        # The unmodified prompt (replacement None) always comes first.
        xy_values = [(search_txt, None)]
        xy_values += [(search_txt, kwargs.get(f"replace_{i}")) for i in range(1, replace_count + 1)]

        return ((xy_type, xy_values),)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_AScore:
    """XY Plot input node sweeping the SDXL aesthetic score (positive or negative)."""

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "target_ascore": (["positive", "negative"],),
                "batch_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM}),
                "first_ascore": ("FLOAT", {"default": 0.0, "min": 0.0, "max": 1000.0, "step": 0.01}),
                "last_ascore": ("FLOAT", {"default": 10.0, "min": 0.0, "max": 1000.0, "step": 0.01}),
            }
        }

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, target_ascore, batch_count, first_ascore, last_ascore):
        """Sweep the aesthetic score for the chosen conditioning polarity."""
        xy_type = "AScore+" if target_ascore == "positive" else "AScore-"
        values = generate_floats(batch_count, first_ascore, last_ascore)
        return ((xy_type, values),) if values else (None,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_Refiner_OnOff:
    """XY Plot input node comparing refiner enabled vs. effectively disabled."""

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "refine_at_percent": ("FLOAT", {"default": 0.80, "min": 0.00, "max": 1.00, "step": 0.01}),
            }
        }

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, refine_at_percent):
        """Two-point sweep: refine at the given percent, then at 1.0 (refiner off)."""
        return (("Refiner On/Off", [refine_at_percent, 1]),)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_ClipSkip:
    """XY Plot input node sweeping clip-skip for the base or refiner checkpoint."""

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "target_ckpt": (["Base", "Refiner"],),
                "batch_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM}),
                "first_clip_skip": ("INT", {"default": -1, "min": -24, "max": -1, "step": 1}),
                "last_clip_skip": ("INT", {"default": -3, "min": -24, "max": -1, "step": 1}),
            },
        }

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, target_ckpt, batch_count, first_clip_skip, last_clip_skip):
        """Return evenly spaced clip-skip values tagged for the chosen checkpoint."""
        xy_type = "Clip Skip" if target_ckpt == "Base" else "Clip Skip (Refiner)"
        values = generate_ints(batch_count, first_clip_skip, last_clip_skip)
        return ((xy_type, values),) if values else (None,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_Checkpoint:
    """XY Plot input node sweeping checkpoints (optionally with clip-skip / VAE)."""

    modes = ["Ckpt Names", "Ckpt Names+ClipSkip", "Ckpt Names+ClipSkip+VAE", "Checkpoint Batch"]

    @classmethod
    def INPUT_TYPES(cls):
        ckpt_choices = ["None"] + folder_paths.get_filename_list("checkpoints")
        vae_choices = ["Baked VAE"] + folder_paths.get_filename_list("vae")

        required = {
            "target_ckpt": (["Base", "Refiner"],),
            "input_mode": (cls.modes,),
            "batch_path": ("STRING", {"default": xy_batch_default_path, "multiline": False}),
            "subdirectories": ("BOOLEAN", {"default": False}),
            "batch_sort": (["ascending", "descending"],),
            "batch_max": ("INT", {"default": -1, "min": -1, "max": 50, "step": 1}),
            "ckpt_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM, "step": 1}),
        }
        # One (checkpoint, clip-skip, VAE) widget trio per possible plot slot.
        for i in range(1, XYPLOT_LIM + 1):
            required[f"ckpt_name_{i}"] = (ckpt_choices,)
            required[f"clip_skip_{i}"] = ("INT", {"default": -1, "min": -24, "max": -1, "step": 1})
            required[f"vae_name_{i}"] = (vae_choices,)
        return {"required": required}

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, target_ckpt, input_mode, batch_path, subdirectories, batch_sort, batch_max, ckpt_count, **kwargs):
        """Build (checkpoint, clip_skip, vae) triples from widgets or a folder batch."""
        xy_type = "Checkpoint" if target_ckpt == "Base" else "Refiner"

        if "Batch" not in input_mode:
            xy_value = []
            for i in range(1, ckpt_count + 1):
                ckpt = kwargs.get(f"ckpt_name_{i}")
                if ckpt == "None":
                    continue
                # Blank out the components the chosen mode does not expose.
                clip_skip = kwargs.get(f"clip_skip_{i}") if "ClipSkip" in input_mode else None
                vae = kwargs.get(f"vae_name_{i}") if "VAE" in input_mode else None
                xy_value.append((ckpt, clip_skip, vae))
        else:
            if batch_max == 0:
                return (None,)
            try:
                found = get_batch_files(batch_path, CKPT_EXTENSIONS, include_subdirs=subdirectories)
                if not found:
                    print(f"{error('XY Plot Error:')} No Checkpoint files found.")
                    return (None,)

                found.sort(reverse=(batch_sort == "descending"))

                # Batch mode never carries clip-skip or VAE overrides.
                xy_value = [(ckpt, None, None) for ckpt in found]
                if batch_max != -1:
                    xy_value = xy_value[:batch_max]
            except Exception as e:
                print(f"{error('XY Plot Error:')} {e}")
                return (None,)

        return ((xy_type, xy_value),) if xy_value else (None,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_LoRA_Batch:
    """XY Plot input node that sweeps every LoRA file found in a folder."""

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "batch_path": ("STRING", {"default": xy_batch_default_path, "multiline": False}),
                "subdirectories": ("BOOLEAN", {"default": False}),
                "batch_sort": (["ascending", "descending"],),
                "batch_max": ("INT", {"default": -1, "min": -1, "max": XYPLOT_LIM, "step": 1}),
                "model_strength": ("FLOAT", {"default": 1.0, "min": -10.00, "max": 10.0, "step": 0.01}),
                "clip_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
            },
            "optional": {"lora_stack": ("LORA_STACK",)},
        }

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, batch_path, subdirectories, batch_sort, model_strength, clip_strength, batch_max, lora_stack=None):
        """Return one [(lora, model_str, clip_str), *stack] entry per discovered file."""
        if batch_max == 0:
            return (None,)

        found = get_batch_files(batch_path, LORA_EXTENSIONS, include_subdirs=subdirectories)
        if not found:
            print(f"{error('XY Plot Error:')} No LoRA files found.")
            return (None,)

        found.sort(reverse=(batch_sort == "descending"))

        # Every entry shares the same strengths and any upstream LoRA stack.
        extra = lora_stack if lora_stack else []
        xy_value = [[(name, model_strength, clip_strength)] + extra for name in found]
        if batch_max != -1:
            xy_value = xy_value[:batch_max]

        return (("LoRA", xy_value),) if xy_value else (None,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_LoRA:
    """XY Plot input node sweeping LoRAs by name, name+weights, or folder batch."""

    modes = ["LoRA Names", "LoRA Names+Weights", "LoRA Batch"]

    @classmethod
    def INPUT_TYPES(cls):
        lora_choices = ["None"] + folder_paths.get_filename_list("loras")

        required = {
            "input_mode": (cls.modes,),
            "batch_path": ("STRING", {"default": xy_batch_default_path, "multiline": False}),
            "subdirectories": ("BOOLEAN", {"default": False}),
            "batch_sort": (["ascending", "descending"],),
            "batch_max": ("INT", {"default": -1, "min": -1, "max": XYPLOT_LIM, "step": 1}),
            "lora_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM, "step": 1}),
            "model_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
            "clip_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
        }
        # One (name, model strength, clip strength) widget trio per plot slot.
        for i in range(1, XYPLOT_LIM + 1):
            required[f"lora_name_{i}"] = (lora_choices,)
            required[f"model_str_{i}"] = ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01})
            required[f"clip_str_{i}"] = ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01})

        return {"required": required, "optional": {"lora_stack": ("LORA_STACK",)}}

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def __init__(self):
        # Batch mode delegates to the dedicated batch node.
        self.lora_batch = TSC_XYplot_LoRA_Batch()

    def xy_value(self, input_mode, batch_path, subdirectories, batch_sort, batch_max, lora_count, model_strength,
                 clip_strength, lora_stack=None, **kwargs):
        """Assemble the LoRA sweep entries for the selected input mode."""
        extra = lora_stack if lora_stack else []

        if "Batch" in input_mode:
            try:
                return self.lora_batch.xy_value(batch_path, subdirectories, batch_sort, model_strength,
                                                clip_strength, batch_max, extra)
            except Exception as e:
                print(f"{error('XY Plot Error:')} {e}")
                return (None,)

        # Widget mode: per-slot strengths are only honored in "+Weights" mode.
        use_widget_weights = "Weights" in input_mode
        xy_value = []
        for i in range(1, lora_count + 1):
            name = kwargs.get(f"lora_name_{i}")
            if name == "None":
                continue
            m_str = kwargs.get(f"model_str_{i}", model_strength) if use_widget_weights else model_strength
            c_str = kwargs.get(f"clip_str_{i}", clip_strength) if use_widget_weights else clip_strength
            xy_value.append([(name, m_str, c_str)] + extra)

        return (("LoRA", xy_value),)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_LoRA_Plot:
    """Two-axis LoRA plot node: X sweeps a LoRA folder batch or model strength,
    Y sweeps LoRA weight, model strength, or clip strength."""

    # Supported X/Y axis pairings (selected via the input_mode widget).
    modes = ["X: LoRA Batch, Y: LoRA Weight",
             "X: LoRA Batch, Y: Model Strength",
             "X: LoRA Batch, Y: Clip Strength",
             "X: Model Strength, Y: Clip Strength",
             ]

    @classmethod
    def INPUT_TYPES(cls):
        loras = ["None"] + folder_paths.get_filename_list("loras")
        return {"required": {
            "input_mode": (cls.modes,),
            "lora_name": (loras,),
            "model_strength": ("FLOAT", {"default": 1.0, "min": -10.00, "max": 10.0, "step": 0.01}),
            "clip_strength": ("FLOAT", {"default": 1.0, "min": -10.0, "max": 10.0, "step": 0.01}),
            # X axis: either a folder batch (path/sort widgets) or a numeric range.
            "X_batch_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM}),
            "X_batch_path": ("STRING", {"default": xy_batch_default_path, "multiline": False}),
            "X_subdirectories": ("BOOLEAN", {"default": False}),
            "X_batch_sort": (["ascending", "descending"],),
            "X_first_value": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 10.0, "step": 0.01}),
            "X_last_value": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 10.0, "step": 0.01}),
            # Y axis: always a numeric range.
            "Y_batch_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM}),
            "Y_first_value": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 10.0, "step": 0.01}),
            "Y_last_value": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 10.0, "step": 0.01}),},
            "optional": {"lora_stack": ("LORA_STACK",)}
        }

    RETURN_TYPES = ("XY","XY",)
    RETURN_NAMES = ("X","Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def __init__(self):
        # Folder-batch discovery is delegated to the dedicated batch node.
        self.lora_batch = TSC_XYplot_LoRA_Batch()

    def generate_values(self, mode, X_or_Y, *args, **kwargs):
        """Run the batch helper and relabel its XY type according to *mode*.

        NOTE(review): this method appears unused — xy_value below never calls it —
        and the "LoRA Batch" branch calls self.generate_batch_values, which is not
        defined anywhere in this class; invoking that branch would raise
        AttributeError. Confirm whether this is dead code before relying on it.
        The X_or_Y parameter is accepted but never read.
        """
        result = self.lora_batch.xy_value(*args, **kwargs)

        if result and result[0]:
            xy_type, xy_value_list = result[0]

            # Relabel the generic "LoRA" type to the axis-specific label.
            if "LoRA Weight" in mode:
                xy_type = "LoRA Wt"
            elif "Model Strength" in mode:
                xy_type = "LoRA MStr"
            elif "Clip Strength" in mode:
                xy_type = "LoRA CStr"

            if "LoRA Batch" in mode:
                return self.generate_batch_values(*args, **kwargs)
            else:
                return ((xy_type, xy_value_list),)

        return (None,)

    def xy_value(self, input_mode, lora_name, model_strength, clip_strength, X_batch_count, X_batch_path, X_subdirectories,
                 X_batch_sort, X_first_value, X_last_value, Y_batch_count, Y_first_value, Y_last_value, lora_stack=None):
        """Build the X and Y axis entries for the selected two-axis LoRA plot mode."""

        x_value, y_value = [], []
        lora_stack = lora_stack if lora_stack else []

        # The only mode pairing Model Strength with Clip Strength needs a fixed
        # LoRA; bail out if none was chosen.
        if "Model Strength" in input_mode and "Clip Strength" in input_mode:
            if lora_name == 'None':
                return (None,None,)
        # Null out the components that the swept axes will provide instead.
        if "LoRA Batch" in input_mode:
            lora_name = None
        if "LoRA Weight" in input_mode:
            model_strength = None
            clip_strength = None
        if "Model Strength" in input_mode:
            model_strength = None
        if "Clip Strength" in input_mode:
            clip_strength = None

        if "X: LoRA Batch" in input_mode:
            try:
                # [0][1] unwraps the batch node's ((type, values),) return.
                x_value = self.lora_batch.xy_value(X_batch_path, X_subdirectories, X_batch_sort,
                                                   model_strength, clip_strength, X_batch_count, lora_stack)[0][1]
            except Exception as e:
                print(f"{error('XY Plot Error:')} {e}")
                # NOTE(review): returns a 1-tuple although this node declares two
                # outputs — confirm downstream handling of this error path.
                return (None,)
            x_type = "LoRA Batch"
        elif "X: Model Strength" in input_mode:
            x_floats = generate_floats(X_batch_count, X_first_value, X_last_value)
            x_type = "LoRA MStr"
            x_value = [[(lora_name, x, clip_strength)] + lora_stack for x in x_floats]

        y_floats = generate_floats(Y_batch_count, Y_first_value, Y_last_value)
        if "Y: LoRA Weight" in input_mode:
            y_type = "LoRA Wt"
            # "Weight" drives model and clip strength together.
            y_value = [[(lora_name, y, y)] + lora_stack for y in y_floats]
        elif "Y: Model Strength" in input_mode:
            y_type = "LoRA MStr"
            y_value = [[(lora_name, y, clip_strength)] + lora_stack for y in y_floats]
        elif "Y: Clip Strength" in input_mode:
            y_type = "LoRA CStr"
            y_value = [[(lora_name, model_strength, y)] + lora_stack for y in y_floats]

        return ((x_type, x_value), (y_type, y_value))
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_LoRA_Stacks:
    """XY Plot input node that compares up to five pre-built LoRA stacks."""

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {"node_state": (["Enabled"],)},
            "optional": {
                "lora_stack_1": ("LORA_STACK",),
                "lora_stack_2": ("LORA_STACK",),
                "lora_stack_3": ("LORA_STACK",),
                "lora_stack_4": ("LORA_STACK",),
                "lora_stack_5": ("LORA_STACK",),
            },
        }

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, node_state, lora_stack_1=None, lora_stack_2=None, lora_stack_3=None, lora_stack_4=None, lora_stack_5=None):
        """Collect the connected stacks in input order, skipping unconnected ones."""
        stacks = [lora_stack_1, lora_stack_2, lora_stack_3, lora_stack_4, lora_stack_5]
        xy_value = [stack for stack in stacks if stack is not None]
        # Bail out when nothing usable is connected (or the node is disabled).
        if not xy_value or not any(xy_value) or node_state == "Disabled":
            return (None,)
        return (("LoRA", xy_value),)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_Control_Net_Strength:
    """XY Plot input node sweeping a ControlNet's strength over a range."""

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "control_net": ("CONTROL_NET",),
                "image": ("IMAGE",),
                "batch_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM}),
                "first_strength": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 10.0, "step": 0.01}),
                "last_strength": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 10.0, "step": 0.01}),
                "start_percent": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 1.0, "step": 0.01}),
                "end_percent": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, "step": 0.01}),
            },
            "optional": {"cnet_stack": ("CONTROL_NET_STACK",)},
        }

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, control_net, image, batch_count, first_strength, last_strength,
                 start_percent, end_percent, cnet_stack=None):
        """Interpolate `batch_count` strengths between first and last (endpoints included)."""
        if batch_count == 0:
            return (None,)

        step = (last_strength - first_strength) / (batch_count - 1) if batch_count > 1 else 0

        # First endpoint, interior interpolated points, then the last endpoint.
        entries = [[(control_net, image, first_strength, start_percent, end_percent)]]
        for i in range(1, batch_count - 1):
            entries.append([(control_net, image, first_strength + i * step, start_percent, end_percent)])
        if batch_count > 1:
            entries.append([(control_net, image, last_strength, start_percent, end_percent)])

        # Append any upstream ControlNet stack to every entry.
        if cnet_stack:
            for entry in entries:
                entry.extend(cnet_stack)

        return (("ControlNetStrength", entries),)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_Control_Net_Start:
    """XY Plot input node sweeping a ControlNet's start percent over a range."""

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "control_net": ("CONTROL_NET",),
                "image": ("IMAGE",),
                "batch_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM}),
                "first_start_percent": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 1.0, "step": 0.01}),
                "last_start_percent": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, "step": 0.01}),
                "strength": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 10.0, "step": 0.01}),
                "end_percent": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, "step": 0.01}),
            },
            "optional": {"cnet_stack": ("CONTROL_NET_STACK",)},
        }

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, control_net, image, batch_count, first_start_percent, last_start_percent,
                 strength, end_percent, cnet_stack=None):
        """Interpolate `batch_count` start percents between first and last (endpoints included)."""
        if batch_count == 0:
            return (None,)

        step = (last_start_percent - first_start_percent) / (batch_count - 1) if batch_count > 1 else 0

        # First endpoint, interior interpolated points, then the last endpoint.
        entries = [[(control_net, image, strength, first_start_percent, end_percent)]]
        for i in range(1, batch_count - 1):
            entries.append([(control_net, image, strength, first_start_percent + i * step, end_percent)])
        if batch_count > 1:
            entries.append([(control_net, image, strength, last_start_percent, end_percent)])

        # Append any upstream ControlNet stack to every entry.
        if cnet_stack:
            for entry in entries:
                entry.extend(cnet_stack)

        return (("ControlNetStart%", entries),)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_Control_Net_End:
    """XY Plot input node sweeping a ControlNet's end percent over a range.

    Fix: the "start_percent" widget previously defaulted to 1.0, which makes
    the guidance window start at 100% (so the ControlNet effectively never
    applies while the end percent is swept). It now defaults to 0.0, matching
    the sibling nodes and the combined TSC_XYplot_Control_Net node.
    """

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "control_net": ("CONTROL_NET",),
                "image": ("IMAGE",),
                "batch_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM}),
                "first_end_percent": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 1.0, "step": 0.01}),
                "last_end_percent": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, "step": 0.01}),
                "strength": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 10.0, "step": 0.01}),
                # Default 0.0 (was 1.0): starting at 1.0 yields an empty window.
                "start_percent": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 1.0, "step": 0.01}),
            },
            "optional": {"cnet_stack": ("CONTROL_NET_STACK",)},
        }

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, control_net, image, batch_count, first_end_percent, last_end_percent,
                 strength, start_percent, cnet_stack=None):
        """Interpolate `batch_count` end percents between first and last (endpoints included).

        Returns a ("ControlNetEnd%", entries) XY tuple, or (None,) when
        batch_count is 0. Each entry is a list holding one
        (control_net, image, strength, start%, end%) tuple, extended with any
        upstream cnet_stack.
        """
        if batch_count == 0:
            return (None,)

        increment = (last_end_percent - first_end_percent) / (batch_count - 1) if batch_count > 1 else 0

        # First endpoint, interior interpolated points, then the last endpoint.
        entries = [[(control_net, image, strength, start_percent, first_end_percent)]]
        for i in range(1, batch_count - 1):
            entries.append([(control_net, image, strength, start_percent,
                             first_end_percent + i * increment)])
        if batch_count > 1:
            entries.append([(control_net, image, strength, start_percent, last_end_percent)])

        # Append any upstream ControlNet stack to every entry.
        if cnet_stack:
            for entry in entries:
                entry.extend(cnet_stack)

        return (("ControlNetEnd%", entries),)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_Control_Net(TSC_XYplot_Control_Net_Strength, TSC_XYplot_Control_Net_Start, TSC_XYplot_Control_Net_End):
    """Combined ControlNet XY node: sweeps strength, start percent, or end percent."""

    parameters = ["strength", "start_percent", "end_percent"]

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "control_net": ("CONTROL_NET",),
                "image": ("IMAGE",),
                "target_parameter": (cls.parameters,),
                "batch_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM}),
                "first_strength": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 10.0, "step": 0.01}),
                "last_strength": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 10.0, "step": 0.01}),
                "first_start_percent": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 1.0, "step": 0.01}),
                "last_start_percent": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, "step": 0.01}),
                "first_end_percent": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 1.0, "step": 0.01}),
                "last_end_percent": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, "step": 0.01}),
                "strength": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 10.0, "step": 0.01}),
                "start_percent": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 1.0, "step": 0.01}),
                "end_percent": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, "step": 0.01}),
            },
            "optional": {"cnet_stack": ("CONTROL_NET_STACK",)},
        }

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, control_net, image, target_parameter, batch_count, first_strength, last_strength, first_start_percent,
                 last_start_percent, first_end_percent, last_end_percent, strength, start_percent, end_percent, cnet_stack=None):
        """Dispatch to the dedicated node implementation for the chosen parameter sweep."""
        if target_parameter == "strength":
            return TSC_XYplot_Control_Net_Strength.xy_value(
                self, control_net, image, batch_count, first_strength,
                last_strength, start_percent, end_percent, cnet_stack=cnet_stack)
        if target_parameter == "start_percent":
            return TSC_XYplot_Control_Net_Start.xy_value(
                self, control_net, image, batch_count, first_start_percent,
                last_start_percent, strength, end_percent, cnet_stack=cnet_stack)
        if target_parameter == "end_percent":
            return TSC_XYplot_Control_Net_End.xy_value(
                self, control_net, image, batch_count, first_end_percent,
                last_end_percent, strength, start_percent, cnet_stack=cnet_stack)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_Control_Net_Plot:
    """Two-axis ControlNet plot node: X and Y each sweep strength, start%, or end%."""

    plot_types = ["X: Strength, Y: Start%",
                  "X: Strength, Y: End%",
                  "X: Start%, Y: Strength",
                  "X: Start%, Y: End%",
                  "X: End%, Y: Strength",
                  "X: End%, Y: Start%"]

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "control_net": ("CONTROL_NET",),
                "image": ("IMAGE",),
                "plot_type": (cls.plot_types,),
                "strength": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, "step": 0.01}),
                "start_percent": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 1.0, "step": 0.01}),
                "end_percent": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 1.0, "step": 0.01}),
                "X_batch_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM}),
                "X_first_value": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 10.0, "step": 0.01}),
                "X_last_value": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 10.0, "step": 0.01}),
                "Y_batch_count": ("INT", {"default": XYPLOT_DEF, "min": 0, "max": XYPLOT_LIM}),
                "Y_first_value": ("FLOAT", {"default": 0.0, "min": 0.00, "max": 10.0, "step": 0.01}),
                "Y_last_value": ("FLOAT", {"default": 1.0, "min": 0.00, "max": 10.0, "step": 0.01}),
            },
            "optional": {"cnet_stack": ("CONTROL_NET_STACK",)},
        }

    RETURN_TYPES = ("XY", "XY",)
    RETURN_NAMES = ("X", "Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def get_value(self, axis, control_net, image, strength, start_percent, end_percent,
                  batch_count, first_value, last_value):
        """Interpolate `batch_count` entries along one axis (endpoints included)."""

        # Percent axes are clamped to the valid [.., 1] range.
        if axis in ["Start%", "End%"]:
            first_value = min(1, first_value)
            last_value = min(1, last_value)

        increment = (last_value - first_value) / (batch_count - 1) if batch_count > 1 else 0

        def row(value):
            # Substitute the swept value into the slot that matches the axis.
            if axis == "Strength":
                return [(control_net, image, value, start_percent, end_percent)]
            if axis == "Start%":
                return [(control_net, image, strength, value, end_percent)]
            if axis == "End%":
                return [(control_net, image, strength, start_percent, value)]
            return None

        values = []
        head = row(first_value)
        if head is not None:
            values.append(head)
        for i in range(1, batch_count - 1):
            mid = row(first_value + i * increment)
            if mid is not None:
                values.append(mid)
        if batch_count > 1:
            tail = row(last_value)
            if tail is not None:
                values.append(tail)

        return values

    def xy_value(self, control_net, image, strength, start_percent, end_percent, plot_type,
                 X_batch_count, X_first_value, X_last_value, Y_batch_count, Y_first_value, Y_last_value,
                 cnet_stack=None):
        """Build both axes of a ControlNet parameter grid from the chosen plot type."""

        # plot_type looks like "X: Strength, Y: Start%"; pull out the axis labels.
        x_label, y_label = (part.split(": ")[1].strip() for part in plot_type.split(", "))

        x_entry = None
        if X_batch_count > 0:
            x_values = self.get_value(x_label, control_net, image, strength, start_percent,
                                      end_percent, X_batch_count, X_first_value, X_last_value)
            if cnet_stack:
                for entry in x_values:
                    entry.extend(cnet_stack)
            x_entry = ("ControlNet" + x_label, x_values)

        y_entry = None
        if Y_batch_count > 0:
            y_values = self.get_value(y_label, control_net, image, strength, start_percent,
                                      end_percent, Y_batch_count, Y_first_value, Y_last_value)
            if cnet_stack:
                for entry in y_values:
                    entry.extend(cnet_stack)
            y_entry = ("ControlNet" + y_label, y_values)

        return (x_entry, y_entry,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_Manual_XY_Entry_Info:
    # Informational helper node: shows the manual XY entry syntax and the
    # currently installed sampler/scheduler/VAE/checkpoint/LoRA names inside
    # a multiline "notes" widget. It produces no outputs (RETURN_TYPES is empty).

    # Cheat-sheet describing the "type value" line formats accepted by the
    # "Manual XY Entry" node (entries separated by ';', tuple parts by ',').
    syntax = "(X/Y_types) (X/Y_values)\n" \
             "Seeds++ Batch batch_count\n" \
             "Steps steps_1;steps_2;...\n" \
             "StartStep start_step_1;start_step_2;...\n" \
             "EndStep end_step_1;end_step_2;...\n" \
             "CFG Scale cfg_1;cfg_2;...\n" \
             "Sampler(1) sampler_1;sampler_2;...\n" \
             "Sampler(2) sampler_1,scheduler_1;...\n" \
             "Sampler(3) sampler_1;...;,default_scheduler\n" \
             "Scheduler scheduler_1;scheduler_2;...\n" \
             "Denoise denoise_1;denoise_2;...\n" \
             "VAE vae_1;vae_2;vae_3;...\n" \
             "+Prompt S/R search_txt;replace_1;replace_2;...\n" \
             "-Prompt S/R search_txt;replace_1;replace_2;...\n" \
             "Checkpoint(1) ckpt_1;ckpt_2;ckpt_3;...\n" \
             "Checkpoint(2) ckpt_1,clip_skip_1;...\n" \
             "Checkpoint(3) ckpt_1;ckpt_2;...;,default_clip_skip\n" \
             "Clip Skip clip_skip_1;clip_skip_2;...\n" \
             "LoRA(1) lora_1;lora_2;lora_3;...\n" \
             "LoRA(2) lora_1;...;,default_model_str,default_clip_str\n" \
             "LoRA(3) lora_1,model_str_1,clip_str_1;..."

    @classmethod
    def INPUT_TYPES(cls):
        # Gather the live option lists so the notes reflect this installation.
        samplers = ";\n".join(comfy.samplers.KSampler.SAMPLERS)
        schedulers = ";\n".join(SCHEDULERS)
        vaes = ";\n".join(folder_paths.get_filename_list("vae"))
        ckpts = ";\n".join(folder_paths.get_filename_list("checkpoints"))
        loras = ";\n".join(folder_paths.get_filename_list("loras"))
        # Everything is rendered into a single editable multiline text widget.
        return {"required": {
            "notes": ("STRING", {"default":
                f"_____________SYNTAX_____________\n{cls.syntax}\n\n"
                f"____________SAMPLERS____________\n{samplers}\n\n"
                f"___________SCHEDULERS___________\n{schedulers}\n\n"
                f"_____________VAES_______________\n{vaes}\n\n"
                f"___________CHECKPOINTS__________\n{ckpts}\n\n"
                f"_____________LORAS______________\n{loras}\n","multiline": True}),},}

    RETURN_TYPES = ()
    CATEGORY = "Efficiency Nodes/XY Inputs"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_Manual_XY_Entry:
    """Parse a manually-typed list of XY plot values into an ("type", values) entry.

    Fixes vs. previous revision: corrected the garbled ANSI escape in the
    Seeds++ Batch error message ("\\033[31mm" -> "\\033[31m"), replaced
    `== None` with `is None`, and deduplicated the copy-pasted int/float
    clamp branches of validate_value. All printed messages are otherwise
    preserved byte-for-byte.
    """

    # Axis types a user may type values for.
    plot_types = ["Nothing", "Seeds++ Batch", "Steps", "StartStep", "EndStep", "CFG Scale", "Sampler", "Scheduler",
                  "Denoise", "VAE", "Positive Prompt S/R", "Negative Prompt S/R", "Checkpoint", "Clip Skip", "LoRA"]

    @classmethod
    def INPUT_TYPES(cls):
        return {"required": {
            "plot_type": (cls.plot_types,),
            "plot_value": ("STRING", {"default": "", "multiline": True}),}
        }

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, plot_type, plot_value):
        """Validate *plot_value* for *plot_type*; return ((plot_type, values),) or (None,)."""

        # Spaces are insignificant except inside prompt S/R text and filenames.
        if plot_type not in {"Positive Prompt S/R", "Negative Prompt S/R", "VAE", "Checkpoint", "LoRA", "Scheduler"}:
            plot_value = plot_value.replace(" ", "")
        plot_value = plot_value.replace("\n", "")
        plot_value = plot_value.rstrip(";")
        plot_value = plot_value.split(";")

        # Numeric limits and valid option lists used by validate_value below.
        bounds = {
            "Seeds++ Batch": {"min": 1, "max": 50},
            "Steps": {"min": 1, "max": 10000},
            "StartStep": {"min": 0, "max": 10000},
            "EndStep": {"min": 0, "max": 10000},
            "CFG Scale": {"min": 0, "max": 100},
            "Sampler": {"options": comfy.samplers.KSampler.SAMPLERS},
            "Scheduler": {"options": SCHEDULERS},
            "Denoise": {"min": 0, "max": 1},
            "VAE": {"options": folder_paths.get_filename_list("vae")},
            "Checkpoint": {"options": folder_paths.get_filename_list("checkpoints")},
            "Clip Skip": {"min": -24, "max": -1},
            "LoRA": {"options": folder_paths.get_filename_list("loras"),
                     "model_str": {"min": -10, "max": 10}, "clip_str": {"min": -10, "max": 10},},
        }

        def clamp(x, b):
            # Clamp numeric x into the {"min", "max"} bounds dict b.
            return max(b["min"], min(b["max"], x))

        def validate_value(value, value_type, bounds):
            """Parse/validate one entry; return the normalized value or None on error."""

            if value_type == "Seeds++ Batch":
                try:
                    x = clamp(int(float(value)), bounds["Seeds++ Batch"])
                except ValueError:
                    print(f"\033[31mXY Plot Error:\033[0m '{value}' is not a valid batch count.")
                    return None
                # Reject fractional entries (e.g. "1.5") and values changed by clamping.
                if float(value) != x:
                    print(f"\033[31mXY Plot Error:\033[0m '{value}' is not a valid batch count.")
                    return None
                return x

            # Plain integer types share one parse-and-clamp path; only the
            # label in the error message differs.
            int_labels = {"Steps": "Step count", "StartStep": "Start Step",
                          "EndStep": "End Step", "Clip Skip": "Clip Skip"}
            if value_type in int_labels:
                try:
                    return clamp(int(value), bounds[value_type])
                except ValueError:
                    print(
                        f"\033[31mXY Plot Error:\033[0m '{value}' is not a valid {int_labels[value_type]}.")
                    return None

            # Plain float types likewise share one path.
            if value_type in ("CFG Scale", "Denoise"):
                b = bounds[value_type]
                try:
                    return clamp(float(value), b)
                except ValueError:
                    print(
                        f"\033[31mXY Plot Error:\033[0m '{value}' is not a number between {b['min']}"
                        f" and {b['max']} for {value_type}.")
                    return None

            if value_type == "Sampler":
                # "sampler,scheduler" pairs are parsed into a tuple.
                if isinstance(value, str) and ',' in value:
                    value = tuple(map(str.strip, value.split(',')))
                if isinstance(value, tuple):
                    if len(value) >= 2:
                        value = value[:2]
                        sampler, scheduler = value
                        # NOTE(review): lowercasing will reject mixed-case scheduler
                        # names such as "AYS SD1" — confirm this is intended.
                        scheduler = scheduler.lower()
                        if sampler not in bounds["Sampler"]["options"]:
                            valid_samplers = '\n'.join(bounds["Sampler"]["options"])
                            print(
                                f"\033[31mXY Plot Error:\033[0m '{sampler}' is not a valid sampler. Valid samplers are:\n{valid_samplers}")
                            sampler = None
                        if scheduler not in bounds["Scheduler"]["options"]:
                            valid_schedulers = '\n'.join(bounds["Scheduler"]["options"])
                            print(
                                f"\033[31mXY Plot Error:\033[0m '{scheduler}' is not a valid scheduler. Valid schedulers are:\n{valid_schedulers}")
                            scheduler = None
                        if sampler is None or scheduler is None:
                            return None
                        return sampler, scheduler
                    print(
                        f"\033[31mXY Plot Error:\033[0m '{value}' is not a valid sampler.'")
                    return None
                if value not in bounds["Sampler"]["options"]:
                    valid_samplers = '\n'.join(bounds["Sampler"]["options"])
                    print(
                        f"\033[31mXY Plot Error:\033[0m '{value}' is not a valid sampler. Valid samplers are:\n{valid_samplers}")
                    return None
                # Bare sampler name: scheduler left unspecified.
                return value, None

            if value_type == "Scheduler":
                if value not in bounds["Scheduler"]["options"]:
                    valid_schedulers = '\n'.join(bounds["Scheduler"]["options"])
                    print(
                        f"\033[31mXY Plot Error:\033[0m '{value}' is not a valid Scheduler. Valid Schedulers are:\n{valid_schedulers}")
                    return None
                return value

            if value_type == "VAE":
                if value not in bounds["VAE"]["options"]:
                    valid_vaes = '\n'.join(bounds["VAE"]["options"])
                    print(f"\033[31mXY Plot Error:\033[0m '{value}' is not a valid VAE. Valid VAEs are:\n{valid_vaes}")
                    return None
                return value

            if value_type == "Checkpoint":
                # "ckpt,clip_skip" pairs are parsed into a tuple.
                if isinstance(value, str) and ',' in value:
                    value = tuple(map(str.strip, value.split(',')))
                if isinstance(value, tuple):
                    if len(value) >= 2:
                        value = value[:2]
                        checkpoint, clip_skip = value
                        try:
                            clip_skip = int(clip_skip)
                        except ValueError:
                            print(f"\033[31mXY Plot Error:\033[0m '{clip_skip}' is not a valid clip_skip. "
                                  f"Valid clip skip values are integers between {bounds['Clip Skip']['min']} and {bounds['Clip Skip']['max']}.")
                            return None
                        if checkpoint not in bounds["Checkpoint"]["options"]:
                            valid_checkpoints = '\n'.join(bounds["Checkpoint"]["options"])
                            print(
                                f"\033[31mXY Plot Error:\033[0m '{checkpoint}' is not a valid checkpoint. Valid checkpoints are:\n{valid_checkpoints}")
                            checkpoint = None
                        if clip_skip < bounds["Clip Skip"]["min"] or clip_skip > bounds["Clip Skip"]["max"]:
                            print(f"\033[31mXY Plot Error:\033[0m '{clip_skip}' is not a valid clip skip. "
                                  f"Valid clip skip values are integers between {bounds['Clip Skip']['min']} and {bounds['Clip Skip']['max']}.")
                            clip_skip = None
                        if checkpoint is None or clip_skip is None:
                            return None
                        return checkpoint, clip_skip, None
                    print(
                        f"\033[31mXY Plot Error:\033[0m '{value}' is not a valid checkpoint.'")
                    return None
                if value not in bounds["Checkpoint"]["options"]:
                    valid_checkpoints = '\n'.join(bounds["Checkpoint"]["options"])
                    print(
                        f"\033[31mXY Plot Error:\033[0m '{value}' is not a valid checkpoint. Valid checkpoints are:\n{valid_checkpoints}")
                    return None
                return value, None, None

            if value_type == "LoRA":
                if isinstance(value, str) and ',' in value:
                    value = tuple(map(str.strip, value.split(',')))

                if isinstance(value, tuple):
                    # Pad missing model/clip strengths with the 1.0 defaults.
                    lora_name, model_str, clip_str = (value + (1.0, 1.0))[:3]

                    if lora_name not in bounds["LoRA"]["options"]:
                        valid_loras = '\n'.join(bounds["LoRA"]["options"])
                        print(f"{error('XY Plot Error:')} '{lora_name}' is not a valid LoRA. Valid LoRAs are:\n{valid_loras}")
                        lora_name = None

                    try:
                        model_str = float(model_str)
                        clip_str = float(clip_str)
                    except ValueError:
                        print(f"{error('XY Plot Error:')} The LoRA model strength and clip strength values should be numbers"
                              f" between {bounds['LoRA']['model_str']['min']} and {bounds['LoRA']['model_str']['max']}.")
                        return None

                    if model_str < bounds["LoRA"]["model_str"]["min"] or model_str > bounds["LoRA"]["model_str"]["max"]:
                        print(f"{error('XY Plot Error:')} '{model_str}' is not a valid LoRA model strength value. "
                              f"Valid lora model strength values are between {bounds['LoRA']['model_str']['min']} and {bounds['LoRA']['model_str']['max']}.")
                        model_str = None

                    if clip_str < bounds["LoRA"]["clip_str"]["min"] or clip_str > bounds["LoRA"]["clip_str"]["max"]:
                        print(f"{error('XY Plot Error:')} '{clip_str}' is not a valid LoRA clip strength value. "
                              f"Valid lora clip strength values are between {bounds['LoRA']['clip_str']['min']} and {bounds['LoRA']['clip_str']['max']}.")
                        clip_str = None

                    if lora_name is None or model_str is None or clip_str is None:
                        return None
                    return lora_name, model_str, clip_str

                if value not in bounds["LoRA"]["options"]:
                    valid_loras = '\n'.join(bounds["LoRA"]["options"])
                    print(f"{error('XY Plot Error:')} '{value}' is not a valid LoRA. Valid LoRAs are:\n{valid_loras}")
                    return None
                return value, 1.0, 1.0

            # Unknown value type: treat as invalid.
            return None

        # Seeds++ Batch takes exactly one value (the batch count).
        if len(plot_value) != 1 and plot_type == "Seeds++ Batch":
            print(f"{error('XY Plot Error:')} '{';'.join(plot_value)}' is not a valid batch count.")
            return (None,)

        # A trailing ",defaults" entry supplies default tuple components that
        # are appended to every preceding entry that omitted them.
        if plot_type in ["Sampler", "Checkpoint", "LoRA"]:
            if plot_value[-1].startswith(','):
                suffixes = plot_value.pop().lstrip(',').split(',')
                plot_value = [entry.split(',') for entry in plot_value]
                for entry in plot_value:
                    entry += suffixes[len(entry) - 1:]
                plot_value = [','.join(entry) for entry in plot_value]

        if plot_type in {"Positive Prompt S/R", "Negative Prompt S/R"}:
            if plot_value[0] == '':
                print(f"{error('XY Plot Error:')} Prompt S/R value can not be empty.")
                return (None,)
            else:
                # First tuple is (search, None); the rest pair search with each replacement.
                plot_value = [(plot_value[0], None) if i == 0 else (plot_value[0], x) for i, x in enumerate(plot_value)]

        # Validate every entry; any single failure invalidates the whole plot.
        if plot_type not in {"Nothing", "Positive Prompt S/R", "Negative Prompt S/R"}:
            for i in range(len(plot_value)):
                plot_value[i] = validate_value(plot_value[i], plot_type, bounds)
                if plot_value[i] is None:
                    return (None,)

        # A batch count of N expands to seed offsets 0..N-1.
        if plot_type == "Seeds++ Batch":
            plot_value = list(range(plot_value[0]))

        # The XY machinery expects each LoRA point as a lora-stack (list of tuples).
        if plot_type == "LoRA":
            plot_value = [[x] for x in plot_value]

        if plot_type == "Nothing":
            plot_value = [""]

        return ((plot_type, plot_value),)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_XYplot_JoinInputs:
    """Concatenate two XY axis entries of the same type into one entry."""

    @classmethod
    def INPUT_TYPES(cls):
        return {"required": {
            "XY_1": ("XY",),
            "XY_2": ("XY",),},
        }

    RETURN_TYPES = ("XY",)
    RETURN_NAMES = ("X or Y",)
    FUNCTION = "xy_value"
    CATEGORY = "Efficiency Nodes/XY Inputs"

    def xy_value(self, XY_1, XY_2):
        """Return ((type, joined_values),), or (None,) on a type mismatch."""
        first_type, first_values = XY_1
        second_type, second_values = XY_2

        # Joining only makes sense for entries of identical type.
        if first_type != second_type:
            print(f"{error('Join XY Inputs Error:')} Input types must match")
            return (None,)

        if first_type == "Seeds++ Batch":
            # Seed batches are re-enumerated so the combined run stays sequential.
            joined = list(range(len(first_values) + len(second_values)))
        elif first_type in ("Positive Prompt S/R", "Negative Prompt S/R"):
            # Keep the first input's search text and splice in the second
            # input's replacements (its leading (search, None) entry is dropped).
            search_txt = first_values[0][0]
            joined = first_values + [(search_txt, pair[1]) for pair in second_values[1:]]
        else:
            joined = first_values + second_values

        return ((first_type, joined),)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_ImageOverlay:
    # Composites an overlay image (optionally resized, masked, rotated and
    # faded by opacity) on top of every image in a batched IMAGE tensor.

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "base_image": ("IMAGE",),
                "overlay_image": ("IMAGE",),
                # NOTE: the "heigth" typo is part of the public widget value;
                # changing it would break previously saved workflows.
                "overlay_resize": (["None", "Fit", "Resize by rescale_factor", "Resize to width & heigth"],),
                "resize_method": (["nearest-exact", "bilinear", "area"],),
                "rescale_factor": ("FLOAT", {"default": 1, "min": 0.01, "max": 16.0, "step": 0.1}),
                "width": ("INT", {"default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 64}),
                "height": ("INT", {"default": 512, "min": 0, "max": MAX_RESOLUTION, "step": 64}),
                "x_offset": ("INT", {"default": 0, "min": -48000, "max": 48000, "step": 10}),
                "y_offset": ("INT", {"default": 0, "min": -48000, "max": 48000, "step": 10}),
                "rotation": ("INT", {"default": 0, "min": -180, "max": 180, "step": 5}),
                "opacity": ("FLOAT", {"default": 0, "min": 0, "max": 100, "step": 5}),
            },
            "optional": {"optional_mask": ("MASK",),}
        }

    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "apply_overlay_image"
    CATEGORY = "Efficiency Nodes/Image"

    def apply_overlay_image(self, base_image, overlay_image, overlay_resize, resize_method, rescale_factor,
                            width, height, x_offset, y_offset, rotation, opacity, optional_mask=None):
        # Pack widget values; size and location are (x, y) pairs.
        size = width, height
        location = x_offset, y_offset
        mask = optional_mask

        # Optionally rescale the overlay before compositing.
        if overlay_resize != "None":
            # Indexing below uses dim 1 as height and dim 2 as width of the
            # image tensor; extract the overlay size as (width, height).
            overlay_image_size = overlay_image.size()
            overlay_image_size = (overlay_image_size[2], overlay_image_size[1])
            if overlay_resize == "Fit":
                # Uniform scale so the overlay fits inside the base image.
                h_ratio = base_image.size()[1] / overlay_image_size[1]
                w_ratio = base_image.size()[2] / overlay_image_size[0]
                ratio = min(h_ratio, w_ratio)
                overlay_image_size = tuple(round(dimension * ratio) for dimension in overlay_image_size)
            elif overlay_resize == "Resize by rescale_factor":
                overlay_image_size = tuple(int(dimension * rescale_factor) for dimension in overlay_image_size)
            elif overlay_resize == "Resize to width & heigth":
                overlay_image_size = (size[0], size[1])

            # common_upscale expects channels-first; restore layout afterwards.
            samples = overlay_image.movedim(-1, 1)
            overlay_image = comfy.utils.common_upscale(samples, overlay_image_size[0], overlay_image_size[1], resize_method, False)
            overlay_image = overlay_image.movedim(1, -1)

        overlay_image = tensor2pil(overlay_image)

        # Start from a fully opaque alpha channel.
        overlay_image = overlay_image.convert('RGBA')
        overlay_image.putalpha(Image.new("L", overlay_image.size, 255))

        # Apply the optional mask as an inverted alpha channel.
        if mask is not None:
            mask = tensor2pil(mask)
            mask = mask.resize(overlay_image.size)
            overlay_image.putalpha(ImageOps.invert(mask))

        # Rotate with canvas expansion so corners are not clipped.
        overlay_image = overlay_image.rotate(rotation, expand=True)

        # Scale the alpha channel down by the requested opacity percentage.
        r, g, b, a = overlay_image.split()
        a = a.point(lambda x: max(0, int(x * (1 - opacity / 100))))
        overlay_image.putalpha(a)

        # Split the batch so each image can be composited with PIL.
        base_image_list = torch.unbind(base_image, dim=0)

        processed_base_image_list = []
        for tensor in base_image_list:
            image = tensor2pil(tensor)

            if mask is None:
                image.paste(overlay_image, location)
            else:
                # Use the overlay's own alpha channel as the paste mask.
                image.paste(overlay_image, location, overlay_image)

            processed_tensor = pil2tensor(image)

            processed_base_image_list.append(processed_tensor)

        # Re-stack the processed frames into a single batched tensor.
        base_image = torch.stack([tensor.squeeze() for tensor in processed_base_image_list])

        return (base_image,)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_Noise_Control_Script:
    """Script node that records noise-generation settings for the efficient samplers."""

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "rng_source": (["cpu", "gpu", "nv"],),
                "cfg_denoiser": ("BOOLEAN", {"default": False}),
                "add_seed_noise": ("BOOLEAN", {"default": False}),
                "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}),
                "weight": ("FLOAT", {"default": 0.015, "min": 0, "max": 1, "step": 0.001})},
            "optional": {"script": ("SCRIPT",)}
        }

    RETURN_TYPES = ("SCRIPT",)
    RETURN_NAMES = ("SCRIPT",)
    FUNCTION = "noise_control"
    CATEGORY = "Efficiency Nodes/Scripts"

    def noise_control(self, rng_source, cfg_denoiser, add_seed_noise, seed, weight, script=None):
        """Store the noise settings under the "noise" key of the script pipe."""
        settings = (rng_source, cfg_denoiser, add_seed_noise, seed, weight)
        script = script or {}
        script["noise"] = settings
        return (script,)
|
|
|
|
|
|
|
|
|
|
|
|
# Widget defaults when the comfyui_controlnet_aux add-on is absent:
# a single-option placeholder dropdown for both widgets.
use_controlnet_widget = preprocessor_widget = (["_"],)
if os.path.exists(os.path.join(custom_nodes_dir, "comfyui_controlnet_aux")):
    printout = "Attempting to add Control Net options to the 'HiRes-Fix Script' Node (comfyui_controlnet_aux add-on)..."

    try:
        # Import quietly; the add-on may print during module load.
        with suppress_output():
            AIO_Preprocessor = getattr(import_module("comfyui_controlnet_aux.__init__"), 'AIO_Preprocessor')
        # Upgrade the widgets to a real toggle plus the add-on's preprocessor list.
        use_controlnet_widget = ("BOOLEAN", {"default": False})
        preprocessor_widget = AIO_Preprocessor.INPUT_TYPES()["optional"]["preprocessor"]
        print(f"\r{message('Efficiency Nodes:')} {printout}{success('Success!')}")
    except Exception:
        # Best-effort: fall back to the placeholder widgets defined above.
        print(f"\r{message('Efficiency Nodes:')} {printout}{error('Failed!')}")
|
|
|
|
|
|
|
|
|
class TSC_HighRes_Fix:
    # Script node configuring a HighRes-Fix pass (latent and/or pixel
    # upscaling, optional ControlNet guidance) for the efficient samplers.

    # Built-in latent upscale methods from ComfyUI's LatentUpscaleBy node.
    default_latent_upscalers = LatentUpscaleBy.INPUT_TYPES()["required"]["upscale_method"][0]

    # city96 latent upscaler variants, prefixed so they are distinguishable
    # in the combined dropdown; their supported scale factors are kept both
    # raw (strings) and as floats for nearest-value matching.
    city96_upscale_methods =\
        ["city96." + ver for ver in city96_latent_upscaler.LatentUpscaler.INPUT_TYPES()["required"]["latent_ver"][0]]
    city96_scalings_raw = city96_latent_upscaler.LatentUpscaler.INPUT_TYPES()["required"]["scale_factor"][0]
    city96_scalings_float = [float(scale) for scale in city96_scalings_raw]

    # ttl_nn neural-network latent upscaler variants, likewise prefixed.
    ttl_nn_upscale_methods = \
        ["ttl_nn." + ver for ver in
         ttl_nn_latent_upscaler.NNLatentUpscale.INPUT_TYPES()["required"]["version"][0]]

    # Combined dropdown contents.
    latent_upscalers = default_latent_upscalers + city96_upscale_methods + ttl_nn_upscale_methods
    pixel_upscalers = folder_paths.get_filename_list("upscale_models")

    @classmethod
    def INPUT_TYPES(cls):

        return {"required": {"upscale_type": (["latent","pixel","both"],),
                             "hires_ckpt_name": (["(use same)"] + folder_paths.get_filename_list("checkpoints"),),
                             "latent_upscaler": (cls.latent_upscalers,),
                             "pixel_upscaler": (cls.pixel_upscalers,),
                             "upscale_by": ("FLOAT", {"default": 1.25, "min": 0.01, "max": 8.0, "step": 0.05}),
                             "use_same_seed": ("BOOLEAN", {"default": True}),
                             "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}),
                             "hires_steps": ("INT", {"default": 12, "min": 1, "max": 10000}),
                             "denoise": ("FLOAT", {"default": .56, "min": 0.00, "max": 1.00, "step": 0.01}),
                             "iterations": ("INT", {"default": 1, "min": 0, "max": 5, "step": 1}),
                             "use_controlnet": use_controlnet_widget,
                             "control_net_name": (folder_paths.get_filename_list("controlnet"),),
                             "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}),
                             "preprocessor": preprocessor_widget,
                             "preprocessor_imgs": ("BOOLEAN", {"default": False})
                             },
                "optional": {"script": ("SCRIPT",)},
                "hidden": {"my_unique_id": "UNIQUE_ID"}
                }

    RETURN_TYPES = ("SCRIPT",)
    FUNCTION = "hires_fix_script"
    CATEGORY = "Efficiency Nodes/Scripts"

    def hires_fix_script(self, upscale_type, hires_ckpt_name, latent_upscaler, pixel_upscaler, upscale_by,
                         use_same_seed, seed, hires_steps, denoise, iterations, use_controlnet, control_net_name,
                         strength, preprocessor, preprocessor_imgs, script=None, my_unique_id=None):
        # Resolved below depending on upscale_type; None means "not used".
        latent_upscale_function = None
        latent_upscale_model = None
        pixel_upscale_model = None

        def float_to_string(num):
            # Render whole floats as "x.0" so they compare equal to the raw
            # scaling strings from city96_scalings_raw.
            if num == int(num):
                return "{:.1f}".format(num)
            else:
                return str(num)

        # Zero iterations or a zero scale disables the hires pass entirely.
        if iterations > 0 and upscale_by > 0:
            if upscale_type == "latent":

                # city96 upscalers only support a fixed set of scale factors.
                if latent_upscaler in self.city96_upscale_methods:

                    latent_upscaler = latent_upscaler.replace("city96.", "")

                    latent_upscale_function = city96_latent_upscaler.LatentUpscaler

                    nearest_scaling = min(self.city96_scalings_float, key=lambda x: abs(x - upscale_by))

                    nearest_scaling_index = self.city96_scalings_float.index(nearest_scaling)

                    nearest_scaling_raw = self.city96_scalings_raw[nearest_scaling_index]

                    upscale_by = float_to_string(upscale_by)

                    # Snap unsupported factors to the nearest supported one,
                    # warning the user about the substitution.
                    if upscale_by != nearest_scaling_raw:
                        print(f"{warning('HighRes-Fix Warning:')} "
                              f"When using 'city96.{latent_upscaler}', 'upscale_by' must be one of {self.city96_scalings_raw}.\n"
                              f"Rounding to the nearest valid value ({nearest_scaling_raw}).\033[0m")
                        upscale_by = nearest_scaling_raw

                # ttl_nn upscalers only support scale factors in [1, 2].
                elif latent_upscaler in self.ttl_nn_upscale_methods:

                    latent_upscaler = latent_upscaler.replace("ttl_nn.", "")

                    upscale_by_clamped = min(max(upscale_by, 1), 2)
                    if upscale_by != upscale_by_clamped:
                        print(f"{warning('HighRes-Fix Warning:')} "
                              f"When using 'ttl_nn.{latent_upscaler}', 'upscale_by' must be between 1 and 2.\n"
                              f"Rounding to the nearest valid value ({upscale_by_clamped}).\033[0m")
                        upscale_by = upscale_by_clamped

                    latent_upscale_function = ttl_nn_latent_upscaler.NNLatentUpscale

                elif latent_upscaler in self.default_latent_upscalers:
                    latent_upscale_function = LatentUpscaleBy

                else:
                    # Unknown method: fall back to the first built-in upscaler.
                    latent_upscale_function = LatentUpscaleBy
                    latent_upscaler = self.default_latent_upscalers[0]
                    print(f"{warning('HiResFix Script Warning:')} Chosen latent upscale method not found! "
                          f"defaulting to '{latent_upscaler}'.\n")

                # Optionally run the hires pass with a different checkpoint;
                # "(use same)" clears any previously cached hires checkpoint.
                if hires_ckpt_name == "(use same)":
                    clear_cache(my_unique_id, 0, "ckpt")
                else:
                    latent_upscale_model, _, _ = \
                        load_checkpoint(hires_ckpt_name, my_unique_id, output_vae=False, cache=1, cache_overwrite=True)

            elif upscale_type == "pixel":
                pixel_upscale_model = UpscaleModelLoader().load_model(pixel_upscaler)[0]

            elif upscale_type == "both":
                # "both" always uses the default latent method plus a pixel model.
                latent_upscale_function = LatentUpscaleBy
                latent_upscaler = self.default_latent_upscalers[0]
                pixel_upscale_model = UpscaleModelLoader().load_model(pixel_upscaler)[0]

                if hires_ckpt_name == "(use same)":
                    clear_cache(my_unique_id, 0, "ckpt")
                else:
                    latent_upscale_model, _, _ = \
                        load_checkpoint(hires_ckpt_name, my_unique_id, output_vae=False, cache=1, cache_overwrite=True)

        # Load the ControlNet model only when explicitly enabled.
        control_net = ControlNetLoader().load_controlnet(control_net_name)[0] if use_controlnet is True else None

        # Bundle everything into the script pipe under the "hiresfix" key.
        script = script or {}
        script["hiresfix"] = (upscale_type, latent_upscaler, upscale_by, use_same_seed, seed, hires_steps,
                              denoise, iterations, control_net, strength, preprocessor, preprocessor_imgs,
                              latent_upscale_function, latent_upscale_model, pixel_upscale_model)

        return (script,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_Tiled_Upscaler:
    """Script node that configures tiled-upscale sampling for the efficient samplers."""

    @classmethod
    def INPUT_TYPES(cls):
        # ControlNet filenames available for the optional tile controlnet.
        controlnet_names = [name for name in folder_paths.get_filename_list("controlnet")]

        return {"required": {"upscale_by": ("FLOAT", {"default": 1.25, "min": 0.01, "max": 8.0, "step": 0.05}),
                             "tile_size": ("INT", {"default": 512, "min": 256, "max": MAX_RESOLUTION, "step": 64}),
                             "tiling_strategy": (["random", "random strict", "padded", 'simple', 'none'],),
                             "tiling_steps": ("INT", {"default": 30, "min": 1, "max": 10000}),
                             "seed": ("INT", {"default": 0, "min": 0, "max": 0xffffffffffffffff}),
                             "denoise": ("FLOAT", {"default": .4, "min": 0.0, "max": 1.0, "step": 0.01}),
                             "use_controlnet": ("BOOLEAN", {"default": False}),
                             "tile_controlnet": (controlnet_names,),
                             "strength": ("FLOAT", {"default": 1.0, "min": 0.0, "max": 10.0, "step": 0.01}),
                             },
                "optional": {"script": ("SCRIPT",)}}

    RETURN_TYPES = ("SCRIPT",)
    FUNCTION = "tiled_sampling"
    CATEGORY = "Efficiency Nodes/Scripts"

    def tiled_sampling(self, upscale_by, tile_size, tiling_strategy, tiling_steps, seed, denoise,
                       use_controlnet, tile_controlnet, strength, script=None):
        """Record tiling settings under "tile"; strategy 'none' leaves the script untouched."""
        if tiling_strategy != 'none':
            script = script or {}
            # Load the tile ControlNet model only when requested.
            loaded_cnet = ControlNetLoader().load_controlnet(tile_controlnet)[0] if use_controlnet else None
            script["tile"] = (upscale_by, tile_size, tiling_strategy, tiling_steps, seed,
                              denoise, loaded_cnet, strength)
        return (script,)
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_LoRA_Stack2String:
    """Convert a LORA_STACK into "<lora:name:model:clip>" prompt syntax."""

    @classmethod
    def INPUT_TYPES(cls):
        return {"required": {"lora_stack": ("LORA_STACK",)}}

    RETURN_TYPES = ("STRING",)
    RETURN_NAMES = ("LoRA string",)
    FUNCTION = "convert"
    CATEGORY = "Efficiency Nodes/Misc"

    def convert(self, lora_stack):
        """Render each (name, model_strength, clip_strength) entry as a
        "<lora:...>" tag and join the tags with single spaces."""
        tags = []
        for entry in lora_stack:
            tags.append(f"<lora:{entry[0]}:{entry[1]}:{entry[2]}>")
        return (' '.join(tags),)
|
|
|
|
|
|
|
|
|
|
|
|
# Registry mapping each node's display name (as shown in the ComfyUI menu)
# to its implementing class.
NODE_CLASS_MAPPINGS = {
    "KSampler (Efficient)": TSC_KSampler,
    "KSampler Adv. (Efficient)":TSC_KSamplerAdvanced,
    "KSampler SDXL (Eff.)": TSC_KSamplerSDXL,
    "Efficient Loader": TSC_EfficientLoader,
    "Eff. Loader SDXL": TSC_EfficientLoaderSDXL,
    "LoRA Stacker": TSC_LoRA_Stacker,
    "Control Net Stacker": TSC_Control_Net_Stacker,
    "Apply ControlNet Stack": TSC_Apply_ControlNet_Stack,
    "Unpack SDXL Tuple": TSC_Unpack_SDXL_Tuple,
    "Pack SDXL Tuple": TSC_Pack_SDXL_Tuple,
    "XY Plot": TSC_XYplot,
    "XY Input: Seeds++ Batch": TSC_XYplot_SeedsBatch,
    "XY Input: Add/Return Noise": TSC_XYplot_AddReturnNoise,
    "XY Input: Steps": TSC_XYplot_Steps,
    "XY Input: CFG Scale": TSC_XYplot_CFG,
    "XY Input: Sampler/Scheduler": TSC_XYplot_Sampler_Scheduler,
    "XY Input: Denoise": TSC_XYplot_Denoise,
    "XY Input: VAE": TSC_XYplot_VAE,
    "XY Input: Prompt S/R": TSC_XYplot_PromptSR,
    "XY Input: Aesthetic Score": TSC_XYplot_AScore,
    "XY Input: Refiner On/Off": TSC_XYplot_Refiner_OnOff,
    "XY Input: Checkpoint": TSC_XYplot_Checkpoint,
    "XY Input: Clip Skip": TSC_XYplot_ClipSkip,
    "XY Input: LoRA": TSC_XYplot_LoRA,
    "XY Input: LoRA Plot": TSC_XYplot_LoRA_Plot,
    "XY Input: LoRA Stacks": TSC_XYplot_LoRA_Stacks,
    "XY Input: Control Net": TSC_XYplot_Control_Net,
    "XY Input: Control Net Plot": TSC_XYplot_Control_Net_Plot,
    "XY Input: Manual XY Entry": TSC_XYplot_Manual_XY_Entry,
    "Manual XY Entry Info": TSC_XYplot_Manual_XY_Entry_Info,
    "Join XY Inputs of Same Type": TSC_XYplot_JoinInputs,
    "Image Overlay": TSC_ImageOverlay,
    "Noise Control Script": TSC_Noise_Control_Script,
    "HighRes-Fix Script": TSC_HighRes_Fix,
    "Tiled Upscaler Script": TSC_Tiled_Upscaler,
    "LoRA Stack to String converter": TSC_LoRA_Stack2String
}
|
|
|
|
|
|
|
|
|
|
|
|
# Disabled integration kept for reference: the block below is wrapped in a
# bare string literal, so it never executes at import time (dead code).
"""
if os.path.exists(os.path.join(custom_nodes_dir, "ComfyUI-AnimateDiff-Evolved")):
    printout = "Attempting to add 'AnimatedDiff Script' Node (ComfyUI-AnimateDiff-Evolved add-on)..."
    print(f"{message('Efficiency Nodes:')} {printout}", end="")
    try:
        module = import_module("ComfyUI-AnimateDiff-Evolved.animatediff.nodes")
        AnimateDiffLoaderWithContext = getattr(module, 'AnimateDiffLoaderWithContext')
        AnimateDiffCombine = getattr(module, 'AnimateDiffCombine_Deprecated')
        print(f"\r{message('Efficiency Nodes:')} {printout}{success('Success!')}")

        # TSC AnimatedDiff Script (https://github.com/BlenderNeko/ComfyUI_TiledKSampler)
        class TSC_AnimateDiff_Script:
            @classmethod
            def INPUT_TYPES(cls):

                return {"required": {
                    "motion_model": AnimateDiffLoaderWithContext.INPUT_TYPES()["required"]["model_name"],
                    "beta_schedule": AnimateDiffLoaderWithContext.INPUT_TYPES()["required"]["beta_schedule"],
                    "frame_rate": AnimateDiffCombine.INPUT_TYPES()["required"]["frame_rate"],
                    "loop_count": AnimateDiffCombine.INPUT_TYPES()["required"]["loop_count"],
                    "format": AnimateDiffCombine.INPUT_TYPES()["required"]["format"],
                    "pingpong": AnimateDiffCombine.INPUT_TYPES()["required"]["pingpong"],
                    "save_image": AnimateDiffCombine.INPUT_TYPES()["required"]["save_image"]},
                    "optional": {"context_options": ("CONTEXT_OPTIONS",)}
                }

            RETURN_TYPES = ("SCRIPT",)
            FUNCTION = "animatediff"
            CATEGORY = "Efficiency Nodes/Scripts"

            def animatediff(self, motion_model, beta_schedule, frame_rate, loop_count, format, pingpong, save_image,
                            script=None, context_options=None):
                script = script or {}
                script["anim"] = (motion_model, beta_schedule, context_options, frame_rate, loop_count, format, pingpong, save_image)
                return (script,)

        NODE_CLASS_MAPPINGS.update({"AnimateDiff Script": TSC_AnimateDiff_Script})

    except Exception:
        print(f"\r{message('Efficiency Nodes:')} {printout}{error('Failed!')}")
"""
|
|
|
|
|
|
|
|
|
|
|
|
# The Simple Eval nodes depend on the optional third-party 'simpleeval' package.
# If the import fails, the matching `except ImportError` below prints a warning
# and skips registering these nodes instead of breaking the whole extension.
try:
    import simpleeval
|
|
|
|
|
|
|
|
|
class TSC_EvaluateInts:
|
|
|
@classmethod
|
|
|
def INPUT_TYPES(cls):
|
|
|
return {"required": {
|
|
|
"python_expression": ("STRING", {"default": "((a + b) - c) / 2", "multiline": False}),
|
|
|
"print_to_console": (["False", "True"],), },
|
|
|
"optional": {
|
|
|
"a": ("INT", {"default": 0, "min": -48000, "max": 48000, "step": 1}),
|
|
|
"b": ("INT", {"default": 0, "min": -48000, "max": 48000, "step": 1}),
|
|
|
"c": ("INT", {"default": 0, "min": -48000, "max": 48000, "step": 1}), },
|
|
|
}
|
|
|
|
|
|
RETURN_TYPES = ("INT", "FLOAT", "STRING",)
|
|
|
OUTPUT_NODE = True
|
|
|
FUNCTION = "evaluate"
|
|
|
CATEGORY = "Efficiency Nodes/Simple Eval"
|
|
|
|
|
|
def evaluate(self, python_expression, print_to_console, a=0, b=0, c=0):
|
|
|
|
|
|
result = simpleeval.simple_eval(python_expression, names={'a': a, 'b': b, 'c': c})
|
|
|
int_result = int(result)
|
|
|
float_result = float(result)
|
|
|
string_result = str(result)
|
|
|
if print_to_console == "True":
|
|
|
print(f"\n{error('Evaluate Integers:')}")
|
|
|
print(f"\033[90m{{a = {a} , b = {b} , c = {c}}} \033[0m")
|
|
|
print(f"{python_expression} = \033[92m INT: " + str(int_result) + " , FLOAT: " + str(
|
|
|
float_result) + ", STRING: " + string_result + "\033[0m")
|
|
|
return (int_result, float_result, string_result,)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_EvaluateFloats:
|
|
|
@classmethod
|
|
|
def INPUT_TYPES(cls):
|
|
|
return {"required": {
|
|
|
"python_expression": ("STRING", {"default": "((a + b) - c) / 2", "multiline": False}),
|
|
|
"print_to_console": (["False", "True"],), },
|
|
|
"optional": {
|
|
|
"a": ("FLOAT", {"default": 0, "min": -sys.float_info.max, "max": sys.float_info.max, "step": 1}),
|
|
|
"b": ("FLOAT", {"default": 0, "min": -sys.float_info.max, "max": sys.float_info.max, "step": 1}),
|
|
|
"c": ("FLOAT", {"default": 0, "min": -sys.float_info.max, "max": sys.float_info.max, "step": 1}), },
|
|
|
}
|
|
|
|
|
|
RETURN_TYPES = ("INT", "FLOAT", "STRING",)
|
|
|
OUTPUT_NODE = True
|
|
|
FUNCTION = "evaluate"
|
|
|
CATEGORY = "Efficiency Nodes/Simple Eval"
|
|
|
|
|
|
def evaluate(self, python_expression, print_to_console, a=0, b=0, c=0):
|
|
|
|
|
|
result = simpleeval.simple_eval(python_expression, names={'a': a, 'b': b, 'c': c})
|
|
|
int_result = int(result)
|
|
|
float_result = float(result)
|
|
|
string_result = str(result)
|
|
|
if print_to_console == "True":
|
|
|
print(f"\n{error('Evaluate Floats:')}")
|
|
|
print(f"\033[90m{{a = {a} , b = {b} , c = {c}}} \033[0m")
|
|
|
print(f"{python_expression} = \033[92m INT: " + str(int_result) + " , FLOAT: " + str(
|
|
|
float_result) + ", STRING: " + string_result + "\033[0m")
|
|
|
return (int_result, float_result, string_result,)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_EvaluateStrs:
|
|
|
@classmethod
|
|
|
def INPUT_TYPES(cls):
|
|
|
return {"required": {
|
|
|
"python_expression": ("STRING", {"default": "a + b + c", "multiline": False}),
|
|
|
"print_to_console": (["False", "True"],)},
|
|
|
"optional": {
|
|
|
"a": ("STRING", {"default": "Hello", "multiline": False}),
|
|
|
"b": ("STRING", {"default": " World", "multiline": False}),
|
|
|
"c": ("STRING", {"default": "!", "multiline": False}), }
|
|
|
}
|
|
|
|
|
|
RETURN_TYPES = ("STRING",)
|
|
|
OUTPUT_NODE = True
|
|
|
FUNCTION = "evaluate"
|
|
|
CATEGORY = "Efficiency Nodes/Simple Eval"
|
|
|
|
|
|
def evaluate(self, python_expression, print_to_console, a="", b="", c=""):
|
|
|
variables = {'a': a, 'b': b, 'c': c}
|
|
|
|
|
|
functions = simpleeval.DEFAULT_FUNCTIONS.copy()
|
|
|
functions.update({"len": len})
|
|
|
|
|
|
result = simpleeval.simple_eval(python_expression, names=variables, functions=functions)
|
|
|
if print_to_console == "True":
|
|
|
print(f"\n{error('Evaluate Strings:')}")
|
|
|
print(f"\033[90ma = {a} \nb = {b} \nc = {c}\033[0m")
|
|
|
print(f"{python_expression} = \033[92m" + str(result) + "\033[0m")
|
|
|
return (str(result),)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class TSC_EvalExamples:
|
|
|
@classmethod
|
|
|
def INPUT_TYPES(cls):
|
|
|
filepath = os.path.join(my_dir, 'workflows', 'SimpleEval_Node_Examples.txt')
|
|
|
with open(filepath, 'r') as file:
|
|
|
examples = file.read()
|
|
|
return {"required": {"models_text": ("STRING", {"default": examples, "multiline": True}), }, }
|
|
|
|
|
|
RETURN_TYPES = ()
|
|
|
CATEGORY = "Efficiency Nodes/Simple Eval"
|
|
|
|
|
|
|
|
|
NODE_CLASS_MAPPINGS.update({"Evaluate Integers": TSC_EvaluateInts})
|
|
|
NODE_CLASS_MAPPINGS.update({"Evaluate Floats": TSC_EvaluateFloats})
|
|
|
NODE_CLASS_MAPPINGS.update({"Evaluate Strings": TSC_EvaluateStrs})
|
|
|
NODE_CLASS_MAPPINGS.update({"Simple Eval Examples": TSC_EvalExamples})
|
|
|
|
|
|
# 'simpleeval' is an optional dependency: if it is not installed, warn the user
# and leave the Simple Eval nodes unregistered rather than crashing the import
# of the whole extension.
except ImportError:
    print(f"{warning('Efficiency Nodes Warning:')} Failed to import python package 'simpleeval'; related nodes disabled.\n")
|
|
|
|