"""
Node alias maps for ComfyUI workflow portability.
ComfyUI custom node ecosystems are not standardized; different InstantID and
face-restore packs register different class names. These alias maps let us
keep one "canonical" workflow while adapting at runtime to the installed node
pack.
Non-destructive:
- If the canonical name already exists in ComfyUI, nothing changes.
- If not, we try known alternatives.
- The returned dict records every replacement made (for logging/debugging).
"""
from __future__ import annotations
from typing import Any, Dict, Iterable, Tuple
# ---------------------------------------------------------------------------
# Alias tables — canonical name → ordered list of known alternatives
# ---------------------------------------------------------------------------
# Order matters: candidates are tried left-to-right and the first one that is
# actually registered wins.  The canonical name itself is always listed first
# so an exact match is preferred over any alias.
NODE_ALIAS_CANDIDATES: Dict[str, Tuple[str, ...]] = {
    # --- InstantID / InsightFace face analysis ---
    "InstantIDFaceAnalysis": (
        "InstantIDFaceAnalysis",
        "InstantIDFaceEmbedder",
        "InstantIDFaceEmbed",
        "InsightFaceAnalyzer",
        "InsightFaceFaceAnalysis",
        "FaceAnalysis",
    ),
    # --- InsightFace loader (some packs use this instead) ---
    "InsightFaceLoader": (
        "InsightFaceLoader",
        "InstantIDInsightFaceLoader",
        "FaceAnalysisLoader",
        "InsightFaceModelLoader",
    ),
    # --- InstantID model loader ---
    "InstantIDModelLoader": (
        "InstantIDModelLoader",
        "InstantIDLoader",
        "InstantIDIPAdapterLoader",
    ),
    # --- InstantID apply ---
    "ApplyInstantID": (
        "ApplyInstantID",
        "InstantIDApply",
        "InstantIDConditioning",
    ),
    # --- InstantID apply SDXL variant ---
    "ApplyInstantIDAdvanced": (
        "ApplyInstantIDAdvanced",
        "InstantIDApplySDXL",
        "ApplyInstantIDSDXL",
        "InstantIDConditioningSDXL",
    ),
    # --- GFPGAN / Face restore ---
    "FaceRestoreModelLoader": (
        "FaceRestoreModelLoader",
        "GFPGANLoader",
        "GFPGANModelLoader",
        "FaceRestorationModelLoader",
    ),
    "FaceRestoreWithModel": (
        "FaceRestoreWithModel",
        "GFPGAN",
        "ApplyGFPGAN",
        "FaceRestore",
    ),
    # --- Impact-Pack detectors (names may vary across versions) ---
    "UltralyticsDetectorProvider": (
        "UltralyticsDetectorProvider",
        # Some Impact-Pack versions / forks have slightly different naming.
        # Keeping these here makes workflows portable across ecosystems.
        "UltralyticsDetector",
        "UltralyticsDetectorLoader",
        "YOLODetectorProvider",
        "YoloDetectorProvider",
    ),
    # --- FaceDetailer (Impact-Pack core, names stable but aliased for safety) ---
    "FaceDetailer": (
        "FaceDetailer",
        "FaceDetailerV2",
        "FaceDetailerAdvanced",
    ),
}
def remap_workflow_nodes(
    workflow: Dict[str, Any],
    available_nodes: Iterable[str],
    alias_table: Dict[str, Tuple[str, ...]] | None = None,
) -> Dict[str, str]:
    """
    Rewrite ``class_type`` values **in place** using the alias table.

    For each node in the workflow whose ``class_type`` is not in
    *available_nodes*, search the alias table for an alternative that IS
    available and replace it.  The search matches a ``class_type`` that
    appears *anywhere* in a candidate group — not only as the canonical
    key — so a workflow authored against any known node pack can be
    remapped (e.g. ``"GFPGAN"`` -> ``"FaceRestoreWithModel"``).

    Args:
        workflow: ComfyUI API-format workflow; node dicts are mutated
            in place.
        available_nodes: class names registered in the running ComfyUI.
        alias_table: optional replacement for :data:`NODE_ALIAS_CANDIDATES`
            (useful for testing); ``None`` uses the module table.

    Returns:
        ``{old_name: new_name}`` for every replacement made (empty if none).
    """
    table = NODE_ALIAS_CANDIDATES if alias_table is None else alias_table
    available = set(available_nodes)

    # Reverse index: every known name (canonical or alias) -> its candidate
    # group.  First group wins if a name appears in more than one.
    group_of: Dict[str, Tuple[str, ...]] = {}
    for candidates in table.values():
        for name in candidates:
            group_of.setdefault(name, candidates)

    replacements: Dict[str, str] = {}
    for node in workflow.values():
        if not isinstance(node, dict):
            # API-format workflows may carry non-node metadata entries.
            continue
        ct = node.get("class_type")
        if not ct or ct in available:
            continue
        if ct in replacements:
            # Same class_type already resolved for an earlier node.
            node["class_type"] = replacements[ct]
            continue
        for alt in group_of.get(ct, ()):
            if alt in available:
                node["class_type"] = alt
                replacements[ct] = alt
                break
    return replacements
def find_missing_class_types(
    workflow: Dict[str, Any],
    available_nodes: Iterable[str],
) -> Tuple[str, ...]:
    """
    Return a sorted, de-duplicated tuple of ``class_type`` names that are
    present in *workflow* but absent from *available_nodes*.

    Should be called **after** :func:`remap_workflow_nodes` for an accurate
    picture of what is truly missing.
    """
    known = set(available_nodes)
    # Collect every truthy class_type used by the workflow's node dicts.
    used = {
        node["class_type"]
        for node in workflow.values()
        if isinstance(node, dict) and node.get("class_type")
    }
    return tuple(sorted(used - known))