kimwint committed on
Commit
7156c96
·
verified ·
1 Parent(s): da8bf00

Upload custom_nodes folder recursively

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +734 -0
  2. kim_comfyui_data/custom_nodes/Cheny_custom_nodes/__init__.py +33 -0
  3. kim_comfyui_data/custom_nodes/Cheny_custom_nodes/blendmodes.py +329 -0
  4. kim_comfyui_data/custom_nodes/Cheny_custom_nodes/face_create_hole.py +20 -0
  5. kim_comfyui_data/custom_nodes/Cheny_custom_nodes/find_crop_box.py +61 -0
  6. kim_comfyui_data/custom_nodes/Cheny_custom_nodes/half_person_check.py +256 -0
  7. kim_comfyui_data/custom_nodes/Cheny_custom_nodes/image_center_paste.py +118 -0
  8. kim_comfyui_data/custom_nodes/Cheny_custom_nodes/imagefunc.py +0 -0
  9. kim_comfyui_data/custom_nodes/Cheny_custom_nodes/mask_solid_area.py +68 -0
  10. kim_comfyui_data/custom_nodes/Cheny_custom_nodes/node.py +145 -0
  11. kim_comfyui_data/custom_nodes/Cheny_custom_nodes/reduce_mask_by_ratio.py +127 -0
  12. kim_comfyui_data/custom_nodes/Cheny_custom_nodes/yolov8_detect.py +106 -0
  13. kim_comfyui_data/custom_nodes/Cheny_custom_nodes/yolov8_person_detect.py +188 -0
  14. kim_comfyui_data/custom_nodes/Cheny_custom_nodes/yolov8_person_nomask.py +195 -0
  15. kim_comfyui_data/custom_nodes/ComfyLiterals/.gitignore +2 -0
  16. kim_comfyui_data/custom_nodes/ComfyLiterals/README.md +2 -0
  17. kim_comfyui_data/custom_nodes/ComfyLiterals/__init__.py +21 -0
  18. kim_comfyui_data/custom_nodes/ComfyLiterals/js/operation-node.js +83 -0
  19. kim_comfyui_data/custom_nodes/ComfyLiterals/nodes.py +136 -0
  20. kim_comfyui_data/custom_nodes/ComfyLiterals/operations.py +54 -0
  21. kim_comfyui_data/custom_nodes/ComfyLiterals/startup_utils.py +29 -0
  22. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/.github/workflows/publish.yml +28 -0
  23. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/.gitignore +18 -0
  24. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/README.md +183 -0
  25. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/__init__.py +109 -0
  26. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/icon.jpg +0 -0
  27. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKBase.jpg +0 -0
  28. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKBaseGallery.jpg +3 -0
  29. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKBasePIP.jpg +0 -0
  30. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKBase_big.jpg +0 -0
  31. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKCLIPEncodeMultiple.jpg +0 -0
  32. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKIndexMultiple.jpg +0 -0
  33. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKProjectSettingsOut.jpg +3 -0
  34. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKReplaceAlphaWithColor.jpg +0 -0
  35. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKResizeOnBoolean.jpg +0 -0
  36. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/CLIPTextEncodeAndCombineCached.jpg +0 -0
  37. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/MSamplers.jpg +0 -0
  38. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/MSamplersChooser.jpg +0 -0
  39. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/MSamplersSettings.jpg +0 -0
  40. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/MSamplers_big.jpg +3 -0
  41. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/ProjectSettings.jpg +0 -0
  42. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/ProjectSettingsOptions.jpg +0 -0
  43. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/ProjectSettings_big.jpg +3 -0
  44. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/preview_is_group_active.jpg +0 -0
  45. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/preview_repeater.jpg +0 -0
  46. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/js/AKBase.js +263 -0
  47. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/js/AKBase_input.js +599 -0
  48. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/js/AKBase_io.js +110 -0
  49. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/js/AKBase_pip.js +926 -0
  50. kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/js/AKBase_ui.js +392 -0
.gitattributes CHANGED
@@ -36,3 +36,737 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
36
  RMBG/BiRefNet/__pycache__/birefnet.cpython-312.pyc filter=lfs diff=lfs merge=lfs -text
37
  RMBG/RMBG-2.0/__pycache__/birefnet.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
38
  SDPose_OOD/SDPose-Wholebody/assets/wholebody_anno.png filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
36
  RMBG/BiRefNet/__pycache__/birefnet.cpython-312.pyc filter=lfs diff=lfs merge=lfs -text
37
  RMBG/RMBG-2.0/__pycache__/birefnet.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
38
  SDPose_OOD/SDPose-Wholebody/assets/wholebody_anno.png filter=lfs diff=lfs merge=lfs -text
39
+ kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKBaseGallery.jpg filter=lfs diff=lfs merge=lfs -text
40
+ kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKProjectSettingsOut.jpg filter=lfs diff=lfs merge=lfs -text
41
+ kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/MSamplers_big.jpg filter=lfs diff=lfs merge=lfs -text
42
+ kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/ProjectSettings_big.jpg filter=lfs diff=lfs merge=lfs -text
43
+ kim_comfyui_data/custom_nodes/ComfyUI-Addoor/example_workflow/12-22日更新.png filter=lfs diff=lfs merge=lfs -text
44
+ kim_comfyui_data/custom_nodes/ComfyUI-Addoor/example_workflow/Bizyair[[:space:]]Flux批量反推+生图.png filter=lfs diff=lfs merge=lfs -text
45
+ kim_comfyui_data/custom_nodes/ComfyUI-Addoor/example_workflow/RunningHub[[:space:]]API.jpg filter=lfs diff=lfs merge=lfs -text
46
+ kim_comfyui_data/custom_nodes/ComfyUI-Addoor/example_workflow/example_workflow.png filter=lfs diff=lfs merge=lfs -text
47
+ kim_comfyui_data/custom_nodes/ComfyUI-Addoor/example_workflow/提示词随机组合流.png filter=lfs diff=lfs merge=lfs -text
48
+ kim_comfyui_data/custom_nodes/ComfyUI-AutoCropFaces/Pytorch_Retinaface/images/test.jpg filter=lfs diff=lfs merge=lfs -text
49
+ kim_comfyui_data/custom_nodes/ComfyUI-AutoCropFaces/Pytorch_Retinaface/images/test.webp filter=lfs diff=lfs merge=lfs -text
50
+ kim_comfyui_data/custom_nodes/ComfyUI-AutoCropFaces/images/Crop_Data.png filter=lfs diff=lfs merge=lfs -text
51
+ kim_comfyui_data/custom_nodes/ComfyUI-AutoCropFaces/images/workflow-AutoCropFaces-Simple.png filter=lfs diff=lfs merge=lfs -text
52
+ kim_comfyui_data/custom_nodes/ComfyUI-AutoCropFaces/images/workflow-AutoCropFaces-bottom.png filter=lfs diff=lfs merge=lfs -text
53
+ kim_comfyui_data/custom_nodes/ComfyUI-AutoCropFaces/images/workflow-AutoCropFaces-with-Constrain.png filter=lfs diff=lfs merge=lfs -text
54
+ kim_comfyui_data/custom_nodes/ComfyUI-BiRefNet-Hugo/assets/2de5b085-1125-46f9-8ef3-06706743f182.png filter=lfs diff=lfs merge=lfs -text
55
+ kim_comfyui_data/custom_nodes/ComfyUI-BiRefNet-Hugo/assets/d0a22b2a-ceb3-4205-9b4e-f6a68e4337c7.png filter=lfs diff=lfs merge=lfs -text
56
+ kim_comfyui_data/custom_nodes/ComfyUI-BiRefNet-Hugo/assets/demo1.gif filter=lfs diff=lfs merge=lfs -text
57
+ kim_comfyui_data/custom_nodes/ComfyUI-BiRefNet-Hugo/assets/demo2.gif filter=lfs diff=lfs merge=lfs -text
58
+ kim_comfyui_data/custom_nodes/ComfyUI-BiRefNet-Hugo/assets/demo3.gif filter=lfs diff=lfs merge=lfs -text
59
+ kim_comfyui_data/custom_nodes/ComfyUI-BiRefNet-Hugo/assets/demo4.gif filter=lfs diff=lfs merge=lfs -text
60
+ kim_comfyui_data/custom_nodes/ComfyUI-BiRefNet-Hugo/example_workflows/image.webp filter=lfs diff=lfs merge=lfs -text
61
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/Framework.png filter=lfs diff=lfs merge=lfs -text
62
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/assets/Framework-v3.png filter=lfs diff=lfs merge=lfs -text
63
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/assets/Framework.png filter=lfs diff=lfs merge=lfs -text
64
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/assets/GenLabHistory.png filter=lfs diff=lfs merge=lfs -text
65
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/assets/Genlab.gif filter=lfs diff=lfs merge=lfs -text
66
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/assets/broken_workflow_funny.png filter=lfs diff=lfs merge=lfs -text
67
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/assets/comfycopilot_nodes_recommend.gif filter=lfs diff=lfs merge=lfs -text
68
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/assets/comfycopilot_nodes_search.gif filter=lfs diff=lfs merge=lfs -text
69
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/assets/comfyui_manager.png filter=lfs diff=lfs merge=lfs -text
70
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/assets/comfyui_manager_install.png filter=lfs diff=lfs merge=lfs -text
71
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/assets/comfyui_ui_icon.png filter=lfs diff=lfs merge=lfs -text
72
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/assets/debug.gif filter=lfs diff=lfs merge=lfs -text
73
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/assets/discordqrcode.png filter=lfs diff=lfs merge=lfs -text
74
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/assets/expert_add.jpg filter=lfs diff=lfs merge=lfs -text
75
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/assets/gif.gif filter=lfs diff=lfs merge=lfs -text
76
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/assets/keygen.png filter=lfs diff=lfs merge=lfs -text
77
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/assets/qrcode.png filter=lfs diff=lfs merge=lfs -text
78
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/assets/start.jpg filter=lfs diff=lfs merge=lfs -text
79
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/backend/data/workflow_debug.db filter=lfs diff=lfs merge=lfs -text
80
+ kim_comfyui_data/custom_nodes/ComfyUI-Copilot/dist/copilot_web/assets/gif-Dx_BC5pS.gif filter=lfs diff=lfs merge=lfs -text
81
+ kim_comfyui_data/custom_nodes/ComfyUI-Frame-Interpolation/All_in_one_v1_3.png filter=lfs diff=lfs merge=lfs -text
82
+ kim_comfyui_data/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/anime0.png filter=lfs diff=lfs merge=lfs -text
83
+ kim_comfyui_data/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/anime1.png filter=lfs diff=lfs merge=lfs -text
84
+ kim_comfyui_data/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/bocchi0.jpg filter=lfs diff=lfs merge=lfs -text
85
+ kim_comfyui_data/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/bocchi1.jpg filter=lfs diff=lfs merge=lfs -text
86
+ kim_comfyui_data/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/real0.png filter=lfs diff=lfs merge=lfs -text
87
+ kim_comfyui_data/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/real1.png filter=lfs diff=lfs merge=lfs -text
88
+ kim_comfyui_data/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/rick/00003.png filter=lfs diff=lfs merge=lfs -text
89
+ kim_comfyui_data/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/rick/00004.png filter=lfs diff=lfs merge=lfs -text
90
+ kim_comfyui_data/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/rick/00005.png filter=lfs diff=lfs merge=lfs -text
91
+ kim_comfyui_data/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/violet0.png filter=lfs diff=lfs merge=lfs -text
92
+ kim_comfyui_data/custom_nodes/ComfyUI-Frame-Interpolation/demo_frames/violet1.png filter=lfs diff=lfs merge=lfs -text
93
+ kim_comfyui_data/custom_nodes/ComfyUI-Frame-Interpolation/example.png filter=lfs diff=lfs merge=lfs -text
94
+ kim_comfyui_data/custom_nodes/ComfyUI-Frame-Interpolation/interpolation_schedule.png filter=lfs diff=lfs merge=lfs -text
95
+ kim_comfyui_data/custom_nodes/ComfyUI-Frame-Interpolation/test_vfi_schedule.gif filter=lfs diff=lfs merge=lfs -text
96
+ kim_comfyui_data/custom_nodes/ComfyUI-Image-Filters/toy.png filter=lfs diff=lfs merge=lfs -text
97
+ kim_comfyui_data/custom_nodes/ComfyUI-InpaintEasy/example/InpaintEasy-CY-V1.png filter=lfs diff=lfs merge=lfs -text
98
+ kim_comfyui_data/custom_nodes/ComfyUI-KJNodes/docs/images/2024-04-03_20_49_29-ComfyUI.png filter=lfs diff=lfs merge=lfs -text
99
+ kim_comfyui_data/custom_nodes/ComfyUI-KJNodes/fonts/FreeMono.ttf filter=lfs diff=lfs merge=lfs -text
100
+ kim_comfyui_data/custom_nodes/ComfyUI-KJNodes/fonts/FreeMonoBoldOblique.otf filter=lfs diff=lfs merge=lfs -text
101
+ kim_comfyui_data/custom_nodes/ComfyUI-KJNodes/fonts/TTNorms-Black.otf filter=lfs diff=lfs merge=lfs -text
102
+ kim_comfyui_data/custom_nodes/ComfyUI-SAM3/assets/bedroom.mp4 filter=lfs diff=lfs merge=lfs -text
103
+ kim_comfyui_data/custom_nodes/ComfyUI-SAM3/assets/groceries.jpg filter=lfs diff=lfs merge=lfs -text
104
+ kim_comfyui_data/custom_nodes/ComfyUI-SAM3/assets/image.png filter=lfs diff=lfs merge=lfs -text
105
+ kim_comfyui_data/custom_nodes/ComfyUI-SAM3/docs/bbox.png filter=lfs diff=lfs merge=lfs -text
106
+ kim_comfyui_data/custom_nodes/ComfyUI-SAM3/docs/point.png filter=lfs diff=lfs merge=lfs -text
107
+ kim_comfyui_data/custom_nodes/ComfyUI-SAM3/docs/text_prompt.png filter=lfs diff=lfs merge=lfs -text
108
+ kim_comfyui_data/custom_nodes/ComfyUI-SAM3/docs/video.png filter=lfs diff=lfs merge=lfs -text
109
+ kim_comfyui_data/custom_nodes/ComfyUI-SDPose-OOD/example.png filter=lfs diff=lfs merge=lfs -text
110
+ kim_comfyui_data/custom_nodes/ComfyUI-SDPose-OOD/mmcv-2.1.0-py2.py3-none-any.whl filter=lfs diff=lfs merge=lfs -text
111
+ kim_comfyui_data/custom_nodes/ComfyUI-SaveAsScript/images/SDXL-UI-Example.PNG filter=lfs diff=lfs merge=lfs -text
112
+ kim_comfyui_data/custom_nodes/ComfyUI-SaveAsScript/images/default_altered.png filter=lfs diff=lfs merge=lfs -text
113
+ kim_comfyui_data/custom_nodes/ComfyUI-SeedVR2_VideoUpscaler/docs/demo_01.jpg filter=lfs diff=lfs merge=lfs -text
114
+ kim_comfyui_data/custom_nodes/ComfyUI-SeedVR2_VideoUpscaler/docs/demo_02.jpg filter=lfs diff=lfs merge=lfs -text
115
+ kim_comfyui_data/custom_nodes/ComfyUI-SeedVR2_VideoUpscaler/docs/usage_01.png filter=lfs diff=lfs merge=lfs -text
116
+ kim_comfyui_data/custom_nodes/ComfyUI-SeedVR2_VideoUpscaler/docs/usage_02.png filter=lfs diff=lfs merge=lfs -text
117
+ kim_comfyui_data/custom_nodes/ComfyUI-SeedVR2_VideoUpscaler/example_workflows/example_inputs/Mustache_640x360.mp4 filter=lfs diff=lfs merge=lfs -text
118
+ kim_comfyui_data/custom_nodes/ComfyUI-SeedVR2_VideoUpscaler/example_workflows/example_inputs/Sadhu_320x478.png filter=lfs diff=lfs merge=lfs -text
119
+ kim_comfyui_data/custom_nodes/ComfyUI-TeaCache/assets/compare_flux.png filter=lfs diff=lfs merge=lfs -text
120
+ kim_comfyui_data/custom_nodes/ComfyUI-TeaCache/assets/compare_flux_kontext.png filter=lfs diff=lfs merge=lfs -text
121
+ kim_comfyui_data/custom_nodes/ComfyUI-TeaCache/assets/compare_hidream_i1_dev.png filter=lfs diff=lfs merge=lfs -text
122
+ kim_comfyui_data/custom_nodes/ComfyUI-TeaCache/assets/compare_hidream_i1_fast.png filter=lfs diff=lfs merge=lfs -text
123
+ kim_comfyui_data/custom_nodes/ComfyUI-TeaCache/assets/compare_hidream_i1_full.png filter=lfs diff=lfs merge=lfs -text
124
+ kim_comfyui_data/custom_nodes/ComfyUI-TeaCache/assets/compare_lumina_image_2.png filter=lfs diff=lfs merge=lfs -text
125
+ kim_comfyui_data/custom_nodes/ComfyUI-TeaCache/assets/compare_pulid_flux.png filter=lfs diff=lfs merge=lfs -text
126
+ kim_comfyui_data/custom_nodes/ComfyUI-TeaCache/assets/fennec_girl_sing.png filter=lfs diff=lfs merge=lfs -text
127
+ kim_comfyui_data/custom_nodes/ComfyUI-TeaCache/assets/flux_dev_example.png filter=lfs diff=lfs merge=lfs -text
128
+ kim_comfyui_data/custom_nodes/ComfyUI-WanAnimatePreprocess/example.png filter=lfs diff=lfs merge=lfs -text
129
+ kim_comfyui_data/custom_nodes/ComfyUI-WanVideoWrapper/configs/T5_tokenizer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
130
+ kim_comfyui_data/custom_nodes/ComfyUI-WanVideoWrapper/example_workflows/example_inputs/MTV_crafter_example_pose.mp4 filter=lfs diff=lfs merge=lfs -text
131
+ kim_comfyui_data/custom_nodes/ComfyUI-WanVideoWrapper/example_workflows/example_inputs/env.png filter=lfs diff=lfs merge=lfs -text
132
+ kim_comfyui_data/custom_nodes/ComfyUI-WanVideoWrapper/example_workflows/example_inputs/human.png filter=lfs diff=lfs merge=lfs -text
133
+ kim_comfyui_data/custom_nodes/ComfyUI-WanVideoWrapper/example_workflows/example_inputs/jeep.mp4 filter=lfs diff=lfs merge=lfs -text
134
+ kim_comfyui_data/custom_nodes/ComfyUI-WanVideoWrapper/example_workflows/example_inputs/wolf_interpolated.mp4 filter=lfs diff=lfs merge=lfs -text
135
+ kim_comfyui_data/custom_nodes/ComfyUI-WanVideoWrapper/example_workflows/example_inputs/woman.jpg filter=lfs diff=lfs merge=lfs -text
136
+ kim_comfyui_data/custom_nodes/ComfyUI-WanVideoWrapper/example_workflows/example_inputs/woman.wav filter=lfs diff=lfs merge=lfs -text
137
+ kim_comfyui_data/custom_nodes/ComfyUI-to-Python-Extension/images/SDXL-UI-Example.PNG filter=lfs diff=lfs merge=lfs -text
138
+ kim_comfyui_data/custom_nodes/ComfyUI-to-Python-Extension/images/comfyui_to_python_banner.png filter=lfs diff=lfs merge=lfs -text
139
+ kim_comfyui_data/custom_nodes/ComfyUI-uinodesDOC/assets/20250625_181943.gif filter=lfs diff=lfs merge=lfs -text
140
+ kim_comfyui_data/custom_nodes/ComfyUI-uinodesDOC/assets/image-20250625181804375.png filter=lfs diff=lfs merge=lfs -text
141
+ kim_comfyui_data/custom_nodes/ComfyUI-utils-nodes/assets/deepfaceAnalyzeFaceAttributes.png filter=lfs diff=lfs merge=lfs -text
142
+ kim_comfyui_data/custom_nodes/ComfyUI-utils-nodes/assets/detectorForNSFW.png filter=lfs diff=lfs merge=lfs -text
143
+ kim_comfyui_data/custom_nodes/ComfyUI-utils-nodes/assets/maskFromFacemodel.png filter=lfs diff=lfs merge=lfs -text
144
+ kim_comfyui_data/custom_nodes/ComfyUI_Comfyroll_CustomNodes/fonts/AlumniSansCollegiateOne-Regular.ttf filter=lfs diff=lfs merge=lfs -text
145
+ kim_comfyui_data/custom_nodes/ComfyUI_Comfyroll_CustomNodes/fonts/Caveat-VariableFont_wght.ttf filter=lfs diff=lfs merge=lfs -text
146
+ kim_comfyui_data/custom_nodes/ComfyUI_Comfyroll_CustomNodes/fonts/NotoSansArabic-Regular.ttf filter=lfs diff=lfs merge=lfs -text
147
+ kim_comfyui_data/custom_nodes/ComfyUI_Comfyroll_CustomNodes/fonts/Roboto-Regular.ttf filter=lfs diff=lfs merge=lfs -text
148
+ kim_comfyui_data/custom_nodes/ComfyUI_Comfyroll_CustomNodes/fonts/YoungSerif-Regular.ttf filter=lfs diff=lfs merge=lfs -text
149
+ kim_comfyui_data/custom_nodes/ComfyUI_Comfyroll_CustomNodes/fonts/comic.ttf filter=lfs diff=lfs merge=lfs -text
150
+ kim_comfyui_data/custom_nodes/ComfyUI_Comfyroll_CustomNodes/fonts/impact.ttf filter=lfs diff=lfs merge=lfs -text
151
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/font/Alibaba-PuHuiTi-Heavy.ttf filter=lfs diff=lfs merge=lfs -text
152
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/birefnet_ultra_example.jpg filter=lfs diff=lfs merge=lfs -text
153
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/birefnet_ultra_v2_example.jpg filter=lfs diff=lfs merge=lfs -text
154
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/birefnet_ultra_v2_node.jpg filter=lfs diff=lfs merge=lfs -text
155
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/blend_mode_result.jpg filter=lfs diff=lfs merge=lfs -text
156
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/blend_mode_v2_example.jpg filter=lfs diff=lfs merge=lfs -text
157
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/collage_example.jpg filter=lfs diff=lfs merge=lfs -text
158
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/collage_node.jpg filter=lfs diff=lfs merge=lfs -text
159
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/data_nodes_example.jpg filter=lfs diff=lfs merge=lfs -text
160
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/deepseek_api_example.jpg filter=lfs diff=lfs merge=lfs -text
161
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/deepseek_api_node.jpg filter=lfs diff=lfs merge=lfs -text
162
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/deepseek_api_v2_node.jpg filter=lfs diff=lfs merge=lfs -text
163
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/draw_bbox_mask_example.jpg filter=lfs diff=lfs merge=lfs -text
164
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/draw_bbox_mask_v2_example.jpg filter=lfs diff=lfs merge=lfs -text
165
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/draw_bbox_mask_v2_node.jpg filter=lfs diff=lfs merge=lfs -text
166
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/evf_sam_ultra_example.jpg filter=lfs diff=lfs merge=lfs -text
167
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/evf_sam_ultra_node.jpg filter=lfs diff=lfs merge=lfs -text
168
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/florence2_image2prompt_example.jpg filter=lfs diff=lfs merge=lfs -text
169
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/florence2_ultra_example.jpg filter=lfs diff=lfs merge=lfs -text
170
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/gemini_example.jpg filter=lfs diff=lfs merge=lfs -text
171
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/gemini_image_edit_example.jpg filter=lfs diff=lfs merge=lfs -text
172
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/gemini_image_edit_node.jpg filter=lfs diff=lfs merge=lfs -text
173
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/gemini_node.jpg filter=lfs diff=lfs merge=lfs -text
174
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/gemini_v2_example.jpg filter=lfs diff=lfs merge=lfs -text
175
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/gemini_v2_node.jpg filter=lfs diff=lfs merge=lfs -text
176
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/get_color_tone_example.jpg filter=lfs diff=lfs merge=lfs -text
177
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/get_color_tone_v2_example.jpg filter=lfs diff=lfs merge=lfs -text
178
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/human_parts_node.jpg filter=lfs diff=lfs merge=lfs -text
179
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/human_parts_ultra_example.jpg filter=lfs diff=lfs merge=lfs -text
180
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/image_auto_crop_example.jpg filter=lfs diff=lfs merge=lfs -text
181
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/image_auto_crop_node.jpg filter=lfs diff=lfs merge=lfs -text
182
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/image_auto_crop_v2_node.jpg filter=lfs diff=lfs merge=lfs -text
183
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/image_auto_crop_v3_node.jpg filter=lfs diff=lfs merge=lfs -text
184
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/image_reward_filter_example.jpg filter=lfs diff=lfs merge=lfs -text
185
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/jimeng_image_to_image_api_example.jpg filter=lfs diff=lfs merge=lfs -text
186
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/jimeng_image_to_image_api_node.jpg filter=lfs diff=lfs merge=lfs -text
187
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/joycaption2_example.jpg filter=lfs diff=lfs merge=lfs -text
188
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/joycaption2_extra_options_node.jpg filter=lfs diff=lfs merge=lfs -text
189
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/joycaption2_node.jpg filter=lfs diff=lfs merge=lfs -text
190
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/joycaption_beta1_example.jpg filter=lfs diff=lfs merge=lfs -text
191
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/joycaption_beta1_extra_options_node.jpg filter=lfs diff=lfs merge=lfs -text
192
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/joycaption_beta_1_node.jpg filter=lfs diff=lfs merge=lfs -text
193
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/lama_example.jpg filter=lfs diff=lfs merge=lfs -text
194
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/light_leak_example.jpg filter=lfs diff=lfs merge=lfs -text
195
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/llama_vision_example.jpg filter=lfs diff=lfs merge=lfs -text
196
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/llama_vision_node.jpg filter=lfs diff=lfs merge=lfs -text
197
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/load_image_example.jpg filter=lfs diff=lfs merge=lfs -text
198
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/load_image_example_psd_file.jpg filter=lfs diff=lfs merge=lfs -text
199
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/load_joycaption_beta1_model_node.jpg filter=lfs diff=lfs merge=lfs -text
200
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/load_segmentanything_model_node.jpg filter=lfs diff=lfs merge=lfs -text
201
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/mask_by_different_example.jpg filter=lfs diff=lfs merge=lfs -text
202
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/mask_by_different_node.jpg filter=lfs diff=lfs merge=lfs -text
203
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/object_detector_gemini_example.jpg filter=lfs diff=lfs merge=lfs -text
204
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/object_detector_yolo_world_node.jpg filter=lfs diff=lfs merge=lfs -text
205
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/outer_glow_example.jpg filter=lfs diff=lfs merge=lfs -text
206
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/person_mask_ultra_example.jpg filter=lfs diff=lfs merge=lfs -text
207
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/person_mask_ultra_node.jpg filter=lfs diff=lfs merge=lfs -text
208
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/phi_prompt_example.jpg filter=lfs diff=lfs merge=lfs -text
209
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/phi_prompt_node.jpg filter=lfs diff=lfs merge=lfs -text
210
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/prompt_embellish_example.jpg filter=lfs diff=lfs merge=lfs -text
211
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/prompt_tagger_example.jpg filter=lfs diff=lfs merge=lfs -text
212
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/prompt_tagger_example1.jpg filter=lfs diff=lfs merge=lfs -text
213
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/qwen_image2prompt_example.jpg filter=lfs diff=lfs merge=lfs -text
214
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/sam2_example.jpg filter=lfs diff=lfs merge=lfs -text
215
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/sam2_ultra_node.jpg filter=lfs diff=lfs merge=lfs -text
216
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/sam2_ultra_v2_node.jpg filter=lfs diff=lfs merge=lfs -text
217
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/sam2_video_ultra_node.jpg filter=lfs diff=lfs merge=lfs -text
218
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/saveimage_plus_example.jpg filter=lfs diff=lfs merge=lfs -text
219
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/saveimage_plus_v2_node.jpg filter=lfs diff=lfs merge=lfs -text
220
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/sd3_negative_conditioning_example.jpg filter=lfs diff=lfs merge=lfs -text
221
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/sd3_negative_conditioning_node_note.jpg filter=lfs diff=lfs merge=lfs -text
222
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/segment_anything_ultra_compare.jpg filter=lfs diff=lfs merge=lfs -text
223
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/segment_anything_ultra_example.jpg filter=lfs diff=lfs merge=lfs -text
224
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/segment_anything_ultra_node.jpg filter=lfs diff=lfs merge=lfs -text
225
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/segment_anything_ultra_v2_node.jpg filter=lfs diff=lfs merge=lfs -text
226
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/segment_anything_ultra_v3_example.jpg filter=lfs diff=lfs merge=lfs -text
227
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/segment_anything_ultra_v3_node.jpg filter=lfs diff=lfs merge=lfs -text
228
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/smollm2_example.jpg filter=lfs diff=lfs merge=lfs -text
229
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/smollm2_node.jpg filter=lfs diff=lfs merge=lfs -text
230
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/smolvlm_example.jpg filter=lfs diff=lfs merge=lfs -text
231
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/transparent_background_ultra_example.jpg filter=lfs diff=lfs merge=lfs -text
232
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/ultra_v2_nodes_example.jpg filter=lfs diff=lfs merge=lfs -text
233
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/userprompt_generator_txt2img_with_reference_node.jpg filter=lfs diff=lfs merge=lfs -text
234
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/water_color_example.jpg filter=lfs diff=lfs merge=lfs -text
235
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/watermark_example.jpg filter=lfs diff=lfs merge=lfs -text
236
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/yolov8_detect_example.jpg filter=lfs diff=lfs merge=lfs -text
237
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/zhipuglm4_example.jpg filter=lfs diff=lfs merge=lfs -text
238
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/zhipuglm4_node.jpg filter=lfs diff=lfs merge=lfs -text
239
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/image/zhipuglm4v_node.jpg filter=lfs diff=lfs merge=lfs -text
240
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-Black-jiII8dog.woff2 filter=lfs diff=lfs merge=lfs -text
241
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-BlackItalic-1413vuen.woff2 filter=lfs diff=lfs merge=lfs -text
242
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-Bold-srYz_-1B.woff2 filter=lfs diff=lfs merge=lfs -text
243
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-BoldItalic-dE_gZyur.woff2 filter=lfs diff=lfs merge=lfs -text
244
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-ExtraBold-TduDdwUu.woff2 filter=lfs diff=lfs merge=lfs -text
245
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-ExtraBoldItalic-BJafRE5I.woff2 filter=lfs diff=lfs merge=lfs -text
246
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-ExtraLight-w5HAp5iF.woff2 filter=lfs diff=lfs merge=lfs -text
247
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-ExtraLightItalic-ZptecSuc.woff2 filter=lfs diff=lfs merge=lfs -text
248
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-Italic-f6M78thn.woff2 filter=lfs diff=lfs merge=lfs -text
249
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-Light-DFhX0qo-.woff2 filter=lfs diff=lfs merge=lfs -text
250
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-LightItalic-fu56_DRc.woff2 filter=lfs diff=lfs merge=lfs -text
251
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-Medium-dDRaJ8tM.woff2 filter=lfs diff=lfs merge=lfs -text
252
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-MediumItalic-zr3roggP.woff2 filter=lfs diff=lfs merge=lfs -text
253
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-Regular-dEFHw1tF.woff2 filter=lfs diff=lfs merge=lfs -text
254
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-SemiBold-PyS8DO2L.woff2 filter=lfs diff=lfs merge=lfs -text
255
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-SemiBoldItalic-uIDb7hsH.woff2 filter=lfs diff=lfs merge=lfs -text
256
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-Thin-eKObIkJC.woff2 filter=lfs diff=lfs merge=lfs -text
257
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/Inter-ThinItalic-L6uBn3RP.woff2 filter=lfs diff=lfs merge=lfs -text
258
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/py/iopaint/web_app/assets/kofi_button_black-XI_Dr2zg.png filter=lfs diff=lfs merge=lfs -text
259
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/whl/hydra_core-1.3.2-py3-none-any.whl filter=lfs diff=lfs merge=lfs -text
260
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/workflow/1280x720_seven_person.jpg filter=lfs diff=lfs merge=lfs -text
261
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/workflow/1280x720car.jpg filter=lfs diff=lfs merge=lfs -text
262
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/workflow/1280x768_city.png filter=lfs diff=lfs merge=lfs -text
263
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/workflow/1344x768_beach.png filter=lfs diff=lfs merge=lfs -text
264
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/workflow/1344x768_girl2.png filter=lfs diff=lfs merge=lfs -text
265
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/workflow/1344x768_hair.png filter=lfs diff=lfs merge=lfs -text
266
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/workflow/1344x768_redcar.png filter=lfs diff=lfs merge=lfs -text
267
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/workflow/1920x1080table.png filter=lfs diff=lfs merge=lfs -text
268
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/workflow/3840x2160car.jpg filter=lfs diff=lfs merge=lfs -text
269
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/workflow/512x512.png filter=lfs diff=lfs merge=lfs -text
270
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/workflow/768x1344_beach.png filter=lfs diff=lfs merge=lfs -text
271
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/workflow/768x1344_dress.png filter=lfs diff=lfs merge=lfs -text
272
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/workflow/fox_512x512.png filter=lfs diff=lfs merge=lfs -text
273
+ kim_comfyui_data/custom_nodes/ComfyUI_LayerStyle_Advance/workflow/girl_dino_1024.png filter=lfs diff=lfs merge=lfs -text
274
+ kim_comfyui_data/custom_nodes/Comfyui-Memory_Cleanup/1.png filter=lfs diff=lfs merge=lfs -text
275
+ kim_comfyui_data/custom_nodes/Comfyui-QwenEditUtils/Demo.mp4 filter=lfs diff=lfs merge=lfs -text
276
+ kim_comfyui_data/custom_nodes/Comfyui-QwenEditUtils/example.png filter=lfs diff=lfs merge=lfs -text
277
+ kim_comfyui_data/custom_nodes/Comfyui-QwenEditUtils/mask_example.png filter=lfs diff=lfs merge=lfs -text
278
+ kim_comfyui_data/custom_nodes/Comfyui-QwenEditUtils/mask_vs_no_mask.png filter=lfs diff=lfs merge=lfs -text
279
+ kim_comfyui_data/custom_nodes/Comfyui-QwenEditUtils/result.png filter=lfs diff=lfs merge=lfs -text
280
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/1细节优化.png filter=lfs diff=lfs merge=lfs -text
281
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/1细节优化2.png filter=lfs diff=lfs merge=lfs -text
282
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/2固定提示词大师.png filter=lfs diff=lfs merge=lfs -text
283
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/2自定义随机提示词大师.png filter=lfs diff=lfs merge=lfs -text
284
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/3水印大师.png filter=lfs diff=lfs merge=lfs -text
285
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/4图像尺寸获取.png filter=lfs diff=lfs merge=lfs -text
286
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/4图像指定保存路径.png filter=lfs diff=lfs merge=lfs -text
287
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/4图像镜像翻转.png filter=lfs diff=lfs merge=lfs -text
288
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/4滤镜.png filter=lfs diff=lfs merge=lfs -text
289
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/4颜色迁移.png filter=lfs diff=lfs merge=lfs -text
290
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/5遮罩处理.png filter=lfs diff=lfs merge=lfs -text
291
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/5遮罩处理2.png filter=lfs diff=lfs merge=lfs -text
292
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/5遮罩羽化.png filter=lfs diff=lfs merge=lfs -text
293
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/LOGO2.png filter=lfs diff=lfs merge=lfs -text
294
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/workflow.png filter=lfs diff=lfs merge=lfs -text
295
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/修复前原图.png filter=lfs diff=lfs merge=lfs -text
296
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/修复后.png filter=lfs diff=lfs merge=lfs -text
297
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/修复后2.png filter=lfs diff=lfs merge=lfs -text
298
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/内补前.png filter=lfs diff=lfs merge=lfs -text
299
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/内补后.png filter=lfs diff=lfs merge=lfs -text
300
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/加载任意图像.png filter=lfs diff=lfs merge=lfs -text
301
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/加载任意图像2.png filter=lfs diff=lfs merge=lfs -text
302
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/docs/局部修复后.png filter=lfs diff=lfs merge=lfs -text
303
+ kim_comfyui_data/custom_nodes/Comfyui-ergouzi-Nodes/fonts/优设标题黑.ttf filter=lfs diff=lfs merge=lfs -text
304
+ kim_comfyui_data/custom_nodes/Comfyui_TTP_Toolset/examples/Flux_8Mega_Pixel_image_upscale_process.png filter=lfs diff=lfs merge=lfs -text
305
+ kim_comfyui_data/custom_nodes/Comfyui_TTP_Toolset/examples/Flux_8Mega_Pixel_image_upscale_process_pixel.png filter=lfs diff=lfs merge=lfs -text
306
+ kim_comfyui_data/custom_nodes/Comfyui_TTP_Toolset/examples/Hunyuan_8Mega_Pixel_image_upscale_process_with_tile_cn.png filter=lfs diff=lfs merge=lfs -text
307
+ kim_comfyui_data/custom_nodes/LanPaint/Example.JPG filter=lfs diff=lfs merge=lfs -text
308
+ kim_comfyui_data/custom_nodes/LanPaint/Nodes.JPG filter=lfs diff=lfs merge=lfs -text
309
+ kim_comfyui_data/custom_nodes/LanPaint/example_workflows/Flux_Inpaint.jpg filter=lfs diff=lfs merge=lfs -text
310
+ kim_comfyui_data/custom_nodes/LanPaint/example_workflows/Hunyuan_Inpaint.jpg filter=lfs diff=lfs merge=lfs -text
311
+ kim_comfyui_data/custom_nodes/LanPaint/example_workflows/Masked_Qwen_Image_Edit.jpg filter=lfs diff=lfs merge=lfs -text
312
+ kim_comfyui_data/custom_nodes/LanPaint/example_workflows/Masked_Qwen_Image_Edit_2509.jpg filter=lfs diff=lfs merge=lfs -text
313
+ kim_comfyui_data/custom_nodes/LanPaint/example_workflows/Qwen_Image_Inpaint.jpg filter=lfs diff=lfs merge=lfs -text
314
+ kim_comfyui_data/custom_nodes/LanPaint/example_workflows/Qwen_Image_Outpaint.jpg filter=lfs diff=lfs merge=lfs -text
315
+ kim_comfyui_data/custom_nodes/LanPaint/example_workflows/SDXL_Inpaint.jpg filter=lfs diff=lfs merge=lfs -text
316
+ kim_comfyui_data/custom_nodes/LanPaint/example_workflows/Z_image_Inpaint.jpg filter=lfs diff=lfs merge=lfs -text
317
+ kim_comfyui_data/custom_nodes/LanPaint/example_workflows/wan2_2_T2I_Inpaint.jpg filter=lfs diff=lfs merge=lfs -text
318
+ kim_comfyui_data/custom_nodes/LanPaint/example_workflows/wan2_2_T2I_Partial_Inpaint.jpg filter=lfs diff=lfs merge=lfs -text
319
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_1/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
320
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_1/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
321
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_1/Original_No_Mask.png filter=lfs diff=lfs merge=lfs -text
322
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_10/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
323
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_10/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
324
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_11/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
325
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_11/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
326
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_12/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
327
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_12/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
328
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_13/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
329
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_13/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
330
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_13/Original_No_Mask.png filter=lfs diff=lfs merge=lfs -text
331
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_14/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
332
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_14/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
333
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_14/Original_No_Mask.png filter=lfs diff=lfs merge=lfs -text
334
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_14/QwenEdit_2509_InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
335
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_15/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
336
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_15/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
337
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_15/Original_No_Mask.png filter=lfs diff=lfs merge=lfs -text
338
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_16/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
339
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_16/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
340
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_16/Original_No_Mask.png filter=lfs diff=lfs merge=lfs -text
341
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_17/Inpainted_40frames_Drag_Me_to_ComfyUI.mp4 filter=lfs diff=lfs merge=lfs -text
342
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_17/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
343
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_17/Original_No_Mask.mp4 filter=lfs diff=lfs merge=lfs -text
344
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_17/Wan22_[[:space:]]5B_Inpainted_40frames_Drag_Me_to_ComfyUI.mp4 filter=lfs diff=lfs merge=lfs -text
345
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_18/Inpainted_40frames_Drag_Me_to_ComfyUI.mp4 filter=lfs diff=lfs merge=lfs -text
346
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_18/Inpainted_81frames_Drag_Me_to_ComfyUI.mp4 filter=lfs diff=lfs merge=lfs -text
347
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_18/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
348
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_18/Original_No_Mask.mp4 filter=lfs diff=lfs merge=lfs -text
349
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_19/Original_Load_Me_in_Loader.mp4 filter=lfs diff=lfs merge=lfs -text
350
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_19/Outpainted_40frames_Drag_Me_to_ComfyUI.mp4 filter=lfs diff=lfs merge=lfs -text
351
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_19/Outpainted_41frames_Drag_Me_to_ComfyUI.mp4 filter=lfs diff=lfs merge=lfs -text
352
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_2/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
353
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_2/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
354
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_2/Original_No_Mask.png filter=lfs diff=lfs merge=lfs -text
355
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_20/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
356
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_20/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
357
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_20/Original_No_Mask.png filter=lfs diff=lfs merge=lfs -text
358
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_21/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
359
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_21/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
360
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_21/Original_No_Mask.png filter=lfs diff=lfs merge=lfs -text
361
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_22/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
362
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_22/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
363
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_22/Original_No_Mask.png filter=lfs diff=lfs merge=lfs -text
364
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_23/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
365
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_23/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
366
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_23/Original_No_Mask.png filter=lfs diff=lfs merge=lfs -text
367
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_3/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
368
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_3/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
369
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_3/Original_No_Mask.png filter=lfs diff=lfs merge=lfs -text
370
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_4/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
371
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_4/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
372
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_4/Original_No_Mask.png filter=lfs diff=lfs merge=lfs -text
373
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_5/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
374
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_5/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
375
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_5/Original_No_Mask.png filter=lfs diff=lfs merge=lfs -text
376
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_6/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
377
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_6/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
378
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_6/Original_No_Mask.png filter=lfs diff=lfs merge=lfs -text
379
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_7/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
380
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_7/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
381
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_7/Original_No_Mask.png filter=lfs diff=lfs merge=lfs -text
382
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_8/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
383
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_8/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
384
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_8/Original_No_Mask.png filter=lfs diff=lfs merge=lfs -text
385
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_9/InPainted_Drag_Me_to_ComfyUI.png filter=lfs diff=lfs merge=lfs -text
386
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_9/Masked_Load_Me_in_Loader.png filter=lfs diff=lfs merge=lfs -text
387
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Example_9/Original_No_Mask.png filter=lfs diff=lfs merge=lfs -text
388
+ kim_comfyui_data/custom_nodes/LanPaint/examples/InpaintChara_04.jpg filter=lfs diff=lfs merge=lfs -text
389
+ kim_comfyui_data/custom_nodes/LanPaint/examples/InpaintChara_05.jpg filter=lfs diff=lfs merge=lfs -text
390
+ kim_comfyui_data/custom_nodes/LanPaint/examples/InpaintChara_06.jpg filter=lfs diff=lfs merge=lfs -text
391
+ kim_comfyui_data/custom_nodes/LanPaint/examples/InpaintChara_07.jpg filter=lfs diff=lfs merge=lfs -text
392
+ kim_comfyui_data/custom_nodes/LanPaint/examples/InpaintChara_08.jpg filter=lfs diff=lfs merge=lfs -text
393
+ kim_comfyui_data/custom_nodes/LanPaint/examples/InpaintChara_09.jpg filter=lfs diff=lfs merge=lfs -text
394
+ kim_comfyui_data/custom_nodes/LanPaint/examples/InpaintChara_10.jpg filter=lfs diff=lfs merge=lfs -text
395
+ kim_comfyui_data/custom_nodes/LanPaint/examples/InpaintChara_11.jpg filter=lfs diff=lfs merge=lfs -text
396
+ kim_comfyui_data/custom_nodes/LanPaint/examples/InpaintChara_12.jpg filter=lfs diff=lfs merge=lfs -text
397
+ kim_comfyui_data/custom_nodes/LanPaint/examples/InpaintChara_13(1).jpg filter=lfs diff=lfs merge=lfs -text
398
+ kim_comfyui_data/custom_nodes/LanPaint/examples/InpaintChara_13.jpg filter=lfs diff=lfs merge=lfs -text
399
+ kim_comfyui_data/custom_nodes/LanPaint/examples/InpaintChara_14.jpg filter=lfs diff=lfs merge=lfs -text
400
+ kim_comfyui_data/custom_nodes/LanPaint/examples/InpaintChara_45.jpg filter=lfs diff=lfs merge=lfs -text
401
+ kim_comfyui_data/custom_nodes/LanPaint/examples/InpaintChara_46.jpg filter=lfs diff=lfs merge=lfs -text
402
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Inpainted_40frames_Drag_Me_to_ComfyUI_example17.gif filter=lfs diff=lfs merge=lfs -text
403
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Inpainted_81frames_Drag_Me_to_ComfyUI_example18.gif filter=lfs diff=lfs merge=lfs -text
404
+ kim_comfyui_data/custom_nodes/LanPaint/examples/LanPaintQwen_01.jpg filter=lfs diff=lfs merge=lfs -text
405
+ kim_comfyui_data/custom_nodes/LanPaint/examples/LanPaintQwen_03.jpg filter=lfs diff=lfs merge=lfs -text
406
+ kim_comfyui_data/custom_nodes/LanPaint/examples/LanPaintQwen_04.jpg filter=lfs diff=lfs merge=lfs -text
407
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Mask_Example19_.png filter=lfs diff=lfs merge=lfs -text
408
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Original_Load_Me_in_Loader_example19.gif filter=lfs diff=lfs merge=lfs -text
409
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Original_No_Mask-example18.gif filter=lfs diff=lfs merge=lfs -text
410
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Original_No_Mask_example17.gif filter=lfs diff=lfs merge=lfs -text
411
+ kim_comfyui_data/custom_nodes/LanPaint/examples/Outpainted_40frames_Drag_Me_to_ComfyUI_example19.gif filter=lfs diff=lfs merge=lfs -text
412
+ kim_comfyui_data/custom_nodes/cg-use-everywhere/docs/simple-example-image.png filter=lfs diff=lfs merge=lfs -text
413
+ kim_comfyui_data/custom_nodes/cg-use-everywhere/docs/simple-example.png filter=lfs diff=lfs merge=lfs -text
414
+ kim_comfyui_data/custom_nodes/cg-use-everywhere/tests/compare.png filter=lfs diff=lfs merge=lfs -text
415
+ kim_comfyui_data/custom_nodes/cg-use-everywhere/tests/test.png filter=lfs diff=lfs merge=lfs -text
416
+ kim_comfyui_data/custom_nodes/cg-use-everywhere/tests/test2.png filter=lfs diff=lfs merge=lfs -text
417
+ kim_comfyui_data/custom_nodes/comfy_mtb/extern/GFPGAN/inputs/cropped_faces/Adele_crop.png filter=lfs diff=lfs merge=lfs -text
418
+ kim_comfyui_data/custom_nodes/comfy_mtb/extern/GFPGAN/inputs/cropped_faces/Julia_Roberts_crop.png filter=lfs diff=lfs merge=lfs -text
419
+ kim_comfyui_data/custom_nodes/comfy_mtb/extern/GFPGAN/inputs/cropped_faces/Justin_Timberlake_crop.png filter=lfs diff=lfs merge=lfs -text
420
+ kim_comfyui_data/custom_nodes/comfy_mtb/extern/GFPGAN/inputs/cropped_faces/Paris_Hilton_crop.png filter=lfs diff=lfs merge=lfs -text
421
+ kim_comfyui_data/custom_nodes/comfy_mtb/extern/GFPGAN/inputs/whole_imgs/00.jpg filter=lfs diff=lfs merge=lfs -text
422
+ kim_comfyui_data/custom_nodes/comfy_mtb/extern/GFPGAN/inputs/whole_imgs/10045.png filter=lfs diff=lfs merge=lfs -text
423
+ kim_comfyui_data/custom_nodes/comfy_mtb/extern/GFPGAN/tests/data/ffhq_gt.lmdb/data.mdb filter=lfs diff=lfs merge=lfs -text
424
+ kim_comfyui_data/custom_nodes/comfy_mtb/extern/GFPGAN/tests/data/gt/00000000.png filter=lfs diff=lfs merge=lfs -text
425
+ kim_comfyui_data/custom_nodes/comfy_mtb/extern/frame_interpolation/moment.gif filter=lfs diff=lfs merge=lfs -text
426
+ kim_comfyui_data/custom_nodes/comfy_mtb/extern/frame_interpolation/photos/one.png filter=lfs diff=lfs merge=lfs -text
427
+ kim_comfyui_data/custom_nodes/comfy_mtb/extern/frame_interpolation/photos/two.png filter=lfs diff=lfs merge=lfs -text
428
+ kim_comfyui_data/custom_nodes/comfyui-easy-use/py/modules/kolors/chatglm/tokenizer/vocab.txt filter=lfs diff=lfs merge=lfs -text
429
+ kim_comfyui_data/custom_nodes/comfyui-easy-use/resources/OpenSans-Medium.ttf filter=lfs diff=lfs merge=lfs -text
430
+ kim_comfyui_data/custom_nodes/comfyui-ic-light/example_workflows/ic_light_animated_example_01.png filter=lfs diff=lfs merge=lfs -text
431
+ kim_comfyui_data/custom_nodes/comfyui-ic-light/example_workflows/ic_light_example_02.png filter=lfs diff=lfs merge=lfs -text
432
+ kim_comfyui_data/custom_nodes/comfyui-ic-light/example_workflows/ic_light_fbc_example_02.png filter=lfs diff=lfs merge=lfs -text
433
+ kim_comfyui_data/custom_nodes/comfyui-ic-light/example_workflows/ic_light_simple_light_multi_02.png filter=lfs diff=lfs merge=lfs -text
434
+ kim_comfyui_data/custom_nodes/comfyui-ic-light/example_workflows/iclight_example_animated_multilight_01.png filter=lfs diff=lfs merge=lfs -text
435
+ kim_comfyui_data/custom_nodes/comfyui-ic-light/example_workflows/iclight_example_fc_controlled_gradient_01.png filter=lfs diff=lfs merge=lfs -text
436
+ kim_comfyui_data/custom_nodes/comfyui-ic-light/example_workflows/iclight_normals_example_01.png filter=lfs diff=lfs merge=lfs -text
437
+ kim_comfyui_data/custom_nodes/comfyui-impact-pack/example_workflows/2-MaskDetailer.jpg filter=lfs diff=lfs merge=lfs -text
438
+ kim_comfyui_data/custom_nodes/comfyui-impact-pack/example_workflows/4-MakeTileSEGS-Upscale.jpg filter=lfs diff=lfs merge=lfs -text
439
+ kim_comfyui_data/custom_nodes/comfyui-impact-pack/example_workflows/5-prompt-per-tile.jpg filter=lfs diff=lfs merge=lfs -text
440
+ kim_comfyui_data/custom_nodes/comfyui-impact-pack/example_workflows/6-DetailerWildcard.jpg filter=lfs diff=lfs merge=lfs -text
441
+ kim_comfyui_data/custom_nodes/comfyui-impact-pack/troubleshooting/black1.png filter=lfs diff=lfs merge=lfs -text
442
+ kim_comfyui_data/custom_nodes/comfyui-impact-pack/troubleshooting/black2.png filter=lfs diff=lfs merge=lfs -text
443
+ kim_comfyui_data/custom_nodes/comfyui-inpaint-cropandstitch/example_workflows/inpaint_flux.jpg filter=lfs diff=lfs merge=lfs -text
444
+ kim_comfyui_data/custom_nodes/comfyui-inpaint-cropandstitch/example_workflows/inpaint_hires.jpg filter=lfs diff=lfs merge=lfs -text
445
+ kim_comfyui_data/custom_nodes/comfyui-inpaint-cropandstitch/example_workflows/inpaint_sd15.jpg filter=lfs diff=lfs merge=lfs -text
446
+ kim_comfyui_data/custom_nodes/comfyui-inpaint-cropandstitch/inpaint_flux.png filter=lfs diff=lfs merge=lfs -text
447
+ kim_comfyui_data/custom_nodes/comfyui-inpaint-cropandstitch/inpaint_hires.png filter=lfs diff=lfs merge=lfs -text
448
+ kim_comfyui_data/custom_nodes/comfyui-inpaint-cropandstitch/inpaint_sd15.png filter=lfs diff=lfs merge=lfs -text
449
+ kim_comfyui_data/custom_nodes/comfyui-lama-remover/example/example1.png filter=lfs diff=lfs merge=lfs -text
450
+ kim_comfyui_data/custom_nodes/comfyui-lama-remover/example/example2.png filter=lfs diff=lfs merge=lfs -text
451
+ kim_comfyui_data/custom_nodes/comfyui-lama-remover/example/example3.png filter=lfs diff=lfs merge=lfs -text
452
+ kim_comfyui_data/custom_nodes/comfyui-loop/ComfyUI-Loop.png filter=lfs diff=lfs merge=lfs -text
453
+ kim_comfyui_data/custom_nodes/comfyui-loop/_demo_videos/loop_and_paste.mkv filter=lfs diff=lfs merge=lfs -text
454
+ kim_comfyui_data/custom_nodes/comfyui-loop/_demo_videos/loop_and_save_any.mkv filter=lfs diff=lfs merge=lfs -text
455
+ kim_comfyui_data/custom_nodes/comfyui-loop/example_workflow/loop_workflow_example.png filter=lfs diff=lfs merge=lfs -text
456
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/0-m-app.png filter=lfs diff=lfs merge=lfs -text
457
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/1722517810720.png filter=lfs diff=lfs merge=lfs -text
458
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/1724308322276.png filter=lfs diff=lfs merge=lfs -text
459
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/1725710761451.png filter=lfs diff=lfs merge=lfs -text
460
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/3d-workflow.png filter=lfs diff=lfs merge=lfs -text
461
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/3d_app.png filter=lfs diff=lfs merge=lfs -text
462
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/3dimage.png filter=lfs diff=lfs merge=lfs -text
463
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/69feadc0e52f99b420cee0201660c4e.png filter=lfs diff=lfs merge=lfs -text
464
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/City_Snapshot_00005_.png filter=lfs diff=lfs merge=lfs -text
465
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/MaterialsVariantsShoe.glb filter=lfs diff=lfs merge=lfs -text
466
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/NeilArmstrong.glb filter=lfs diff=lfs merge=lfs -text
467
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/TransparentImage.png filter=lfs diff=lfs merge=lfs -text
468
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/VisualStylePrompting.png filter=lfs diff=lfs merge=lfs -text
469
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/appinfo-readme.png filter=lfs diff=lfs merge=lfs -text
470
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/appinfo-readme2.png filter=lfs diff=lfs merge=lfs -text
471
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/detect-face-all.png filter=lfs diff=lfs merge=lfs -text
472
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/fonts/庞门正道粗书体6.0.ttf filter=lfs diff=lfs merge=lfs -text
473
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/fonts/有爱黑体ARHei.ttf filter=lfs diff=lfs merge=lfs -text
474
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/fonts/有爱黑体arheiuhk_bd.ttf filter=lfs diff=lfs merge=lfs -text
475
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/fonts/王汉宗颜楷体繁.ttf filter=lfs diff=lfs merge=lfs -text
476
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/help.png filter=lfs diff=lfs merge=lfs -text
477
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/load-watch.png filter=lfs diff=lfs merge=lfs -text
478
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/prompt_weight.png filter=lfs diff=lfs merge=lfs -text
479
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/randomPrompt.png filter=lfs diff=lfs merge=lfs -text
480
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/screenshare.png filter=lfs diff=lfs merge=lfs -text
481
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/微信图片_20240421205440.png filter=lfs diff=lfs merge=lfs -text
482
+ kim_comfyui_data/custom_nodes/comfyui-mixlab-nodes/assets/挖掘机_00076_1.png filter=lfs diff=lfs merge=lfs -text
483
+ kim_comfyui_data/custom_nodes/comfyui-propost/examples/propost-compound.jpg filter=lfs diff=lfs merge=lfs -text
484
+ kim_comfyui_data/custom_nodes/comfyui-propost/examples/propost-depthmapblur-workflow.png filter=lfs diff=lfs merge=lfs -text
485
+ kim_comfyui_data/custom_nodes/comfyui-propost/examples/propost-filmgrain.jpg filter=lfs diff=lfs merge=lfs -text
486
+ kim_comfyui_data/custom_nodes/comfyui-propost/examples/propost-lut.jpg filter=lfs diff=lfs merge=lfs -text
487
+ kim_comfyui_data/custom_nodes/comfyui-propost/examples/propost.jpg filter=lfs diff=lfs merge=lfs -text
488
+ kim_comfyui_data/custom_nodes/comfyui-propost/examples/workflow.png filter=lfs diff=lfs merge=lfs -text
489
+ kim_comfyui_data/custom_nodes/comfyui-rmbg/sam3/perflib/tests/assets/masks.tiff filter=lfs diff=lfs merge=lfs -text
490
+ kim_comfyui_data/custom_nodes/comfyui-sam2/assets/workflow.png filter=lfs diff=lfs merge=lfs -text
491
+ kim_comfyui_data/custom_nodes/comfyui-show-text/imgs/showtext.png filter=lfs diff=lfs merge=lfs -text
492
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/NotoSans-Regular.ttf filter=lfs diff=lfs merge=lfs -text
493
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/CNAuxBanner.jpg filter=lfs diff=lfs merge=lfs -text
494
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/ExecuteAll.png filter=lfs diff=lfs merge=lfs -text
495
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/ExecuteAll1.jpg filter=lfs diff=lfs merge=lfs -text
496
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/ExecuteAll2.jpg filter=lfs diff=lfs merge=lfs -text
497
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/comfyui-controlnet-aux-logo.png filter=lfs diff=lfs merge=lfs -text
498
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/example_animal_pose.png filter=lfs diff=lfs merge=lfs -text
499
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/example_anime_face_segmentor.png filter=lfs diff=lfs merge=lfs -text
500
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/example_anyline.png filter=lfs diff=lfs merge=lfs -text
501
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/example_densepose.png filter=lfs diff=lfs merge=lfs -text
502
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/example_depth_anything.png filter=lfs diff=lfs merge=lfs -text
503
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/example_depth_anything_v2.png filter=lfs diff=lfs merge=lfs -text
504
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/example_dsine.png filter=lfs diff=lfs merge=lfs -text
505
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/example_marigold.png filter=lfs diff=lfs merge=lfs -text
506
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/example_marigold_flat.jpg filter=lfs diff=lfs merge=lfs -text
507
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/example_mesh_graphormer.png filter=lfs diff=lfs merge=lfs -text
508
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/example_metric3d.png filter=lfs diff=lfs merge=lfs -text
509
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/example_recolor.png filter=lfs diff=lfs merge=lfs -text
510
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/example_save_kps.png filter=lfs diff=lfs merge=lfs -text
511
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/example_teed.png filter=lfs diff=lfs merge=lfs -text
512
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/example_torchscript.png filter=lfs diff=lfs merge=lfs -text
513
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/examples/example_unimatch.png filter=lfs diff=lfs merge=lfs -text
514
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/src/custom_controlnet_aux/mesh_graphormer/hand_landmarker.task filter=lfs diff=lfs merge=lfs -text
515
+ kim_comfyui_data/custom_nodes/comfyui_controlnet_aux_backup/tests/pose.png filter=lfs diff=lfs merge=lfs -text
516
+ kim_comfyui_data/custom_nodes/comfyui_faceanalysis/face_analysis.jpg filter=lfs diff=lfs merge=lfs -text
517
+ kim_comfyui_data/custom_nodes/comfyui_facetools/examples/comparison.jpg filter=lfs diff=lfs merge=lfs -text
518
+ kim_comfyui_data/custom_nodes/comfyui_facetools/examples/full_inpainting.png filter=lfs diff=lfs merge=lfs -text
519
+ kim_comfyui_data/custom_nodes/comfyui_ipadapter_plus/examples/demo_workflow.jpg filter=lfs diff=lfs merge=lfs -text
520
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/font/Alibaba-PuHuiTi-Heavy.ttf filter=lfs diff=lfs merge=lfs -text
521
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/add_grain_example.jpg filter=lfs diff=lfs merge=lfs -text
522
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/auto_adjust_example.jpg filter=lfs diff=lfs merge=lfs -text
523
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/auto_adjust_v2_example.jpg filter=lfs diff=lfs merge=lfs -text
524
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/auto_brightness_example.jpg filter=lfs diff=lfs merge=lfs -text
525
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/ben_ultra_example.jpg filter=lfs diff=lfs merge=lfs -text
526
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/ben_ultra_node.jpg filter=lfs diff=lfs merge=lfs -text
527
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/birefnet_ultra_example.jpg filter=lfs diff=lfs merge=lfs -text
528
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/birefnet_ultra_v2_example.jpg filter=lfs diff=lfs merge=lfs -text
529
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/birefnet_ultra_v2_node.jpg filter=lfs diff=lfs merge=lfs -text
530
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/blend_mode_result.jpg filter=lfs diff=lfs merge=lfs -text
531
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/blend_mode_v2_example.jpg filter=lfs diff=lfs merge=lfs -text
532
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/blendif_mask_example.jpg filter=lfs diff=lfs merge=lfs -text
533
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/channel_shake_example.jpg filter=lfs diff=lfs merge=lfs -text
534
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/chioce_text_preset_node.jpg filter=lfs diff=lfs merge=lfs -text
535
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/choice_text_preset_example.jpg filter=lfs diff=lfs merge=lfs -text
536
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/color_adapter_example.jpg filter=lfs diff=lfs merge=lfs -text
537
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/color_balance_example.jpg filter=lfs diff=lfs merge=lfs -text
538
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/color_image_example.jpg filter=lfs diff=lfs merge=lfs -text
539
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/color_negative_example.jpg filter=lfs diff=lfs merge=lfs -text
540
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/color_of_shadow_and_highlight_example.jpg filter=lfs diff=lfs merge=lfs -text
541
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/color_of_shadow_and_highlight_node.jpg filter=lfs diff=lfs merge=lfs -text
542
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/color_overlay_example.jpg filter=lfs diff=lfs merge=lfs -text
543
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/color_temperature_example.jpg filter=lfs diff=lfs merge=lfs -text
544
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/colormap_result.jpg filter=lfs diff=lfs merge=lfs -text
545
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/corp_by_mask_example.jpg filter=lfs diff=lfs merge=lfs -text
546
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/corp_by_mask_node.jpg filter=lfs diff=lfs merge=lfs -text
547
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/create_gradient_mask_example.jpg filter=lfs diff=lfs merge=lfs -text
548
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/create_gradient_mask_example2.jpg filter=lfs diff=lfs merge=lfs -text
549
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/data_nodes_example.jpg filter=lfs diff=lfs merge=lfs -text
550
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/draw_bbox_mask_example.jpg filter=lfs diff=lfs merge=lfs -text
551
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/draw_rounded_rectangle_node.jpg filter=lfs diff=lfs merge=lfs -text
552
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/drop_shadow_example.jpg filter=lfs diff=lfs merge=lfs -text
553
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/evf_sam_ultra_example.jpg filter=lfs diff=lfs merge=lfs -text
554
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/evf_sam_ultra_node.jpg filter=lfs diff=lfs merge=lfs -text
555
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/exposure_example.jpg filter=lfs diff=lfs merge=lfs -text
556
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/extend_canvas_example.jpg filter=lfs diff=lfs merge=lfs -text
557
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/extend_canvas_node.jpg filter=lfs diff=lfs merge=lfs -text
558
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/film_example.jpg filter=lfs diff=lfs merge=lfs -text
559
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/film_node.jpg filter=lfs diff=lfs merge=lfs -text
560
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/florence2_image2prompt_example.jpg filter=lfs diff=lfs merge=lfs -text
561
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/florence2_ultra_example.jpg filter=lfs diff=lfs merge=lfs -text
562
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/flux_kontext_image_scale_node_example.jpg filter=lfs diff=lfs merge=lfs -text
563
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/gaussian_blur_example.jpg filter=lfs diff=lfs merge=lfs -text
564
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/get_color_tone_example.jpg filter=lfs diff=lfs merge=lfs -text
565
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/get_color_tone_v2_example.jpg filter=lfs diff=lfs merge=lfs -text
566
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/get_main_color_and_color_name_example.jpg filter=lfs diff=lfs merge=lfs -text
567
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/get_main_color_v2_example.jpg filter=lfs diff=lfs merge=lfs -text
568
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/get_main_colors_example.jpg filter=lfs diff=lfs merge=lfs -text
569
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/gradient_image_example.jpg filter=lfs diff=lfs merge=lfs -text
570
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/gradient_overlay_example.jpg filter=lfs diff=lfs merge=lfs -text
571
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/halftone_example.jpg filter=lfs diff=lfs merge=lfs -text
572
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/halftone_node.jpg filter=lfs diff=lfs merge=lfs -text
573
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/hdr_effects_example.jpg filter=lfs diff=lfs merge=lfs -text
574
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/hl_frequency_detail_restore_example.jpg filter=lfs diff=lfs merge=lfs -text
575
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/human_parts_node.jpg filter=lfs diff=lfs merge=lfs -text
576
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/human_parts_ultra_example.jpg filter=lfs diff=lfs merge=lfs -text
577
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/icmask_example.jpg filter=lfs diff=lfs merge=lfs -text
578
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/if_example.jpg filter=lfs diff=lfs merge=lfs -text
579
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_auto_crop_example.jpg filter=lfs diff=lfs merge=lfs -text
580
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_auto_crop_node.jpg filter=lfs diff=lfs merge=lfs -text
581
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_auto_crop_v2_node.jpg filter=lfs diff=lfs merge=lfs -text
582
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_auto_crop_v3_node.jpg filter=lfs diff=lfs merge=lfs -text
583
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_blend_advance_example.jpg filter=lfs diff=lfs merge=lfs -text
584
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_blend_advance_node.jpg filter=lfs diff=lfs merge=lfs -text
585
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_blend_example.jpg filter=lfs diff=lfs merge=lfs -text
586
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_channel_merge_example.jpg filter=lfs diff=lfs merge=lfs -text
587
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_channel_split_example.jpg filter=lfs diff=lfs merge=lfs -text
588
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_composite_handle_mask_example.jpg filter=lfs diff=lfs merge=lfs -text
589
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_composite_handle_mask_node.jpg filter=lfs diff=lfs merge=lfs -text
590
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_hub_example.jpg filter=lfs diff=lfs merge=lfs -text
591
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_hub_node.jpg filter=lfs diff=lfs merge=lfs -text
592
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_mask_scale_as_example.jpg filter=lfs diff=lfs merge=lfs -text
593
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_mask_scale_as_v2_example.jpg filter=lfs diff=lfs merge=lfs -text
594
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_mask_scale_as_v2_node.jpg filter=lfs diff=lfs merge=lfs -text
595
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_reel_composit_node.jpg filter=lfs diff=lfs merge=lfs -text
596
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_reel_example.jpg filter=lfs diff=lfs merge=lfs -text
597
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_remove_alpha_example.jpg filter=lfs diff=lfs merge=lfs -text
598
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_reward_filter_example.jpg filter=lfs diff=lfs merge=lfs -text
599
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_scale_by_aspect_ratio_example.jpg filter=lfs diff=lfs merge=lfs -text
600
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_scale_restore_example.jpg filter=lfs diff=lfs merge=lfs -text
601
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_shift_example.jpg filter=lfs diff=lfs merge=lfs -text
602
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_shift_node.jpg filter=lfs diff=lfs merge=lfs -text
603
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_tagger_save_example.jpg filter=lfs diff=lfs merge=lfs -text
604
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_tagger_save_v2_node.jpg filter=lfs diff=lfs merge=lfs -text
605
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/image_to_mask_example.jpg filter=lfs diff=lfs merge=lfs -text
606
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/inner_glow_example.jpg filter=lfs diff=lfs merge=lfs -text
607
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/inner_shadow_example.jpg filter=lfs diff=lfs merge=lfs -text
608
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/joycaption2_example.jpg filter=lfs diff=lfs merge=lfs -text
609
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/joycaption2_extra_options_node.jpg filter=lfs diff=lfs merge=lfs -text
610
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/joycaption2_node.jpg filter=lfs diff=lfs merge=lfs -text
611
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/lama_example.jpg filter=lfs diff=lfs merge=lfs -text
612
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/layer_image_transform_example.jpg filter=lfs diff=lfs merge=lfs -text
613
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/layer_image_transform_node.jpg filter=lfs diff=lfs merge=lfs -text
614
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/layer_mask_transform_node.jpg filter=lfs diff=lfs merge=lfs -text
615
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/layercolor_nodes.jpg filter=lfs diff=lfs merge=lfs -text
616
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/layercolor_title.jpg filter=lfs diff=lfs merge=lfs -text
617
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/layerfilter_nodes.jpg filter=lfs diff=lfs merge=lfs -text
618
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/layermask_nodes.jpg filter=lfs diff=lfs merge=lfs -text
619
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/layerstyle_nodes.jpg filter=lfs diff=lfs merge=lfs -text
620
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/layerstyle_title.jpg filter=lfs diff=lfs merge=lfs -text
621
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/layerutility_nodes.jpg filter=lfs diff=lfs merge=lfs -text
622
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/levels_example.jpg filter=lfs diff=lfs merge=lfs -text
623
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/light_leak_example.jpg filter=lfs diff=lfs merge=lfs -text
624
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/llama_vision_example.jpg filter=lfs diff=lfs merge=lfs -text
625
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/llama_vision_node.jpg filter=lfs diff=lfs merge=lfs -text
626
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/load_image_example.jpg filter=lfs diff=lfs merge=lfs -text
627
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/load_image_example_psd_file.jpg filter=lfs diff=lfs merge=lfs -text
628
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/lut_apply_example.jpg filter=lfs diff=lfs merge=lfs -text
629
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/mask_box_detect_example.jpg filter=lfs diff=lfs merge=lfs -text
630
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/mask_box_extend_example.jpg filter=lfs diff=lfs merge=lfs -text
631
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/mask_box_extend_node.jpg filter=lfs diff=lfs merge=lfs -text
632
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/mask_by_color_example.jpg filter=lfs diff=lfs merge=lfs -text
633
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/mask_by_different_example.jpg filter=lfs diff=lfs merge=lfs -text
634
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/mask_by_different_node.jpg filter=lfs diff=lfs merge=lfs -text
635
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/mask_edge_compare.jpg filter=lfs diff=lfs merge=lfs -text
636
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/mask_edge_shrink_example.jpg filter=lfs diff=lfs merge=lfs -text
637
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/mask_edge_shrink_node.jpg filter=lfs diff=lfs merge=lfs -text
638
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/mask_edge_ultra_detail_example.jpg filter=lfs diff=lfs merge=lfs -text
639
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/mask_edge_ultra_detail_node.jpg filter=lfs diff=lfs merge=lfs -text
640
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/mask_edge_ultra_detail_v2_example.jpg filter=lfs diff=lfs merge=lfs -text
641
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/mask_gradient_example.jpg filter=lfs diff=lfs merge=lfs -text
642
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/mask_grow_example.jpg filter=lfs diff=lfs merge=lfs -text
643
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/mask_invert.jpg filter=lfs diff=lfs merge=lfs -text
644
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/mask_motion_blur_example.jpg filter=lfs diff=lfs merge=lfs -text
645
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/mask_stroke_example.jpg filter=lfs diff=lfs merge=lfs -text
646
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/menu_layer_utility.jpg filter=lfs diff=lfs merge=lfs -text
647
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/name_to_color_example.jpg filter=lfs diff=lfs merge=lfs -text
648
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/name_to_color_node.jpg filter=lfs diff=lfs merge=lfs -text
649
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/object_detector_yolo_world_node.jpg filter=lfs diff=lfs merge=lfs -text
650
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/outer_glow_example.jpg filter=lfs diff=lfs merge=lfs -text
651
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/person_mask_ultra_example.jpg filter=lfs diff=lfs merge=lfs -text
652
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/person_mask_ultra_node.jpg filter=lfs diff=lfs merge=lfs -text
653
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/phi_prompt_example.jpg filter=lfs diff=lfs merge=lfs -text
654
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/phi_prompt_node.jpg filter=lfs diff=lfs merge=lfs -text
655
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/pixel_spread_example.jpg filter=lfs diff=lfs merge=lfs -text
656
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/prompt_embellish_example.jpg filter=lfs diff=lfs merge=lfs -text
657
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/prompt_tagger_example.jpg filter=lfs diff=lfs merge=lfs -text
658
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/prompt_tagger_example1.jpg filter=lfs diff=lfs merge=lfs -text
659
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/purge_vram_example.jpg filter=lfs diff=lfs merge=lfs -text
660
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/queue_stop_example.jpg filter=lfs diff=lfs merge=lfs -text
661
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/qwen_image2prompt_example.jpg filter=lfs diff=lfs merge=lfs -text
662
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/random_generator_example.jpg filter=lfs diff=lfs merge=lfs -text
663
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/random_generator_node.jpg filter=lfs diff=lfs merge=lfs -text
664
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/random_generator_v2_node.jpg filter=lfs diff=lfs merge=lfs -text
665
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/rembg_ultra_example.jpg filter=lfs diff=lfs merge=lfs -text
666
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/rounded_rectangle_example.jpg filter=lfs diff=lfs merge=lfs -text
667
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/rounded_rectangle_node.jpg filter=lfs diff=lfs merge=lfs -text
668
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/sam2_example.jpg filter=lfs diff=lfs merge=lfs -text
669
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/sam2_ultra_node.jpg filter=lfs diff=lfs merge=lfs -text
670
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/sam2_video_ultra_node.jpg filter=lfs diff=lfs merge=lfs -text
671
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/saveimage_plus_example.jpg filter=lfs diff=lfs merge=lfs -text
672
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/sd3_negative_conditioning_example.jpg filter=lfs diff=lfs merge=lfs -text
673
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/sd3_negative_conditioning_node_note.jpg filter=lfs diff=lfs merge=lfs -text
674
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/segformer_clothes_example.jpg filter=lfs diff=lfs merge=lfs -text
675
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/segformer_clothes_pipeline_node.jpg filter=lfs diff=lfs merge=lfs -text
676
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/segformer_clothes_setting_node.jpg filter=lfs diff=lfs merge=lfs -text
677
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/segformer_fashion_example.jpg filter=lfs diff=lfs merge=lfs -text
678
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/segformer_fashion_pipeline_node.jpg filter=lfs diff=lfs merge=lfs -text
679
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/segformer_fashion_setting_node.jpg filter=lfs diff=lfs merge=lfs -text
680
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/segformer_ultra_example.jpg filter=lfs diff=lfs merge=lfs -text
681
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/segformer_ultra_node.jpg filter=lfs diff=lfs merge=lfs -text
682
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/segfromer_ultra_v3_node.jpg filter=lfs diff=lfs merge=lfs -text
683
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/segment_anything_ultra_compare.jpg filter=lfs diff=lfs merge=lfs -text
684
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/segment_anything_ultra_example.jpg filter=lfs diff=lfs merge=lfs -text
685
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/segment_anything_ultra_node.jpg filter=lfs diff=lfs merge=lfs -text
686
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/segment_anything_ultra_v2_node.jpg filter=lfs diff=lfs merge=lfs -text
687
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/shadow_and_highlight_mask_example.jpg filter=lfs diff=lfs merge=lfs -text
688
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/sharp_and_soft_example.jpg filter=lfs diff=lfs merge=lfs -text
689
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/simple_text_image_example.jpg filter=lfs diff=lfs merge=lfs -text
690
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/simple_text_image_node.jpg filter=lfs diff=lfs merge=lfs -text
691
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/skin_beauty_example.jpg filter=lfs diff=lfs merge=lfs -text
692
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/soft_light_example.jpg filter=lfs diff=lfs merge=lfs -text
693
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/string_condition_example.jpg filter=lfs diff=lfs merge=lfs -text
694
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/stroke_example.jpg filter=lfs diff=lfs merge=lfs -text
695
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/switch_case_example.jpg filter=lfs diff=lfs merge=lfs -text
696
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/text_image_example.jpg filter=lfs diff=lfs merge=lfs -text
697
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/text_image_node.jpg filter=lfs diff=lfs merge=lfs -text
698
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/text_image_v2_node.jpg filter=lfs diff=lfs merge=lfs -text
699
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/text_join_example.jpg filter=lfs diff=lfs merge=lfs -text
700
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/text_preseter_node.jpg filter=lfs diff=lfs merge=lfs -text
701
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/title.jpg filter=lfs diff=lfs merge=lfs -text
702
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/transparent_background_ultra_example.jpg filter=lfs diff=lfs merge=lfs -text
703
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/ultra_nodes.jpg filter=lfs diff=lfs merge=lfs -text
704
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/ultra_v2_nodes_example.jpg filter=lfs diff=lfs merge=lfs -text
705
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/userprompt_generator_txt2img_with_reference_node.jpg filter=lfs diff=lfs merge=lfs -text
706
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/vqa_prompt_example.jpg filter=lfs diff=lfs merge=lfs -text
707
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/vqa_prompt_node.jpg filter=lfs diff=lfs merge=lfs -text
708
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/water_color_example.jpg filter=lfs diff=lfs merge=lfs -text
709
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/watermark_example.jpg filter=lfs diff=lfs merge=lfs -text
710
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/xy2percent_example.jpg filter=lfs diff=lfs merge=lfs -text
711
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/image/yolov8_detect_example.jpg filter=lfs diff=lfs merge=lfs -text
712
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-Black-jiII8dog.woff2 filter=lfs diff=lfs merge=lfs -text
713
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-BlackItalic-1413vuen.woff2 filter=lfs diff=lfs merge=lfs -text
714
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-Bold-srYz_-1B.woff2 filter=lfs diff=lfs merge=lfs -text
715
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-BoldItalic-dE_gZyur.woff2 filter=lfs diff=lfs merge=lfs -text
716
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-ExtraBold-TduDdwUu.woff2 filter=lfs diff=lfs merge=lfs -text
717
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-ExtraBoldItalic-BJafRE5I.woff2 filter=lfs diff=lfs merge=lfs -text
718
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-ExtraLight-w5HAp5iF.woff2 filter=lfs diff=lfs merge=lfs -text
719
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-ExtraLightItalic-ZptecSuc.woff2 filter=lfs diff=lfs merge=lfs -text
720
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-Italic-f6M78thn.woff2 filter=lfs diff=lfs merge=lfs -text
721
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-Light-DFhX0qo-.woff2 filter=lfs diff=lfs merge=lfs -text
722
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-LightItalic-fu56_DRc.woff2 filter=lfs diff=lfs merge=lfs -text
723
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-Medium-dDRaJ8tM.woff2 filter=lfs diff=lfs merge=lfs -text
724
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-MediumItalic-zr3roggP.woff2 filter=lfs diff=lfs merge=lfs -text
725
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-Regular-dEFHw1tF.woff2 filter=lfs diff=lfs merge=lfs -text
726
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-SemiBold-PyS8DO2L.woff2 filter=lfs diff=lfs merge=lfs -text
727
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-SemiBoldItalic-uIDb7hsH.woff2 filter=lfs diff=lfs merge=lfs -text
728
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-Thin-eKObIkJC.woff2 filter=lfs diff=lfs merge=lfs -text
729
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/Inter-ThinItalic-L6uBn3RP.woff2 filter=lfs diff=lfs merge=lfs -text
730
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/py/iopaint/web_app/assets/kofi_button_black-XI_Dr2zg.png filter=lfs diff=lfs merge=lfs -text
731
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/whl/hydra_core-1.3.2-py3-none-any.whl filter=lfs diff=lfs merge=lfs -text
732
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/workflow/1280x720_seven_person.jpg filter=lfs diff=lfs merge=lfs -text
733
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/workflow/1280x720car.jpg filter=lfs diff=lfs merge=lfs -text
734
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/workflow/1280x768_city.png filter=lfs diff=lfs merge=lfs -text
735
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/workflow/1344x768_beach.png filter=lfs diff=lfs merge=lfs -text
736
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/workflow/1344x768_girl2.png filter=lfs diff=lfs merge=lfs -text
737
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/workflow/1344x768_hair.png filter=lfs diff=lfs merge=lfs -text
738
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/workflow/1344x768_redcar.png filter=lfs diff=lfs merge=lfs -text
739
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/workflow/1920x1080table.png filter=lfs diff=lfs merge=lfs -text
740
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/workflow/3840x2160car.jpg filter=lfs diff=lfs merge=lfs -text
741
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/workflow/512x512.png filter=lfs diff=lfs merge=lfs -text
742
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/workflow/768x1344_beach.png filter=lfs diff=lfs merge=lfs -text
743
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/workflow/768x1344_dress.png filter=lfs diff=lfs merge=lfs -text
744
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/workflow/fox_512x512.png filter=lfs diff=lfs merge=lfs -text
745
+ kim_comfyui_data/custom_nodes/comfyui_layerstyle/workflow/girl_dino_1024.png filter=lfs diff=lfs merge=lfs -text
746
+ kim_comfyui_data/custom_nodes/comfyui_lg_tools/assets/CachePreviewBridge.png filter=lfs diff=lfs merge=lfs -text
747
+ kim_comfyui_data/custom_nodes/comfyui_lg_tools/assets/FastCanvas.png filter=lfs diff=lfs merge=lfs -text
748
+ kim_comfyui_data/custom_nodes/comfyui_lg_tools/assets/color_adjust.jpg filter=lfs diff=lfs merge=lfs -text
749
+ kim_comfyui_data/custom_nodes/comfyui_lg_tools/assets/crop.jpg filter=lfs diff=lfs merge=lfs -text
750
+ kim_comfyui_data/custom_nodes/comfyui_lg_tools/assets/noise.jpg filter=lfs diff=lfs merge=lfs -text
751
+ kim_comfyui_data/custom_nodes/comfyui_lg_tools/assets/refresh.png filter=lfs diff=lfs merge=lfs -text
752
+ kim_comfyui_data/custom_nodes/comfyui_lg_tools/assets/size.jpg filter=lfs diff=lfs merge=lfs -text
753
+ kim_comfyui_data/custom_nodes/comfyui_lg_tools/assets/switch.jpg filter=lfs diff=lfs merge=lfs -text
754
+ kim_comfyui_data/custom_nodes/facerestore_cf/example.png filter=lfs diff=lfs merge=lfs -text
755
+ kim_comfyui_data/custom_nodes/pulid_comfyui/examples/pulid_wf.jpg filter=lfs diff=lfs merge=lfs -text
756
+ kim_comfyui_data/custom_nodes/rgthree-comfy/docs/rgthree_advanced.png filter=lfs diff=lfs merge=lfs -text
757
+ kim_comfyui_data/custom_nodes/rgthree-comfy/docs/rgthree_advanced_metadata.png filter=lfs diff=lfs merge=lfs -text
758
+ kim_comfyui_data/custom_nodes/rgthree-comfy/docs/rgthree_context.png filter=lfs diff=lfs merge=lfs -text
759
+ kim_comfyui_data/custom_nodes/rgthree-comfy/docs/rgthree_context_metadata.png filter=lfs diff=lfs merge=lfs -text
760
+ kim_comfyui_data/custom_nodes/was-ns/repos/SAM/assets/masks1.png filter=lfs diff=lfs merge=lfs -text
761
+ kim_comfyui_data/custom_nodes/was-ns/repos/SAM/assets/masks2.jpg filter=lfs diff=lfs merge=lfs -text
762
+ kim_comfyui_data/custom_nodes/was-ns/repos/SAM/assets/minidemo.gif filter=lfs diff=lfs merge=lfs -text
763
+ kim_comfyui_data/custom_nodes/was-ns/repos/SAM/assets/model_diagram.png filter=lfs diff=lfs merge=lfs -text
764
+ kim_comfyui_data/custom_nodes/was-ns/repos/SAM/assets/notebook1.png filter=lfs diff=lfs merge=lfs -text
765
+ kim_comfyui_data/custom_nodes/was-ns/repos/SAM/assets/notebook2.png filter=lfs diff=lfs merge=lfs -text
766
+ kim_comfyui_data/custom_nodes/was-ns/repos/SAM/demo/src/assets/data/dogs.jpg filter=lfs diff=lfs merge=lfs -text
767
+ kim_comfyui_data/custom_nodes/was-ns/repos/SAM/notebooks/images/groceries.jpg filter=lfs diff=lfs merge=lfs -text
768
+ kim_comfyui_data/custom_nodes/was-ns/repos/SAM/notebooks/images/truck.jpg filter=lfs diff=lfs merge=lfs -text
769
+ kim_comfyui_data/custom_nodes/zdx_comfyui/fonts/DejaVuSans.ttf filter=lfs diff=lfs merge=lfs -text
770
+ kim_comfyui_data/custom_nodes/zdx_comfyui/fonts/Itim-Regular-OTF.otf filter=lfs diff=lfs merge=lfs -text
771
+ kim_comfyui_data/custom_nodes/zdx_comfyui/fonts/Itim-Regular-TTF.ttf filter=lfs diff=lfs merge=lfs -text
772
+ kim_comfyui_data/custom_nodes/zdx_comfyui/fonts/arial.ttf filter=lfs diff=lfs merge=lfs -text
kim_comfyui_data/custom_nodes/Cheny_custom_nodes/__init__.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# __init__.py
# Aggregate the node registrations of every node module in this package so
# ComfyUI sees a single pair of mapping dictionaries.
from . import (
    image_center_paste,
    yolov8_person_detect,
    yolov8_person_nomask,
    mask_solid_area,
    half_person_check,
    yolov8_detect,
    find_crop_box,
    reduce_mask_by_ratio,
)

# Modules merged in the same order the original hand-written updates ran.
_NODE_MODULES = (
    image_center_paste,
    yolov8_person_detect,
    yolov8_person_nomask,
    mask_solid_area,
    half_person_check,
    yolov8_detect,
    find_crop_box,
    reduce_mask_by_ratio,
)

NODE_CLASS_MAPPINGS = {}
NODE_DISPLAY_NAME_MAPPINGS = {}
for _module in _NODE_MODULES:
    NODE_CLASS_MAPPINGS.update(_module.NODE_CLASS_MAPPINGS)
    NODE_DISPLAY_NAME_MAPPINGS.update(_module.NODE_DISPLAY_NAME_MAPPINGS)

# ComfyUI requires these two names to be exported.
__all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS']
kim_comfyui_data/custom_nodes/Cheny_custom_nodes/blendmodes.py ADDED
@@ -0,0 +1,329 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ author: Chris Freilich
3
+ description: This extension provides a blend modes node with 30 blend modes.
4
+ """
5
+ from PIL import Image
6
+ import numpy as np
7
+ import torch
8
+ import torch.nn.functional as F
9
+ from colorsys import rgb_to_hsv
10
+
11
+ try:
12
+ from blend_modes import difference, normal, screen, soft_light, lighten_only, dodge, \
13
+ addition, darken_only, multiply, hard_light, \
14
+ grain_extract, grain_merge, divide, overlay
15
+ except ImportError:
16
+ print("ERROR: blend_modes package is not installed. Please install it with: pip install blend-modes")
17
+ raise
18
+
19
def dissolve(backdrop, source, opacity):
    """Dissolve blend: randomly scatter source pixels over the backdrop.

    backdrop/source are RGBA arrays in the 0-255 range, shape (H, W, 4);
    opacity is in [0, 1].  Returns a float RGBA array in the 0-255 range
    whose alpha is the per-pixel max of the two input alphas.
    """
    alpha = source[:, :, 3] / 255
    back_rgb = backdrop[:, :, :3] / 255
    src_rgb = source[:, :, :3] / 255

    # Per-pixel probability of showing the source pixel.
    show_source = np.random.random(source.shape[:2]) < (opacity * alpha)
    scattered = np.where(show_source[..., None], src_rgb, back_rgb)

    # Composite the scattered result over the backdrop with the source alpha.
    alpha3 = alpha[..., None]
    out_rgb = np.clip((1 - alpha3) * back_rgb + alpha3 * scattered, 0, 1) * 255

    out_alpha = np.maximum(backdrop[:, :, 3], source[:, :, 3])
    return np.dstack((out_rgb, out_alpha))
53
+
54
def rgb_to_hsv_via_torch(rgb_numpy: np.ndarray, device=None) -> np.ndarray:
    """
    Convert an RGB image to HSV using torch (GPU when available).

    :param rgb_numpy: numpy array of shape (H, W, 3) with R, G, B channels,
                      values in the range [0, 1].
    :param device: torch device to run on; defaults to CUDA when available.
    :return: numpy array of shape (H, W, 3) with H, S, V channels.
             The hue (H) will be in the range [0, 1], as will S and V.
             (Note: despite the original annotation, this returns numpy,
             not a torch.Tensor.)
    """
    if device is None:
        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    # (H, W, 3) -> (3, H, W) so each channel is a 2-D tensor.
    rgb = torch.from_numpy(rgb_numpy).float().permute(2, 0, 1).to(device)
    r, g, b = rgb[0], rgb[1], rgb[2]

    max_val, _ = torch.max(rgb, dim=0)
    min_val, _ = torch.min(rgb, dim=0)
    delta = max_val - min_val

    h = torch.zeros_like(max_val)
    s = torch.zeros_like(max_val)
    v = max_val

    # calc hue... avoid div by zero (by masking the delta)
    mask = delta != 0
    r_eq_max = (r == max_val) & mask
    g_eq_max = (g == max_val) & mask
    b_eq_max = (b == max_val) & mask

    # Standard piecewise hue formula, evaluated only where each channel is max.
    h[r_eq_max] = (g[r_eq_max] - b[r_eq_max]) / delta[r_eq_max] % 6
    h[g_eq_max] = (b[g_eq_max] - r[g_eq_max]) / delta[g_eq_max] + 2.0
    h[b_eq_max] = (r[b_eq_max] - g[b_eq_max]) / delta[b_eq_max] + 4.0

    # Normalize hue from [0, 6) down to [0, 1).
    h = (h / 6.0) % 1.0

    # calc saturation (left at zero wherever the pixel is black)
    s[max_val != 0] = delta[max_val != 0] / max_val[max_val != 0]

    hsv = torch.stack([h, s, v], dim=0)

    # Back to (H, W, 3) numpy layout for the callers in this module.
    hsv_numpy = hsv.permute(1, 2, 0).cpu().numpy()
    return hsv_numpy
96
+
97
def hsv_to_rgb_via_torch(hsv_numpy: np.ndarray, device=None) -> np.ndarray:
    """
    Convert an HSV image to RGB using torch (GPU when available).

    :param hsv_numpy: numpy array of shape (H, W, 3) with H, S, V channels,
                      all in the range [0, 1].
    :param device: torch device to run on; defaults to CUDA when available.
    :return: numpy array of shape (H, W, 3) with R, G, B values in [0, 1].
             (Note: despite the original annotation, this returns numpy,
             not a torch.Tensor.)
    """
    if device is None:
        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    hsv = torch.from_numpy(hsv_numpy).float().permute(2, 0, 1).to(device)
    h, s, v = hsv[0], hsv[1], hsv[2]

    c = v * s  # chroma
    x = c * (1 - torch.abs((h * 6) % 2 - 1))  # second-largest component
    m = v - c  # match value (added back at the end)

    z = torch.zeros_like(h)
    rgb = torch.zeros_like(hsv)

    # define conditions for different hue ranges (six 60-degree sectors)
    h_cond = [
        (h < 1/6, torch.stack([c, x, z], dim=0)),
        ((1/6 <= h) & (h < 2/6), torch.stack([x, c, z], dim=0)),
        ((2/6 <= h) & (h < 3/6), torch.stack([z, c, x], dim=0)),
        ((3/6 <= h) & (h < 4/6), torch.stack([z, x, c], dim=0)),
        ((4/6 <= h) & (h < 5/6), torch.stack([x, z, c], dim=0)),
        (h >= 5/6, torch.stack([c, z, x], dim=0)),
    ]

    # conditionally set RGB values based on the hue range
    for cond, result in h_cond:
        rgb[:, cond] = result[:, cond]

    # add match value to convert to final RGB values
    rgb = rgb + m

    rgb_numpy = rgb.permute(1, 2, 0).cpu().numpy()
    return rgb_numpy
138
+
139
def hsv(backdrop, source, opacity, channel):
    """Blend one HSV channel (or H+S for "color") of source into backdrop.

    backdrop/source are RGBA arrays in the 0-255 range; opacity in [0, 1].
    channel selects "hue", "saturation", "luminance" (the V channel) or
    "color" (hue and saturation together).  Returns a uint8 RGBA array that
    keeps the backdrop's alpha channel.
    """
    # Work on normalized RGB; keep the source alpha for weighting.
    back_rgb = backdrop[:, :, :3] / 255.0
    src_rgb = source[:, :, :3] / 255.0
    src_alpha = source[:, :, 3] / 255.0

    back_hsv = rgb_to_hsv_via_torch(back_rgb)
    src_hsv = rgb_to_hsv_via_torch(src_rgb)

    out_hsv = back_hsv.copy()
    weight = opacity * src_alpha

    # Single-channel modes share the same lerp; only the index differs.
    channel_index = {"hue": 0, "saturation": 1, "luminance": 2}
    if channel in channel_index:
        idx = channel_index[channel]
        out_hsv[:, :, idx] = (1 - weight) * back_hsv[:, :, idx] + weight * src_hsv[:, :, idx]
    elif channel == "color":
        w = weight[..., None]
        out_hsv[:, :, :2] = (1 - w) * back_hsv[:, :, :2] + w * src_hsv[:, :, :2]

    out_rgb = hsv_to_rgb_via_torch(out_hsv)

    # Composite over the backdrop with the source alpha, then clamp.
    a = src_alpha[..., None]
    out_rgb = np.clip((1 - a) * back_rgb + a * out_rgb, 0, 1)

    # Re-attach the backdrop alpha and return 0-255 uint8 RGBA.
    out_rgba = np.dstack((out_rgb * 255, backdrop[:, :, 3]))
    return out_rgba.astype(np.uint8)
176
+
177
def saturation(backdrop, source, opacity):
    """Blend only the saturation channel of source into backdrop."""
    return hsv(backdrop, source, opacity, "saturation")

def luminance(backdrop, source, opacity):
    """Blend only the value/brightness channel of source into backdrop."""
    return hsv(backdrop, source, opacity, "luminance")

def hue(backdrop, source, opacity):
    """Blend only the hue channel of source into backdrop."""
    return hsv(backdrop, source, opacity, "hue")

def color(backdrop, source, opacity):
    """Blend hue and saturation of source into backdrop, keeping its value."""
    return hsv(backdrop, source, opacity, "color")
188
+
189
def darker_lighter_color(backdrop, source, opacity, type):
    """Keep, per pixel, the darker ("dark") or lighter color of the two layers.

    backdrop/source are RGBA arrays in the 0-255 range; opacity in [0, 1];
    type is "dark" (darker color wins) or anything else (lighter color wins).
    Returns a float RGBA array in the 0-255 range.

    Fix: the original converted every pixel through colorsys.rgb_to_hsv in a
    Python-level loop but only ever read the V channel.  HSV value is simply
    max(R, G, B) (see colorsys docs), so a vectorized channel-wise max gives
    the identical comparison without the O(H*W) Python loop.
    """
    # Normalize the RGB and alpha values to 0-1
    backdrop_norm = backdrop[:, :, :3] / 255
    source_norm = source[:, :, :3] / 255
    source_alpha_norm = source[:, :, 3] / 255

    # HSV "value" channel == max of the RGB channels.
    backdrop_value = backdrop_norm.max(axis=2)
    source_value = source_norm.max(axis=2)

    # Mask of pixels where the source wins the brightness comparison.
    if type == "dark":
        mask = source_value < backdrop_value
    else:
        mask = source_value > backdrop_value

    # Use the mask to select pixels from the source or backdrop
    blend = np.where(mask[..., None], source_norm, backdrop_norm)

    # Apply the source alpha (scaled by opacity) to the blended image
    new_rgb = (1 - source_alpha_norm[..., None] * opacity) * backdrop_norm + source_alpha_norm[..., None] * opacity * blend

    # Ensure the RGB values are within the valid range
    new_rgb = np.clip(new_rgb, 0, 1)

    # Convert the RGB values back to 0-255
    new_rgb = new_rgb * 255

    # New alpha is the per-pixel max of the two input alphas
    new_alpha = np.maximum(backdrop[:, :, 3], source[:, :, 3])

    return np.dstack((new_rgb, new_alpha))
225
+
226
def darker_color(backdrop, source, opacity):
    """Darker-color blend: per pixel, keep whichever layer is darker."""
    return darker_lighter_color(backdrop, source, opacity, "dark")

def lighter_color(backdrop, source, opacity):
    """Lighter-color blend: per pixel, keep whichever layer is lighter."""
    return darker_lighter_color(backdrop, source, opacity, "light")
231
+
232
def simple_mode(backdrop, source, opacity, mode):
    """Apply one of several formula-based blend modes.

    Args:
        backdrop, source: RGBA arrays in the 0-255 range, shape (H, W, 4).
        opacity: blend opacity in [0, 1].
        mode: one of "linear_burn", "linear_light", "color_dodge",
            "color_burn", "exclusion", "subtract", "vivid_light",
            "pin_light", "hard_mix".

    Returns:
        Float RGBA array in the 0-255 range; alpha is the per-pixel max of
        the two input alphas.

    NOTE(review): "color_dodge"/"color_burn" divide by (1 - source) / source
    and produce inf/nan where the divisor is zero before the clip — confirm
    inputs cannot contain pure white/black if that matters.  The
    "vivid_light" denominator (2*s - 0.5) also differs from the usual
    2*(s - 0.5) definition — preserved as-is to avoid changing output.
    An unrecognized `mode` raises UnboundLocalError (`blend` never assigned).
    """
    # Normalize the RGB and alpha values to 0-1
    backdrop_norm = backdrop[:, :, :3] / 255
    source_norm = source[:, :, :3] / 255
    source_alpha_norm = source[:, :, 3:4] / 255

    # Calculate the blend without any transparency considerations
    if mode == "linear_burn":
        blend = backdrop_norm + source_norm - 1
    elif mode == "linear_light":
        blend = backdrop_norm + (2 * source_norm) - 1
    elif mode == "color_dodge":
        blend = backdrop_norm / (1 - source_norm)
        blend = np.clip(blend, 0, 1)
    elif mode == "color_burn":
        blend = 1 - ((1 - backdrop_norm) / source_norm)
        blend = np.clip(blend, 0, 1)
    elif mode == "exclusion":
        blend = backdrop_norm + source_norm - (2 * backdrop_norm * source_norm)
    elif mode == "subtract":
        blend = backdrop_norm - source_norm
    elif mode == "vivid_light":
        blend = np.where(source_norm <= 0.5, backdrop_norm / (1 - 2 * source_norm), 1 - (1 -backdrop_norm) / (2 * source_norm - 0.5) )
        blend = np.clip(blend, 0, 1)
    elif mode == "pin_light":
        blend = np.where(source_norm <= 0.5, np.minimum(backdrop_norm, 2 * source_norm), np.maximum(backdrop_norm, 2 * (source_norm - 0.5)))
    elif mode == "hard_mix":
        # hard_mix = linear_light result thresholded to pure 0/1 channels.
        blend = simple_mode(backdrop, source, opacity, "linear_light")
        blend = np.round(blend[:, :, :3] / 255)

    # Composite the blended layer over the backdrop using alpha and opacity
    new_rgb = (1 - source_alpha_norm * opacity) * backdrop_norm + source_alpha_norm * opacity * blend

    # Ensure the RGB values are within the valid range
    new_rgb = np.clip(new_rgb, 0, 1)

    # Convert the RGB values back to 0-255
    new_rgb = new_rgb * 255

    # New alpha is the per-pixel max of the two input alphas
    new_alpha = np.maximum(backdrop[:, :, 3], source[:, :, 3])

    # Create a new RGBA image with the calculated RGB and alpha values
    result = np.dstack((new_rgb, new_alpha))

    return result
278
+
279
# Thin named wrappers so each formula mode can be referenced directly from
# the BLEND_MODES registry below with the common (backdrop, source, opacity)
# signature.
def linear_light(backdrop, source, opacity):
    return simple_mode(backdrop, source, opacity, "linear_light")
def vivid_light(backdrop, source, opacity):
    return simple_mode(backdrop, source, opacity, "vivid_light")
def pin_light(backdrop, source, opacity):
    return simple_mode(backdrop, source, opacity, "pin_light")
def hard_mix(backdrop, source, opacity):
    return simple_mode(backdrop, source, opacity, "hard_mix")
def linear_burn(backdrop, source, opacity):
    return simple_mode(backdrop, source, opacity, "linear_burn")
def color_dodge(backdrop, source, opacity):
    return simple_mode(backdrop, source, opacity, "color_dodge")
def color_burn(backdrop, source, opacity):
    return simple_mode(backdrop, source, opacity, "color_burn")
def exclusion(backdrop, source, opacity):
    return simple_mode(backdrop, source, opacity, "exclusion")
def subtract(backdrop, source, opacity):
    return simple_mode(backdrop, source, opacity, "subtract")
297
+
298
# Registry mapping UI blend-mode names to implementations.  Entries imported
# from the third-party `blend_modes` package share the same
# (backdrop, source, opacity) calling convention as the local functions.
BLEND_MODES = {
    "normal": normal,
    "dissolve": dissolve,
    "darken": darken_only,
    "multiply": multiply,
    "color burn": color_burn,
    "linear burn": linear_burn,
    "darker color": darker_color,
    "lighten": lighten_only,
    "screen": screen,
    "color dodge": color_dodge,
    "linear dodge(add)": addition,
    "lighter color": lighter_color,
    "dodge": dodge,
    "overlay": overlay,
    "soft light": soft_light,
    "hard light": hard_light,
    "vivid light": vivid_light,
    "linear light": linear_light,
    "pin light": pin_light,
    "hard mix": hard_mix,
    "difference": difference,
    "exclusion": exclusion,
    "subtract": subtract,
    "divide": divide,
    "hue": hue,
    "saturation": saturation,
    "color": color,
    "luminosity": luminance,
    "grain extract": grain_extract,
    "grain merge": grain_merge
}
kim_comfyui_data/custom_nodes/Cheny_custom_nodes/face_create_hole.py ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ import os
3
+
4
+ from PIL import Image,ImageDraw
5
+ import glob
6
+ import numpy as np
7
+ import cv2
8
+ from ultralytics import YOLO
9
+
10
def tensor2pil(image):
    """Convert a ComfyUI image tensor (values in [0, 1]) to a PIL image."""
    # Guarantee a leading batch dimension so indexing [0] is always valid.
    batched = image.unsqueeze(0) if len(image.shape) < 3 else image
    # ComfyUI tensors are normalized; PIL expects 0-255 uint8.
    return Image.fromarray((batched[0].cpu().numpy() * 255).astype(np.uint8))
14
+
15
def pil2tensor(image):
    """Convert a PIL image to a ComfyUI image tensor of shape (1, H, W, 3).

    Fix: the original normalization read ``/255.-`` (a dangling minus sign),
    which is a SyntaxError and prevented this module from importing at all;
    the intended operation is division by 255.
    """
    new_image = image.convert('RGB')
    new_array = np.array(new_image).astype(np.float32) / 255.0
    new_tensor = torch.tensor(new_array)
    # Add the leading batch dimension expected by ComfyUI.
    return new_tensor.unsqueeze(0)
20
+
kim_comfyui_data/custom_nodes/Cheny_custom_nodes/find_crop_box.py ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ import numpy as np
3
+ from PIL import Image
4
+
5
def tensor2np(image):
    """Convert a [0, 1] image tensor to a 0-255 uint8 numpy array (batch kept)."""
    batched = image if len(image.shape) >= 3 else image.unsqueeze(0)
    return (batched.cpu().numpy() * 255).astype(np.uint8)
10
+
11
def np2tensor(image):
    """Convert a 0-255 numpy image into a batched float tensor in [0, 1]."""
    normalized = torch.tensor(image.astype(np.float32) / 255)
    # Add the leading batch dimension expected by ComfyUI.
    return normalized.unsqueeze(0)
15
+
16
class FindCropBox:
    """Locate the (x, y, w, h) box where `source` appears verbatim inside
    `destination`.

    Scans every possible placement in row-major order (comparing uint8 pixel
    data) and returns the first exact match, or (0, 0, 0, 0) when the source
    never matches.
    """
    CATEGORY="My Nodes/Find Crop Box"
    RETURN_TYPES=("INT","INT","INT","INT")
    RETURN_NAMES=("x","y","width","height")
    FUNCTION="find_crop_box"

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required":{
                "destination":("IMAGE",),
                "source":("IMAGE",),
            },
        }

    @staticmethod
    def _to_uint8(image):
        """Same conversion as the module-level tensor2np helper, kept here so
        the class is self-contained and testable in isolation."""
        if len(image.shape) < 3:
            image = image.unsqueeze(0)
        return (image.cpu().numpy() * 255).astype(np.uint8)

    def find_crop_box(self, destination, source):
        """Return (x, y, width, height) of the first exact match, else zeros."""
        # Only the first batch entry is searched, as before.
        dest = self._to_uint8(destination)[0]
        # Fix: the search target is loop-invariant — the original re-sliced
        # it from the source array inside the inner loop on every candidate
        # position; compute it once here instead.
        target = self._to_uint8(source)[0]

        s_h, s_w = target.shape[0], target.shape[1]
        h_bound = dest.shape[0] - s_h
        w_bound = dest.shape[1] - s_w

        # Slide an (s_h, s_w) window over the destination, row-major.
        for i in range(h_bound + 1):
            for j in range(w_bound + 1):
                if np.array_equal(dest[i:i + s_h, j:j + s_w, ...], target):
                    return (j, i, s_w, s_h)

        return (0, 0, 0, 0,)
52
+
53
# ComfyUI registration tables: class map and UI display names.
NODE_CLASS_MAPPINGS={
    "FindCropBox":FindCropBox
}

NODE_DISPLAY_NAME_MAPPINGS={
    "FindCropBox":"Find Crop Box(My Node)"
}
60
+
61
+
kim_comfyui_data/custom_nodes/Cheny_custom_nodes/half_person_check.py ADDED
@@ -0,0 +1,256 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from PIL import Image
2
+ import numpy as np
3
+ import cv2
4
+ import torch
5
+ from ultralytics import YOLO
6
+
7
def tensor2pil(image):
    """Turn a ComfyUI image tensor ([0, 1] values) into a PIL image."""
    # Ensure a leading batch dimension so [0] always selects a full image.
    batched = image.unsqueeze(0) if len(image.shape) < 3 else image
    # ComfyUI tensors are normalized; PIL wants 0-255 uint8.
    return Image.fromarray((batched[0].cpu().numpy() * 255).astype(np.uint8))
12
+
13
def pil2tensor(image):
    """Turn a PIL image into a ComfyUI image tensor of shape (1, H, W, 3)."""
    rgb = np.asarray(image.convert('RGB'), dtype=np.float32) / 255
    # Add the leading batch dimension (size 1) expected by ComfyUI.
    return torch.tensor(rgb).unsqueeze(0)
18
+
19
class half_person_check:
    """YOLOv8-pose based detector that decides whether an image contains at
    least a half-body (knees or ankles visible) person, and can draw the
    detected skeleton."""

    def __init__(self, model_path="/data/models/yolo/yolov8n-pose.pt"):
        # Load the YOLOv8 pose model once; reused for every detection call.
        self.model = YOLO(model_path)
        # COCO keypoint name -> index in the 17-point pose output.
        self.KEYPOINTS = {
            "nose": 0, "left_eye": 1, "right_eye": 2, "left_ear": 3, "right_ear": 4,
            "left_shoulder": 5, "right_shoulder": 6, "left_elbow": 7, "right_elbow": 8,
            "left_wrist": 9, "right_wrist": 10, "left_hip": 11, "right_hip": 12,
            "left_knee": 13, "right_knee": 14, "left_ankle": 15, "right_ankle": 16
        }

        # Keypoint index pairs connected when drawing the skeleton.
        self.SKELETON = [
            # head
            [0, 1], [0, 2], [1, 3], [2, 4],  # nose->eyes->ears
            # torso
            [5, 6], [5, 11], [6, 12], [11, 12],  # shoulders, hips
            # left arm
            [5, 7], [7, 9],  # left_shoulder->left_elbow->left_wrist
            # right arm
            [6, 8], [8, 10],  # right_shoulder->right_elbow->right_wrist
            # left leg
            [11, 13], [13, 15],  # left_hip->left_knee->left_ankle
            # right leg
            [12, 14], [14, 16],  # right_hip->right_knee->right_ankle
        ]

        # Per-keypoint draw colors (BGR order, matching OpenCV).
        self.KP_COLORS = [
            (255, 0, 0),  # nose - red
            (255, 85, 0),  # left_eye
            (255, 170, 0),  # right_eye
            (255, 255, 0),  # left_ear
            (170, 255, 0),  # right_ear
            (85, 255, 0),  # left_shoulder
            (0, 255, 0),  # right_shoulder - green
            (0, 255, 85),  # left_elbow
            (0, 255, 170),  # right_elbow
            (0, 255, 255),  # left_wrist
            (0, 170, 255),  # right_wrist
            (0, 85, 255),  # left_hip
            (0, 0, 255),  # right_hip - blue
            (85, 0, 255),  # left_knee
            (170, 0, 255),  # right_knee
            (255, 0, 255),  # left_ankle
            (255, 0, 170),  # right_ankle
        ]

        # Skeleton line color (BGR) - yellow.
        self.LINE_COLOR = (0, 255, 255)

    def check_half_person_from_array(self, image_bgr, knee_conf=0.5, ankle_conf=0.8):
        """
        Detect pose on a BGR numpy array.

        Returns None when no person is detected; otherwise a tuple of
        (image_bgr, best_keypoints, boolean) where `boolean` is True when a
        knee keypoint exceeds `knee_conf` or an ankle keypoint exceeds
        `ankle_conf` (i.e. at least the lower half of a body is visible).
        """
        # conf: detection confidence threshold (0.0-1.0).  Only bounding
        # boxes whose confidence is >= conf are returned; lower values are
        # more permissive (more false positives), higher values stricter.
        results = self.model(image_bgr, conf=0.5)

        if len(results[0].boxes) == 0:
            return None

        # All detected poses for the first image.
        # keypoints.data shape: [num_persons, 17, 3]
        # Third dim layout: [x, y, confidence]
        keypoints = results[0].keypoints.data.cpu().numpy()

        # First detected person only.
        # best_keypoints shape: [17, 3] - each keypoint is [x, y, confidence].
        # Note: the model always returns all 17 keypoints; invisible ones
        # still carry coordinates but with near-zero confidence.
        best_keypoints = keypoints[0].tolist()

        # Check for lower-body visibility.
        # best_keypoints[index] is a length-3 list [x, y, confidence],
        # e.g. [123.45, 456.78, 0.95].
        left_knee = best_keypoints[self.KEYPOINTS["left_knee"]]  # [x, y, confidence]
        right_knee = best_keypoints[self.KEYPOINTS["right_knee"]]  # [x, y, confidence]
        left_ankle = best_keypoints[self.KEYPOINTS["left_ankle"]]  # [x, y, confidence]
        right_ankle = best_keypoints[self.KEYPOINTS["right_ankle"]]  # [x, y, confidence]


        if left_knee[2] > knee_conf or right_knee[2] > knee_conf:
            boolean=True
            return image_bgr, best_keypoints, boolean
        elif left_ankle[2] > ankle_conf or right_ankle[2] > ankle_conf:
            boolean=True
            return image_bgr, best_keypoints, boolean
        else:
            boolean=False
            return image_bgr, best_keypoints, boolean

    def check_half_person(self,image_path,knee_conf=0.5,ankle_conf=0.8):
        """Same as check_half_person_from_array, but loads the image from disk."""
        image=Image.open(image_path)
        image=np.array(image)
        # RGB (PIL) to BGR (OpenCV/YOLO convention used by this class).
        image=cv2.cvtColor(image,cv2.COLOR_RGB2BGR)

        return self.check_half_person_from_array(image, knee_conf, ankle_conf)



    def draw_pose(self, image, keypoints_list, confidence_threshold=0.5, line_thickness=4, point_radius=5):
        """
        Draw the pose skeleton on a copy of the image.

        Args:
            image: original image (BGR format)
            keypoints_list: [17, 3] keypoints, each [x, y, confidence]
            confidence_threshold: keypoints below this confidence are skipped
            line_thickness: skeleton line width
            point_radius: keypoint circle radius

        Returns:
            The annotated image (input image is not modified).
        """
        # Copy so the caller's image is untouched.
        pose_image = image.copy()

        # Accept either a plain list or an ndarray of keypoints.
        if isinstance(keypoints_list, list):
            keypoints = np.array(keypoints_list)
        else:
            keypoints = keypoints_list

        # Draw the skeleton connections.
        for connection in self.SKELETON:
            pt1_idx, pt2_idx = connection
            pt1 = keypoints[pt1_idx]
            pt2 = keypoints[pt2_idx]

            # Only draw a bone when both endpoints clear the threshold.
            if pt1[2] > confidence_threshold and pt2[2] > confidence_threshold:
                pt1_coord = (int(pt1[0]), int(pt1[1]))
                pt2_coord = (int(pt2[0]), int(pt2[1]))
                cv2.line(pose_image, pt1_coord, pt2_coord, self.LINE_COLOR, line_thickness)

        # Draw the keypoints themselves.
        for i, kp in enumerate(keypoints):
            x, y, conf = kp
            if conf > confidence_threshold:
                center = (int(x), int(y))
                color = self.KP_COLORS[i]
                # Filled circle for the point itself...
                cv2.circle(pose_image, center, point_radius, color, -1)
                # ...plus a white outline to make it stand out.
                cv2.circle(pose_image, center, point_radius + 2, (255, 255, 255), 1)

        return pose_image
171
+
172
+
173
+
174
class half_person_check_node:
    """ComfyUI node wrapping half_person_check: outputs the half-body verdict
    plus a copy of the image with the detected pose skeleton drawn on it."""
    CATEGORY="My Nodes/half person check"
    RETURN_TYPES=("BOOLEAN","IMAGE",)
    RETURN_NAMES=("boolean","pose_image",)
    FUNCTION="check_half"

    def __init__(self):
        # Construct the detector once so the YOLO model loads only once.
        self.checker = half_person_check()

    @classmethod
    def INPUT_TYPES(cls):
        return{
            "required":{
                "image":("IMAGE",),
            },
            "optional":{
                # Confidence thresholds for the knee / ankle keypoints that
                # decide the half-body verdict.
                "knee_conf":("FLOAT",{
                    "default":0.5,
                    "min":0.0,
                    "max":1.0,
                    "step":0.01,
                }),
                "ankle_conf":("FLOAT",{
                    "default":0.7,
                    "min":0.0,
                    "max":1.0,
                    "step":0.01,
                }),
                # Minimum keypoint confidence for drawing the skeleton.
                "draw_conf":("FLOAT",{
                    "default":0.5,
                    "min":0.0,
                    "max":1.0,
                    "step":0.01,
                }),
                "line_thickness":("INT",{
                    "default":4,
                    "min":1,
                    "max":10,
                    "step":1,
                }),
                "point_radius":("INT",{
                    "default":5,
                    "min":1,
                    "max":10,
                    "step":1,
                })
            }
        }

    def check_half(self, image, knee_conf, ankle_conf, draw_conf, line_thickness, point_radius):
        """Run pose detection and return (verdict, annotated image tensor)."""
        # Tensor -> PIL image.
        pil_image = tensor2pil(image)
        # PIL -> numpy, then RGB -> BGR for OpenCV/YOLO.
        image_np = np.array(pil_image)
        image_bgr = cv2.cvtColor(image_np, cv2.COLOR_RGB2BGR)

        # Run pose detection on the BGR numpy array.
        result = self.checker.check_half_person_from_array(image_bgr, knee_conf, ankle_conf)

        if result is None:
            # No person detected: return the original image and False.
            return (False, image,)

        result_img, result_keypoints, result_boolean = result

        # Draw the detected pose skeleton.
        pose_image = self.checker.draw_pose(result_img, result_keypoints, draw_conf, line_thickness, point_radius)

        # BGR back to RGB, then back to a ComfyUI tensor.
        pose_image_rgb = cv2.cvtColor(pose_image, cv2.COLOR_BGR2RGB)
        pose_image_pil = Image.fromarray(pose_image_rgb)
        pose_image_tensor = pil2tensor(pose_image_pil)

        return (result_boolean, pose_image_tensor,)
249
+
250
# ComfyUI registration tables: class map and UI display names.
NODE_CLASS_MAPPINGS={
    "half_person_check_node":half_person_check_node
}

NODE_DISPLAY_NAME_MAPPINGS={
    "half_person_check_node":"half_person_check_node(My Node)"
}
kim_comfyui_data/custom_nodes/Cheny_custom_nodes/image_center_paste.py ADDED
@@ -0,0 +1,118 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ import numpy as np
3
+ from PIL import Image
4
+
5
def tensor2pil(image):
    """Turn a ComfyUI image tensor ([0, 1] values) into a PIL image."""
    # Ensure a leading batch dimension so [0] always selects a full image.
    batched = image.unsqueeze(0) if len(image.shape) < 3 else image
    # Scale the normalized tensor back to 0-255 uint8 for PIL.
    return Image.fromarray((batched[0].cpu().numpy() * 255).astype(np.uint8))
10
+
11
def pil2tensor(image):
    """Turn a PIL image into a ComfyUI image tensor of shape (1, H, W, 3)."""
    rgb = np.asarray(image.convert('RGB'), dtype=np.float32) / 255
    # Add the size-1 batch dimension expected by ComfyUI.
    return torch.tensor(rgb).unsqueeze(0)
16
+
17
def mask2tensor(mask):
    """Turn a PIL mask into a single-channel mask tensor of shape (1, H, W)."""
    gray = np.asarray(mask.convert("L"), dtype=np.float32) / 255
    # Add the size-1 batch dimension expected by ComfyUI masks.
    return torch.tensor(gray).unsqueeze(0)
22
+
23
class Image_center_paste:
    """ComfyUI node: crop an image to its mask's bounding box, scale it so its
    longer side is `ratio` of the background's longer side, and paste it onto
    the background at a position controlled by `x_ratio`/`y_ratio`."""
    # Node category (position in the ComfyUI menu).
    CATEGORY="My Nodes/Image center paste"
    # Node output types (other options: MASK, STRING, INT, FLOAT, BOOLEAN,
    # LATENT, CONDITIONING).
    RETURN_TYPES=("IMAGE","MASK",)
    RETURN_NAMES=("paste_img","mask")
    FUNCTION="image_paste"

    @classmethod
    def INPUT_TYPES(cls):
        # Declare the node's input parameters.
        return{
            # "required" inputs must be wired up.
            "required":{
                "need_paste_image":("IMAGE",),  # the widget dict supports default/min/max etc.
                "back_image":("IMAGE",),
                "mask":("MASK",),
            },
            # "optional" inputs have widgets with defaults.
            "optional":{
                # Fraction of the background's longer side the pasted image
                # should occupy.
                "ratio":("FLOAT",{
                    "default":float(2/3),
                    "min":0.01,
                    "max":1.0,
                    "step":0.1,
                    "display":"number"  # show as a number input box
                }),
                # Horizontal placement: fraction of the free space kept on
                # the right of the pasted image.
                "x_ratio":("FLOAT",{
                    "default":float(0.5),
                    "min":0.01,
                    "max":1.0,
                    "step":0.01,
                    "display":"number"
                }),
                # Vertical placement: fraction of the free space kept below
                # the pasted image.
                "y_ratio":("FLOAT",{
                    "default":float(1/10),
                    "min":0.01,
                    "max":1.0,
                    "step":0.01,
                    "display":"number"
                })
            }
        }


    def image_paste(self,need_paste_image,back_image,mask,ratio,x_ratio,y_ratio):
        """Crop, scale and paste `need_paste_image` onto `back_image`.

        Returns (pasted image tensor, resized mask tensor).  Raises
        ValueError when the scaled image would not fit in the background
        (the message asks the user to reduce `ratio`).
        """
        np_image=tensor2pil(need_paste_image)
        bd_image=tensor2pil(back_image)
        mask=tensor2pil(mask).convert("L")

        bd_w,bd_h=bd_image.size

        # Target length of the pasted image's longer side, in pixels.
        target=ratio*max(bd_h,bd_w)

        # Crop both the image and its mask down to the mask's bounding box.
        # NOTE(review): getbbox() returns None for an all-black mask, which
        # would make crop() a no-op here — confirm masks are never empty.
        m_bbox=mask.getbbox()
        np_image=np_image.crop(m_bbox)
        mask=mask.crop(m_bbox)

        np_w,np_h=np_image.size

        # Uniform scale factor so the longer side matches `target`.
        change_size=target/max(np_h,np_w)
        new_w,new_h=int(np_w*change_size),int(np_h*change_size)

        if new_w>=bd_w-1 or new_h>=bd_h-1:
            # Scaled image would not fit inside the background.
            raise ValueError(f'缩放图片的长宽不匹配,请调小ratio值')

        # Free space left in each direction once the image is placed.
        x=int((bd_w-new_w))
        y=int((bd_h-new_h))

        # Shift left/up by the requested fraction of that free space.
        x_trap_padding=int(x_ratio*x)
        x=x-x_trap_padding

        y_trap_padding=int(y_ratio*y)
        y=y-y_trap_padding

        np_image=np_image.resize((new_w,new_h),Image.LANCZOS)
        mask=mask.resize((new_w,new_h),Image.LANCZOS)

        np_image=np_image.convert("RGBA")
        result_image=bd_image.copy().convert("RGBA")

        # Paste through the mask so only the masked region lands.
        result_image.paste(np_image,(x,y),mask)
        result_image=pil2tensor(result_image.convert("RGB"))
        mask=mask2tensor(mask)

        return (result_image,mask,)
109
+
110
# Map the node class to its unique identifier.
NODE_CLASS_MAPPINGS={
    "Image_center_paste":Image_center_paste
}

# Display name shown in the UI (optional; defaults to the class name).
NODE_DISPLAY_NAME_MAPPINGS={
    "Image_center_paste":"Image_center_paste(My Node)"
}
kim_comfyui_data/custom_nodes/Cheny_custom_nodes/imagefunc.py ADDED
The diff for this file is too large to render. See raw diff
 
kim_comfyui_data/custom_nodes/Cheny_custom_nodes/mask_solid_area.py ADDED
@@ -0,0 +1,68 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ from PIL import Image,ImageDraw
3
+ import numpy as np
4
+
5
+
6
+
7
def mask2pil(mask):
    """Turn a mask tensor with values in [0, 1] into an 8-bit PIL image."""
    # Ensure a leading batch dimension so [0] always selects a full mask.
    batched = mask.unsqueeze(0) if len(mask.shape) < 3 else mask
    return Image.fromarray((batched[0].cpu().numpy() * 255).astype(np.uint8))
11
+
12
+
13
def mask2tensor(mask):
    """Turn a PIL mask into an (H, W) float tensor in [0, 1] (no batch dim)."""
    gray = np.asarray(mask.convert('L'), dtype=np.float32) / 255.0
    # Unlike the image helpers, callers add the batch dimension themselves.
    return torch.tensor(gray)
18
+
19
class MaskSolidArea:
    """ComfyUI node: replace a mask with a solid rounded rectangle that
    covers the mask's bounding box."""

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "mask": ("MASK",),
            },
            'optional':{
                'radius':('INT',{
                    'default':100,
                    'min':0,
                    'max':1000,
                    # Fix: this key was misspelled 'steo', so ComfyUI
                    # silently ignored the intended step setting.
                    'step':1,
                })
            }
        }

    CATEGORY = "My Nodes/mask_solid_area"

    RETURN_TYPES = ("MASK",)
    RETURN_NAMES = ("MASKS",)

    FUNCTION = "cut"
    def cut(self, mask, radius):
        """Return a mask that is a filled rounded rectangle over the input's
        bounding box.

        Args:
            mask: mask tensor of shape (batch, H, W) with values in [0, 1].
            radius: corner radius in pixels of the rounded rectangle.
        """
        mask_pil = mask2pil(mask)
        mask_bbox = mask_pil.getbbox()
        need_paste_mask = Image.new('L', mask_pil.size, "black")
        # An empty mask has no bounding box; pass it through unchanged.
        if mask_bbox is None:
            return (mask,)
        draw = ImageDraw.Draw(need_paste_mask)
        draw.rounded_rectangle(
            [mask_bbox[0], mask_bbox[1], mask_bbox[2], mask_bbox[3]],
            radius=radius,
            fill=255
        )

        new_mask = mask2tensor(need_paste_mask)
        # mask2tensor returns (H, W); add the batch dimension back.
        return (new_mask.unsqueeze(0),)
61
+
62
# ComfyUI registration tables: class map and UI display names.
NODE_CLASS_MAPPINGS={
    'MaskSolidArea':MaskSolidArea
}

NODE_DISPLAY_NAME_MAPPINGS={
    'MaskSolidArea':'MaskSolidArea(My Node)'
}
kim_comfyui_data/custom_nodes/Cheny_custom_nodes/node.py ADDED
@@ -0,0 +1,145 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # my_nodes.py
2
+ import torch
3
+ import numpy as np
4
+ from PIL import Image
5
+
6
+
7
+ # --------------------------
8
+ # 节点 1:图像缩放节点
9
+ # --------------------------
10
class ImageResizeWithPreview:
    """
    Image resize node with user-selectable size and interpolation filter.
    """
    # Node category (location in the ComfyUI menu)
    CATEGORY = "My Nodes/Image Processing"
    # Node output types
    RETURN_TYPES = ("IMAGE",)
    RETURN_NAMES = ("resized_image",)
    FUNCTION = "resize_image"

    @classmethod
    def INPUT_TYPES(cls):
        """Define the node's input parameters."""
        return {
            "required": {
                "image": ("IMAGE",),  # input image (ComfyUI built-in type)
                "width": ("INT", {
                    "default": 512,
                    "min": 64,
                    "max": 4096,
                    "step": 64,
                    "display": "number"
                }),
                "height": ("INT", {
                    "default": 512,
                    "min": 64,
                    "max": 4096,
                    "step": 64,
                    "display": "number"
                }),
                "interpolation": (["lanczos", "bilinear", "nearest"],),  # resampling filter
            }
        }

    def resize_image(self, image, width, height, interpolation):
        """
        Resize every image in the batch.

        Args:
            image: input tensor [batch, H, W, C], values nominally in [0, 1].
            width: target width in pixels.
            height: target height in pixels.
            interpolation: one of "lanczos", "bilinear", "nearest".

        Returns:
            Tuple with the resized [batch, height, width, C] float tensor in [0, 1].
        """
        # Map the choice onto the corresponding PIL resampling filter.
        interp_map = {
            "lanczos": Image.LANCZOS,
            "bilinear": Image.BILINEAR,
            "nearest": Image.NEAREST
        }
        pil_interp = interp_map[interpolation]

        resized_images = []
        for img in image:
            # Tensor -> PIL. Clamp to [0, 1] first so slightly out-of-range values
            # (common after latent decodes) cannot wrap around when cast to uint8.
            img_np = np.clip(img.cpu().numpy(), 0.0, 1.0)
            pil_img = Image.fromarray((img_np * 255).astype(np.uint8))

            pil_img = pil_img.resize((width, height), pil_interp)

            # PIL -> tensor: back to float32 in [0, 1].
            resized_images.append(np.array(pil_img).astype(np.float32) / 255.0)

        # Stack back into a [batch, H, W, C] tensor.
        resized_tensor = torch.tensor(np.stack(resized_images, axis=0))
        return (resized_tensor,)
83
+
84
+
85
+ # --------------------------
86
+ # 节点 2:文本拼接节点
87
+ # --------------------------
88
class TextConcatenator:
    """Join up to three text inputs with a configurable separator, skipping empties."""
    CATEGORY = "My Nodes/Text Processing"
    RETURN_TYPES = ("STRING",)
    RETURN_NAMES = ("concatenated_text",)
    FUNCTION = "concat_texts"

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "text1": ("STRING", {"default": "Hello", "multiline": False}),
                "text2": ("STRING", {"default": "World", "multiline": False}),
            },
            "optional": {
                "separator": ("STRING", {"default": " ", "multiline": False}),
                "text3": ("STRING", {"default": "", "multiline": False}),
            }
        }

    def concat_texts(self, text1, text2, separator=" ", text3=""):
        """Concatenate the non-empty inputs, joined by `separator`."""
        non_empty = [piece for piece in (text1, text2, text3) if piece]
        return (separator.join(non_empty),)
129
+
130
+
131
+ # --------------------------
132
+ # 节点注册(必须)
133
+ # --------------------------
134
+ # 将节点类映射到唯一标识符
135
+ NODE_CLASS_MAPPINGS = {
136
+ "ImageResizeWithPreview": ImageResizeWithPreview,
137
+ "TextConcatenator": TextConcatenator
138
+ }
139
+
140
+ # 节点在 UI 中显示的名称(可选,默认使用类名)
141
+ NODE_DISPLAY_NAME_MAPPINGS = {
142
+ "ImageResizeWithPreview": "Image Resize (My Node)",
143
+ "TextConcatenator": "Text Concatenator (My Node)"
144
+ }
145
+
kim_comfyui_data/custom_nodes/Cheny_custom_nodes/reduce_mask_by_ratio.py ADDED
@@ -0,0 +1,127 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ import numpy as np
3
+ from PIL import Image
4
+
5
def tensor2np(image):
    """First element of a (B, H, W[, C]) float tensor as a uint8 array in [0, 255]."""
    if image.dim() < 3:
        image = image.unsqueeze(0)
    return (image[0].cpu().numpy() * 255).astype(np.uint8)

def tensor2pil(image):
    """First element of a float tensor batch as an 8-bit PIL image."""
    if image.dim() < 3:
        image = image.unsqueeze(0)
    data = (image[0].cpu().numpy() * 255).astype(np.uint8)
    return Image.fromarray(data)

def mask2tensor(image):
    """PIL image -> (1, H, W) float32 mask tensor scaled to [0, 1]."""
    gray = np.array(image.convert("L")).astype(np.float32) / 255.0
    return torch.tensor(gray).unsqueeze(0)

def np2tensor(image):
    """uint8 array in [0, 255] -> (1, ...) float32 tensor scaled to [0, 1]."""
    scaled = torch.tensor(image.astype(np.float32) / 255.0)
    return scaled.unsqueeze(0)
26
+
27
class ReduceMaskByRatio:
    """ComfyUI node: shrink a mask's bounding box by a ratio and return a solid
    rectangular mask of the reduced area plus the crop box coordinates.

    Methods:
        "Original"            - shrink symmetrically by `ratio` in both axes.
        "Center Width/Height" - shrink symmetrically by `x_ratio` / `y_ratio`.
        "Face Width/Height"   - shrink symmetrically in x, but only from the top
                                in y (by `up_y_ratio`); useful for face masks.
    """
    CATEGORY = "My Nodes/Reduce Mask By Ratio"
    RETURN_TYPES = ("MASK", "BOX", "INT", "INT", "INT", "INT",)
    RETURN_NAMES = ("mask", "crop_box", "x", "y", "width", "height",)
    FUNCTION = "reduce_mask_by_ratio"

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {"mask": ("MASK",), },
            "optional": {
                "method": (["Original", "Center Width/Height", "Face Width/Height"], {"default": "Original"}),
                "ratio": ("FLOAT", {
                    "default": 0.1,
                    "min": 0.0,
                    "max": 1.0,
                    "step": 0.01,
                    "display": "number"
                }),
                "x_ratio": ("FLOAT", {
                    "default": 0.1,
                    "min": 0.0,
                    "max": 1.0,
                    "step": 0.01,
                    "display": "number"
                }),
                "y_ratio": ("FLOAT", {
                    "default": 0.1,
                    "min": 0.0,
                    "max": 1.0,
                    "step": 0.01,
                    "display": "number"
                }),
                "up_y_ratio": ("FLOAT", {
                    "default": 0.1,
                    "min": 0.0,
                    "max": 1.0,
                    "step": 0.01,
                    "display": "number"
                }),
            }
        }

    def _solid_mask(self, size, box):
        """Build a black mask of `size` with a solid white rectangle at box=(x, y, w, h)."""
        x, y, w, h = box
        canvas = Image.new("L", size=size, color=0)
        canvas.paste(Image.new("L", size=(w, h), color=255), (x, y))
        return mask2tensor(canvas)

    def reduce_mask_by_ratio(self, mask, method, ratio, x_ratio, y_ratio, up_y_ratio):
        """Compute the reduced bbox per `method` and return (mask, box, x, y, w, h).

        Raises:
            ValueError: if `method` is not one of the supported choices
                (previously an unknown method silently returned None).
        """
        mask_pil = tensor2pil(mask).convert("L")
        mask_bbox = mask_pil.getbbox()
        if mask_bbox is None:
            # Empty mask: nothing to reduce. Previously this crashed unpacking
            # None; now return an all-black mask and a zero-sized box.
            empty = mask2tensor(Image.new("L", size=mask_pil.size, color=0))
            return (empty, (0, 0, 0, 0), 0, 0, 0, 0,)

        # PIL getbbox returns (left, upper, right, lower).
        x1, y1, x2, y2 = mask_bbox
        w, h = x2 - x1, y2 - y1

        if method == "Original":
            reduce_w, reduce_h = w * ratio, h * ratio
            new_x, new_y = int(x1 + reduce_w / 2), int(y1 + reduce_h / 2)
        elif method == "Center Width/Height":
            reduce_w, reduce_h = w * x_ratio, h * y_ratio
            new_x, new_y = int(x1 + reduce_w / 2), int(y1 + reduce_h / 2)
        elif method == "Face Width/Height":
            # Face mode: shrink symmetrically in x, from the top only in y.
            reduce_w, reduce_h = w * x_ratio, h * up_y_ratio
            new_x, new_y = int(x1 + reduce_w / 2), int(y1 + reduce_h)
        else:
            raise ValueError(f"ReduceMaskByRatio: unknown method {method!r}")

        new_w, new_h = int(w - reduce_w), int(h - reduce_h)
        new_mask = self._solid_mask(mask_pil.size, (new_x, new_y, new_w, new_h))
        return (new_mask, (new_x, new_y, new_w, new_h), new_x, new_y, new_w, new_h,)
119
+
120
# ComfyUI registration: node id -> class, and UI display name.
NODE_CLASS_MAPPINGS={
    "ReduceMaskByRatio":ReduceMaskByRatio
}
NODE_DISPLAY_NAME_MAPPINGS={
    "ReduceMaskByRatio":"Reduce Mask By Ratio(My Nodes)"
}
126
+
127
+
kim_comfyui_data/custom_nodes/Cheny_custom_nodes/yolov8_detect.py ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import os.path
3
+ from .imagefunc import *
4
+
5
+ import torch
6
+ from PIL import Image
7
+ import numpy as np
8
+ from ultralytics import YOLO
9
+ import folder_paths
10
+ import cv2
11
+
12
+ model_path = os.path.join(folder_paths.models_dir, 'yolo')
13
+
14
class YoloV8Detect:
    """ComfyUI node: run a YOLOv8 model on each input image and return the merged
    detection mask per image, the annotated plot image(s), and every raw
    per-detection mask.

    Helper functions (get_files, tensor2pil, pil2tensor, np2pil, image2mask,
    add_mask, log) come from the star import of .imagefunc at the top of this
    file — their exact semantics are not visible here.
    """

    def __init__(self):
        # Name used as a prefix in log messages.
        self.NODE_NAME = 'YoloV8Detect'


    @classmethod
    def INPUT_TYPES(self):
        # Offer every *.pt file found under models/yolo as a selectable model.
        model_ext = [".pt"]
        FILES_DICT = get_files(model_path, model_ext)
        FILE_LIST = list(FILES_DICT.keys())
        # "all" merges every detection; a digit merges only the first N detections.
        mask_merge = ["all", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
        return {
            "required": {
                "image": ("IMAGE", ),
                "yolo_model": (FILE_LIST,),
                "mask_merge": (mask_merge,),
                "conf_threshold": ("FLOAT", {"default": 0.25, "min": 0.0, "max": 1.0, "step": 0.01}),
            },
            "optional": {
            }
        }

    RETURN_TYPES = ("MASK", "IMAGE", "MASK" )
    RETURN_NAMES = ("mask", "yolo_plot_image", "yolo_masks")
    FUNCTION = 'yolo_detect'
    CATEGORY = 'My Nodes/yolov8 detect'

    def yolo_detect(self, image,
                    yolo_model, mask_merge, conf_threshold
                    ):
        """Detect objects in each batch image.

        Args:
            image: IMAGE batch tensor.
            yolo_model: filename of the YOLOv8 checkpoint under models/yolo.
            mask_merge: "all" or "1".."9" — how many detections to merge per image.
            conf_threshold: YOLO confidence threshold.

        Returns:
            (merged masks [B,...], plot images, all raw detection masks).
        """
        ret_masks = []
        ret_yolo_plot_images = []
        ret_yolo_masks = []

        # NOTE(review): the model is re-loaded from disk on every execution —
        # confirm whether caching it is worthwhile.
        yolo_model = YOLO(os.path.join(model_path, yolo_model))

        for i in image:
            i = torch.unsqueeze(i, 0)
            _image = tensor2pil(i)
            results = yolo_model(_image, retina_masks=True, conf=conf_threshold)
            for result in results:
                # result.plot() is BGR (OpenCV convention); convert for ComfyUI.
                yolo_plot_image = cv2.cvtColor(result.plot(), cv2.COLOR_BGR2RGB)
                ret_yolo_plot_images.append(pil2tensor(Image.fromarray(yolo_plot_image)))
                # Segmentation masks available: use them directly.
                if result.masks is not None and len(result.masks) > 0:
                    masks_data = result.masks.data
                    for mask in masks_data:
                        _mask = mask.cpu().numpy() * 255
                        _mask = np2pil(_mask).convert("L")
                        ret_yolo_masks.append(image2mask(_mask))
                # No masks but boxes exist: rasterize each box as a solid mask.
                elif result.boxes is not None and len(result.boxes.xyxy) > 0:
                    white_image = Image.new('L', _image.size, "white")
                    for box in result.boxes:
                        x1, y1, x2, y2 = box.xyxy[0].cpu().numpy()
                        x1, y1, x2, y2 = int(x1), int(y1), int(x2), int(y2)
                        _mask = Image.new('L', _image.size, "black")
                        _mask.paste(white_image.crop((x1, y1, x2, y2)), (x1, y1))
                        ret_yolo_masks.append(image2mask(_mask))
                # Nothing detected: emit an all-black mask so outputs stay aligned.
                else:
                    ret_yolo_masks.append(torch.zeros((1, _image.size[1], _image.size[0]), dtype=torch.float32))
                    log(f"{self.NODE_NAME} mask or box not detected.")

            # Merge the collected detection masks into one mask for this image.
            # NOTE(review): ret_yolo_masks is never reset between batch images, so
            # for multi-image batches the merge also includes detections from
            # earlier images — confirm whether that is intended.
            if len(ret_yolo_masks) > 0:
                _mask = ret_yolo_masks[0]
                if mask_merge == "all":
                    for idx in range(len(ret_yolo_masks) - 1):
                        _mask = add_mask(_mask, ret_yolo_masks[idx + 1])
                else:
                    for idx in range(min(len(ret_yolo_masks), int(mask_merge)) - 1):
                        _mask = add_mask(_mask, ret_yolo_masks[idx + 1])
                ret_masks.append(_mask)
            else:
                # No detections at all for this image: append an all-black mask.
                ret_masks.append(torch.zeros((1, _image.size[1], _image.size[0]), dtype=torch.float32))

        log(f"{self.NODE_NAME} Processed {len(ret_masks)} image(s).", message_type='finish')
        return (torch.cat(ret_masks, dim=0),
                torch.cat(ret_yolo_plot_images, dim=0),
                torch.cat(ret_yolo_masks, dim=0),)
99
+
100
# ComfyUI registration: node id -> class, and UI display name.
NODE_CLASS_MAPPINGS = {
    "yolov8_detect": YoloV8Detect
}

NODE_DISPLAY_NAME_MAPPINGS = {
    "yolov8_detect": "yolov8_detect/My Node"
}
kim_comfyui_data/custom_nodes/Cheny_custom_nodes/yolov8_person_detect.py ADDED
@@ -0,0 +1,188 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import copy
2
+ import os
3
+
4
+ import torch
5
+ from PIL import Image
6
+ import glob
7
+ import numpy as np
8
+ from ultralytics import YOLO
9
+ import folder_paths
10
+
11
+ models_dirs=os.path.join(folder_paths.models_dir,'yolo')
12
def get_files(models_dir, file_exp_list):
    """Scan `models_dir` for files whose names end with any suffix in
    `file_exp_list` and return a dict mapping basename -> full path."""
    matches = []
    for suffix in file_exp_list:
        # glob returns a list per pattern, so collect them all.
        matches.extend(glob.glob(os.path.join(models_dir, '*' + suffix)))
    return {os.path.split(path)[1]: path for path in matches}
24
+
25
def tensor2pil(image):
    """Return `image` unchanged if it is already a PIL image; otherwise convert
    the first element of a float tensor batch to an 8-bit PIL image."""
    if isinstance(image, Image.Image):
        return image
    batched = image if len(image.shape) >= 3 else image.unsqueeze(0)
    arr = (batched[0].cpu().numpy() * 255).astype(np.uint8)
    return Image.fromarray(arr)
32
+
33
def pil2tensor(image):
    """PIL image -> float32 RGB tensor (H, W, 3) scaled to [0, 1]."""
    rgb = image.convert('RGB')
    return torch.tensor(np.array(rgb).astype(np.float32) / 255.0)
38
+
39
def mask2tensor(mask):
    """PIL image -> float32 grayscale mask tensor (H, W) scaled to [0, 1]."""
    gray = mask.convert('L')
    return torch.tensor(np.array(gray).astype(np.float32) / 255.0)
44
+
45
class Yolov8_person_detect:
    """ComfyUI node: detect a person in `back_image` with a YOLOv8 segmentation
    model. If a sufficiently confident person mask is found it is returned
    directly; otherwise a fallback mask is synthesized by scaling the provided
    reference `mask` and pasting it near the bottom-right of the background."""
    CATEGORY = "My Nodes/yolov8 person detect"
    RETURN_TYPES = ("MASK",)
    RETURN_NAMES = ("back_mask",)
    FUNCTION = "yolov8_person_detect"

    @classmethod
    def INPUT_TYPES(cls):
        # Only *seg.pt (segmentation) checkpoints are offered.
        model_exp = ["seg.pt"]
        FILES_DICT = get_files(models_dirs, model_exp)
        FILE_LIST = list(FILES_DICT.keys())

        return{
            "required":{
                "back_image": ("IMAGE",),
                "mask": ("MASK",),
                "yolo_model": (FILE_LIST,),
            },
            "optional":{
                # Confidence threshold for accepting a YOLO person detection.
                "true_rate": ("FLOAT",{
                    "default": 0.85,
                    "min": 0.01,
                    "max": 1.0,
                    "step": 0.01,
                    "display": "number"
                }),
                # Fraction of the background's longer side the pasted mask may span.
                "img_ratio": ("FLOAT",{
                    "default": float(2/3),
                    "min": 0.01,
                    "max": 1.0,
                    "step": 0.01,
                    "display": "number"
                }),
                # Fraction of the remaining horizontal margin kept as right padding.
                "x_ratio": ("FLOAT",{
                    "default": float(0.5),
                    "min": 0.01,
                    "max": 1.0,
                    "step": 0.01,
                    "display": "number"
                }),
                # Fraction of the remaining vertical margin kept as bottom padding.
                "y_ratio": ("FLOAT",{
                    "default": float(1/10),
                    "min": 0.01,
                    "max": 1.0,
                    "step": 0.01,
                    "display": "number"
                })
            }
        }

    def yolov8_person_detect(self, mask, back_image, yolo_model, true_rate, img_ratio, x_ratio, y_ratio):
        """Return a (1, H, W) mask for the person region of `back_image`.

        NOTE(review): the YOLO model is re-loaded from disk on every execution;
        also, if the fallback `mask` is entirely black, getbbox() returns None
        and crop(None) copies the full image — confirm both are acceptable.
        """
        back_image = tensor2pil(back_image)

        yolo_model = YOLO(os.path.join(models_dirs, yolo_model))
        # classes=[0] restricts detection to the COCO "person" class.
        result = yolo_model(
            back_image,
            retina_masks=True,
            classes=[0],
            conf=true_rate,
            verbose=False
        )[0]

        # A confident person segmentation was found: return its first mask.
        if result.masks is not None and len(result.masks) > 0:
            if result.boxes is not None and len(result.boxes) > 0:
                if result.boxes.conf[0] >= true_rate:
                    masks_data = result.masks.data
                    n_mask = masks_data[0]
                    n_mask = n_mask.unsqueeze(0)
                    return(n_mask,)

        # Fallback: build a new mask from the reference person mask.
        mask = tensor2pil(mask).convert('L')

        bd_w, bd_h = back_image.size

        n_mask = Image.new('L', (bd_w, bd_h), "black")
        # Target span of the pasted mask, relative to the background's longer side.
        target = img_ratio * max(bd_h, bd_w)

        m_bbox = mask.getbbox()

        mask = mask.crop(m_bbox)

        # Scale the cropped mask so its longer side equals `target`.
        m_w, m_h = mask.size
        ratio = target / max(m_w, m_h)
        new_w, new_h = int(ratio * m_w), int(ratio * m_h)
        mask = mask.resize((new_w, new_h), Image.LANCZOS)

        if new_w >= bd_w or new_h >= bd_h:
            raise ValueError(f'缩放图片的长宽超过背景图大小,请下调img_ratio值')

        # Anchor at the bottom-right corner, then pull back by the padding ratios.
        x = int(bd_w - new_w)
        y = int(bd_h - new_h)

        x_trap_padding = int(x * x_ratio)
        x = x - x_trap_padding

        y_trap_padding = int(y * y_ratio)
        y = y - y_trap_padding

        n_mask.paste(mask, (x, y))
        n_mask = mask2tensor(n_mask)

        # Ensure the ComfyUI-expected (1, H, W) shape.
        if len(n_mask.shape) < 3:
            n_mask = n_mask.unsqueeze(0)
        print("Yolov8_person_detect done")
        return (n_mask,)
181
+
182
# ComfyUI registration: node id -> class, and UI display name.
NODE_CLASS_MAPPINGS={
    "Yolov8_person_detect":Yolov8_person_detect
}

NODE_DISPLAY_NAME_MAPPINGS={
    "Yolov8_person_detect":"Yolov8_person_detect(My Node)"
}
kim_comfyui_data/custom_nodes/Cheny_custom_nodes/yolov8_person_nomask.py ADDED
@@ -0,0 +1,195 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import copy
2
+ import os
3
+
4
+ import torch
5
+ from PIL import Image,ImageDraw
6
+ import glob
7
+ import numpy as np
8
+ from ultralytics import YOLO
9
+ import folder_paths
10
+ import cv2
11
+
12
# Directory that holds the YOLO model checkpoints.
models_dirs=os.path.join(folder_paths.models_dir,'yolo')

def get_files(models_dir, file_exp_list):
    """Map model basename -> full path for every file in `models_dir` whose
    name ends with one of the suffixes in `file_exp_list`."""
    file_list=[]
    for exp in file_exp_list:
        # glob matches by pattern and returns a list per suffix, hence extend.
        file_list.extend(glob.glob(os.path.join(models_dir,'*'+exp)))
    file_dict={}
    for i in range(len(file_list)):
        # Split "dir/model.pt" into ("dir", "model.pt"); keep only the basename.
        _,filename=os.path.split(file_list[i])
        file_dict[filename]=file_list[i]

    return file_dict

def tensor2pil(image):
    """Tensor (or already-PIL image) -> PIL image of the first batch element."""
    if isinstance(image,Image.Image):
        return image
    else:
        if len(image.shape)<3:
            image=image.unsqueeze(0)
        return Image.fromarray((image[0].cpu().numpy()*255).astype(np.uint8))

def pil2tensor(image):
    """PIL image -> float32 RGB tensor (H, W, 3) scaled to [0, 1]."""
    new_image=image.convert('RGB')
    image_array=np.array(new_image).astype(np.float32)/255.0
    image_tensor=torch.tensor(image_array)
    return image_tensor

def mask2tensor(mask):
    """PIL image -> float32 grayscale mask tensor (H, W) scaled to [0, 1]."""
    mask=mask.convert('L')
    mask_array=np.array(mask).astype(np.float32)/255.0
    mask_tensor=torch.tensor(mask_array)
    return mask_tensor

def np2tensor(data):
    """uint8 numpy array in [0, 255] -> float32 tensor scaled to [0, 1]."""
    data=data.astype(np.float32)/255.0
    data_tensor=torch.tensor(data)
    return data_tensor
50
+
51
class Yolov8_person_nomask:
    """ComfyUI node: detect a person in `back_image`. On a confident detection
    return the YOLO segmentation mask and True; otherwise synthesize a rounded
    rectangle mask (sized/placed from the optional reference `mask`, optionally
    Gaussian-blurred) and return it with False."""
    CATEGORY="My Nodes/yolov8 person nomask"
    RETURN_TYPES=("MASK","BOOLEAN",)
    RETURN_NAMES=("back_mask","boolean")
    FUNCTION="yolov8_person_nomask"

    @classmethod
    def INPUT_TYPES(cls):
        # Any *.pt checkpoint under models/yolo is selectable.
        model_exp=[".pt"]
        FILES_DICT=get_files(models_dirs,model_exp)
        FILE_LIST=list(FILES_DICT.keys())

        return{
            "required":{
                "back_image":("IMAGE",),
                "yolo_model":(FILE_LIST,),
            },
            "optional":{
                "mask":("MASK",),
                # Confidence threshold for accepting a YOLO person detection.
                "true_rate":("FLOAT",{
                    "default":0.85,
                    "min":0.01,
                    "max":1.0,
                    "step":0.01,
                    "display":"number"
                }),
                # Fraction of the background's longer side the fallback box may span.
                "img_ratio":("FLOAT",{
                    "default":float(2/3),
                    "min":0.01,
                    "max":1.0,
                    "step":0.01,
                    "display":"number"
                }),
                # Fractions of the remaining margins kept as right/bottom padding.
                "x_ratio":("FLOAT",{
                    "default":float(0.5),
                    "min":0.01,
                    "max":1.0,
                    "step":0.01,
                    "display":"number"
                }),
                "y_ratio":("FLOAT",{
                    "default":float(1/10),
                    "min":0.01,
                    "max":1.0,
                    "step":0.01,
                    "display":"number"
                }),
                # Corner radius of the fallback rounded rectangle, in pixels.
                "radius":("INT",{
                    "default":100,
                    "min":10,
                    "max":1000,
                    "step":10,
                    "display":"number"
                }),
                # Gaussian blur radius applied to the fallback mask (0 = no blur).
                "blur_radius":("INT",{
                    "default":0,
                    "min":0,
                    "max":100,
                    "step":1,
                    "display":"number"
                })
            }
        }

    def yolov8_person_nomask(self,back_image,yolo_model,true_rate,img_ratio,x_ratio,y_ratio,radius,blur_radius,mask=None):
        """Return (mask, detected?) for `back_image`.

        NOTE(review): the YOLO model is re-loaded from disk on every execution,
        and mask.getbbox() may be None for an all-black reference mask — confirm
        both are acceptable. The "done" print below says "person_detect", not
        "person_nomask" (copy-paste leftover).
        """
        back_image=tensor2pil(back_image)

        yolo_model=YOLO(os.path.join(models_dirs,yolo_model))
        # classes=[0] restricts detection to the COCO "person" class.
        result = yolo_model(
            back_image,
            retina_masks=True,
            classes=[0],
            conf=true_rate,
            verbose=False
        )[0]

        # A confident person segmentation was found: return it with True.
        if result.masks is not None and len(result.masks)>0:
            if result.boxes is not None and len(result.boxes)>0:
                if result.boxes.conf[0]>=true_rate:
                    masks_data=result.masks.data
                    n_mask=masks_data[0]
                    n_mask=n_mask.unsqueeze(0)
                    boolean=True
                    return(n_mask,boolean,)

        bd_w,bd_h=back_image.size

        n_mask=Image.new('L',(bd_w,bd_h),"black")
        # No reference mask to size the fallback from: return an all-black mask.
        if mask is None:
            boolean=False
            return (mask2tensor(n_mask),boolean,)
        # Target span of the fallback box, relative to the longer background side.
        target=img_ratio*max(bd_h,bd_w)

        mask=tensor2pil(mask).convert('L')


        m_bbox=mask.getbbox()

        mask=mask.crop(m_bbox)

        # Scale so the cropped mask's longer side equals `target`.
        m_w,m_h=mask.size
        ratio=target/max(m_w,m_h)
        new_w,new_h=int(ratio*m_w),int(ratio*m_h)


        if new_w>=bd_w or new_h>=bd_h:
            raise ValueError(f'缩放图片的长宽超过背景图大小,请下调img_ratio值')

        # Anchor at the bottom-right corner, then pull back by the padding ratios.
        x_left=int(bd_w-new_w)
        y_left=int(bd_h-new_h)

        x_trap_padding=int(x_left*x_ratio)
        x_left=x_left-x_trap_padding

        y_trap_padding=int(y_left*y_ratio)
        y_left=y_left-y_trap_padding

        x_right,y_right=x_left+new_w,y_left+new_h
        draw=ImageDraw.Draw(n_mask)

        # Draw the fallback area as a white rounded rectangle.
        draw.rounded_rectangle(
            [x_left, y_left, x_right, y_right],
            radius=int(radius),
            fill=255
        )

        # Optional feathering: kernel size must be odd, hence 2*r+1.
        n_mask=np.array(n_mask).astype(np.uint8)
        blur_radius_int = int(round(blur_radius))
        n_mask=cv2.GaussianBlur(n_mask, (2 * blur_radius_int + 1, 2 * blur_radius_int + 1), 0)

        n_mask=np2tensor(n_mask)

        # Ensure the ComfyUI-expected (1, H, W) shape.
        if len(n_mask.shape)<3:
            n_mask=n_mask.unsqueeze(0)
        print("Yolov8_person_detect done")
        boolean=False
        return (n_mask,boolean,)
188
+
189
# ComfyUI registration: node id -> class, and UI display name.
NODE_CLASS_MAPPINGS={
    "Yolov8_person_nomask":Yolov8_person_nomask
}

NODE_DISPLAY_NAME_MAPPINGS={
    "Yolov8_person_nomask":"Yolov8_person_nomask(My Node)"
}
kim_comfyui_data/custom_nodes/ComfyLiterals/.gitignore ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+
2
+ __pycache__/
kim_comfyui_data/custom_nodes/ComfyLiterals/README.md ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ # ComfyLiterals
2
+ ![CleanShot 2023-07-22 at 00 13 13](https://github.com/M1kep/ComfyLiterals/assets/2661819/c8bdc4f0-8cf3-4403-be96-34db357520b0)
kim_comfyui_data/custom_nodes/ComfyLiterals/__init__.py ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from .nodes import IntLiteral, FloatLiteral, StringLiteral, CheckpointListLiteral, LoraListLiteral
2
+ from .operations import Operation
3
+ from .startup_utils import symlink_web_dir
4
+
5
# Registry: node identifier -> implementing class. "KepStringLiteral" is kept
# as a legacy alias for StringLiteral so old workflows keep loading.
NODE_CLASS_MAPPINGS = {
    "Int": IntLiteral,
    "Float": FloatLiteral,
    "String": StringLiteral,
    "KepStringLiteral": StringLiteral,
    "Operation": Operation,
    "Checkpoint": CheckpointListLiteral,
    "Lora": LoraListLiteral,
}

# UI display names; identifiers absent here fall back to the class name.
NODE_DISPLAY_NAME_MAPPINGS = {
    "KepStringLiteral": "String",
}

EXTENSION_NAME = "ComfyLiterals"

# Expose the bundled frontend code (js/) to ComfyUI's web server via a symlink.
symlink_web_dir("js", EXTENSION_NAME)
kim_comfyui_data/custom_nodes/ComfyLiterals/js/operation-node.js ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import {app} from "/scripts/app.js";
2
+
3
app.registerExtension({
    name: "ComfyLiterals.OperationNode",
    // Customize the Operation node: it is declared with four optional inputs
    // (Int A, Float A, Int B, Float B) but only one of each pair should be
    // visible, chosen by the "A Type"/"B Type" widgets. The hidden slot of each
    // pair is kept in `inputCache` so it can be swapped back in on change.
    nodeCreated(node, app) {
        if (node['comfyClass'] === 'Operation') {
            const onAdded = node.onAdded
            node.onAdded = function (graph) {
                console.log("OperationNode onAdded")
                const firstCallbackResp = onAdded ? onAdded.apply(this, arguments) : undefined;

                /**
                 * Cached (currently hidden) input slot for each operand.
                 * @type {Record<string, INodeInputSlot>}
                 */
                const inputCache = {
                    "A": node.inputs[1],
                    "B": node.inputs[3]
                }

                if (this.widgets_values) {
                    // Node restored with saved widget values: hide the slot that
                    // does NOT match each saved type.
                    const aType = this.widgets_values[0]
                    const bType = this.widgets_values[1]

                    // Slots are [IntA, FloatA, IntB, FloatB]
                    const aIdxToDelete = aType === "INT" ? 1 : 0
                    // After removing one A slot: [*A, IntB, FloatB]
                    const bIdxToDelete = bType === "INT" ? 3 : 1

                    inputCache["A"] = node.inputs[aIdxToDelete]
                    this.removeInput(aIdxToDelete)
                    inputCache["B"] = node.inputs[bIdxToDelete]
                    this.removeInput(bIdxToDelete)
                } else {
                    // Nodes being restored/pasted don't have widget_values.
                    // Node has 4 inputs (IntA, FloatA, IntB, FloatB): remove both
                    // float inputs; Float B moves to index 2 after Float A is removed.
                    this.removeInput(1)
                    this.removeInput(2)
                }

                // Type widgets toggle which slot of the pair is visible: re-insert
                // the cached slot, cache the now-hidden one, remove it.
                this.widgets[0].callback = function (v, canvas, node) {
                    addInputAtIndex(node, inputCache["A"], 0)
                    inputCache["A"] = node.inputs[1]
                    node.removeInput(1)
                }
                this.widgets[1].callback = function (v, canvas, node) {
                    addInputAtIndex(node, inputCache["B"], 2)
                    inputCache["B"] = node.inputs[1]
                    node.removeInput(1)
                }
            }
        }
    }
})
56
+
57
+ /**
58
+ * Adds an input to a node at the given index.
59
+ * @param node {LGraphNode}
60
+ * @param input {INodeInputSlot}
61
+ * @param index {number}
62
+ * @returns {INodeInputSlot}
63
+ */
64
+ function addInputAtIndex(node, input, index) {
65
+ if (!node.inputs) {
66
+ node.inputs = [];
67
+ }
68
+
69
+ if (index > node.inputs.length) {
70
+ console.warn("LiteGraph: Warning adding port index: " + index + " of node " + node.id + ", it doesnt have so many inputs");
71
+ node.inputs.push(input);
72
+ } else {
73
+ node.inputs.splice(index, 0, input);
74
+ }
75
+ if (node.onInputAdded) {
76
+ node.onInputAdded(input);
77
+ }
78
+ node.setSize(node.computeSize());
79
+ LiteGraph.registerNodeAndSlotType(node, input.type || 0);
80
+
81
+ node.setDirtyCanvas(true, true);
82
+ return input;
83
+ }
kim_comfyui_data/custom_nodes/ComfyLiterals/nodes.py ADDED
@@ -0,0 +1,136 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import folder_paths
2
+
3
# Hack: string type that is always equal in not equal comparisons
class AnyType(str):
    # Returning False from __ne__ makes `other_type != ANY` always false, which
    # is how ComfyUI validates connection types; __eq__ is intentionally left as
    # plain str equality.
    def __ne__(self, __value: object) -> bool:
        return False


# Our any instance wants to be a wildcard string
ANY = AnyType("*")
11
class IntLiteral:
    """Literal node that parses its string widget into an INT output."""

    def __init__(self):
        pass

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "Number": ("STRING", {}),
            }
        }

    RETURN_TYPES = ("INT",)
    FUNCTION = "to_int"

    CATEGORY = "Literals"

    def to_int(self, Number):
        """Parse `Number` as an integer.

        Raises:
            Exception: if the string is not a valid integer literal; the
                underlying ValueError is chained for easier debugging.
        """
        try:
            ret_val = int(Number)
        except Exception as err:
            raise Exception("Invalid value provided for INT") from err
        return (ret_val,)
34
+
35
+
36
class FloatLiteral:
    """Literal node that parses its string widget into a FLOAT output."""

    def __init__(self):
        pass

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "Number": ("STRING", {}),
            }
        }

    RETURN_TYPES = ("FLOAT",)
    FUNCTION = "to_float"

    CATEGORY = "Literals"

    def to_float(self, Number):
        """Parse `Number` as a float, raising on malformed input."""
        try:
            parsed = float(Number)
        except Exception:
            raise Exception("Invalid value provided for FLOAT")
        return (parsed,)
59
+
60
+
61
class StringLiteral:
    """Literal node that passes its multiline string widget straight through."""

    def __init__(self):
        pass

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "String": ("STRING", {"multiline": True}),
            }
        }

    RETURN_TYPES = ("STRING",)
    FUNCTION = "to_string"

    CATEGORY = "Literals"

    def to_string(self, String):
        """Return the input string as a one-element tuple."""
        return (String,)
80
+
81
+
82
class CheckpointListLiteral:
    """Multiline literal pre-filled with every known checkpoint filename;
    outputs the non-empty lines as a list."""

    def __init__(self):
        pass

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "literal": ("STRING", {
                    "multiline": True,
                    "default": "\n".join(folder_paths.get_filename_list("checkpoints"))
                }),
            },
        }

    RETURN_TYPES = (ANY,)
    RETURN_NAMES = ("Selected Checkpoints",)
    OUTPUT_IS_LIST = (True,)
    FUNCTION = "parse_literal"

    CATEGORY = "List Stuff"

    def parse_literal(self, literal):
        """Split the text on newlines and drop the empty lines."""
        lines = [line for line in literal.split("\n") if line]
        return (lines,)
109
+
110
class LoraListLiteral:
    """Multiline literal pre-filled with every known LoRA filename;
    outputs the non-empty lines as a list."""

    def __init__(self):
        pass

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "literal": ("STRING", {
                    "multiline": True,
                    "default": "\n".join(folder_paths.get_filename_list("loras"))
                }),
            },
        }

    RETURN_TYPES = (ANY,)
    RETURN_NAMES = ("Selected Loras",)
    OUTPUT_IS_LIST = (True,)
    FUNCTION = "parse_literal"

    CATEGORY = "List Stuff"

    def parse_literal(self, literal):
        """Split the text on newlines and drop the empty lines."""
        lines = [line for line in literal.split("\n") if line]
        return (lines,)
kim_comfyui_data/custom_nodes/ComfyLiterals/operations.py ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
class Operation:
    """Arithmetic node: applies +, -, *, or / to two numbers (each INT or FLOAT)
    and returns the result both as INT and FLOAT."""

    def __init__(self):
        pass

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "A Type": (["Int", "Float"],),
                "B Type": (["Int", "Float"],),
                "Operation": (["A+B", "A-B", "A*B", "A/B"],)
            },
            "optional": {
                "A - Int": ("INT", {"forceInput": True}),
                "A - Float": ("FLOAT", {"forceInput": True}),
                "B - Int": ("INT", {"forceInput": True}),
                "B - Float": ("FLOAT", {"forceInput": True})
            }
        }

    RETURN_TYPES = ("INT", "FLOAT")
    FUNCTION = "do_operation"

    CATEGORY = "Literals"

    def _do_addition(self, a_val, b_val):
        """a+b as an (int, float) pair."""
        return (int(a_val + b_val), float(a_val + b_val))

    def _do_subtraction(self, a_val, b_val):
        """a-b as an (int, float) pair."""
        return (int(a_val - b_val), float(a_val - b_val))

    def _do_multiplication(self, a_val, b_val):
        """a*b as an (int, float) pair."""
        return (int(a_val * b_val), float(a_val * b_val))

    def _do_division(self, a_val, b_val):
        """a/b as an (int, float) pair; ZeroDivisionError propagates for b == 0."""
        return (int(a_val / b_val), float(a_val / b_val))

    def do_operation(self, **kwargs):
        """Apply the selected operation to operands A and B.

        The frontend hides the unused input of each Int/Float pair, so only the
        input matching "A Type"/"B Type" is expected to be connected.

        Raises:
            Exception: if a required operand is not connected, or the operation
                name is unknown.
        """
        a_key = "A - Int" if kwargs["A Type"] == "Int" else "A - Float"
        b_key = "B - Int" if kwargs["B Type"] == "Int" else "B - Float"
        a_val = kwargs.get(a_key)
        b_val = kwargs.get(b_key)
        # Previously a missing optional input surfaced as a bare KeyError;
        # report which operand is unconnected instead.
        if a_val is None or b_val is None:
            missing = a_key if a_val is None else b_key
            raise Exception(f"Operation: input {missing!r} must be connected")

        dispatch = {
            "A+B": self._do_addition,
            "A-B": self._do_subtraction,
            "A*B": self._do_multiplication,
            "A/B": self._do_division,
        }
        handler = dispatch.get(kwargs["Operation"])
        if handler is None:
            raise Exception("Invalid operation provided")
        return handler(a_val, b_val)
kim_comfyui_data/custom_nodes/ComfyLiterals/startup_utils.py ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from pathlib import Path
3
+
4
+ import folder_paths
5
+
6
+
7
def symlink_web_dir(local_path, extension_name):
    """Expose this extension's web assets to ComfyUI.

    Creates a symlink named *extension_name* inside ComfyUI's
    ``web/extensions`` folder pointing at *local_path* (relative to this
    file).  If the link already exists, or ComfyUI's folder cannot be
    located, it only prints guidance for the user.
    """
    ext_root = Path(__file__).parent.resolve()
    source_dir = os.path.join(ext_root, local_path)
    web_ext_root = Path(os.path.join(folder_paths.base_path, "web", "extensions"))
    link_path = Path(os.path.join(web_ext_root, extension_name))

    # Already installed (symlinked or manually copied): nothing to do.
    if link_path.exists():
        print(f"Web extensions folder found at {link_path}")
        return

    # Could not locate ComfyUI's web/extensions folder at all.
    if not web_ext_root.exists():
        print(
            f"Failed to find comfy root automatically, please copy the folder {os.path.join(ext_root, 'web')} manually in the web/extensions folder of ComfyUI"
        )
        return

    try:
        os.symlink(source_dir, link_path)
    except OSError as e:
        # Typical on Windows without developer mode / admin rights.
        print(
            f"Error:\n{e}\n"
            f"Failed to create symlink to {link_path}. Please copy the folder manually.\n"
            f"Source: {source_dir}\n"
            f"Target: {link_path}"
        )
    except Exception as e:
        print(f"Unexpected error:\n{e}")
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/.github/workflows/publish.yml ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Publish to Comfy registry
2
+ on:
3
+ workflow_dispatch:
4
+ push:
5
+ branches:
6
+ - main
7
+ - master
8
+ paths:
9
+ - "pyproject.toml"
10
+
11
+ permissions:
12
+ issues: write
13
+
14
+ jobs:
15
+ publish-node:
16
+ name: Publish Custom Node to registry
17
+ runs-on: ubuntu-latest
18
+ if: ${{ github.repository_owner == 'akawana' }}
19
+ steps:
20
+ - name: Check out code
21
+ uses: actions/checkout@v4
22
+ with:
23
+ submodules: true
24
+ - name: Publish Custom Node
25
+ uses: Comfy-Org/publish-node-action@v1
26
+ with:
27
+ ## Add your own personal access token to your Github Repository secrets and reference it here.
28
+ personal_access_token: ${{ secrets.REGISTRY_ACCESS_TOKEN }}
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/.gitignore ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Python bytecode
2
+ __pycache__/
3
+ *.pyc
4
+ *.pyo
5
+ *.pyd
6
+
7
+ # Virtual environment
8
+ .venv/
9
+ venv/
10
+ env/
11
+
12
+ # Editor/IDE specific files
13
+ .idea/
14
+ .vscode/
15
+
16
+ # Operating System files
17
+ .DS_Store
18
+ Thumbs.db
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/README.md ADDED
@@ -0,0 +1,183 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #### Other My Nodes
2
+ - 📘 [ComfyUI-AK-Pack](https://github.com/akawana/ComfyUI-AK-Pack)
3
+ - 📘 [ComfyUI-AK-XZ-Axis](https://github.com/akawana/ComfyUI-AK-XZ-Axis)
4
+ - 📘 [ComfyUI RGBYP Mask Editor](https://github.com/akawana/ComfyUI-RGBYP-Mask-Editor)
5
+ - 📘 [ComfyUI Folded Prompts](https://github.com/akawana/ComfyUI-Folded-Prompts)
6
+
7
+ # ComfyUI AK Pack
8
+ This is a pack of useful ComfyUI nodes and UI extensions. It was created for **complex and large** workflows. The main goal of this pack is to unify and simplify working with my other packs: 📘 [ComfyUI-AK-XZ-Axis](https://github.com/akawana/ComfyUI-AK-XZ-Axis), 📘 [ComfyUI RGBYP Mask Editor](https://github.com/akawana/ComfyUI-RGBYP-Mask-Editor), 📘 [ComfyUI Folded Prompts](https://github.com/akawana/ComfyUI-Folded-Prompts).
9
+
10
+ The most interesting parts of this pack for most users are:
11
+ - Multiple Samplers Control
12
+ - Project Settings
13
+ - AK Base
14
+
15
+ The other nodes are also useful, but they are of secondary importance. You should be able to figure them out on your own.
16
+
17
+ I have also created example workflows:
18
+
19
+ ---
20
+ #### UI: Multiple Samplers Control
21
+
22
+ This is not a node in the traditional sense, but a **ComfyUI UI extension**. The panel allows you to control multiple KSamplers or Detailers from a single place.
23
+
24
+ In my workflows, I often use 6–8 KSamplers and Detailers at the same time. I spent a long time looking for a fast and centralized way to control them, and this panel is the result of that effort.
25
+
26
+ The panel is integrated into the left toolbar of ComfyUI and can be disabled in Settings.
27
+
28
+ The panel includes a Settings tab where you configure, using comma-separated lists, which KSampler nodes you want to control. You can specify node names or node IDs in these lists.
29
+
30
+ For example: "KSampler, 12, 33, My Detailer, BlaBlaSampler" - will find all the samplers mentioned.
31
+
32
+ Once configured, the detected nodes will appear in a dropdown list in the Control tab, allowing you to adjust the settings of your KSamplers or Detailers from a single centralized interface.
33
+
34
+ | [![](./img/MSamplers.jpg)](./img/MSamplers.jpg) | [![](./img/MSamplersSettings.jpg)](./img/MSamplersSettings.jpg) | [![](./img/MSamplersChooser.jpg)](./img/MSamplersChooser.jpg) |
35
+ |---|---|---|
36
+
37
+ ---
38
+ #### UI: Project Settings
39
+
40
+ This is not a node in the strict sense. It is a **UI extension** — a panel that allows you to quickly enter the main parameters of your project.
41
+ The panel is embedded into the left toolbar of ComfyUI and can be disabled in the settings.
42
+
43
+ I work a lot with img2img and often use fairly large workflows. I got tired of constantly navigating to different parts of the workflow every time I needed to adjust input parameters, so I created this panel. Now I can load an image and enter all the required values in one place.
44
+
45
+ All panel data is stored directly in the workflow graph, except for the image itself. **The image** is copied to the **input/garbage/** folder. If you delete this folder later, nothing bad will happen — you will simply need to select the image again.
46
+
47
+ To retrieve the data from this panel, I created a separate node called **AKProjectSettingsOut**. Just place it anywhere you need in your workflow to access the panel values.
48
+
49
+ | [![](./img/ProjectSettings.jpg)](./img/ProjectSettings.jpg) | [![](./img/ProjectSettingsOptions.jpg)](./img/ProjectSettingsOptions.jpg) | [![](./img/AKProjectSettingsOut.jpg)](./img/AKProjectSettingsOut.jpg) |
50
+ |---|---|---|
51
+
52
+ ---
53
+ #### Node: AK Base
54
+
55
+ This node is designed as a central hub for working within a workflow, which is why it is called AK Base. At its core, it provides a preview-based comparison of two images. Similar nodes already exist, but this one adds support for a gallery of multiple images.
56
+
57
+ In addition, it includes:
58
+ - a button to copy a single image to the clipboard,
59
+ - a button to copy the entire gallery to the clipboard,
60
+ - a dialog that allows you to inspect the results while working in a different part of the workflow.
61
+
62
+ In the Properties panel, this node provides a setting called node_list.
63
+ This is a text field where you can specify a list of nodes, using either node names or node IDs.
64
+
65
+ Once configured, AK Base can automatically adjust the parameters seed, denoise, cfg, and xz_steps based on the image selected in the gallery.
66
+ This functionality was implemented specifically to support my 📘 [ComfyUI-AK-XZ-Axis](https://github.com/akawana/ComfyUI-AK-XZ-Axis) nodes.
67
+
68
+ | [![](./img/AKBase.jpg)](./img/AKBase.jpg) | [![](./img/AKBaseGallery.jpg)](./img/AKBaseGallery.jpg) | [![](./img/AKBasePIP.jpg)](./img/AKBasePIP.jpg) |
69
+ |---|---|---|
70
+
71
+ ---
72
+ #### Node: Setter & Getter
73
+
74
+ These nodes already exist in several other packs. My goal was to make them faster. In my implementation, the nodes do not use JavaScript to store or pass data. All data is passed only through Python and direct connections between nodes. Simply put, they hide links and hide outputs and inputs.
75
+
76
+ In my setup, JavaScript is responsible only for updating the list of variables and does not affect the Run process in any way. Based on my comparisons, in complex workflows with 20–30 Getter/Setter nodes, my nodes perform much faster.
77
+
78
+ ---
79
+ #### Node: Repeat Group State
80
+
81
+ A connection-free interactive node that synchronizes the state of its own group with the state of other groups matching a given substring. This allows groups to depend on other groups without wires, similar to rgthree repeaters.
82
+
83
+ - Finds groups with names containing the target substring.
84
+ - Checks whether any of them are Active.
85
+ - If **all** matching groups are disabled -> it disables **its own group**.
86
+ - If **any** matching group is active -> it enables **its own group**.
87
+
88
+ <img src="./img/preview_repeater.jpg" height="200"/>
89
+
90
+ ---
91
+ #### Node: IsOneOfGroupsActive
92
+
93
+ Checks the state of all groups whose names **contain a specified substring**.
94
+
95
+ - If **at least one** matching group is Active -> output is `true`.
96
+ - If **all** matching groups are Muted/Bypassed -> output is `false`.
97
+
98
+ <img src="./img/preview_is_group_active.jpg" height="200"/>
99
+
100
+ ---
101
+ #### Node: AK Index Multiple
102
+
103
+ Extracts a specific range from any **List** (images, masks, latents, text etc..) and creates individual outputs for that range.
104
+ Optionally replaces missing values with a fallback (`if_none`).
105
+
106
+ <img src="./img/AKIndexMultiple.jpg" height="200">
107
+
108
+ ---
109
+ #### Node: AK CLIP Encode Multiple
110
+
111
+ Extracts a specific range from any **List** of strings and CLIP encodes individual outputs for that range.
112
+
113
+ Same as **AK Index Multiple** but for CLIP encoding. It works faster than regular CLIP encoders because it performs only one encoding for all NONE input strings. It caches the strings and does not re-encode them when nothing has changed. It also outputs a combined conditioning.
114
+
115
+ <img src="./img/AKCLIPEncodeMultiple.jpg" height="200">
116
+
117
+ ---
118
+ #### Node: AK KSampler Settings & AK KSampler Settings Out
119
+
120
+ Allows you to define sampler settings anywhere in the workflow.
121
+ The Out node lets you retrieve these settings wherever they are needed.
122
+
123
+ - `Seed`, `Sampler`, `Scheduler`, `Steps`, `Cfg`, `Denoise`, `xz_steps`
124
+
125
+ > [!NOTE]
126
+ > This node includes an additional field called xz_steps.
127
+ > It is used to control the number of steps when working with my 📘 [ComfyUI-AK-XZ-Axis](https://github.com/akawana/ComfyUI-AK-XZ-Axis) nodes.
128
+ > This field is also convenient when used together with the **AK Base** node, because it can automatically set xz_steps = 1 when a single image is selected in the gallery.
129
+
130
+ ---
131
+ #### Node: AK Replace Alpha With Color & AK Replace Color With Alpha
132
+
133
+ `AK Replace Alpha With Color` Replaces alpha with a color.
134
+
135
+ `AK Replace Color With Alpha` Performs the reverse operation: Replaces a color with alpha. You can specify the color manually, or use automatic modes that detect the color from pixels in different corners of the image. Image **cropping** is also available. This is useful because when rendering images on a flat background, the outermost pixels around the edges often have color artifacts and are better removed.
136
+
137
+ > [!NOTE]
138
+ > Both of these nodes are useful when working with images that have a transparent background. You can first replace alpha with a color, perform the generation, and then, before saving, remove that color and convert it back to alpha.
139
+
140
+ <img src="./img/AKReplaceAlphaWithColor.jpg" height="200">
141
+
142
+ ---
143
+ #### Other nodes
144
+
145
+ ##### Node: AK Pipe & AK Pipe Loop
146
+
147
+ AK Pipe is a zero-copy pipeline node that passes a structured pipe object through the graph and updates only explicitly connected inputs. It avoids unnecessary allocations by reusing the original pipe when no values change and creates a new container only on real object replacement. This design minimizes memory churn and Python overhead, making it significantly faster than traditional pipe merge nodes.
148
+
149
+ `AK Pipe Loop` Always outputs the most recently modified connected AK Pipe.
150
+
151
+ ##### Node: CLIP Text Encode Cached
152
+
153
+ Simple node for encoding conditioning with a small caching experiment.
154
+
155
+ ##### Node: CLIP Text Encode and Combine Cached
156
+
157
+ Can combine the input conditioning with the text entered in the node.
158
+
159
+ <img src="./img/CLIPTextEncodeAndCombineCached.jpg" height="200">
160
+
161
+ ##### Node: AK Resize On Boolean
162
+
163
+ Resizes the image and optional mask based on a boolean parameter (enable / disable). Useful when working with the **Project Settings** panel.
164
+
165
+ <img src="./img/AKResizeOnBoolean.jpg" height="200">
166
+
167
+ ##### Node: AK Contrast And Saturate Image
168
+
169
+ Node for adjusting image contrast and saturation.
170
+
171
+ ##### Node: Preview Raw Text
172
+
173
+ Displays text and can format **JSON**. It also supports displaying a list of texts.
174
+
175
+ ---
176
+
177
+ # Installation
178
+
179
+ From your ComfyUI root directory:
180
+
181
+ ```bash
182
+ cd ComfyUI/custom_nodes
183
+ git clone https://github.com/akawana/ComfyUI-AK-Pack.git
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/__init__.py ADDED
@@ -0,0 +1,109 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ WEB_DIRECTORY = "./js"
2
+
3
+ from .nodes.AKIndexMultiple import NODE_CLASS_MAPPINGS as AKIndexMultiple_MAPPINGS
4
+ from .nodes.AKIndexMultiple import NODE_DISPLAY_NAME_MAPPINGS as AKIndexMultiple_DISPLAY
5
+
6
+ from .nodes.AKCLIPEncodeMultiple import NODE_CLASS_MAPPINGS as AKCLIPEncodeMultiple_MAPPINGS
7
+ from .nodes.AKCLIPEncodeMultiple import NODE_DISPLAY_NAME_MAPPINGS as AKCLIPEncodeMultiple_DISPLAY
8
+
9
+ from .nodes.IsOneOfGroupsActive import NODE_CLASS_MAPPINGS as GROUPCHK_MAPPINGS
10
+ from .nodes.IsOneOfGroupsActive import NODE_DISPLAY_NAME_MAPPINGS as GROUPCHK_DISPLAY
11
+
12
+ from .nodes.RepeatGroupState import NODE_CLASS_MAPPINGS as RPSTATE_MAPPINGS
13
+ from .nodes.RepeatGroupState import NODE_DISPLAY_NAME_MAPPINGS as RPSTATE_DISPLAY
14
+
15
+ from .nodes.PreviewRawText import NODE_CLASS_MAPPINGS as PRTSTATE_MAPPINGS
16
+ from .nodes.PreviewRawText import NODE_DISPLAY_NAME_MAPPINGS as PRTSTATE_DISPLAY
17
+
18
+ from .nodes.CLIPTextEncodeCached import NODE_CLASS_MAPPINGS as CLCSTATE_MAPPINGS
19
+ from .nodes.CLIPTextEncodeCached import NODE_DISPLAY_NAME_MAPPINGS as CLCSTATE_DISPLAY
20
+
21
+ from .nodes.CLIPTextEncodeAndCombineCached import NODE_CLASS_MAPPINGS as CLCOMBINE_STATE_MAPPINGS
22
+ from .nodes.CLIPTextEncodeAndCombineCached import NODE_DISPLAY_NAME_MAPPINGS as CLCOMBINE_STATE_DISPLAY
23
+
24
+ from .nodes.AKBase import NODE_CLASS_MAPPINGS as AKBSTATE_MAPPINGS
25
+ from .nodes.AKBase import NODE_DISPLAY_NAME_MAPPINGS as AKBSTATE_DISPLAY
26
+
27
+ from .nodes.AKPipe import NODE_CLASS_MAPPINGS as AKPIPESTATE_MAPPINGS
28
+ from .nodes.AKPipe import NODE_DISPLAY_NAME_MAPPINGS as AKPIPESTATE_DISPLAY
29
+
30
+ from .nodes.AKPipeLoop import NODE_CLASS_MAPPINGS as AKPIPEL_STATE_MAPPINGS
31
+ from .nodes.AKPipeLoop import NODE_DISPLAY_NAME_MAPPINGS as AKPIPEL_STATE_DISPLAY
32
+
33
+ from .nodes.Setter import NODE_CLASS_MAPPINGS as SETTERSTATE_MAPPINGS
34
+ from .nodes.Setter import NODE_DISPLAY_NAME_MAPPINGS as SETTERSTATE_DISPLAY
35
+
36
+ from .nodes.Getter import NODE_CLASS_MAPPINGS as GETTERSTATE_MAPPINGS
37
+ from .nodes.Getter import NODE_DISPLAY_NAME_MAPPINGS as GETTERSTATE_DISPLAY
38
+
39
+ from .nodes.AKResizeOnBoolean import NODE_CLASS_MAPPINGS as RESIZESTATE_MAPPINGS
40
+ from .nodes.AKResizeOnBoolean import NODE_DISPLAY_NAME_MAPPINGS as RESIZESTATE_DISPLAY
41
+
42
+ from .nodes.IsMaskEmpty import NODE_CLASS_MAPPINGS as ISMSTATE_MAPPINGS
43
+ from .nodes.IsMaskEmpty import NODE_DISPLAY_NAME_MAPPINGS as ISMSTATE_DISPLAY
44
+
45
+ from .nodes.AKContrastAndSaturateImage import NODE_CLASS_MAPPINGS as AKSAT_STATE_MAPPINGS
46
+ from .nodes.AKContrastAndSaturateImage import NODE_DISPLAY_NAME_MAPPINGS as AKSAT_STATE_DISPLAY
47
+
48
+ from .nodes.AKReplaceAlphaWithColor import NODE_CLASS_MAPPINGS as AKRALPHA_STATE_MAPPINGS
49
+ from .nodes.AKReplaceAlphaWithColor import NODE_DISPLAY_NAME_MAPPINGS as AKRALPHA_STATE_DISPLAY
50
+
51
+ from .nodes.AKReplaceColorWithAlpha import NODE_CLASS_MAPPINGS as AKRCOLOR_STATE_MAPPINGS
52
+ from .nodes.AKReplaceColorWithAlpha import NODE_DISPLAY_NAME_MAPPINGS as AKRCOLOR_STATE_DISPLAY
53
+
54
+ from .nodes.AKControlMultipleKSamplers import NODE_CLASS_MAPPINGS as AK_CONTROL_SAMPLERS_COLOR_STATE_MAPPINGS
55
+ from .nodes.AKControlMultipleKSamplers import NODE_DISPLAY_NAME_MAPPINGS as AK_CONTROL_SAMPLERS_COLOR_STATE_DISPLAY
56
+
57
+ from .nodes.AKKSamplerSettings import NODE_CLASS_MAPPINGS as AKKSamplerSettings_STATE_MAPPINGS
58
+ from .nodes.AKKSamplerSettings import NODE_DISPLAY_NAME_MAPPINGS as AKKSamplerSettings_STATE_DISPLAY
59
+
60
+ from .nodes.AKProjectSettingsOut import NODE_CLASS_MAPPINGS as AKProjectSettingsOut_STATE_MAPPINGS
61
+ from .nodes.AKProjectSettingsOut import NODE_DISPLAY_NAME_MAPPINGS as AKProjectSettingsOut_STATE_DISPLAY
62
+
63
+ NODE_CLASS_MAPPINGS = {
64
+ **AKIndexMultiple_MAPPINGS,
65
+ **AKCLIPEncodeMultiple_MAPPINGS,
66
+ **GROUPCHK_MAPPINGS,
67
+ **RPSTATE_MAPPINGS,
68
+ **PRTSTATE_MAPPINGS,
69
+ **CLCSTATE_MAPPINGS,
70
+ **CLCOMBINE_STATE_MAPPINGS,
71
+ **AKBSTATE_MAPPINGS,
72
+ **AKPIPESTATE_MAPPINGS,
73
+ **AKPIPEL_STATE_MAPPINGS,
74
+ **SETTERSTATE_MAPPINGS,
75
+ **GETTERSTATE_MAPPINGS,
76
+ **RESIZESTATE_MAPPINGS,
77
+ **ISMSTATE_MAPPINGS,
78
+ **AKSAT_STATE_MAPPINGS,
79
+ **AKRALPHA_STATE_MAPPINGS,
80
+ **AKRCOLOR_STATE_MAPPINGS,
81
+ **AK_CONTROL_SAMPLERS_COLOR_STATE_MAPPINGS,
82
+ **AKKSamplerSettings_STATE_MAPPINGS,
83
+ **AKProjectSettingsOut_STATE_MAPPINGS,
84
+ }
85
+
86
+ NODE_DISPLAY_NAME_MAPPINGS = {
87
+ **AKIndexMultiple_DISPLAY,
88
+ **AKCLIPEncodeMultiple_DISPLAY,
89
+ **GROUPCHK_DISPLAY,
90
+ **RPSTATE_DISPLAY,
91
+ **PRTSTATE_DISPLAY,
92
+ **CLCSTATE_DISPLAY,
93
+ **CLCOMBINE_STATE_DISPLAY,
94
+ **AKBSTATE_DISPLAY,
95
+ **AKPIPESTATE_DISPLAY,
96
+ **AKPIPEL_STATE_DISPLAY,
97
+ **SETTERSTATE_DISPLAY,
98
+ **GETTERSTATE_DISPLAY,
99
+ **RESIZESTATE_DISPLAY,
100
+ **ISMSTATE_DISPLAY,
101
+ **AKSAT_STATE_DISPLAY,
102
+ **AKRALPHA_STATE_DISPLAY,
103
+ **AKRCOLOR_STATE_DISPLAY,
104
+ **AK_CONTROL_SAMPLERS_COLOR_STATE_DISPLAY,
105
+ **AKKSamplerSettings_STATE_DISPLAY,
106
+ **AKProjectSettingsOut_STATE_DISPLAY,
107
+ }
108
+
109
+ __all__ = ["NODE_CLASS_MAPPINGS", "NODE_DISPLAY_NAME_MAPPINGS", "WEB_DIRECTORY"]
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/icon.jpg ADDED
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKBase.jpg ADDED
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKBaseGallery.jpg ADDED

Git LFS Details

  • SHA256: eb4db3e2ac3f5defdefe270ccfee9fb297de7940af11e074db386bbad033031c
  • Pointer size: 131 Bytes
  • Size of remote file: 111 kB
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKBasePIP.jpg ADDED
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKBase_big.jpg ADDED
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKCLIPEncodeMultiple.jpg ADDED
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKIndexMultiple.jpg ADDED
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKProjectSettingsOut.jpg ADDED

Git LFS Details

  • SHA256: 3d3b81351e6282680a3426115038f130b05f5c52b825f13aa26e7a3818d90b1c
  • Pointer size: 131 Bytes
  • Size of remote file: 102 kB
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKReplaceAlphaWithColor.jpg ADDED
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/AKResizeOnBoolean.jpg ADDED
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/CLIPTextEncodeAndCombineCached.jpg ADDED
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/MSamplers.jpg ADDED
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/MSamplersChooser.jpg ADDED
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/MSamplersSettings.jpg ADDED
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/MSamplers_big.jpg ADDED

Git LFS Details

  • SHA256: 59fdb3f9a927f6f5f2f968dcf1ba9808c123022b050026830684c06a8ec279f9
  • Pointer size: 131 Bytes
  • Size of remote file: 294 kB
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/ProjectSettings.jpg ADDED
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/ProjectSettingsOptions.jpg ADDED
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/ProjectSettings_big.jpg ADDED

Git LFS Details

  • SHA256: 1db29defea3df557763622f7493ceed4790b6ed855efecb5b08e07789cd23552
  • Pointer size: 131 Bytes
  • Size of remote file: 303 kB
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/preview_is_group_active.jpg ADDED
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/img/preview_repeater.jpg ADDED
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/js/AKBase.js ADDED
@@ -0,0 +1,263 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { app } from "/scripts/app.js";
2
+ import { api } from "/scripts/api.js";
3
+
4
+ import { buildTempViewUrl, loadImageFromUrl, loadGalleryByCount, fetchTempJson, IO_SETTINGS } from "./AKBase_io.js";
5
+ import { installInputHandlers } from "./AKBase_input.js";
6
+ import { applyNodeLayout, installDraw } from "./AKBase_ui.js";
7
+
8
+ import "./AKBase_pip.js";
9
+
10
+
11
// Version tag included in debug output so logs identify the loaded build.
const AKBASE_VERSION = "v11-statefile";

// Global debug switch; ??= keeps a value set earlier (e.g. from the console).
window.AKBASE_DEBUG ??= true;
// Debug logger: prints only while window.AKBASE_DEBUG is truthy.
const DBG = (...a) => { if (window.AKBASE_DEBUG) console.log(`[AKBase ${AKBASE_VERSION}]`, ...a); };
15
+
16
+
17
// Detach the image's handlers and clear its source so the browser can
// release the decoded bitmap. Safe to call with null/undefined.
function releaseImage(img) {
    if (!img) {
        return;
    }
    try {
        img.onload = null;
        img.onerror = null;
        img.src = "";
    } catch (_) {
        // Best effort: the image is being discarded anyway.
    }
}
27
+
28
// Load the two compare images (A/B) for a node from ComfyUI's temp dir and
// switch the node into "compare" mode. Filenames come from the state JSON
// when present, otherwise fall back to per-node default names.
// A monotonically increasing loadingToken guards against a stale async load
// overwriting the result of a newer one.
async function loadCompare(node, stateJson) {
    const state = node._akBase;
    const token = ++state.loadingToken;

    // Release any previously held images before loading new ones.
    if (state?.a?.img) {
        releaseImage(state.a.img);
        state.a.img = null;
    }
    if (state?.b?.img) {
        releaseImage(state.b.img);
        state.b.img = null;
    }
    if (state?.gallery?.images?.length) {
        for (const img of state.gallery.images) {
            releaseImage(img);
        }
        state.gallery.images = [];
        state.gallery.urls = [];
        state.gallery.hoverIndex = -1;
    }
    state.a.url = null;
    state.b.url = null;
    state.a.loaded = false;
    state.b.loaded = false;

    // Per-node filename suffix (e.g. "_12") so multiple AK Base nodes don't
    // overwrite each other's temp files.
    const nid = node?.id;
    const suffix = (nid !== undefined && nid !== null) ? `_${nid}` : "";

    const aFn = stateJson?.a?.filename ?? `ak_base_image_a${suffix}.png`;
    const bFn = stateJson?.b?.filename ?? `ak_base_image_b${suffix}.png`;

    const aUrl = buildTempViewUrl(aFn);
    const bUrl = buildTempViewUrl(bFn);

    state.a.loaded = false;
    state.b.loaded = false;
    state.a.url = aUrl;
    state.b.url = bUrl;

    DBG("compare loading", { aUrl, bUrl });

    // Fetch both images in parallel; abort if a newer load started meanwhile.
    const [aImg, bImg] = await Promise.all([loadImageFromUrl(aUrl), loadImageFromUrl(bUrl)]);
    if (state.loadingToken !== token) return;

    // Commit: switch to compare mode and clear any gallery leftovers.
    state.mode = "compare";
    state.hasGallery = false;
    state.galleryMeta = null;
    state.gallery.images = [];
    state.gallery.urls = [];
    state.gallery.hoverIndex = -1;

    state.a.img = aImg;
    state.b.img = bImg;
    state.a.loaded = true;
    state.b.loaded = true;

    DBG("compare loaded", { a: [aImg.naturalWidth, aImg.naturalHeight], b: [bImg.naturalWidth, bImg.naturalHeight] });
    // Request a canvas redraw so the new images become visible.
    app.graph.setDirtyCanvas(true, true);
}
87
+
88
// Load a gallery of images (count + filename prefix taken from the state
// JSON) and switch the node into "gallery" mode. Uses the same
// loadingToken race guard as loadCompare.
async function loadGallery(node, stateJson) {
    const state = node._akBase;
    const token = ++state.loadingToken;

    // Release previously held compare/gallery images first.
    if (state?.a?.img) {
        releaseImage(state.a.img);
        state.a.img = null;
    }
    if (state?.b?.img) {
        releaseImage(state.b.img);
        state.b.img = null;
    }
    if (state?.gallery?.images?.length) {
        for (const img of state.gallery.images) {
            releaseImage(img);
        }
        state.gallery.images = [];
        state.gallery.urls = [];
        state.gallery.hoverIndex = -1;
    }
    state.a.url = null;
    state.b.url = null;
    state.a.loaded = false;
    state.b.loaded = false;

    // Clamp count to a sane range (0..4096) in case the state file is bad.
    const count = Math.max(0, Math.min(4096, Number(stateJson?.count ?? 0)));
    const nid = node?.id;
    // Per-node filename prefix unless the state file overrides it.
    const prefix = String(stateJson?.gallery_prefix ?? ((nid !== undefined && nid !== null) ? `ak_base_image_xy_${nid}_` : "ak_base_image_xy_"));

    DBG("gallery loading", { count, prefix });

    const { images, urls } = await loadGalleryByCount(prefix, count);
    // Abort if a newer load started while we were fetching.
    if (state.loadingToken !== token) return;

    // Commit: switch to gallery mode and remember how to reload it later.
    state.mode = "gallery";
    state.hasGallery = true;
    state.galleryMeta = { count, prefix };
    state.a.loaded = false;
    state.b.loaded = false;

    state.gallery.images = images;
    state.gallery.urls = urls;
    state.gallery.hoverIndex = -1;

    DBG("gallery loaded", { count: images.length });
    // Request a canvas redraw so the gallery becomes visible.
    app.graph.setDirtyCanvas(true, true);
}
135
+
136
// Read the node's per-node state JSON from the temp dir (falling back to a
// shared default filename when the node has no id) and dispatch to either
// the gallery or the compare loader. A missing/unreadable state file is
// treated as "nothing to load".
async function loadFromStateFile(node) {
    let s = null;
    try {
        const nid = node?.id;
        const stateFn = (nid !== undefined && nid !== null) ? `ak_base_state_${nid}.json` : IO_SETTINGS.stateFilename;
        s = await fetchTempJson(stateFn);
        DBG("state json", s);
    } catch (e) {
        // No state file yet (e.g. node never executed) — nothing to show.
        DBG("state json missing", e);
        return;
    }

    if (s?.mode === "gallery") {
        await loadGallery(node, s);
    } else {
        // Any other/unknown mode falls back to the two-image compare view.
        await loadCompare(node, s);
    }
}
154
+
155
// One-time setup of an "AK Base" node instance: initializes the _akBase
// view state, registers the node_list property, applies the layout, and
// installs input/draw handlers. Guarded so repeated calls (node creation
// plus every "executed" event) are no-ops after the first.
function installOnNode(node) {
    if (node._akBaseInstalled) return;
    node._akBaseInstalled = true;

    // Per-node view state shared by the io/input/ui helper modules.
    node._akBase = {
        mode: "compare",                               // "compare" or "gallery"
        a: { img: null, url: null, loaded: false },    // bottom compare layer
        b: { img: null, url: null, loaded: false },    // top compare layer
        hover: false,
        inPreview: false,
        cursorX: 0.5,                                  // compare slider position (0..1)
        loadingToken: 0,                               // async-load race guard
        _drawLogged: false,
        hasGallery: false,                             // a gallery was loaded at some point
        galleryMeta: null,                             // { count, prefix } for reloading
        gallery: {
            images: [],
            urls: [],
            hoverIndex: -1,
            grid: null,
        },
    };

    const state = node._akBase;

    // Ensure the "node_list" property exists (used to sync seed/denoise/etc.
    // to other nodes); addProperty also registers it in the Properties panel.
    if (!node.properties) node.properties = {};
    if (!Object.prototype.hasOwnProperty.call(node.properties, "node_list")) {
        if (typeof node.addProperty === "function") {
            node.addProperty("node_list", "", "string");
        } else {
            node.properties.node_list = "";
        }
    }


    applyNodeLayout(node);

    // Switch from a single compare view back to the previously loaded
    // gallery, re-fetching its images. Returns true on success.
    state.backToGallery = async () => {
        try {
            if (!state.hasGallery) return false;
            if (state.mode === "gallery") return false;

            const meta = state.galleryMeta;
            if (!meta || !meta.prefix || !meta.count) return false;

            // Same token race guard as the loaders.
            const token = ++state.loadingToken;
            const { images, urls } = await loadGalleryByCount(String(meta.prefix), Number(meta.count));
            if (state.loadingToken !== token) return false;

            state.mode = "gallery";
            state.a.loaded = false;
            state.b.loaded = false;

            state.gallery.images = images;
            state.gallery.urls = urls;
            state.gallery.hoverIndex = -1;

            // app.graph.setDirtyCanvas(true, true);
            return true;
        } catch (_) {
            return false;
        }
    };

    // Wrap the original onResize so the layout is reapplied on every resize.
    const origOnResize = node.onResize;
    node.onResize = function (size) {
        const r = origOnResize?.call(this, size);
        applyNodeLayout(this);

        return r;
    };

    installInputHandlers(node);
    installDraw(node, DBG);
}
230
+
231
+
232
// Register the extension: patch onNodeCreated for the "AK Base" node type so
// every new instance gets the view state installed at creation time.
app.registerExtension({
    name: "AKBase",
    async beforeRegisterNodeDef(nodeType, nodeData) {
        if (nodeData?.name !== "AK Base") return;

        const onCreated = nodeType.prototype.onNodeCreated;
        nodeType.prototype.onNodeCreated = function () {
            const r = onCreated?.apply(this, arguments);
            installOnNode(this);
            return r;
        };
    },
});

// After an "AK Base" node executes on the backend, reload its images from
// the freshly written temp state file so the preview updates.
api.addEventListener("executed", async (e) => {
    const detail = e?.detail;
    const nodeId = detail?.node;
    if (!nodeId) return;

    const node = app.graph.getNodeById(nodeId);
    if (!node) return;
    if (node.comfyClass !== "AK Base") return;

    // Safe even for existing nodes: installOnNode is idempotent.
    installOnNode(node);

    try {
        await loadFromStateFile(node);
    } catch (err) {
        DBG("load error", err);
        // app.graph.setDirtyCanvas(true, true);
    }
});
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/js/AKBase_input.js ADDED
@@ -0,0 +1,599 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { app } from "/scripts/app.js";
2
+ import { previewRect, backButtonRect, copyButtonRect, pipButtonRect } from "./AKBase_ui.js";
3
+ import { fetchTempJson, buildTempViewUrl, loadImageFromUrl } from "./AKBase_io.js";
4
+
5
+ export function installInputHandlers(node) {
6
+ const state = node._akBase;
7
+ if (!state) return;
8
+
9
+ async function copyTopLayerImageToClipboard() {
10
+ try {
11
+ const enabled = (state.mode === "compare");
12
+ console.log("[AKBase] copyTopLayerImageToClipboard", { enabled, mode: state.mode });
13
+ if (!enabled) return false;
14
+
15
+ const img = state?.b?.img || null;
16
+ const url = state?.b?.url || (img?.src || null);
17
+
18
+ if (!navigator?.clipboard?.write || typeof window.ClipboardItem !== "function") {
19
+ console.log("[AKBase] copy failed: ClipboardItem API not available");
20
+ return false;
21
+ }
22
+
23
+ let blob = null;
24
+
25
+ if (url) {
26
+ try {
27
+ const res = await fetch(url, { cache: "no-store" });
28
+ console.log("[AKBase] copy fetch", { ok: res.ok, status: res.status, url });
29
+ if (res.ok) blob = await res.blob();
30
+ } catch (e) {
31
+ console.log("[AKBase] copy fetch error", e);
32
+ }
33
+ }
34
+
35
+ if (!blob && img) {
36
+ const w = Math.max(1, Number(img.naturalWidth || img.width || 0) || 1);
37
+ const h = Math.max(1, Number(img.naturalHeight || img.height || 0) || 1);
38
+ const canvas = document.createElement("canvas");
39
+ canvas.width = w;
40
+ canvas.height = h;
41
+ const ctx = canvas.getContext("2d");
42
+ if (!ctx) {
43
+ console.log("[AKBase] copy failed: no canvas context");
44
+ return false;
45
+ }
46
+ try { ctx.drawImage(img, 0, 0, w, h); } catch (e) { console.log("[AKBase] copy drawImage error", e); }
47
+ blob = await new Promise((resolve) => {
48
+ try { canvas.toBlob(resolve, "image/png"); } catch (_) { resolve(null); }
49
+ });
50
+ }
51
+
52
+ if (!blob) {
53
+ console.log("[AKBase] copy failed: no blob");
54
+ return false;
55
+ }
56
+
57
+ const mime = (blob.type && String(blob.type).startsWith("image/")) ? blob.type : "image/png";
58
+ await navigator.clipboard.write([new ClipboardItem({ [mime]: blob })]);
59
+ console.log("[AKBase] compare image copied to clipboard", { mime, size: blob.size });
60
+ return true;
61
+ } catch (e) {
62
+ console.log("[AKBase] copyTopLayerImageToClipboard exception", e);
63
+ return false;
64
+ }
65
+ }
66
+
67
+ async function copyGalleryToClipboard() {
68
+ try {
69
+ const enabled = (state.mode === "gallery");
70
+ console.log("[AKBase] copyGalleryToClipboard", { enabled, mode: state.mode });
71
+ if (!enabled) return false;
72
+
73
+ const imgs = state?.gallery?.images ?? [];
74
+ const N = imgs.length;
75
+ if (!N) return false;
76
+
77
+ if (!navigator?.clipboard?.write || typeof window.ClipboardItem !== "function") {
78
+ console.log("[AKBase] copy failed: ClipboardItem API not available");
79
+ return false;
80
+ }
81
+
82
+ let cellW = 0;
83
+ let cellH = 0;
84
+ for (const im of imgs) {
85
+ const w = Number(im?.naturalWidth || im?.width || 0) || 0;
86
+ const h = Number(im?.naturalHeight || im?.height || 0) || 0;
87
+ if (w > cellW) cellW = w;
88
+ if (h > cellH) cellH = h;
89
+ }
90
+ if (cellW <= 0 || cellH <= 0) return false;
91
+
92
+ const cols = Math.max(1, Math.ceil(Math.sqrt(N)));
93
+ const rows = Math.max(1, Math.ceil(N / cols));
94
+ const gap = 0;
95
+
96
+ let outW = cols * cellW + gap * (cols - 1);
97
+ let outH = rows * cellH + gap * (rows - 1);
98
+
99
+ const MAX_SIDE = 8192;
100
+ const MAX_PIXELS = 64 * 1024 * 1024;
101
+ let scale = 1;
102
+
103
+ if (outW > MAX_SIDE || outH > MAX_SIDE) {
104
+ scale = Math.min(scale, MAX_SIDE / outW, MAX_SIDE / outH);
105
+ }
106
+ if ((outW * outH) > MAX_PIXELS) {
107
+ scale = Math.min(scale, Math.sqrt(MAX_PIXELS / (outW * outH)));
108
+ }
109
+
110
+ const canvas = document.createElement("canvas");
111
+ canvas.width = Math.max(1, Math.floor(outW * scale));
112
+ canvas.height = Math.max(1, Math.floor(outH * scale));
113
+
114
+ const ctx = canvas.getContext("2d");
115
+ if (!ctx) {
116
+ console.log("[AKBase] copy failed: no canvas context");
117
+ return false;
118
+ }
119
+
120
+ ctx.clearRect(0, 0, canvas.width, canvas.height);
121
+
122
+ const drawCellW = cellW * scale;
123
+ const drawCellH = cellH * scale;
124
+
125
+ for (let i = 0; i < N; i++) {
126
+ const im = imgs[i];
127
+ if (!im) continue;
128
+
129
+ const w = Number(im?.naturalWidth || im?.width || 0) || 1;
130
+ const h = Number(im?.naturalHeight || im?.height || 0) || 1;
131
+
132
+ const col = i % cols;
133
+ const row = Math.floor(i / cols);
134
+
135
+ const x0 = col * drawCellW;
136
+ const y0 = row * drawCellH;
137
+
138
+ const s = Math.min(drawCellW / w, drawCellH / h);
139
+ const dw = w * s;
140
+ const dh = h * s;
141
+
142
+ const dx = x0 + (drawCellW - dw) * 0.5;
143
+ const dy = y0 + (drawCellH - dh) * 0.5;
144
+
145
+ try {
146
+ ctx.drawImage(im, dx, dy, dw, dh);
147
+ } catch (e) {
148
+ console.log("[AKBase] copy gallery drawImage error", e);
149
+ }
150
+ }
151
+
152
+ const blob = await new Promise((resolve) => {
153
+ try { canvas.toBlob(resolve, "image/png"); } catch (_) { resolve(null); }
154
+ });
155
+
156
+ if (!blob) {
157
+ console.log("[AKBase] copy failed: no blob");
158
+ return false;
159
+ }
160
+
161
+ await navigator.clipboard.write([new ClipboardItem({ "image/png": blob })]);
162
+ console.log("[AKBase] gallery copied to clipboard", { w: canvas.width, h: canvas.height, images: N });
163
+ return true;
164
+ } catch (e) {
165
+ console.log("[AKBase] copyGalleryToClipboard exception", e);
166
+ return false;
167
+ }
168
+ }
169
+
170
+ async function copyCurrentViewToClipboard() {
171
+ if (state.mode === "compare") {
172
+ return await copyTopLayerImageToClipboard();
173
+ }
174
+ if (state.mode === "gallery") {
175
+ return await copyGalleryToClipboard();
176
+ }
177
+ return false;
178
+ }
179
+
180
+ function applyImageSettingsToControlledNodes(result) {
181
+ // console.log("[AKBase] applyImageSettingsToControlledNodes", { result });
182
+ try {
183
+ const listRaw = String(node?.properties?.node_list ?? "").trim();
184
+ if (!listRaw) return;
185
+
186
+ const tokens = listRaw
187
+ .split(",")
188
+ .map(s => s.trim())
189
+ .filter(Boolean);
190
+
191
+
192
+ if (!tokens.length) return;
193
+
194
+ const g = app?.graph;
195
+ const nodes = g?._nodes;
196
+ if (!Array.isArray(nodes) || nodes.length === 0) return;
197
+
198
+ const selfId = node?.id;
199
+
200
+ function findWidgetByName(n, widgetName) {
201
+ const ALIASES = {
202
+ seed: ["seed", "seed_value", "seed "],
203
+ steps: ["steps", "step"],
204
+ };
205
+ const ws = n?.widgets;
206
+ if (!Array.isArray(ws)) return null;
207
+ for (const w of ws) {
208
+ if (!w) continue;
209
+ // console.log("[AKBase] checking widget", { name: w.name, widgetName });
210
+ // if (w.name === widgetName || w.name.slice(0, -1) === widgetName) return w;
211
+ const names = ALIASES[widgetName] || [widgetName];
212
+ if (names.includes(w.name) || names.includes(w.name.slice(0, -1))) return w;
213
+ }
214
+ return null;
215
+ }
216
+
217
+ function setWidgetValue(n, widgetName, value) {
218
+ if (value === undefined || value === null) return false;
219
+
220
+ const w = findWidgetByName(n, widgetName);
221
+ console.log("[AKBase] setWidgetValue", { nodeId: n?.id, widgetName, value, widget: w });
222
+ if (!w) return false;
223
+
224
+ let v = value;
225
+
226
+ if (widgetName === "seed" || widgetName === "seed " || widgetName === "seed_value" || widgetName === "steps") {
227
+ const num = Number(v);
228
+ if (!Number.isFinite(num)) return false;
229
+ v = Math.max(0, Math.trunc(num));
230
+ } else if (widgetName === "cfg" || widgetName === "denoise") {
231
+ const num = Number(v);
232
+ if (!Number.isFinite(num)) return false;
233
+ v = num;
234
+ }
235
+
236
+ if (w.value === v) return true;
237
+
238
+ w.value = v;
239
+
240
+ if (typeof w.callback === "function") {
241
+ try { w.callback(v, app); } catch (_) { }
242
+ }
243
+
244
+ if (typeof n.onWidgetChanged === "function") {
245
+ try { n.onWidgetChanged(w.name, v, w); } catch (_) { }
246
+ }
247
+
248
+ if (typeof n.setDirtyCanvas === "function") {
249
+ try { n.setDirtyCanvas(true, true); } catch (_) { }
250
+ }
251
+
252
+ return true;
253
+ }
254
+
255
+ function resolveControlledNode(tok) {
256
+ const raw = tok.replace(/\s+/g, "");
257
+ const num = Number(raw);
258
+
259
+ // by id (keep exactly as was)
260
+ if (Number.isFinite(num) && String(num) === raw) {
261
+ const id = Math.trunc(num);
262
+ return nodes.find(n => n?.id === id) || null;
263
+ }
264
+
265
+ const t = tok.toLowerCase();
266
+
267
+ // exact match first
268
+ const exact =
269
+ nodes.find(n => String(n?.title ?? "").toLowerCase() === t) ||
270
+ nodes.find(n => String(n?.comfyClass ?? "").toLowerCase() === t);
271
+ if (exact) return exact;
272
+
273
+ // substring match
274
+ return (
275
+ nodes.find(n =>
276
+ String(n?.title ?? "").toLowerCase().includes(t)
277
+ ) ||
278
+ nodes.find(n =>
279
+ String(n?.comfyClass ?? "").toLowerCase().includes(t)
280
+ ) ||
281
+ null
282
+ );
283
+ }
284
+
285
+ for (const tok of tokens) {
286
+ const target = resolveControlledNode(tok);
287
+ console.log("[AKBase] resolved controlled node", { token: tok, targetId: target?.id });
288
+ if (!target) continue;
289
+ if (target?.id === selfId) continue;
290
+
291
+ setWidgetValue(target, "seed", result.seed);
292
+ setWidgetValue(target, "cfg", result.cfg);
293
+ setWidgetValue(target, "steps", result.steps);
294
+ setWidgetValue(target, "denoise", result.denoise);
295
+ setWidgetValue(target, "xz_steps", 1);
296
+ }
297
+
298
+ if (app?.canvas) {
299
+ try { app.canvas.setDirty(true, true); } catch (_) { }
300
+ }
301
+ } catch (e) {
302
+ console.log("[AKBase] applyImageSettingsToControlledNodes failed (ignored)", e);
303
+ }
304
+ }
305
+
306
+
307
+ async function getPropertiesFromImage(imageNumber) {
308
+ console.log("[AKBase] getPropertiesFromImage", { imageNumber });
309
+
310
+ try {
311
+ const idx = Number(imageNumber);
312
+
313
+ const result = {
314
+ seed: null,
315
+ cfg: null,
316
+ denoise: null,
317
+ steps: null,
318
+ };
319
+
320
+ try {
321
+ const nid = node?.id;
322
+ if (nid !== undefined && nid !== null) {
323
+ const cfgFn = `ak_base_xz_config_${nid}.json`;
324
+ const cfg = await fetchTempJson(cfgFn);
325
+ const images = cfg?.image;
326
+
327
+ if (Array.isArray(images)) {
328
+ const it = images[idx];
329
+ if (it && typeof it === "object") {
330
+ const pairs = [
331
+ {
332
+ name: String(it?.x_parameter_name_0 ?? "").toLowerCase(),
333
+ value: it?.x_parameter_value_0,
334
+ },
335
+ {
336
+ name: String(it?.z_parameter_name_0 ?? "").toLowerCase(),
337
+ value: it?.z_parameter_value_0,
338
+ },
339
+ ];
340
+
341
+ for (const { name, value } of pairs) {
342
+ if (value === undefined || value === null) continue;
343
+
344
+ if (name === "seed") {
345
+ result.seed = value;
346
+ } else if (name === "cfg") {
347
+ result.cfg = value;
348
+ } else if (name === "denoise") {
349
+ result.denoise = value;
350
+ } else if (name === "step") {
351
+ result.steps = value;
352
+ }
353
+ }
354
+ }
355
+ }
356
+ }
357
+ } catch (e) {
358
+ console.log(
359
+ "[AKBase] getPropertiesFromImage: xz_config read failed (ignored)",
360
+ e
361
+ );
362
+ }
363
+
364
+ return result;
365
+ } catch (e) {
366
+ console.log(
367
+ "[AKBase] getPropertiesFromImage exception (non-fatal):",
368
+ e
369
+ );
370
+ return result;
371
+ }
372
+ }
373
+
374
+ async function setPreviewImage(imageNumber) {
375
+ const g = state.gallery;
376
+ const imgs = g?.images ?? [];
377
+ const idx = Number(imageNumber);
378
+
379
+ if (!imgs.length) return;
380
+
381
+ const bImg = (idx >= 0 && idx < imgs.length) ? imgs[idx] : null;
382
+ if (!bImg) return;
383
+
384
+ const nid = node?.id;
385
+ if (nid === undefined || nid === null) return;
386
+
387
+ const aFn = `ak_base_image_a_${nid}.png`;
388
+ const aUrl = buildTempViewUrl(aFn);
389
+
390
+ let aImg = null;
391
+ try {
392
+ aImg = await loadImageFromUrl(aUrl);
393
+ } catch (_) {
394
+ return;
395
+ }
396
+
397
+ state.mode = "compare";
398
+
399
+ state.a.img = aImg;
400
+ state.a.loaded = true;
401
+ state.a.url = null;
402
+
403
+ state.b.img = bImg;
404
+ state.b.loaded = true;
405
+ state.b.url = null;
406
+
407
+ state.gallery.images = [];
408
+ state.gallery.urls = [];
409
+ state.gallery.hoverIndex = -1;
410
+
411
+ state.inPreview = true;
412
+ state.hover = true;
413
+ state.cursorX = 0.5;
414
+
415
+ node?._akBase?.updateBackBtn?.();
416
+ // app.graph.setDirtyCanvas(true, true);
417
+ }
418
+
419
+ async function copyCompareImageToClipboard() {
420
+ try {
421
+ const enabled = (state.mode === "compare");
422
+ if (!enabled) return false;
423
+
424
+ const hasReady = !!state?.a?.loaded || !!state?.b?.loaded;
425
+ if (!hasReady) return false;
426
+
427
+ const url =
428
+ state?.b?.url || state?.b?.img?.src ||
429
+ state?.a?.url || state?.a?.img?.src ||
430
+ null;
431
+
432
+ if (!url) return false;
433
+
434
+ if (!navigator?.clipboard || typeof navigator.clipboard.write !== "function" || typeof window.ClipboardItem !== "function") {
435
+ console.log("[AKBase] clipboard image write is not supported");
436
+ return false;
437
+ }
438
+
439
+ const res = await fetch(url, { cache: "no-store" });
440
+ if (!res.ok) {
441
+ console.log("[AKBase] copy fetch failed", res.status);
442
+ return false;
443
+ }
444
+
445
+ const blob = await res.blob();
446
+ const mime = blob?.type || "image/png";
447
+
448
+ await navigator.clipboard.write([
449
+ new ClipboardItem({ [mime]: blob })
450
+ ]);
451
+
452
+ console.log("[AKBase] image copied to clipboard");
453
+ return true;
454
+ } catch (e) {
455
+ console.log("[AKBase] copyCompareImageToClipboard exception:", e);
456
+ return false;
457
+ }
458
+ }
459
+
460
+ node.onMouseMove = function (e, pos) {
461
+ let localX = pos[0];
462
+ let localY = pos[1];
463
+ if (localX > this.size[0] || localY > this.size[1] || localX < 0 || localY < 0) {
464
+ localX = pos[0] - this.pos[0];
465
+ localY = pos[1] - this.pos[1];
466
+ }
467
+
468
+ const r = previewRect(this);
469
+ const inside = localX >= r.x && localX <= r.x + r.w && localY >= r.y && localY <= r.y + r.h;
470
+
471
+ state.inPreview = inside;
472
+ state.hover = inside;
473
+
474
+ if (!inside) {
475
+ if (state.mode === "gallery") state.gallery.hoverIndex = -1;
476
+ return;
477
+ }
478
+
479
+ if (state.mode === "gallery") {
480
+ const g = state.gallery;
481
+ const grid = g?.grid;
482
+ const N = g?.images?.length ?? 0;
483
+ if (!grid || !N) return;
484
+
485
+ const x = localX - r.x;
486
+ const y = localY - r.y;
487
+
488
+ const col = Math.floor(x / grid.cellW);
489
+ const row = Math.floor(y / grid.cellH);
490
+ const idx = row * grid.cols + col;
491
+
492
+ g.hoverIndex = (idx >= 0 && idx < N) ? idx : -1;
493
+ // app.graph.setDirtyCanvas(true, true);
494
+ return;
495
+ }
496
+
497
+ if (r.w > 0) {
498
+ state.cursorX = Math.min(1, Math.max(0, (localX - r.x) / r.w));
499
+ // app.graph.setDirtyCanvas(true, true);
500
+ }
501
+ };
502
+
503
+
504
+ node.onMouseDown = function (e, pos) {
505
+ console.log("[AKBase] onMouseDown", { mode: state.mode, pos });
506
+
507
+ let localX = pos[0];
508
+ let localY = pos[1];
509
+ if (localX > this.size[0] || localY > this.size[1] || localX < 0 || localY < 0) {
510
+ localX = pos[0] - this.pos[0];
511
+ localY = pos[1] - this.pos[1];
512
+ }
513
+
514
+ const btn = backButtonRect(this);
515
+ const insideBtn = localX >= btn.x && localX <= btn.x + btn.w && localY >= btn.y && localY <= btn.y + btn.h;
516
+ if (insideBtn) {
517
+ const enabled = (state.mode === "compare") && !!state.hasGallery;
518
+ console.log("[AKBase] back button click", { enabled });
519
+ if (enabled) {
520
+ (async () => { await state.backToGallery?.(); })();
521
+ }
522
+ return true;
523
+ }
524
+
525
+ const copyBtn = copyButtonRect(this);
526
+ const insideCopyBtn = localX >= copyBtn.x && localX <= copyBtn.x + copyBtn.w && localY >= copyBtn.y && localY <= copyBtn.y + copyBtn.h;
527
+ if (insideCopyBtn) {
528
+ const enabled = (state.mode === "compare") || (state.mode === "gallery");
529
+ console.log("[AKBase] copy button click", { enabled, mode: state.mode });
530
+ if (enabled) {
531
+ (async () => { await copyCurrentViewToClipboard(); })();
532
+ }
533
+ return true;
534
+ }
535
+
536
+ const pipBtn = pipButtonRect(this);
537
+ const insidePipBtn = localX >= pipBtn.x && localX <= pipBtn.x + pipBtn.w && localY >= pipBtn.y && localY <= pipBtn.y + pipBtn.h;
538
+ if (insidePipBtn) {
539
+ console.log("[AKBase] Open PIP button click");
540
+ try {
541
+ const nid = node?.id;
542
+ if (window.AKBasePip && typeof window.AKBasePip.openForNode === "function") {
543
+ window.AKBasePip.openForNode(nid);
544
+ }
545
+ } catch (e) {
546
+ console.log("[AKBase] Open PIP error", e);
547
+ }
548
+ return true;
549
+ }
550
+
551
+ if (state.mode !== "gallery") return false;
552
+
553
+ const r = previewRect(this);
554
+ const inside = localX >= r.x && localX <= r.x + r.w && localY >= r.y && localY <= r.y + r.h;
555
+ console.log("[AKBase] click inside preview:", inside);
556
+
557
+ if (!inside) return false;
558
+
559
+ const g = state.gallery;
560
+ const grid = g?.grid;
561
+ const N = g?.images?.length ?? 0;
562
+
563
+ if (!grid || !N) {
564
+ console.log("[AKBase] gallery grid/images missing", { hasGrid: !!grid, N });
565
+ return false;
566
+ }
567
+
568
+ const x = localX - r.x;
569
+ const y = localY - r.y;
570
+
571
+ const col = Math.floor(x / grid.cellW);
572
+ const row = Math.floor(y / grid.cellH);
573
+ const idx = row * grid.cols + col;
574
+
575
+ console.log("[AKBase] computed gallery index:", idx, { row, col, cols: grid.cols, cellW: grid.cellW, cellH: grid.cellH, N });
576
+
577
+ if (!(idx >= 0 && idx < N)) return false;
578
+
579
+ g.hoverIndex = idx;
580
+
581
+ (async () => {
582
+ const props = await getPropertiesFromImage(idx);
583
+ console.log("[AKBase] getPropertiesFromImage result:", props);
584
+ if (props) {
585
+ applyImageSettingsToControlledNodes(props);
586
+ await setPreviewImage(idx);
587
+ }
588
+ })();
589
+
590
+ return true;
591
+ };
592
+
593
+ node.onMouseLeave = function () {
594
+ state.hover = false;
595
+ state.inPreview = false;
596
+ if (state.mode === "gallery") state.gallery.hoverIndex = -1;
597
+ // app.graph.setDirtyCanvas(true, true);
598
+ };
599
+ }
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/js/AKBase_io.js ADDED
@@ -0,0 +1,110 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { api } from "/scripts/api.js";
2
+
3
+ export const IO_SETTINGS = {
4
+ cacheBustParam: "_akb",
5
+ stateFilename: "ak_base_state.json",
6
+ };
7
+
8
+
9
+ export function buildTempViewUrl(filename) {
10
+ const fn = encodeURIComponent(filename ?? "");
11
+ const base = `/view?filename=${fn}&type=temp&subfolder=&${IO_SETTINGS.cacheBustParam}=${Date.now()}`;
12
+ return api.apiURL(base);
13
+ }
14
+
15
+ export async function fetchTempJson(filename) {
16
+ const url = buildTempViewUrl(filename);
17
+ const res = await fetch(url, { cache: "no-store" });
18
+ if (!res.ok) throw new Error(`HTTP ${res.status}`);
19
+ const txt = await res.text();
20
+ return JSON.parse(txt);
21
+ }
22
+
23
+ export async function loadImageFromUrl(url) {
24
+ const img = new Image();
25
+ img.crossOrigin = "anonymous";
26
+ img.src = url;
27
+ await new Promise((resolve, reject) => {
28
+ img.onload = resolve;
29
+ img.onerror = reject;
30
+ });
31
+ return img;
32
+ }
33
+
34
+ export async function loadGalleryByCount(prefix, count) {
35
+ const images = [];
36
+ const urls = [];
37
+ for (let i = 0; i < count; i++) {
38
+ const filename = `${prefix}${i}.png`;
39
+ const url = buildTempViewUrl(filename);
40
+ const img = await loadImageFromUrl(url);
41
+ images.push(img);
42
+ urls.push(url);
43
+ }
44
+ return { images, urls };
45
+ }
46
+
47
+
48
+ export async function readPngTextChunks(url) {
49
+ const res = await fetch(url, { cache: "no-store" });
50
+ const buf = await res.arrayBuffer();
51
+ const bytes = new Uint8Array(buf);
52
+ if (bytes.length < 8) return [];
53
+
54
+ let off = 8;
55
+ const out = [];
56
+
57
+ const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
58
+ const dec = new TextDecoder();
59
+
60
+ while (off + 12 <= bytes.length) {
61
+ const len = dv.getUint32(off, false); off += 4;
62
+ const type = dec.decode(bytes.slice(off, off + 4)); off += 4;
63
+
64
+ if (off + len + 4 > bytes.length) break;
65
+
66
+ const data = bytes.slice(off, off + len); off += len;
67
+ off += 4; // crc
68
+
69
+ if (type === "tEXt") {
70
+ const i0 = data.indexOf(0);
71
+ if (i0 > 0) {
72
+ const key = dec.decode(data.slice(0, i0));
73
+ const val = dec.decode(data.slice(i0 + 1));
74
+ out.push({ type, key, val });
75
+ }
76
+ } else if (type === "iTXt") {
77
+ const i0 = data.indexOf(0);
78
+ if (i0 > 0) {
79
+ const key = dec.decode(data.slice(0, i0));
80
+ let p = i0 + 1;
81
+ if (p + 2 <= data.length) {
82
+ const compressionFlag = data[p]; p += 1;
83
+ p += 1; // compressionMethod
84
+
85
+ const z1 = data.indexOf(0, p);
86
+ if (z1 < 0) { out.push({ type, key, val: "" }); continue; }
87
+ p = z1 + 1;
88
+
89
+ const z2 = data.indexOf(0, p);
90
+ if (z2 < 0) { out.push({ type, key, val: "" }); continue; }
91
+ p = z2 + 1;
92
+
93
+ const z3 = data.indexOf(0, p);
94
+ if (z3 < 0) { out.push({ type, key, val: "" }); continue; }
95
+ p = z3 + 1;
96
+
97
+ let val = "";
98
+ if (compressionFlag === 0) {
99
+ val = dec.decode(data.slice(p));
100
+ }
101
+ out.push({ type, key, val });
102
+ }
103
+ }
104
+ }
105
+
106
+ if (type === "IEND") break;
107
+ }
108
+
109
+ return out;
110
+ }
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/js/AKBase_pip.js ADDED
@@ -0,0 +1,926 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { app } from "/scripts/app.js";
2
+ import { renderCompare } from "./AKBase_ui.js";
3
+
4
+ const TITLE_HEIGHT = 40;
5
+ const PIP_ID = "akbase-pip-window";
6
+ const AKBASE_PIP_MAX_WIDTH_RATIO = 0.9;
7
+ const AKBASE_PIP_MAX_HEIGHT_RATIO = 0.9;
8
+
9
+ const AKBASE_PIP_MAX_BACKING_WIDTH = 2048;
10
+ const AKBASE_PIP_MAX_BACKING_HEIGHT = 2048;
11
+
12
+ const AKBASE_PIP_BUTTON_SIZE = 18;
13
+
14
+ const extensionBaseUrl = "/extensions/ComfyUI-AK-Pack/";
15
+
16
+ function createTitleIconButton(src, title, onClick) {
17
+ const btn = document.createElement("button");
18
+ btn.type = "button";
19
+ btn.title = title || "";
20
+ btn.style.border = "none";
21
+ btn.style.outline = "none";
22
+ btn.style.margin = "0";
23
+ btn.style.padding = "0";
24
+ btn.style.width = AKBASE_PIP_BUTTON_SIZE + "px";
25
+ btn.style.height = AKBASE_PIP_BUTTON_SIZE + "px";
26
+ btn.style.background = "transparent";
27
+ btn.style.cursor = "pointer";
28
+ btn.style.display = "inline-flex";
29
+ btn.style.alignItems = "center";
30
+ btn.style.justifyContent = "center";
31
+ btn.style.flex = "0 0 auto";
32
+
33
+ const img = document.createElement("img");
34
+ img.src = extensionBaseUrl + src;
35
+ img.alt = title || "";
36
+ img.style.display = "block";
37
+ img.style.width = "100%";
38
+ img.style.height = "100%";
39
+ img.style.objectFit = "contain";
40
+
41
+ btn.appendChild(img);
42
+
43
+ btn.addEventListener("click", (e) => {
44
+ e.stopPropagation();
45
+ if (typeof onClick === "function") {
46
+ onClick(e);
47
+ }
48
+ });
49
+
50
+ btn.addEventListener("mousedown", (e) => {
51
+ e.stopPropagation();
52
+ });
53
+
54
+ return btn;
55
+ }
56
+
57
+ function cleanupPipWindow(container) {
58
+ try {
59
+ if (!container) return;
60
+ if (typeof container._akPipDragCleanup === "function") {
61
+ container._akPipDragCleanup();
62
+ }
63
+ if (typeof container._akPipResizeCleanup === "function") {
64
+ container._akPipResizeCleanup();
65
+ }
66
+ if (typeof container._akPipCanvasCleanup === "function") {
67
+ container._akPipCanvasCleanup();
68
+ }
69
+ const canvas = container.querySelector("canvas");
70
+ if (canvas) {
71
+ canvas.width = 0;
72
+ canvas.height = 0;
73
+ }
74
+ container._akPipDragCleanup = null;
75
+ container._akPipResizeCleanup = null;
76
+ } catch (_) {}
77
+ }
78
+
79
+ function destroyPipWindow(container) {
80
+ if (!container) {
81
+ container = document.getElementById(PIP_ID);
82
+ }
83
+ if (!container) return;
84
+ cleanupPipWindow(container);
85
+ if (container.parentNode) {
86
+ container.parentNode.removeChild(container);
87
+ }
88
+ }
89
+
90
+ function createPipWindow(nodeId) {
91
+ const pipId = `${PIP_ID}-${nodeId}`;
92
+ if (document.getElementById(pipId)) return;
93
+
94
+ const dpr = window.devicePixelRatio || 1;
95
+ const viewportWidth = window.innerWidth || 800;
96
+ const viewportHeight = window.innerHeight || 600;
97
+
98
+ // Aspect of viewport: width / height
99
+ const aspect = viewportWidth / viewportHeight;
100
+ // 20% of larger side
101
+ const maxSide = Math.max(viewportWidth, viewportHeight) * 0.2;
102
+
103
+ let canvasWidth;
104
+ let canvasHeight;
105
+
106
+ if (aspect >= 1) {
107
+ // Landscape-like viewport
108
+ canvasWidth = maxSide;
109
+ canvasHeight = maxSide / aspect;
110
+ } else {
111
+ // Portrait-like viewport
112
+ canvasHeight = maxSide;
113
+ canvasWidth = maxSide * aspect;
114
+ }
115
+
116
+ canvasWidth = Math.max(160, Math.floor(canvasWidth));
117
+ canvasHeight = Math.max(90, Math.floor(canvasHeight));
118
+
119
+ const windowWidth = canvasWidth;
120
+ const windowHeight = canvasHeight + TITLE_HEIGHT;
121
+
122
+ const container = document.createElement("div");
123
+ container.id = pipId;
124
+ container.style.position = "fixed";
125
+ container.style.boxSizing = "border-box";
126
+ container.style.left = `${Math.max(10, (viewportWidth - windowWidth) / 2)}px`;
127
+ container.style.top = "20px";
128
+ container.style.width = `${windowWidth}px`;
129
+ container.style.height = `${windowHeight}px`;
130
+ container.style.background = "#111";
131
+ container.style.border = "1px solid #444";
132
+ container.style.borderRadius = "6px";
133
+ container.style.boxShadow = "0 4px 16px rgba(0,0,0,0.5)";
134
+ container.style.zIndex = "9999";
135
+ container.style.display = "flex";
136
+ container.style.flexDirection = "column";
137
+ container.style.padding = "0";
138
+ container.style.margin = "0";
139
+ container.style.overflow = "hidden";
140
+
141
+ // Title bar (drag handle)
142
+ const titleBar = document.createElement("div");
143
+ titleBar.style.flex = "0 0 auto";
144
+ titleBar.style.height = `${TITLE_HEIGHT}px`;
145
+ titleBar.style.cursor = "move";
146
+ titleBar.style.userSelect = "none";
147
+ titleBar.style.WebkitUserSelect = "none";
148
+ titleBar.style.MozUserSelect = "none";
149
+ titleBar.style.padding = "0 12px";
150
+ titleBar.style.background = "#222";
151
+ titleBar.style.color = "#eee";
152
+ titleBar.style.fontSize = "12px";
153
+ titleBar.style.fontFamily =
154
+ "Inter, system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif";
155
+ titleBar.style.display = "flex";
156
+ titleBar.style.position = "relative";
157
+ titleBar.style.zIndex = "2";
158
+ titleBar.style.alignItems = "center";
159
+ titleBar.style.justifyContent = "space-between";
160
+ titleBar.style.boxSizing = "border-box";
161
+
162
+ const leftButtons = document.createElement("div");
163
+ leftButtons.style.flex = "0 0 auto";
164
+ leftButtons.style.display = "flex";
165
+ leftButtons.style.alignItems = "center";
166
+
167
+ const titleText = document.createElement("div");
168
+ titleText.textContent = "AK Base PiP";
169
+ titleText.style.flex = "1 1 auto";
170
+ titleText.style.overflow = "hidden";
171
+ titleText.style.whiteSpace = "nowrap";
172
+ titleText.style.textOverflow = "ellipsis";
173
+ titleText.style.textAlign = "center";
174
+
175
+ const rightButtons = document.createElement("div");
176
+ rightButtons.style.flex = "0 0 auto";
177
+ rightButtons.style.display = "flex";
178
+ rightButtons.style.alignItems = "center";
179
+ rightButtons.style.gap = "6px";
180
+
181
+ titleBar.appendChild(leftButtons);
182
+ titleBar.appendChild(titleText);
183
+ titleBar.appendChild(rightButtons);
184
+
185
+ // Canvas area, no padding around
186
+ const canvas = document.createElement("canvas");
187
+ canvas.style.flex = "0 0 auto";
188
+ canvas.style.display = "block";
189
+ canvas.style.background = "red";
190
+ canvas.style.margin = "0 auto";
191
+ canvas.style.padding = "0";
192
+
193
+ canvas.width = Math.floor(canvasWidth * dpr);
194
+ canvas.height = Math.floor(canvasHeight * dpr);
195
+
196
+ const pipState = container._akPipState || (container._akPipState = {
197
+ inPreview: false,
198
+ cursorX: 0.5,
199
+ zoom: 1,
200
+ offsetX: 0,
201
+ offsetY: 0,
202
+ });
203
+
204
+ function onCanvasMove(e) {
205
+ const rect = canvas.getBoundingClientRect();
206
+ const x = e.clientX - rect.left;
207
+ const y = e.clientY - rect.top;
208
+ if (x >= 0 && y >= 0 && x <= rect.width && y <= rect.height) {
209
+ pipState.inPreview = true;
210
+ pipState.cursorX = rect.width > 0 ? Math.min(1, Math.max(0, x / rect.width)) : 0.5;
211
+ } else {
212
+ pipState.inPreview = false;
213
+ }
214
+ }
215
+
216
+ function onCanvasLeave() {
217
+ pipState.inPreview = false;
218
+ }
219
+
220
+ function onCanvasWheel(e) {
221
+ e.preventDefault();
222
+
223
+ const canvasRect = canvas.getBoundingClientRect();
224
+ const containerRect = container.getBoundingClientRect();
225
+
226
+ const oldZoom = pipState.zoom || 1;
227
+ const zoomFactor = e.deltaY < 0 ? 1.1 : 1.0 / 1.1;
228
+ const newZoom = Math.min(25, Math.max(0.2, oldZoom * zoomFactor));
229
+
230
+ // Determine if pointer is currently over the canvas
231
+ const overCanvas =
232
+ e.clientX >= canvasRect.left &&
233
+ e.clientX <= canvasRect.right &&
234
+ e.clientY >= canvasRect.top &&
235
+ e.clientY <= canvasRect.bottom;
236
+
237
+ // Zoom IN only when the mouse is over the canvas
238
+ if (newZoom > oldZoom && !overCanvas) {
239
+ return;
240
+ }
241
+
242
+ // Mouse position relative to the dialog (container-local)
243
+ const mouseX = e.clientX - containerRect.left;
244
+ const mouseY = e.clientY - containerRect.top;
245
+
246
+ // Anchor is stored in dialog coordinates only
247
+ if (newZoom > oldZoom) {
248
+ pipState.lastZoomAnchor = {
249
+ mouseX,
250
+ mouseY,
251
+ };
252
+ }
253
+
254
+ // Decide which anchor to use for this wheel step
255
+ let anchorX = mouseX;
256
+ let anchorY = mouseY;
257
+ if (newZoom < oldZoom && pipState.lastZoomAnchor) {
258
+ anchorX = pipState.lastZoomAnchor.mouseX;
259
+ anchorY = pipState.lastZoomAnchor.mouseY;
260
+ }
261
+
262
+ // Canvas position relative to dialog
263
+ const oldWidth = canvasRect.width || 1;
264
+ const oldHeight = canvasRect.height || 1;
265
+ const canvasLeft = canvasRect.left - containerRect.left;
266
+ const canvasTop = canvasRect.top - containerRect.top;
267
+
268
+ // Relative position of the anchor within the canvas,
269
+ // computed from dialog-space anchor and current canvas position.
270
+ const relX = (anchorX - canvasLeft) / oldWidth;
271
+ const relY = (anchorY - canvasTop) / oldHeight;
272
+
273
+ pipState.zoom = newZoom;
274
+
275
+ if (typeof resizeCanvasToWindow === "function") {
276
+ resizeCanvasToWindow(container, canvas, true);
277
+ }
278
+
279
+ // Reset transform to measure freshly sized canvas
280
+ canvas.style.transform = "translate(0px, 0px)";
281
+
282
+ const baseRect = canvas.getBoundingClientRect();
283
+ const baseLeft = baseRect.left - containerRect.left;
284
+ const baseTop = baseRect.top - containerRect.top;
285
+
286
+ const newWidth = baseRect.width || 1;
287
+ const newHeight = baseRect.height || 1;
288
+
289
+ // Keep the same logical point under the anchor after resize
290
+ const desiredLeft = anchorX - relX * newWidth;
291
+ const desiredTop = anchorY - relY * newHeight;
292
+
293
+ const offsetX = desiredLeft - baseLeft;
294
+ const offsetY = desiredTop - baseTop;
295
+
296
+ pipState.offsetX = offsetX;
297
+ pipState.offsetY = offsetY;
298
+
299
+ canvas.style.transform = `translate(${offsetX}px, ${offsetY}px)`;
300
+ }
301
+
302
+
303
// Shared state for canvas panning (left-button drag of the zoomed image).
let isPanning = false;
let panStartX = 0;       // pointer position when the pan began
let panStartY = 0;
let panStartOffsetX = 0; // pipState offset when the pan began
let panStartOffsetY = 0;
308
+
309
// Begin panning on a left-button press and track the pointer globally so
// the pan keeps working when the cursor leaves the canvas.
function onCanvasPanMouseDown(e) {
  const LEFT_BUTTON = 0;
  if (e.button !== LEFT_BUTTON) return;
  e.preventDefault();

  isPanning = true;
  panStartX = e.clientX;
  panStartY = e.clientY;
  panStartOffsetX = pipState.offsetX || 0;
  panStartOffsetY = pipState.offsetY || 0;

  window.addEventListener("mousemove", onCanvasPanMouseMove);
  window.addEventListener("mouseup", onCanvasPanMouseUp);
}
321
+
322
// Translate pointer movement into a canvas offset while a pan is active.
function onCanvasPanMouseMove(e) {
  if (!isPanning) return;
  pipState.offsetX = panStartOffsetX + (e.clientX - panStartX);
  pipState.offsetY = panStartOffsetY + (e.clientY - panStartY);
}
329
+
330
// End the pan and detach the temporary window-level listeners.
function onCanvasPanMouseUp(e) {
  if (!isPanning) return;
  isPanning = false;
  window.removeEventListener("mousemove", onCanvasPanMouseMove);
  window.removeEventListener("mouseup", onCanvasPanMouseUp);
}
336
// Wire up hover, wheel-zoom, and pan interaction for the PiP canvas.
canvas.addEventListener("mousemove", onCanvasMove);
canvas.addEventListener("mouseleave", onCanvasLeave);
// Wheel listener lives on the container so zoom-out keeps working when the
// pointer is outside the (possibly smaller) canvas.
container.addEventListener("wheel", onCanvasWheel, { passive: false });
canvas.addEventListener("mousedown", onCanvasPanMouseDown);

// Cleanup hook invoked when the PiP window is destroyed.
container._akPipCanvasCleanup = function () {
  canvas.removeEventListener("mousemove", onCanvasMove);
  canvas.removeEventListener("mouseleave", onCanvasLeave);
  // BUG FIX: the wheel listener is registered on `container`, so it must be
  // removed from `container` as well. It was previously removed from
  // `canvas`, which leaked the non-passive wheel handler on every close.
  container.removeEventListener("wheel", onCanvasWheel);
  canvas.removeEventListener("mousedown", onCanvasPanMouseDown);
  window.removeEventListener("mousemove", onCanvasPanMouseMove);
  window.removeEventListener("mouseup", onCanvasPanMouseUp);
};
349
+
350
+ container.appendChild(titleBar);
351
+ container.appendChild(canvas);
352
+ container.dataset.akbaseNodeId = (nodeId !== undefined && nodeId !== null) ? String(nodeId) : "";
353
+ container._akBaseNodeId = container.dataset.akbaseNodeId;
354
+ const maximizeButton = createTitleIconButton("img/i_max.png", "Maximize", () => {
355
+ const isMaximized = !!container._akPipMaximized;
356
+ if (!isMaximized) {
357
+ const rect = container.getBoundingClientRect();
358
+ container._akPipPrevRect = {
359
+ left: rect.left,
360
+ top: rect.top,
361
+ width: rect.width,
362
+ height: rect.height,
363
+ };
364
+
365
+ const viewportWidth = window.innerWidth || 800;
366
+ const viewportHeight = window.innerHeight || 600;
367
+
368
+ const newWidth = Math.floor(viewportWidth * AKBASE_PIP_MAX_WIDTH_RATIO);
369
+ const newHeight = Math.floor(viewportHeight * AKBASE_PIP_MAX_HEIGHT_RATIO);
370
+
371
+ const left = Math.max(0, Math.floor((viewportWidth - newWidth) / 2));
372
+ const top = Math.max(0, Math.floor((viewportHeight - newHeight) / 2));
373
+
374
+ container.style.left = left + "px";
375
+ container.style.top = top + "px";
376
+ container.style.width = newWidth + "px";
377
+ container.style.height = newHeight + "px";
378
+ container.style.right = "";
379
+ container.style.bottom = "";
380
+
381
+ container._akPipMaximized = true;
382
+
383
+ const img = maximizeButton.querySelector("img");
384
+ if (img) {
385
+ img.src = extensionBaseUrl + "img/i_min.png";
386
+ }
387
+ maximizeButton.title = "Minimize";
388
+
389
+ resizeCanvasToWindow(container, canvas);
390
+ } else {
391
+ const prev = container._akPipPrevRect;
392
+ if (prev) {
393
+ container.style.left = prev.left + "px";
394
+ container.style.top = prev.top + "px";
395
+ container.style.width = prev.width + "px";
396
+ container.style.height = prev.height + "px";
397
+ container.style.right = "";
398
+ container.style.bottom = "";
399
+ }
400
+
401
+ container._akPipMaximized = false;
402
+
403
+ const img = maximizeButton.querySelector("img");
404
+ if (img) {
405
+ img.src = extensionBaseUrl + "img/i_max.png";
406
+ }
407
+ maximizeButton.title = "Maximize";
408
+
409
+ resizeCanvasToWindow(container, canvas);
410
+ }
411
+ });
412
+
413
+ const resetZoomButton = createTitleIconButton("img/i_reset_off.png", "Reset zoom", () => {
414
+ pipState.zoom = 1;
415
+ pipState.offsetX = 0;
416
+ pipState.offsetY = 0;
417
+ resizeCanvasToWindow(container, canvas);
418
+ });
419
+
420
+ const resetZoomImg = resetZoomButton.querySelector("img");
421
+ if (resetZoomImg) {
422
+ container._akPipResetImg = resetZoomImg;
423
+ }
424
+
425
+ if (typeof leftButtons !== "undefined") {
426
+ leftButtons.appendChild(maximizeButton);
427
+ leftButtons.appendChild(resetZoomButton);
428
+ leftButtons.style.gap = "6px";
429
+ leftButtons.style.minWidth = AKBASE_PIP_BUTTON_SIZE * 2 + "px";
430
+ }
431
+
432
+ const closeButton = createTitleIconButton("img/i_close.png", "Close", () => {
433
+ destroyPipWindow(container);
434
+ });
435
+
436
+ if (typeof rightButtons !== "undefined") {
437
+ rightButtons.appendChild(closeButton);
438
+ rightButtons.style.minWidth = AKBASE_PIP_BUTTON_SIZE + "px";
439
+ }
440
+
441
+
442
+ // Resize handles in corners
443
+ const handles = createResizeHandles(container);
444
+ handles.forEach((h) => container.appendChild(h));
445
+
446
+ document.body.appendChild(container);
447
+
448
+ installDragBehavior(container, titleBar);
449
+ installResizeBehavior(container, canvas);
450
+
451
+ startPipRenderLoop(container, canvas);
452
+
453
+ return { container, titleBar, canvas };
454
+ }
455
+
456
// Build the four invisible corner handles used to resize the PiP container.
// Each handle is anchored to one corner and carries a matching resize cursor;
// the handles are returned (not appended) so the caller controls insertion.
function createResizeHandles(container) {
  const HANDLE_SIZE = 10;
  // Anchoring and cursor per corner, in the same order the original used.
  const CORNER_STYLES = {
    nw: { left: "0", top: "0", cursor: "nwse-resize" },
    ne: { right: "0", top: "0", cursor: "nesw-resize" },
    sw: { left: "0", bottom: "0", cursor: "nesw-resize" },
    se: { right: "0", bottom: "0", cursor: "nwse-resize" },
  };

  return Object.keys(CORNER_STYLES).map((corner) => {
    const handle = document.createElement("div");
    handle.dataset.corner = corner;
    handle.style.position = "absolute";
    handle.style.width = `${HANDLE_SIZE}px`;
    handle.style.height = `${HANDLE_SIZE}px`;
    handle.style.zIndex = "10000";
    handle.style.background = "transparent";
    Object.assign(handle.style, CORNER_STYLES[corner]);
    return handle;
  });
}
493
+
494
// Make the PiP window draggable by its title bar, clamped to the viewport.
function installDragBehavior(container, titleBar) {
  let isDragging = false;
  let dragOffsetX = 0; // pointer offset inside the container at drag start
  let dragOffsetY = 0;

  function onMouseDown(e) {
    // Left mouse button only.
    if (e.button !== 0) return;

    isDragging = true;
    const rect = container.getBoundingClientRect();
    dragOffsetX = e.clientX - rect.left;
    dragOffsetY = e.clientY - rect.top;

    // Track the pointer globally so the drag survives leaving the title bar.
    window.addEventListener("mousemove", onMouseMove);
    window.addEventListener("mouseup", onMouseUp);
  }

  function onMouseMove(e) {
    if (!isDragging) return;

    const viewportWidth = window.innerWidth || 800;
    const viewportHeight = window.innerHeight || 600;

    let left = e.clientX - dragOffsetX;
    let top = e.clientY - dragOffsetY;

    const rect = container.getBoundingClientRect();
    const width = rect.width;
    const height = rect.height;

    // Clamp so the window stays inside the viewport; the inner Math.max
    // guards against a container larger than the viewport (max < min).
    const minLeft = 0;
    const minTop = 0;
    const maxLeft = viewportWidth - width;
    const maxTop = viewportHeight - height;

    left = Math.min(Math.max(minLeft, left), Math.max(minLeft, maxLeft));
    top = Math.min(Math.max(minTop, top), Math.max(minTop, maxTop));

    // Position via left/top; clear right/bottom so they cannot conflict.
    container.style.left = `${left}px`;
    container.style.top = `${top}px`;
    container.style.right = "";
    container.style.bottom = "";
  }

  function onMouseUp() {
    if (!isDragging) return;
    isDragging = false;
    window.removeEventListener("mousemove", onMouseMove);
    window.removeEventListener("mouseup", onMouseUp);
  }

  titleBar.addEventListener("mousedown", onMouseDown);

  // Cleanup hook invoked when the PiP window is destroyed.
  container._akPipDragCleanup = function () {
    titleBar.removeEventListener("mousedown", onMouseDown);
    window.removeEventListener("mousemove", onMouseMove);
    window.removeEventListener("mouseup", onMouseUp);
  };
}
553
+
554
// Corner-drag resizing for the PiP window. Attaches to the handles created
// by createResizeHandles() and keeps the canvas in sync while resizing.
function installResizeBehavior(container, canvas) {
  let isResizing = false;
  let activeCorner = null; // "nw" | "ne" | "sw" | "se" while resizing
  let startX = 0;          // pointer position at resize start
  let startY = 0;
  let startWidth = 0;      // container geometry at resize start
  let startHeight = 0;
  let startLeft = 0;
  let startTop = 0;

  const minWidth = 160;
  const minHeight = 120;

  const cornerHandles = container.querySelectorAll("div[data-corner]");

  function onHandleMouseDown(e) {
    // Left mouse button only.
    if (e.button !== 0) return;

    const target = e.currentTarget;
    const corner = target?.dataset?.corner;
    if (!corner) return;

    const rect = container.getBoundingClientRect();

    isResizing = true;
    activeCorner = corner;
    startX = e.clientX;
    startY = e.clientY;
    startWidth = rect.width;
    startHeight = rect.height;
    startLeft = rect.left;
    startTop = rect.top;

    window.addEventListener("mousemove", onMouseMove);
    window.addEventListener("mouseup", onMouseUp);

    // Keep the drag/pan handlers from also reacting to this press.
    e.stopPropagation();
    e.preventDefault();
  }

  function onMouseMove(e) {
    if (!isResizing || !activeCorner) return;

    const dx = e.clientX - startX;
    const dy = e.clientY - startY;

    let newWidth = startWidth;
    let newHeight = startHeight;
    let newLeft = startLeft;
    let newTop = startTop;

    // North/west corners move the window origin as well as resizing it.
    if (activeCorner === "se") {
      newWidth = startWidth + dx;
      newHeight = startHeight + dy;
    } else if (activeCorner === "sw") {
      newWidth = startWidth - dx;
      newHeight = startHeight + dy;
      newLeft = startLeft + dx;
    } else if (activeCorner === "ne") {
      newWidth = startWidth + dx;
      newHeight = startHeight - dy;
      newTop = startTop + dy;
    } else if (activeCorner === "nw") {
      newWidth = startWidth - dx;
      newHeight = startHeight - dy;
      newLeft = startLeft + dx;
      newTop = startTop + dy;
    }

    newWidth = Math.max(minWidth, newWidth);
    newHeight = Math.max(minHeight, newHeight);

    container.style.width = `${newWidth}px`;
    container.style.height = `${newHeight}px`;
    container.style.left = `${newLeft}px`;
    container.style.top = `${newTop}px`;
    container.style.right = "";
    container.style.bottom = "";

    // Refit the canvas to the new container size.
    resizeCanvasToWindow(container, canvas);
  }

  function onMouseUp() {
    if (!isResizing || !activeCorner) return;
    isResizing = false;
    activeCorner = null;
    window.removeEventListener("mousemove", onMouseMove);
    window.removeEventListener("mouseup", onMouseUp);
  }

  cornerHandles.forEach((handle) => {
    handle.addEventListener("mousedown", onHandleMouseDown);
  });

  // Cleanup hook invoked when the PiP window is destroyed.
  container._akPipResizeCleanup = function () {
    cornerHandles.forEach((handle) => {
      handle.removeEventListener("mousedown", onHandleMouseDown);
    });
    window.removeEventListener("mousemove", onMouseMove);
    window.removeEventListener("mouseup", onMouseUp);
  };
}
656
+
657
+
658
// Per-frame render loop for the PiP window. Mirrors the owning AKBase node's
// compare state onto the PiP canvas until the container leaves the DOM.
function startPipRenderLoop(container, canvas) {
  const ctx = canvas.getContext("2d");
  if (!ctx) return;

  // Lazily create the view state shared with the wheel/pan handlers.
  const pipState = container._akPipState || (container._akPipState = {
    inPreview: false,
    cursorX: 0.5,
    zoom: 1,
    offsetX: 0,
    offsetY: 0,
  });

  // Resolve the AKBase node this PiP mirrors and snapshot the fields the
  // renderer needs. Falls back to the first node carrying _akBase when the
  // stored node id no longer resolves.
  function getNodeStateForPip() {
    const nidRaw = container._akBaseNodeId || container.dataset.akbaseNodeId;
    const nid = (nidRaw !== undefined && nidRaw !== null && nidRaw !== "") ? Number(nidRaw) : null;

    const graph = app?.graph;
    const nodes = graph?._nodes;
    if (!nodes || !nodes.length) return null;

    let target = null;
    if (nid !== null && Number.isFinite(nid)) {
      if (typeof graph.getNodeById === "function") {
        target = graph.getNodeById(nid) || null;
      } else {
        // Fallback for graph implementations without getNodeById: scan by id.
        for (let i = 0; i < nodes.length; i++) {
          const n = nodes[i];
          if (n && n.id === nid) {
            target = n;
            break;
          }
        }
      }
    }

    if (!target) {
      for (let i = 0; i < nodes.length; i++) {
        const n = nodes[i];
        if (n && n._akBase) {
          target = n;
          break;
        }
      }
    }

    if (!target || !target._akBase) return null;

    const src = target._akBase;
    // Copy only what renderCompare consumes; preview/cursor come from the
    // PiP's own state, not the node's.
    const state = {
      mode: src.mode,
      a: {
        img: src.a?.img || null,
        url: src.a?.url || null,
        loaded: !!src.a?.loaded,
      },
      b: {
        img: src.b?.img || null,
        url: src.b?.url || null,
        loaded: !!src.b?.loaded,
      },
      inPreview: !!pipState.inPreview,
      cursorX: typeof pipState.cursorX === "number" ? pipState.cursorX : 0.5,
    };

    return state;
  }

  function frame() {
    // Stop the loop once the PiP window has been closed/removed.
    if (!document.body.contains(container)) return;

    const dpr = window.devicePixelRatio || 1;
    const rect = canvas.getBoundingClientRect();
    const logicalWidth = rect.width || (canvas.width / dpr);
    const logicalHeight = rect.height || (canvas.height / dpr);

    ctx.setTransform(1, 0, 0, 1, 0, 0);
    ctx.clearRect(0, 0, canvas.width, canvas.height);

    const state = getNodeStateForPip();

    // Track source image dimensions for canvas sizing.
    if (state) {
      let img = null;
      if (state.a && state.a.img) {
        img = state.a.img;
      } else if (state.b && state.b.img) {
        img = state.b.img;
      }
      if (img && typeof img.naturalWidth === "number" && img.naturalWidth > 0 && typeof img.naturalHeight === "number" && img.naturalHeight > 0) {
        const prevW = container._akPipImgWidth || 0;
        const prevH = container._akPipImgHeight || 0;

        container._akPipImgWidth = img.naturalWidth;
        container._akPipImgHeight = img.naturalHeight;

        // When image dimensions appear for the first time (or change) in normal mode,
        // recalculate canvas size so it fits/centers correctly without requiring a manual resize.
        const pip = container._akPipState;
        const zoomNow = pip && typeof pip.zoom === "number" ? pip.zoom : 1;
        const isZoom = zoomNow !== 1;

        if (!isZoom && typeof resizeCanvasToWindow === "function") {
          if (prevW !== container._akPipImgWidth || prevH !== container._akPipImgHeight) {
            resizeCanvasToWindow(container, canvas, false);
          }
        }
      }
    }

    // Map logical (CSS px) drawing coordinates onto the backing store.
    const scaleX = canvas.width / logicalWidth;
    const scaleY = canvas.height / logicalHeight;

    ctx.save();
    ctx.scale(scaleX, scaleY);

    const zoom = pipState.zoom || 1;
    const offsetX = pipState.offsetX || 0;
    const offsetY = pipState.offsetY || 0;

    // Move the whole canvas inside the dialog using CSS transform.
    canvas.style.transform = `translate(${offsetX}px, ${offsetY}px)`;

    const isZoomMode =
      zoom !== 1 ||
      (offsetX || 0) !== 0 ||
      (offsetY || 0) !== 0;

    // Swap the reset-zoom icon between its active/inactive variants.
    const resetImg = container._akPipResetImg || null;
    if (resetImg) {
      const expectedSrc = extensionBaseUrl + (isZoomMode ? "img/i_reset.png" : "img/i_reset_off.png");
      if (resetImg.src !== expectedSrc) {
        resetImg.src = expectedSrc;
      }
    }

    if (state && state.mode === "compare" && typeof renderCompare === "function") {
      // Zoom/pan is applied via the CSS transform above, so the compare
      // renderer always draws at identity view.
      const view = {
        zoom: 1,
        offsetX: 0,
        offsetY: 0,
      };
      const r = { x: 0, y: 0, w: logicalWidth, h: logicalHeight };
      try {
        renderCompare(ctx, r, state, view);
      } catch (e) {
        console.log("[AKBasePiP] renderCompare error", e);
      }
    } else {
      // Placeholder screen for unsupported modes / missing state.
      ctx.fillStyle = "#111";
      ctx.fillRect(0, 0, logicalWidth, logicalHeight);
      ctx.fillStyle = "#eee";
      ctx.font = "12px sans-serif";
      ctx.textAlign = "center";
      ctx.textBaseline = "middle";

      let msg = "PiP: compare mode only";
      if (state && state.mode === "gallery") {
        msg = "Gallery mode is not supported";
      } else if (!state) {
        msg = "No AKBase node state";
      }

      ctx.fillText(msg, logicalWidth / 2, logicalHeight / 2);
    }

    ctx.restore();

    window.requestAnimationFrame(frame);
  }

  window.requestAnimationFrame(frame);
}
830
+
831
// Size the PiP canvas (CSS size + backing store) for the current container.
// At zoom = 1 the source image is fitted into the window; when zoomed, the
// canvas grows with the zoom factor but container-driven resizes are ignored
// unless allowZoomResize is set (wheel zoom passes true).
function resizeCanvasToWindow(container, canvas, allowZoomResize = false) {
  const dpr = window.devicePixelRatio || 1;
  const rect = container.getBoundingClientRect();
  const containerWidth = rect.width;
  const containerHeight = rect.height;

  const pipState = container._akPipState || {};
  const zoom = pipState.zoom || 1;
  const isZoomMode = zoom !== 1;

  // In zoom mode, ignore window size changes triggered by container resize when not explicitly allowed.
  if (isZoomMode && !allowZoomResize) {
    const currentRect = canvas.getBoundingClientRect();
    const currentW = currentRect.width || (canvas.width / dpr);
    const currentH = currentRect.height || (canvas.height / dpr);

    container._akPipLastWidth = currentW;
    container._akPipLastHeight = currentH;

    canvas.style.width = `${currentW}px`;
    canvas.style.height = `${currentH}px`;

    canvas.width = Math.max(1, Math.floor(currentW * dpr));
    canvas.height = Math.max(1, Math.floor(currentH * dpr));
    return;
  }

  // Base image size (fallback to container size if unknown).
  const imgW =
    container._akPipImgWidth && container._akPipImgWidth > 0
      ? container._akPipImgWidth
      : containerWidth;
  const imgH =
    container._akPipImgHeight && container._akPipImgHeight > 0
      ? container._akPipImgHeight
      : (containerHeight - TITLE_HEIGHT);

  const availW = Math.max(1, Math.floor(containerWidth));
  const availH = Math.max(1, Math.floor(containerHeight - TITLE_HEIGHT));

  // Base scale: how the image fits into the window at zoom = 1.
  let baseScale =
    typeof container._akPipBaseScale === "number" && container._akPipBaseScale > 0
      ? container._akPipBaseScale
      : null;

  // Recalculate base scale when not in zoom mode or if it was never set.
  if (!isZoomMode || baseScale === null) {
    baseScale = Math.min(availW / imgW, availH / imgH);
    container._akPipBaseScale = baseScale;
  }

  // Visual scale controls how large the image appears inside the dialog.
  const displayScale = baseScale * zoom;
  const displayWidth = Math.max(1, Math.floor(imgW * displayScale));
  const displayHeight = Math.max(1, Math.floor(imgH * displayScale));

  // Backing scale controls how many pixels we actually render.
  let backingScale = displayScale;

  const maxBackingWidth = AKBASE_PIP_MAX_BACKING_WIDTH;
  const maxBackingHeight = AKBASE_PIP_MAX_BACKING_HEIGHT;

  const backingWidthCandidate = imgW * backingScale;
  const backingHeightCandidate = imgH * backingScale;

  // Clamp the backing store so extreme zooms can't allocate huge canvases.
  if (backingWidthCandidate > maxBackingWidth || backingHeightCandidate > maxBackingHeight) {
    const widthRatio = maxBackingWidth / backingWidthCandidate;
    const heightRatio = maxBackingHeight / backingHeightCandidate;
    const ratio = Math.min(widthRatio, heightRatio);
    backingScale = backingScale * ratio;
  }

  const backingWidth = Math.max(1, Math.floor(imgW * backingScale));
  const backingHeight = Math.max(1, Math.floor(imgH * backingScale));

  container._akPipLastWidth = displayWidth;
  container._akPipLastHeight = displayHeight;

  canvas.style.width = `${displayWidth}px`;
  canvas.style.height = `${displayHeight}px`;

  canvas.width = Math.max(1, Math.floor(backingWidth * dpr));
  canvas.height = Math.max(1, Math.floor(backingHeight * dpr));
}
916
+
917
+
918
// Expose a tiny global API so node UI code can open a PiP window on demand.
app.registerExtension({
  name: "AKBasePiP",
  setup() {
    const api = (window.AKBasePip = window.AKBasePip || {});
    api.openForNode = function (nodeId) {
      createPipWindow(nodeId);
    };
  },
});
kim_comfyui_data/custom_nodes/ComfyUI-AK-Pack/js/AKBase_ui.js ADDED
@@ -0,0 +1,392 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
// Tunable layout / rendering constants for the AKBase node preview UI.
export const UI_SETTINGS = {
  backButtonYShift: 0,           // vertical offset of the button toolbar
  buttonToolbarWidthPercent: 50, // toolbar width as % of the available width
  previewPadding: 10,            // padding around the preview rect
  previewYShift: 80,             // preview offset below the node top
  previewMinWidth: 240,
  previewMinHeight: 180,

  imageFitMode: "contain",       // "contain" (letterbox) or "cover" (crop)
  previewBgAlpha: 0.08,          // background fill alpha behind images
  overlayAlpha: 1.0,

  wipeMode: true,                // A/B wipe comparison while hovering
  lineAlpha: 0.7,                // wipe divider line alpha
  lineWidth: 1,

  galleryBorderWidth: 2,         // hover highlight border in gallery mode
  galleryBorderAlpha: 0.85,
  galleryBorderColor: "rgba(255,120,0,1.0)",

  galleryGap: 4,                 // gap between gallery cells (px)
};
23
+
24
// Base URL the ComfyUI server serves this extension's static assets from.
const EXT_BASE_URL = "/extensions/ComfyUI-AK-Pack/";

// Geometry of the canvas-drawn toolbar buttons on the node.
const BUTTON_ROW_HEIGHT = 28;
const BUTTON_ROW_GAP = 4;
const BUTTON_ICON_SIZE = 20;
29
+
30
// Preload one extension image asset; the returned Image may still be
// loading (callers check .complete before drawing).
function createIcon(src) {
  const icon = new Image();
  icon.src = EXT_BASE_URL + src;
  return icon;
}
35
+
36
// Icons for the node's canvas-drawn toolbar buttons (preloaded at import).
const AKBASE_ICONS = {
  back: createIcon("img/i_gallery.png"),
  copy: createIcon("img/i_copy.png"),
  pip: createIcon("img/i_pip_in.png"),
};
41
+
42
// Draw a rounded-rect toolbar button with a centered icon on a 2D canvas.
// `enabled` only affects the alpha (disabled buttons are dimmed, not hidden).
function drawIconButton(ctx, rect, img, enabled) {
  const alpha = enabled ? 1.0 : 0.45;

  ctx.save();
  ctx.globalAlpha = alpha;
  ctx.fillStyle = "#2a2a2a";
  ctx.strokeStyle = "#555";
  ctx.lineWidth = 1;

  // Rounded rectangle path with corner radius rr, traced clockwise.
  const rr = 6;
  const x0 = rect.x, y0 = rect.y, x1 = rect.x + rect.w, y1 = rect.y + rect.h;
  ctx.beginPath();
  ctx.moveTo(x0 + rr, y0);
  ctx.lineTo(x1 - rr, y0);
  ctx.quadraticCurveTo(x1, y0, x1, y0 + rr);
  ctx.lineTo(x1, y1 - rr);
  ctx.quadraticCurveTo(x1, y1, x1 - rr, y1);
  ctx.lineTo(x0 + rr, y1);
  ctx.quadraticCurveTo(x0, y1, x0, y1 - rr);
  ctx.lineTo(x0, y0 + rr);
  ctx.quadraticCurveTo(x0, y0, x0 + rr, y0);
  ctx.closePath();
  ctx.fill();
  ctx.stroke();

  // Draw the icon only once it has finished loading; keep it centered and
  // no larger than the button minus a small margin.
  if (img && img.complete && img.naturalWidth && img.naturalHeight) {
    const size = Math.min(BUTTON_ICON_SIZE, rect.w - 6, rect.h - 6);
    const ix = rect.x + (rect.w - size) * 0.5;
    const iy = rect.y + (rect.h - size) * 0.5;
    ctx.drawImage(img, ix, iy, size, size);
  }

  ctx.restore();
}
75
+ }
76
+
77
+
78
// Enforce a minimum node size large enough for the preview area plus the
// widget stack; never shrinks the node below its current size.
export function applyNodeLayout(node) {
  const pad2 = UI_SETTINGS.previewPadding * 2;
  const minWidth = pad2 + UI_SETTINGS.previewMinWidth;

  // Each widget row is assumed to be 28px tall plus an 8px margin overall.
  const count = Array.isArray(node?.widgets) ? node.widgets.length : 0;
  const widgetsHeight = count ? count * 28 + 8 : 0;

  const minHeight = Math.max(
    260,
    pad2 + UI_SETTINGS.previewMinHeight + widgetsHeight
  );

  if (node.size[0] < minWidth) node.size[0] = minWidth;
  if (node.size[1] < minHeight) node.size[1] = minHeight;
}
88
+
89
// Node-local rectangle the preview image area occupies, leaving room for
// the toolbar above (previewYShift) and the widget stack below.
export function previewRect(node) {
  const pad = UI_SETTINGS.previewPadding;
  const x = pad;
  const y = pad + UI_SETTINGS.previewYShift;

  // Same widget-height estimate used by applyNodeLayout.
  const count = Array.isArray(node?.widgets) ? node.widgets.length : 0;
  const widgetsHeight = count ? count * 28 + 8 : 0;

  return {
    x,
    y,
    w: Math.max(10, node.size[0] - x - pad),
    h: Math.max(10, node.size[1] - y - pad - widgetsHeight),
  };
}
101
+
102
// Rect of the first (gallery/back) toolbar button. The toolbar is centered
// horizontally and holds three equal-width buttons separated by two gaps.
export function backButtonRect(node) {
  const pad = UI_SETTINGS.previewPadding;
  const availW = Math.max(10, node.size[0] - pad * 2);

  // Clamp the configured toolbar width to [1, 100] percent; a non-numeric
  // setting falls back to full width.
  const rawPct = Number(UI_SETTINGS.buttonToolbarWidthPercent) || 100;
  const pct = Math.max(1, Math.min(100, rawPct));
  const toolbarW = availW * (pct / 100);

  const wSingle = Math.max(10, (toolbarW - 2 * BUTTON_ROW_GAP) / 3);

  return {
    x: pad + (availW - toolbarW) * 0.5,
    y: pad + Number(UI_SETTINGS.backButtonYShift || 0),
    w: wSingle,
    h: BUTTON_ROW_HEIGHT,
  };
}
120
+
121
// Rect of the copy button: one toolbar slot to the right of the back button.
export function copyButtonRect(node) {
  const back = backButtonRect(node);
  return {
    x: back.x + back.w + BUTTON_ROW_GAP,
    y: back.y,
    w: back.w,
    h: back.h,
  };
}
127
+
128
// Rect of the PiP button: one toolbar slot to the right of the copy button.
export function pipButtonRect(node) {
  const copy = copyButtonRect(node);
  return {
    x: copy.x + copy.w + BUTTON_ROW_GAP,
    y: copy.y,
    w: copy.w,
    h: copy.h,
  };
}
134
+
135
+
136
+
137
// Scale a srcW x srcH box into dstW x dstH — letterboxed ("contain", the
// default) or cropped ("cover") — and center it. Returns the placement rect
// relative to the destination; degenerate inputs yield a zero rect.
function fitRect(srcW, srcH, dstW, dstH, mode) {
  if (srcW <= 0 || srcH <= 0 || dstW <= 0 || dstH <= 0) {
    return { x: 0, y: 0, w: 0, h: 0 };
  }
  const sx = dstW / srcW;
  const sy = dstH / srcH;
  const scale = mode === "cover" ? Math.max(sx, sy) : Math.min(sx, sy);
  const w = srcW * scale;
  const h = srcH * scale;
  return { x: (dstW - w) * 0.5, y: (dstH - h) * 0.5, w, h };
}
144
+
145
// Choose the column count (1..N) that maximizes the per-image scale when
// laying out N images of size iw x ih in a W x H area with `gap` px between
// cells. Returns the winning grid geometry; invalid input falls back to a
// single column at scale 0.
function computeBestGrid(W, H, iw, ih, N, gap) {
  let best = { cols: 1, rows: N, scale: 0, cellW: W, cellH: H, drawW: iw, drawH: ih };
  if (N <= 0 || iw <= 0 || ih <= 0 || W <= 0 || H <= 0) return best;

  gap = Math.max(0, Number(gap ?? 0));

  for (let cols = 1; cols <= N; cols++) {
    const rows = Math.ceil(N / cols);

    // Space left for cells after subtracting the inter-cell gaps.
    const availW = Math.max(1, W - gap * (cols - 1));
    const availH = Math.max(1, H - gap * (rows - 1));

    const cellW = availW / cols;
    const cellH = availH / rows;

    const scale = Math.min(cellW / iw, cellH / ih);
    if (scale <= best.scale) continue;

    best = {
      cols,
      rows,
      scale,
      cellW,
      cellH,
      drawW: iw * scale,
      drawH: ih * scale,
    };
  }
  return best;
}
175
+
176
+
177
+
178
// Render the A/B comparison into rect `r`. In wipe mode (hovering with both
// images loaded) image A fills the rect and image B is clipped to the right
// of the cursor, with a vertical divider line. `view` is an optional
// zoom/offset transform applied to image placement (used by the PiP window).
export function renderCompare(ctx, r, state, view) {
  ctx.save();
  ctx.beginPath();
  ctx.rect(r.x, r.y, r.w, r.h);
  ctx.clip();

  ctx.fillStyle = `rgba(0,0,0,${UI_SETTINGS.previewBgAlpha})`;
  ctx.fillRect(r.x, r.y, r.w, r.h);

  const zoom = view && typeof view.zoom === "number" ? view.zoom : 1;
  const offsetX = view && typeof view.offsetX === "number" ? view.offsetX : 0;
  const offsetY = view && typeof view.offsetY === "number" ? view.offsetY : 0;
  const hasViewTransform = zoom !== 1 || offsetX !== 0 || offsetY !== 0;

  // Fit an image into r, then optionally apply the view zoom/offset.
  const drawImg = (img, alpha) => {
    if (!img) return;
    const fit = fitRect(img.naturalWidth, img.naturalHeight, r.w, r.h, UI_SETTINGS.imageFitMode);
    let dx = r.x + fit.x;
    let dy = r.y + fit.y;
    let dw = fit.w;
    let dh = fit.h;

    if (hasViewTransform) {
      dx = dx * zoom + offsetX;
      dy = dy * zoom + offsetY;
      dw = dw * zoom;
      dh = dh * zoom;
    }

    ctx.globalAlpha = alpha;
    ctx.drawImage(img, dx, dy, dw, dh);
  };


  const aReady = !!state?.a?.loaded;
  const bReady = !!state?.b?.loaded;

  if (!aReady && !bReady) {
    // Nothing loaded yet: show a status message plus the pending URLs.
    ctx.globalAlpha = 1.0;
    ctx.fillStyle = "rgba(255,255,255,0.4)";
    ctx.font = "12px sans-serif";
    ctx.fillText("No preview images loaded", r.x + 10, r.y + 24);
    if (state?.a?.url) ctx.fillText("A: " + state.a.url, r.x + 10, r.y + 44);
    if (state?.b?.url) ctx.fillText("B: " + state.b.url, r.x + 10, r.y + 62);
  } else if (UI_SETTINGS.wipeMode && state?.inPreview && aReady && bReady) {
    drawImg(state.a.img, 1.0);

    // B is clipped to the right of the wipe cursor position.
    const cx = r.x + r.w * (state.cursorX ?? 0.5);
    ctx.save();
    ctx.beginPath();
    ctx.rect(cx, r.y, r.x + r.w - cx, r.h);
    ctx.clip();
    drawImg(state.b.img, 1.0);
    ctx.restore();

    // Divider line at the wipe cursor (+0.5 for a crisp 1px stroke).
    ctx.save();
    ctx.globalAlpha = UI_SETTINGS.lineAlpha;
    ctx.strokeStyle = "rgba(255,255,255,1)";
    ctx.lineWidth = UI_SETTINGS.lineWidth;
    ctx.beginPath();
    ctx.moveTo(cx + 0.5, r.y);
    ctx.lineTo(cx + 0.5, r.y + r.h);
    ctx.stroke();
    ctx.restore();
  } else {
    // Not wiping: draw whichever images are ready (B drawn over A).
    if (aReady) drawImg(state.a.img, 1.0);
    if (bReady) drawImg(state.b.img, 1.0);
  }

  ctx.restore();
  // Thin outline around the preview rect (drawn outside the clip).
  ctx.save();
  ctx.strokeStyle = "rgba(255,255,255,0.15)";
  ctx.lineWidth = 1;
  ctx.strokeRect(r.x + 0.5, r.y + 0.5, r.w - 1, r.h - 1);
  ctx.restore();
}
254
+
255
+ export function installDraw(node, dbg) {
256
+ const state = node._akBase;
257
+ if (!state) return;
258
+
259
+ node.onDrawForeground = function (ctx) {
260
+ const r = previewRect(this);
261
+
262
+ const btn = backButtonRect(this);
263
+ const btnEnabled = (state.mode === "compare") && !!state.hasGallery;
264
+
265
+ const copyBtn = copyButtonRect(this);
266
+ const copyBtnEnabled = ((state.mode === "compare") && (state.a.loaded || state.b.loaded)) || ((state.mode === "gallery") && ((state.gallery?.images?.length ?? 0) > 0));
267
+
268
+ const pipBtn = pipButtonRect(this);
269
+ const pipBtnEnabled = true;
270
+
271
+ drawIconButton(ctx, btn, AKBASE_ICONS.back, btnEnabled);
272
+ drawIconButton(ctx, copyBtn, AKBASE_ICONS.copy, copyBtnEnabled);
273
+ drawIconButton(ctx, pipBtn, AKBASE_ICONS.pip, pipBtnEnabled);
274
+
275
+ // if (!state._drawLogged) {
276
+ // state._drawLogged = true;
277
+ // dbg("first draw", this.id, { mode: state.mode });
278
+ // }
279
+
280
+ ctx.save();
281
+ ctx.beginPath();
282
+ ctx.rect(r.x, r.y, r.w, r.h);
283
+ ctx.clip();
284
+
285
+ ctx.fillStyle = `rgba(0,0,0,${UI_SETTINGS.previewBgAlpha})`;
286
+ ctx.fillRect(r.x, r.y, r.w, r.h);
287
+
288
+ if (state.mode === "gallery") {
289
+ const imgs = state.gallery?.images ?? [];
290
+ const N = imgs.length;
291
+
292
+ if (!N) {
293
+ ctx.globalAlpha = 1.0;
294
+ ctx.fillStyle = UI_SETTINGS.galleryBorderColor;
295
+ ctx.font = "12px sans-serif";
296
+ ctx.fillText("Gallery: no images loaded", r.x + 10, r.y + 24);
297
+ ctx.restore();
298
+ return;
299
+ }
300
+
301
+ const iw = imgs[0].naturalWidth || 1;
302
+ const ih = imgs[0].naturalHeight || 1;
303
+
304
+ const gap = UI_SETTINGS.galleryGap ?? 0;
305
+ const grid = computeBestGrid(r.w, r.h, iw, ih, N, gap);
306
+ state.gallery.grid = grid;
307
+
308
+ for (let i = 0; i < N; i++) {
309
+ const col = i % grid.cols;
310
+ const row = Math.floor(i / grid.cols);
311
+
312
+ const cellX = r.x + col * (grid.cellW + gap);
313
+ const cellY = r.y + row * (grid.cellH + gap);
314
+
315
+ const x = cellX + (grid.cellW - grid.drawW) * 0.5;
316
+ const y = cellY + (grid.cellH - grid.drawH) * 0.5;
317
+
318
+ ctx.globalAlpha = 1.0;
319
+ ctx.drawImage(imgs[i], x, y, grid.drawW, grid.drawH);
320
+
321
+ if (state.gallery.hoverIndex === i && state.inPreview) {
322
+ ctx.save();
323
+ ctx.globalAlpha = UI_SETTINGS.galleryBorderAlpha;
324
+ ctx.strokeStyle = UI_SETTINGS.galleryBorderColor;
325
+ ctx.lineWidth = UI_SETTINGS.galleryBorderWidth;
326
+ ctx.strokeRect(x + 0.5, y + 0.5, grid.drawW - 1, grid.drawH - 1);
327
+ ctx.restore();
328
+ }
329
+ }
330
+
331
+ ctx.restore();
332
+ ctx.save();
333
+ ctx.strokeStyle = "rgba(255,255,255,0.15)";
334
+ ctx.lineWidth = 1;
335
+ ctx.strokeRect(r.x + 0.5, r.y + 0.5, r.w - 1, r.h - 1);
336
+ ctx.restore();
337
+ return;
338
+ }
339
+
340
+ const drawImg = (img, alpha) => {
341
+ if (!img) return;
342
+ const fit = fitRect(img.naturalWidth, img.naturalHeight, r.w, r.h, UI_SETTINGS.imageFitMode);
343
+ ctx.globalAlpha = alpha;
344
+ ctx.drawImage(img, r.x + fit.x, r.y + fit.y, fit.w, fit.h);
345
+ };
346
+
347
+ const aReady = !!state.a.loaded;
348
+ const bReady = !!state.b.loaded;
349
+
350
+ if (!aReady && !bReady) {
351
+ ctx.globalAlpha = 1.0;
352
+ ctx.fillStyle = "rgba(255,255,255,0.4)";
353
+ ctx.font = "12px sans-serif";
354
+ ctx.fillText("No preview images loaded", r.x + 10, r.y + 24);
355
+ if (state.a.url) ctx.fillText("A: " + state.a.url, r.x + 10, r.y + 44);
356
+ if (state.b.url) ctx.fillText("B: " + state.b.url, r.x + 10, r.y + 62);
357
+ } else if (UI_SETTINGS.wipeMode && state.inPreview && aReady && bReady) {
358
+ drawImg(state.a.img, 1.0);
359
+
360
+ const cx = r.x + r.w * (state.cursorX ?? 0.5);
361
+ ctx.save();
362
+ ctx.beginPath();
363
+ // ctx.rect(r.x, r.y, Math.max(0, cx - r.x), r.h);
364
+ ctx.rect(cx, r.y, r.x + r.w - cx, r.h);
365
+
366
+ ctx.clip();
367
+ drawImg(state.b.img, 1.0);
368
+ ctx.restore();
369
+
370
+ ctx.save();
371
+ ctx.globalAlpha = UI_SETTINGS.lineAlpha;
372
+ ctx.strokeStyle = "rgba(255,255,255,1)";
373
+ ctx.lineWidth = UI_SETTINGS.lineWidth;
374
+ ctx.beginPath();
375
+ ctx.moveTo(cx + 0.5, r.y);
376
+ ctx.lineTo(cx + 0.5, r.y + r.h);
377
+ ctx.stroke();
378
+ ctx.restore();
379
+ } else {
380
+ if (aReady) drawImg(state.a.img, 1.0);
381
+ if (bReady) drawImg(state.b.img, 1.0);
382
+ }
383
+
384
+ ctx.restore();
385
+
386
+ ctx.save();
387
+ ctx.strokeStyle = "rgba(255,255,255,0.15)";
388
+ ctx.lineWidth = 1;
389
+ ctx.strokeRect(r.x + 0.5, r.y + 0.5, r.w - 1, r.h - 1);
390
+ ctx.restore();
391
+ };
392
+ }