theanhntp commited on
Commit
d5486bb
·
verified ·
1 Parent(s): f13ef7e

Upload ChangeBackground_v1_Compact_api.json

Browse files
API_changeBackground_v1/ChangeBackground_v1_Compact_api.json ADDED
@@ -0,0 +1,321 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "13": {
3
+ "inputs": {
4
+ "prompt": "fruit, plate,",
5
+ "threshold": 0.2,
6
+ "sam_model": [
7
+ "14",
8
+ 0
9
+ ],
10
+ "grounding_dino_model": [
11
+ "15",
12
+ 0
13
+ ],
14
+ "image": [
15
+ "60",
16
+ 0
17
+ ]
18
+ },
19
+ "class_type": "GroundingDinoSAMSegment (segment anything)",
20
+ "_meta": {
21
+ "title": "GroundingDinoSAMSegment (segment anything)"
22
+ }
23
+ },
24
+ "14": {
25
+ "inputs": {
26
+ "model_name": "sam_hq_vit_h (2.57GB)"
27
+ },
28
+ "class_type": "SAMModelLoader (segment anything)",
29
+ "_meta": {
30
+ "title": "SAMModelLoader (segment anything)"
31
+ }
32
+ },
33
+ "15": {
34
+ "inputs": {
35
+ "model_name": "GroundingDINO_SwinT_OGC (694MB)"
36
+ },
37
+ "class_type": "GroundingDinoModelLoader (segment anything)",
38
+ "_meta": {
39
+ "title": "GroundingDinoModelLoader (segment anything)"
40
+ }
41
+ },
42
+ "16": {
43
+ "inputs": {
44
+ "images": [
45
+ "13",
46
+ 0
47
+ ]
48
+ },
49
+ "class_type": "PreviewImage",
50
+ "_meta": {
51
+ "title": "Preview Image"
52
+ }
53
+ },
54
+ "17": {
55
+ "inputs": {
56
+ "mask": [
57
+ "13",
58
+ 1
59
+ ]
60
+ },
61
+ "class_type": "InvertMask",
62
+ "_meta": {
63
+ "title": "InvertMask"
64
+ }
65
+ },
66
+ "18": {
67
+ "inputs": {
68
+ "expand": 1,
69
+ "tapered_corners": true,
70
+ "mask": [
71
+ "17",
72
+ 0
73
+ ]
74
+ },
75
+ "class_type": "GrowMask",
76
+ "_meta": {
77
+ "title": "GrowMask"
78
+ }
79
+ },
80
+ "29": {
81
+ "inputs": {
82
+ "positive": [
83
+ "54:1",
84
+ 0
85
+ ],
86
+ "negative": [
87
+ "54:1",
88
+ 1
89
+ ],
90
+ "vae": [
91
+ "58",
92
+ 2
93
+ ],
94
+ "pixels": [
95
+ "60",
96
+ 0
97
+ ],
98
+ "mask": [
99
+ "18",
100
+ 0
101
+ ]
102
+ },
103
+ "class_type": "InpaintModelConditioning",
104
+ "_meta": {
105
+ "title": "InpaintModelConditioning"
106
+ }
107
+ },
108
+ "35": {
109
+ "inputs": {
110
+ "seed": 1105431360287181,
111
+ "steps": 8,
112
+ "cfg": 0.96,
113
+ "sampler_name": "euler_ancestral",
114
+ "scheduler": "normal",
115
+ "denoise": 1,
116
+ "model": [
117
+ "61",
118
+ 0
119
+ ],
120
+ "positive": [
121
+ "29",
122
+ 0
123
+ ],
124
+ "negative": [
125
+ "29",
126
+ 1
127
+ ],
128
+ "latent_image": [
129
+ "29",
130
+ 2
131
+ ]
132
+ },
133
+ "class_type": "KSampler",
134
+ "_meta": {
135
+ "title": "KSampler"
136
+ }
137
+ },
138
+ "36": {
139
+ "inputs": {
140
+ "samples": [
141
+ "35",
142
+ 0
143
+ ],
144
+ "vae": [
145
+ "58",
146
+ 2
147
+ ]
148
+ },
149
+ "class_type": "VAEDecode",
150
+ "_meta": {
151
+ "title": "VAE Decode"
152
+ }
153
+ },
154
+ "39": {
155
+ "inputs": {
156
+ "filename_prefix": "ComfyUI",
157
+ "images": [
158
+ "36",
159
+ 0
160
+ ]
161
+ },
162
+ "class_type": "SaveImage",
163
+ "_meta": {
164
+ "title": "Save Image"
165
+ }
166
+ },
167
+ "56": {
168
+ "inputs": {
169
+ "image": "IMG20240313112329.jpg",
170
+ "upload": "image"
171
+ },
172
+ "class_type": "LoadImage",
173
+ "_meta": {
174
+ "title": "Load Image"
175
+ }
176
+ },
177
+ "57": {
178
+ "inputs": {
179
+ "text": "(worst quality, low quality, normal quality:2),Text,Watermark,logo,nsfw,text,lowres,bad anatomy,bad hands,text,error,missing fingers,extra",
180
+ "seed": 1665,
181
+ "autorefresh": "No"
182
+ },
183
+ "class_type": "DPRandomGenerator",
184
+ "_meta": {
185
+ "title": "Negative Prompts"
186
+ }
187
+ },
188
+ "58": {
189
+ "inputs": {
190
+ "ckpt_name": "XLWedding.safetensors"
191
+ },
192
+ "class_type": "CheckpointLoaderSimple",
193
+ "_meta": {
194
+ "title": "Load Checkpoint"
195
+ }
196
+ },
197
+ "59": {
198
+ "inputs": {
199
+ "text": "A stunning intricate shot of frontal view of tea package and white cup and coffee beans on an empty white wooden table with a modern kitchen in the background, sharp focus, natural lighting, subsurface scattering, f2,35mm, film grain",
200
+ "seed": 726,
201
+ "autorefresh": "No"
202
+ },
203
+ "class_type": "DPRandomGenerator",
204
+ "_meta": {
205
+ "title": "Positive prompt"
206
+ }
207
+ },
208
+ "60": {
209
+ "inputs": {
210
+ "mode": "resize",
211
+ "supersample": "true",
212
+ "resampling": "nearest",
213
+ "rescale_factor": 2,
214
+ "resize_width": 768,
215
+ "resize_height": 1024,
216
+ "image": [
217
+ "56",
218
+ 0
219
+ ]
220
+ },
221
+ "class_type": "Image Resize",
222
+ "_meta": {
223
+ "title": "Image Resize"
224
+ }
225
+ },
226
+ "61": {
227
+ "inputs": {
228
+ "lora_name": "sdxl_lightning_8step_lora.safetensors",
229
+ "strength_model": 1,
230
+ "model": [
231
+ "58",
232
+ 0
233
+ ]
234
+ },
235
+ "class_type": "LoraLoaderModelOnly",
236
+ "_meta": {
237
+ "title": "LoraLoaderModelOnly"
238
+ }
239
+ },
240
+ "62": {
241
+ "inputs": {
242
+ "stop_at_clip_layer": -1,
243
+ "clip": [
244
+ "58",
245
+ 1
246
+ ]
247
+ },
248
+ "class_type": "CLIPSetLastLayer",
249
+ "_meta": {
250
+ "title": "CLIP Set Last Layer"
251
+ }
252
+ },
253
+ "63": {
254
+ "inputs": {
255
+ "text": [
256
+ "59",
257
+ 0
258
+ ],
259
+ "clip": [
260
+ "62",
261
+ 0
262
+ ]
263
+ },
264
+ "class_type": "CLIPTextEncode",
265
+ "_meta": {
266
+ "title": "CLIP Text Encode (Prompt)"
267
+ }
268
+ },
269
+ "64": {
270
+ "inputs": {
271
+ "text": [
272
+ "57",
273
+ 0
274
+ ],
275
+ "clip": [
276
+ "62",
277
+ 0
278
+ ]
279
+ },
280
+ "class_type": "CLIPTextEncode",
281
+ "_meta": {
282
+ "title": "CLIP Text Encode (Prompt)"
283
+ }
284
+ },
285
+ "54:0": {
286
+ "inputs": {
287
+ "control_net_name": "XLControlnet/sai_xl_depth_128lora.safetensors"
288
+ },
289
+ "class_type": "ControlNetLoader",
290
+ "_meta": {
291
+ "title": "Load ControlNet Model"
292
+ }
293
+ },
294
+ "54:1": {
295
+ "inputs": {
296
+ "strength": 1,
297
+ "start_percent": 0,
298
+ "end_percent": 1,
299
+ "positive": [
300
+ "63",
301
+ 0
302
+ ],
303
+ "negative": [
304
+ "64",
305
+ 0
306
+ ],
307
+ "control_net": [
308
+ "54:0",
309
+ 0
310
+ ],
311
+ "image": [
312
+ "13",
313
+ 0
314
+ ]
315
+ },
316
+ "class_type": "ControlNetApplyAdvanced",
317
+ "_meta": {
318
+ "title": "Apply ControlNet (Advanced)"
319
+ }
320
+ }
321
+ }