Rishabh12j committed on
Commit
5e9c804
·
verified ·
1 Parent(s): 2ca93c2

Upload mockup_api.json

Browse files
Files changed (1) hide show
  1. mockup_api.json +259 -0
mockup_api.json ADDED
@@ -0,0 +1,259 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "19": {
3
+ "inputs": {
4
+ "image": "{{Image}}",
5
+ "upload": "image"
6
+ },
7
+ "class_type": "LoadImage",
8
+ "_meta": {
9
+ "title": "Load Image"
10
+ }
11
+ },
12
+ "20": {
13
+ "inputs": {
14
+ "mode": "pixels",
15
+ "origin": "topleft",
16
+ "x": 410,
17
+ "y": 150,
18
+ "width": 200,
19
+ "height": 200,
20
+ "image_width": 320,
21
+ "image_height": 320,
22
+ "copy_image_size": [
23
+ "21",
24
+ 0
25
+ ]
26
+ },
27
+ "class_type": "Create Rect Mask",
28
+ "_meta": {
29
+ "title": "Create Rect Mask"
30
+ }
31
+ },
32
+ "21": {
33
+ "inputs": {
34
+ "max_width": 1024,
35
+ "max_height": 1024,
36
+ "min_width": 0,
37
+ "min_height": 0,
38
+ "crop_if_required": "yes",
39
+ "images": [
40
+ "19",
41
+ 0
42
+ ]
43
+ },
44
+ "class_type": "ConstrainImage|pysssss",
45
+ "_meta": {
46
+ "title": "Constrain Image 🐍"
47
+ }
48
+ },
49
+ "23": {
50
+ "inputs": {
51
+ "grow_mask_by": 10,
52
+ "pixels": [
53
+ "21",
54
+ 0
55
+ ],
56
+ "vae": [
57
+ "60",
58
+ 2
59
+ ],
60
+ "mask": [
61
+ "33",
62
+ 0
63
+ ]
64
+ },
65
+ "class_type": "VAEEncodeForInpaint",
66
+ "_meta": {
67
+ "title": "VAE Encode (for Inpainting)"
68
+ }
69
+ },
70
+ "31": {
71
+ "inputs": {
72
+ "images": [
73
+ "40",
74
+ 5
75
+ ]
76
+ },
77
+ "class_type": "PreviewImage",
78
+ "_meta": {
79
+ "title": "Preview Image"
80
+ }
81
+ },
82
+ "33": {
83
+ "inputs": {
84
+ "method": "alpha",
85
+ "image": [
86
+ "20",
87
+ 0
88
+ ]
89
+ },
90
+ "class_type": "Image To Mask",
91
+ "_meta": {
92
+ "title": "Image To Mask"
93
+ }
94
+ },
95
+ "34": {
96
+ "inputs": {
97
+ "text_positive": [
98
+ "35",
99
+ 0
100
+ ],
101
+ "text_negative": [
102
+ "36",
103
+ 0
104
+ ],
105
+ "style": "sai-3d-model",
106
+ "log_prompt": "No"
107
+ },
108
+ "class_type": "SDXLPromptStyler",
109
+ "_meta": {
110
+ "title": "SDXL Prompt Styler"
111
+ }
112
+ },
113
+ "35": {
114
+ "inputs": {
115
+ "prompt": "{{positive_prompt}}"
116
+ },
117
+ "class_type": "CR Prompt Text",
118
+ "_meta": {
119
+ "title": "Positive"
120
+ }
121
+ },
122
+ "36": {
123
+ "inputs": {
124
+ "prompt": "{{negative_prompt}}"
125
+ },
126
+ "class_type": "CR Prompt Text",
127
+ "_meta": {
128
+ "title": "Negative"
129
+ }
130
+ },
131
+ "40": {
132
+ "inputs": {
133
+ "seed": 499829007631404,
134
+ "steps": 20,
135
+ "cfg": 2,
136
+ "sampler_name": "lcm",
137
+ "scheduler": "karras",
138
+ "denoise": 1,
139
+ "preview_method": "none",
140
+ "vae_decode": "true",
141
+ "model": [
142
+ "62",
143
+ 0
144
+ ],
145
+ "positive": [
146
+ "64",
147
+ 0
148
+ ],
149
+ "negative": [
150
+ "63",
151
+ 0
152
+ ],
153
+ "latent_image": [
154
+ "23",
155
+ 0
156
+ ],
157
+ "optional_vae": [
158
+ "60",
159
+ 2
160
+ ]
161
+ },
162
+ "class_type": "KSampler (Efficient)",
163
+ "_meta": {
164
+ "title": "KSampler (Efficient)"
165
+ }
166
+ },
167
+ "58": {
168
+ "inputs": {
169
+ "filename_prefix": "ComfyUI",
170
+ "images": [
171
+ "40",
172
+ 5
173
+ ]
174
+ },
175
+ "class_type": "SaveImage",
176
+ "_meta": {
177
+ "title": "Save Image"
178
+ }
179
+ },
180
+ "60": {
181
+ "inputs": {
182
+ "ckpt_name": "dynavisionXLAllInOneStylized_releaseV0610Bakedvae.safetensors"
183
+ },
184
+ "class_type": "CheckpointLoaderSimple",
185
+ "_meta": {
186
+ "title": "Load Checkpoint"
187
+ }
188
+ },
189
+ "61": {
190
+ "inputs": {
191
+ "lora_name": "Harrlogos_v2.0.safetensors",
192
+ "strength_model": 1,
193
+ "strength_clip": 1,
194
+ "model": [
195
+ "60",
196
+ 0
197
+ ],
198
+ "clip": [
199
+ "60",
200
+ 1
201
+ ]
202
+ },
203
+ "class_type": "LoraLoader",
204
+ "_meta": {
205
+ "title": "Load LoRA"
206
+ }
207
+ },
208
+ "62": {
209
+ "inputs": {
210
+ "lora_name": "xl_more_art-full_v1.safetensors",
211
+ "strength_model": 1,
212
+ "strength_clip": 1,
213
+ "model": [
214
+ "61",
215
+ 0
216
+ ],
217
+ "clip": [
218
+ "61",
219
+ 1
220
+ ]
221
+ },
222
+ "class_type": "LoraLoader",
223
+ "_meta": {
224
+ "title": "Load LoRA"
225
+ }
226
+ },
227
+ "63": {
228
+ "inputs": {
229
+ "text": [
230
+ "34",
231
+ 1
232
+ ],
233
+ "clip": [
234
+ "62",
235
+ 1
236
+ ]
237
+ },
238
+ "class_type": "CLIPTextEncode",
239
+ "_meta": {
240
+ "title": "CLIP Text Encode (Prompt)"
241
+ }
242
+ },
243
+ "64": {
244
+ "inputs": {
245
+ "text": [
246
+ "34",
247
+ 0
248
+ ],
249
+ "clip": [
250
+ "62",
251
+ 1
252
+ ]
253
+ },
254
+ "class_type": "CLIPTextEncode",
255
+ "_meta": {
256
+ "title": "CLIP Text Encode (Prompt)"
257
+ }
258
+ }
259
+ }