{
  "last_node_id": 13,
  "last_link_id": 13,
  "nodes": [
    {
      "id": 8,
      "type": "VAEDecode",
      "pos": [
        1209,
        188
      ],
      "size": {
        "0": 210,
        "1": 46
      },
      "flags": {},
      "order": 7,
      "mode": 0,
      "inputs": [
        {
          "name": "samples",
          "type": "LATENT",
          "link": 7
        },
        {
          "name": "vae",
          "type": "VAE",
          "link": 8
        }
      ],
      "outputs": [
        {
          "name": "IMAGE",
          "type": "IMAGE",
          "links": [
            9
          ],
          "slot_index": 0
        }
      ],
      "properties": {
        "Node name for S&R": "VAEDecode"
      }
    },
    {
      "id": 7,
      "type": "CLIPTextEncode",
      "pos": [
        413,
        389
      ],
      "size": {
        "0": 425.27801513671875,
        "1": 180.6060791015625
      },
      "flags": {},
      "order": 5,
      "mode": 0,
      "inputs": [
        {
          "name": "clip",
          "type": "CLIP",
          "link": 5
        }
      ],
      "outputs": [
        {
          "name": "CONDITIONING",
          "type": "CONDITIONING",
          "links": [
            6
          ],
          "slot_index": 0
        }
      ],
      "properties": {
        "Node name for S&R": "CLIPTextEncode"
      },
      "widgets_values": [
        ""
      ]
    },
    {
      "id": 6,
      "type": "CLIPTextEncode",
      "pos": [
        415,
        186
      ],
      "size": {
        "0": 422.84503173828125,
        "1": 164.31304931640625
      },
      "flags": {},
      "order": 4,
      "mode": 0,
      "inputs": [
        {
          "name": "clip",
          "type": "CLIP",
          "link": 3
        }
      ],
      "outputs": [
        {
          "name": "CONDITIONING",
          "type": "CONDITIONING",
          "links": [
            4
          ],
          "slot_index": 0
        }
      ],
      "properties": {
        "Node name for S&R": "CLIPTextEncode"
      },
      "widgets_values": [
        "A girl smiling"
      ]
    },
    {
      "id": 5,
      "type": "EmptyLatentImage",
      "pos": [
        473,
        609
      ],
      "size": {
        "0": 315,
        "1": 106
      },
      "flags": {},
      "order": 0,
      "mode": 0,
      "outputs": [
        {
          "name": "LATENT",
          "type": "LATENT",
          "links": [
            2
          ],
          "slot_index": 0
        }
      ],
      "properties": {
        "Node name for S&R": "EmptyLatentImage"
      },
      "widgets_values": [
        1024,
        1024,
        1
      ]
    },
    {
      "id": 9,
      "type": "SaveImage",
      "pos": [
        1451,
        189
      ],
      "size": {
        "0": 210,
        "1": 270
      },
      "flags": {},
      "order": 8,
      "mode": 0,
      "inputs": [
        {
          "name": "images",
          "type": "IMAGE",
          "link": 9
        }
      ],
      "properties": {},
      "widgets_values": [
        "ComfyUI"
      ]
    },
    {
      "id": 12,
      "type": "Note",
      "pos": [
        44,
        71
      ],
      "size": {
        "0": 314.0921630859375,
        "1": 59.37213134765625
      },
      "flags": {},
      "order": 1,
      "mode": 0,
      "properties": {
        "text": ""
      },
      "widgets_values": [
        "Remember to use the correct checkpoint for your inference step setting!"
      ],
      "color": "#432",
      "bgcolor": "#653"
    },
    {
      "id": 3,
      "type": "KSampler",
      "pos": [
        863,
        186
      ],
      "size": {
        "0": 315,
        "1": 262
      },
      "flags": {},
      "order": 6,
      "mode": 0,
      "inputs": [
        {
          "name": "model",
          "type": "MODEL",
          "link": 13
        },
        {
          "name": "positive",
          "type": "CONDITIONING",
          "link": 4
        },
        {
          "name": "negative",
          "type": "CONDITIONING",
          "link": 6
        },
        {
          "name": "latent_image",
          "type": "LATENT",
          "link": 2
        }
      ],
      "outputs": [
        {
          "name": "LATENT",
          "type": "LATENT",
          "links": [
            7
          ],
          "slot_index": 0
        }
      ],
      "properties": {
        "Node name for S&R": "KSampler"
      },
      "widgets_values": [
        777803651532148,
        "randomize",
        4,
        1,
        "euler",
        "sgm_uniform",
        1
      ]
    },
    {
      "id": 13,
      "type": "Note",
      "pos": [
        861,
        72
      ],
      "size": {
        "0": 315.6669921875,
        "1": 58
      },
      "flags": {},
      "order": 2,
      "mode": 0,
      "properties": {
        "text": ""
      },
      "widgets_values": [
        "Use Euler sampler with sgm_uniform.\nCFG 1 is the fastest."
      ],
      "color": "#432",
      "bgcolor": "#653"
    },
    {
      "id": 4,
      "type": "CheckpointLoaderSimple",
      "pos": [
        45,
        192
      ],
      "size": {
        "0": 315,
        "1": 98
      },
      "flags": {},
      "order": 3,
      "mode": 0,
      "outputs": [
        {
          "name": "MODEL",
          "type": "MODEL",
          "links": [
            13
          ],
          "slot_index": 0
        },
        {
          "name": "CLIP",
          "type": "CLIP",
          "links": [
            3,
            5
          ],
          "slot_index": 1
        },
        {
          "name": "VAE",
          "type": "VAE",
          "links": [
            8
          ],
          "slot_index": 2
        }
      ],
      "properties": {
        "Node name for S&R": "CheckpointLoaderSimple"
      },
      "widgets_values": [
        "sdxl_lightning_4step.safetensors"
      ]
    }
  ],
  "links": [
    [
      2,
      5,
      0,
      3,
      3,
      "LATENT"
    ],
    [
      3,
      4,
      1,
      6,
      0,
      "CLIP"
    ],
    [
      4,
      6,
      0,
      3,
      1,
      "CONDITIONING"
    ],
    [
      5,
      4,
      1,
      7,
      0,
      "CLIP"
    ],
    [
      6,
      7,
      0,
      3,
      2,
      "CONDITIONING"
    ],
    [
      7,
      3,
      0,
      8,
      0,
      "LATENT"
    ],
    [
      8,
      4,
      2,
      8,
      1,
      "VAE"
    ],
    [
      9,
      8,
      0,
      9,
      0,
      "IMAGE"
    ],
    [
      13,
      4,
      0,
      3,
      0,
      "MODEL"
    ]
  ],
  "groups": [],
  "config": {},
  "extra": {},
  "version": 0.4
}