vslinx committed
Commit 9fdbe9b
Parent: acf1aee

final version & assets of v4.0 for TXT2IMG and IMG2IMG

workflows/IMG2IMG/v4.0/IMG2IMG-ADetailer-v4.0-vslinx.json CHANGED
@@ -765,7 +765,7 @@
       "flags": {
         "collapsed": true
       },
-      "order": 81,
+      "order": 79,
       "mode": 4,
       "inputs": [
         {
@@ -818,7 +818,7 @@
       "flags": {
         "collapsed": true
       },
-      "order": 80,
+      "order": 78,
       "mode": 0,
       "inputs": [
         {
@@ -914,7 +914,7 @@
       "flags": {
         "collapsed": true
       },
-      "order": 75,
+      "order": 80,
       "mode": 0,
       "inputs": [
         {
@@ -1084,7 +1084,7 @@
       "flags": {
         "collapsed": true
       },
-      "order": 87,
+      "order": 92,
       "mode": 0,
       "inputs": [
         {
@@ -1138,7 +1138,7 @@
       "flags": {
         "collapsed": true
       },
-      "order": 88,
+      "order": 87,
       "mode": 0,
       "inputs": [
         {
@@ -1192,7 +1192,7 @@
       "flags": {
         "collapsed": true
       },
-      "order": 76,
+      "order": 75,
       "mode": 0,
       "inputs": [
         {
@@ -1250,7 +1250,7 @@
       "flags": {
         "collapsed": true
       },
-      "order": 90,
+      "order": 89,
       "mode": 0,
       "inputs": [
         {
@@ -1304,7 +1304,7 @@
       "flags": {
         "collapsed": true
       },
-      "order": 78,
+      "order": 77,
       "mode": 0,
       "inputs": [
         {
@@ -1524,7 +1524,7 @@
       "flags": {
         "collapsed": true
       },
-      "order": 77,
+      "order": 76,
       "mode": 0,
       "inputs": [
         {
@@ -1582,7 +1582,7 @@
       "flags": {
         "collapsed": true
       },
-      "order": 89,
+      "order": 88,
       "mode": 0,
       "inputs": [
         {
@@ -3439,7 +3439,7 @@
       "flags": {
         "collapsed": true
       },
-      "order": 91,
+      "order": 90,
       "mode": 0,
       "inputs": [
         {
@@ -3947,45 +3947,6 @@
       "bgcolor": "#353",
       "shape": 1
     },
-    {
-      "id": 477,
-      "type": "Primitive string multiline [Crystools]",
-      "pos": [
-        1030,
-        380
-      ],
-      "size": [
-        230,
-        110
-      ],
-      "flags": {},
-      "order": 43,
-      "mode": 4,
-      "inputs": [],
-      "outputs": [
-        {
-          "name": "string",
-          "type": "STRING",
-          "links": [
-            3905,
-            3924
-          ]
-        }
-      ],
-      "title": "Lips prompt",
-      "properties": {
-        "cnr_id": "comfyui-crystools",
-        "ver": "576b44b9b79e3bf4b5d50457a28924d89a42e7e1",
-        "Node name for S&R": "Primitive string multiline [Crystools]",
-        "widget_ue_connectable": {}
-      },
-      "widgets_values": [
-        "lips, mouth, smile"
-      ],
-      "color": "#232",
-      "bgcolor": "#353",
-      "shape": 1
-    },
     {
       "id": 486,
       "type": "Primitive string multiline [Crystools]",
@@ -4000,7 +3961,7 @@
       "flags": {
         "collapsed": false
       },
-      "order": 44,
+      "order": 43,
       "mode": 4,
       "inputs": [],
       "outputs": [
@@ -4039,7 +4000,7 @@
         30
       ],
       "flags": {},
-      "order": 45,
+      "order": 44,
       "mode": 0,
       "inputs": [],
       "outputs": [
@@ -4085,7 +4046,7 @@
         30
       ],
       "flags": {},
-      "order": 46,
+      "order": 45,
       "mode": 0,
       "inputs": [],
       "outputs": [
@@ -4131,7 +4092,7 @@
         180
       ],
       "flags": {},
-      "order": 47,
+      "order": 46,
       "mode": 0,
       "inputs": [],
       "outputs": [],
@@ -4159,7 +4120,7 @@
       "flags": {
         "collapsed": false
       },
-      "order": 48,
+      "order": 47,
       "mode": 4,
       "inputs": [],
       "outputs": [
@@ -4200,7 +4161,7 @@
       "flags": {
         "collapsed": false
       },
-      "order": 49,
+      "order": 48,
       "mode": 4,
       "inputs": [],
       "outputs": [
@@ -4239,7 +4200,7 @@
         620
       ],
       "flags": {},
-      "order": 92,
+      "order": 91,
       "mode": 0,
       "inputs": [
         {
@@ -4303,7 +4264,7 @@
         190
       ],
       "flags": {},
-      "order": 50,
+      "order": 49,
       "mode": 0,
       "inputs": [],
       "outputs": [],
@@ -4698,7 +4659,7 @@
         150
       ],
       "flags": {},
-      "order": 51,
+      "order": 50,
       "mode": 0,
       "inputs": [],
       "outputs": [],
@@ -4724,7 +4685,7 @@
         30
       ],
       "flags": {},
-      "order": 52,
+      "order": 51,
       "mode": 0,
       "inputs": [],
       "outputs": [
@@ -4829,7 +4790,7 @@
       "flags": {
         "collapsed": false
       },
-      "order": 53,
+      "order": 52,
       "mode": 0,
       "inputs": [],
       "outputs": [
@@ -6297,7 +6258,7 @@
         30
       ],
       "flags": {},
-      "order": 54,
+      "order": 53,
       "mode": 0,
       "inputs": [],
       "outputs": [
@@ -6554,7 +6515,7 @@
         90
       ],
       "flags": {},
-      "order": 55,
+      "order": 54,
       "mode": 0,
       "inputs": [],
      "outputs": [
@@ -6604,7 +6565,7 @@
         30
       ],
       "flags": {},
-      "order": 56,
+      "order": 55,
       "mode": 0,
       "inputs": [],
       "outputs": [
@@ -6660,7 +6621,7 @@
         "collapsed": false,
         "pinned": true
       },
-      "order": 57,
+      "order": 56,
       "mode": 0,
       "inputs": [],
       "outputs": [],
@@ -6687,7 +6648,7 @@
         190
       ],
       "flags": {},
-      "order": 58,
+      "order": 57,
       "mode": 0,
       "inputs": [],
       "outputs": [],
@@ -6702,33 +6663,6 @@
       "bgcolor": "#000",
       "shape": 1
     },
-    {
-      "id": 323,
-      "type": "MarkdownNote",
-      "pos": [
-        -1150,
-        -180
-      ],
-      "size": [
-        370,
-        690
-      ],
-      "flags": {},
-      "order": 59,
-      "mode": 0,
-      "inputs": [],
-      "outputs": [],
-      "title": "How to use",
-      "properties": {
-        "widget_ue_connectable": {}
-      },
-      "widgets_values": [
-        "#### Model Backend\nIn the **Model Backend** group, choose your Checkpoint, VAE, Upscale Model as well as IPAdapter Model & Clip vision Model if you want to use that feature. <br>\nIf you're using a v-prediction model, make sure to enable \"Is V-Pred Model\".\n\n---\n\n\n#### LoRA's / Detection Models\nIn the second row select your detection models for the body parts, select the LoRA's of your choice and fill out the prompts for specific body parts(or leave as is) and select Detailer LoRA's for specific body-parts if you have any.\n\n---\n\n#### Function Control\nIn the \"Function Control & Basic Prompt Settings\"-Group you can select which Detailers to run, what manual body part prompts to use and what detailer LoRA's to activate.\nHere you can also control if you want to use a pre-pended quality prompt, what negative prompt to use across the detailers and if you want to use the IPAdapter to imitate the originals style.\n\n---\n\n#### IPAdapter\nIPAdapter let's you copy the style of an image simply by providing an IPAdapter Model & a Clip Model. <br>\nIt basically works as a 1-Image-LoRA that injects itself into the Model and applies the style of the image like a LoRA. <br>\n<br>\nBe aware that you still have to prompt correctly with IPAdapter.\n\n---\n\n#### Prompts\nConsists of start quality prompts and negative prompts (in the \"Function Control & Basic Prompt Settings\"-Group\" as well as the detailer prompts for each individual bodypart.<br>\nThere is no automatic append or prepend of the promp, so if you want to use the start quality prompt you should end the prompt itself on a \", \" to make sure it adds itself correctly to the detailer prompts. You can see a full example prompt of the eyes inside the Debug section to get a feel of how these connect and end up in your detailer-node.\n\n---\n\n\n`To the left are my recommended detection models as well as Detailer LoRA's for some of them that i use regularly. At the bottom you can find collections of Poses for ControlNet.`"
-      ],
-      "color": "#222",
-      "bgcolor": "#000",
-      "shape": 1
-    },
     {
       "id": 353,
       "type": "MarkdownNote",
@@ -6743,7 +6677,7 @@
       "flags": {
         "collapsed": false
       },
-      "order": 60,
+      "order": 58,
       "mode": 0,
       "inputs": [],
       "outputs": [],
@@ -6772,7 +6706,7 @@
       "flags": {
         "collapsed": false
       },
-      "order": 61,
+      "order": 59,
       "mode": 0,
       "inputs": [],
       "outputs": [],
@@ -6898,7 +6832,7 @@
         620
       ],
       "flags": {},
-      "order": 62,
+      "order": 60,
       "mode": 0,
       "inputs": [],
       "outputs": [
@@ -7046,7 +6980,7 @@
         260
       ],
       "flags": {},
-      "order": 79,
+      "order": 81,
       "mode": 0,
       "inputs": [
         {
@@ -7291,7 +7225,7 @@
         30
       ],
       "flags": {},
-      "order": 63,
+      "order": 61,
       "mode": 0,
       "inputs": [],
       "outputs": [
@@ -7337,7 +7271,7 @@
         30
       ],
       "flags": {},
-      "order": 64,
+      "order": 62,
       "mode": 0,
       "inputs": [],
       "outputs": [
@@ -7370,6 +7304,72 @@
       "color": "#432",
       "bgcolor": "#653",
       "shape": 1
+    },
+    {
+      "id": 477,
+      "type": "Primitive string multiline [Crystools]",
+      "pos": [
+        1030,
+        380
+      ],
+      "size": [
+        230,
+        110
+      ],
+      "flags": {},
+      "order": 63,
+      "mode": 4,
+      "inputs": [],
+      "outputs": [
+        {
+          "name": "string",
+          "type": "STRING",
+          "links": [
+            3905,
+            3924
+          ]
+        }
+      ],
+      "title": "Lips prompt",
+      "properties": {
+        "cnr_id": "comfyui-crystools",
+        "ver": "576b44b9b79e3bf4b5d50457a28924d89a42e7e1",
+        "Node name for S&R": "Primitive string multiline [Crystools]",
+        "widget_ue_connectable": {}
+      },
+      "widgets_values": [
+        "lips, mouth, smile"
+      ],
+      "color": "#232",
+      "bgcolor": "#353",
+      "shape": 1
+    },
+    {
+      "id": 323,
+      "type": "MarkdownNote",
+      "pos": [
+        -1150,
+        -180
+      ],
+      "size": [
+        370,
+        690
+      ],
+      "flags": {},
+      "order": 64,
+      "mode": 0,
+      "inputs": [],
+      "outputs": [],
+      "title": "How to use",
+      "properties": {
+        "widget_ue_connectable": {}
+      },
+      "widgets_values": [
+        "#### Model Backend\nIn the **Model Backend** group, choose your Checkpoint, VAE, Upscale Model as well as IPAdapter Model & Clip vision Model if you want to use that feature. <br>\nIf you're using a v-prediction model, make sure to enable \"Is V-Pred Model\".\n\n---\n\n\n#### LoRA's / Detection Models\nIn the second row select your detection models for the body parts, select the LoRA's of your choice and fill out the prompts for specific body parts(or leave as is) and select Detailer LoRA's for specific body-parts if you have any.\n\n---\n\n#### Function Control\nIn the \"Function Control & Basic Prompt Settings\"-Group you can select which Detailers to run, what manual body part prompts to use and what detailer LoRA's to activate.\nHere you can also control if you want to use a pre-pended quality prompt, what negative prompt to use across the detailers and if you want to use the IPAdapter to imitate the originals style.\n\n---\n\n#### IPAdapter\nIPAdapter let's you copy the style of an image simply by providing an IPAdapter Model & a Clip Model. <br>\nIt basically works as a 1-Image-LoRA that injects itself into the Model and applies the style of the image like a LoRA. <br>\n<br>\nBe aware that you still have to prompt correctly with IPAdapter.\n\n---\n\n#### Prompts\nConsists of start quality prompts and negative prompts (in the \"Function Control & Basic Prompt Settings\"-Group\" as well as the detailer prompts for each individual bodypart.<br>\nThere is no automatic append or prepend of the promp, so if you want to use the start quality prompt you should end the prompt itself on a \", \" to make sure it adds itself correctly to the detailer prompts. You can see a full example prompt of the eyes inside the Debug section to get a feel of how these connect and end up in your detailer-node.\n\n---\n\n\n`To the left are my recommended detection models as well as Detailer LoRA's for some of them that i use regularly. At the bottom you can find collections of Poses for ControlNet.`"
+      ],
+      "color": "#222",
+      "bgcolor": "#000",
+      "shape": 1
     }
   ],
   "links": [
@@ -9421,8 +9421,8 @@
     "ds": {
       "scale": 0.7513148009015777,
       "offset": [
-        -896.868305899917,
-        54.4391912044033
+        -179.45930589991676,
+        127.64419120440343
       ]
     },
     "ue_links": [
workflows/TXT2IMG/v4.0/TXT2IMG-ADetailer-v4.0-vslinx.json CHANGED
The diff for this file is too large to render. See raw diff
 
workflows/TXT2IMG/v4.0/{example.png → sample_workflow.png} RENAMED
File without changes
workflows/TXT2IMG/v4.0/{example_ipadapter.png → sample_workflow_ipadapter.png} RENAMED
File without changes
workflows/TXT2IMG/v4.0/zoomin.png CHANGED

Git LFS Details (before)

  • SHA256: 590de6c0aae8c3858b6517636de353dd0873dc865a6e8b179b7945fa74688f53
  • Pointer size: 131 Bytes
  • Size of remote file: 321 kB

Git LFS Details (after)

  • SHA256: c6d7d92a8033124b5583b077bf62a87d7e0cd579fe91d98bbfde49397f662850
  • Pointer size: 131 Bytes
  • Size of remote file: 238 kB
workflows/TXT2IMG/v4.0/zoomin_ipadapter.png ADDED

Git LFS Details

  • SHA256: 293f171fc6f067af15cd69a1b375ab0c66609b4dd0fa14f081458695da8d644b
  • Pointer size: 131 Bytes
  • Size of remote file: 268 kB
workflows/TXT2IMG/v4.0/zoomout.png CHANGED

Git LFS Details (before)

  • SHA256: 8a5a5746deb9269ec983a9d03de2efd3c1aaf3511bf2b1ec7de37907d36b9333
  • Pointer size: 131 Bytes
  • Size of remote file: 255 kB

Git LFS Details (after)

  • SHA256: 16e8ecde164bb9dcfb0c1d9220573433286ccd9634587dc12310fc40abb45e84
  • Pointer size: 131 Bytes
  • Size of remote file: 212 kB
workflows/TXT2IMG/v4.0/zoomout_ipadapter.png ADDED

Git LFS Details

  • SHA256: aa1868a5ee827a3274596725c3bdf287ea28d171145cc27facef6d2d371dac36
  • Pointer size: 131 Bytes
  • Size of remote file: 190 kB