Upload 5 files

- .gitignore +1 -0
- LICENSE +201 -0
- README.md +42 -0
- locon.py +57 -0
- locon_compvis.py +488 -0
.gitignore
ADDED
@@ -0,0 +1 @@
__pycache__
LICENSE
ADDED
@@ -0,0 +1,201 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [2023] [KohakuBlueLeaf]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
README.md
ADDED
@@ -0,0 +1,42 @@
# a1111-sd-webui-locon

An extension for loading LyCORIS models in sd-webui (includes LoCon and LoHa).

# THIS EXTENSION IS NOT FOR ADDITIONAL NETWORK

### LyCORIS
https://github.com/KohakuBlueleaf/LyCORIS

### usage
Install this extension, then use LoCon models the same way you use LoRA models. <br>
Make sure your sd-webui has built-in lora support.


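For example, with a hypothetical model file `models/Lora/example_locon.safetensors`, the webui's built-in LoRA prompt syntax activates it as `<lora:example_locon:0.8>`.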
locon.py
ADDED
@@ -0,0 +1,57 @@
'''
https://github.com/KohakuBlueleaf/LoCon
'''

import math

import torch
import torch.nn as nn
import torch.nn.functional as F


class LoConModule(nn.Module):
    """
    modified from kohya-ss/sd-scripts/networks/lora:LoRAModule
    """

    def __init__(self, lora_name, org_module: nn.Module, multiplier=1.0, lora_dim=4, alpha=1):
        """ if alpha == 0 or None, alpha is rank (no scaling). """
        super().__init__()
        self.lora_name = lora_name
        self.lora_dim = lora_dim

        if org_module.__class__.__name__ == 'Conv2d':
            # For general LoCon
            in_dim = org_module.in_channels
            k_size = org_module.kernel_size
            stride = org_module.stride
            padding = org_module.padding
            out_dim = org_module.out_channels
            self.lora_down = nn.Conv2d(in_dim, lora_dim, k_size, stride, padding, bias=False)
            self.lora_up = nn.Conv2d(lora_dim, out_dim, (1, 1), bias=False)
        else:
            in_dim = org_module.in_features
            out_dim = org_module.out_features
            self.lora_down = nn.Linear(in_dim, lora_dim, bias=False)
            self.lora_up = nn.Linear(lora_dim, out_dim, bias=False)

        if type(alpha) == torch.Tensor:
            alpha = alpha.detach().float().numpy()  # without casting, bf16 causes error
        alpha = lora_dim if alpha is None or alpha == 0 else alpha
        self.scale = alpha / self.lora_dim
        self.register_buffer('alpha', torch.tensor(alpha))  # can be treated as a constant

        # same as microsoft's
        torch.nn.init.kaiming_uniform_(self.lora_down.weight, a=math.sqrt(5))
        torch.nn.init.zeros_(self.lora_up.weight)

        self.multiplier = multiplier
        self.org_module = org_module  # removed when applying

    def apply_to(self):
        self.org_forward = self.org_module.forward
        self.org_module.forward = self.forward
        del self.org_module

    def forward(self, x):
        return self.org_forward(x) + self.lora_up(self.lora_down(x)) * self.multiplier * self.scale
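A minimal usage sketch of how `LoConModule` hooks a layer; the Conv2d and its channel sizes below are illustrative, not taken from the extension:

```python
import torch
import torch.nn as nn

from locon import LoConModule

# hypothetical 3x3 convolution, standing in for a conv inside a U-Net ResBlock
conv = nn.Conv2d(320, 320, kernel_size=3, padding=1)
lora = LoConModule('lora_unet_example', conv, multiplier=1.0, lora_dim=4, alpha=1)
lora.apply_to()  # conv.forward now routes through LoConModule.forward

x = torch.randn(1, 320, 64, 64)
y = conv(x)  # original output + lora_up(lora_down(x)) * multiplier * (alpha / lora_dim)
print(y.shape)  # torch.Size([1, 320, 64, 64])
```

Because `lora_up` is zero-initialized, the hooked layer initially reproduces the original output exactly; loading trained weights adds the low-rank delta, scaled by `alpha / lora_dim`.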
locon_compvis.py
ADDED
@@ -0,0 +1,488 @@
'''
Hijack version of kohya-ss/additional_networks/scripts/lora_compvis.py
'''
# LoRA network module
# reference:
# https://github.com/microsoft/LoRA/blob/main/loralib/layers.py
# https://github.com/cloneofsimo/lora/blob/master/lora_diffusion/lora.py

import copy
import math
import re
from typing import NamedTuple
import torch
from locon import LoConModule


class LoRAInfo(NamedTuple):
    lora_name: str
    module_name: str
    module: torch.nn.Module
    multiplier: float
    dim: int
    alpha: float


def create_network_and_apply_compvis(du_state_dict, multiplier_tenc, multiplier_unet, text_encoder, unet, **kwargs):
    # get device and dtype from unet
    for module in unet.modules():
        if module.__class__.__name__ == "Linear":
            param: torch.nn.Parameter = module.weight
            # device = param.device
            dtype = param.dtype
            break

    # get dims (rank) and alpha from state dict
    # currently it is assumed all LoRAs have the same alpha. alpha may differ in the future.
    network_alpha = None
    conv_alpha = None
    network_dim = None
    conv_dim = None
    for key, value in du_state_dict.items():
        if network_alpha is None and 'alpha' in key:
            network_alpha = value
        if network_dim is None and 'lora_down' in key and len(value.size()) == 2:
            network_dim = value.size()[0]
        if network_alpha is not None and network_dim is not None:
            break
    if network_alpha is None:
        network_alpha = network_dim

    print(f"dimension: {network_dim},\n"
          f"alpha: {network_alpha},\n"
          f"multiplier_unet: {multiplier_unet},\n"
          f"multiplier_tenc: {multiplier_tenc}"
    )
    if network_dim is None:
        print("The selected model is not LoRA, or was not trained by `sd-scripts`?")
        network_dim = 4
        network_alpha = 1

    # create, apply and load weights
    network = LoConNetworkCompvis(
        text_encoder, unet, du_state_dict,
        multiplier_tenc = multiplier_tenc,
        multiplier_unet = multiplier_unet,
    )
    state_dict = network.apply_lora_modules(du_state_dict)  # some weights are applied to the text encoder
    network.to(dtype)  # with this, even if an error comes from the next line, the model can still be used
    info = network.load_state_dict(state_dict, strict=False)

    # remove redundant warnings
    if len(info.missing_keys) > 4:
        missing_keys = []
        alpha_count = 0
        for key in info.missing_keys:
            if 'alpha' not in key:
                missing_keys.append(key)
            else:
                if alpha_count == 0:
                    missing_keys.append(key)
                alpha_count += 1
        if alpha_count > 1:
            missing_keys.append(
                f"... and {alpha_count-1} alphas. The model doesn't have alpha; dim (rank) is used as alpha. You can ignore this message.")

        info = torch.nn.modules.module._IncompatibleKeys(missing_keys, info.unexpected_keys)

    return network, info


class LoConNetworkCompvis(torch.nn.Module):
    # UNET_TARGET_REPLACE_MODULE = ["Transformer2DModel", "Attention"]
    # TEXT_ENCODER_TARGET_REPLACE_MODULE = ["CLIPAttention", "CLIPMLP"]
    LOCON_TARGET = ["ResBlock", "Downsample", "Upsample"]
    UNET_TARGET_REPLACE_MODULE = ["SpatialTransformer"] + LOCON_TARGET  # , "Attention"]
    TEXT_ENCODER_TARGET_REPLACE_MODULE = ["ResidualAttentionBlock", "CLIPAttention", "CLIPMLP"]

    LORA_PREFIX_UNET = 'lora_unet'
    LORA_PREFIX_TEXT_ENCODER = 'lora_te'

    @classmethod
    def convert_diffusers_name_to_compvis(cls, v2, du_name):
        """
        convert diffusers's LoRA name to CompVis
        """
        cv_name = None
        if "lora_unet_" in du_name:
            m = re.search(r"_down_blocks_(\d+)_attentions_(\d+)_(.+)", du_name)
            if m:
                du_block_index = int(m.group(1))
                du_attn_index = int(m.group(2))
                du_suffix = m.group(3)

                cv_index = 1 + du_block_index * 3 + du_attn_index  # 1,2, 4,5, 7,8
                cv_name = f"lora_unet_input_blocks_{cv_index}_1_{du_suffix}"
                return cv_name

            m = re.search(r"_mid_block_attentions_(\d+)_(.+)", du_name)
            if m:
                du_suffix = m.group(2)
                cv_name = f"lora_unet_middle_block_1_{du_suffix}"
                return cv_name

            m = re.search(r"_up_blocks_(\d+)_attentions_(\d+)_(.+)", du_name)
            if m:
                du_block_index = int(m.group(1))
                du_attn_index = int(m.group(2))
                du_suffix = m.group(3)

                cv_index = du_block_index * 3 + du_attn_index  # 3,4,5, 6,7,8, 9,10,11
                cv_name = f"lora_unet_output_blocks_{cv_index}_1_{du_suffix}"
                return cv_name

            m = re.search(r"_down_blocks_(\d+)_resnets_(\d+)_(.+)", du_name)
            if m:
                du_block_index = int(m.group(1))
                du_res_index = int(m.group(2))
                du_suffix = m.group(3)
                cv_suffix = {
                    'conv1': 'in_layers_2',
                    'conv2': 'out_layers_3',
                    'time_emb_proj': 'emb_layers_1',
                    'conv_shortcut': 'skip_connection'
                }[du_suffix]

                cv_index = 1 + du_block_index * 3 + du_res_index  # 1,2, 4,5, 7,8
                cv_name = f"lora_unet_input_blocks_{cv_index}_0_{cv_suffix}"
                return cv_name

            m = re.search(r"_down_blocks_(\d+)_downsamplers_0_conv", du_name)
            if m:
                block_index = int(m.group(1))
                cv_index = 3 + block_index * 3
                cv_name = f"lora_unet_input_blocks_{cv_index}_0_op"
                return cv_name

            m = re.search(r"_mid_block_resnets_(\d+)_(.+)", du_name)
            if m:
                index = int(m.group(1))
                du_suffix = m.group(2)
                cv_suffix = {
                    'conv1': 'in_layers_2',
                    'conv2': 'out_layers_3',
                    'time_emb_proj': 'emb_layers_1',
                    'conv_shortcut': 'skip_connection'
                }[du_suffix]
                cv_name = f"lora_unet_middle_block_{index*2}_{cv_suffix}"
                return cv_name

            m = re.search(r"_up_blocks_(\d+)_resnets_(\d+)_(.+)", du_name)
            if m:
                du_block_index = int(m.group(1))
                du_res_index = int(m.group(2))
                du_suffix = m.group(3)
                cv_suffix = {
                    'conv1': 'in_layers_2',
                    'conv2': 'out_layers_3',
                    'time_emb_proj': 'emb_layers_1',
                    'conv_shortcut': 'skip_connection'
                }[du_suffix]

                cv_index = du_block_index * 3 + du_res_index  # 1,2, 4,5, 7,8
                cv_name = f"lora_unet_output_blocks_{cv_index}_0_{cv_suffix}"
                return cv_name

            m = re.search(r"_up_blocks_(\d+)_upsamplers_0_conv", du_name)
            if m:
                block_index = int(m.group(1))
                cv_index = block_index * 3 + 2
                cv_name = f"lora_unet_output_blocks_{cv_index}_{bool(block_index)+1}_conv"
                return cv_name

        elif "lora_te_" in du_name:
            m = re.search(r"_model_encoder_layers_(\d+)_(.+)", du_name)
            if m:
                du_block_index = int(m.group(1))
                du_suffix = m.group(2)

                cv_index = du_block_index
                if v2:
                    if 'mlp_fc1' in du_suffix:
                        cv_name = f"lora_te_wrapped_model_transformer_resblocks_{cv_index}_{du_suffix.replace('mlp_fc1', 'mlp_c_fc')}"
                    elif 'mlp_fc2' in du_suffix:
                        cv_name = f"lora_te_wrapped_model_transformer_resblocks_{cv_index}_{du_suffix.replace('mlp_fc2', 'mlp_c_proj')}"
                    elif 'self_attn' in du_suffix:
                        # handled later
                        cv_name = f"lora_te_wrapped_model_transformer_resblocks_{cv_index}_{du_suffix.replace('self_attn', 'attn')}"
                else:
                    cv_name = f"lora_te_wrapped_transformer_text_model_encoder_layers_{cv_index}_{du_suffix}"

        assert cv_name is not None, f"conversion failed: {du_name}. the model may not be trained by `sd-scripts`."
        return cv_name

    @classmethod
    def convert_state_dict_name_to_compvis(cls, v2, state_dict):
        """
        convert keys in the state dict so it can be loaded by load_state_dict
        """
        new_sd = {}
        for key, value in state_dict.items():
            tokens = key.split('.')
            compvis_name = LoConNetworkCompvis.convert_diffusers_name_to_compvis(v2, tokens[0])
            new_key = compvis_name + '.' + '.'.join(tokens[1:])
            new_sd[new_key] = value

        return new_sd

    def __init__(self, text_encoder, unet, du_state_dict, multiplier_tenc=1.0, multiplier_unet=1.0) -> None:
        super().__init__()
        self.multiplier_unet = multiplier_unet
        self.multiplier_tenc = multiplier_tenc

        # create module instances
        for name, module in text_encoder.named_modules():
            for child_name, child_module in module.named_modules():
                if child_module.__class__.__name__ == 'MultiheadAttention':
                    self.v2 = True
                    break
            else:
                continue
            break
        else:
            self.v2 = False
        comp_state_dict = {}

        def create_modules(prefix, root_module: torch.nn.Module, target_replace_modules, multiplier):
            nonlocal comp_state_dict
            loras = []
            replaced_modules = []
            for name, module in root_module.named_modules():
                if module.__class__.__name__ in target_replace_modules:
                    for child_name, child_module in module.named_modules():
                        layer = child_module.__class__.__name__
                        lora_name = prefix + '.' + name + '.' + child_name
                        lora_name = lora_name.replace('.', '_')
                        if layer == "Linear" or layer == "Conv2d":
                            if '_resblocks_23_' in lora_name:  # ignore the last block in the StabilityAI text encoder
                                break
                            if f'{lora_name}.lora_down.weight' not in comp_state_dict:
                                if module.__class__.__name__ in LoConNetworkCompvis.LOCON_TARGET:
                                    continue
                                else:
                                    print(f'Cannot find: "{lora_name}", skipped')
                                    continue
                            rank = comp_state_dict[f'{lora_name}.lora_down.weight'].shape[0]
                            alpha = comp_state_dict.get(f'{lora_name}.alpha', torch.tensor(rank)).item()
                            lora = LoConModule(lora_name, child_module, multiplier, rank, alpha)
                            loras.append(lora)

                            replaced_modules.append(child_module)
                        elif child_module.__class__.__name__ == "MultiheadAttention":
                            # make four modules: do not replace the forward method, merge the weights instead
                            self.v2 = True
                            for suffix in ['q', 'k', 'v', 'out']:
                                module_name = prefix + '.' + name + '.' + child_name  # ~.attn
                                module_name = module_name.replace('.', '_')
                                if '_resblocks_23_' in module_name:  # ignore the last block in the StabilityAI text encoder
                                    break
                                lora_name = module_name + '_' + suffix
                                lora_info = LoRAInfo(lora_name, module_name, child_module, multiplier, 0, 0)
                                loras.append(lora_info)

                                replaced_modules.append(child_module)
            return loras, replaced_modules

        for k, v in LoConNetworkCompvis.convert_state_dict_name_to_compvis(self.v2, du_state_dict).items():
            comp_state_dict[k] = v

        self.text_encoder_loras, te_rep_modules = create_modules(
            LoConNetworkCompvis.LORA_PREFIX_TEXT_ENCODER,
            text_encoder,
            LoConNetworkCompvis.TEXT_ENCODER_TARGET_REPLACE_MODULE,
            self.multiplier_tenc
        )
        print(f"create LoCon for Text Encoder: {len(self.text_encoder_loras)} modules.")

        self.unet_loras, unet_rep_modules = create_modules(
            LoConNetworkCompvis.LORA_PREFIX_UNET,
            unet,
            LoConNetworkCompvis.UNET_TARGET_REPLACE_MODULE,
            self.multiplier_unet
        )
        print(f"create LoCon for U-Net: {len(self.unet_loras)} modules.")

        # back up original forward/weights; if multiple modules are applied, do it in the 1st module only
        backed_up = False  # messaging purpose only
        for rep_module in te_rep_modules + unet_rep_modules:
            if rep_module.__class__.__name__ == "MultiheadAttention":  # MHA modules appear multiple times in the list; prevent backing up more than once
                if not hasattr(rep_module, "_lora_org_weights"):
                    # avoid updating the original weights: state_dict is a reference to the original weights
                    rep_module._lora_org_weights = copy.deepcopy(rep_module.state_dict())
                    backed_up = True
            elif not hasattr(rep_module, "_lora_org_forward"):
                rep_module._lora_org_forward = rep_module.forward
                backed_up = True
        if backed_up:
            print("original forward/weights is backed up.")

        # assertion
        names = set()
        for lora in self.text_encoder_loras + self.unet_loras:
            assert lora.lora_name not in names, f"duplicated lora name: {lora.lora_name}"
            names.add(lora.lora_name)

    def restore(self, text_encoder, unet):
        # restore forward/weights from the backed-up attributes for all modules
        restored = False  # messaging purpose only
        modules = []
        modules.extend(text_encoder.modules())
        modules.extend(unet.modules())
        for module in modules:
            if hasattr(module, "_lora_org_forward"):
                module.forward = module._lora_org_forward
                del module._lora_org_forward
                restored = True
            if hasattr(module, "_lora_org_weights"):  # currently a module never has both forward and weights backed up, but support it for future changes
                module.load_state_dict(module._lora_org_weights)
                del module._lora_org_weights
                restored = True

        if restored:
            print("original forward/weights is restored.")

    def apply_lora_modules(self, du_state_dict):
        # conversion 1st step: convert names in state_dict
        state_dict = LoConNetworkCompvis.convert_state_dict_name_to_compvis(self.v2, du_state_dict)

        # check whether state_dict has text_encoder or unet weights
        weights_has_text_encoder = weights_has_unet = False
        for key in state_dict.keys():
            if key.startswith(LoConNetworkCompvis.LORA_PREFIX_TEXT_ENCODER):
                weights_has_text_encoder = True
            elif key.startswith(LoConNetworkCompvis.LORA_PREFIX_UNET):
                weights_has_unet = True
            if weights_has_text_encoder and weights_has_unet:
                break

        apply_text_encoder = weights_has_text_encoder
        apply_unet = weights_has_unet

        if apply_text_encoder:
            print("enable LoCon for text encoder")
        else:
            self.text_encoder_loras = []

        if apply_unet:
            print("enable LoCon for U-Net")
        else:
            self.unet_loras = []

        # add modules to the network: this makes the state_dict obtainable from the network
        mha_loras = {}
        for lora in self.text_encoder_loras + self.unet_loras:
            if type(lora) == LoConModule:
                lora.apply_to()  # ensure the reference to the original Linear is removed: keeping it would add a key to state_dict
                self.add_module(lora.lora_name, lora)
            else:
                # SD2.x MultiheadAttention: merge the weights into the MHA weights
                lora_info: LoRAInfo = lora
                if lora_info.module_name not in mha_loras:
                    mha_loras[lora_info.module_name] = {}

                lora_dic = mha_loras[lora_info.module_name]
                lora_dic[lora_info.lora_name] = lora_info
                if len(lora_dic) == 4:
                    # calculate and apply
                    w_q_dw = state_dict.get(lora_info.module_name + '_q_proj.lora_down.weight')
                    if w_q_dw is not None:  # corresponding LoRA module exists
                        w_q_up = state_dict[lora_info.module_name + '_q_proj.lora_up.weight']
                        w_q_ap = state_dict.get(lora_info.module_name + '_q_proj.alpha', None)
                        w_k_dw = state_dict[lora_info.module_name + '_k_proj.lora_down.weight']
                        w_k_up = state_dict[lora_info.module_name + '_k_proj.lora_up.weight']
                        w_k_ap = state_dict.get(lora_info.module_name + '_k_proj.alpha', None)
                        w_v_dw = state_dict[lora_info.module_name + '_v_proj.lora_down.weight']
                        w_v_up = state_dict[lora_info.module_name + '_v_proj.lora_up.weight']
                        w_v_ap = state_dict.get(lora_info.module_name + '_v_proj.alpha', None)
                        w_out_dw = state_dict[lora_info.module_name + '_out_proj.lora_down.weight']
                        w_out_up = state_dict[lora_info.module_name + '_out_proj.lora_up.weight']
                        w_out_ap = state_dict.get(lora_info.module_name + '_out_proj.alpha', None)

                        sd = lora_info.module.state_dict()
                        qkv_weight = sd['in_proj_weight']
                        out_weight = sd['out_proj.weight']
                        dev = qkv_weight.device

                        def merge_weights(weight, up_weight, down_weight, alpha=None):
                            # calculate in float
                            if alpha is None:
                                alpha = down_weight.shape[0]
                            alpha = float(alpha)
                            scale = alpha / down_weight.shape[0]
                            dtype = weight.dtype
                            weight = weight.float() + lora_info.multiplier * (up_weight.to(dev, dtype=torch.float) @ down_weight.to(dev, dtype=torch.float)) * scale
                            weight = weight.to(dtype)
                            return weight

                        q_weight, k_weight, v_weight = torch.chunk(qkv_weight, 3)
                        if q_weight.size()[1] == w_q_up.size()[0]:
                            q_weight = merge_weights(q_weight, w_q_up, w_q_dw, w_q_ap)
                            k_weight = merge_weights(k_weight, w_k_up, w_k_dw, w_k_ap)
                            v_weight = merge_weights(v_weight, w_v_up, w_v_dw, w_v_ap)
                            qkv_weight = torch.cat([q_weight, k_weight, v_weight])

                            out_weight = merge_weights(out_weight, w_out_up, w_out_dw, w_out_ap)

                            sd['in_proj_weight'] = qkv_weight.to(dev)
                            sd['out_proj.weight'] = out_weight.to(dev)

                            lora_info.module.load_state_dict(sd)
                        else:
                            # different dim: version mismatch
                            print(f"shape of weight is different: {lora_info.module_name}. SD version may be different")

                        for t in ["q", "k", "v", "out"]:
                            del state_dict[f"{lora_info.module_name}_{t}_proj.lora_down.weight"]
                            del state_dict[f"{lora_info.module_name}_{t}_proj.lora_up.weight"]
                            alpha_key = f"{lora_info.module_name}_{t}_proj.alpha"
                            if alpha_key in state_dict:
                                del state_dict[alpha_key]
                    else:
                        # corresponding weight does not exist: version mismatch
                        pass

        # conversion 2nd step: convert weight shapes (and handle 'wrapped')
        state_dict = self.convert_state_dict_shape_to_compvis(state_dict)

        return state_dict

    def convert_state_dict_shape_to_compvis(self, state_dict):
        # shape conversion
        current_sd = self.state_dict()  # to get target shapes
        wrapped = False
        count = 0
        for key in list(state_dict.keys()):
            if key not in current_sd:
                continue  # might be an error or another version
            if "wrapped" in key:
                wrapped = True

            value: torch.Tensor = state_dict[key]
            if value.size() != current_sd[key].size():
                # print(key, value.size(), current_sd[key].size())
                # print(f"convert weights shape: {key}, from: {value.size()}, {len(value.size())}")
                count += 1
                if '.alpha' in key:
                    assert value.size().numel() == 1
                    value = torch.tensor(value.item())
                elif len(value.size()) == 4:
                    value = value.squeeze(3).squeeze(2)
                else:
                    value = value.unsqueeze(2).unsqueeze(3)
                state_dict[key] = value
            if tuple(value.size()) != tuple(current_sd[key].size()):
                print(
                    f"weight's shape is different: {key} expected {current_sd[key].size()} found {value.size()}. SD version may be different")
                del state_dict[key]
        print(f"shapes for {count} weights are converted.")

        # convert 'wrapped'
        if not wrapped:
            print("remove 'wrapped' from keys")
            for key in list(state_dict.keys()):
                if "_wrapped_" in key:
                    new_key = key.replace("_wrapped_", "_")
                    state_dict[new_key] = state_dict[key]
                    del state_dict[key]

        return state_dict
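A minimal sketch of the key-name mapping this file performs when loading a diffusers-style LoRA into a CompVis model; the diffusers-style key below is illustrative:

```python
from locon_compvis import LoConNetworkCompvis

# hypothetical diffusers-style key prefix from an SD1.x LoRA state dict (so v2=False)
du_name = 'lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_q'
cv_name = LoConNetworkCompvis.convert_diffusers_name_to_compvis(False, du_name)
print(cv_name)
# lora_unet_input_blocks_2_1_transformer_blocks_0_attn1_to_q
# (cv_index = 1 + du_block_index * 3 + du_attn_index = 1 + 0 * 3 + 1 = 2)
```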