Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
- onnx/up_blocks.0/attentions.1.transformer_blocks.0.norm1.weight +0 -0
- onnx/up_blocks.0/attentions.1.transformer_blocks.0.norm3.bias +0 -0
- onnx/up_blocks.0/attentions.1.transformer_blocks.1.norm1.weight +0 -0
- onnx/up_blocks.0/attentions.1.transformer_blocks.1.norm2.weight +0 -0
- onnx/up_blocks.0/attentions.1.transformer_blocks.1.norm3.weight +0 -0
- onnx/up_blocks.0/attentions.1.transformer_blocks.2.norm2.bias +0 -0
- onnx/up_blocks.0/attentions.1.transformer_blocks.2.norm3.bias +0 -0
- onnx/up_blocks.0/attentions.1.transformer_blocks.3.ff.net.0.proj.bias +0 -0
- onnx/up_blocks.0/attentions.1.transformer_blocks.3.ff.net.2.bias +0 -0
- onnx/up_blocks.0/attentions.1.transformer_blocks.3.norm1.weight +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.4.norm2.weight +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.5.norm1.weight +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.5.norm3.bias +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.6.attn2.to_out.0.bias +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.6.ff.net.0.proj.bias +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.6.norm1.bias +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.6.norm1.weight +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.6.norm2.bias +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.6.norm3.weight +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.7.attn1.to_out.0.bias +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.7.norm2.bias +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.7.norm2.weight +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.8.attn1.to_out.0.bias +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.8.attn2.to_out.0.bias +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.8.ff.net.0.proj.bias +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.8.ff.net.2.bias +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.8.norm1.bias +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.8.norm1.weight +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.8.norm2.bias +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.8.norm2.weight +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.8.norm3.bias +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.8.norm3.weight +3 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.9.attn1.to_out.0.bias +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.9.attn2.to_out.0.bias +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.9.norm1.bias +0 -0
- onnx/up_blocks.0/attentions.2.transformer_blocks.9.norm2.weight +0 -0
- onnx/up_blocks.0/onnx__Add_6908 +0 -0
- onnx/up_blocks.0/onnx__Add_7152 +0 -0
- onnx/up_blocks.0/onnx__Add_7154 +0 -0
- onnx/up_blocks.0/onnx__Add_7400 +0 -0
- onnx/up_blocks.0/onnx__Mul_7153 +0 -0
- onnx/up_blocks.0/onnx__Mul_7399 +0 -0
- onnx/up_blocks.0/onnx__Mul_7401 +0 -0
- onnx/up_blocks.0/resnets.1.conv2.bias +0 -0
- onnx/up_blocks.0/resnets.1.time_emb_proj.bias +0 -0
- onnx/up_blocks.0/resnets.2.conv1.bias +0 -0
- onnx/up_blocks.0/resnets.2.conv2.bias +0 -0
- onnx/up_blocks.0/upsamplers.0.conv.bias +0 -0
- src/assets/sdxl_cache.png +0 -0
- src/cache_diffusion/cachify.py +144 -0
onnx/up_blocks.0/attentions.1.transformer_blocks.0.norm1.weight ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.1.transformer_blocks.0.norm3.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.1.transformer_blocks.1.norm1.weight ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.1.transformer_blocks.1.norm2.weight ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.1.transformer_blocks.1.norm3.weight ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.1.transformer_blocks.2.norm2.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.1.transformer_blocks.2.norm3.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.1.transformer_blocks.3.ff.net.0.proj.bias ADDED (binary file, 20.5 kB)
onnx/up_blocks.0/attentions.1.transformer_blocks.3.ff.net.2.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.1.transformer_blocks.3.norm1.weight ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.4.norm2.weight ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.5.norm1.weight ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.5.norm3.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.6.attn2.to_out.0.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.6.ff.net.0.proj.bias ADDED (binary file, 20.5 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.6.norm1.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.6.norm1.weight ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.6.norm2.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.6.norm3.weight ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.7.attn1.to_out.0.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.7.norm2.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.7.norm2.weight ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.8.attn1.to_out.0.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.8.attn2.to_out.0.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.8.ff.net.0.proj.bias ADDED (binary file, 20.5 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.8.ff.net.2.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.8.norm1.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.8.norm1.weight ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.8.norm2.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.8.norm2.weight ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.8.norm3.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.8.norm3.weight ADDED (diffed as text, @@ -0,0 +1,3 @@; the 3 added lines are raw tensor bytes misrendered as text and are not reproduced here)
onnx/up_blocks.0/attentions.2.transformer_blocks.9.attn1.to_out.0.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.9.attn2.to_out.0.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.9.norm1.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/attentions.2.transformer_blocks.9.norm2.weight ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/onnx__Add_6908 ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/onnx__Add_7152 ADDED (binary file, 5.12 kB)
onnx/up_blocks.0/onnx__Add_7154 ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/onnx__Add_7400 ADDED (binary file, 3.84 kB)
onnx/up_blocks.0/onnx__Mul_7153 ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/onnx__Mul_7399 ADDED (binary file, 3.84 kB)
onnx/up_blocks.0/onnx__Mul_7401 ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/resnets.1.conv2.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/resnets.1.time_emb_proj.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/resnets.2.conv1.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/resnets.2.conv2.bias ADDED (binary file, 2.56 kB)
onnx/up_blocks.0/upsamplers.0.conv.bias ADDED (binary file, 2.56 kB)
src/assets/sdxl_cache.png ADDED (image)
src/cache_diffusion/cachify.py ADDED
@@ -0,0 +1,144 @@
# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: MIT
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.

import fnmatch
from contextlib import contextmanager

from diffusers.models.attention import BasicTransformerBlock, JointTransformerBlock
from diffusers.models.transformers.pixart_transformer_2d import PixArtTransformer2DModel
from diffusers.models.transformers.transformer_sd3 import SD3Transformer2DModel
from diffusers.models.unets.unet_2d_blocks import (
    CrossAttnDownBlock2D,
    CrossAttnUpBlock2D,
    DownBlock2D,
    UNetMidBlock2DCrossAttn,
    UpBlock2D,
)
from diffusers.models.unets.unet_2d_condition import UNet2DConditionModel
from diffusers.models.unets.unet_3d_blocks import (
    CrossAttnDownBlockSpatioTemporal,
    CrossAttnUpBlockSpatioTemporal,
    DownBlockSpatioTemporal,
    UNetMidBlockSpatioTemporal,
    UpBlockSpatioTemporal,
)
from diffusers.models.unets.unet_spatio_temporal_condition import UNetSpatioTemporalConditionModel

from .module import CachedModule
from .utils import replace_module

# Maps each supported model class to the block types that may be wrapped in a
# CachedModule. Trailing commas make the single-element entries real tuples.
CACHED_PIPE = {
    UNet2DConditionModel: (
        DownBlock2D,
        CrossAttnDownBlock2D,
        UNetMidBlock2DCrossAttn,
        CrossAttnUpBlock2D,
        UpBlock2D,
    ),
    PixArtTransformer2DModel: (BasicTransformerBlock,),
    UNetSpatioTemporalConditionModel: (
        CrossAttnDownBlockSpatioTemporal,
        DownBlockSpatioTemporal,
        UpBlockSpatioTemporal,
        CrossAttnUpBlockSpatioTemporal,
        UNetMidBlockSpatioTemporal,
    ),
    SD3Transformer2DModel: (JointTransformerBlock,),
}


def _apply_to_modules(model, action, modules=None, config_list=None):
    # TensorRT path: the model holds compiled engines rather than nn.Modules.
    if hasattr(model, "use_trt_infer") and model.use_trt_infer:
        for key, module in model.engines.items():
            if isinstance(module, CachedModule):
                action(module)
            elif config_list:
                for config in config_list:
                    if _pass(key, config["wildcard_or_filter_func"]):
                        model.engines[key] = CachedModule(module, config["select_cache_step_func"])
    else:
        for name, module in model.named_modules():
            if isinstance(module, CachedModule):
                action(module)
            elif modules and config_list:
                for config in config_list:
                    if _pass(name, config["wildcard_or_filter_func"]) and isinstance(
                        module, modules
                    ):
                        replace_module(
                            model,
                            name,
                            CachedModule(module, config["select_cache_step_func"]),
                        )


def cachify(model, config_list, modules):
    def cache_action(module):
        pass  # No action needed; the wrapping is handled inside _apply_to_modules itself

    _apply_to_modules(model, cache_action, modules, config_list)


def disable(pipe):
    model = get_model(pipe)
    _apply_to_modules(model, lambda module: module.disable_cache())


def enable(pipe):
    model = get_model(pipe)
    _apply_to_modules(model, lambda module: module.enable_cache())


def reset_status(pipe):
    model = get_model(pipe)
    _apply_to_modules(model, lambda module: setattr(module, "cur_step", 0))


def _pass(name, wildcard_or_filter_func):
    if isinstance(wildcard_or_filter_func, str):
        return fnmatch.fnmatch(name, wildcard_or_filter_func)
    elif callable(wildcard_or_filter_func):
        return wildcard_or_filter_func(name)
    else:
        raise NotImplementedError(f"Unsupported type {type(wildcard_or_filter_func)}")


def get_model(pipe):
    if hasattr(pipe, "unet"):
        return pipe.unet
    elif hasattr(pipe, "transformer"):
        return pipe.transformer
    else:
        raise KeyError(f"{pipe.__class__.__name__} has neither a `unet` nor a `transformer`")


@contextmanager
def infer(pipe):
    try:
        yield pipe
    finally:
        reset_status(pipe)


def prepare(pipe, config_list):
    model = get_model(pipe)
    assert model.__class__ in CACHED_PIPE, f"{model.__class__} is not supported!"
    cachify(model, config_list, CACHED_PIPE[model.__class__])
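Reviewer note: `CachedModule` (from `.module`) and `replace_module` (from `.utils`) are not among the 50 files shown in this view, so the caching semantics are not visible here. From the interface used above (enable_cache()/disable_cache(), a cur_step counter that reset_status() zeroes, and construction from a block plus a select_cache_step_func), a plausible mental model is a wrapper that recomputes its block on some denoising steps and replays the stored output on the others. The sketch below is an assumption for illustration, not the committed implementation; the name CachedModuleSketch and the reuse-on-odd-steps rule are made up.

import torch
import torch.nn as nn


class CachedModuleSketch(nn.Module):
    """Hypothetical stand-in for CachedModule: reuse the cached output
    whenever select_cache_step_func(cur_step) is truthy (an assumption)."""

    def __init__(self, block, select_cache_step_func):
        super().__init__()
        self.block = block
        self.select_cache_step_func = select_cache_step_func
        self.cache_enabled = True  # toggled by enable()/disable() in cachify.py
        self.cur_step = 0          # zeroed by reset_status() after each inference
        self._cached_output = None

    def enable_cache(self):
        self.cache_enabled = True

    def disable_cache(self):
        self.cache_enabled = False

    def forward(self, *args, **kwargs):
        reuse = (
            self.cache_enabled
            and self._cached_output is not None
            and self.select_cache_step_func(self.cur_step)
        )
        if not reuse:
            # Recompute and refresh the cache on non-cache steps.
            self._cached_output = self.block(*args, **kwargs)
        self.cur_step += 1
        return self._cached_output


block = nn.Linear(8, 8)
cached = CachedModuleSketch(block, lambda step: step % 2 == 1)
x = torch.randn(1, 8)
for _ in range(4):   # even steps recompute, odd steps replay the cached output
    y = cached(x)
cached.cur_step = 0  # equivalent to what reset_status(pipe) does per module

End to end, the committed entry points would presumably be driven as below. This is a sketch, assuming src/ is on the import path; the model id is the standard SDXL checkpoint, and the wildcard pattern and step rule are illustrative values for the config_list schema consumed by cachify()/_apply_to_modules() ("wildcard_or_filter_func" as an fnmatch string or callable, "select_cache_step_func" as a step predicate).

from diffusers import DiffusionPipeline

from cache_diffusion import cachify

pipe = DiffusionPipeline.from_pretrained("stabilityai/stable-diffusion-xl-base-1.0")
config_list = [
    {
        "wildcard_or_filter_func": "*up_blocks.0*",  # fnmatch over named_modules() names
        "select_cache_step_func": lambda step: step % 2 == 1,
    }
]
cachify.prepare(pipe, config_list)        # wraps matching blocks in CachedModule
with cachify.infer(pipe) as cached_pipe:  # resets every cur_step when the block exits
    image = cached_pipe(prompt="a photo of a cat").images[0]

The context manager is the notable design choice here: because each cached block tracks its own cur_step, forgetting to reset between generations would silently replay stale outputs, and infer() makes that reset automatic.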