Buckets:
| <meta charset="utf-8"><meta name="hf:doc:metadata" content="{&quot;title&quot;:&quot;Getting Started: VAE Decode with Hybrid Inference&quot;,&quot;local&quot;:&quot;getting-started-vae-decode-with-hybrid-inference&quot;,&quot;sections&quot;:[{&quot;title&quot;:&quot;Memory&quot;,&quot;local&quot;:&quot;memory&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2},{&quot;title&quot;:&quot;Available VAEs&quot;,&quot;local&quot;:&quot;available-vaes&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2},{&quot;title&quot;:&quot;Code&quot;,&quot;local&quot;:&quot;code&quot;,&quot;sections&quot;:[{&quot;title&quot;:&quot;Basic example&quot;,&quot;local&quot;:&quot;basic-example&quot;,&quot;sections&quot;:[],&quot;depth&quot;:3},{&quot;title&quot;:&quot;Generation&quot;,&quot;local&quot;:&quot;generation&quot;,&quot;sections&quot;:[],&quot;depth&quot;:3},{&quot;title&quot;:&quot;Queueing&quot;,&quot;local&quot;:&quot;queueing&quot;,&quot;sections&quot;:[],&quot;depth&quot;:3}],&quot;depth&quot;:2},{&quot;title&quot;:&quot;Integrations&quot;,&quot;local&quot;:&quot;integrations&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2}],&quot;depth&quot;:1}"> | |
| <link href="/docs/diffusers/pr_11739/en/_app/immutable/assets/0.e3b0c442.css" rel="stylesheet"> | |
| <link rel="modulepreload" href="/docs/diffusers/pr_11739/en/_app/immutable/entry/start.56da5b2b.js"> | |
| <link rel="modulepreload" href="/docs/diffusers/pr_11739/en/_app/immutable/chunks/scheduler.53228c21.js"> | |
| <link rel="modulepreload" href="/docs/diffusers/pr_11739/en/_app/immutable/chunks/singletons.70d778ba.js"> | |
| <link rel="modulepreload" href="/docs/diffusers/pr_11739/en/_app/immutable/chunks/index.e93d0901.js"> | |
| <link rel="modulepreload" href="/docs/diffusers/pr_11739/en/_app/immutable/chunks/paths.a0989064.js"> | |
| <link rel="modulepreload" href="/docs/diffusers/pr_11739/en/_app/immutable/entry/app.794fd245.js"> | |
| <link rel="modulepreload" href="/docs/diffusers/pr_11739/en/_app/immutable/chunks/preload-helper.b5c24ab3.js"> | |
| <link rel="modulepreload" href="/docs/diffusers/pr_11739/en/_app/immutable/chunks/index.100fac89.js"> | |
| <link rel="modulepreload" href="/docs/diffusers/pr_11739/en/_app/immutable/nodes/0.d68fe2c3.js"> | |
| <link rel="modulepreload" href="/docs/diffusers/pr_11739/en/_app/immutable/chunks/each.e59479a4.js"> | |
| <link rel="modulepreload" href="/docs/diffusers/pr_11739/en/_app/immutable/nodes/260.989b612b.js"> | |
| <link rel="modulepreload" href="/docs/diffusers/pr_11739/en/_app/immutable/chunks/CopyLLMTxtMenu.ed0e3681.js"> | |
| <link rel="modulepreload" href="/docs/diffusers/pr_11739/en/_app/immutable/chunks/globals.7f7f1b26.js"> | |
| <link rel="modulepreload" href="/docs/diffusers/pr_11739/en/_app/immutable/chunks/IconCopy.38cf8f56.js"> | |
| <link rel="modulepreload" href="/docs/diffusers/pr_11739/en/_app/immutable/chunks/MermaidChart.svelte_svelte_type_style_lang.dd42f483.js"> | |
| <link rel="modulepreload" href="/docs/diffusers/pr_11739/en/_app/immutable/chunks/CodeBlock.d30a6509.js"><!-- HEAD_svelte-u9bgzb_START --><meta name="hf:doc:metadata" content="{"title":"Getting Started: VAE Decode with Hybrid Inference","local":"getting-started-vae-decode-with-hybrid-inference","sections":[{"title":"Memory","local":"memory","sections":[],"depth":2},{"title":"Available VAEs","local":"available-vaes","sections":[],"depth":2},{"title":"Code","local":"code","sections":[{"title":"Basic example","local":"basic-example","sections":[],"depth":3},{"title":"Generation","local":"generation","sections":[],"depth":3},{"title":"Queueing","local":"queueing","sections":[],"depth":3}],"depth":2},{"title":"Integrations","local":"integrations","sections":[],"depth":2}],"depth":1}"><!-- HEAD_svelte-u9bgzb_END --> <p></p> <div class="items-center shrink-0 min-w-[100px] max-sm:min-w-[50px] justify-end ml-auto flex" style="float: right; margin-left: 10px; display: inline-flex; position: relative; z-index: 10;"><div class="inline-flex rounded-md max-sm:rounded-sm"><button class="inline-flex items-center gap-1 max-sm:gap-0.5 h-6 max-sm:h-5 px-2 max-sm:px-1.5 text-[11px] max-sm:text-[9px] font-medium text-gray-800 border border-r-0 rounded-l-md max-sm:rounded-l-sm border-gray-200 bg-white hover:shadow-inner dark:border-gray-850 dark:bg-gray-950 dark:text-gray-200 dark:hover:bg-gray-800" aria-live="polite"><span class="inline-flex items-center justify-center rounded-md p-0.5 max-sm:p-0"><svg class="w-3 h-3 max-sm:w-2.5 max-sm:h-2.5" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" 
height="32"></rect></svg></span> <span>Copy page</span></button> <button class="inline-flex items-center justify-center w-6 max-sm:w-5 h-6 max-sm:h-5 disabled:pointer-events-none text-sm text-gray-500 hover:text-gray-700 dark:hover:text-white rounded-r-md max-sm:rounded-r-sm border border-l transition border-gray-200 bg-white hover:shadow-inner dark:border-gray-850 dark:bg-gray-950 dark:text-gray-200 dark:hover:bg-gray-800" aria-haspopup="menu" aria-expanded="false" aria-label="Open copy menu"><svg class="transition-transform text-gray-400 overflow-visible w-3 h-3 max-sm:w-2.5 max-sm:h-2.5 rotate-0" width="1em" height="1em" viewBox="0 0 12 7" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M1 1L6 6L11 1" stroke="currentColor"></path></svg></button></div> </div> <h1 class="relative group"><a id="getting-started-vae-decode-with-hybrid-inference" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#getting-started-vae-decode-with-hybrid-inference"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>Getting Started: VAE Decode with Hybrid Inference</span></h1> <p data-svelte-h="svelte-11yet1x">VAE decode is an essential component 
of diffusion models - turning latent representations into images or videos.</p> <h2 class="relative group"><a id="memory" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#memory"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>Memory</span></h2> <p data-svelte-h="svelte-yrxpjh">These tables demonstrate the VRAM requirements for VAE decode with SD v1 and SD XL on different GPUs.</p> <p data-svelte-h="svelte-6p6f4c">For the majority of these GPUs the memory usage % dictates other models (text encoders, UNet/Transformer) must be offloaded, or tiled decoding has to be used which increases time taken and impacts quality.</p> <details data-svelte-h="svelte-1b5t2ka"><summary>SD v1.5</summary> <table><thead><tr><th>GPU</th> <th>Resolution</th> <th>Time (seconds)</th> <th>Memory (%)</th> <th>Tiled Time (secs)</th> <th>Tiled Memory (%)</th></tr></thead> <tbody><tr><td>NVIDIA GeForce RTX 4090</td> <td>512x512</td> <td>0.031</td> <td>5.60%</td> <td>0.031 (0%)</td> <td>5.60%</td></tr> <tr><td>NVIDIA GeForce RTX 4090</td> <td>1024x1024</td> <td>0.148</td> <td>20.00%</td> <td>0.301 (+103%)</td> <td>5.60%</td></tr> <tr><td>NVIDIA GeForce RTX 
4080</td> <td>512x512</td> <td>0.05</td> <td>8.40%</td> <td>0.050 (0%)</td> <td>8.40%</td></tr> <tr><td>NVIDIA GeForce RTX 4080</td> <td>1024x1024</td> <td>0.224</td> <td>30.00%</td> <td>0.356 (+59%)</td> <td>8.40%</td></tr> <tr><td>NVIDIA GeForce RTX 4070 Ti</td> <td>512x512</td> <td>0.066</td> <td>11.30%</td> <td>0.066 (0%)</td> <td>11.30%</td></tr> <tr><td>NVIDIA GeForce RTX 4070 Ti</td> <td>1024x1024</td> <td>0.284</td> <td>40.50%</td> <td>0.454 (+60%)</td> <td>11.40%</td></tr> <tr><td>NVIDIA GeForce RTX 3090</td> <td>512x512</td> <td>0.062</td> <td>5.20%</td> <td>0.062 (0%)</td> <td>5.20%</td></tr> <tr><td>NVIDIA GeForce RTX 3090</td> <td>1024x1024</td> <td>0.253</td> <td>18.50%</td> <td>0.464 (+83%)</td> <td>5.20%</td></tr> <tr><td>NVIDIA GeForce RTX 3080</td> <td>512x512</td> <td>0.07</td> <td>12.80%</td> <td>0.070 (0%)</td> <td>12.80%</td></tr> <tr><td>NVIDIA GeForce RTX 3080</td> <td>1024x1024</td> <td>0.286</td> <td>45.30%</td> <td>0.466 (+63%)</td> <td>12.90%</td></tr> <tr><td>NVIDIA GeForce RTX 3070</td> <td>512x512</td> <td>0.102</td> <td>15.90%</td> <td>0.102 (0%)</td> <td>15.90%</td></tr> <tr><td>NVIDIA GeForce RTX 3070</td> <td>1024x1024</td> <td>0.421</td> <td>56.30%</td> <td>0.746 (+77%)</td> <td>16.00%</td></tr></tbody></table></details> <details data-svelte-h="svelte-1565v4j"><summary>SDXL</summary> <table><thead><tr><th>GPU</th> <th>Resolution</th> <th>Time (seconds)</th> <th>Memory Consumed (%)</th> <th>Tiled Time (seconds)</th> <th>Tiled Memory (%)</th></tr></thead> <tbody><tr><td>NVIDIA GeForce RTX 4090</td> <td>512x512</td> <td>0.057</td> <td>10.00%</td> <td>0.057 (0%)</td> <td>10.00%</td></tr> <tr><td>NVIDIA GeForce RTX 4090</td> <td>1024x1024</td> <td>0.256</td> <td>35.50%</td> <td>0.257 (+0.4%)</td> <td>35.50%</td></tr> <tr><td>NVIDIA GeForce RTX 4080</td> <td>512x512</td> <td>0.092</td> <td>15.00%</td> <td>0.092 (0%)</td> <td>15.00%</td></tr> <tr><td>NVIDIA GeForce RTX 4080</td> <td>1024x1024</td> <td>0.406</td> <td>53.30%</td> 
<td>0.406 (0%)</td> <td>53.30%</td></tr> <tr><td>NVIDIA GeForce RTX 4070 Ti</td> <td>512x512</td> <td>0.121</td> <td>20.20%</td> <td>0.120 (-0.8%)</td> <td>20.20%</td></tr> <tr><td>NVIDIA GeForce RTX 4070 Ti</td> <td>1024x1024</td> <td>0.519</td> <td>72.00%</td> <td>0.519 (0%)</td> <td>72.00%</td></tr> <tr><td>NVIDIA GeForce RTX 3090</td> <td>512x512</td> <td>0.107</td> <td>10.50%</td> <td>0.107 (0%)</td> <td>10.50%</td></tr> <tr><td>NVIDIA GeForce RTX 3090</td> <td>1024x1024</td> <td>0.459</td> <td>38.00%</td> <td>0.460 (+0.2%)</td> <td>38.00%</td></tr> <tr><td>NVIDIA GeForce RTX 3080</td> <td>512x512</td> <td>0.121</td> <td>25.60%</td> <td>0.121 (0%)</td> <td>25.60%</td></tr> <tr><td>NVIDIA GeForce RTX 3080</td> <td>1024x1024</td> <td>0.524</td> <td>93.00%</td> <td>0.524 (0%)</td> <td>93.00%</td></tr> <tr><td>NVIDIA GeForce RTX 3070</td> <td>512x512</td> <td>0.183</td> <td>31.80%</td> <td>0.183 (0%)</td> <td>31.80%</td></tr> <tr><td>NVIDIA GeForce RTX 3070</td> <td>1024x1024</td> <td>0.794</td> <td>96.40%</td> <td>0.794 (0%)</td> <td>96.40%</td></tr></tbody></table></details> <h2 class="relative group"><a id="available-vaes" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#available-vaes"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 
0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>Available VAEs</span></h2> <table data-svelte-h="svelte-2ec97g"><thead><tr><th align="center"></th> <th align="center"><strong>Endpoint</strong></th> <th align="center"><strong>Model</strong></th></tr></thead> <tbody><tr><td align="center"><strong>Stable Diffusion v1</strong></td> <td align="center"><a href="https://q1bj3bpq6kzilnsu.us-east-1.aws.endpoints.huggingface.cloud" rel="nofollow">https://q1bj3bpq6kzilnsu.us-east-1.aws.endpoints.huggingface.cloud</a></td> <td align="center"><a href="https://hf.co/stabilityai/sd-vae-ft-mse" rel="nofollow"><code>stabilityai/sd-vae-ft-mse</code></a></td></tr> <tr><td align="center"><strong>Stable Diffusion XL</strong></td> <td align="center"><a href="https://x2dmsqunjd6k9prw.us-east-1.aws.endpoints.huggingface.cloud" rel="nofollow">https://x2dmsqunjd6k9prw.us-east-1.aws.endpoints.huggingface.cloud</a></td> <td align="center"><a href="https://hf.co/madebyollin/sdxl-vae-fp16-fix" rel="nofollow"><code>madebyollin/sdxl-vae-fp16-fix</code></a></td></tr> <tr><td align="center"><strong>Flux</strong></td> <td align="center"><a href="https://whhx50ex1aryqvw6.us-east-1.aws.endpoints.huggingface.cloud" rel="nofollow">https://whhx50ex1aryqvw6.us-east-1.aws.endpoints.huggingface.cloud</a></td> <td align="center"><a href="https://hf.co/black-forest-labs/FLUX.1-schnell" rel="nofollow"><code>black-forest-labs/FLUX.1-schnell</code></a></td></tr> <tr><td align="center"><strong>HunyuanVideo</strong></td> <td align="center"><a href="https://o7ywnmrahorts457.us-east-1.aws.endpoints.huggingface.cloud" rel="nofollow">https://o7ywnmrahorts457.us-east-1.aws.endpoints.huggingface.cloud</a></td> <td align="center"><a href="https://hf.co/hunyuanvideo-community/HunyuanVideo" rel="nofollow"><code>hunyuanvideo-community/HunyuanVideo</code></a></td></tr></tbody></table> <blockquote class="tip" data-svelte-h="svelte-d0r3c5"><p>Model support can 
be requested <a href="https://github.com/huggingface/diffusers/issues/new?template=remote-vae-pilot-feedback.yml" rel="nofollow">here</a>.</p></blockquote> <h2 class="relative group"><a id="code" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#code"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>Code</span></h2> <blockquote class="tip" data-svelte-h="svelte-11oojap"><p>Install <code>diffusers</code> from <code>main</code> to run the code: <code>pip install git+https://github.com/huggingface/diffusers@main</code></p></blockquote> <p data-svelte-h="svelte-1056iw0">A helper method simplifies interacting with Hybrid Inference.</p> <div class="code-block relative "><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path 
d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-keyword">from</span> diffusers.utils.remote_utils <span class="hljs-keyword">import</span> remote_decode<!-- HTML_TAG_END --></pre></div> <h3 class="relative group"><a id="basic-example" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#basic-example"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>Basic example</span></h3> <p data-svelte-h="svelte-759h8c">Here, we show how to use the remote VAE on random 
tensors.</p> <details><summary data-svelte-h="svelte-14caxiv">Code</summary> <div class="code-block relative "><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START -->image = remote_decode( | |
| endpoint=<span class="hljs-string">"https://q1bj3bpq6kzilnsu.us-east-1.aws.endpoints.huggingface.cloud/"</span>, | |
| tensor=torch.randn([<span class="hljs-number">1</span>, <span class="hljs-number">4</span>, <span class="hljs-number">64</span>, <span class="hljs-number">64</span>], dtype=torch.float16), | |
| scaling_factor=<span class="hljs-number">0.18215</span>, | |
| )<!-- HTML_TAG_END --></pre></div></details> <figure class="image flex flex-col items-center justify-center text-center m-0 w-full" data-svelte-h="svelte-1e4q5du"><img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/remote_vae/output.png"></figure> <p data-svelte-h="svelte-1iiplqs">Usage for Flux is slightly different. Flux latents are packed so we need to send the <code>height</code> and <code>width</code>.</p> <details><summary data-svelte-h="svelte-14caxiv">Code</summary> <div class="code-block relative "><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START -->image = remote_decode( | |
| endpoint=<span class="hljs-string">"https://whhx50ex1aryqvw6.us-east-1.aws.endpoints.huggingface.cloud/"</span>, | |
| tensor=torch.randn([<span class="hljs-number">1</span>, <span class="hljs-number">4096</span>, <span class="hljs-number">64</span>], dtype=torch.float16), | |
| height=<span class="hljs-number">1024</span>, | |
| width=<span class="hljs-number">1024</span>, | |
| scaling_factor=<span class="hljs-number">0.3611</span>, | |
| shift_factor=<span class="hljs-number">0.1159</span>, | |
| )<!-- HTML_TAG_END --></pre></div></details> <figure class="image flex flex-col items-center justify-center text-center m-0 w-full" data-svelte-h="svelte-a1t5o1"><img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/remote_vae/flux_random_latent.png"></figure> <p data-svelte-h="svelte-1dy3nc4">Finally, an example for HunyuanVideo.</p> <details><summary data-svelte-h="svelte-14caxiv">Code</summary> <div class="code-block relative "><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START -->video = remote_decode( | |
| endpoint=<span class="hljs-string">"https://o7ywnmrahorts457.us-east-1.aws.endpoints.huggingface.cloud/"</span>, | |
| tensor=torch.randn([<span class="hljs-number">1</span>, <span class="hljs-number">16</span>, <span class="hljs-number">3</span>, <span class="hljs-number">40</span>, <span class="hljs-number">64</span>], dtype=torch.float16), | |
| output_type=<span class="hljs-string">"mp4"</span>, | |
| ) | |
| <span class="hljs-keyword">with</span> <span class="hljs-built_in">open</span>(<span class="hljs-string">"video.mp4"</span>, <span class="hljs-string">"wb"</span>) <span class="hljs-keyword">as</span> f: | |
| f.write(video)<!-- HTML_TAG_END --></pre></div></details> <figure class="image flex flex-col items-center justify-center text-center m-0 w-full" data-svelte-h="svelte-1whz541"><video alt="queue.mp4" autoplay="" loop="" autobuffer="" muted="" playsinline=""><source src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/remote_vae/video_1.mp4" type="video/mp4"></video></figure> <h3 class="relative group"><a id="generation" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#generation"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>Generation</span></h3> <p data-svelte-h="svelte-18xvtm3">But we want to use the VAE on an actual pipeline to get an actual image, not random noise. 
The example below shows how to do it with SD v1.5.</p> <details><summary data-svelte-h="svelte-14caxiv">Code</summary> <div class="code-block relative "><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-keyword">from</span> diffusers <span class="hljs-keyword">import</span> StableDiffusionPipeline | |
| pipe = StableDiffusionPipeline.from_pretrained( | |
| <span class="hljs-string">"stable-diffusion-v1-5/stable-diffusion-v1-5"</span>, | |
| torch_dtype=torch.float16, | |
| variant=<span class="hljs-string">"fp16"</span>, | |
| vae=<span class="hljs-literal">None</span>, | |
| ).to(<span class="hljs-string">"cuda"</span>) | |
| prompt = <span class="hljs-string">"Strawberry ice cream, in a stylish modern glass, coconut, splashing milk cream and honey, in a gradient purple background, fluid motion, dynamic movement, cinematic lighting, Mysterious"</span> | |
| latent = pipe( | |
| prompt=prompt, | |
| output_type=<span class="hljs-string">"latent"</span>, | |
| ).images | |
| image = remote_decode( | |
| endpoint=<span class="hljs-string">"https://q1bj3bpq6kzilnsu.us-east-1.aws.endpoints.huggingface.cloud/"</span>, | |
| tensor=latent, | |
| scaling_factor=<span class="hljs-number">0.18215</span>, | |
| ) | |
| image.save(<span class="hljs-string">"test.jpg"</span>)<!-- HTML_TAG_END --></pre></div></details> <figure class="image flex flex-col items-center justify-center text-center m-0 w-full" data-svelte-h="svelte-mfhmkn"><img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/remote_vae/test.jpg"></figure> <p data-svelte-h="svelte-osi5i">Here’s another example with Flux.</p> <details><summary data-svelte-h="svelte-14caxiv">Code</summary> <div class="code-block relative "><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-keyword">from</span> diffusers <span class="hljs-keyword">import</span> FluxPipeline | |
| pipe = FluxPipeline.from_pretrained( | |
| <span class="hljs-string">"black-forest-labs/FLUX.1-schnell"</span>, | |
| torch_dtype=torch.bfloat16, | |
| vae=<span class="hljs-literal">None</span>, | |
| ).to(<span class="hljs-string">"cuda"</span>) | |
| prompt = <span class="hljs-string">"Strawberry ice cream, in a stylish modern glass, coconut, splashing milk cream and honey, in a gradient purple background, fluid motion, dynamic movement, cinematic lighting, Mysterious"</span> | |
| latent = pipe( | |
| prompt=prompt, | |
| guidance_scale=<span class="hljs-number">0.0</span>, | |
| num_inference_steps=<span class="hljs-number">4</span>, | |
| output_type=<span class="hljs-string">"latent"</span>, | |
| ).images | |
| image = remote_decode( | |
| endpoint=<span class="hljs-string">"https://whhx50ex1aryqvw6.us-east-1.aws.endpoints.huggingface.cloud/"</span>, | |
| tensor=latent, | |
| height=<span class="hljs-number">1024</span>, | |
| width=<span class="hljs-number">1024</span>, | |
| scaling_factor=<span class="hljs-number">0.3611</span>, | |
| shift_factor=<span class="hljs-number">0.1159</span>, | |
| ) | |
| image.save(<span class="hljs-string">"test.jpg"</span>)<!-- HTML_TAG_END --></pre></div></details> <figure class="image flex flex-col items-center justify-center text-center m-0 w-full" data-svelte-h="svelte-df3hbj"><img src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/remote_vae/test_1.jpg"></figure> <p data-svelte-h="svelte-1paijes">Here’s an example with HunyuanVideo.</p> <details><summary data-svelte-h="svelte-14caxiv">Code</summary> <div class="code-block relative "><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-keyword">from</span> diffusers <span class="hljs-keyword">import</span> HunyuanVideoPipeline, HunyuanVideoTransformer3DModel | |
| model_id = <span class="hljs-string">"hunyuanvideo-community/HunyuanVideo"</span> | |
| transformer = HunyuanVideoTransformer3DModel.from_pretrained( | |
| model_id, subfolder=<span class="hljs-string">"transformer"</span>, torch_dtype=torch.bfloat16 | |
| ) | |
| pipe = HunyuanVideoPipeline.from_pretrained( | |
| model_id, transformer=transformer, vae=<span class="hljs-literal">None</span>, torch_dtype=torch.float16 | |
| ).to(<span class="hljs-string">"cuda"</span>) | |
| latent = pipe( | |
| prompt=<span class="hljs-string">"A cat walks on the grass, realistic"</span>, | |
| height=<span class="hljs-number">320</span>, | |
| width=<span class="hljs-number">512</span>, | |
| num_frames=<span class="hljs-number">61</span>, | |
| num_inference_steps=<span class="hljs-number">30</span>, | |
| output_type=<span class="hljs-string">"latent"</span>, | |
| ).frames | |
| video = remote_decode( | |
| endpoint=<span class="hljs-string">"https://o7ywnmrahorts457.us-east-1.aws.endpoints.huggingface.cloud/"</span>, | |
| tensor=latent, | |
| output_type=<span class="hljs-string">"mp4"</span>, | |
| ) | |
| <span class="hljs-keyword">if</span> <span class="hljs-built_in">isinstance</span>(video, <span class="hljs-built_in">bytes</span>): | |
| <span class="hljs-keyword">with</span> <span class="hljs-built_in">open</span>(<span class="hljs-string">"video.mp4"</span>, <span class="hljs-string">"wb"</span>) <span class="hljs-keyword">as</span> f: | |
| f.write(video)<!-- HTML_TAG_END --></pre></div></details> <figure class="image flex flex-col items-center justify-center text-center m-0 w-full" data-svelte-h="svelte-f15wo1"><video alt="video.mp4" autoplay="" loop="" autobuffer="" muted="" playsinline=""><source src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/remote_vae/video.mp4" type="video/mp4"></video></figure> <h3 class="relative group"><a id="queueing" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#queueing"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>Queueing</span></h3> <p data-svelte-h="svelte-1giv276">One of the great benefits of using a remote VAE is that we can queue multiple generation requests. While the current latent is being processed for decoding, we can already queue another one. 
This helps improve concurrency.</p> <details><summary data-svelte-h="svelte-14caxiv">Code</summary> <div class="code-block relative "><div class="absolute top-2.5 right-4"><button class="inline-flex items-center relative text-sm focus:text-green-500 cursor-pointer focus:outline-none transition duration-200 ease-in-out opacity-0 mx-0.5 text-gray-600 " title="code excerpt" type="button"><svg class="" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M28,10V28H10V10H28m0-2H10a2,2,0,0,0-2,2V28a2,2,0,0,0,2,2H28a2,2,0,0,0,2-2V10a2,2,0,0,0-2-2Z" transform="translate(0)"></path><path d="M4,18H2V4A2,2,0,0,1,4,2H18V4H4Z" transform="translate(0)"></path><rect fill="none" width="32" height="32"></rect></svg> <div class="absolute pointer-events-none transition-opacity bg-black text-white py-1 px-2 leading-tight rounded font-normal shadow left-1/2 top-full transform -translate-x-1/2 translate-y-2 opacity-0"><div class="absolute bottom-full left-1/2 transform -translate-x-1/2 w-0 h-0 border-black border-4 border-t-0" style="border-left-color: transparent; border-right-color: transparent; "></div> Copied</div></button></div> <pre class=""><!-- HTML_TAG_START --><span class="hljs-keyword">import</span> queue | |
| <span class="hljs-keyword">import</span> threading | |
| <span class="hljs-keyword">from</span> IPython.display <span class="hljs-keyword">import</span> display | |
| <span class="hljs-keyword">from</span> diffusers <span class="hljs-keyword">import</span> StableDiffusionPipeline | |
| <span class="hljs-keyword">def</span> <span class="hljs-title function_">decode_worker</span>(<span class="hljs-params">q: queue.Queue</span>): | |
| <span class="hljs-keyword">while</span> <span class="hljs-literal">True</span>: | |
| item = q.get() | |
| <span class="hljs-keyword">if</span> item <span class="hljs-keyword">is</span> <span class="hljs-literal">None</span>: | |
| <span class="hljs-keyword">break</span> | |
| image = remote_decode( | |
| endpoint=<span class="hljs-string">"https://q1bj3bpq6kzilnsu.us-east-1.aws.endpoints.huggingface.cloud/"</span>, | |
| tensor=item, | |
| scaling_factor=<span class="hljs-number">0.18215</span>, | |
| ) | |
| display(image) | |
| q.task_done() | |
| q = queue.Queue() | |
| thread = threading.Thread(target=decode_worker, args=(q,), daemon=<span class="hljs-literal">True</span>) | |
| thread.start() | |
| <span class="hljs-keyword">def</span> <span class="hljs-title function_">decode</span>(<span class="hljs-params">latent: torch.Tensor</span>): | |
| q.put(latent) | |
| prompts = [ | |
| <span class="hljs-string">"Blueberry ice cream, in a stylish modern glass , ice cubes, nuts, mint leaves, splashing milk cream, in a gradient purple background, fluid motion, dynamic movement, cinematic lighting, Mysterious"</span>, | |
| <span class="hljs-string">"Lemonade in a glass, mint leaves, in an aqua and white background, flowers, ice cubes, halo, fluid motion, dynamic movement, soft lighting, digital painting, rule of thirds composition, Art by Greg rutkowski, Coby whitmore"</span>, | |
| <span class="hljs-string">"Comic book art, beautiful, vintage, pastel neon colors, extremely detailed pupils, delicate features, light on face, slight smile, Artgerm, Mary Blair, Edmund Dulac, long dark locks, bangs, glowing, fashionable style, fairytale ambience, hot pink."</span>, | |
| <span class="hljs-string">"Masterpiece, vanilla cone ice cream garnished with chocolate syrup, crushed nuts, choco flakes, in a brown background, gold, cinematic lighting, Art by WLOP"</span>, | |
| <span class="hljs-string">"A bowl of milk, falling cornflakes, berries, blueberries, in a white background, soft lighting, intricate details, rule of thirds, octane render, volumetric lighting"</span>, | |
| <span class="hljs-string">"Cold Coffee with cream, crushed almonds, in a glass, choco flakes, ice cubes, wet, in a wooden background, cinematic lighting, hyper realistic painting, art by Carne Griffiths, octane render, volumetric lighting, fluid motion, dynamic movement, muted colors,"</span>, | |
| ] | |
| pipe = StableDiffusionPipeline.from_pretrained( | |
| <span class="hljs-string">"Lykon/dreamshaper-8"</span>, | |
| torch_dtype=torch.float16, | |
| vae=<span class="hljs-literal">None</span>, | |
| ).to(<span class="hljs-string">"cuda"</span>) | |
| pipe.unet = pipe.unet.to(memory_format=torch.channels_last) | |
| pipe.unet = torch.<span class="hljs-built_in">compile</span>(pipe.unet, mode=<span class="hljs-string">"reduce-overhead"</span>, fullgraph=<span class="hljs-literal">True</span>) | |
| _ = pipe( | |
| prompt=prompts[<span class="hljs-number">0</span>], | |
| output_type=<span class="hljs-string">"latent"</span>, | |
| ) | |
| <span class="hljs-keyword">for</span> prompt <span class="hljs-keyword">in</span> prompts: | |
| latent = pipe( | |
| prompt=prompt, | |
| output_type=<span class="hljs-string">"latent"</span>, | |
| ).images | |
| decode(latent) | |
| q.put(<span class="hljs-literal">None</span>) | |
| thread.join()<!-- HTML_TAG_END --></pre></div></details> <figure class="image flex flex-col items-center justify-center text-center m-0 w-full" data-svelte-h="svelte-2wd01h"><video alt="queue.mp4" autoplay="" loop="" autobuffer="" muted="" playsinline=""><source src="https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/blog/remote_vae/queue.mp4" type="video/mp4"></video></figure> <h2 class="relative group"><a id="integrations" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#integrations"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>Integrations</span></h2> <ul data-svelte-h="svelte-1lxpzv1"><li><strong><a href="https://github.com/vladmandic/sdnext" rel="nofollow">SD.Next</a>:</strong> All-in-one UI with direct support for Hybrid Inference.</li> <li><strong><a href="https://github.com/kijai/ComfyUI-HFRemoteVae" rel="nofollow">ComfyUI-HFRemoteVae</a>:</strong> ComfyUI node for Hybrid Inference.</li></ul> <a class="!text-gray-400 !no-underline text-sm flex items-center not-prose mt-4" href="https://github.com/huggingface/diffusers/blob/main/docs/source/en/hybrid_inference/vae_decode.md" 
target="_blank"><svg class="mr-1" xmlns="http://www.w3.org/2000/svg" aria-hidden="true" fill="currentColor" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M31,16l-7,7l-1.41-1.41L28.17,16l-5.58-5.59L24,9l7,7z"></path><path d="M1,16l7-7l1.41,1.41L3.83,16l5.58,5.59L8,23l-7-7z"></path><path d="M12.419,25.484L17.639,6.552l1.932,0.518L14.351,26.002z"></path></svg> <span data-svelte-h="svelte-zjs2n5"><span class="underline">Update</span> on GitHub</span></a> <p></p> | |
| <script> | |
| { | |
| __sveltekit_kxvna0 = { | |
| assets: "/docs/diffusers/pr_11739/en", | |
| base: "/docs/diffusers/pr_11739/en", | |
| env: {} | |
| }; | |
| const element = document.currentScript.parentElement; | |
| const data = [null,null]; | |
| Promise.all([ | |
| import("/docs/diffusers/pr_11739/en/_app/immutable/entry/start.56da5b2b.js"), | |
| import("/docs/diffusers/pr_11739/en/_app/immutable/entry/app.794fd245.js") | |
| ]).then(([kit, app]) => { | |
| kit.start(app, element, { | |
| node_ids: [0, 260], | |
| data, | |
| form: null, | |
| error: null | |
| }); | |
| }); | |
| } | |
| </script> | |
Xet Storage Details
- Size:
- 45.7 kB
- Xet hash:
- f7b35434c47170339107a72da1648f1f19a1e3544bc6516c7643183a40ac38e0
·
Xet efficiently stores files, intelligently splitting them into unique chunks and accelerating uploads and downloads. More info.