Buckets:

rtrm's picture
download
raw
18.6 kB
// NOTE(review): Auto-generated, minified Svelte build artifact — the compiled docs-page
// component for diffusers' `FlowMatchEulerDiscreteScheduler` API reference
// (source markdown: docs/source/en/api/schedulers/flow_match_euler_discrete.md).
// Do not edit by hand: the single-letter identifiers are compiler output, and the
// newlines below are INSIDE template literals (they are part of the rendered HTML
// strings), so reflowing or reformatting this file changes page content.
// Structure: chunk imports; `Ie` = create/claim/mount/update/destroy render fragment;
// `He` = page-metadata JSON; `Ve` = instance init; `Ue` = the SvelteComponent,
// exported as `component`. Regenerate via the docs build instead of patching here.
import{s as Ce,n as Le,o as Oe}from"../chunks/scheduler.8c3d61f6.js";import{S as Pe,i as ke,g as i,s as n,r as h,A as qe,h as a,f as r,c as o,j as F,u as f,x as p,k as E,y as t,a as d,v as g,d as _,t as x,w as v}from"../chunks/index.da70eac4.js";import{D as G}from"../chunks/Docstring.567bc132.js";import{H as Te,E as Ne}from"../chunks/index.5d4ab994.js";function Ie(ve){let m,K,R,B,y,J,M,be='<code>FlowMatchEulerDiscreteScheduler</code> is based on the flow-matching sampling introduced in <a href="https://arxiv.org/abs/2403.03206" rel="nofollow">Stable Diffusion 3</a>.',Q,S,Y,s,T,ie,N,we="Euler scheduler.",ae,I,$e=`This model inherits from <a href="/docs/diffusers/pr_11234/en/api/schedulers/overview#diffusers.SchedulerMixin">SchedulerMixin</a> and <a href="/docs/diffusers/pr_11234/en/api/configuration#diffusers.ConfigMixin">ConfigMixin</a>. Check the superclass documentation for the generic
methods the library implements for all schedulers such as loading and saving.`,ce,b,C,le,H,De="Forward process in flow-matching",de,w,L,ue,V,Fe="Sets the begin index for the scheduler. This function should be run from pipeline before the inference.",me,$,O,he,z,Ee="Sets the discrete timesteps used for the diffusion chain (to be run before inference).",fe,D,P,pe,A,ye=`Predict the sample from the previous timestep by reversing the SDE. This function propagates the diffusion
process from the learned model outputs (most often the predicted noise).`,ge,u,k,_e,W,Me=`Stretches and shifts the timestep schedule to ensure it terminates at the configured <code>shift_terminal</code> config
value.`,xe,j,Se=`Reference:
<a href="https://github.com/Lightricks/LTX-Video/blob/a01a171f8fe3d99dce2728d60a73fecf4d4238ae/ltx_video/schedulers/rf.py#L51" rel="nofollow">https://github.com/Lightricks/LTX-Video/blob/a01a171f8fe3d99dce2728d60a73fecf4d4238ae/ltx_video/schedulers/rf.py#L51</a>`,Z,q,ee,X,te;return y=new Te({props:{title:"FlowMatchEulerDiscreteScheduler",local:"flowmatcheulerdiscretescheduler",headingTag:"h1"}}),S=new Te({props:{title:"FlowMatchEulerDiscreteScheduler",local:"diffusers.FlowMatchEulerDiscreteScheduler",headingTag:"h2"}}),T=new G({props:{name:"class diffusers.FlowMatchEulerDiscreteScheduler",anchor:"diffusers.FlowMatchEulerDiscreteScheduler",parameters:[{name:"num_train_timesteps",val:": int = 1000"},{name:"shift",val:": float = 1.0"},{name:"use_dynamic_shifting",val:": bool = False"},{name:"base_shift",val:": typing.Optional[float] = 0.5"},{name:"max_shift",val:": typing.Optional[float] = 1.15"},{name:"base_image_seq_len",val:": typing.Optional[int] = 256"},{name:"max_image_seq_len",val:": typing.Optional[int] = 4096"},{name:"invert_sigmas",val:": bool = False"},{name:"shift_terminal",val:": typing.Optional[float] = None"},{name:"use_karras_sigmas",val:": typing.Optional[bool] = False"},{name:"use_exponential_sigmas",val:": typing.Optional[bool] = False"},{name:"use_beta_sigmas",val:": typing.Optional[bool] = False"},{name:"time_shift_type",val:": str = 'exponential'"}],parametersDescription:[{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.num_train_timesteps",description:`<strong>num_train_timesteps</strong> (<code>int</code>, defaults to 1000) &#x2014;
The number of diffusion steps to train the model.`,name:"num_train_timesteps"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.shift",description:`<strong>shift</strong> (<code>float</code>, defaults to 1.0) &#x2014;
The shift value for the timestep schedule.`,name:"shift"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.use_dynamic_shifting",description:`<strong>use_dynamic_shifting</strong> (<code>bool</code>, defaults to False) &#x2014;
Whether to apply timestep shifting on-the-fly based on the image resolution.`,name:"use_dynamic_shifting"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.base_shift",description:`<strong>base_shift</strong> (<code>float</code>, defaults to 0.5) &#x2014;
Value to stabilize image generation. Increasing <code>base_shift</code> reduces variation and image is more consistent
with desired output.`,name:"base_shift"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.max_shift",description:`<strong>max_shift</strong> (<code>float</code>, defaults to 1.15) &#x2014;
Value change allowed to latent vectors. Increasing <code>max_shift</code> encourages more variation and image may be
more exaggerated or stylized.`,name:"max_shift"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.base_image_seq_len",description:`<strong>base_image_seq_len</strong> (<code>int</code>, defaults to 256) &#x2014;
The base image sequence length.`,name:"base_image_seq_len"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.max_image_seq_len",description:`<strong>max_image_seq_len</strong> (<code>int</code>, defaults to 4096) &#x2014;
The maximum image sequence length.`,name:"max_image_seq_len"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.invert_sigmas",description:`<strong>invert_sigmas</strong> (<code>bool</code>, defaults to False) &#x2014;
Whether to invert the sigmas.`,name:"invert_sigmas"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.shift_terminal",description:`<strong>shift_terminal</strong> (<code>float</code>, defaults to None) &#x2014;
The end value of the shifted timestep schedule.`,name:"shift_terminal"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.use_karras_sigmas",description:`<strong>use_karras_sigmas</strong> (<code>bool</code>, defaults to False) &#x2014;
Whether to use Karras sigmas for step sizes in the noise schedule during sampling.`,name:"use_karras_sigmas"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.use_exponential_sigmas",description:`<strong>use_exponential_sigmas</strong> (<code>bool</code>, defaults to False) &#x2014;
Whether to use exponential sigmas for step sizes in the noise schedule during sampling.`,name:"use_exponential_sigmas"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.use_beta_sigmas",description:`<strong>use_beta_sigmas</strong> (<code>bool</code>, defaults to False) &#x2014;
Whether to use beta sigmas for step sizes in the noise schedule during sampling.`,name:"use_beta_sigmas"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.time_shift_type",description:`<strong>time_shift_type</strong> (<code>str</code>, defaults to &#x201C;exponential&#x201D;) &#x2014;
The type of dynamic resolution-dependent timestep shifting to apply. Either &#x201C;exponential&#x201D; or &#x201C;linear&#x201D;.`,name:"time_shift_type"}],source:"https://github.com/huggingface/diffusers/blob/vr_11234/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py#L47"}}),C=new G({props:{name:"scale_noise",anchor:"diffusers.FlowMatchEulerDiscreteScheduler.scale_noise",parameters:[{name:"sample",val:": FloatTensor"},{name:"timestep",val:": typing.Union[float, torch.FloatTensor]"},{name:"noise",val:": typing.Optional[torch.FloatTensor] = None"}],parametersDescription:[{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.scale_noise.sample",description:`<strong>sample</strong> (<code>torch.FloatTensor</code>) &#x2014;
The input sample.`,name:"sample"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.scale_noise.timestep",description:`<strong>timestep</strong> (<code>int</code>, <em>optional</em>) &#x2014;
The current timestep in the diffusion chain.`,name:"timestep"}],source:"https://github.com/huggingface/diffusers/blob/vr_11234/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py#L168",returnDescription:`<script context="module">export const metadata = 'undefined';<\/script>
<p>A scaled input sample.</p>
`,returnType:`<script context="module">export const metadata = 'undefined';<\/script>
<p><code>torch.FloatTensor</code></p>
`}}),L=new G({props:{name:"set_begin_index",anchor:"diffusers.FlowMatchEulerDiscreteScheduler.set_begin_index",parameters:[{name:"begin_index",val:": int = 0"}],parametersDescription:[{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.set_begin_index.begin_index",description:`<strong>begin_index</strong> (<code>int</code>) &#x2014;
The begin index for the scheduler.`,name:"begin_index"}],source:"https://github.com/huggingface/diffusers/blob/vr_11234/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py#L155"}}),O=new G({props:{name:"set_timesteps",anchor:"diffusers.FlowMatchEulerDiscreteScheduler.set_timesteps",parameters:[{name:"num_inference_steps",val:": typing.Optional[int] = None"},{name:"device",val:": typing.Union[str, torch.device] = None"},{name:"sigmas",val:": typing.Optional[typing.List[float]] = None"},{name:"mu",val:": typing.Optional[float] = None"},{name:"timesteps",val:": typing.Optional[typing.List[float]] = None"}],parametersDescription:[{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.set_timesteps.num_inference_steps",description:`<strong>num_inference_steps</strong> (<code>int</code>, <em>optional</em>) &#x2014;
The number of diffusion steps used when generating samples with a pre-trained model.`,name:"num_inference_steps"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.set_timesteps.device",description:`<strong>device</strong> (<code>str</code> or <code>torch.device</code>, <em>optional</em>) &#x2014;
The device to which the timesteps should be moved to. If <code>None</code>, the timesteps are not moved.`,name:"device"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.set_timesteps.sigmas",description:`<strong>sigmas</strong> (<code>List[float]</code>, <em>optional</em>) &#x2014;
Custom values for sigmas to be used for each diffusion step. If <code>None</code>, the sigmas are computed
automatically.`,name:"sigmas"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.set_timesteps.mu",description:`<strong>mu</strong> (<code>float</code>, <em>optional</em>) &#x2014;
Determines the amount of shifting applied to sigmas when performing resolution-dependent timestep
shifting.`,name:"mu"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.set_timesteps.timesteps",description:`<strong>timesteps</strong> (<code>List[float]</code>, <em>optional</em>) &#x2014;
Custom values for timesteps to be used for each diffusion step. If <code>None</code>, the timesteps are computed
automatically.`,name:"timesteps"}],source:"https://github.com/huggingface/diffusers/blob/vr_11234/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py#L246"}}),P=new G({props:{name:"step",anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step",parameters:[{name:"model_output",val:": FloatTensor"},{name:"timestep",val:": typing.Union[float, torch.FloatTensor]"},{name:"sample",val:": FloatTensor"},{name:"s_churn",val:": float = 0.0"},{name:"s_tmin",val:": float = 0.0"},{name:"s_tmax",val:": float = inf"},{name:"s_noise",val:": float = 1.0"},{name:"generator",val:": typing.Optional[torch._C.Generator] = None"},{name:"per_token_timesteps",val:": typing.Optional[torch.Tensor] = None"},{name:"return_dict",val:": bool = True"}],parametersDescription:[{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.model_output",description:`<strong>model_output</strong> (<code>torch.FloatTensor</code>) &#x2014;
The direct output from learned diffusion model.`,name:"model_output"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.timestep",description:`<strong>timestep</strong> (<code>float</code>) &#x2014;
The current discrete timestep in the diffusion chain.`,name:"timestep"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.sample",description:`<strong>sample</strong> (<code>torch.FloatTensor</code>) &#x2014;
A current instance of a sample created by the diffusion process.`,name:"sample"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.s_churn",description:"<strong>s_churn</strong> (<code>float</code>) &#x2014;",name:"s_churn"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.s_tmin",description:"<strong>s_tmin</strong> (<code>float</code>) &#x2014;",name:"s_tmin"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.s_tmax",description:"<strong>s_tmax</strong> (<code>float</code>) &#x2014;",name:"s_tmax"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.s_noise",description:`<strong>s_noise</strong> (<code>float</code>, defaults to 1.0) &#x2014;
Scaling factor for noise added to the sample.`,name:"s_noise"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.generator",description:`<strong>generator</strong> (<code>torch.Generator</code>, <em>optional</em>) &#x2014;
A random number generator.`,name:"generator"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.per_token_timesteps",description:`<strong>per_token_timesteps</strong> (<code>torch.Tensor</code>, <em>optional</em>) &#x2014;
The timesteps for each token in the sample.`,name:"per_token_timesteps"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.return_dict",description:`<strong>return_dict</strong> (<code>bool</code>) &#x2014;
Whether or not to return a
<code>FlowMatchEulerDiscreteSchedulerOutput</code> or tuple.`,name:"return_dict"}],source:"https://github.com/huggingface/diffusers/blob/vr_11234/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py#L370",returnDescription:`<script context="module">export const metadata = 'undefined';<\/script>
<p>If return_dict is <code>True</code>,
<code>FlowMatchEulerDiscreteSchedulerOutput</code> is returned,
otherwise a tuple is returned where the first element is the sample tensor.</p>
`,returnType:`<script context="module">export const metadata = 'undefined';<\/script>
<p><code>FlowMatchEulerDiscreteSchedulerOutput</code> or <code>tuple</code></p>
`}}),k=new G({props:{name:"stretch_shift_to_terminal",anchor:"diffusers.FlowMatchEulerDiscreteScheduler.stretch_shift_to_terminal",parameters:[{name:"t",val:": Tensor"}],parametersDescription:[{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.stretch_shift_to_terminal.t",description:`<strong>t</strong> (<code>torch.Tensor</code>) &#x2014;
A tensor of timesteps to be stretched and shifted.`,name:"t"}],source:"https://github.com/huggingface/diffusers/blob/vr_11234/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py#L225",returnDescription:`<script context="module">export const metadata = 'undefined';<\/script>
<p>A tensor of adjusted timesteps such that the final value equals <code>self.config.shift_terminal</code>.</p>
`,returnType:`<script context="module">export const metadata = 'undefined';<\/script>
<p><code>torch.Tensor</code></p>
`}}),q=new Ne({props:{source:"https://github.com/huggingface/diffusers/blob/main/docs/source/en/api/schedulers/flow_match_euler_discrete.md"}}),{c(){m=i("meta"),K=n(),R=i("p"),B=n(),h(y.$$.fragment),J=n(),M=i("p"),M.innerHTML=be,Q=n(),h(S.$$.fragment),Y=n(),s=i("div"),h(T.$$.fragment),ie=n(),N=i("p"),N.textContent=we,ae=n(),I=i("p"),I.innerHTML=$e,ce=n(),b=i("div"),h(C.$$.fragment),le=n(),H=i("p"),H.textContent=De,de=n(),w=i("div"),h(L.$$.fragment),ue=n(),V=i("p"),V.textContent=Fe,me=n(),$=i("div"),h(O.$$.fragment),he=n(),z=i("p"),z.textContent=Ee,fe=n(),D=i("div"),h(P.$$.fragment),pe=n(),A=i("p"),A.textContent=ye,ge=n(),u=i("div"),h(k.$$.fragment),_e=n(),W=i("p"),W.innerHTML=Me,xe=n(),j=i("p"),j.innerHTML=Se,Z=n(),h(q.$$.fragment),ee=n(),X=i("p"),this.h()},l(e){const c=qe("svelte-u9bgzb",document.head);m=a(c,"META",{name:!0,content:!0}),c.forEach(r),K=o(e),R=a(e,"P",{}),F(R).forEach(r),B=o(e),f(y.$$.fragment,e),J=o(e),M=a(e,"P",{"data-svelte-h":!0}),p(M)!=="svelte-j7byiu"&&(M.innerHTML=be),Q=o(e),f(S.$$.fragment,e),Y=o(e),s=a(e,"DIV",{class:!0});var l=F(s);f(T.$$.fragment,l),ie=o(l),N=a(l,"P",{"data-svelte-h":!0}),p(N)!=="svelte-rqsn3u"&&(N.textContent=we),ae=o(l),I=a(l,"P",{"data-svelte-h":!0}),p(I)!=="svelte-16my9bb"&&(I.innerHTML=$e),ce=o(l),b=a(l,"DIV",{class:!0});var se=F(b);f(C.$$.fragment,se),le=o(se),H=a(se,"P",{"data-svelte-h":!0}),p(H)!=="svelte-1nqwaax"&&(H.textContent=De),se.forEach(r),de=o(l),w=a(l,"DIV",{class:!0});var re=F(w);f(L.$$.fragment,re),ue=o(re),V=a(re,"P",{"data-svelte-h":!0}),p(V)!=="svelte-1k141rk"&&(V.textContent=Fe),re.forEach(r),me=o(l),$=a(l,"DIV",{class:!0});var ne=F($);f(O.$$.fragment,ne),he=o(ne),z=a(ne,"P",{"data-svelte-h":!0}),p(z)!=="svelte-1vzm9q"&&(z.textContent=Ee),ne.forEach(r),fe=o(l),D=a(l,"DIV",{class:!0});var oe=F(D);f(P.$$.fragment,oe),pe=o(oe),A=a(oe,"P",{"data-svelte-h":!0}),p(A)!=="svelte-hi84tp"&&(A.textContent=ye),oe.forEach(r),ge=o(l),u=a(l,"DIV",{class:!0});var 
U=F(u);f(k.$$.fragment,U),_e=o(U),W=a(U,"P",{"data-svelte-h":!0}),p(W)!=="svelte-1mirmbz"&&(W.innerHTML=Me),xe=o(U),j=a(U,"P",{"data-svelte-h":!0}),p(j)!=="svelte-1sj7udg"&&(j.innerHTML=Se),U.forEach(r),l.forEach(r),Z=o(e),f(q.$$.fragment,e),ee=o(e),X=a(e,"P",{}),F(X).forEach(r),this.h()},h(){E(m,"name","hf:doc:metadata"),E(m,"content",He),E(b,"class","docstring border-l-2 border-t-2 pl-4 pt-3.5 border-gray-100 rounded-tl-xl mb-6 mt-8"),E(w,"class","docstring border-l-2 border-t-2 pl-4 pt-3.5 border-gray-100 rounded-tl-xl mb-6 mt-8"),E($,"class","docstring border-l-2 border-t-2 pl-4 pt-3.5 border-gray-100 rounded-tl-xl mb-6 mt-8"),E(D,"class","docstring border-l-2 border-t-2 pl-4 pt-3.5 border-gray-100 rounded-tl-xl mb-6 mt-8"),E(u,"class","docstring border-l-2 border-t-2 pl-4 pt-3.5 border-gray-100 rounded-tl-xl mb-6 mt-8"),E(s,"class","docstring border-l-2 border-t-2 pl-4 pt-3.5 border-gray-100 rounded-tl-xl mb-6 mt-8")},m(e,c){t(document.head,m),d(e,K,c),d(e,R,c),d(e,B,c),g(y,e,c),d(e,J,c),d(e,M,c),d(e,Q,c),g(S,e,c),d(e,Y,c),d(e,s,c),g(T,s,null),t(s,ie),t(s,N),t(s,ae),t(s,I),t(s,ce),t(s,b),g(C,b,null),t(b,le),t(b,H),t(s,de),t(s,w),g(L,w,null),t(w,ue),t(w,V),t(s,me),t(s,$),g(O,$,null),t($,he),t($,z),t(s,fe),t(s,D),g(P,D,null),t(D,pe),t(D,A),t(s,ge),t(s,u),g(k,u,null),t(u,_e),t(u,W),t(u,xe),t(u,j),d(e,Z,c),g(q,e,c),d(e,ee,c),d(e,X,c),te=!0},p:Le,i(e){te||(_(y.$$.fragment,e),_(S.$$.fragment,e),_(T.$$.fragment,e),_(C.$$.fragment,e),_(L.$$.fragment,e),_(O.$$.fragment,e),_(P.$$.fragment,e),_(k.$$.fragment,e),_(q.$$.fragment,e),te=!0)},o(e){x(y.$$.fragment,e),x(S.$$.fragment,e),x(T.$$.fragment,e),x(C.$$.fragment,e),x(L.$$.fragment,e),x(O.$$.fragment,e),x(P.$$.fragment,e),x(k.$$.fragment,e),x(q.$$.fragment,e),te=!1},d(e){e&&(r(K),r(R),r(B),r(J),r(M),r(Q),r(Y),r(s),r(Z),r(ee),r(X)),r(m),v(y,e),v(S,e),v(T),v(C),v(L),v(O),v(P),v(k),v(q,e)}}}const 
He='{"title":"FlowMatchEulerDiscreteScheduler","local":"flowmatcheulerdiscretescheduler","sections":[{"title":"FlowMatchEulerDiscreteScheduler","local":"diffusers.FlowMatchEulerDiscreteScheduler","sections":[],"depth":2}],"depth":1}';function Ve(ve){return Oe(()=>{new URLSearchParams(window.location.search).get("fw")}),[]}class Ue extends Pe{constructor(m){super(),ke(this,m,Ve,Ie,Ce,{})}}export{Ue as component};

Xet Storage Details

Size:
18.6 kB
·
Xet hash:
f25d0c153e4869efe9ba0ac68a829664c0c810154080f5278ff5959fc299cbf0

Xet efficiently stores files, intelligently splitting them into unique chunks and accelerating uploads and downloads. More info.