Buckets:

rtrm's picture
download
raw
13.1 kB
// NOTE(review): Auto-generated, minified SvelteKit component bundle — the compiled
// documentation page for diffusers' FlowMatchEulerDiscreteScheduler. Do not hand-edit;
// regenerate it from the markdown source instead (see the EditOnGithub source URL below:
// docs/source/en/api/schedulers/flow_match_euler_discrete.md).
// Structure: `Me` is the compiled render fragment (create/hydrate/mount/update/destroy
// hooks for the docstring DIVs and Docstring/Heading child components), `ye` is the
// page-metadata JSON string, `Te` is the module init hook, and `qe` is the exported
// SvelteComponent subclass.
// CAUTION: several template literals below contain embedded newlines that are part of
// runtime strings (docstring HTML, return descriptions); any reformatting, re-indenting,
// or comment insertion inside them would change the rendered output.
import{s as xe,n as $e,o as we}from"../chunks/scheduler.8c3d61f6.js";import{S as De,i as Ee,g as a,s as o,r as g,A as Se,h as l,f as r,c as i,j as O,u as _,x as P,k as L,y as t,a as d,v,d as b,t as x,w as $}from"../chunks/index.da70eac4.js";import{D as G}from"../chunks/Docstring.6b390b9a.js";import{H as be,E as Fe}from"../chunks/EditOnGithub.1e64e623.js";function Me(ue){let u,j,V,z,w,R,D,pe='<code>FlowMatchEulerDiscreteScheduler</code> is based on the flow-matching sampling introduced in <a href="https://arxiv.org/abs/2403.03206" rel="nofollow">Stable Diffusion 3</a>.',W,E,B,s,S,te,q,fe="Euler scheduler.",re,N,me=`This model inherits from <a href="/docs/diffusers/pr_10167/en/api/schedulers/overview#diffusers.SchedulerMixin">SchedulerMixin</a> and <a href="/docs/diffusers/pr_10167/en/api/configuration#diffusers.ConfigMixin">ConfigMixin</a>. Check the superclass documentation for the generic
methods the library implements for all schedulers such as loading and saving.`,se,p,F,ne,I,he="Forward process in flow-matching",oe,f,M,ie,k,ge="Sets the begin index for the scheduler. This function should be run from pipeline before the inference.",ae,m,y,le,H,_e="Sets the discrete timesteps used for the diffusion chain (to be run before inference).",ce,h,T,de,A,ve=`Predict the sample from the previous timestep by reversing the SDE. This function propagates the diffusion
process from the learned model outputs (most often the predicted noise).`,J,C,K,U,Q;return w=new be({props:{title:"FlowMatchEulerDiscreteScheduler",local:"flowmatcheulerdiscretescheduler",headingTag:"h1"}}),E=new be({props:{title:"FlowMatchEulerDiscreteScheduler",local:"diffusers.FlowMatchEulerDiscreteScheduler",headingTag:"h2"}}),S=new G({props:{name:"class diffusers.FlowMatchEulerDiscreteScheduler",anchor:"diffusers.FlowMatchEulerDiscreteScheduler",parameters:[{name:"num_train_timesteps",val:": int = 1000"},{name:"shift",val:": float = 1.0"},{name:"use_dynamic_shifting",val:" = False"},{name:"base_shift",val:": typing.Optional[float] = 0.5"},{name:"max_shift",val:": typing.Optional[float] = 1.15"},{name:"base_image_seq_len",val:": typing.Optional[int] = 256"},{name:"max_image_seq_len",val:": typing.Optional[int] = 4096"},{name:"invert_sigmas",val:": bool = False"},{name:"use_karras_sigmas",val:": typing.Optional[bool] = False"},{name:"use_exponential_sigmas",val:": typing.Optional[bool] = False"},{name:"use_beta_sigmas",val:": typing.Optional[bool] = False"}],parametersDescription:[{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.num_train_timesteps",description:`<strong>num_train_timesteps</strong> (<code>int</code>, defaults to 1000) &#x2014;
The number of diffusion steps to train the model.`,name:"num_train_timesteps"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.timestep_spacing",description:`<strong>timestep_spacing</strong> (<code>str</code>, defaults to <code>&quot;linspace&quot;</code>) &#x2014;
The way the timesteps should be scaled. Refer to Table 2 of the <a href="https://huggingface.co/papers/2305.08891" rel="nofollow">Common Diffusion Noise Schedules and
Sample Steps are Flawed</a> for more information.`,name:"timestep_spacing"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.shift",description:`<strong>shift</strong> (<code>float</code>, defaults to 1.0) &#x2014;
The shift value for the timestep schedule.`,name:"shift"}],source:"https://github.com/huggingface/diffusers/blob/vr_10167/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py#L47"}}),F=new G({props:{name:"scale_noise",anchor:"diffusers.FlowMatchEulerDiscreteScheduler.scale_noise",parameters:[{name:"sample",val:": FloatTensor"},{name:"timestep",val:": typing.Union[float, torch.FloatTensor]"},{name:"noise",val:": typing.Optional[torch.FloatTensor] = None"}],parametersDescription:[{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.scale_noise.sample",description:`<strong>sample</strong> (<code>torch.FloatTensor</code>) &#x2014;
The input sample.`,name:"sample"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.scale_noise.timestep",description:`<strong>timestep</strong> (<code>int</code>, <em>optional</em>) &#x2014;
The current timestep in the diffusion chain.`,name:"timestep"}],source:"https://github.com/huggingface/diffusers/blob/vr_10167/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py#L130",returnDescription:`<script context="module">export const metadata = 'undefined';<\/script>
<p>A scaled input sample.</p>
`,returnType:`<script context="module">export const metadata = 'undefined';<\/script>
<p><code>torch.FloatTensor</code></p>
`}}),M=new G({props:{name:"set_begin_index",anchor:"diffusers.FlowMatchEulerDiscreteScheduler.set_begin_index",parameters:[{name:"begin_index",val:": int = 0"}],parametersDescription:[{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.set_begin_index.begin_index",description:`<strong>begin_index</strong> (<code>int</code>) &#x2014;
The begin index for the scheduler.`,name:"begin_index"}],source:"https://github.com/huggingface/diffusers/blob/vr_10167/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py#L120"}}),y=new G({props:{name:"set_timesteps",anchor:"diffusers.FlowMatchEulerDiscreteScheduler.set_timesteps",parameters:[{name:"num_inference_steps",val:": int = None"},{name:"device",val:": typing.Union[str, torch.device] = None"},{name:"sigmas",val:": typing.Optional[typing.List[float]] = None"},{name:"mu",val:": typing.Optional[float] = None"}],parametersDescription:[{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.set_timesteps.num_inference_steps",description:`<strong>num_inference_steps</strong> (<code>int</code>) &#x2014;
The number of diffusion steps used when generating samples with a pre-trained model.`,name:"num_inference_steps"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.set_timesteps.device",description:`<strong>device</strong> (<code>str</code> or <code>torch.device</code>, <em>optional</em>) &#x2014;
The device to which the timesteps should be moved to. If <code>None</code>, the timesteps are not moved.`,name:"device"}],source:"https://github.com/huggingface/diffusers/blob/vr_10167/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py#L184"}}),T=new G({props:{name:"step",anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step",parameters:[{name:"model_output",val:": FloatTensor"},{name:"timestep",val:": typing.Union[float, torch.FloatTensor]"},{name:"sample",val:": FloatTensor"},{name:"s_churn",val:": float = 0.0"},{name:"s_tmin",val:": float = 0.0"},{name:"s_tmax",val:": float = inf"},{name:"s_noise",val:": float = 1.0"},{name:"generator",val:": typing.Optional[torch._C.Generator] = None"},{name:"return_dict",val:": bool = True"}],parametersDescription:[{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.model_output",description:`<strong>model_output</strong> (<code>torch.FloatTensor</code>) &#x2014;
The direct output from learned diffusion model.`,name:"model_output"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.timestep",description:`<strong>timestep</strong> (<code>float</code>) &#x2014;
The current discrete timestep in the diffusion chain.`,name:"timestep"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.sample",description:`<strong>sample</strong> (<code>torch.FloatTensor</code>) &#x2014;
A current instance of a sample created by the diffusion process.`,name:"sample"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.s_churn",description:"<strong>s_churn</strong> (<code>float</code>) &#x2014;",name:"s_churn"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.s_tmin",description:"<strong>s_tmin</strong> (<code>float</code>) &#x2014;",name:"s_tmin"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.s_tmax",description:"<strong>s_tmax</strong> (<code>float</code>) &#x2014;",name:"s_tmax"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.s_noise",description:`<strong>s_noise</strong> (<code>float</code>, defaults to 1.0) &#x2014;
Scaling factor for noise added to the sample.`,name:"s_noise"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.generator",description:`<strong>generator</strong> (<code>torch.Generator</code>, <em>optional</em>) &#x2014;
A random number generator.`,name:"generator"},{anchor:"diffusers.FlowMatchEulerDiscreteScheduler.step.return_dict",description:`<strong>return_dict</strong> (<code>bool</code>) &#x2014;
Whether or not to return a <a href="/docs/diffusers/pr_10167/en/api/schedulers/euler#diffusers.schedulers.scheduling_euler_discrete.EulerDiscreteSchedulerOutput">EulerDiscreteSchedulerOutput</a> or
tuple.`,name:"return_dict"}],source:"https://github.com/huggingface/diffusers/blob/vr_10167/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py#L265",returnDescription:`<script context="module">export const metadata = 'undefined';<\/script>
<p>If return_dict is <code>True</code>, <a
href="/docs/diffusers/pr_10167/en/api/schedulers/euler#diffusers.schedulers.scheduling_euler_discrete.EulerDiscreteSchedulerOutput"
>EulerDiscreteSchedulerOutput</a> is
returned, otherwise a tuple is returned where the first element is the sample tensor.</p>
`,returnType:`<script context="module">export const metadata = 'undefined';<\/script>
<p><a
href="/docs/diffusers/pr_10167/en/api/schedulers/euler#diffusers.schedulers.scheduling_euler_discrete.EulerDiscreteSchedulerOutput"
>EulerDiscreteSchedulerOutput</a> or <code>tuple</code></p>
`}}),C=new Fe({props:{source:"https://github.com/huggingface/diffusers/blob/main/docs/source/en/api/schedulers/flow_match_euler_discrete.md"}}),{c(){u=a("meta"),j=o(),V=a("p"),z=o(),g(w.$$.fragment),R=o(),D=a("p"),D.innerHTML=pe,W=o(),g(E.$$.fragment),B=o(),s=a("div"),g(S.$$.fragment),te=o(),q=a("p"),q.textContent=fe,re=o(),N=a("p"),N.innerHTML=me,se=o(),p=a("div"),g(F.$$.fragment),ne=o(),I=a("p"),I.textContent=he,oe=o(),f=a("div"),g(M.$$.fragment),ie=o(),k=a("p"),k.textContent=ge,ae=o(),m=a("div"),g(y.$$.fragment),le=o(),H=a("p"),H.textContent=_e,ce=o(),h=a("div"),g(T.$$.fragment),de=o(),A=a("p"),A.textContent=ve,J=o(),g(C.$$.fragment),K=o(),U=a("p"),this.h()},l(e){const n=Se("svelte-u9bgzb",document.head);u=l(n,"META",{name:!0,content:!0}),n.forEach(r),j=i(e),V=l(e,"P",{}),O(V).forEach(r),z=i(e),_(w.$$.fragment,e),R=i(e),D=l(e,"P",{"data-svelte-h":!0}),P(D)!=="svelte-j7byiu"&&(D.innerHTML=pe),W=i(e),_(E.$$.fragment,e),B=i(e),s=l(e,"DIV",{class:!0});var c=O(s);_(S.$$.fragment,c),te=i(c),q=l(c,"P",{"data-svelte-h":!0}),P(q)!=="svelte-rqsn3u"&&(q.textContent=fe),re=i(c),N=l(c,"P",{"data-svelte-h":!0}),P(N)!=="svelte-ukne4n"&&(N.innerHTML=me),se=i(c),p=l(c,"DIV",{class:!0});var X=O(p);_(F.$$.fragment,X),ne=i(X),I=l(X,"P",{"data-svelte-h":!0}),P(I)!=="svelte-1nqwaax"&&(I.textContent=he),X.forEach(r),oe=i(c),f=l(c,"DIV",{class:!0});var Y=O(f);_(M.$$.fragment,Y),ie=i(Y),k=l(Y,"P",{"data-svelte-h":!0}),P(k)!=="svelte-1k141rk"&&(k.textContent=ge),Y.forEach(r),ae=i(c),m=l(c,"DIV",{class:!0});var Z=O(m);_(y.$$.fragment,Z),le=i(Z),H=l(Z,"P",{"data-svelte-h":!0}),P(H)!=="svelte-1vzm9q"&&(H.textContent=_e),Z.forEach(r),ce=i(c),h=l(c,"DIV",{class:!0});var ee=O(h);_(T.$$.fragment,ee),de=i(ee),A=l(ee,"P",{"data-svelte-h":!0}),P(A)!=="svelte-hi84tp"&&(A.textContent=ve),ee.forEach(r),c.forEach(r),J=i(e),_(C.$$.fragment,e),K=i(e),U=l(e,"P",{}),O(U).forEach(r),this.h()},h(){L(u,"name","hf:doc:metadata"),L(u,"content",ye),L(p,"class","docstring border-l-2 border-t-2 pl-4 pt-3.5 
border-gray-100 rounded-tl-xl mb-6 mt-8"),L(f,"class","docstring border-l-2 border-t-2 pl-4 pt-3.5 border-gray-100 rounded-tl-xl mb-6 mt-8"),L(m,"class","docstring border-l-2 border-t-2 pl-4 pt-3.5 border-gray-100 rounded-tl-xl mb-6 mt-8"),L(h,"class","docstring border-l-2 border-t-2 pl-4 pt-3.5 border-gray-100 rounded-tl-xl mb-6 mt-8"),L(s,"class","docstring border-l-2 border-t-2 pl-4 pt-3.5 border-gray-100 rounded-tl-xl mb-6 mt-8")},m(e,n){t(document.head,u),d(e,j,n),d(e,V,n),d(e,z,n),v(w,e,n),d(e,R,n),d(e,D,n),d(e,W,n),v(E,e,n),d(e,B,n),d(e,s,n),v(S,s,null),t(s,te),t(s,q),t(s,re),t(s,N),t(s,se),t(s,p),v(F,p,null),t(p,ne),t(p,I),t(s,oe),t(s,f),v(M,f,null),t(f,ie),t(f,k),t(s,ae),t(s,m),v(y,m,null),t(m,le),t(m,H),t(s,ce),t(s,h),v(T,h,null),t(h,de),t(h,A),d(e,J,n),v(C,e,n),d(e,K,n),d(e,U,n),Q=!0},p:$e,i(e){Q||(b(w.$$.fragment,e),b(E.$$.fragment,e),b(S.$$.fragment,e),b(F.$$.fragment,e),b(M.$$.fragment,e),b(y.$$.fragment,e),b(T.$$.fragment,e),b(C.$$.fragment,e),Q=!0)},o(e){x(w.$$.fragment,e),x(E.$$.fragment,e),x(S.$$.fragment,e),x(F.$$.fragment,e),x(M.$$.fragment,e),x(y.$$.fragment,e),x(T.$$.fragment,e),x(C.$$.fragment,e),Q=!1},d(e){e&&(r(j),r(V),r(z),r(R),r(D),r(W),r(B),r(s),r(J),r(K),r(U)),r(u),$(w,e),$(E,e),$(S),$(F),$(M),$(y),$(T),$(C,e)}}}const ye='{"title":"FlowMatchEulerDiscreteScheduler","local":"flowmatcheulerdiscretescheduler","sections":[{"title":"FlowMatchEulerDiscreteScheduler","local":"diffusers.FlowMatchEulerDiscreteScheduler","sections":[],"depth":2}],"depth":1}';function Te(ue){return we(()=>{new URLSearchParams(window.location.search).get("fw")}),[]}class qe extends De{constructor(u){super(),Ee(this,u,Te,Me,xe,{})}}export{qe as component};

Xet Storage Details

Size:
13.1 kB
·
Xet hash:
cf016364367a389f6bbd60678dd2c47035e8bb1c1b1ece3880289a5b007a8a1a

Xet stores files efficiently by splitting them into unique, deduplicated chunks, which accelerates both uploads and downloads. More info.