From 690adb5bd9e7bcafb93337e7229c6ba4c5cb12b6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?apolin=C3=A1rio?=
Date: Sat, 19 Apr 2025 19:48:16 +0200
Subject: [PATCH 1/7] Add stochastic sampling to FlowMatchEulerDiscreteScheduler

This PR adds stochastic sampling to FlowMatchEulerDiscreteScheduler based on
https://github.com/Lightricks/LTX-Video/commit/b1aeddd7ccac85e6d1b0d97762610ddb53c1b408
ltx_video/schedulers/rf.py
---
 .../scheduling_flow_match_euler_discrete.py   | 29 ++++++++++++++++---
 1 file changed, 25 insertions(+), 4 deletions(-)

diff --git a/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py b/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
index cbb27e5fad63..0c1d6f5f8ca2 100644
--- a/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
+++ b/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
@@ -80,6 +80,8 @@ class FlowMatchEulerDiscreteScheduler(SchedulerMixin, ConfigMixin):
             Whether to use beta sigmas for step sizes in the noise schedule during sampling.
         time_shift_type (`str`, defaults to "exponential"):
             The type of dynamic resolution-dependent timestep shifting to apply. Either "exponential" or "linear".
+        stochastic_sampling (`bool`, defaults to False):
+            Whether to use stochastic sampling.
     """
 
     _compatibles = []
@@ -101,6 +103,7 @@ def __init__(
         use_exponential_sigmas: Optional[bool] = False,
         use_beta_sigmas: Optional[bool] = False,
         time_shift_type: str = "exponential",
+        stochastic_sampling: bool = False,
     ):
         if self.config.use_beta_sigmas and not is_scipy_available():
             raise ImportError("Make sure to install scipy if you want to use beta sigmas.")
@@ -378,6 +381,7 @@ def step(
         s_noise: float = 1.0,
         generator: Optional[torch.Generator] = None,
         per_token_timesteps: Optional[torch.Tensor] = None,
+        stochastic_sampling: Optional[bool] = None,
         return_dict: bool = True,
     ) -> Union[FlowMatchEulerDiscreteSchedulerOutput, Tuple]:
         """
@@ -400,6 +404,8 @@ def step(
                 A random number generator.
             per_token_timesteps (`torch.Tensor`, *optional*):
                 The timesteps for each token in the sample.
+            stochastic_sampling (`bool`, *optional*):
+                Whether to use stochastic sampling. If None, defaults to the value set in the scheduler's config.
             return_dict (`bool`):
                 Whether or not to return a
                 [`~schedulers.scheduling_flow_match_euler_discrete.FlowMatchEulerDiscreteSchedulerOutput`] or tuple.
@@ -437,13 +443,28 @@ def step(
             lower_mask = sigmas < per_token_sigmas[None] - 1e-6
             lower_sigmas = lower_mask * sigmas
             lower_sigmas, _ = lower_sigmas.max(dim=0)
-            dt = (per_token_sigmas - lower_sigmas)[..., None]
+
+            current_sigma = per_token_sigmas[..., None]
+            next_sigma = lower_sigmas[..., None]
+            dt = next_sigma - current_sigma # Equivalent to sigma_next - sigma
         else:
-            sigma = self.sigmas[self.step_index]
-            sigma_next = self.sigmas[self.step_index + 1]
+            sigma_idx = self.step_index
+            sigma = self.sigmas[sigma_idx]
+            sigma_next = self.sigmas[sigma_idx + 1]
+
+            current_sigma = sigma
+            next_sigma = sigma_next
             dt = sigma_next - sigma
 
-        prev_sample = sample + dt * model_output
+        # Determine whether to use stochastic sampling for this step
+        use_stochastic = stochastic_sampling if stochastic_sampling is not None else self.config.stochastic_sampling
+
+        if use_stochastic:
+            x0 = sample - current_sigma * model_output
+            noise = torch.randn_like(sample)
+            prev_sample = (1.0 - next_sigma) * x0 + next_sigma * noise
+        else:
+            prev_sample = sample + dt * model_output
 
         # upon completion increase step index by one
         self._step_index += 1

From f87956e9cf362b9a62bc9bb4911bcd23494f99a1 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
Date: Sat, 19 Apr 2025 19:33:27 +0000
Subject: [PATCH 2/7] Apply style fixes

---
 .../schedulers/scheduling_flow_match_euler_discrete.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py b/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
index 0c1d6f5f8ca2..deedfbca40d8 100644
--- a/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
+++ b/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
@@ -443,10 +443,10 @@ def step(
             lower_mask = sigmas < per_token_sigmas[None] - 1e-6
             lower_sigmas = lower_mask * sigmas
             lower_sigmas, _ = lower_sigmas.max(dim=0)
-
+
             current_sigma = per_token_sigmas[..., None]
             next_sigma = lower_sigmas[..., None]
-            dt = next_sigma - current_sigma # Equivalent to sigma_next - sigma
+            dt = next_sigma - current_sigma  # Equivalent to sigma_next - sigma
         else:
             sigma_idx = self.step_index
             sigma = self.sigmas[sigma_idx]

From 9edc5beddcb2572493ec6ca9a041065e599b80dc Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?apolin=C3=A1rio?=
Date: Mon, 21 Apr 2025 17:40:13 +0200
Subject: [PATCH 3/7] Use config value directly

---
 .../schedulers/scheduling_flow_match_euler_discrete.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py b/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
index deedfbca40d8..d953f11282bb 100644
--- a/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
+++ b/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
@@ -456,10 +456,8 @@ def step(
             next_sigma = sigma_next
             dt = sigma_next - sigma
 
-        # Determine whether to use stochastic sampling for this step
-        use_stochastic = stochastic_sampling if stochastic_sampling is not None else self.config.stochastic_sampling
-
-        if use_stochastic:
+
+        if self.config.stochastic_sampling:
             x0 = sample - current_sigma * model_output
             noise = torch.randn_like(sample)
             prev_sample = (1.0 - next_sigma) * x0 + next_sigma * noise

From 32d9aef997c6118c022ec4bf9c278c287c20ff36 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
Date: Mon, 21 Apr 2025 15:42:31 +0000
Subject: [PATCH 4/7] Apply style fixes

---
 src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py b/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
index d953f11282bb..700e924f5c61 100644
--- a/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
+++ b/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
@@ -456,7 +456,6 @@ def step(
             next_sigma = sigma_next
             dt = sigma_next - sigma
 
-
         if self.config.stochastic_sampling:
             x0 = sample - current_sigma * model_output
             noise = torch.randn_like(sample)

From 9c35a8992193574c09d0817093e13ff7c5c8145c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?apolin=C3=A1rio?=
Date: Mon, 21 Apr 2025 23:31:04 +0200
Subject: [PATCH 5/7] Swap order

---
 .../schedulers/scheduling_flow_match_euler_discrete.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py b/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
index 700e924f5c61..f622f4217cf7 100644
--- a/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
+++ b/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
@@ -446,7 +446,7 @@ def step(
 
             current_sigma = per_token_sigmas[..., None]
             next_sigma = lower_sigmas[..., None]
-            dt = next_sigma - current_sigma  # Equivalent to sigma_next - sigma
+            dt = current_sigma - next_sigma
         else:
             sigma_idx = self.step_index
             sigma = self.sigmas[sigma_idx]

From 25bc77d8f88ec970916e66c81854e2b2371ceb2b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?apolin=C3=A1rio?=
Date: Mon, 21 Apr 2025 23:34:24 +0200
Subject: [PATCH 6/7] Update src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py

Co-authored-by: YiYi Xu
---
 .../schedulers/scheduling_flow_match_euler_discrete.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py b/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
index f622f4217cf7..b1eddac5b094 100644
--- a/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
+++ b/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
@@ -404,8 +404,6 @@ def step(
                 A random number generator.
             per_token_timesteps (`torch.Tensor`, *optional*):
                 The timesteps for each token in the sample.
-            stochastic_sampling (`bool`, *optional*):
-                Whether to use stochastic sampling. If None, defaults to the value set in the scheduler's config.
             return_dict (`bool`):
                 Whether or not to return a
                 [`~schedulers.scheduling_flow_match_euler_discrete.FlowMatchEulerDiscreteSchedulerOutput`] or tuple.

From ff1012f8cfcd1adee7ca880bb3121c505376610a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?apolin=C3=A1rio?=
Date: Mon, 21 Apr 2025 23:34:30 +0200
Subject: [PATCH 7/7] Update src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py

Co-authored-by: YiYi Xu
---
 src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py b/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
index b1eddac5b094..575423ee80e7 100644
--- a/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
+++ b/src/diffusers/schedulers/scheduling_flow_match_euler_discrete.py
@@ -381,7 +381,6 @@ def step(
         s_noise: float = 1.0,
         generator: Optional[torch.Generator] = None,
         per_token_timesteps: Optional[torch.Tensor] = None,
-        stochastic_sampling: Optional[bool] = None,
         return_dict: bool = True,
     ) -> Union[FlowMatchEulerDiscreteSchedulerOutput, Tuple]:
         """
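
Note on usage: after this series, stochastic sampling is controlled only by the scheduler
config, since the per-call step() override added in patch 1/7 is removed again in patches
6/7 and 7/7. The snippet below is a minimal, hypothetical sketch of enabling the flag,
assuming a diffusers build that includes these patches; the latent shape and the zero
model output are placeholders and are not part of the patch.

    import torch
    from diffusers import FlowMatchEulerDiscreteScheduler

    # Enable the new config flag introduced in patch 1/7; all other defaults are unchanged.
    scheduler = FlowMatchEulerDiscreteScheduler(stochastic_sampling=True)
    scheduler.set_timesteps(num_inference_steps=4)

    sample = torch.randn(1, 4, 8, 8)  # placeholder latents
    for t in scheduler.timesteps:
        model_output = torch.zeros_like(sample)  # placeholder for the denoising model's prediction
        # With stochastic_sampling=True, step() predicts x0 = sample - sigma * model_output
        # and re-noises it: prev_sample = (1 - sigma_next) * x0 + sigma_next * noise.
        sample = scheduler.step(model_output, t, sample).prev_sample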