
Commit

updates
sayakpaul committed Jan 16, 2025
1 parent bd87661 commit fcdb36a
Showing 2 changed files with 4 additions and 3 deletions.
4 changes: 4 additions & 0 deletions finetrainers/trainer.py
@@ -696,6 +696,10 @@ def train(self) -> None:
                 if "pooled_prompt_embeds" in text_conditions:
                     text_conditions["pooled_prompt_embeds"].fill_(0)
 
+                # TODO: Mochi only does: sigmas = torch.rand(latents.shape[0])
+                # It doesn't rely on `sigmas` configured in the scheduler. To handle that, should
+                # Mochi implement its own `prepare_sigmas()` similar to how
+                # `calculate_noisy_latents()` is implemented?
                 sigmas = prepare_sigmas(
                     scheduler=self.scheduler,
                     sigmas=scheduler_sigmas,
3 changes: 0 additions & 3 deletions finetrainers/utils/diffusion_utils.py
@@ -93,9 +93,6 @@ def prepare_sigmas(
     device: torch.device = torch.device("cpu"),
     generator: Optional[torch.Generator] = None,
 ) -> torch.Tensor:
-    # TODO: Mochi only does: sigmas = torch.rand(latents.shape[0])
-    # It doesn't rely on `sigmas` configured in the scheduler. To handle that, should
-    # Mochi implement its own `prepare_sigmas()` similar to how `calculate_noisy_latents()` is implemented?
     if isinstance(scheduler, FlowMatchEulerDiscreteScheduler):
         weights = compute_density_for_timestep_sampling(
             weighting_scheme=flow_weighting_scheme,
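
The moved TODO asks whether Mochi should bypass the scheduler's configured sigmas and instead sample them uniformly per batch element. A minimal sketch of what such a model-specific helper could look like, built directly from the comment's own `sigmas = torch.rand(latents.shape[0])`; the name `prepare_sigmas_mochi` and its signature are hypothetical, not code from the repository:

    # Hypothetical sketch of the Mochi-specific helper the TODO floats;
    # not part of finetrainers. Per the comment, Mochi ignores the
    # scheduler's configured sigmas and draws them uniformly per sample.
    from typing import Optional

    import torch


    def prepare_sigmas_mochi(
        batch_size: int,
        device: torch.device = torch.device("cpu"),
        generator: Optional[torch.Generator] = None,
    ) -> torch.Tensor:
        # Equivalent of `sigmas = torch.rand(latents.shape[0])`, with the
        # device/generator plumbing mirrored from prepare_sigmas() above.
        return torch.rand(batch_size, device=device, generator=generator)

Under this approach the trainer would dispatch to such a helper instead of the shared `prepare_sigmas()` for Mochi, analogous to how `calculate_noisy_latents()` is specialized per model.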
