Compare commits

...

1 Commits

Author SHA1 Message Date
tcaimm
e747fe4a94 Fix wrapped transformer config access in Flux2 Klein training (#13219) 2026-03-06 19:47:51 +05:30
2 changed files with 2 additions and 2 deletions

View File

@@ -1715,7 +1715,7 @@ def main(args):
packed_noisy_model_input = Flux2KleinPipeline._pack_latents(noisy_model_input)
# handle guidance
- if transformer.config.guidance_embeds:
+ if unwrap_model(transformer).config.guidance_embeds:
guidance = torch.full([1], args.guidance_scale, device=accelerator.device)
guidance = guidance.expand(model_input.shape[0])
else:

View File

@@ -1682,7 +1682,7 @@ def main(args):
model_input_ids = torch.cat([model_input_ids, cond_model_input_ids], dim=1)
# handle guidance
- if transformer.config.guidance_embeds:
+ if unwrap_model(transformer).config.guidance_embeds:
guidance = torch.full([1], args.guidance_scale, device=accelerator.device)
guidance = guidance.expand(model_input.shape[0])
else: