From 5c89a15b9aa1e19d311108511970dca1718f4d69 Mon Sep 17 00:00:00 2001
From: Qifan Zhang
Date: Tue, 21 Apr 2026 13:45:09 +0800
Subject: [PATCH] Reorder optimizer and logger calls in training loop (#1404)

---
 diffsynth/diffusion/runner.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/diffsynth/diffusion/runner.py b/diffsynth/diffusion/runner.py
index 7aca6ac..43a8d37 100644
--- a/diffsynth/diffusion/runner.py
+++ b/diffsynth/diffusion/runner.py
@@ -33,15 +33,15 @@ def launch_training_task(
     for epoch_id in range(num_epochs):
         for data in tqdm(dataloader):
             with accelerator.accumulate(model):
-                optimizer.zero_grad()
                 if dataset.load_from_cache:
                     loss = model({}, inputs=data)
                 else:
                     loss = model(data)
                 accelerator.backward(loss)
                 optimizer.step()
-                model_logger.on_step_end(accelerator, model, save_steps, loss=loss)
                 scheduler.step()
+                optimizer.zero_grad()
+                model_logger.on_step_end(accelerator, model, save_steps, loss=loss)
         if save_steps is None:
             model_logger.on_epoch_end(accelerator, model, epoch_id)
     model_logger.on_training_end(accelerator, model, save_steps)