diff --git a/diffsynth/trainers/utils.py b/diffsynth/trainers/utils.py
index e0c20b9..fff84d5 100644
--- a/diffsynth/trainers/utils.py
+++ b/diffsynth/trainers/utils.py
@@ -404,6 +404,7 @@ def launch_training_task(
     )
     model, optimizer, dataloader, scheduler = accelerator.prepare(model, optimizer, dataloader, scheduler)
+    global_steps = 0
     for epoch_id in range(num_epochs):
         for step_id, data in enumerate(tqdm(dataloader)):
             with accelerator.accumulate(model):
diff --git a/examples/qwen_image/model_training/lora/Qwen-Image.sh b/examples/qwen_image/model_training/lora/Qwen-Image.sh
index 7359f8c..15084c2 100644
--- a/examples/qwen_image/model_training/lora/Qwen-Image.sh
+++ b/examples/qwen_image/model_training/lora/Qwen-Image.sh
@@ -13,5 +13,5 @@ accelerate launch examples/qwen_image/model_training/train.py \
   --lora_rank 32 \
   --align_to_opensource_format \
   --use_gradient_checkpointing \
-  --num_workers 8 \
+  --dataset_num_workers 8 \
   --find_unused_parameters