Merge pull request #1183 from modelscope/z-image-omni-base-dev

fix unused parameters in z-image-omni-base
This commit is contained in:
Zhongjie Duan
2026-01-08 17:03:20 +08:00
committed by GitHub
2 changed files with 4 additions and 0 deletions

View File

@@ -12,6 +12,7 @@ accelerate launch --config_file examples/z_image/model_training/full/accelerate_
--output_path "./models/train/Z-Image-Omni-Base_full" \
--trainable_models "dit" \
--use_gradient_checkpointing \
--find_unused_parameters \
--dataset_num_workers 8
# Image(s) to image training
@@ -29,4 +30,5 @@ accelerate launch --config_file examples/z_image/model_training/full/accelerate_
# --output_path "./models/train/Z-Image-Omni-Base_full_edit" \
# --trainable_models "dit" \
# --use_gradient_checkpointing \
# --find_unused_parameters \
# --dataset_num_workers 8

View File

@@ -13,6 +13,7 @@ accelerate launch examples/z_image/model_training/train.py \
--lora_target_modules "to_q,to_k,to_v,to_out.0,w1,w2,w3" \
--lora_rank 32 \
--use_gradient_checkpointing \
--find_unused_parameters \
--dataset_num_workers 8
# Image(s) to image training
@@ -32,4 +33,5 @@ accelerate launch examples/z_image/model_training/train.py \
# --lora_target_modules "to_q,to_k,to_v,to_out.0,w1,w2,w3" \
# --lora_rank 32 \
# --use_gradient_checkpointing \
# --find_unused_parameters \
# --dataset_num_workers 8