fix controlnet annotator

This commit is contained in:
mi804
2025-08-20 23:28:40 +08:00
parent 36838a05ee
commit d0467a7e8d
2 changed files with 4 additions and 4 deletions

View File

@@ -3,9 +3,9 @@ import torch
 from modelscope import dataset_snapshot_download, snapshot_download
 from diffsynth.pipelines.qwen_image import QwenImagePipeline, ModelConfig
 from diffsynth.controlnets.processors import Annotator
-from diffsynth import download_models
-download_models(["Annotators:Depth"])
+allow_file_pattern = ["sk_model.pth", "sk_model2.pth", "dpt_hybrid-midas-501f0c75.pt", "ControlNetHED.pth", "body_pose_model.pth", "hand_pose_model.pth", "facenet.pth", "scannet.pt"]
+snapshot_download("lllyasviel/Annotators", local_dir="models/Annotators", allow_file_pattern=allow_file_pattern)
 pipe = QwenImagePipeline.from_pretrained(
     torch_dtype=torch.bfloat16,

View File

@@ -3,9 +3,9 @@ import torch
 from modelscope import dataset_snapshot_download, snapshot_download
 from diffsynth.pipelines.qwen_image import QwenImagePipeline, ModelConfig
 from diffsynth.controlnets.processors import Annotator
-from diffsynth import download_models
-download_models(["Annotators:Depth"])
+allow_file_pattern = ["sk_model.pth", "sk_model2.pth", "dpt_hybrid-midas-501f0c75.pt", "ControlNetHED.pth", "body_pose_model.pth", "hand_pose_model.pth", "facenet.pth", "scannet.pt"]
+snapshot_download("lllyasviel/Annotators", local_dir="models/Annotators", allow_file_pattern=allow_file_pattern)
 pipe = QwenImagePipeline.from_pretrained(
     torch_dtype=torch.bfloat16,