Support Qwen prompt refiner

This commit is contained in:
Artiprocher
2024-09-04 17:12:01 +08:00
parent 3a8694b642
commit e5e55345dc
4 changed files with 33 additions and 35 deletions

View File

@@ -2,18 +2,13 @@ from diffsynth import ModelManager, SDXLImagePipeline, download_models, QwenProm
import torch
# Download models (automatically)
# `models/stable_diffusion_xl/sd_xl_base_1.0.safetensors`: [link](https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors)
# `models/BeautifulPrompt/pai-bloom-1b1-text2prompt-sd/`: [link](https://huggingface.co/alibaba-pai/pai-bloom-1b1-text2prompt-sd)
# `models/translator/opus-mt-zh-en/`: [link](https://huggingface.co/Helsinki-NLP/opus-mt-zh-en)
download_models(["StableDiffusionXL_v1", "QwenPrompt", "opus-mt-zh-en"])
download_models(["StableDiffusionXL_v1", "QwenPrompt"])
# Load models
model_manager = ModelManager(torch_dtype=torch.float16, device="cuda")
model_manager.load_models([
"models/stable_diffusion_xl/sd_xl_base_1.0.safetensors",
"models/QwenPrompt/qwen2-1.5b-instruct",
"models/translator/opus-mt-zh-en"
])
pipe = SDXLImagePipeline.from_model_manager(model_manager, prompt_refiner_classes=[QwenPrompt])