import torch

from diffusers import StableDiffusionXLControlNetPipeline, ControlNetModel, AutoencoderKL
from PIL import Image


def get_pipe(lora_dir):
  # Line-art ControlNet ("white2line") loaded in half precision.
  controlnets = [ControlNetModel.from_pretrained("mattyamonaca/white2line", torch_dtype=torch.float16)]

  # fp16-safe SDXL VAE to avoid numerical issues with the stock VAE in half precision.
  vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch.float16)
  pipe = StableDiffusionXLControlNetPipeline.from_pretrained(
      "cagliostrolab/animagine-xl-3.1", controlnet=controlnets, vae=vae, torch_dtype=torch.float16
  )

  # Offload idle submodules to the CPU to reduce GPU memory usage.
  pipe.enable_model_cpu_offload()
  # Apply the black-and-white line-art LoRA from the given directory.
  pipe.load_lora_weights(lora_dir, weight_name="sdxl_BWLine.safetensors")
  return pipe
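

# Hypothetical usage sketch (not part of the original file): build the pipeline and run a
# line-art-conditioned generation. The LoRA directory, input image path, prompt, and step
# count below are placeholder assumptions, not values taken from this repository.
if __name__ == "__main__":
  pipe = get_pipe("./lora")  # assumed directory containing sdxl_BWLine.safetensors
  control_image = Image.open("white_sketch.png").convert("RGB")  # assumed conditioning image
  result = pipe(
      "1girl, lineart, monochrome, white background",  # placeholder prompt
      image=[control_image],  # one conditioning image per ControlNet in the list
      num_inference_steps=30,
  ).images[0]
  result.save("line_output.png")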