Temporarily removing variant="fp16"
Browse files
app.py
CHANGED
@@ -22,7 +22,7 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
|
|
22 |
|
23 |
torch.cuda.max_memory_allocated(device=device)
|
24 |
|
25 |
-
pipe1 = StableDiffusionXLPipeline.from_pretrained("FFusion/FFbasetest", torch_dtype=torch.float16, variant="fp16", use_safetensors=True)
|
26 |
pipe2 = DiffusionPipeline.from_pretrained("stabilityai/stable-diffusion-xl-refiner-1.0", torch_dtype=torch.float16, variant="fp16", use_safetensors=True)
|
27 |
|
28 |
pipe1 = pipe1.to(device)
|
|
|
22 |
|
23 |
torch.cuda.max_memory_allocated(device=device)
|
24 |
|
25 |
+
pipe1 = StableDiffusionXLPipeline.from_pretrained("FFusion/FFbasetest", torch_dtype=torch.float16, use_safetensors=True)
|
26 |
pipe2 = DiffusionPipeline.from_pretrained("stabilityai/stable-diffusion-xl-refiner-1.0", torch_dtype=torch.float16, variant="fp16", use_safetensors=True)
|
27 |
|
28 |
pipe1 = pipe1.to(device)
|