Update app.py
app.py CHANGED
@@ -35,11 +35,11 @@ pipe = EllaXLPipeline(pipe,f'{pipeline_path}/pytorch_model.bin')
 
 
 
-def tocuda():
-    pipe.pipe.vae.to('cuda')
-    pipe.t5_encoder.to('cuda')
-    pipe.pipe.unet.unet.to('cuda')
-    pipe.pipe.unet.ella.to('cuda')
+# def tocuda():
+#     pipe.pipe.vae.to('cuda')
+#     pipe.t5_encoder.to('cuda')
+#     pipe.pipe.unet.unet.to('cuda')
+#     pipe.pipe.unet.ella.to('cuda')
 
 
 # print("Optimizing BRIA-2.3-T5 - this could take a while")
@@ -63,7 +63,7 @@ def tocuda():
 
 # print(f"Optimizing finished successfully after {time.time()-t} secs")
 
-@spaces.GPU(enable_queue=True)
+@spaces.GPU(enable_queue=True, duration = 120)
 def infer(prompt,negative_prompt,seed,resolution, steps):
 
 # if 'cuda' not in pipe.pipe.device.type: