mokady committed on
Commit
f2b506b
·
verified ·
1 Parent(s): 028fa1b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -2
app.py CHANGED
@@ -28,8 +28,13 @@ pipe.load_lora_weights(f'{pipeline_path}/pytorch_lora_weights.safetensors')
28
  pipe.fuse_lora()
29
  pipe.unload_lora_weights()
30
  pipe.force_zeros_for_empty_prompt = False
 
 
 
31
  pipe = EllaXLPipeline(pipe,f'{pipeline_path}/pytorch_model.bin')
32
 
 
 
33
  def tocuda():
34
  pipe.pipe.vae.to('cuda')
35
  pipe.t5_encoder.to('cuda')
@@ -61,8 +66,8 @@ def tocuda():
61
  @spaces.GPU(enable_queue=True)
62
  def infer(prompt,negative_prompt,seed,resolution, steps):
63
 
64
- if 'cuda' not in pipe.pipe.device.type:
65
- tocuda()
66
 
67
  print(f"""
68
  —/n
 
28
  pipe.fuse_lora()
29
  pipe.unload_lora_weights()
30
  pipe.force_zeros_for_empty_prompt = False
31
+
32
+ pipe.to("cuda")
33
+
34
  pipe = EllaXLPipeline(pipe,f'{pipeline_path}/pytorch_model.bin')
35
 
36
+
37
+
38
  def tocuda():
39
  pipe.pipe.vae.to('cuda')
40
  pipe.t5_encoder.to('cuda')
 
66
  @spaces.GPU(enable_queue=True)
67
  def infer(prompt,negative_prompt,seed,resolution, steps):
68
 
69
+ # if 'cuda' not in pipe.pipe.device.type:
70
+ # tocuda()
71
 
72
  print(f"""
73
  —/n