Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -56,14 +56,15 @@ class calculateDuration:
 
 @spaces.GPU(duration=120)
 @torch.inference_mode()
-def generate_image(prompt, steps, seed, cfg_scale, width, height, progress):
+def generate_image(prompt, adapter_names, steps, seed, cfg_scale, width, height, progress):
 
     gr.Info("Start to generate images ...")
 
     with calculateDuration(f"Make a new generator:{seed}"):
         pipe.to(device)
         generator = torch.Generator(device=device).manual_seed(seed)
-
+    if len(adapter_names) > 0:
+        pipe.fuse_lora(adapter_names=adapter_names)
     with calculateDuration("Generating image"):
         # Generate image
         generated_image = pipe(
@@ -77,6 +78,8 @@ def generate_image(prompt, steps, seed, cfg_scale, width, height, progress):
         ).images[0]
 
     progress(99, "Generate image success!")
+    if len(adapter_names) > 0:
+        pipe.unfuse_lora()
     return generated_image
 
 
@@ -146,15 +149,12 @@ def run_lora(prompt, lora_strings_json, cfg_scale, steps, randomize_seed, seed,
     # set lora weights
     if len(adapter_names) > 0:
         pipe.set_adapters(adapter_names, adapter_weights=adapter_weights)
-
-        pipe.fuse_lora(adapter_names=adapter_names)
-        print("active adapters:", pipe.get_active_adapters())
-
+
     # Generate image
     error_message = ""
     try:
         print("Start applying for zeroGPU resources")
-        final_image = generate_image(prompt, steps, seed, cfg_scale, width, height, progress)
+        final_image = generate_image(prompt, adapter_names, steps, seed, cfg_scale, width, height, progress)
     except Exception as e:
         error_message = str(e)
         gr.Error(error_message)