Update app.py
app.py CHANGED
@@ -32,14 +32,14 @@ def generate_image(prompt, option, progress=gr.Progress()):
     global step_loaded
     print(prompt, option)
     ckpt, step = opts[option]
-    progress(0,
+    progress((0, step))
     if step != step_loaded:
         print(f"Switching checkpoint from {step_loaded} to {step}")
         pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config, timestep_spacing="trailing", prediction_type="sample" if step == 1 else "epsilon")
         pipe.unet.load_state_dict(load_file(hf_hub_download(repo, ckpt), device=device))
         step_loaded = step
     def inference_callback(p, i, t, kwargs):
-        progress(i+1,
+        progress((i+1, step))
         return kwargs
     return pipe(prompt, num_inference_steps=step, guidance_scale=0, callback_on_step_end=inference_callback).images[0]
 
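
The change swaps the truncated `progress(0,` / `progress(i+1,` calls for the tuple form `progress((completed, total))`, which `gr.Progress` accepts so the UI shows the current denoising step out of the selected step count rather than a bare fraction. A minimal standalone sketch of that tuple form, assuming a hypothetical `count_steps` demo that is not part of this Space:

import time
import gradio as gr

# Hypothetical demo of gr.Progress with a (completed, total) tuple;
# not part of the SDXL-Lightning app itself.
def count_steps(n_steps, progress=gr.Progress()):
    n_steps = int(n_steps)
    for i in range(n_steps):
        progress((i + 1, n_steps))  # rendered as "k/n" in the progress bar
        time.sleep(0.2)             # stand-in for one denoising step
    return f"Finished {n_steps} steps"

demo = gr.Interface(count_steps, gr.Slider(1, 8, value=4, step=1), "text")
demo.launch()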