Update app.py
app.py CHANGED
@@ -423,6 +423,11 @@ def generate_30(
     samples=1,
     progress=gr.Progress(track_tqdm=True) # Add progress as a keyword argument
 ):
+    prompt1=None
+    prompt2=None
+    prompt3=None
+    prompt4=None
+    prompt5=None
     image_paths=[]
     seed = random.randint(0, MAX_SEED)
     generator = torch.Generator(device='cuda').manual_seed(seed)
@@ -562,6 +567,11 @@ def generate_60(
     samples=1,
     progress=gr.Progress(track_tqdm=True) # Add progress as a keyword argument
 ):
+    prompt1=None
+    prompt2=None
+    prompt3=None
+    prompt4=None
+    prompt5=None
     image_paths=[]
     seed = random.randint(0, MAX_SEED)
     generator = torch.Generator(device='cuda').manual_seed(seed)
@@ -701,6 +711,11 @@ def generate_90(
     samples=1,
     progress=gr.Progress(track_tqdm=True) # Add progress as a keyword argument
 ):
+    prompt1=None
+    prompt2=None
+    prompt3=None
+    prompt4=None
+    prompt5=None
     image_paths=[]
     seed = random.randint(0, MAX_SEED)
     generator = torch.Generator(device='cuda').manual_seed(seed)
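
All three hunks make the same change: generate_30, generate_60, and generate_90 now reset prompt1 through prompt5 to None at the top of the function body, right after the signature closes. Below is a minimal sketch of how one of these functions plausibly reads after the commit, assuming a Gradio + diffusers-style Space; only the lines visible in the diff come from app.py, while the prompt argument, the MAX_SEED value, the pipeline call, and the return value are illustrative assumptions.

    # Sketch only: the parts not shown in the diff are hypothetical.
    import random

    import gradio as gr
    import torch

    MAX_SEED = 2**32 - 1  # assumed upper bound; the real constant is defined elsewhere in app.py

    def generate_30(
        prompt,                                  # assumed main prompt argument
        samples=1,
        progress=gr.Progress(track_tqdm=True),   # Add progress as a keyword argument
    ):
        # The commit's addition: give the five per-image prompt slots a
        # defined default (None) at the top of the body before later code
        # assigns or reads them.
        prompt1 = None
        prompt2 = None
        prompt3 = None
        prompt4 = None
        prompt5 = None

        image_paths = []
        seed = random.randint(0, MAX_SEED)
        generator = torch.Generator(device='cuda').manual_seed(seed)

        # Hypothetical remainder: run the pipeline with the seeded generator
        # and collect the saved image paths, e.g.
        #   images = pipe(prompt, num_images_per_prompt=samples, generator=generator).images
        return image_paths, seed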