Spaces: Running on Zero
1inkusFace committed
Update app.py
app.py CHANGED
@@ -186,7 +186,7 @@ def generate_30(
     seed = random.randint(0, MAX_SEED)
     generator = torch.Generator(device='cuda').manual_seed(seed)
     pipe.text_encoder=text_encoder.to(device=device, dtype=torch.bfloat16)
-
+    pipe.text_encoder_2=text_encoder_2.to(device=device, dtype=torch.bfloat16)
     options = {
         "prompt": [prompt],
         "negative_prompt": [negative_prompt],
@@ -228,7 +228,7 @@ def generate_60(
     seed = random.randint(0, MAX_SEED)
     generator = torch.Generator(device='cuda').manual_seed(seed)
     pipe.text_encoder=text_encoder.to(device=device, dtype=torch.bfloat16)
-
+    pipe.text_encoder_2=text_encoder_2.to(device=device, dtype=torch.bfloat16)
     options = {
         "prompt": [prompt],
         "negative_prompt": [negative_prompt],
@@ -270,7 +270,7 @@ def generate_90(
     seed = random.randint(0, MAX_SEED)
     generator = torch.Generator(device='cuda').manual_seed(seed)
     pipe.text_encoder=text_encoder.to(device=device, dtype=torch.bfloat16)
-
+    pipe.text_encoder_2=text_encoder_2.to(device=device, dtype=torch.bfloat16)
     options = {
         "prompt": [prompt],
         "negative_prompt": [negative_prompt],