multimodalart HF staff committed on
Commit
3537b64
1 Parent(s): b78df58

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -5
app.py CHANGED
@@ -15,10 +15,10 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
15
  #torch._inductor.config.epilogue_fusion = False
16
  #torch._inductor.config.coordinate_descent_check_all_directions = True
17
 
18
- pipe_v1 = AuraFlowPipeline.from_pretrained(
19
- "fal/AuraFlow",
20
- torch_dtype=torch.float16
21
- ).to("cuda")
22
 
23
  pipe_v2 = AuraFlowPipeline.from_pretrained(
24
  "fal/AuraFlow-v0.2",
@@ -167,7 +167,7 @@ with gr.Blocks(css=css) as demo:
167
  with gr.Accordion("Advanced Settings", open=False):
168
 
169
  model_version = gr.Dropdown(
170
- ["0.1", "0.2", "0.3"], label="Model version", value="0.3"
171
  )
172
 
173
  negative_prompt = gr.Text(
 
15
  #torch._inductor.config.epilogue_fusion = False
16
  #torch._inductor.config.coordinate_descent_check_all_directions = True
17
 
18
+ #pipe_v1 = AuraFlowPipeline.from_pretrained(
19
+ # "fal/AuraFlow",
20
+ # torch_dtype=torch.float16
21
+ #).to("cuda")
22
 
23
  pipe_v2 = AuraFlowPipeline.from_pretrained(
24
  "fal/AuraFlow-v0.2",
 
167
  with gr.Accordion("Advanced Settings", open=False):
168
 
169
  model_version = gr.Dropdown(
170
+ ["0.2", "0.3"], label="Model version", value="0.3"
171
  )
172
 
173
  negative_prompt = gr.Text(