prithivMLmods committed
Commit 8f93c1b · verified · 1 Parent(s): db43384

Update app.py

Files changed (1): app.py (+9, -8)
app.py CHANGED
@@ -136,10 +136,10 @@ if torch.cuda.is_available():
         pipe3.enable_model_cpu_offload()
     else:
         pipe3.to(device)
-        print("Loaded Animagine XL 4.0 on Device!")
+        print("Loaded RealVisXL_V3.0_Turbo on Device!")
     if USE_TORCH_COMPILE:
         pipe3.unet = torch.compile(pipe3.unet, mode="reduce-overhead", fullgraph=True)
-        print("Model Animagine XL 4.0 Compiled!")
+        print("Model RealVisXL_V3.0_Turbo Compiled!")
 else:
     pipe = StableDiffusionXLPipeline.from_pretrained(
         "SG161222/RealVisXL_V5.0_Lightning",
@@ -235,8 +235,8 @@ def generate(
 
     selected_pipe = models.get(model_choice, pipe)
     images = selected_pipe(**options).images
+    yield "⚡ Generating image ███████▒▒▒ 69%"
     image_path = save_image(images[0])
-    yield "Generating image ..."
     yield gr.Image(image_path)
     return
 
@@ -272,7 +272,7 @@ def generate(
     thread.start()
 
     buffer = ""
-    yield "Thinking..."
+    yield "💭 Thinking..."
     for new_text in streamer:
         buffer += new_text
         buffer = buffer.replace("<|im_end|>", "")
@@ -340,9 +340,9 @@ demo = gr.ChatInterface(
     ],
     examples=[
         ["@tts1 Who is Nikola Tesla, and why did he die?"],
-        ['@lightningv5 "Chocolate dripping from a donut against a yellow background, in the style of brocore, hyper-realistic"'],
-        ['@lightningv4 "A serene landscape with mountains"'],
-        ['@turbov3 "Abstract art, colorful and vibrant"'],
+        ['@lightningv5 Chocolate dripping from a donut against a yellow background, in the style of brocore, hyper-realistic'],
+        ['@lightningv4 A serene landscape with mountains'],
+        ['@turbov3 Abstract art, colorful and vibrant'],
         ["Write a Python function to check if a number is prime."],
         ["@tts2 What causes rainbows to form?"],
     ],
@@ -351,9 +351,10 @@ demo = gr.ChatInterface(
     description=DESCRIPTION,
     css=css,
     fill_height=True,
-    textbox=gr.MultimodalTextbox(label="Query Input", file_types=["image"], file_count="multiple"),
+    textbox=gr.MultimodalTextbox(label="Query Input", file_types=["image"], file_count="multiple", placeholder="use the tags @lightningv5 @lightningv4 @turbov3 for image gen !"),
     stop_btn="Stop Generation",
     multimodal=True,
+
 )
 
 if __name__ == "__main__":
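
For context, here is a minimal, self-contained sketch (assumed names, not the app's actual generate()) of the streaming pattern the yield changes above touch: in a gr.ChatInterface handler each successive yield replaces the pending bot message, so the "⚡ Generating image ..." status shows while the pipeline runs and is swapped for the image once it is saved. IMAGE_TAGS, the stand-in image_path, and the stripped-down ChatInterface are illustrative only.

import gradio as gr

# Tags the commit advertises in the textbox placeholder; in the real app they
# route to different StableDiffusionXLPipeline objects via models.get(...).
IMAGE_TAGS = ("@lightningv5", "@lightningv4", "@turbov3")

def generate(message, history):
    # With multimodal=True, Gradio passes the message as {"text": ..., "files": [...]}.
    text = message["text"] if isinstance(message, dict) else message
    if text.startswith(IMAGE_TAGS):
        yield "⚡ Generating image ..."   # interim status shown while the pipe runs
        image_path = "output.png"         # stand-in for save_image(images[0])
        yield gr.Image(image_path)        # final yield replaces the status text
    else:
        yield "💭 Thinking..."            # the text path would then stream tokens

demo = gr.ChatInterface(
    fn=generate,
    multimodal=True,
    textbox=gr.MultimodalTextbox(
        label="Query Input",
        placeholder="use the tags @lightningv5 @lightningv4 @turbov3 for image gen !",
    ),
)
# demo.launch()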