guardiancc committed (verified)
Commit a1d63d2 · Parent(s): 3cc869d

Update app.py

Files changed (1):
  1. app.py +44 -5
app.py CHANGED
@@ -25,6 +25,10 @@ from huggingface_hub import (
     ModelCard,
     snapshot_download)
 
+import torch
+from diffusers import FluxControlNetPipeline
+from diffusers import FluxControlNetModel
+
 import spaces
 
 #---if env = local or colab---
@@ -1430,6 +1434,14 @@ pipe_i2i = AutoPipelineForImage2Image.from_pretrained(base_model,
     torch_dtype=dtype
     )
 
+
+# CONTROLNET
+controlnet_model = "InstantX/FLUX.1-dev-controlnet-canny"
+controlnet = FluxControlNetModel.from_pretrained(controlnet_model, torch_dtype=torch.bfloat16)
+pipe_canny = FluxControlNetPipeline.from_pretrained(
+    base_model, controlnet=controlnet, torch_dtype=torch.bfloat16
+)
+
 MAX_SEED = 2**32-1
 
 pipe.flux_pipe_call_that_returns_an_iterable_of_images = flux_pipe_call_that_returns_an_iterable_of_images.__get__(pipe)
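
Note: this hunk instantiates pipe_canny, but no hunk in this commit ever invokes it; the Canny/Sobel branches added to run_lora below only yield the edge map itself. A minimal sketch of how the edge map could be routed through the ControlNet pipeline. Treat it as an assumption to verify against the diffusers FluxControlNetPipeline call signature; generate_with_canny_control is a hypothetical helper and the controlnet_conditioning_scale value is a guess.

# Reviewer sketch (not part of the commit): feed the saved edge map through pipe_canny.
from PIL import Image
import torch

def generate_with_canny_control(prompt_mash, control_image_path, steps, seed, cfg_scale, width, height):
    pipe_canny.to("cuda")
    generator = torch.Generator(device="cuda").manual_seed(seed)
    # e.g. the 'output.jpg' produced by generate_canny below
    control_image = Image.open(control_image_path)
    result = pipe_canny(
        prompt=prompt_mash,
        control_image=control_image,
        controlnet_conditioning_scale=0.6,   # assumed starting value, tune as needed
        num_inference_steps=steps,
        guidance_scale=cfg_scale,
        width=width,
        height=height,
        generator=generator,
    )
    return result.images[0]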
@@ -1473,6 +1485,25 @@ def update_selection(evt: gr.SelectData, width, height):
         height,
     )
 
+import cv2
+def generate_canny(image, type="canny"):
+    img = cv2.imread(image)
+    img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
+    img_blur = cv2.GaussianBlur(img_gray, (3,3), 0)
+
+    if(type == "canny"):
+        edges = cv2.Canny(image=img_blur, threshold1=100, threshold2=200)
+        cv2.imwrite('output.jpg', edges)
+        return 'output.jpg'
+
+    if(type == "sobel"):
+        sobelxy = cv2.Sobel(src=img_blur, ddepth=cv2.CV_64F, dx=1, dy=1, ksize=5)
+        cv2.imwrite('output.jpg', sobelxy)
+        return 'output.jpg'
+
+    return image
+
+
 @spaces.GPU(duration=100)
 def generate_image(prompt_mash, steps, seed, cfg_scale, width, height, lora_scale, progress):
     pipe.to("cuda")
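
Note: generate_canny writes every result to a single 'output.jpg' in the working directory, which can collide across concurrent Gradio sessions, and the Sobel branch saves a CV_64F array directly. A hedged in-memory alternative; edge_map is a hypothetical helper, assuming numpy and PIL are available in this Space.

# Reviewer sketch (not part of the commit): same edge detection, but returns a PIL
# image instead of writing a shared 'output.jpg', and converts the Sobel result
# back to uint8 before display.
import cv2
import numpy as np
from PIL import Image

def edge_map(image_path, mode="canny"):
    img = cv2.imread(image_path)
    img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    img_blur = cv2.GaussianBlur(img_gray, (3, 3), 0)

    if mode == "canny":
        edges = cv2.Canny(image=img_blur, threshold1=100, threshold2=200)
    elif mode == "sobel":
        sobelxy = cv2.Sobel(src=img_blur, ddepth=cv2.CV_64F, dx=1, dy=1, ksize=5)
        edges = cv2.convertScaleAbs(sobelxy)   # rescale float64 gradients to uint8
    else:
        return Image.open(image_path)

    return Image.fromarray(edges)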
@@ -1511,7 +1542,7 @@ def generate_image_to_image(prompt_mash, image_input_path, image_strength, steps
     return final_image
 
 @spaces.GPU(duration=100)
-def run_lora(prompt, image_input, image_strength, cfg_scale, steps, selected_index, randomize_seed, seed, width, height, lora_scale, progress=gr.Progress(track_tqdm=True)):
+def run_lora(prompt, image_input, image_strength, cfg_scale, steps, selected_index, randomize_seed, seed, width, height, lora_scale, useCanny, useSobel, progress=gr.Progress(track_tqdm=True)):
     if selected_index is None:
         raise gr.Error("You must select a LoRA before proceeding.🧨")
     selected_lora = loras[selected_index]
@@ -1548,9 +1579,15 @@ def run_lora(prompt, image_input, image_strength, cfg_scale, steps, selected_ind
         seed = random.randint(0, MAX_SEED)
 
     if(image_input is not None):
-
-        final_image = generate_image_to_image(prompt_mash, image_input, image_strength, steps, cfg_scale, width, height, lora_scale, seed)
-        yield final_image, seed, gr.update(visible=False)
+        if(useCanny):
+            final_image = generate_canny(image_input, "canny")
+            yield final_image, seed, gr.update(visible=False)
+        elif(useSobel):
+            final_image = generate_canny(image_input, "sobel")
+            yield final_image, seed, gr.update(visible=False)
+        else:
+            final_image = generate_image_to_image(prompt_mash, image_input, image_strength, steps, cfg_scale, width, height, lora_scale, seed)
+            yield final_image, seed, gr.update(visible=False)
     else:
         image_generator = generate_image(prompt_mash, steps, seed, cfg_scale, width, height, lora_scale, progress)
 
@@ -1705,6 +1742,8 @@ with gr.Blocks(theme="prithivMLmods/Minecraft-Theme", css=css, delete_cache=(60,
             with gr.Row():
                 cfg_scale = gr.Slider(label="CFG Scale", minimum=1, maximum=20, step=0.5, value=3.5)
                 steps = gr.Slider(label="Steps", minimum=1, maximum=50, step=1, value=28)
+                useCanny = gr.Checkbox(label="Use Canny")
+                useSobel = gr.Checkbox(label="Use Sobel")
 
             with gr.Row():
                 width = gr.Slider(label="Width", minimum=256, maximum=1536, step=64, value=1024)
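
Note: the two checkboxes are independent, so both can be ticked at once; run_lora then silently prefers the Canny branch. A mutually exclusive control is one alternative, sketched below under the assumption that run_lora and the gr.on(...) inputs list would be adjusted to take a single edge_mode string instead of useCanny/useSobel.

# Reviewer sketch (not part of the commit): a single radio instead of two checkboxes.
import gradio as gr

with gr.Blocks() as demo:
    edge_mode = gr.Radio(
        choices=["None", "Canny", "Sobel"],
        value="None",
        label="Edge preprocessing",
    )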
@@ -1732,7 +1771,7 @@ with gr.Blocks(theme="prithivMLmods/Minecraft-Theme", css=css, delete_cache=(60,
     gr.on(
         triggers=[generate_button.click, prompt.submit],
         fn=run_lora,
-        inputs=[prompt, input_image, image_strength, cfg_scale, steps, selected_index, randomize_seed, seed, width, height, lora_scale],
+        inputs=[prompt, input_image, image_strength, cfg_scale, steps, selected_index, randomize_seed, seed, width, height, lora_scale, useCanny, useSobel],
         outputs=[result, seed, progress_bar]
     )
 
 