Update app.py
app.py CHANGED
@@ -1,6 +1,7 @@
 import os
 import gradio as gr
 import numpy as np
+from rembg import remove
 import random
 from huggingface_hub import AsyncInferenceClient
 from translatepy import Translator
@@ -63,7 +64,7 @@ async def generate_image(prompt, model, lora_word, width, height, scales, steps,
         print(f"[-] Generating image with prompt: {text}, model: {model}")
         client = AsyncInferenceClient()
         image = await client.text_to_image(prompt=text, height=height, width=width, guidance_scale=scales, num_inference_steps=steps, model=model)
-        return image, seed
+        return remove(image), seed
     except Exception as e:
         print(f"[-] Error generating image: {e}")
         return None, None
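The change routes the generated image through rembg's remove() before it is returned, so the app outputs a background-stripped cutout instead of the raw generation. A minimal, self-contained sketch of that flow is below; the prompt, model id, and generation parameters are placeholders for illustration, not values taken from this app.

import asyncio
from huggingface_hub import AsyncInferenceClient
from rembg import remove  # background removal; returns an image with an alpha channel

async def demo():
    client = AsyncInferenceClient()
    # text_to_image returns a PIL.Image.Image
    image = await client.text_to_image(
        prompt="a studio photo of a red sneaker",  # placeholder prompt
        model="black-forest-labs/FLUX.1-dev",      # placeholder model id
        width=1024,
        height=1024,
        guidance_scale=3.5,
        num_inference_steps=28,
    )
    # remove() accepts a PIL image (or bytes / numpy array) and strips the background
    cutout = remove(image)
    cutout.save("cutout.png")  # PNG preserves the transparency

asyncio.run(demo())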