Update app.py
app.py
CHANGED
@@ -11,6 +11,7 @@ MAX_SEED = np.iinfo(np.int32).max
 
 HF_TOKEN = os.getenv('HF_TOKEN')
 HF_TOKEN_UPSCALER = os.getenv('HF_TOKEN')
+
 css2="""
 /* Apply dark theme (black background) */
 body {
@@ -157,7 +158,6 @@ body {
 
 """
 
-
 # Define base models
 base_models = [
     "black-forest-labs/FLUX.1-schnell",
@@ -165,14 +165,13 @@ base_models = [
     "Shakker-Labs/FLUX.1-dev-ControlNet-Union-Pro"
 ]
 
-
 def load_local_loras(lora_directory="lora_models"):
     """Load loras from local safetensor files"""
     loras_list_custom = []
 
     if not os.path.exists(lora_directory):
-
-
+        os.makedirs(lora_directory)
+        print(f"[-] Created lora directory: {lora_directory}")
 
     lora_files = list(Path(lora_directory).glob("*.safetensors"))
 
@@ -200,6 +199,30 @@ def enable_lora(lora_path, basemodel):
         "lora_scale": 0.75  # Adjust this value as needed
     }
 
+# Function to upscale image
+def get_upscale_finegrain(prompt, img_path, upscale_factor):
+    try:
+        print(f"[-] Starting upscaling process with factor {upscale_factor} for image {img_path}")
+        client = Client("finegrain/finegrain-image-enhancer", hf_token=HF_TOKEN_UPSCALER)
+        result = client.predict(
+            input_image=handle_file(img_path),
+            prompt=prompt,
+            negative_prompt="worst quality, low quality, normal quality",
+            upscale_factor=upscale_factor,
+            controlnet_scale=0.6,
+            controlnet_decay=1,
+            condition_scale=6,
+            denoise_strength=0.35,
+            num_inference_steps=18,
+            solver="DDIM",
+            api_name="/process"
+        )
+        print(f"[-] Upscaling successful.")
+        return result[1]  # Return upscaled image path
+    except Exception as e:
+        print(f"[-] Error scaling image: {e}")
+        return None
+
 # Function to generate image
 async def generate_image(prompt, model_config, lora_word, width, height, scales, steps, seed):
     try:
@@ -242,11 +265,40 @@ async def generate_image(prompt, model_config, lora_word, width, height, scales, steps, seed):
         print(f"[-] Error generating image: {e}")
         return None, None
 
+# Main function to generate images and optionally upscale
+async def gen(prompt, basemodel, width, height, scales, steps, seed, upscale_factor, process_upscale, lora_model, process_lora):
+    print(f"[-] Starting image generation with prompt: {prompt}")
+
+    model = enable_lora(lora_model, basemodel) if process_lora else basemodel
+    print(f"[-] Using model: {model}")
+
+    image, seed = await generate_image(prompt, model, "", width, height, scales, steps, seed)
+
+    if image is None:
+        print("[-] Image generation failed.")
+        return []
+
+    image_path = "temp_image.jpg"
+    print(f"[-] Saving temporary image to: {image_path}")
+    image.save(image_path, format="JPEG")
+
+    upscale_image_path = None
+    if process_upscale:
+        print(f"[-] Processing upscaling with factor: {upscale_factor}")
+        upscale_image_path = get_upscale_finegrain(prompt, image_path, upscale_factor)
+        if upscale_image_path is not None and os.path.exists(upscale_image_path):
+            print(f"[-] Upscaling complete. Image saved at: {upscale_image_path}")
+            return [image_path, upscale_image_path]  # Return both images
+        else:
+            print("[-] Upscaling failed, upscaled image path not found.")
+
+    return [image_path]
+
 # Load local loras
 local_loras = load_local_loras()
 
 # Creating Gradio interface
-with gr.Blocks(css=css2) as WallpaperFluxMaker:
+with gr.Blocks(css=css2, theme=IndonesiaTheme()) as WallpaperFluxMaker:
    gr.HTML('<div id="banner">✨ Flux MultiMode Generator + Upscaler ✨</div>')
 
    with gr.Column(elem_id="col-container"):
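
Note on dependencies: the new get_upscale_finegrain helper drives the finegrain/finegrain-image-enhancer Space through gradio_client, and the directory handling in load_local_loras relies on os and Path. Those imports sit above this hunk and are not shown in the diff; the sketch below is a minimal guess at what the top of app.py presumably contains (IndonesiaTheme is defined or imported elsewhere in the app and is deliberately left out here):

# Sketch of the imports the new code paths assume; not part of this commit.
import os
from pathlib import Path

import gradio as gr
from gradio_client import Client, handle_file  # used by get_upscale_finegrain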
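
On the UI side, gen is an async handler that returns a list of one or two image paths, so it maps naturally onto a gr.Gallery output (Gradio runs async functions as event handlers directly). A hedged sketch of how it might be wired inside the existing with gr.Blocks(...) context; every component name below (prompt, basemodel, btn_generate, output_gallery, and so on) is illustrative and not taken from this commit:

# Hypothetical wiring of gen() inside the Blocks context; component names are illustrative.
output_gallery = gr.Gallery(label="Generated / upscaled images")
btn_generate = gr.Button("Generate")
btn_generate.click(
    fn=gen,  # async handlers are accepted by Gradio event listeners
    inputs=[prompt, basemodel, width, height, scales, steps, seed,
            upscale_factor, process_upscale, lora_model, process_lora],
    outputs=output_gallery,
)

Returning [image_path] on the no-upscale path and [image_path, upscale_image_path] when upscaling succeeds keeps the output shape uniform for a gallery component.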
|