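# Gradio demo that turns text prompts into images by calling the Prodia
# inference API (FLUX dev txt2img job type). The API key is read from the
# API_KEY environment variable.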
import os
import random
from io import BytesIO

import gradio as gr
import numpy as np
import requests
from PIL import Image

# Largest 32-bit signed integer; used as the upper bound for seed values.
MAX_SEED = np.iinfo(np.int32).max
MAX_IMAGE_SIZE = 2048  # defined but not referenced below


class APIClient:
    """Minimal client for the Prodia job endpoint; responses are decoded into PIL images."""

    def __init__(self, api_key=os.getenv("API_KEY"), base_url="inference.prodia.com"):
        self.headers = {
            "Content-Type": "application/json",
            "Accept": "image/jpeg",
            "Authorization": f"Bearer {api_key}",
        }
        self.base_url = f"https://{base_url}"

    def _post(self, url, json=None):
        r = requests.post(url, headers=self.headers, json=json)
        r.raise_for_status()
        # The endpoint replies with raw JPEG bytes, not JSON.
        return Image.open(BytesIO(r.content)).convert("RGB")

    def job(self, config):
        body = {"type": "inference.flux.dev.txt2img.v1", "config": config}
        return self._post(f"{self.base_url}/v2/job", json=body)


def infer(prompt, seed=42, randomize_seed=False, resolution="1024x1024",
          guidance_scale=5.0, num_inference_steps=28, progress=gr.Progress(track_tqdm=True)):
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)
    # Resolution arrives as a "<width>x<height>" string from the dropdown.
    width, height = resolution.split("x")
    image = generative_api.job({
        "prompt": prompt,
        "width": int(width),
        "height": int(height),
        "seed": seed,
        "steps": num_inference_steps,
        "guidance_scale": guidance_scale,
    })
    return image, seed


generative_api = APIClient()

with open("header.md", "r") as file:
    header = file.read()

examples = [
    "a tiny astronaut hatching from an egg on the moon",
    "a cat holding a sign that says hello world",
    "an anime illustration of a wiener schnitzel",
]
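
# Page styling: center the layout column and cap the preview image size.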
css = """
#col-container {
    margin: 0 auto;
    max-width: 520px;
}
.image-container img {
    max-width: 512px;
    max-height: 512px;
    margin: 0 auto;
    border-radius: 0px;
}
"""

with gr.Blocks(css=css) as demo:
    with gr.Column(elem_id="col-container"):
        gr.Markdown(header)

        with gr.Row():
            prompt = gr.Text(
                label="Prompt",
                show_label=False,
                max_lines=1,
                placeholder="Enter your prompt",
            )
            run_button = gr.Button("Run", scale=0)

        result = gr.Image(label="Result", show_label=False, format="jpeg")
        with gr.Accordion("Advanced Settings", open=False):
            seed = gr.Slider(
                label="Seed",
                minimum=0,
                maximum=MAX_SEED,
                step=1,
                value=0,
            )
            randomize_seed = gr.Checkbox(label="Randomize seed", value=True)

            with gr.Row():
                resolution = gr.Dropdown(
                    label="Resolution",
                    value="1024x1024",
                    choices=[
                        "1024x1024",
                        "1024x576",
                        "576x1024",
                    ],
                )

            with gr.Row():
                guidance_scale = gr.Slider(
                    label="Guidance Scale",
                    minimum=1,
                    maximum=15,
                    step=0.1,
                    value=3.5,
                )
                num_inference_steps = gr.Slider(
                    label="Number of inference steps",
                    minimum=1,
                    maximum=50,
                    step=1,
                    value=28,
                )

        # Lazily cache example outputs: each example is generated on first use.
        gr.Examples(
            examples=examples,
            fn=infer,
            inputs=[prompt],
            outputs=[result, seed],
            cache_examples="lazy",
        )

    # Generate on button click or when Enter is pressed in the prompt box.
    gr.on(
        triggers=[run_button.click, prompt.submit],
        fn=infer,
        inputs=[prompt, seed, randomize_seed, resolution, guidance_scale, num_inference_steps],
        outputs=[result, seed],
    )
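
# Allow up to 14 queued requests with 12 running concurrently, and expose the
# programmatic API alongside the UI.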
demo.queue(default_concurrency_limit=12, max_size=14, api_open=True).launch(max_threads=256, show_api=True)