MrDrmm committed on
Commit
e5edc75
·
verified ·
1 Parent(s): ae4bb43

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +52 -117
app.py CHANGED
@@ -1,58 +1,45 @@
1
  import gradio as gr
 
2
  from model import models
3
- from multit2i import (load_models, infer_fn, infer_rand_fn, save_gallery,
 
4
  change_model, warm_model, get_model_info_md, loaded_models,
5
  get_positive_prefix, get_positive_suffix, get_negative_prefix, get_negative_suffix,
6
- get_recom_prompt_type, set_recom_prompt_preset, get_tag_type, randomize_seed, translate_to_en)
7
-
8
 
9
- import os
 
 
 
10
 
11
- # Поле для HF_TOKEN
12
- hf_token_input = gr.Textbox(label="Введите HF Token", type="password", interactive=True)
 
 
 
 
13
 
 
14
  def set_hf_token(hf_token):
15
- """Функция для установки HF_TOKEN и проверки его активации"""
16
  if hf_token:
17
- os.environ["HF_TOKEN"] = hf_token # Устанавливаем токен в переменные окружения
18
  return f"✅ HF_TOKEN установлен!", gr.update(value=hf_token, interactive=True)
19
  else:
20
  return "❌ Токен не введен!", gr.update(value="", interactive=True)
21
 
22
- # Кнопка подтверждения токена
23
- confirm_token = gr.Button("Активировать HF_TOKEN")
24
-
25
- # Поле для отображения статуса
26
- token_status = gr.Markdown("🔴 HF_TOKEN не установлен")
27
-
28
- # Подключаем обработчик кнопки
29
- confirm_token.click(set_hf_token, inputs=[hf_token_input], outputs=[token_status, hf_token_input])
30
-
31
- # Вставляем эти элементы в твой UI
32
- with gr.Blocks(theme="NoCrypt/miku@>=1.2.2") as demo:
33
- with gr.Tab("Settings"):
34
  gr.Markdown("### Настройки токена Hugging Face")
35
- hf_token_input.render() # Поле ввода
36
- confirm_token.render() # Кнопка активации
37
- token_status.render() # Статус токена
38
 
39
- # Здесь твой код UI (его менять не нужно)
40
- # ...
 
41
 
 
42
 
43
-
44
-
45
- max_images = 8
46
- MAX_SEED = 2**32-1
47
- load_models(models)
48
-
49
- css = """
50
- .model_info { text-align: center; }
51
- .output { width=112px; height=112px; max_width=112px; max_height=112px; !important; }
52
- .gallery { min_width=512px; min_height=512px; max_height=1024px; !important; }
53
- """
54
-
55
- with gr.Blocks(theme="NoCrypt/miku@>=1.2.2", fill_width=True, css=css) as demo:
56
  with gr.Tab("Image Generator"):
57
  with gr.Row():
58
  with gr.Column(scale=10):
@@ -61,13 +48,13 @@ with gr.Blocks(theme="NoCrypt/miku@>=1.2.2", fill_width=True, css=css) as demo:
61
  with gr.Accordion("Advanced options", open=False):
62
  neg_prompt = gr.Text(label="Negative Prompt", lines=1, max_lines=8, placeholder="")
63
  with gr.Row():
64
- width = gr.Slider(label="Width", info="If 0, the default value is used.", maximum=2048, step=32, value=0)
65
- height = gr.Slider(label="Height", info="If 0, the default value is used.", maximum=2048, step=32, value=0)
66
- steps = gr.Slider(label="Number of inference steps", info="If 0, the default value is used.", maximum=100, step=1, value=0)
67
  with gr.Row():
68
- cfg = gr.Slider(label="Guidance scale", info="If 0, the default value is used.", maximum=30.0, step=0.1, value=0)
69
- seed = gr.Slider(label="Seed", info="Randomize Seed if -1.", minimum=-1, maximum=MAX_SEED, step=1, value=-1)
70
- seed_rand = gr.Button("Randomize Seed 🎲", size="sm", variant="secondary")
71
  recom_prompt_preset = gr.Radio(label="Set Presets", choices=get_recom_prompt_type(), value="Common")
72
  with gr.Row():
73
  positive_prefix = gr.CheckboxGroup(label="Use Positive Prefix", choices=get_positive_prefix(), value=[])
@@ -82,10 +69,10 @@ with gr.Blocks(theme="NoCrypt/miku@>=1.2.2", fill_width=True, css=css) as demo:
82
  run_button = gr.Button("Generate Image", variant="primary", scale=8)
83
  random_button = gr.Button("Random Model 🎲", variant="secondary", scale=3)
84
  stop_button = gr.Button('Stop', interactive=False, variant="stop", scale=1)
85
-
86
  with gr.Group():
87
  model_name = gr.Dropdown(label="Select Model", choices=list(loaded_models.keys()), value=list(loaded_models.keys())[0], allow_custom_value=True)
88
  model_info = gr.Markdown(value=get_model_info_md(list(loaded_models.keys())[0]), elem_classes="model_info")
 
89
  with gr.Column(scale=10):
90
  with gr.Group():
91
  with gr.Row():
@@ -97,83 +84,31 @@ with gr.Blocks(theme="NoCrypt/miku@>=1.2.2", fill_width=True, css=css) as demo:
97
  container=True, format="png", object_fit="cover", columns=2, rows=2)
98
  image_files = gr.Files(label="Download", interactive=False)
99
  clear_results = gr.Button("Clear Gallery / Download 🗑️", variant="secondary")
100
- with gr.Column():
101
- examples = gr.Examples(
102
- examples = [
103
- ["souryuu asuka langley, 1girl, neon genesis evangelion, plugsuit, pilot suit, red bodysuit, sitting, crossing legs, black eye patch, cat hat, throne, symmetrical, looking down, from bottom, looking at viewer, outdoors"],
104
- ["sailor moon, magical girl transformation, sparkles and ribbons, soft pastel colors, crescent moon motif, starry night sky background, shoujo manga style"],
105
- ["kafuu chino, 1girl, solo"],
106
- ["1girl"],
107
- ["beautiful sunset"],
108
- ],
109
- inputs=[prompt],
110
- cache_examples=False,
111
- )
112
- with gr.Tab("PNG Info"):
113
def extract_exif_data(image):
    """Pull generation metadata out of an uploaded image.

    Probes the image's ``info`` mapping for the metadata keys commonly written
    by image generators (Stable-Diffusion-style "parameters", "metadata",
    ComfyUI "prompt", NovelAI "Comment") and falls back to dumping the whole
    mapping as text.

    Args:
        image: PIL-style image object exposing an ``info`` dict, or ``None``.

    Returns:
        The first matching metadata value; ``str(image.info)`` when no known
        key is present; ``""`` for ``None`` input; or an error-message string
        if reading the metadata raises.
    """
    if image is None:
        return ""
    try:
        # Check the well-known keys in priority order.
        for key in ('parameters', 'metadata', 'prompt', 'Comment'):
            if key in image.info:
                return image.info[key]
        # Nothing recognized — expose the raw info dict for inspection.
        return str(image.info)
    except Exception as e:
        return f"Error extracting metadata: {str(e)}"
123
- with gr.Row():
124
- with gr.Column():
125
- image_metadata = gr.Image(label="Image with metadata", type="pil", sources=["upload"])
126
- with gr.Column():
127
- result_metadata = gr.Textbox(label="Metadata", show_label=True, show_copy_button=True, interactive=False, container=True, max_lines=99)
128
 
129
- image_metadata.change(
130
- fn=extract_exif_data,
131
- inputs=[image_metadata],
132
- outputs=[result_metadata],
133
- )
134
- gr.Markdown(
135
- f"""This demo was created in reference to the following demos.<br>
136
- [Nymbo/Flood](https://huggingface.co/spaces/Nymbo/Flood),
137
- [Yntec/ToyWorldXL](https://huggingface.co/spaces/Yntec/ToyWorldXL),
138
- [Yntec/Diffusion80XX](https://huggingface.co/spaces/Yntec/Diffusion80XX).
139
- """
140
- )
141
- gr.DuplicateButton(value="Duplicate Space")
142
- gr.Markdown(f"Just a few edits to *model.py* are all it takes to complete your own collection.")
143
 
144
- gr.on(triggers=[run_button.click, prompt.submit, random_button.click], fn=lambda: gr.update(interactive=True), inputs=None, outputs=stop_button, show_api=False)
145
- model_name.change(change_model, [model_name], [model_info], queue=True, show_api=True)\
146
- .success(warm_model, [model_name], None, queue=True, show_api=True)
147
  for i, o in enumerate(output):
148
  img_i = gr.Number(i, visible=False)
149
- image_num.change(lambda i, n: gr.update(visible = (i < n)), [img_i, image_num], o, show_api=True)
150
- gen_event = gr.on(triggers=[run_button.click, prompt.submit],
151
- fn=lambda i, n, m, t1, t2, n1, n2, n3, n4, n5, l1, l2, l3, l4: infer_fn(m, t1, t2, n1, n2, n3, n4, n5, l1, l2, l3, l4) if (i < n) else None,
152
- inputs=[img_i, image_num, model_name, prompt, neg_prompt, height, width, steps, cfg, seed,
153
- positive_prefix, positive_suffix, negative_prefix, negative_suffix],
154
- outputs=[o], queue=True, show_api=False) # Be sure to delete ", queue=False" when activating the stop button
155
- gen_event2 = gr.on(triggers=[random_button.click],
156
- fn=lambda i, n, m, t1, t2, n1, n2, n3, n4, n5, l1, l2, l3, l4: infer_rand_fn(m, t1, t2, n1, n2, n3, n4, n5, l1, l2, l3, l4) if (i < n) else None,
157
- inputs=[img_i, image_num, model_name, prompt, neg_prompt, height, width, steps, cfg, seed,
158
- positive_prefix, positive_suffix, negative_prefix, negative_suffix],
159
- outputs=[o], queue=True, show_api=False) # Be sure to delete ", queue=False" when activating the stop button
160
- o.change(save_gallery, [o, results], [results, image_files], show_api=False)
161
- stop_button.click(lambda: gr.update(interactive=False), None, stop_button, cancels=[gen_event, gen_event2], show_api=False)
162
-
163
- clear_prompt.click(lambda: (None, None), None, [prompt, neg_prompt], queue=True, show_api=True)
164
- clear_results.click(lambda: (None, None), None, [results, image_files], queue=True, show_api=True)
165
  recom_prompt_preset.change(set_recom_prompt_preset, [recom_prompt_preset],
166
- [positive_prefix, positive_suffix, negative_prefix, negative_suffix], queue=True, show_api=True)
167
- seed_rand.click(randomize_seed, None, [seed], queue=True, show_api=True)
168
- trans_prompt.click(translate_to_en, [prompt], [prompt], queue=True, show_api=True)\
169
- .then(translate_to_en, [neg_prompt], [neg_prompt], queue=True, show_api=True)
170
-
171
-
172
-
173
 
 
174
  demo.queue(default_concurrency_limit=240, max_size=240)
175
  demo.launch(max_threads=400, ssr_mode=True)
176
- # https://github.com/gradio-app/gradio/issues/6339
177
-
178
- demo.queue(concurrency_count=50)
179
- demo.launch()
 
1
  import gradio as gr
2
+ import os
3
  from model import models
4
+ from multit2i import (
5
+ load_models, infer_fn, infer_rand_fn, save_gallery,
6
  change_model, warm_model, get_model_info_md, loaded_models,
7
  get_positive_prefix, get_positive_suffix, get_negative_prefix, get_negative_suffix,
8
+ get_recom_prompt_type, set_recom_prompt_preset, get_tag_type, randomize_seed, translate_to_en
9
+ )
10
 
11
# Global limits for the generator UI.
max_images = 8            # number of gallery image slots rendered in the output grid
MAX_SEED = (1 << 32) - 1  # largest unsigned 32-bit value, used as the seed slider maximum
load_models(models)       # eagerly load every model declared in model.py
15
 
16
# Stylesheet for the custom element classes used below.
# NOTE(fix): the original wrote declarations as "width=112px; !important;",
# which is invalid CSS ("=" instead of ":", and "!important" must come before
# the semicolon inside a declaration), so browsers silently dropped the rules.
css = """
.model_info { text-align: center; }
.output { width: 112px !important; height: 112px !important; max-width: 112px !important; max-height: 112px !important; }
.gallery { min-width: 512px !important; min-height: 512px !important; max-height: 1024px !important; }
"""
22
 
23
# Handler for the "activate HF_TOKEN" button in the Settings tab.
def set_hf_token(hf_token):
    """Store the user-provided Hugging Face token and report activation status.

    Args:
        hf_token: Token string from the password textbox; may be empty.

    Returns:
        A ``(status_markdown, textbox_update)`` tuple matching the Gradio
        outputs ``[token_status, hf_token_input]``.
    """
    if hf_token:
        # Export so downstream Hugging Face client libraries pick the token up.
        os.environ["HF_TOKEN"] = hf_token
        # fix: was an f-string with no placeholders (useless f-prefix, ruff F541)
        return "✅ HF_TOKEN установлен!", gr.update(value=hf_token, interactive=True)
    else:
        return "❌ Токен не введен!", gr.update(value="", interactive=True)
30
 
31
+ # Основной интерфейс
32
+ with gr.Blocks(theme="NoCrypt/miku@>=1.2.2", fill_width=True, css=css) as demo:
33
+
34
+ with gr.Tab("Settings"): # Вкладка для настройки HF_TOKEN
 
 
 
 
 
 
 
 
35
  gr.Markdown("### Настройки токена Hugging Face")
 
 
 
36
 
37
+ hf_token_input = gr.Textbox(label="Введите HF Token", type="password", interactive=True)
38
+ confirm_token = gr.Button("Активировать HF_TOKEN")
39
+ token_status = gr.Markdown("🔴 HF_TOKEN не установлен")
40
 
41
+ confirm_token.click(set_hf_token, inputs=[hf_token_input], outputs=[token_status, hf_token_input])
42
 
 
 
 
 
 
 
 
 
 
 
 
 
 
43
  with gr.Tab("Image Generator"):
44
  with gr.Row():
45
  with gr.Column(scale=10):
 
48
  with gr.Accordion("Advanced options", open=False):
49
  neg_prompt = gr.Text(label="Negative Prompt", lines=1, max_lines=8, placeholder="")
50
  with gr.Row():
51
+ width = gr.Slider(label="Width", maximum=2048, step=32, value=0)
52
+ height = gr.Slider(label="Height", maximum=2048, step=32, value=0)
53
+ steps = gr.Slider(label="Inference steps", maximum=100, step=1, value=0)
54
  with gr.Row():
55
+ cfg = gr.Slider(label="Guidance scale", maximum=30.0, step=0.1, value=0)
56
+ seed = gr.Slider(label="Seed", minimum=-1, maximum=MAX_SEED, step=1, value=-1)
57
+ seed_rand = gr.Button("🎲 Randomize Seed", size="sm", variant="secondary")
58
  recom_prompt_preset = gr.Radio(label="Set Presets", choices=get_recom_prompt_type(), value="Common")
59
  with gr.Row():
60
  positive_prefix = gr.CheckboxGroup(label="Use Positive Prefix", choices=get_positive_prefix(), value=[])
 
69
  run_button = gr.Button("Generate Image", variant="primary", scale=8)
70
  random_button = gr.Button("Random Model 🎲", variant="secondary", scale=3)
71
  stop_button = gr.Button('Stop', interactive=False, variant="stop", scale=1)
 
72
  with gr.Group():
73
  model_name = gr.Dropdown(label="Select Model", choices=list(loaded_models.keys()), value=list(loaded_models.keys())[0], allow_custom_value=True)
74
  model_info = gr.Markdown(value=get_model_info_md(list(loaded_models.keys())[0]), elem_classes="model_info")
75
+
76
  with gr.Column(scale=10):
77
  with gr.Group():
78
  with gr.Row():
 
84
  container=True, format="png", object_fit="cover", columns=2, rows=2)
85
  image_files = gr.Files(label="Download", interactive=False)
86
  clear_results = gr.Button("Clear Gallery / Download 🗑️", variant="secondary")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
87
 
88
+ model_name.change(change_model, [model_name], [model_info], queue=True)\
89
+ .success(warm_model, [model_name], None, queue=True)
 
 
 
 
 
 
 
 
 
 
 
 
90
 
 
 
 
91
  for i, o in enumerate(output):
92
  img_i = gr.Number(i, visible=False)
93
+ image_num.change(lambda i, n: gr.update(visible=(i < n)), [img_i, image_num], o)
94
+ gen_event = gr.on(
95
+ triggers=[run_button.click, prompt.submit],
96
+ fn=lambda i, n, m, t1, t2, n1, n2, n3, n4, n5, l1, l2, l3, l4: infer_fn(m, t1, t2, n1, n2, n3, n4, n5, l1, l2, l3, l4) if (i < n) else None,
97
+ inputs=[img_i, image_num, model_name, prompt, neg_prompt, height, width, steps, cfg, seed,
98
+ positive_prefix, positive_suffix, negative_prefix, negative_suffix],
99
+ outputs=[o], queue=True
100
+ )
101
+ o.change(save_gallery, [o, results], [results, image_files])
102
+ stop_button.click(lambda: gr.update(interactive=False), None, stop_button, cancels=[gen_event])
103
+
104
+ clear_prompt.click(lambda: (None, None), None, [prompt, neg_prompt], queue=True)
105
+ clear_results.click(lambda: (None, None), None, [results, image_files], queue=True)
 
 
 
106
  recom_prompt_preset.change(set_recom_prompt_preset, [recom_prompt_preset],
107
+ [positive_prefix, positive_suffix, negative_prefix, negative_suffix], queue=True)
108
+ seed_rand.click(randomize_seed, None, [seed], queue=True)
109
+ trans_prompt.click(translate_to_en, [prompt], [prompt], queue=True)\
110
+ .then(translate_to_en, [neg_prompt], [neg_prompt], queue=True)
 
 
 
111
 
112
+ # Запуск интерфейса
113
  demo.queue(default_concurrency_limit=240, max_size=240)
114
  demo.launch(max_threads=400, ssr_mode=True)