update
- .gitignore +2 -1
- app.py +8 -8
- inference.py +4 -2
.gitignore
CHANGED
@@ -1,2 +1,3 @@
 experiments/*
-trash/*
+trash/*
+scripts/*
app.py
CHANGED
@@ -101,7 +101,7 @@ def reload_custom_diffusion_weight_list() -> dict:
     return gr.update(choices=find_weight_files())
 
 
-def create_inference_demo(func: inference_fn, device=args.device) -> gr.Blocks:
+def create_inference_demo(func: inference_fn, device) -> gr.Blocks:
     with gr.Blocks() as demo:
         with gr.Row():
             with gr.Column():
@@ -115,10 +115,10 @@ def create_inference_demo(func: inference_fn, device=args.device) -> gr.Blocks:
                     label='Prompt',
                     max_lines=1,
                     placeholder='Example: "cat <R> stone"')
-                placeholder_string = gr.Textbox(
-                    label='Placeholder String',
-                    max_lines=1,
-                    placeholder='Example: "<R>"')
+                # placeholder_string = gr.Textbox(
+                #     label='Placeholder String',
+                #     max_lines=1,
+                #     placeholder='Example: "<R>"')
 
                 with gr.Accordion('Other Parameters', open=False):
                     guidance_scale = gr.Slider(label='Classifier-Free Guidance Scale',
@@ -149,7 +149,7 @@ def create_inference_demo(func: inference_fn, device=args.device) -> gr.Blocks:
             inputs=[
                 model_id,
                 prompt,
-                placeholder_string,
+                # placeholder_string,
                 num_samples,
                 guidance_scale,
                 device
@@ -160,7 +160,7 @@ def create_inference_demo(func: inference_fn, device=args.device) -> gr.Blocks:
             inputs=[
                 model_id,
                 prompt,
-                placeholder_string,
+                # placeholder_string,
                 num_samples,
                 guidance_scale,
                 device
@@ -186,7 +186,7 @@ with gr.Blocks(css='style.css') as demo:
 
     with gr.Tabs():
         with gr.TabItem('Test'):
-            create_inference_demo(inference_fn, args.device)
+            create_inference_demo(inference_fn, device=args.device)
 
 demo.launch(
     enable_queue=args.enable_queue,
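The app.py change above removes the Placeholder String textbox and drops it from both inputs lists. Gradio hands the components listed in inputs to the callback positionally, so the matching parameter also has to disappear from inference_fn (see the inference.py diff below). A minimal, self-contained sketch of that wiring, using made-up component values and a stand-in callback rather than the Space's actual code:

import gradio as gr

def run(model_id, prompt, num_samples, guidance_scale, device):
    # Stand-in for inference_fn after this commit: no placeholder_string argument.
    return f'{model_id}: {prompt} x{num_samples} (cfg={guidance_scale}, {device})'

with gr.Blocks() as demo:
    model_id = gr.Textbox(label='Model ID', value='experiments/demo')
    prompt = gr.Textbox(label='Prompt', max_lines=1, placeholder='Example: "cat <R> stone"')
    num_samples = gr.Slider(label='Number of Samples', minimum=1, maximum=8, step=1, value=4)
    guidance_scale = gr.Slider(label='Classifier-Free Guidance Scale', minimum=1, maximum=15, value=7.5)
    device = gr.Textbox(label='Device', value='cuda')
    result = gr.Textbox(label='Result')
    run_button = gr.Button('Run')
    # One component per positional parameter of run(), in the same order.
    run_button.click(fn=run,
                     inputs=[model_id, prompt, num_samples, guidance_scale, device],
                     outputs=result)

demo.launch()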
inference.py
CHANGED
@@ -44,7 +44,7 @@ def make_image_grid(imgs, rows, cols):
 def inference_fn(
         model_id,
         prompt,
-        placeholder_string,
+        # placeholder_string,
         num_samples,
         guidance_scale,
         device
@@ -72,7 +72,9 @@ def inference_fn(
 
     for prompt in prompt_list:
         # insert relation prompt <R>
-        prompt = prompt.lower().replace("<r>", "<R>").format(placeholder_string)
+        # prompt = prompt.lower().replace("<r>", "<R>").format(placeholder_string)
+        prompt = prompt.lower().replace("<r>", "<R>").format("<R>")
+
 
         # make sub-folder
         image_folder = os.path.join(image_root_folder, prompt, 'samples')