format
Signed-off-by: peter szemraj <[email protected]>
app.py CHANGED
@@ -197,7 +197,9 @@ def load_uploaded_file(file_obj, max_pages=20):
 
 if __name__ == "__main__":
     logging.info("Starting app instance")
-    os.environ[
+    os.environ[
+        "TOKENIZERS_PARALLELISM"
+    ] = "false"  # parallelism on tokenizers is buggy with gradio
     logging.info("Loading summ models")
     with contextlib.redirect_stdout(None):
         model, tokenizer = load_model_and_tokenizer(
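For context on the hunk above: the added lines set `TOKENIZERS_PARALLELISM` before any Hugging Face tokenizer is constructed, which is what avoids the fork-related parallelism issues the in-line comment mentions. A minimal, self-contained sketch of the same ordering, with a placeholder checkpoint rather than the ones the app loads:

import os

# Must be set before the first fast (Rust-backed) tokenizer is created; otherwise
# the tokenizers library may have already started its own thread pool and will
# warn about forked processes once gradio spins up its workers.
os.environ["TOKENIZERS_PARALLELISM"] = "false"

from transformers import AutoTokenizer  # imported after the env var is set

# placeholder checkpoint, not the app's LongT5 / Pegasus-X summarization models
tokenizer = AutoTokenizer.from_pretrained("google/long-t5-tglobal-base")
print(tokenizer("hello world")["input_ids"])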
@@ -231,18 +233,20 @@ if __name__ == "__main__":
             gr.Markdown(
                 "Enter text below in the text area. The text will be summarized [using the selected parameters](https://huggingface.co/blog/how-to-generate). Optionally load an example below or upload a file. (`.txt` or `.pdf` - _[link to guide](https://i.imgur.com/c6Cs9ly.png)_)"
             )
-            with gr.Row(variant=
-                with gr.Column(scale=0.5, variant=
+            with gr.Row(variant="compact"):
+                with gr.Column(scale=0.5, variant="compact"):
 
                     model_size = gr.Radio(
-                        choices=["LongT5-base", "Pegasus-X-large"],
+                        choices=["LongT5-base", "Pegasus-X-large"],
+                        label="Model Variant",
+                        value="LongT5-base",
                     )
                     num_beams = gr.Radio(
                         choices=[2, 3, 4],
                         label="Beam Search: # of Beams",
                         value=2,
                     )
-                with gr.Column(variant=
+                with gr.Column(variant="compact"):
                     example_name = gr.Dropdown(
                         _examples,
                         label="Examples",
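The hunk above pins the row and column variants and fills in the `model_size` Radio defaults. A standalone sketch of the input panel those lines build, assuming a Gradio 3.x install that accepts the `variant=` and fractional `scale=` arguments used here; the `_examples` list and the `launch()` footer are placeholders so the snippet runs on its own:

import gradio as gr

_examples = ["example 1", "example 2"]  # placeholder; the app builds this list itself

with gr.Blocks() as demo:
    with gr.Row(variant="compact"):
        with gr.Column(scale=0.5, variant="compact"):
            model_size = gr.Radio(
                choices=["LongT5-base", "Pegasus-X-large"],
                label="Model Variant",
                value="LongT5-base",
            )
            num_beams = gr.Radio(
                choices=[2, 3, 4],
                label="Beam Search: # of Beams",
                value=2,
            )
        with gr.Column(variant="compact"):
            example_name = gr.Dropdown(
                _examples,
                label="Examples",
            )

if __name__ == "__main__":
    demo.launch()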
@@ -290,7 +294,7 @@ if __name__ == "__main__":
         gr.Markdown("---")
         with gr.Column():
             gr.Markdown("### Advanced Settings")
-            with gr.Row(variant=
+            with gr.Row(variant="compact"):
                 length_penalty = gr.inputs.Slider(
                     minimum=0.5,
                     maximum=1.0,
@@ -304,7 +308,7 @@ if __name__ == "__main__":
                     value=1024,
                 )
 
-            with gr.Row(variant=
+            with gr.Row(variant="compact"):
                 repetition_penalty = gr.inputs.Slider(
                     minimum=1.0,
                     maximum=5.0,
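The two hunks above give each advanced slider its own compact row. A short sketch of those rows, using the current `gr.Slider` in place of the legacy `gr.inputs.Slider` alias the app calls; the labels and default values below are illustrative, since the hunks only show the minimum and maximum bounds:

import gradio as gr

with gr.Blocks() as demo:
    gr.Markdown("---")
    with gr.Column():
        gr.Markdown("### Advanced Settings")
        with gr.Row(variant="compact"):
            length_penalty = gr.Slider(
                minimum=0.5,
                maximum=1.0,
                value=0.7,  # assumed default, not from the diff
                label="length penalty",  # assumed label
            )
        with gr.Row(variant="compact"):
            repetition_penalty = gr.Slider(
                minimum=1.0,
                maximum=5.0,
                value=3.5,  # assumed default, not from the diff
                label="repetition penalty",  # assumed label
            )

if __name__ == "__main__":
    demo.launch()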