fix sd3 bsz
- app.py +4 -4
- requirements.txt +2 -2
app.py
CHANGED
@@ -11,7 +11,7 @@ if USE_HUGGINGFACE_ZEROGPU:  # huggingface ZeroGPU, dynamic GPU allocation
 USE_HUGGINGFACE_ZEROGPU = False
 
 if USE_HUGGINGFACE_ZEROGPU:
-    BATCH_SIZE =
+    BATCH_SIZE = 1
 else:  # run on local machine
     BATCH_SIZE = 1
 
@@ -621,7 +621,7 @@ def make_parameters_section():
     from ncut_pytorch.backbone import SD_KEY_DICT
     default_layer = 'up_2_resnets_1_block' if 'diffusion-3' not in model_name else 'block_23'
     return (gr.Slider(1, 49, step=1, label="Diffusion: Timestep (Noise)", value=5, elem_id="layer", visible=True, info="Noise level, 50 is max noise"),
-            gr.Dropdown(SD_KEY_DICT[model_name], label="Diffusion: Layer and Node", value=default_layer, elem_id="node_type", info="
+            gr.Dropdown(SD_KEY_DICT[model_name], label="Diffusion: Layer and Node", value=default_layer, elem_id="node_type", info="U-Net (v1, v2) or DiT (v3)"))
 
     layer_dict = LAYER_DICT
     if model_name in layer_dict:
@@ -643,7 +643,7 @@ def make_parameters_section():
     embedding_method_dropdown = gr.Dropdown(["tsne_3d", "umap_3d", "umap_shpere", "tsne_2d", "umap_2d"], label="Coloring method", value="tsne_3d", elem_id="embedding_method")
     num_sample_tsne_slider = gr.Slider(100, 10000, step=100, label="t-SNE/UMAP: num_sample", value=300, elem_id="num_sample_tsne", info="Nyström approximation")
     knn_tsne_slider = gr.Slider(1, 100, step=1, label="t-SNE/UMAP: KNN", value=10, elem_id="knn_tsne", info="Nyström approximation")
-    perplexity_slider = gr.Slider(10, 1000, step=10, label="t-SNE:
+    perplexity_slider = gr.Slider(10, 1000, step=10, label="t-SNE: perplexity", value=150, elem_id="perplexity")
     n_neighbors_slider = gr.Slider(10, 1000, step=10, label="UMAP: n_neighbors", value=150, elem_id="n_neighbors")
     min_dist_slider = gr.Slider(0.1, 1, step=0.1, label="UMAP: min_dist", value=0.1, elem_id="min_dist")
     return [model_dropdown, layer_slider, node_type_dropdown, num_eig_slider,
@@ -935,7 +935,7 @@ with demo:
 
     with gr.Row():
         with gr.Column():
-            gr.Markdown("#####
+            gr.Markdown("##### POWERED BY [ncut-pytorch](https://ncut-pytorch.readthedocs.io/) ")
         with gr.Column():
             gr.Markdown("###### Running out of GPU? Try [Demo](https://ncut-pytorch.readthedocs.io/en/latest/demo/) hosted at UPenn")
 
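For context, the net effect of the app.py hunks above: the batch size is 1 in both the ZeroGPU and local branches, the SD3 layer dropdown's info string is completed, the perplexity slider regains its label, default value, and elem_id, and the footer Markdown is filled in. A minimal sketch of the batch-size toggle as it reads after this commit, reconstructed only from the first hunk (not copied from the full file):

```python
# Sketch of app.py lines 11-17 after this commit, reconstructed from the diff hunk above.
USE_HUGGINGFACE_ZEROGPU = False

if USE_HUGGINGFACE_ZEROGPU:  # huggingface ZeroGPU, dynamic GPU allocation
    BATCH_SIZE = 1
else:  # run on local machine
    BATCH_SIZE = 1
```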
requirements.txt
CHANGED
@@ -1,6 +1,5 @@
 torch
 torchvision
-ncut-pytorch>=1.3.4
 opencv-python
 decord
 transformers
@@ -14,4 +13,5 @@ SAM-2 @ git+https://github.com/huzeyann/segment-anything-2.git
 segment-anything @ git+https://github.com/facebookresearch/segment-anything.git@6fdee8f
 mobile-sam @ git+https://github.com/ChaoningZhang/MobileSAM.git@c12dd83
 timm
-open-clip-torch==2.20.0
+open-clip-torch==2.20.0
+ncut-pytorch>=1.3.8
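To confirm a local environment matches the bumped pin before launching the app, a hedged sketch (not part of the repo; `packaging` is assumed to be importable, which it typically is as a transitive dependency of the stack above):

```python
from importlib.metadata import PackageNotFoundError, version
from packaging.version import Version

try:
    # requirements.txt now asks for ncut-pytorch>=1.3.8 instead of >=1.3.4
    assert Version(version("ncut-pytorch")) >= Version("1.3.8"), \
        "upgrade: pip install -U 'ncut-pytorch>=1.3.8'"
except PackageNotFoundError:
    raise SystemExit("ncut-pytorch is not installed; run: pip install -r requirements.txt")
```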