File size: 128 Bytes
8ced4d2 |
1 2 3 4 5 6 7 |
# Module-level registries, populated lazily elsewhere (all start empty/None here).
# NOTE(review): "fastsam" nests under key "instance" while "lisa" nests under
# "inference" — presumably intentional; confirm against the code that fills these.
models_dict = dict(
    fastsam=dict(instance=None),
    lisa=dict(inference=None),
)
embedding_dict = dict()       # cache of embeddings, keyed by consumers of this module
inference_fn_dict = dict()    # cache of inference callables, keyed by consumers
|
8ced4d2 |
1 2 3 4 5 6 7 |
# Module-level registries, populated lazily elsewhere (all start empty/None here).
# NOTE(review): "fastsam" nests under key "instance" while "lisa" nests under
# "inference" — presumably intentional; confirm against the code that fills these.
models_dict = dict(
    fastsam=dict(instance=None),
    lisa=dict(inference=None),
)
embedding_dict = dict()       # cache of embeddings, keyed by consumers of this module
inference_fn_dict = dict()    # cache of inference callables, keyed by consumers
|