Spaces:
Running
on
Zero
Running
on
Zero
phi
committed on
Commit
•
99241b8
1
Parent(s):
3709b60
update
Browse files
- app.py +10 -6
- requirements.txt +0 -4
app.py
CHANGED
@@ -40,6 +40,7 @@ DTYPE = os.environ.get("DTYPE", "bfloat16")
|
|
40 |
DOWNLOAD_SNAPSHOT = bool(int(os.environ.get("DOWNLOAD_SNAPSHOT", "0")))
|
41 |
# ! uploaded model path, will be downloaded to MODEL_PATH
|
42 |
HF_MODEL_NAME = os.environ.get("HF_MODEL_NAME", "DAMO-NLP-SG/seal-13b-chat-a")
|
|
|
43 |
MODEL_PATH = os.environ.get("MODEL_PATH", "./seal-13b-chat-a")
|
44 |
|
45 |
|
@@ -898,16 +899,19 @@ def launch():
|
|
898 |
print(f'Creating in DEBUG MODE')
|
899 |
else:
|
900 |
# ! load the model
|
901 |
-
import vllm
|
902 |
-
from vllm import LLM, SamplingParams
|
903 |
-
|
904 |
-
print(F'VLLM: {vllm.__version__}')
|
905 |
|
906 |
if DOWNLOAD_SNAPSHOT:
|
907 |
print(f'Downloading from HF_MODEL_NAME={hf_model_name} -> {model_path}')
|
908 |
-
|
|
|
|
|
|
|
|
|
909 |
|
910 |
-
|
|
|
|
|
|
|
911 |
ckpt_info = check_model_path(model_path)
|
912 |
|
913 |
print(f'Load path: {model_path} | {ckpt_info}')
|
|
|
40 |
DOWNLOAD_SNAPSHOT = bool(int(os.environ.get("DOWNLOAD_SNAPSHOT", "0")))
|
41 |
# ! uploaded model path, will be downloaded to MODEL_PATH
|
42 |
HF_MODEL_NAME = os.environ.get("HF_MODEL_NAME", "DAMO-NLP-SG/seal-13b-chat-a")
|
43 |
+
HF_TOKEN = os.environ.get("HF_TOKEN", None)
|
44 |
MODEL_PATH = os.environ.get("MODEL_PATH", "./seal-13b-chat-a")
|
45 |
|
46 |
|
|
|
899 |
print(f'Creating in DEBUG MODE')
|
900 |
else:
|
901 |
# ! load the model
|
|
|
|
|
|
|
|
|
902 |
|
903 |
if DOWNLOAD_SNAPSHOT:
|
904 |
print(f'Downloading from HF_MODEL_NAME={hf_model_name} -> {model_path}')
|
905 |
+
if HF_TOKEN is not None:
|
906 |
+
print(f'Load with HF_TOKEN: {HF_TOKEN}')
|
907 |
+
snapshot_download(hf_model_name, local_dir=model_path, use_auth_token=True, token=HF_TOKEN)
|
908 |
+
else:
|
909 |
+
snapshot_download(hf_model_name, local_dir=model_path)
|
910 |
|
911 |
+
import vllm
|
912 |
+
from vllm import LLM, SamplingParams
|
913 |
+
|
914 |
+
print(F'VLLM: {vllm.__version__}')
|
915 |
ckpt_info = check_model_path(model_path)
|
916 |
|
917 |
print(f'Load path: {model_path} | {ckpt_info}')
|
requirements.txt
CHANGED
@@ -5,8 +5,6 @@ evaluate
|
|
5 |
datasets
|
6 |
sacrebleu
|
7 |
websockets
|
8 |
-
fire
|
9 |
-
indic-nlp-library
|
10 |
omegaconf
|
11 |
scikit-learn
|
12 |
jiwer
|
@@ -18,10 +16,8 @@ ray
|
|
18 |
psutil
|
19 |
xformers >= 0.0.19
|
20 |
fastapi
|
21 |
-
tensorboard
|
22 |
geomloss
|
23 |
einops
|
24 |
-
gdown
|
25 |
langdetect
|
26 |
vllm==0.1.4
|
27 |
transformers
|
|
|
5 |
datasets
|
6 |
sacrebleu
|
7 |
websockets
|
|
|
|
|
8 |
omegaconf
|
9 |
scikit-learn
|
10 |
jiwer
|
|
|
16 |
psutil
|
17 |
xformers >= 0.0.19
|
18 |
fastapi
|
|
|
19 |
geomloss
|
20 |
einops
|
|
|
21 |
langdetect
|
22 |
vllm==0.1.4
|
23 |
transformers
|