Update app.py
app.py
CHANGED
@@ -27,10 +27,10 @@ cache = Cache()
 hf_token = os.environ.get("HF_TOKEN")
 
 llm_models = {
-    "4": VLLM(model="lilmeaty/4", trust_remote_code=True, max_new_tokens=50, temperature=0.1, use_auth_token=hf_token, device="cpu"),
-    "yi-coder": VLLM(model="01-ai/Yi-Coder-1.5B", trust_remote_code=True, max_new_tokens=50, temperature=0.6, use_auth_token=hf_token, device="cpu"),
-    "llama": VLLM(model="meta-llama/Llama-3.2-3B-Instruct", trust_remote_code=True, max_new_tokens=50, temperature=0.1, use_auth_token=hf_token, device="cpu"),
-    "qwen": VLLM(model="Qwen/Qwen2.5-1.5B-Instruct", trust_remote_code=True, max_new_tokens=50, temperature=0.6, use_auth_token=hf_token, device="cpu"),
+    "4": VLLM(model="lilmeaty/4", trust_remote_code=True, use_cuda=False, max_new_tokens=50, temperature=0.1, use_auth_token=hf_token, device="cpu"),
+    "yi-coder": VLLM(model="01-ai/Yi-Coder-1.5B", trust_remote_code=True, use_cuda=False, max_new_tokens=50, temperature=0.6, use_auth_token=hf_token, device="cpu"),
+    "llama": VLLM(model="meta-llama/Llama-3.2-3B-Instruct", trust_remote_code=True, use_cuda=False, max_new_tokens=50, temperature=0.1, use_auth_token=hf_token, device="cpu"),
+    "qwen": VLLM(model="Qwen/Qwen2.5-1.5B-Instruct", trust_remote_code=True, use_cuda=False, max_new_tokens=50, temperature=0.6, use_auth_token=hf_token, device="cpu"),
 }
 
 for llm_name, llm in llm_models.items():
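For orientation, a minimal sketch of how the llm_models dictionary might be exercised on CPU. The loop body that follows llm_models is not part of this hunk, so the invocation below is an assumption based on the standard LangChain LLM interface (.invoke), and generate_all is a hypothetical helper, not code from this Space.

# Hypothetical sketch, not from app.py: assumes VLLM here is a LangChain-style
# LLM wrapper, so each entry in llm_models exposes .invoke(prompt).
def generate_all(llm_models: dict, prompt: str) -> dict:
    """Run the same prompt through every configured model and collect outputs."""
    outputs = {}
    for llm_name, llm in llm_models.items():
        # .invoke() is the standard LangChain Runnable entry point; with
        # max_new_tokens=50 each reply is capped at 50 new tokens.
        outputs[llm_name] = llm.invoke(prompt)
    return outputs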