try distant m7b01
Changed files:
- app.py (+9 −0)
- backend/query_llm.py (+2 −2)
app.py CHANGED
@@ -21,6 +21,13 @@ def run_llama(_, msg, *__):
         message=msg,
         api_name="/chat"
     )
+
+m7b01 = Client("https://zhuraavl-mistralai-mistral-7b-v0-1.hf.space/")
+def run_m7b01(_, msg, *__):
+    yield m7b01.predict(
+        msg,
+        api_name="/predict"
+    )
 inf_models = list(hf_models.keys()) + list(openai_models)
 
 emb_models = ["bge", "minilm"]
@@ -92,6 +99,8 @@ def bot(history, model_name, oepnai_api_key,
 
     if model_name == "llama 3":
         generate_fn = run_llama
+    if model_name == "mistral-7B 0.1":
+        generate_fn = run_m7b01
     elif model_name in hf_models:
         generate_fn = generate_hf
     elif model_name in openai_models:
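The new path calls a remote Mistral-7B v0.1 Space through a Gradio client rather than the Inference API, and exposes it as a generator so it plugs into the same `generate_fn` slot as `run_llama`. A minimal sketch of exercising that client on its own follows; it assumes `Client` comes from the `gradio_client` package (the import is not part of this diff) and that the target Space is running.

# Sketch only: standalone use of the remote client added in app.py.
# Assumes `gradio_client` is installed and the Space is reachable.
from gradio_client import Client

m7b01 = Client("https://zhuraavl-mistralai-mistral-7b-v0-1.hf.space/")

def run_m7b01(_, msg, *__):
    # Forward the user message to the Space's /predict endpoint and yield
    # the completed response, matching run_llama's generator interface.
    yield m7b01.predict(msg, api_name="/predict")

if __name__ == "__main__":
    for chunk in run_m7b01(None, "Hello, who are you?"):
        print(chunk)

The extra `if model_name == "mistral-7B 0.1"` branch is checked before the `elif model_name in hf_models` branch, so the remote Space takes precedence over the Inference API entry with the same key.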
backend/query_llm.py CHANGED
@@ -13,8 +13,8 @@ HF_TOKEN = os.getenv("HF_TOKEN")
 hf_models = {
     "mistral-7B": "mistralai/Mistral-7B-Instruct-v0.2",
     "mistral-7B 0.1": "mistralai/Mistral-7B-v0.1",
-    "vicuna-13b":"lmsys/vicuna-13b-v1.5",
-    "WizardLM-30B": "cognitivecomputations/WizardLM-30B-Uncensored",
+    # "vicuna-13b":"lmsys/vicuna-13b-v1.5",
+    # "WizardLM-30B": "cognitivecomputations/WizardLM-30B-Uncensored",
     "llama 3": "meta-llama/Meta-Llama-3-70B-Instruct",
 }
 openai_models = {"gpt-4o","gpt-3.5-turbo-0125"}
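Each value in `hf_models` is a Hub model id that the backend hands to a hosted inference client. The actual `generate_hf` in backend/query_llm.py is not shown in this diff, so the sketch below is only an assumption of how such an entry might be consumed, using `huggingface_hub.InferenceClient`; the function name, generation parameters, and prompt handling are illustrative.

# Hedged sketch: consuming an hf_models entry via the hosted inference client.
import os
from huggingface_hub import InferenceClient

HF_TOKEN = os.getenv("HF_TOKEN")

def generate_hf_sketch(model_id: str, prompt: str):
    # model_id is a value from hf_models, e.g. "mistralai/Mistral-7B-Instruct-v0.2".
    client = InferenceClient(model_id, token=HF_TOKEN)
    # Stream tokens as they arrive from the hosted endpoint.
    for token in client.text_generation(prompt, max_new_tokens=256, stream=True):
        yield token

# Example (illustrative): route through the registry the same way bot() picks a name.
# for tok in generate_hf_sketch(hf_models["mistral-7B"], "What is RAG?"):
#     print(tok, end="")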