complynx committed on
Commit
47c1d0e
·
1 Parent(s): 74d9b0d

remove client

Browse files
Files changed (1) hide show
  1. app.py +1 -13
app.py CHANGED
@@ -15,11 +15,6 @@ import itertools
15
  from gradio_client import Client
16
 
17
 
18
- clients_eps = {
19
- "wizardlm 13B": "https://itsmynti-ehartford-wizardlm-13b-uncensored.hf.space/",
20
- }
21
- clients = {k: Client(u) for k,u in clients_eps.items()}
22
-
23
  client = Client("Be-Bo/llama-3-chatbot_70b")
24
 
25
  def run_llama(_, msg, *__):
@@ -28,12 +23,7 @@ def run_llama(_, msg, *__):
28
  api_name="/chat"
29
  )
30
 
31
- def run_client(cli_name, msg, *__):
32
- yield clients[cli_name].predict(
33
- msg,
34
- api_name="/predict"
35
- )
36
- inf_models = list(hf_models.keys()) + list(openai_models) + list(clients_eps.keys())
37
 
38
  emb_models = ["bge", "minilm"]
39
  splitters = ['ct', 'rct', 'nltk']
@@ -104,8 +94,6 @@ def bot(history, model_name, oepnai_api_key,
104
 
105
  if model_name == "llama 3":
106
  generate_fn = run_llama
107
- if model_name in clients_eps:
108
- generate_fn = run_client
109
  elif model_name in hf_models:
110
  generate_fn = generate_hf
111
  elif model_name in openai_models:
 
15
  from gradio_client import Client
16
 
17
 
 
 
 
 
 
18
  client = Client("Be-Bo/llama-3-chatbot_70b")
19
 
20
  def run_llama(_, msg, *__):
 
23
  api_name="/chat"
24
  )
25
 
26
+ inf_models = list(hf_models.keys()) + list(openai_models)
 
 
 
 
 
27
 
28
  emb_models = ["bge", "minilm"]
29
  splitters = ['ct', 'rct', 'nltk']
 
94
 
95
  if model_name == "llama 3":
96
  generate_fn = run_llama
 
 
97
  elif model_name in hf_models:
98
  generate_fn = generate_hf
99
  elif model_name in openai_models: