Omnibus committed
Commit 2c9b2c2
1 Parent(s): 07095f2

Update app.py

Files changed (1)
app.py +3 -3
app.py CHANGED
@@ -45,8 +45,8 @@ opts=[]
 def generate(prompt, history,max_new_tokens,health,seed,temperature=temperature,top_p=top_p,repetition_penalty=repetition_penalty):
     opts.clear()
     #client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
-    #client = InferenceClient("abacusai/Slerp-CM-mist-dpo")
-    client = AsyncInferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
+    client = InferenceClient()
+    #client = AsyncInferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
     temperature = float(temperature)
     if temperature < 1e-2:
         temperature = 1e-2
@@ -79,7 +79,7 @@ def generate(prompt, history,max_new_tokens,health,seed,temperature=temperature,
     if cnt > MAX_HISTORY:
         history1 = compress_history(str(history), temperature, top_p, repetition_penalty)
     formatted_prompt = format_prompt(f"{GAME_MASTER.format(history=history1,stats=stats,dice=random.randint(1,10))}, {prompt}", history)
-    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
+    stream = client.text_generation(prompt=formatted_prompt,model="abacusai/Slerp-CM-mist-dpo", **generate_kwargs, stream=True, details=True, return_full_text=False)
     output = ""
 
     for response in stream:
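
What the commit changes in practice: the client is no longer constructed around a fixed model (and no longer wrapped in AsyncInferenceClient); a bare InferenceClient is created and the model is selected per call via text_generation's model= argument. Below is a minimal sketch of that pattern, assuming huggingface_hub is installed; the prompt text and the generate_kwargs values are placeholders standing in for what app.py builds from generate()'s arguments.

from huggingface_hub import InferenceClient

client = InferenceClient()  # no model pinned at construction

# Placeholder values; app.py derives these from generate()'s arguments.
generate_kwargs = dict(
    temperature=0.9,
    max_new_tokens=512,
    top_p=0.95,
    repetition_penalty=1.0,
    seed=42,
)

stream = client.text_generation(
    prompt="<formatted game-master prompt>",  # placeholder for formatted_prompt
    model="abacusai/Slerp-CM-mist-dpo",  # per-call model selection, as in the new line 82
    **generate_kwargs,
    stream=True,
    details=True,
    return_full_text=False,
)

output = ""
for response in stream:
    output += response.token.text  # with stream=True and details=True, items arrive one token at a time

One consequence worth noting: AsyncInferenceClient.text_generation returns an async iterator that must be consumed with async for, so switching back to the synchronous InferenceClient is what makes the unchanged for response in stream: loop at line 85 valid as written.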