Sergidev committed on
Commit 0b503f9
1 Parent(s): 83ccd10
Files changed (1)
  1. modules/pmbl.py +2 -2
modules/pmbl.py CHANGED
@@ -102,7 +102,7 @@ class PMBL:
             yield chunk
 
     def generate_response_task(self, system_prompt, prompt, n_ctx):
-        llm = Llama(model_path=self.model_path, n_ctx=n_ctx, n_threads=8)
+        llm = Llama(model_path=self.model_path, n_ctx=n_ctx, n_threads=8, n_gpu_layers=-1, mlock=True)
 
         response = llm(
             system_prompt,
@@ -147,7 +147,7 @@ class PMBL:
         conn.close()
 
     def generate_topic(self, prompt, response):
-        llm = Llama(model_path=self.model_path, n_ctx=2690, n_threads=8)
+        llm = Llama(model_path=self.model_path, n_ctx=1690, n_threads=2, n_gpu_layers=-1, mlock=True)
 
         system_prompt = f"Based on the following interaction between a user and an AI assistant, generate a concise topic for the conversation in 2-4 words:\n\nUser: {prompt}\nAssistant: {response}\n\nTopic:"
 
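For context, a minimal usage sketch (not part of the commit) of what the updated Llama constructor arguments do in llama-cpp-python. The model path and prompt below are placeholders, the n_ctx/n_threads values are taken from the new generate_topic call, and the keyword names mirror the commit; note that some library versions spell the memory-lock flag use_mlock rather than mlock.

# Sketch only: placeholder path and prompt; keyword names follow the commit.
from llama_cpp import Llama

llm = Llama(
    model_path="models/example.gguf",  # placeholder; the module passes self.model_path
    n_ctx=1690,        # context window in tokens (the commit lowers this from 2690 for topic generation)
    n_threads=2,       # CPU threads for work that is not offloaded to the GPU
    n_gpu_layers=-1,   # -1 asks llama.cpp to offload all model layers to the GPU
    mlock=True,        # request that the model weights stay resident in RAM (no swapping)
)

# Same call pattern the module uses: a plain prompt string returns a completion dict.
out = llm("Topic for this conversation in 2-4 words:", max_tokens=8)
print(out["choices"][0]["text"])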