Update modules/pmbl.py
modules/pmbl.py  +4 -3
@@ -122,16 +122,17 @@ class PMBL:
         self.save_chat_history(prompt, response_text)
 
     def calculate_context(self, system_prompt, formatted_history):
-        system_prompt_tokens = len(system_prompt) //
-        history_tokens = len(formatted_history) //
+        system_prompt_tokens = len(system_prompt) // 2
+        history_tokens = len(formatted_history) // 2
         max_response_tokens = 1500
-        context_ceiling =
+        context_ceiling = 31690
 
         available_tokens = context_ceiling - system_prompt_tokens - max_response_tokens
         if history_tokens <= available_tokens:
             return system_prompt_tokens + history_tokens + max_response_tokens
         else:
             return context_ceiling  # Return the maximum context size
+
     def sleep_mode(self):
         conn = sqlite3.connect('chat_history.db')
         c = conn.cursor()
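For context, the updated calculate_context estimates token counts as roughly one token per two characters and caps the result at the 31690-token ceiling. The following is a minimal standalone sketch of that logic; the sample inputs and the print call are illustrative only and not part of the module.

def calculate_context(system_prompt, formatted_history):
    # Rough estimate: ~2 characters per token for both prompt and history
    system_prompt_tokens = len(system_prompt) // 2
    history_tokens = len(formatted_history) // 2
    max_response_tokens = 1500
    context_ceiling = 31690  # hard cap on the context window

    available_tokens = context_ceiling - system_prompt_tokens - max_response_tokens
    if history_tokens <= available_tokens:
        # History fits: size the context to exactly what is needed
        return system_prompt_tokens + history_tokens + max_response_tokens
    else:
        # History would overflow: fall back to the maximum context size
        return context_ceiling

# Illustrative call: a 400-character prompt and a 10,000-character history
# give 200 + 5000 + 1500 = 6700 tokens, well under the 31690 ceiling.
print(calculate_context("p" * 400, "h" * 10000))  # 6700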