Commit de12b06 by Tuchuanhuhuhu
Parent: 77f2c42
修复system prompt不可用的问题 (Fix the issue of the system prompt being unusable)
Files changed:
- ChuanhuChatbot.py      +1 -0
- modules/base_model.py  +3 -1
- modules/models.py      +0 -3
- modules/utils.py       +1 -1
ChuanhuChatbot.py
CHANGED
@@ -292,6 +292,7 @@ with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
     keyTxt.submit(**get_usage_args)
 
     # Template
+    systemPromptTxt.change(current_model.value.set_system_prompt, [systemPromptTxt], None)
     templateRefreshBtn.click(get_template_names, None, [templateFileSelectDropdown])
     templateFileSelectDropdown.change(
         load_template,
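This one-line fix is the core of the commit: the system-prompt textbox previously had no change handler, so edits to it never reached the model. A minimal sketch of the pattern, using stand-in names (DummyModel, demo) rather than the app's real wiring:

import gradio as gr

class DummyModel:
    """Stand-in for the app's current_model.value object."""
    def __init__(self):
        self.system_prompt = ""

    def set_system_prompt(self, new_system_prompt):
        self.system_prompt = new_system_prompt

model = DummyModel()

with gr.Blocks() as demo:
    systemPromptTxt = gr.Textbox(label="System Prompt")
    # .change(fn, inputs, outputs) fires on every edit and passes the textbox
    # value to fn; outputs=None means only Python-side state is updated and
    # no component is re-rendered.
    systemPromptTxt.change(model.set_system_prompt, [systemPromptTxt], None)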
modules/base_model.py
CHANGED
@@ -46,7 +46,6 @@ class BaseLLMModel:
|
|
46 |
self.all_token_counts = []
|
47 |
self.model_name = model_name
|
48 |
self.model_type = ModelType.get_type(model_name)
|
49 |
-
self.api_key = None
|
50 |
self.token_upper_limit = MODEL_TOKEN_LIMIT[model_name]
|
51 |
self.max_generation_token = max_generation_token if max_generation_token is not None else self.token_upper_limit
|
52 |
self.interrupted = False
|
@@ -347,6 +346,9 @@ class BaseLLMModel:
|
|
347 |
def set_top_p(self, new_top_p):
|
348 |
self.top_p = new_top_p
|
349 |
|
|
|
|
|
|
|
350 |
def reset(self):
|
351 |
self.history = []
|
352 |
self.all_token_counts = []
|
|
|
46 |
self.all_token_counts = []
|
47 |
self.model_name = model_name
|
48 |
self.model_type = ModelType.get_type(model_name)
|
|
|
49 |
self.token_upper_limit = MODEL_TOKEN_LIMIT[model_name]
|
50 |
self.max_generation_token = max_generation_token if max_generation_token is not None else self.token_upper_limit
|
51 |
self.interrupted = False
|
|
|
346 |
def set_top_p(self, new_top_p):
|
347 |
self.top_p = new_top_p
|
348 |
|
349 |
+
def set_system_prompt(self, new_system_prompt):
|
350 |
+
self.system_prompt = new_system_prompt
|
351 |
+
|
352 |
def reset(self):
|
353 |
self.history = []
|
354 |
self.all_token_counts = []
|
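With the setter moved onto BaseLLMModel, every backend picks it up by inheritance and the stored prompt is read on the next request. A rough sketch of how such a stored prompt is typically consumed (build_messages is a hypothetical helper, not part of this diff):

class BaseLLMModel:
    def __init__(self, model_name, system_prompt="You are a helpful assistant."):
        self.model_name = model_name
        self.system_prompt = system_prompt
        self.history = []

    def set_system_prompt(self, new_system_prompt):
        # Invoked by the UI's change handler; takes effect on the next call.
        self.system_prompt = new_system_prompt

    def build_messages(self):
        # Hypothetical: prepend the system prompt to the chat history in the
        # OpenAI-style message format before sending a request.
        return [{"role": "system", "content": self.system_prompt}] + self.history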
modules/models.py
CHANGED
@@ -63,9 +63,6 @@ class OpenAIClient(BaseLLMModel):
|
|
63 |
return input_token_count + system_prompt_token_count
|
64 |
return input_token_count
|
65 |
|
66 |
-
def set_system_prompt(self, new_system_prompt):
|
67 |
-
self.system_prompt = new_system_prompt
|
68 |
-
|
69 |
def billing_info(self):
|
70 |
try:
|
71 |
curr_time = datetime.datetime.now()
|
|
|
63 |
return input_token_count + system_prompt_token_count
|
64 |
return input_token_count
|
65 |
|
|
|
|
|
|
|
66 |
def billing_info(self):
|
67 |
try:
|
68 |
curr_time = datetime.datetime.now()
|
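Deleting the duplicate definition here is safe because attribute lookup now resolves the method on the base class, so behavior is unchanged while the logic lives in one place. A minimal sketch of the resolution:

class BaseLLMModel:
    def set_system_prompt(self, new_system_prompt):
        self.system_prompt = new_system_prompt

class OpenAIClient(BaseLLMModel):
    pass  # no longer defines its own set_system_prompt

client = OpenAIClient()
client.set_system_prompt("Answer concisely.")
assert client.system_prompt == "Answer concisely."  # inherited setter works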
modules/utils.py
CHANGED
@@ -229,7 +229,7 @@ def get_template_names(plain=False):
 
 
 def get_template_content(templates, selection, original_system_prompt):
-    logging.
+    logging.debug(f"应用模板中,选择为{selection},原始系统提示为{original_system_prompt}")
     try:
        return templates[selection]
     except:
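The removed line was a truncated `logging.` statement, a syntax error; the fix completes it as a debug log (the Chinese message reads roughly "Applying template; selection is {selection}, original system prompt is {original_system_prompt}"). A sketch of the repaired helper, assuming the except branch falls back to the caller's original prompt:

import logging

def get_template_content(templates, selection, original_system_prompt):
    logging.debug(f"Applying template; selection: {selection}, "
                  f"original system prompt: {original_system_prompt}")
    try:
        return templates[selection]
    except KeyError:
        # Assumed fallback: keep the user's original prompt when the
        # selection is not in the loaded template table.
        return original_system_prompt

# Usage:
templates = {"Translator": "You are a translation engine."}
print(get_template_content(templates, "Translator", "default"))  # template text
print(get_template_content(templates, "Missing", "default"))     # "default"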