Spaces:
Sleeping
Sleeping
Tuchuanhuhuhu
committed on
Commit
·
18f19f4
1
Parent(s):
4064938
不再检测API-Key长度是否为51,改为判断非空
Browse files- modules/chat_func.py +2 -2
modules/chat_func.py
CHANGED
@@ -273,7 +273,7 @@ def predict(
|
|
273 |
from llama_index.indices.query.schema import QueryBundle
|
274 |
from langchain.llms import OpenAIChat
|
275 |
|
276 |
-
|
277 |
logging.info("输入为:" + colorama.Fore.BLUE + f"{inputs}" + colorama.Style.RESET_ALL)
|
278 |
if should_check_token_count:
|
279 |
yield chatbot+[(inputs, "")], history, "开始生成回答……", all_token_counts
|
@@ -331,7 +331,7 @@ def predict(
|
|
331 |
else:
|
332 |
display_reference = ""
|
333 |
|
334 |
-
if len(openai_api_key) != 51 and not shared.state.multi_api_key:
|
335 |
status_text = standard_error_msg + no_apikey_msg
|
336 |
logging.info(status_text)
|
337 |
chatbot.append((inputs, ""))
|
|
|
273 |
from llama_index.indices.query.schema import QueryBundle
|
274 |
from langchain.llms import OpenAIChat
|
275 |
|
276 |
+
|
277 |
logging.info("输入为:" + colorama.Fore.BLUE + f"{inputs}" + colorama.Style.RESET_ALL)
|
278 |
if should_check_token_count:
|
279 |
yield chatbot+[(inputs, "")], history, "开始生成回答……", all_token_counts
|
|
|
331 |
else:
|
332 |
display_reference = ""
|
333 |
|
334 |
+
if len(openai_api_key) == 0 and not shared.state.multi_api_key:
|
335 |
status_text = standard_error_msg + no_apikey_msg
|
336 |
logging.info(status_text)
|
337 |
chatbot.append((inputs, ""))
|