Tuchuanhuhuhu
committed
Commit · e7fcf86
Parent(s): 6c5fd1c
Fix variable names (修正变量名)
modules/base_model.py  CHANGED  (+10 -10)
@@ -166,7 +166,7 @@ class BaseLLMModel:

    def prepare_inputs(self, inputs, use_websearch, files, reply_language):
        old_inputs = None
-
+        display_append = []
        limited_context = False
        if files:
            from llama_index.indices.vector_store.base_query import GPTVectorStoreIndexQuery
@@ -216,8 +216,8 @@ class BaseLLMModel:
            nodes = query_object.retrieve(query_bundle)
            reference_results = [n.node.text for n in nodes]
            reference_results = add_source_numbers(reference_results, use_source=False)
-
-
+            display_append = add_details(reference_results)
+            display_append = "\n\n" + "".join(display_append)
            inputs = (
                replace_today(PROMPT_TEMPLATE)
                .replace("{query_str}", inputs)
@@ -233,11 +233,11 @@ class BaseLLMModel:
                logging.debug(f"搜索结果{idx + 1}:{result}")
                domain_name = urllib3.util.parse_url(result["href"]).host
                reference_results.append([result["body"], result["href"]])
-
+                display_append.append(
                    f"{idx+1}. [{domain_name}]({result['href']})\n"
                )
            reference_results = add_source_numbers(reference_results)
-
+            display_append = "\n\n" + "".join(display_append)
            inputs = (
                replace_today(WEBSEARCH_PTOMPT_TEMPLATE)
                .replace("{query}", inputs)
@@ -245,8 +245,8 @@ class BaseLLMModel:
                .replace("{reply_language}", reply_language)
            )
        else:
-
-        return limited_context, old_inputs,
+            display_append = ""
+        return limited_context, old_inputs, display_append, inputs

    def predict(
        self,
@@ -268,7 +268,7 @@ class BaseLLMModel:
        if reply_language == "跟随问题语言(不稳定)":
            reply_language = "the same language as the question, such as English, 中文, 日本語, Español, Français, or Deutsch."

-        limited_context, old_inputs,
+        limited_context, old_inputs, display_append, inputs = self.prepare_inputs(inputs=inputs, use_websearch=use_websearch, files=files, reply_language=reply_language)

        if (
            self.need_api_key and
@@ -304,7 +304,7 @@ class BaseLLMModel:
                    inputs,
                    chatbot,
                    fake_input=old_inputs,
-                    display_append=
+                    display_append=display_append,
                )
                for chatbot, status_text in iter:
                    yield chatbot, status_text
@@ -314,7 +314,7 @@ class BaseLLMModel:
                    inputs,
                    chatbot,
                    fake_input=old_inputs,
-                    display_append=
+                    display_append=display_append,
                )
                yield chatbot, status_text
        except Exception as e:
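
For context, the rename threads a single value, display_append, from prepare_inputs back into predict: prepare_inputs now always initialises it, fills it with a Markdown list of sources (file reference details or numbered search-result links), and returns it as the third of four values; predict unpacks it under the same name and forwards it via display_append=display_append. The sketch below is a minimal, self-contained approximation of that flow under assumptions, not the project's code: SketchModel, the stubbed search results, the simplified add_source_numbers, and the predict body are illustrative stand-ins; only the four-value return shape, the variable names, and the display_append keyword mirror the diff.

# Minimal sketch of the data flow the renamed variables carry.
# Everything except the names prepare_inputs / old_inputs / display_append
# is a hypothetical stand-in, not the project's implementation.

def add_source_numbers(items):
    # Simplified stand-in: number each [body, href] pair like "[1] body".
    return [f"[{i + 1}] {body}" for i, (body, _href) in enumerate(items)]


class SketchModel:
    def prepare_inputs(self, inputs, use_websearch, files, reply_language):
        old_inputs = None
        display_append = []  # initialised up front, as the new code does
        limited_context = False

        if use_websearch:
            # Stand-in for a real web search; each result has "body" and "href".
            search_results = [
                {"body": "Example snippet", "href": "https://example.com/a"},
                {"body": "Another snippet", "href": "https://example.org/b"},
            ]
            reference_results = []
            for idx, result in enumerate(search_results):
                reference_results.append([result["body"], result["href"]])
                # Collect one Markdown link per source for later display.
                display_append.append(f"{idx + 1}. [{result['href']}]({result['href']})\n")
            reference_results = add_source_numbers(reference_results)
            display_append = "\n\n" + "".join(display_append)
            inputs = "\n".join(reference_results) + "\n\n" + inputs
        else:
            display_append = ""  # the plain-chat path still returns a string

        # The corrected contract: four values, display_append in third place.
        return limited_context, old_inputs, display_append, inputs

    def predict(self, inputs, chatbot, use_websearch=False, files=None,
                reply_language="English"):
        # The corrected call site: unpack all four values under the same names.
        limited_context, old_inputs, display_append, inputs = self.prepare_inputs(
            inputs=inputs,
            use_websearch=use_websearch,
            files=files,
            reply_language=reply_language,
        )
        # Downstream the value is forwarded as a keyword argument, mirroring
        # display_append=display_append in the diff; here it is simply appended
        # to the reply so the sources show up in the chat history.
        chatbot.append((old_inputs or inputs, "(model reply)" + display_append))
        return chatbot


if __name__ == "__main__":
    model = SketchModel()
    print(model.predict("What's new?", [], use_websearch=True))

Running the sketch prints a one-turn chat history whose reply ends with the numbered source links, which matches how the diff forwards display_append=display_append into the calls that build the chatbot output.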