fix (#518)
modules/chat_func.py  +1 -1
@@ -269,7 +269,7 @@ def predict(
     reply_language="中文",
     should_check_token_count=True,
 ): # repetition_penalty, top_k
-    from llama_index.indices.
+    from llama_index.indices.vector_store.base_query import GPTVectorStoreIndexQuery
     from llama_index.indices.query.schema import QueryBundle
     from langchain.llms import OpenAIChat
 
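For context on the one-line fix: the new line imports GPTVectorStoreIndexQuery from the llama_index 0.5-era module path, alongside the existing QueryBundle and OpenAIChat imports. Below is a minimal sketch of how these three names are typically wired together for document retrieval; it is not the repository's code, and the index object, the constructor arguments, and the question string are assumptions/placeholders.

# Hedged sketch, assuming the llama_index 0.5.x API: GPTVectorStoreIndexQuery is
# built from an existing vector-store index and retrieve() accepts a QueryBundle.
# `index` and `user_question` are placeholders, not names from the patched file.
from llama_index.indices.vector_store.base_query import GPTVectorStoreIndexQuery
from llama_index.indices.query.schema import QueryBundle
from langchain.llms import OpenAIChat

llm = OpenAIChat(model_name="gpt-3.5-turbo", temperature=0)  # chat model used elsewhere in predict()

# Assumption: `index` is a previously constructed vector-store index over the user's files.
query_object = GPTVectorStoreIndexQuery(
    index.index_struct,   # assumption: the query class wraps the index's struct
    similarity_top_k=5,   # assumption: how many of the most similar chunks to retrieve
)
user_question = "What does the uploaded document say about this topic?"
query_bundle = QueryBundle(user_question)
nodes = query_object.retrieve(query_bundle)  # nodes holding the retrieved document chunks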