xingfanxia committed
Commit f8aac7a (unverified)
Parent: e9c4dc3

fix (#518)

Files changed (1):
  modules/chat_func.py  +1 -1
modules/chat_func.py CHANGED
@@ -269,7 +269,7 @@ def predict(
     reply_language="中文",
     should_check_token_count=True,
 ): # repetition_penalty, top_k
-    from llama_index.indices.query.vector_store import GPTVectorStoreIndexQuery
+    from llama_index.indices.vector_store.base_query import GPTVectorStoreIndexQuery
     from llama_index.indices.query.schema import QueryBundle
     from langchain.llms import OpenAIChat
 