UPDATE: Flashrank
- functions.py +5 -6
- requirements.txt +1 -0
functions.py
CHANGED
@@ -11,7 +11,8 @@ from langchain_core.chat_history import BaseChatMessageHistory
 from langchain.storage import InMemoryStore
 from langchain.docstore.document import Document
 from langchain_huggingface import HuggingFaceEmbeddings
-from …
+from langchain.retrievers import ContextualCompressionRetriever
+from langchain.retrievers.document_compressors import FlashrankRerank
 from supabase.client import create_client
 from qdrant_client import QdrantClient
 from langchain_groq import ChatGroq

@@ -118,13 +119,11 @@ def addDocuments(text: str, vectorstore: str):
     global embeddings
     global store
     parentSplitter = RecursiveCharacterTextSplitter(
-        chunk_size = …,
-        chunk_overlap = 240,
+        chunk_size = 2100,
         add_start_index = True
     )
     childSplitter = RecursiveCharacterTextSplitter(
         chunk_size = 300,
-        chunk_overlap = 90,
         add_start_index = True
     )
     texts = [Document(page_content = text)]

@@ -191,9 +190,9 @@ def answerQuery(query: str, vectorstore: str, llmModel: str = "llama3-70b-8192")
         vectorstore=vectorstore,
         docstore=store,
         child_splitter=RecursiveCharacterTextSplitter(),
-
+        search_kwargs={"k": 20}
     )
-    compressor = …
+    compressor = FlashrankRerank()
     retriever = ContextualCompressionRetriever(
         base_compressor=compressor, base_retriever=retriever
     )
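Taken together, these changes widen the candidate set the base retriever returns (k=20) and pass it through a FlashRank reranker before the documents reach the LLM. The following is a minimal sketch of the resulting pipeline, combining the indexing-time splitters from addDocuments with the query-time retriever from answerQuery; the ParentDocumentRetriever class, the in-memory Qdrant collection, and the embedding model name are assumptions inferred from the surrounding imports and keyword arguments, not shown in this diff.

# Sketch only: pieces not visible in the diff (retriever class, Qdrant setup,
# embedding model) are assumptions, marked below.
from langchain.docstore.document import Document
from langchain.retrievers import ContextualCompressionRetriever, ParentDocumentRetriever
from langchain.retrievers.document_compressors import FlashrankRerank
from langchain.storage import InMemoryStore
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.vectorstores import Qdrant
from langchain_huggingface import HuggingFaceEmbeddings
from qdrant_client import QdrantClient
from qdrant_client.http import models

embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")  # assumed model
store = InMemoryStore()

# Assumed: an in-memory Qdrant collection sized to the embedding dimension (384 for MiniLM).
client = QdrantClient(":memory:")
client.create_collection(
    collection_name="docs",
    vectors_config=models.VectorParams(size=384, distance=models.Distance.COSINE),
)
vectorstore = Qdrant(client=client, collection_name="docs", embeddings=embeddings)

# Base retriever mirrors the diff: parent/child splitting plus a wider search (k=20)
# so the reranker has enough candidates to reorder.
base_retriever = ParentDocumentRetriever(
    vectorstore=vectorstore,
    docstore=store,
    child_splitter=RecursiveCharacterTextSplitter(chunk_size=300, add_start_index=True),
    parent_splitter=RecursiveCharacterTextSplitter(chunk_size=2100, add_start_index=True),
    search_kwargs={"k": 20},
)
base_retriever.add_documents([Document(page_content="Long source text to index ...")])

# FlashRank cross-encoder reranking, wrapped as a document compressor.
compressor = FlashrankRerank()
retriever = ContextualCompressionRetriever(
    base_compressor=compressor, base_retriever=base_retriever
)

docs = retriever.invoke("example question about the indexed text")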
requirements.txt
CHANGED
@@ -1,5 +1,6 @@
 huggingface-hub
 fastapi
+flashrank
 gradio
 langchain
 langchain-community
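The new flashrank dependency is what backs FlashrankRerank: it scores query/passage pairs with a small reranking model that runs locally. A rough standalone sketch of the library's own usage (model choice left to the library default, passage texts are placeholders) might look like:

# Hedged sketch of the flashrank library that FlashrankRerank wraps.
from flashrank import Ranker, RerankRequest

ranker = Ranker()  # downloads a small default reranking model on first use
request = RerankRequest(
    query="what does the knowledge base say about X?",
    passages=[
        {"id": 1, "text": "First candidate passage."},
        {"id": 2, "text": "Second candidate passage."},
    ],
)
results = ranker.rerank(request)  # passages with relevance scores, best first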