from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_community.document_loaders import PyPDFLoader, DirectoryLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
import os
from src.helper import load_pdf, text_split, download_hugging_face_embeddings
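# Note: load_pdf, text_split and download_hugging_face_embeddings are project helpers from
# src.helper (not shown in this file). They are assumed to wrap the PDF loading, the
# RecursiveCharacterTextSplitter chunking, and the HuggingFaceEmbeddings setup used in the
# commented-out version at the bottom of this script.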
DATA_PATH = r'G:\Chatbot\data'
DB_FAISS_PATH = r'G:\Chatbot\data\vector'
'''extracted_data = load_pdf(r"G:\Chatbot\data")
text_chunks = text_split(extracted_data)
embeddings = download_hugging_face_embeddings()
# Initializing the Faiss
db = FAISS.from_documents(text_chunks, embeddings)
db.save_local(DB_FAISS_PATH)
# I changed DB_FAISS_PATH above
# db.save_local(r"G:\Chatbot\DB_FAISS_PATH")'''
# Build the vector store: load the PDFs, split them into chunks, embed them, and save the FAISS index
def create_vector_db():
    extracted_data = load_pdf(DATA_PATH)
    text_chunks = text_split(extracted_data)
    embeddings = download_hugging_face_embeddings()
    db = FAISS.from_documents(text_chunks, embeddings)
    db.save_local(DB_FAISS_PATH)
    print("### db is created")
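# Minimal entry point (not in the original snippet): build the index when this script is run
# directly, e.g. `python store_index.py` -- the filename is an assumption.
if __name__ == "__main__":
    create_vector_db()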
'''# Create vector database
def create_vector_db():
    loader = DirectoryLoader(DATA_PATH,
                             glob='*.pdf',
                             loader_cls=PyPDFLoader)
    documents = loader.load()
    text_splitter = RecursiveCharacterTextSplitter(chunk_size=500,
                                                   chunk_overlap=50)
    texts = text_splitter.split_documents(documents)
    embeddings = HuggingFaceEmbeddings(model_name='sentence-transformers/all-MiniLM-L6-v2',
                                       model_kwargs={'device': 'cuda'})
    db = FAISS.from_documents(texts, embeddings)
    db.save_local(DB_FAISS_PATH)

create_vector_db()  # Call the function directly in the cell'''
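# Sketch (not in the original) of how the saved index could be reloaded and queried later.
# It assumes the same embedding model is used for loading as for building; depending on the
# langchain version, FAISS.load_local may also require allow_dangerous_deserialization=True
# because the index metadata is pickled. The example query string is a placeholder.
#
# embeddings = download_hugging_face_embeddings()
# db = FAISS.load_local(DB_FAISS_PATH, embeddings)
# docs = db.similarity_search("example query", k=3)
# for d in docs:
#     print(d.page_content[:200])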