Your-AI-Doctor / ai_assistant.py
ImranzamanML's picture
Rename ai_doctor.py to ai_assistant.py
bd79569 verified
raw
history blame
1.58 kB
import os

from langchain.callbacks import StdOutCallbackHandler
from langchain.chains import RetrievalQA
from langchain.document_loaders.csv_loader import CSVLoader
from langchain.embeddings import CacheBackedEmbeddings
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.storage import LocalFileStore
from langchain_community.vectorstores import FAISS
from langchain_openai import ChatOpenAI
def create_index():
    """Build a FAISS retriever over the bundled CSV training data.

    Loads ``data/train.csv`` (relative to this module; CSVLoader yields one
    Document per row), embeds the rows with OpenAI embeddings backed by an
    on-disk byte-store cache, and indexes them in an in-memory FAISS store.

    Returns:
        A LangChain retriever over the indexed CSV rows.

    Note:
        Requires ``OPENAI_API_KEY`` to be set in the environment before the
        embeddings are computed (``setup`` does this).
    """
    # Locate the CSV next to this module; os.path.join avoids fragile
    # string concatenation and shadowing the builtin `dir`.
    df_path = os.path.join(os.path.dirname(__file__), "data", "train.csv")
    loader = CSVLoader(file_path=df_path)
    data = loader.load()

    # Embeddings model whose results are cached on disk under ./cache so
    # repeated runs do not re-bill the embeddings endpoint for identical rows.
    embeddings_model = OpenAIEmbeddings()
    store = LocalFileStore("./cache")
    cached_embedder = CacheBackedEmbeddings.from_bytes_store(
        embeddings_model, store, namespace=embeddings_model.model
    )

    # BUG FIX: the original passed the raw `embeddings_model` here, which
    # silently bypassed the cache constructed above. Use the cached embedder.
    vector_store = FAISS.from_documents(data, cached_embedder)
    return vector_store.as_retriever()
def setup(openai_key):
    """Prepare the retriever/LLM pair used by the QA chain.

    Args:
        openai_key: OpenAI API key; exported into the process environment so
            both the embeddings model and the chat model can authenticate.

    Returns:
        Tuple of (retriever over the CSV index, GPT-4 chat model).
    """
    # Credentials must be in place before any OpenAI-backed object is used.
    os.environ["OPENAI_API_KEY"] = openai_key
    return create_index(), ChatOpenAI(model="gpt-4")
def ai_doctor(openai_key, query):
    """Answer ``query`` with retrieval-augmented QA over the CSV data.

    Args:
        openai_key: OpenAI API key forwarded to :func:`setup`.
        query: The user's question, passed verbatim to the QA chain.

    Returns:
        The chain's answer string (``result`` field of the chain output).
    """
    # Build the retriever and LLM (also exports the API key).
    retriever, llm = setup(openai_key)

    # BUG FIX: StdOutCallbackHandler was referenced without being imported,
    # so every call raised NameError. It is now imported at module level;
    # it simply echoes chain progress to stdout.
    handler = StdOutCallbackHandler()
    qa_with_sources_chain = RetrievalQA.from_chain_type(
        llm=llm,
        retriever=retriever,
        callbacks=[handler],
        return_source_documents=True,
    )

    # The chain returns a dict with 'result' plus 'source_documents';
    # callers only receive the answer text, matching the original contract.
    res = qa_with_sources_chain({"query": query})
    return res["result"]