import os
from langchain_backend.utils import create_prompt_llm_chain, create_retriever, getPDF
from langchain.chains import create_retrieval_chain
# The OpenAI API key is expected to be present in the environment for the LLM chain;
# the return value here is unused.
os.environ.get("OPENAI_API_KEY")
def get_llm_answer(system_prompt, user_prompt, pdf_url, model):
    print('model: ', model)
    # Load the PDF pages, falling back to the default document when no URL is given.
    if pdf_url:
        pages = getPDF(pdf_url)
    else:
        pages = getPDF()
    # Build a retriever over the PDF pages and combine it with the prompt/LLM chain.
    retriever = create_retriever(pages)
    rag_chain = create_retrieval_chain(retriever, create_prompt_llm_chain(system_prompt, model))
    # Run the RAG chain against the user prompt and return the full result dict.
    results = rag_chain.invoke({"input": user_prompt})
    return results
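

# Minimal usage sketch (assumptions: the helpers in langchain_backend.utils are importable,
# OPENAI_API_KEY is set in the environment, and the PDF URL and model name below are
# placeholders, not values from this repo).
if __name__ == "__main__":
    answer = get_llm_answer(
        system_prompt="Answer questions using only the provided context.",
        user_prompt="What is the document about?",
        pdf_url="https://example.com/sample.pdf",  # hypothetical URL for illustration
        model="gpt-4o-mini",                       # hypothetical model name
    )
    # create_retrieval_chain returns a dict; the generated text lives under the "answer" key.
    print(answer["answer"])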