import os
import chromadb
import gradio as gr
from dotenv import load_dotenv
from openai import OpenAI
from langchain_community.embeddings import AnyscaleEmbeddings
from langchain_community.vectorstores import Chroma
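
# System prompt: constrains the model to answer strictly from the retrieved context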
qna_system_message = """
You are an assistant to an insurance firm that answers user queries on policy documents.
User input will contain the context required to answer user questions.
This context will begin with the word: ###Context.
The context contains references to specific portions of a document relevant to the user query.
User questions will begin with the word: ###Question.
Please answer user questions only using the context provided in the input.
Do not mention anything about the context in your final answer. Your response should only contain the answer to the question.
If the answer is not found in the context, respond "Sorry, I cannot answer your question. Please contact our representative on the hotline 1-800-AWESOMEINSURER".
"""
qna_user_message_template = """
###Context
Here are some documents that are relevant to the question mentioned below.
{context}
###Question
{question}
"""
load_dotenv()
anyscale_api_key = os.environ['ANYSCALE_API_KEY']
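
# Anyscale exposes an OpenAI-compatible endpoint, so the standard OpenAI client
# works once it is pointed at the Anyscale base_url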
client = OpenAI(
    base_url="https://api.endpoints.anyscale.com/v1",
    api_key=anyscale_api_key
)
qna_model = 'mlabonne/NeuralHermes-2.5-Mistral-7B'
embedding_model = AnyscaleEmbeddings(
    client=client,
    model='thenlper/gte-large'
)
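
# Open the persisted Chroma collection that holds the policy document chunks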
chromadb_client = chromadb.PersistentClient(path='./policy_db')
vectorstore_persisted = Chroma(
    client=chromadb_client,
    collection_name="policy-text",
    embedding_function=embedding_model
)
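
# Fetch the 5 most similar chunks for each query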
retriever = vectorstore_persisted.as_retriever(
    search_type='similarity',
    search_kwargs={'k': 5}
)
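
# Answer a user question with retrieval-augmented generation (RAG):
# retrieve relevant chunks, build the prompt, and call the chat model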
def predict(question):
    # Pull the most relevant document chunks and concatenate them into one context string
    relevant_document_chunks = retriever.invoke(question)
    context_list = [d.page_content for d in relevant_document_chunks]
    context_for_query = "\n".join(context_list)

    prompt = [
        {'role': 'system', 'content': qna_system_message},
        {'role': 'user', 'content': qna_user_message_template.format(
            context=context_for_query,
            question=question
        )}
    ]

    try:
        # temperature=0 keeps answers deterministic and grounded in the context
        response = client.chat.completions.create(
            model=qna_model,
            messages=prompt,
            temperature=0
        )
        prediction = response.choices[0].message.content.strip()
    except Exception as e:
        prediction = f'Sorry, I encountered the following error: \n {e}'

    return prediction
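
# Gradio UI: a single query textbox wired to the predict function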
textbox = gr.Textbox(placeholder="Enter your query here", lines=6)

demo = gr.Interface(
    fn=predict,
    inputs=textbox,
    outputs="text",
    title="AMA on your insurance policy document",
    description="This web app presents an interface to ask questions on the contents of your health insurance policy.",
    article="Note that questions that are not relevant to the policy will not be answered.",
    examples=[
        ["My trip was delayed and I paid 45, how much am I covered for?"],
        ["I just had a baby, is baby food covered?"],
        ["How is the gauze used in my operation covered?"]
    ],
    concurrency_limit=16
)

demo.queue()
demo.launch(auth=("demouser", os.getenv('PASSWD')))