pgurazada1 committed on
Commit fde2252
1 Parent(s): 4fc330f

Upload app.py

Files changed (1)
  1. app.py +107 -0
app.py ADDED
@@ -0,0 +1,107 @@
+ import os
+ import chromadb
+
+ import gradio as gr
+
+ from dotenv import load_dotenv
+ from openai import OpenAI
+
+ from langchain_community.embeddings import AnyscaleEmbeddings
+ from langchain_community.vectorstores import Chroma
+
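+ # Prompt templates: the system message constrains answers to the retrieved context,
+ # and the user template carries the ###Context and ###Question sections.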
+ qna_system_message = """
+ You are an assistant to an insurance firm who answers user queries on policy documents.
+ User input will have the context required by you to answer user questions.
+ This context will begin with the word: ###Context.
+ The context contains references to specific portions of a document relevant to the user query.
+
+ User questions will begin with the word: ###Question.
+
+ Please answer user questions only using the context provided in the input.
+ Do not mention anything about the context in your final answer. Your response should only contain the answer to the question.
+
+ If the answer is not found in the context, respond "Sorry, I cannot answer your question. Please contact our representative on the hotline 1-800-AWESOMEINSURER".
+ """
+
+ qna_user_message_template = """
+ ###Context
+ Here are some documents that are relevant to the question mentioned below.
+ {context}
+
+ ###Question
+ {question}
+ """
+
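+ # OpenAI-compatible client pointed at Anyscale Endpoints; ANYSCALE_API_KEY is read from the environment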
+ load_dotenv()
+
+ anyscale_api_key = os.environ['ANYSCALE_API_KEY']
+
+ client = OpenAI(
+     base_url="https://api.endpoints.anyscale.com/v1",
+     api_key=anyscale_api_key
+ )
+
+ qna_model = 'mlabonne/NeuralHermes-2.5-Mistral-7B'
+
+ embedding_model = AnyscaleEmbeddings(
+     client=client,
+     model='thenlper/gte-large'
+ )
+
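+ # Persisted Chroma collection of policy-document chunks; the top 5 chunks are retrieved by similarity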
+ chromadb_client = chromadb.PersistentClient(path='./policy_db')
+
+ vectorstore_persisted = Chroma(
+     client=chromadb_client,
+     collection_name="policy-text",
+     embedding_function=embedding_model
+ )
+
+ retriever = vectorstore_persisted.as_retriever(
+     search_type='similarity',
+     search_kwargs={'k': 5}
+ )
+
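+ # RAG pipeline: retrieve relevant chunks, build the prompt, and query the chat model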
+ def predict(question):
+
+     relevant_document_chunks = retriever.invoke(question)
+     context_list = [d.page_content for d in relevant_document_chunks]
+     context_for_query = "\n".join(context_list)
+
+     prompt = [
+         {'role': 'system', 'content': qna_system_message},
+         {'role': 'user', 'content': qna_user_message_template.format(
+             context=context_for_query,
+             question=question
+         )}
+     ]
+
+     try:
+         response = client.chat.completions.create(
+             model=qna_model,
+             messages=prompt,
+             temperature=0
+         )
+
+         prediction = response.choices[0].message.content.strip()
+     except Exception as e:
+         prediction = f'Sorry, I encountered the following error: \n {e}'
+
+     return prediction
+
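+ # Gradio UI: a single textbox wired to predict(), with example queries and basic auth on launch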
+ textbox = gr.Textbox(placeholder="Enter your query here", lines=6)
+
+ demo = gr.Interface(
+     inputs=textbox, fn=predict, outputs="text",
+     title="AMA your insurance policy document",
+     description="This web app provides an interface to ask questions about the contents of your health insurance policy",
+     article="Note that questions that are not relevant to the policy will not be answered.",
+     examples=[["My trip was delayed and I paid 45, how much am I covered for?"],
+               ["I just had a baby, is baby food covered?"],
+               ["How is the gauze used in my operation covered?"]
+     ],
+     concurrency_limit=16
+ )
+
+ demo.queue()
+ demo.launch(auth=("demouser", os.getenv('PASSWD')))