Spaces:
Runtime error
Runtime error
Update query_data.py
Browse files- query_data.py +13 -13
query_data.py
CHANGED
@@ -2,32 +2,32 @@ from langchain.prompts.prompt import PromptTemplate
|
|
2 |
from langchain.llms import OpenAI
|
3 |
from langchain.chains import ChatVectorDBChain
|
4 |
|
5 |
-
_template = """
|
6 |
-
You can assume the question about the most recent state of the union address.
|
7 |
Chat History:
|
8 |
{chat_history}
|
9 |
-
|
10 |
-
|
11 |
-
CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)
|
12 |
|
13 |
-
|
14 |
-
|
15 |
-
|
16 |
-
|
17 |
-
|
|
|
|
|
18 |
=========
|
19 |
{context}
|
20 |
=========
|
21 |
Answer in Markdown:"""
|
22 |
-
QA_PROMPT = PromptTemplate(template=template, input_variables=["question", "context"])
|
23 |
|
|
|
24 |
|
25 |
def get_chain(vectorstore):
    """Construct a ChatVectorDBChain over *vectorstore* with the module's QA prompt."""
    # temperature=0 -> deterministic completions from the LLM.
    model = OpenAI(temperature=0)
    return ChatVectorDBChain.from_llm(
        model,
        vectorstore,
        qa_prompt=QA_PROMPT,
    )
|
|
|
2 |
from langchain.llms import OpenAI
|
3 |
from langchain.chains import ChatVectorDBChain
|
4 |
|
5 |
+
# Rewriting prompt: paraphrases the user's latest message (given the chat
# history) and encourages them to share more, before retrieval happens.
_template = """Paraphrase the message and encourage to share more
Chat History:
{chat_history}
Message: {message}
Paraphrased message:"""

# NOTE(review): this constant was renamed from CONDENSE_QUESTION_PROMPT;
# verify that every downstream reference uses the new name.
PARAPHRASE_QUESTION_PROMPT = PromptTemplate.from_template(_template)
|
12 |
+
|
13 |
+
# Answering prompt: persona plus grounding rules; the retrieved documents are
# interpolated as {context} between the "=========" delimiters.
template = """You are an AI psychotherapist. You are empathic and encourage humans to share. If asked for information,
provide it and then gently inquire if they want to talk about it. If you don't know the answer, just say
"Hmm, I'm not sure." Don't try to make up an answer. If the question is not about mental health or resources,
politely inform them that you are tuned to only answer questions about mental health and well being.
Message: {message}
=========
{context}
=========
Answer in Markdown:"""

# Explicit PromptTemplate listing the two variables the template interpolates.
QA_PROMPT = PromptTemplate(template=template, input_variables=["message", "context"])
|
24 |
|
25 |
def get_chain(vectorstore):
    """Build the chat chain over *vectorstore*.

    The chain first rewrites the incoming message with
    PARAPHRASE_QUESTION_PROMPT, then answers it from retrieved context
    using QA_PROMPT.

    Parameters:
        vectorstore: the vector store the chain retrieves documents from.

    Returns:
        A configured ChatVectorDBChain.
    """
    # temperature=0.5 trades determinism for a warmer, more varied tone.
    llm = OpenAI(temperature=0.5)
    qa_chain = ChatVectorDBChain.from_llm(
        llm,
        vectorstore,
        qa_prompt=QA_PROMPT,
        # FIX 1: the original passed CONDENSE_QUESTION_PROMPT, a name this
        # module no longer defines (it was renamed to
        # PARAPHRASE_QUESTION_PROMPT) -> NameError at import time.
        # FIX 2: ChatVectorDBChain.from_llm has no `paraphrase_question_prompt`
        # keyword; the API parameter is `condense_question_prompt`.
        condense_question_prompt=PARAPHRASE_QUESTION_PROMPT,
    )
    return qa_chain
|