from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import ConfigurableFieldSpec
from langchain.chains import create_history_aware_retriever, create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain

from modules.chat.langchain.utils import CustomRunnableWithHistory, InMemoryHistory


class CustomConversationalRetrievalChain:
    """Conversational retrieval (RAG) chain that rephrases follow-up
    questions into standalone queries, answers them from retrieved context,
    and keeps a separate chat history per (user, conversation) pair."""

    def __init__(self, llm, memory, retriever, qa_prompt: str, rephrase_prompt: str):
        """
        Initialize the CustomConversationalRetrievalChain.

        Args:
            llm (LanguageModelLike): The language model instance.
            memory (BaseChatMessageHistory): The chat message history instance.
            retriever (BaseRetriever): The retriever instance.
            qa_prompt (str): The QA prompt string.
            rephrase_prompt (str): The rephrase prompt string.
        """
        self.llm = llm
        self.memory = memory
        self.retriever = retriever
        self.qa_prompt = qa_prompt
        self.rephrase_prompt = rephrase_prompt
        # One chat history per (user_id, conversation_id) pair.
        self.store = {}

        # Contextualize question prompt
        contextualize_q_system_prompt = rephrase_prompt or (
            "Given a chat history and the latest user question "
            "which might reference context in the chat history, "
            "formulate a standalone question which can be understood "
            "without the chat history. Do NOT answer the question, just "
            "reformulate it if needed and otherwise return it as is."
        )
        self.contextualize_q_prompt = ChatPromptTemplate.from_messages(
            [
                ("system", contextualize_q_system_prompt),
                MessagesPlaceholder("chat_history"),
                ("human", "{input}"),
            ]
        )

        # History-aware retriever
        self.history_aware_retriever = create_history_aware_retriever(
            self.llm, self.retriever, self.contextualize_q_prompt
        )

        # Answer question prompt
        qa_system_prompt = qa_prompt or (
            "You are an assistant for question-answering tasks. Use "
            "the following pieces of retrieved context to answer the "
            "question. If you don't know the answer, just say that you "
            "don't know. Use three sentences maximum and keep the answer "
            "concise."
            "\n\n"
            "{context}"
        )
        self.qa_prompt_template = ChatPromptTemplate.from_messages(
            [
                ("system", qa_system_prompt),
                MessagesPlaceholder("chat_history"),
                ("human", "{input}"),
            ]
        )

        # Question-answer chain
        self.question_answer_chain = create_stuff_documents_chain(
            self.llm, self.qa_prompt_template
        )

        # Final retrieval chain
        self.rag_chain = create_retrieval_chain(
            self.history_aware_retriever, self.question_answer_chain
        )

        # Wrap the chain with per-session message history, configurable at
        # invoke time by user ID, conversation ID, and memory window.
        self.rag_chain = CustomRunnableWithHistory(
            self.rag_chain,
            get_session_history=self.get_session_history,
            input_messages_key="input",
            history_messages_key="chat_history",
            output_messages_key="answer",
            history_factory_config=[
                ConfigurableFieldSpec(
                    id="user_id",
                    annotation=str,
                    name="User ID",
                    description="Unique identifier for the user.",
                    default="",
                    is_shared=True,
                ),
                ConfigurableFieldSpec(
                    id="conversation_id",
                    annotation=str,
                    name="Conversation ID",
                    description="Unique identifier for the conversation.",
                    default="",
                    is_shared=True,
                ),
                ConfigurableFieldSpec(
                    id="memory_window",
                    annotation=int,
                    name="Number of Conversations",
                    description="Number of conversations to consider for context.",
                    default=1,
                    is_shared=True,
                ),
            ],
        )

    def get_session_history(
        self, user_id: str, conversation_id: str, memory_window: int
    ) -> BaseChatMessageHistory:
        """
        Get the session history for a user and conversation.

        Args:
            user_id (str): The user identifier.
            conversation_id (str): The conversation identifier.
            memory_window (int): The number of conversations to consider for context.

        Returns:
            BaseChatMessageHistory: The chat message history.
        """
        # Lazily create an in-memory history the first time this
        # (user, conversation) pair is seen.
        if (user_id, conversation_id) not in self.store:
            self.store[(user_id, conversation_id)] = InMemoryHistory()
        return self.store[(user_id, conversation_id)]

    def invoke(self, user_query, config):
        """
        Invoke the chain.

        Args:
            user_query (dict): The input variables, e.g. {"input": "..."}.
            config (dict): Runtime configuration carrying the configurable
                fields (user_id, conversation_id, memory_window).

        Returns:
            dict: The output variables, including the "answer" key.
        """
        return self.rag_chain.invoke(user_query, config)
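

# --- Usage sketch (illustrative; not part of the original module) ---
# Shows how the configurable fields declared in history_factory_config are
# passed at invoke time. The stub LLM and retriever below are assumptions
# for demonstration only; the application injects real `llm` and `retriever`
# instances. This also assumes CustomRunnableWithHistory handles config the
# same way as LangChain's RunnableWithMessageHistory.
if __name__ == "__main__":
    from langchain_core.documents import Document
    from langchain_core.language_models import FakeListChatModel
    from langchain_core.retrievers import BaseRetriever

    class StubRetriever(BaseRetriever):
        """Hypothetical retriever that always returns one fixed document."""

        def _get_relevant_documents(self, query, *, run_manager):
            return [Document(page_content="stub context")]

    chain = CustomConversationalRetrievalChain(
        llm=FakeListChatModel(responses=["stub answer"]),
        memory=None,
        retriever=StubRetriever(),
        qa_prompt="",  # empty strings fall back to the default prompts above
        rephrase_prompt="",
    )
    result = chain.invoke(
        {"input": "What is in the retrieved context?"},
        {
            "configurable": {
                "user_id": "user-1",
                "conversation_id": "conv-1",
                "memory_window": 1,
            }
        },
    )
    print(result["answer"])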