hari-huynh committed
Commit fe81246
Parent(s): e735715

Update memory for new chat session

Files changed (3)
  1. .gitignore +1 -0
  2. app.py +15 -4
  3. react_agent_v2.py +12 -8
.gitignore CHANGED
@@ -1,3 +1,4 @@
+.ipynb_checkpoints
 .venv
 .idea
 __pycache__
app.py CHANGED
@@ -1,9 +1,10 @@
+import chainlit as cl
 from langchain.prompts import ChatPromptTemplate
 from langchain.schema import StrOutputParser
 from langchain.schema.runnable import Runnable
 from langchain.schema.runnable.config import RunnableConfig
-from react_agent_v2 import agent_executor
-import chainlit as cl
+from react_agent_v2 import get_react_agent
+from langchain.memory import ConversationBufferMemory
 from langchain_community.chat_message_histories import ChatMessageHistory
 from langchain_core.chat_history import BaseChatMessageHistory
 from langchain_core.runnables.history import RunnableWithMessageHistory
@@ -12,8 +13,18 @@ from langchain_core.runnables.history import RunnableWithMessageHistory
 
 @cl.on_chat_start
 async def on_chat_start():
+    message_history = ChatMessageHistory()
+    memory = ConversationBufferMemory(
+        memory_key = "chat_history",
+        output_key = "output",
+        chat_memory = message_history,
+        return_message = True
+    )
+
+    agent_executor = get_react_agent(memory)
     cl.user_session.set("runnable", agent_executor)
 
+
 @cl.on_message
 async def on_message(message: cl.Message):
     # runnable = cl.user_session.get("runnable")  # type: Runnable
@@ -39,8 +50,8 @@ async def on_message(message: cl.Message):
 
     llm_chain = cl.user_session.get("runnable")
 
-    response = llm_chain.invoke(
-        {"input": message.content}, callbacks = [cl.LangchainCallbackHandler()]
+    response = await llm_chain.ainvoke(
+        {"input": message.content}, callbacks = [cl.AsyncLangchainCallbackHandler()]
     )
 
     await cl.Message(response["output"].replace("`", "")).send()
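Taken together, the app.py changes move from one shared, module-level agent_executor to a per-session executor: each new chat builds its own ChatMessageHistory and ConversationBufferMemory, hands them to get_react_agent, and the blocking invoke becomes an awaited ainvoke with the async callback handler, so memory from one session no longer bleeds into the next. A minimal sketch of the resulting handlers, assuming the names in the diff (return_messages is used here, the standard ConversationBufferMemory keyword, where the diff writes return_message; callbacks are passed through the config dict):

import chainlit as cl
from langchain.memory import ConversationBufferMemory
from langchain_community.chat_message_histories import ChatMessageHistory
from react_agent_v2 import get_react_agent


@cl.on_chat_start
async def on_chat_start():
    # One history and memory object per chat session, kept in the user session.
    message_history = ChatMessageHistory()
    memory = ConversationBufferMemory(
        memory_key="chat_history",
        output_key="output",
        chat_memory=message_history,
        return_messages=True,  # standard kwarg name; the diff writes return_message
    )
    cl.user_session.set("runnable", get_react_agent(memory))


@cl.on_message
async def on_message(message: cl.Message):
    agent_executor = cl.user_session.get("runnable")
    # Awaiting ainvoke keeps the Chainlit event loop free; the async callback
    # handler reports intermediate agent steps back to the UI.
    response = await agent_executor.ainvoke(
        {"input": message.content},
        config={"callbacks": [cl.AsyncLangchainCallbackHandler()]},
    )
    await cl.Message(response["output"].replace("`", "")).send()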
react_agent_v2.py CHANGED
@@ -1,9 +1,8 @@
 from langchain.agents import Tool, AgentType, initialize_agent
 from langchain.memory import ConversationBufferMemory
-# from langchain.utilities import DuckDuckGoSearchAPIWrapper
 from langchain_google_genai import ChatGoogleGenerativeAI
 from langchain.agents import AgentExecutor
-from langchain import hub
+
 from langchain.agents.format_scratchpad import format_log_to_str
 from langchain.agents.output_parsers import ReActSingleInputOutputParser
 from langchain.tools.render import render_text_description
@@ -20,12 +19,6 @@ llm = ChatGoogleGenerativeAI(
     temperature = 0
 )
 
-# search = DuckDuckGoSearchAPIWrapper()
-#
-# search_tool = Tool(name="Current Search",
-#     func=search.run,
-#     description="Useful when you need to answer questions about detail jobs information or search a job."
-# )
 
 kg_query = Tool(
     name = 'Query Knowledge Graph',
@@ -88,6 +81,17 @@ agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True, memory=me
 # result = agent_executor.invoke(question)
 # print(result)
 
+def get_react_agent(memory):
+    agent_executor = AgentExecutor(
+        agent = agent,
+        tools = tools,
+        verbose = True,
+        memory = memory
+    )
+
+    return agent_executor
+
+
 if __name__ == "__main__":
     while True:
         try:
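The new get_react_agent factory reuses the module-level agent and tools but takes the memory object from the caller, which is what lets app.py give every Chainlit session its own conversation buffer. A small usage sketch outside Chainlit, mirroring the __main__ loop at the end of the file (the prompt strings are illustrative):

from langchain.memory import ConversationBufferMemory
from react_agent_v2 import get_react_agent

# Fresh memory per session; the agent and tools inside the module are shared.
memory = ConversationBufferMemory(
    memory_key="chat_history",
    output_key="output",
    return_messages=True,
)
agent_executor = get_react_agent(memory)

while True:
    try:
        question = input("You: ")
        result = agent_executor.invoke({"input": question})
        print("Agent:", result["output"])
    except (KeyboardInterrupt, EOFError):
        break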