Update sessionID handling and add memory checkpointer for short term memory
Files changed:
- app.py (+71, -21)
- pyproject.toml (+3, -2)
- uv.lock (+12, -4)
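The change wires LangGraph's in-memory checkpointer into the compiled graph and keys every invocation by a per-session thread_id, so each Chainlit session keeps its own short-term message history. A minimal, hedged sketch of that pattern (not the app's actual code; echo_node and the one-node graph are illustrative placeholders) follows:

# Hedged sketch of the short-term-memory pattern this commit adds:
# an in-memory checkpointer plus a per-session thread_id.
import uuid

from langchain_core.messages import AIMessage, HumanMessage
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import MessagesState


def echo_node(state: MessagesState):
    # Stand-in for the real LLM/tool agent node in app.py.
    return {"messages": [AIMessage(content=f"echo: {state['messages'][-1].content}")]}


builder = StateGraph(MessagesState)
builder.add_node("agent", echo_node)
builder.add_edge(START, "agent")
builder.add_edge("agent", END)

memory = MemorySaver()                                # checkpoints kept in process memory
graph = builder.compile(checkpointer=memory)          # same compile call as in app.py

thread_id = str(uuid.uuid4())                         # one id per chat session
config = {"configurable": {"thread_id": thread_id}}   # routes every call to that thread

graph.invoke({"messages": [HumanMessage(content="hi")]}, config=config)
snapshot = graph.get_state(config)                    # earlier turns are recoverable here
print(len(snapshot.values["messages"]))               # 2: the human turn plus the echo

Invoking again with the same config keeps appending to that thread's history; generating a fresh uuid starts a new, empty thread.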
app.py CHANGED
@@ -1,9 +1,10 @@
+import uuid
 from typing import Annotated, TypedDict, Literal
 from langchain_openai import ChatOpenAI
 from langgraph.graph import StateGraph, START, END
-from langgraph.graph.message import MessagesState
+from langgraph.graph.message import MessagesState, add_messages
 from langgraph.prebuilt import ToolNode
-from langgraph.
+from langgraph.checkpoint.memory import MemorySaver
 
 from langchain_core.messages import HumanMessage, AIMessage, SystemMessage
 from langchain.schema.runnable.config import RunnableConfig
@@ -62,12 +63,10 @@ tool_node = ToolNode(tool_belt)
 
 # Simple flow control - always go to final
 def should_continue(state):
-
-
-
-    return
-
-    return END
+    last_message = state["messages"][-1]
+    if last_message.tool_calls:
+        return "action"
+    return END
 
 # Create the graph
 builder = StateGraph(AgentState)
@@ -84,25 +83,72 @@ builder.add_conditional_edges(
 )
 builder.add_edge("action", "agent")
 
-#
-
+# Initialize memory saver for conversation persistence
+memory = MemorySaver()
 
-#
-graph =
+# Compile the graph with memory
+graph = builder.compile(checkpointer=memory)
 
 @cl.on_chat_start
 async def on_chat_start():
-
+    # Generate and store a session ID
+    session_id = str(uuid.uuid4())
+    cl.user_session.set("session_id", session_id)
+
+    # Initialize the conversation state with proper auth
+    cl.user_session.set("messages", [])
+
+    # Initialize config using stored session ID
+    config = RunnableConfig(
+        configurable={
+            "thread_id": session_id,
+            "sessionId": session_id
+        }
+    )
+
+    # Initialize empty state with auth
+    try:
+        await graph.ainvoke(
+            {"messages": [], "context": []},
+            config=config
+        )
+    except Exception as e:
+        print(f"Error initializing state: {str(e)}")
+
+    await cl.Message(
+        content="Hello! I'm your chief joy officer, here to help you with finding fun things to do in London!",
+        author="Assistant"
+    ).send()
 
 @cl.on_message
 async def on_message(message: cl.Message):
-
-
-
-
-
+    session_id = cl.user_session.get("session_id")
+    print(f"Session ID: {session_id}")
+    if not session_id:
+        session_id = str(uuid.uuid4())
+        cl.user_session.set("session_id", session_id)
+
+    config = RunnableConfig(
+        configurable={
+            "thread_id": session_id,
+            "checkpoint_ns": "default_namespace",
+            "sessionId": session_id
         }
-
+    )
+
+    # Try to retrieve previous conversation state
+    try:
+        previous_state = await graph.aget_state(config)
+        if previous_state and previous_state.values:
+            previous_messages = previous_state.values.get('messages', [])
+            print("Found previous state with messages:", len(previous_messages))
+        else:
+            print("Previous state empty or invalid")
+            previous_messages = []
+        current_messages = previous_messages + [HumanMessage(content=message.content)]
+    except Exception as e:
+        print(f"Error retrieving previous state: {str(e)}")
+        current_messages = [HumanMessage(content=message.content)]
 
     # Setup callback handler and final answer message
     cb = cl.LangchainCallbackHandler()
@@ -113,8 +159,12 @@ async def on_message(message: cl.Message):
 
     # Stream the response
    async for chunk in graph.astream(
-        {"messages":
-        config=RunnableConfig(
+        {"messages": current_messages, "context": []},
+        config=RunnableConfig(
+            configurable={
+                "thread_id": session_id,
+            }
+        )
     ):
         for node, values in chunk.items():
             if node == "retrieve":
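As a usage sketch for the handlers above (assuming graph is the checkpointed graph compiled in app.py and the state carries the same "messages"/"context" keys; two_turns is a hypothetical helper, not part of the diff), a later turn on the same thread_id can read back what the checkpointer stored via aget_state before streaming, mirroring the new on_message flow:

# Hedged sketch: two turns on one thread_id, outside Chainlit.
import asyncio
import uuid

from langchain_core.messages import HumanMessage
from langchain.schema.runnable.config import RunnableConfig


async def two_turns(graph):
    session_id = str(uuid.uuid4())
    config = RunnableConfig(configurable={"thread_id": session_id})

    # Turn 1: seed the thread; the checkpointer stores the resulting messages.
    await graph.ainvoke(
        {"messages": [HumanMessage(content="Suggest a museum")], "context": []},
        config=config,
    )

    # Turn 2: read back the checkpointed history, append the new user turn, then stream.
    snapshot = await graph.aget_state(config)
    previous = snapshot.values.get("messages", []) if snapshot and snapshot.values else []
    new_messages = previous + [HumanMessage(content="Anything free this weekend?")]

    async for chunk in graph.astream({"messages": new_messages, "context": []}, config=config):
        for node, values in chunk.items():
            print(node)  # e.g. "agent" / "action", as in the handler above

# From a script: asyncio.run(two_turns(graph)). Inside Chainlit the event loop is
# already running, so the handlers simply await these calls directly.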
pyproject.toml CHANGED
@@ -5,7 +5,7 @@ description = "Simple Pythonic RAG App"
 readme = "README.md"
 requires-python = ">=3.13"
 dependencies = [
-    "chainlit>=2.
+    "chainlit>=2.2.1",
     "numpy>=2.2.2",
     "openai>=1.59.9",
     "pydantic==2.10.1",
@@ -20,5 +20,6 @@ dependencies = [
     "tavily-python>=0.3.1",
     "typing-extensions>=4.9.0",
     "beautifulsoup4==4.13.3",
-    "sentence-transformers==3.4.1"
+    "sentence-transformers==3.4.1",
+    "uuid==1.30"
 ]
uv.lock CHANGED
@@ -21,13 +21,14 @@ dependencies = [
     { name = "sentence-transformers" },
     { name = "tavily-python" },
     { name = "typing-extensions" },
+    { name = "uuid" },
     { name = "websockets" },
 ]
 
 [package.metadata]
 requires-dist = [
     { name = "beautifulsoup4", specifier = "==4.13.3" },
-    { name = "chainlit", specifier = ">=2.
+    { name = "chainlit", specifier = ">=2.2.1" },
     { name = "langchain", specifier = ">=0.1.8" },
     { name = "langchain-community", specifier = ">=0.0.19" },
     { name = "langchain-core", specifier = ">=0.1.23" },
@@ -41,6 +42,7 @@ requires-dist = [
     { name = "sentence-transformers", specifier = "==3.4.1" },
     { name = "tavily-python", specifier = ">=0.3.1" },
     { name = "typing-extensions", specifier = ">=4.9.0" },
+    { name = "uuid", specifier = "==1.30" },
     { name = "websockets", specifier = ">=14.2" },
 ]
 
@@ -205,7 +207,7 @@ wheels = [
 
 [[package]]
 name = "chainlit"
-version = "2.
+version = "2.2.1"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "aiofiles" },
@@ -231,9 +233,9 @@ dependencies = [
     { name = "uvicorn" },
     { name = "watchfiles" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/
+sdist = { url = "https://files.pythonhosted.org/packages/5a/0d/19f66ced254c35cd0595ba064402a15c2ca3d3fba2a75f7ea75b52619871/chainlit-2.2.1.tar.gz", hash = "sha256:45bfc5721f7b0766a3b9ef748478ec1043cfdcb9edc78e23a89db489335f10e9", size = 9473105 }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/
+    { url = "https://files.pythonhosted.org/packages/b8/ef/4408863d8110462bea8ee971a9f747ceeaafe3aa0404f0d7e85d6b06bb1c/chainlit-2.2.1-py3-none-any.whl", hash = "sha256:c2cc7d84386073899164b960371a9c046f175a9659763189cb9f1130b544b694", size = 9623188 },
 ]
 
 [[package]]
@@ -1978,6 +1980,12 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 },
 ]
 
+[[package]]
+name = "uuid"
+version = "1.30"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ce/63/f42f5aa951ebf2c8dac81f77a8edcc1c218640a2a35a03b9ff2d4aa64c3d/uuid-1.30.tar.gz", hash = "sha256:1f87cc004ac5120466f36c5beae48b4c48cc411968eed0eaecd3da82aa96193f", size = 5811 }
+
 [[package]]
 name = "uvicorn"
 version = "0.34.0"