Commit 9c5bb39 (parent: 9b7a7cf) by XThomasBU

updates for testing

code/modules/chat/langchain/utils.py CHANGED
```diff
@@ -36,6 +36,27 @@ from langchain_core.messages import BaseMessage
 
 
 class CustomRunnableWithHistory(RunnableWithMessageHistory):
+
+    def _get_chat_history(self, chat_history: List[CHAT_TURN_TYPE]) -> str:
+        _ROLE_MAP = {"human": "Student: ", "ai": "AI Tutor: "}
+        buffer = ""
+        for dialogue_turn in chat_history:
+            if isinstance(dialogue_turn, BaseMessage):
+                role_prefix = _ROLE_MAP.get(
+                    dialogue_turn.type, f"{dialogue_turn.type}: "
+                )
+                buffer += f"\n{role_prefix}{dialogue_turn.content}"
+            elif isinstance(dialogue_turn, tuple):
+                human = "Student: " + dialogue_turn[0]
+                ai = "AI Tutor: " + dialogue_turn[1]
+                buffer += "\n" + "\n".join([human, ai])
+            else:
+                raise ValueError(
+                    f"Unsupported chat history format: {type(dialogue_turn)}."
+                    f" Full chat history: {chat_history} "
+                )
+        return buffer
+
     def _enter_history(self, input: Any, config: RunnableConfig) -> List[BaseMessage]:
         """
         Get the last k conversations from the message history.
@@ -59,6 +80,9 @@ class CustomRunnableWithHistory(RunnableWithMessageHistory):
             messages = []
         else:
            messages = messages[-2 * config["configurable"]["memory_window"] :]
+
+        messages = self._get_chat_history(messages)
+
         return messages
 
 
```
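The change above trims the stored history to the last `memory_window` exchanges and then flattens it into a single "Student:" / "AI Tutor:" transcript via the new `_get_chat_history` helper. The snippet below is a minimal standalone sketch of that behaviour outside the `CustomRunnableWithHistory` class: `flatten_history` and the sample messages are made up for illustration, and only the `langchain_core` message types the module already imports are assumed.

```python
from typing import List

from langchain_core.messages import AIMessage, BaseMessage, HumanMessage

# Same role mapping the commit introduces: "human" -> "Student: ", "ai" -> "AI Tutor: "
_ROLE_MAP = {"human": "Student: ", "ai": "AI Tutor: "}


def flatten_history(messages: List[BaseMessage], memory_window: int = 3) -> str:
    """Keep the last `memory_window` exchanges and render them as a transcript."""
    # Each exchange is one human + one AI message, hence the factor of 2
    messages = messages[-2 * memory_window:]
    buffer = ""
    for msg in messages:
        prefix = _ROLE_MAP.get(msg.type, f"{msg.type}: ")
        buffer += f"\n{prefix}{msg.content}"
    return buffer


history = [
    HumanMessage(content="What is gradient descent?"),
    AIMessage(content="It is an iterative optimisation method..."),
    HumanMessage(content="And the learning rate?"),
    AIMessage(content="It scales the size of each update step."),
]
print(flatten_history(history, memory_window=1).strip())
# Student: And the learning rate?
# AI Tutor: It scales the size of each update step.
```

Because each exchange contributes one human and one AI message, the slice keeps `2 * memory_window` messages before the transcript is built.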
code/modules/chat_processor/literal_ai.py CHANGED
```diff
@@ -1,7 +1,44 @@
-from chainlit.data import ChainlitDataLayer
+from chainlit.data import ChainlitDataLayer, queue_until_user_message
 
 
 # update custom methods here (Ref: https://github.com/Chainlit/chainlit/blob/4b533cd53173bcc24abe4341a7108f0070d60099/backend/chainlit/data/__init__.py)
 class CustomLiteralDataLayer(ChainlitDataLayer):
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
+
+    @queue_until_user_message()
+    async def create_step(self, step_dict: "StepDict"):
+        metadata = dict(
+            step_dict.get("metadata", {}),
+            **{
+                "waitForAnswer": step_dict.get("waitForAnswer"),
+                "language": step_dict.get("language"),
+                "showInput": step_dict.get("showInput"),
+            },
+        )
+
+        step: LiteralStepDict = {
+            "createdAt": step_dict.get("createdAt"),
+            "startTime": step_dict.get("start"),
+            "endTime": step_dict.get("end"),
+            "generation": step_dict.get("generation"),
+            "id": step_dict.get("id"),
+            "parentId": step_dict.get("parentId"),
+            "name": step_dict.get("name"),
+            "threadId": step_dict.get("threadId"),
+            "type": step_dict.get("type"),
+            "tags": step_dict.get("tags"),
+            "metadata": metadata,
+        }
+        if step_dict.get("input"):
+            step["input"] = {"content": step_dict.get("input")}
+        if step_dict.get("output"):
+            step["output"] = {"content": step_dict.get("output")}
+        if step_dict.get("isError"):
+            step["error"] = step_dict.get("output")
+
+        print("\n\n\n")
+        print("Step: ", step)
+        print("\n\n\n")
+
+        await self.client.api.send_steps([step])
```
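The new `create_step` override repackages a Chainlit `StepDict` into the payload shape sent to Literal AI through `send_steps` (the `print` calls read as temporary debugging output for the testing round this commit mentions). The sketch below isolates that field mapping as a plain function over dictionaries so it can be run without a Chainlit session or a Literal AI client; `to_literal_step` and the sample step are hypothetical names used only for illustration.

```python
from typing import Any, Dict


def to_literal_step(step_dict: Dict[str, Any]) -> Dict[str, Any]:
    """Mirror of the field mapping in CustomLiteralDataLayer.create_step, on plain dicts."""
    # Chainlit-specific flags are folded into the step's metadata
    metadata = dict(
        step_dict.get("metadata", {}),
        **{
            "waitForAnswer": step_dict.get("waitForAnswer"),
            "language": step_dict.get("language"),
            "showInput": step_dict.get("showInput"),
        },
    )
    step = {
        "createdAt": step_dict.get("createdAt"),
        "startTime": step_dict.get("start"),  # Chainlit "start" -> Literal "startTime"
        "endTime": step_dict.get("end"),      # Chainlit "end"   -> Literal "endTime"
        "generation": step_dict.get("generation"),
        "id": step_dict.get("id"),
        "parentId": step_dict.get("parentId"),
        "name": step_dict.get("name"),
        "threadId": step_dict.get("threadId"),
        "type": step_dict.get("type"),
        "tags": step_dict.get("tags"),
        "metadata": metadata,
    }
    # input/output are wrapped in {"content": ...}; errors reuse the output text
    if step_dict.get("input"):
        step["input"] = {"content": step_dict.get("input")}
    if step_dict.get("output"):
        step["output"] = {"content": step_dict.get("output")}
    if step_dict.get("isError"):
        step["error"] = step_dict.get("output")
    return step


sample = {
    "id": "step-1",
    "threadId": "thread-1",
    "name": "AI Tutor",
    "type": "llm",
    "start": "2024-01-01T00:00:00Z",
    "end": "2024-01-01T00:00:05Z",
    "input": "What is a tensor?",
    "output": "A multidimensional array...",
    "showInput": True,
}
print(to_literal_step(sample))
```

Keeping the mapping pure like this makes it easy to check which Chainlit fields land in `metadata` versus top-level Literal AI fields before wiring it back into the async data-layer method.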