EC2 Default User committed on
Commit
b97982d
1 Parent(s): 634b74b

add conversation limit

Files changed (1)
  1. app.py +3 -1
app.py CHANGED
@@ -10,6 +10,7 @@ import os
 from langchain import OpenAI
 from langchain.chains import ConversationChain
 from langchain.memory import ConversationSummaryBufferMemory
+from langchain.memory import ConversationBufferWindowMemory
 from langchain.chat_models import ChatOpenAI
 from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
 from langchain.schema import HumanMessage
@@ -54,7 +55,8 @@ s3 = boto3.client('s3')
 transcribe = boto3.client('transcribe')
 
 
-memory = ConversationSummaryBufferMemory(llm=ChatOpenAI(), max_token_limit=2048, k=3)
+#memory = ConversationSummaryBufferMemory(llm=ChatOpenAI(), max_token_limit=2048)
+memory = ConversationBufferWindowMemory(k=5)
 conversation = ConversationChain(
     llm=OpenAI(streaming=True, callbacks=[StreamingStdOutCallbackHandler()], max_tokens=2048, temperature=0.5),
     memory=memory,
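
For context, a minimal sketch (not part of this commit) of what the new memory does: LangChain's ConversationBufferWindowMemory keeps only the last k exchanges in the prompt, which is where the "conversation limit" comes from. The loop and the question/answer strings below are illustrative placeholders, not data from app.py.

# Illustrative sketch: ConversationBufferWindowMemory with k=5 retains only the
# five most recent exchanges in the rendered history.
from langchain.memory import ConversationBufferWindowMemory

memory = ConversationBufferWindowMemory(k=5)  # keep the 5 most recent turns

# Simulate ten exchanges; only the last five survive in the prompt history.
for i in range(10):
    memory.save_context({"input": f"question {i}"}, {"output": f"answer {i}"})

# Prints a history string containing only turns 5-9.
print(memory.load_memory_variables({})["history"])

Unlike ConversationSummaryBufferMemory, which summarizes older turns with an extra LLM call, the window memory simply drops them, so it needs no llm argument.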