import os
import streamlit as st
from langchain.chat_models import ChatOpenAI
from langchain.schema import HumanMessage
from langchain.agents import AgentType, initialize_agent, load_tools
from langchain.callbacks import StreamlitCallbackHandler
from langchain.memory import ConversationBufferMemory
from langchain.prompts import MessagesPlaceholder
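
# These imports target the classic monolithic `langchain` package; in newer
# LangChain releases, ChatOpenAI and StreamlitCallbackHandler have moved to the
# `langchain_openai` and `langchain_community` packages. ChatOpenAI reads the
# OpenAI key from the OPENAI_API_KEY environment variable.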

st.title("Chatbot")

# Model settings come from environment variables; the temperature arrives as a
# string, so convert it before handing it to ChatOpenAI (0 is used as the
# fallback when the variable is unset).
api_model = os.getenv("OPENAI_API_MODEL")
temperature = float(os.getenv("OPENAI_API_TEMPERATURE", "0"))

# A system prompt typed into the sidebar overrides the one from the environment;
# fall back to an empty string so the concatenation below never sees None.
origin_text = st.sidebar.text_area("System prompt")
system_prompt = origin_text if origin_text else (os.getenv("system_prompt") or "")
print(system_prompt)

def create_agent_chain():
    """Build an OpenAI-functions agent with DuckDuckGo search and chat memory."""
    chat = ChatOpenAI(
        model_name=api_model,
        temperature=temperature,
        streaming=True,
    )

    # Expose the conversation memory to the agent prompt via a placeholder.
    agent_kwargs = {
        "extra_prompt_messages": [MessagesPlaceholder(variable_name="memory")],
    }
    memory = ConversationBufferMemory(memory_key="memory", return_messages=True)

    # DuckDuckGo search is the only tool the agent can call.
    tools = load_tools(["ddg-search"])

    return initialize_agent(
        tools,
        chat,
        agent=AgentType.OPENAI_FUNCTIONS,
        agent_kwargs=agent_kwargs,
        memory=memory,
    )
if "messages" not in st.session_state:
st.session_state.messages = []
if "agent_chain" not in st.session_state:
st.session_state.agent_chain = create_agent_chain()

# Replay the stored conversation on each rerun.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

prompt = st.chat_input("What is up?")

if prompt:
    # Record and display the user's message.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Stream the agent's intermediate steps into the assistant bubble,
    # prepending the system prompt to the user's input.
    with st.chat_message("assistant"):
        callback = StreamlitCallbackHandler(st.container())
        response = st.session_state.agent_chain.run(system_prompt + prompt, callbacks=[callback])
        st.markdown(response)

    st.session_state.messages.append({"role": "assistant", "content": response})
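
# To try this locally (with OPENAI_API_KEY, OPENAI_API_MODEL and
# OPENAI_API_TEMPERATURE set in the environment):
#   streamlit run app.py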