# Import dependencies
import gradio as gr
# from llama_index import GPTVectorStoreIndex
# from query_data import get_chain
from langchain.chat_models import ChatOpenAI
from langchain import LLMChain
from langchain.prompts.chat import (
    ChatPromptTemplate,
    SystemMessagePromptTemplate,
    HumanMessagePromptTemplate,
)
from langchain.agents import initialize_agent, AgentType
from langchain.memory import ConversationBufferMemory

# Initialize the OpenAI chat model and a conversational agent.
# The CHAT_CONVERSATIONAL_REACT_DESCRIPTION agent expects a "chat_history"
# memory key, so a ConversationBufferMemory is attached; no tools are registered here.
llm = ChatOpenAI(temperature=0)
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
agent = initialize_agent(
    tools=[],
    llm=llm,
    agent=AgentType.CHAT_CONVERSATIONAL_REACT_DESCRIPTION,
    memory=memory,
)


# Define a function that returns the chatbot's response to a user message.
def get_response(text):
    response = agent.run(text)
    return response


# Create the Gradio interface: plain text in, plain text out.
iface = gr.Interface(fn=get_response, inputs="text", outputs="text")

# Launch the interface.
iface.launch()

# Alternative: a single LLMChain with a counselor-style system prompt.
"""
chat = ChatOpenAI(temperature=0)

template = "You are a brilliant and empathic counselor. You encourage the user to share and provide resources when asked."
system_message_prompt = SystemMessagePromptTemplate.from_template(template)
human_template = "{text}"
human_message_prompt = HumanMessagePromptTemplate.from_template(human_template)
chat_prompt = ChatPromptTemplate.from_messages([system_message_prompt, human_message_prompt])

chain = LLMChain(llm=chat, prompt=chat_prompt)
chain.run(text="I feel lonely.")
"""

# Execute the chat functionality.
"""
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.HTML("
"""
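# A minimal sketch of what the truncated gr.Blocks layout above might have
# contained, kept inside a string so it is not executed alongside iface.launch().
# The header markup, the component names (chatbot, msg), and the respond()
# helper that wires get_response() into the Chatbot are assumptions, not part
# of the original script.
"""
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.HTML("<h1>Chat with the counselor bot</h1>")  # assumed header content
    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder="Type a message and press Enter")

    def respond(message, chat_history):
        # Reuse the agent-backed helper defined earlier in this script.
        bot_message = get_response(message)
        chat_history.append((message, bot_message))
        return "", chat_history

    msg.submit(respond, inputs=[msg, chatbot], outputs=[msg, chatbot])

demo.launch()
"""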