# Hosting-page scrape residue (Hugging Face Spaces status lines), kept as
# comments so the file parses:
#   Spaces:
#   Runtime error
#   Runtime error
# Import dependencies.
import gradio as gr
from langchain.chat_models import ChatOpenAI

# Historical imports from earlier prototypes, kept for reference:
# from llama_index import GPTVectorStoreIndex
# from query_data import get_chain
# Create the OpenAI-backed chat model client (reads OPENAI_API_KEY from env).
chatbot = ChatOpenAI()


def generate_response(text):
    """Return the chatbot's reply to a single user message.

    Args:
        text: The user's input message.

    Returns:
        The model's reply as a plain string.
    """
    # BUG FIX: ChatOpenAI has no ``generate_response`` method — the original
    # call raised AttributeError on first use.  ``predict`` is the
    # string-in / string-out entry point on LangChain chat models.
    response = chatbot.predict(text)
    return response
# Build the Gradio UI: one text input wired to one text output.
# NOTE: ``gr.inputs.Textbox`` / ``gr.outputs.Textbox`` were removed in
# Gradio 3.x; components are constructed directly from the top-level
# namespace now.
interface = gr.Interface(
    fn=generate_response,
    inputs=gr.Textbox(label="Input Text"),
    outputs=gr.Textbox(label="Output Text"),
)

# Launch the web server (blocking call; intentionally at module level so the
# app starts when the Space/script runs).
interface.launch()
# ---------------------------------------------------------------------------
# NOTE(review): everything below is disabled prototype code, kept only for
# reference.  It previously sat in bare triple-quoted string literals, which
# still execute as (no-op) expression statements at import time — and the
# scrape artifacts after the closing quotes made the file unparseable.  It is
# now inert comments; delete once the active implementation above settles.
# ---------------------------------------------------------------------------

# Prototype 1: prompt template + windowed conversation memory (LLMChain).
# from langchain import OpenAI, ConversationChain, LLMChain, PromptTemplate
# from langchain.memory import ConversationBufferWindowMemory
#
# template = """You are a brilliant and empathic counselor. You encourage human to share feelings.
# You provide resources when appropriate or if asked.
# {history}
# Human: {human_input}
# Assistant:"""
# prompt = PromptTemplate(input_variables=["history", "human_input"], template=template)
# chatgpt_chain = LLMChain(
#     llm=OpenAI(temperature=0.8),
#     prompt=prompt,
#     verbose=False,
#     memory=ConversationBufferWindowMemory(k=2),
# )
# output = chatgpt_chain.predict(
#     human_input=          # <- left unfinished in the original
# iface = gr.Interface(fn=get_response, inputs="text", outputs="text")

# Prototype 2: chat model with system + human message templates.
# chat = ChatOpenAI(temperature=0)
# template = "You are a brilliant and empathic counselor. You encourage to share and provide resources when asked."
# system_message_prompt = SystemMessagePromptTemplate.from_template(template)
# human_template = "{text}"
# human_message_prompt = HumanMessagePromptTemplate.from_template(human_template)
# chat_prompt = ChatPromptTemplate.from_messages([system_message_prompt, human_message_prompt])
# chain = LLMChain(llm=chat, prompt=chat_prompt)
# chain.run(text="I feel lonely.")

# Prototype 3: gr.Blocks chat UI with send/clear buttons and examples.
# with gr.Blocks(theme=gr.themes.Soft()) as demo:
#     gr.HTML("<center><h2>Omdena AI Chatbot For Mental Health and Wellbeing</h2></center>")
#     gr.HTML("WELCOME<br>"
#             "I am an AI ChatBot and I am here to assist you with whatever is bothering you. "
#             "Our conversation is strictly confidential and I will not remember it when you come back another time."
#             )
#     chatbot = gr.Chatbot()
#     chat_message = gr.Textbox(label="What would you like to chat about?")
#     response = gr.Textbox
#
#     # define function to get chatbot response
#     def get_response(text):
#         response = agent.run(text)
#         return response
#
#     def respond(chat_message, chat_history):
#         response = get_chain(chat_message, chat_history)
#         chat_history.append((chat_message, response))
#         return "", chat_history
#
#     with gr.Row():
#         send = gr.Button(value="Send").style(full_width=False)
#         clear = gr.Button(value="Clear Chat").style(full_width=False)
#     gr.Examples(
#         examples=[
#             "I feel lonely",
#             "I'm having problems at home",
#             "I am looking for some resources",
#         ],
#         inputs=chat_message
#     )
#     send.click(get_response(chat_message))
#     clear.click(lambda: None, None, chatbot, queue=False)
#
# if __name__ == "__main__":
#     demo.launch(debug=True)