# ThinkSpace / app.py — Streamlit AI companion chatbot
# Source: Hugging Face Space by AndrewLam489 (commit 4f581a5, verified)
import streamlit as st
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the Gemma 7B instruction-tuned model. google/gemma-7b-it is a gated
# repo, so a Hugging Face login is required; token=True reads the cached
# credential (`use_auth_token` is deprecated in recent transformers releases).
tokenizer = AutoTokenizer.from_pretrained("google/gemma-7b-it", token=True)
model = AutoModelForCausalLM.from_pretrained("google/gemma-7b-it", token=True)

# Place the model on the GPU when one is available, otherwise stay on CPU.
# Without this, inputs moved to a device at generation time would not match
# the model's device and generation would crash.
device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)
# Set up the Streamlit page configuration.
# NOTE: set_page_config must be the first Streamlit command executed in the script.
st.set_page_config(page_title="AI Companion Chatbot", layout="centered")
# Title of the application
st.title("AI Companion Chatbot")
# Add a brief description shown above the input box
st.markdown("""
Welcome to the AI Companion Chatbot! This chatbot is designed to offer therapeutic conversations,
providing a safe and empathetic space for you to express your feelings.
""")
# Free-text input box for the user's message; second argument "" is the
# initial value, so user_input is an empty string until the user types.
user_input = st.text_area("How are you feeling today?", "")
# Define the function to generate the response
def generate_response(user_input):
    """Generate an empathetic, advice-focused reply to the user's message.

    Args:
        user_input: Free-text description of how the user is feeling.

    Returns:
        The model's generated reply as a string. Only the newly generated
        text is returned — the echoed prompt is stripped before decoding.
    """
    prompt = f"""
    You are a therapist with a strong focus on providing practical, actionable advice.
    Rules:
    1. Respond in a supportive, empathetic, and non-judgmental manner to the following statement.
    2. Offer at least 3 **specific** strategies or coping techniques that the user can try immediately to manage or alleviate their anxiety.
    These could include emotional regulation techniques (like grounding exercises, breathing techniques),
    self-care practices (like self-compassion or taking breaks), or mindset shifts (like reframing negative thoughts or focusing on what can be controlled).
    3. Be very descriptive. Use bullet points to clearly state actionable steps.
    4. Do not use "I" or reference the first person perspective.
    Base your response on how the user is feeling: {user_input}
    """
    # Move the tokenized inputs to wherever the model actually lives instead
    # of hard-coding "cuda" — avoids a crash on CPU-only hosts and a
    # device-mismatch error when the model was not moved to the GPU.
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    # max_new_tokens bounds the *generated* length only. The original
    # max_length=350 counted the (long) prompt's tokens as well, which could
    # leave almost no room for the actual answer.
    outputs = model.generate(**inputs, max_new_tokens=350, num_return_sequences=1)
    # Slice off the prompt tokens so the user sees only the model's reply,
    # not the instructions echoed back.
    new_tokens = outputs[0][inputs["input_ids"].shape[-1]:]
    response = tokenizer.decode(new_tokens, skip_special_tokens=True)
    return response.strip()
# When the user clicks "Send", show the model's reply — or a gentle
# reminder if the input box was left empty.
if st.button("Send"):
    if not user_input:
        # Guard: nothing to respond to yet.
        st.warning("Please enter something to continue the conversation.")
    else:
        # Query the model and render its reply in a read-only text area.
        st.text_area("AI Companion Response:", generate_response(user_input), height=200)