garyd1 committed
Commit ff2871e · verified · 1 Parent(s): 649f909

Updated Chatbot

Files changed (1):
  1. app.py +56 -40
app.py CHANGED
@@ -1,49 +1,65 @@
-
- from dotenv import load_dotenv
-
- load_dotenv() # take environment variables from .env.
-
  import streamlit as st
  import os
- import pathlib
- import textwrap
-
+ from dotenv import load_dotenv
  import google.generativeai as genai

- from IPython.display import display
- from IPython.display import Markdown
-
-
- os.getenv("GOOGLE_API_KEY")
- genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
+ # Load environment variables
+ load_dotenv()
+ API_KEY = os.getenv("GOOGLE_API_KEY")

- ## Function to load OpenAI model and get respones
+ # Configure Gemini API
+ genai.configure(api_key=API_KEY)
  model = genai.GenerativeModel('gemini-pro')
- chat = model.start_chat(history=[])
- def get_gemini_response(question):
-
-     response =chat.send_message(question,stream=True)
-     return response
-
- ##initialize our streamlit app
-
- st.set_page_config(page_title="Q&A Demo")
-
- st.header("Gemini Application")

- input=st.text_input("Input: ",key="input")
+ # Streamlit Page Config
+ st.set_page_config(page_title="Gemini Q&A App", layout="wide")

+ # Initialize Chat History in Session State
+ if "chat_history" not in st.session_state:
+     st.session_state.chat_history = []

- submit=st.button("Ask the question")
-
- ## If ask button is clicked
-
- if submit:
-
-     response=get_gemini_response(input)
-     st.subheader("The Response is")
-     for chunk in response:
-         print(st.write(chunk.text))
-         print("_"*80)
-
-     st.write(chat.history)
+ # Function to get response from Gemini API
+ def get_gemini_response(question):
+     chat = model.start_chat(history=st.session_state.chat_history)
+     try:
+         response = chat.send_message(question, stream=True)
+         full_response = ""
+         for chunk in response:
+             full_response += chunk.text + " "
+         st.session_state.chat_history.append({"user": question, "bot": full_response})
+         return full_response
+     except Exception as e:
+         return f"Error: {str(e)}"
+
+ # UI Layout
+ st.title("🤖 Gemini AI - Interactive Q&A")
+ st.write("Ask me anything and I'll try to answer!")
+
+ # Sidebar for Settings
+ with st.sidebar:
+     st.header("Settings")
+     if st.button("Clear Chat History"):
+         st.session_state.chat_history = []
+         st.success("Chat history cleared!")
+
+ # User Input
+ user_input = st.text_input("Your Question:", placeholder="Type your question here...")
+ submit = st.button("Ask Gemini")
+
+ # Response Handling
+ if submit and user_input:
+     if not API_KEY:
+         st.error("API Key is missing. Please check your .env file.")
+     else:
+         with st.spinner("Thinking..."):
+             response = get_gemini_response(user_input)
+         st.subheader("Response:")
+         st.markdown(response)  # Render response in Markdown format
+
+ # Display Chat History
+ if st.session_state.chat_history:
+     st.subheader("Chat History")
+     for chat in st.session_state.chat_history:
+         st.markdown(f"**You:** {chat['user']}")
+         st.markdown(f"**Gemini:** {chat['bot']}")
+         st.write("---")
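
Note on the new get_gemini_response(): start_chat(history=...) in google-generativeai expects role/parts entries (e.g. {"role": "user", "parts": ["..."]}), while this app stores {"user": ..., "bot": ...} dicts, so replaying st.session_state.chat_history directly may be rejected once a second question is asked. A minimal sketch of a conversion step, assuming the stored format above; the helper name to_gemini_history is hypothetical and not part of this commit:

    # Hypothetical helper (not in this commit): map the app's
    # {"user": ..., "bot": ...} turns into the role/parts dicts
    # that model.start_chat(history=...) accepts.
    def to_gemini_history(app_history):
        gemini_history = []
        for turn in app_history:
            gemini_history.append({"role": "user", "parts": [turn["user"]]})
            gemini_history.append({"role": "model", "parts": [turn["bot"]]})
        return gemini_history

    # Usage sketch inside get_gemini_response:
    # chat = model.start_chat(history=to_gemini_history(st.session_state.chat_history))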