import os

import streamlit as st
import google.generativeai as genai

# Configure the Gemini client with an API key read from the environment
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))

# Load the Gemini Pro model and start a chat session with empty history
model = genai.GenerativeModel("gemini-pro")
chat = model.start_chat(history=[])


def get_gemini_response(question):
    """Send a question to the chat session and return the streamed response."""
    response = chat.send_message(question, stream=True)
    return response

# Streamlit page setup
st.set_page_config(page_title="Q&A with Gemini")
st.header("Gemini LLM Application")

# Input box and submit button
user_question = st.text_input("Input: ", key="input")
submit = st.button("Ask the question")

# When the button is clicked and a question was entered, stream the answer
if submit and user_question:
    response = get_gemini_response(user_question)
    st.subheader("Response: ")
    # Write each streamed chunk as it arrives
    for chunk in response:
        st.write(chunk.text)
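
# To try the app locally (assuming this file is saved as app.py and
# GOOGLE_API_KEY is exported in the shell), run:
#   streamlit run app.py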