Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -6,9 +6,11 @@ from huggingface_hub import login
|
|
6 |
import re
|
7 |
import os
|
8 |
|
|
|
9 |
HF_TOKEN = os.getenv("HF_TOKEN")
|
10 |
login(token=HF_TOKEN)
|
11 |
|
|
|
12 |
MODELS = {
|
13 |
"athena-1": {
|
14 |
"name": "🦁 Atlas-Flash",
|
@@ -20,6 +22,10 @@ MODELS = {
|
|
20 |
},
|
21 |
}
|
22 |
|
|
|
|
|
|
|
|
|
23 |
class AtlasInferenceApp:
|
24 |
def __init__(self):
|
25 |
if "current_model" not in st.session_state:
|
@@ -148,16 +154,21 @@ class AtlasInferenceApp:
|
|
148 |
|
149 |
st.markdown("*⚠️ CAUTION: Atlas is an experimental model and this is just a preview. Responses may not be expected. Please double-check sensitive information!*")
|
150 |
|
|
|
151 |
for message in st.session_state.chat_history:
|
152 |
-
with st.chat_message(
|
|
|
|
|
|
|
153 |
st.markdown(message["content"])
|
154 |
|
|
|
155 |
if prompt := st.chat_input("Message Atlas..."):
|
156 |
st.session_state.chat_history.append({"role": "user", "content": prompt})
|
157 |
-
with st.chat_message("user"):
|
158 |
st.markdown(prompt)
|
159 |
|
160 |
-
with st.chat_message("assistant"):
|
161 |
with st.spinner("Generating response..."):
|
162 |
response = self.respond(prompt, max_tokens, temperature, top_p, top_k)
|
163 |
st.markdown(response)
|
|
|
6 |
import re
|
7 |
import os
|
8 |
|
9 |
+
# Load Hugging Face token
|
10 |
HF_TOKEN = os.getenv("HF_TOKEN")
|
11 |
login(token=HF_TOKEN)
|
12 |
|
13 |
+
# Define models
|
14 |
MODELS = {
|
15 |
"athena-1": {
|
16 |
"name": "🦁 Atlas-Flash",
|
|
|
22 |
},
|
23 |
}
|
24 |
|
25 |
+
# Profile pictures
|
26 |
+
USER_PFP = "https://huggingface.co/front/assets/avatars.png" # Hugging Face user avatar
|
27 |
+
AI_PFP = "ai_pfp.png" # Replace with the path to your AI's image or a URL
|
28 |
+
|
29 |
class AtlasInferenceApp:
|
30 |
def __init__(self):
|
31 |
if "current_model" not in st.session_state:
|
|
|
154 |
|
155 |
st.markdown("*⚠️ CAUTION: Atlas is an experimental model and this is just a preview. Responses may not be expected. Please double-check sensitive information!*")
|
156 |
|
157 |
+
# Display chat history
|
158 |
for message in st.session_state.chat_history:
|
159 |
+
with st.chat_message(
|
160 |
+
message["role"],
|
161 |
+
avatar=USER_PFP if message["role"] == "user" else AI_PFP
|
162 |
+
):
|
163 |
st.markdown(message["content"])
|
164 |
|
165 |
+
# Input box for user messages
|
166 |
if prompt := st.chat_input("Message Atlas..."):
|
167 |
st.session_state.chat_history.append({"role": "user", "content": prompt})
|
168 |
+
with st.chat_message("user", avatar=USER_PFP):
|
169 |
st.markdown(prompt)
|
170 |
|
171 |
+
with st.chat_message("assistant", avatar=AI_PFP):
|
172 |
with st.spinner("Generating response..."):
|
173 |
response = self.respond(prompt, max_tokens, temperature, top_p, top_k)
|
174 |
st.markdown(response)
|