Spaces:
Runtime error
Runtime error
import streamlit as st
from streamlit_chat import message
from streamlit_extras.colored_header import colored_header
from streamlit_extras.add_vertical_space import add_vertical_space
from transformers import AutoTokenizer, AutoModelForCausalLM

# Page configuration must be the first Streamlit command executed in the script.
st.set_page_config(page_title="Einfach.HugChat")

# Model identifiers offered in the sidebar selector.
models = [
    "vicuna-13b",
    "koala-13b",
    "oasst-pythia-12b",
    "RWKV-4-Raven-14B",
    "alpaca-13b",
    "chatglm-6b",
    "llama-13b",
    "dolly-v2-12b",
    "stablelm-tuned-alpha-7b",
    "fastchat-t5-3b",
    "mpt-7b-chat",
]
# Sidebar contents | |
with st.sidebar: | |
st.title('EinfachChat') | |
st.markdown(''' | |
## About | |
This app is a LLM-powered chatbot built using: | |
- [Streamlit](https://streamlit.io/) | |
- [OpenAssistant/oasst-sft-6-llama-30b-xor](https://huggingface.co/OpenAssistant/oasst-sft-6-llama-30b-xor) LLM model | |
💡 Note: No API key required! | |
''') | |
model_name = st.selectbox('Choose a model', models) | |
add_vertical_space(5) | |
st.write('Made with ❤️ by EinfachAlex') | |
2023-05-18 21:30:13.025 | |
Warning: to view this Streamlit app on a browser, run it with the following | |
command: | |
streamlit run app.py [ARGUMENTS] | |
2023-05-18 21:30:13.026 Session state does not function when running a script without `streamlit run` | |
Traceback (most recent call last): | |
File "/home/user/.local/lib/python3.8/site-packages/streamlit/runtime/state/session_state.py", line 376, in __getitem__ | |
return self._getitem(widget_id, key) | |
File "/home/user/.local/lib/python3.8/site-packages/streamlit/runtime/state/session_state.py", line 421, in _getitem | |
raise KeyError | |
KeyError | |
During handling of the above exception, another exception occurred: | |
Traceback (most recent call last): | |
File "app.py", line 68, in <module> | |
if st.session_state['generated']: | |
File "/home/user/.local/lib/python3.8/site-packages/streamlit/runtime/state/session_state_proxy.py", line 90, in __getitem__ | |
return get_session_state()[key] | |
File "/home/user/.local/lib/python3.8/site-packages/streamlit/runtime/state/safe_session_state.py", line 113, in __getitem__ | |
return self._state[key] | |
File "/home/user/.local/lib/python3.8/site-packages/streamlit/runtime/state/session_state.py", line 378, in __getitem__ | |
raise KeyError(_missing_key_error_message(key)) | |
KeyError: 'st.session_state has no key "generated". Did you forget to initialize it? More info: https://docs.streamlit.io/library/advanced-features/session-state#initialization' | |
# Page layout: the input box sits above a colored divider, with the
# conversation history rendered below it.
input_container = st.container()
colored_header(label='', description='', color_name='blue-30')
response_container = st.container()
# User input
## Function for taking user provided prompt as input
def get_text():
    """Render the chat text box and return whatever the user typed."""
    return st.text_input("You: ", "", key="input")
# Place the text box inside the top container and capture the prompt.
with input_container:
    user_input = get_text()
# Response output
@st.cache_resource(show_spinner=False)
def _load_model(model_name):
    """Load and cache the (tokenizer, model) pair for *model_name*.

    Without caching, every chat turn re-downloaded and re-instantiated the
    full model, making each response take minutes and risking memory
    exhaustion. ``st.cache_resource`` keeps one instance per model id for
    the lifetime of the server process.
    """
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)
    return tokenizer, model


## Function for taking user prompt as input followed by producing AI generated responses
def generate_response(prompt, model_name):
    """Return the model's reply to *prompt*.

    Parameters
    ----------
    prompt : str
        The user's chat message.
    model_name : str
        Hugging Face model identifier chosen in the sidebar.
        NOTE(review): several entries in ``models`` (e.g. "vicuna-13b",
        "alpaca-13b") do not look like valid Hub repo ids and will raise
        on download — verify the list against the Hub.

    Returns
    -------
    str
        The decoded generation (special tokens included, as before;
        generation uses the library's default settings, unchanged).
    """
    tokenizer, model = _load_model(model_name)
    inputs = tokenizer(prompt, return_tensors='pt')
    outputs = model.generate(**inputs)
    return tokenizer.decode(outputs[0])
## Conditional display of AI generated responses as a function of user provided prompts
with response_container:
    # Initialize the chat history on the first script run. Without this,
    # reading st.session_state['generated'] below raises
    # KeyError: 'generated' — exactly the crash shown in the pasted
    # traceback ("Did you forget to initialize it?").
    if 'past' not in st.session_state:
        st.session_state['past'] = []
    if 'generated' not in st.session_state:
        st.session_state['generated'] = []

    if user_input:
        response = generate_response(user_input, model_name)
        # Store prompt and reply in parallel lists, one entry per turn.
        st.session_state['past'].append(user_input)
        st.session_state['generated'].append(response)

    # Replay the full conversation: user message, then AI message, per turn.
    if st.session_state['generated']:
        for i in range(len(st.session_state['generated'])):
            message(st.session_state['past'][i], is_user=True, key=str(i) + '_user')
            message(st.session_state['generated'][i], key=str(i))