|
from transformers import AutoModelForCausalLM, AutoTokenizer |
|
from huggingface_hub import login |
|
|
|
|
|
import streamlit as st |
|
|
|
# --- Streamlit page configuration ---------------------------------------
st.set_page_config(
    page_title="Code Generation",
    page_icon="🤖",  # original source had mojibake ("π€") — restored robot emoji
    layout="wide",
    initial_sidebar_state="expanded",
)

# --- Hugging Face Hub authentication ------------------------------------
# SECURITY: a live API token was previously hardcoded here and committed to
# source control. That token must be revoked on huggingface.co; supply a
# fresh one via the HF_TOKEN environment variable instead.
hf_token = os.environ.get("HF_TOKEN")
if hf_token:
    login(token=hf_token, add_to_git_credential=True)
else:
    st.warning("HF_TOKEN is not set; downloading gated models may fail.")

st.title("Code Generation")

model_name = "lmsys/vicuna-7b-v1.1"
# Label now reflects the model actually loaded (previously it advertised
# "TinyPixel/red1xe/Llama-2-7B-codeGPT" while loading vicuna-7b).
st.write(f"MODEL: {model_name}")


@st.cache_resource(show_spinner="Loading model…")
def _load_model(name: str):
    """Load the tokenizer and model once per server process.

    Streamlit reruns the whole script on every interaction; without caching,
    a 7B-parameter model would be re-downloaded/re-instantiated each time.
    """
    tok = AutoTokenizer.from_pretrained(name)
    mdl = AutoModelForCausalLM.from_pretrained(name)
    return tok, mdl


tokenizer, model = _load_model(model_name)