# Career-field text classifier — Streamlit app (Hugging Face Spaces).
import numpy as np
import streamlit as st
import torch
from transformers import (
    BertForSequenceClassification,
    BertTokenizer,
    TFBertForSequenceClassification,
)
@st.cache_resource
def get_model():
    """Load the tokenizer and classifier once, cached across Streamlit reruns.

    Returns:
        tuple[BertTokenizer, BertForSequenceClassification]: the
        'bert-base-uncased' tokenizer and the fine-tuned classifier.
    """
    tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
    # Use the PyTorch model class: the rest of this file tokenizes with
    # return_tensors='pt' and calls .detach().numpy(), which a TF model
    # cannot consume. from_tf=True converts the checkpoint in case the repo
    # only ships TensorFlow weights (the original code used the TF class)
    # — TODO(review): confirm which weight format the hub repo provides.
    model = BertForSequenceClassification.from_pretrained(
        "lfernandopg/Proyecto-Transformers", from_tf=True
    )
    return tokenizer, model
# Load the (cached) tokenizer/model pair, then build the minimal UI:
# a free-text input and an "Analyze" trigger button.
tokenizer, model = get_model()

user_input = st.text_area('Enter Text to Analyze')
button = st.button("Analyze")
# Class index -> profession label for the classifier's 21 output logits.
# NOTE(review): entries 19 and 20 carry trailing spaces in the original;
# they are kept byte-for-byte so the displayed prediction is unchanged.
d = {
    0: 'Accountant',
    1: 'Actuary',
    2: 'Biologist',
    3: 'Chemist',
    4: 'Civil engineer',
    5: 'Computer programmer',
    6: 'Data scientist',
    7: 'Database administrator',
    8: 'Dentist',
    9: 'Economist',
    10: 'Environmental engineer',
    11: 'Financial analyst',
    12: 'IT manager',
    13: 'Mathematician',
    14: 'Mechanical engineer',
    15: 'Physician assistant',
    16: 'Psychologist',
    17: 'Statistician',
    18: 'Systems analyst',
    19: 'Technical writer ',
    20: 'Web developer ',
}
# Run inference only when there is text AND the button was clicked.
if user_input and button:
    # Tokenize as PyTorch tensors, truncating to BERT's 512-token limit.
    test_sample = tokenizer(
        [user_input],
        padding=True,
        truncation=True,
        max_length=512,
        return_tensors='pt',
    )
    # Inference only — no_grad avoids building an autograd graph.
    with torch.no_grad():
        output = model(**test_sample)
    st.write("Logits: ", output.logits)
    # argmax over the class dimension gives the predicted label index.
    y_pred = np.argmax(output.logits.detach().numpy(), axis=1)
    st.write("Prediction: ", d[y_pred[0]])