from transformers import AutoModelForSequenceClassification, AutoTokenizer
from torch.nn.functional import softmax
import torch
import gradio as gr

model_name = "nebiyu29/hate_classifier"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)
# run the model in eval mode with gradients disabled, since we only do inference
def model_classifier(text):
    model.eval()
    with torch.no_grad():
        if len(text) == 0:
            raise gr.Error("the input text is empty")
        else:
            encoded_input = tokenizer(text, return_tensors="pt")  # tokenize into PyTorch tensors
            logits = model(**encoded_input).logits  # raw scores for each label
            probs_label = softmax(logits, dim=-1)  # normalize the logits into a probability distribution
            id2label = model.config.id2label
            return_probs = {id2label[i]: prob.item() for i, prob in enumerate(probs_label[0])}
            return [[label, prob] for label, prob in return_probs.items()]  # one [label, probability] row per class
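# For illustration only (the label names and values below are made up; the real ones
# come from model.config.id2label at runtime): a call such as
# model_classifier("some example sentence") is expected to return rows like
# [["hate", 0.12], ["offensive", 0.30], ["neither", 0.58]],
# which the table output below renders as a two-column table.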
# define how the output looks: a small table of label/probability pairs
output_format = gr.Dataframe(
    headers=["label", "probability"],  # one row per class
    type="array",                      # the classifier returns a plain list of rows
    col_count=(2, "fixed"),
    label="label probabilities",
)
# the interface accepts text and shows the probability of each of the labels
demo = gr.Interface(
    fn=model_classifier,
    inputs=gr.Textbox(lines=5, label="Enter your text"),
    outputs=output_format,
    title="Hate Classifier Demo App",
)
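# Minimal sanity check (an added sketch, not part of the original app): classify one
# arbitrary sample sentence before launching, so model or tokenizer loading problems
# show up in the Space logs early instead of only as a runtime error in the UI.
print(model_classifier("this is a sample sentence"))  # expect a list of [label, probability] rows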
demo.launch(share=True)