Spaces:
Runtime error
Runtime error
File size: 1,485 Bytes
8ec0711 3711811 eeac5cc 3711811 ce5c0eb a8c9879 8ec0711 959ecc7 a8c9879 8ec0711 2dab15b 8ec0711 3711811 ce5c0eb a8c9879 ce5c0eb f57b87c ce5c0eb a8c9879 b989a77 ce5c0eb a8c9879 082d447 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 |
from transformers import AutoModelForSequenceClassification,AutoTokenizer
from torch.nn.functional import softmax
import torch
import gradio as gr
import json
# Hugging Face model id for the pretrained hate-speech classifier.
model_name="nebiyu29/hate_classifier"
# Tokenizer and sequence-classification model are downloaded (or loaded
# from the local HF cache) once at import time and shared by all requests.
tokenizer=AutoTokenizer.from_pretrained(model_name)
model=AutoModelForSequenceClassification.from_pretrained(model_name)
#this where the model is active and we need to make the gradiends in active
def model_classifier(text):
    """Classify `text` with the hate-speech model.

    Returns a list of ``[label, probability]`` rows, one per class, in the
    shape expected by the ``gr.Dataframe`` output component (headers:
    "label", "probabilities"). For empty input an empty row list is
    returned so the Dataframe still renders.
    """
    model.eval()  # disable dropout etc. for deterministic inference
    with torch.no_grad():  # inference only — no gradients needed
        if not text:
            # Fix: the original returned a plain string here, which the
            # Dataframe output component cannot display.
            return []
        # Fix: return_tensors="pt" is required so the model receives
        # PyTorch tensors rather than plain Python lists.
        encoded_input = tokenizer(text, return_tensors="pt", truncation=True)
        # Fix: the original passed the undefined name `encoded` (NameError)
        # and fed the whole model-output object to softmax instead of its
        # `.logits` tensor.
        logits = model(**encoded_input).logits
        probs_label = softmax(logits, dim=-1)  # normalize logits to probabilities
        id2label = model.config.id2label
        # One [label, probability] row per class for the Dataframe output.
        return [[id2label[i], p.item()] for i, p in enumerate(probs_label[0])]
# Output component: a dynamic table of (label, probability) rows.
output_format = gr.Dataframe(
    row_count=(3, "dynamic"),
    col_count=(2, "fixed"),
    label="label probabilities",
    headers=["label", "probabilities"],
)
# Interface: free-text input, probability table output.
# Fix: `gr.inputs.Textbox` was removed in Gradio 3.x+ — the component now
# lives at the top level as `gr.Textbox`. Also fixed the label typo
# ("Enter you text" -> "Enter your text").
demo = gr.Interface(
    fn=model_classifier,
    inputs=gr.Textbox(lines=5, label="Enter your text"),
    outputs=output_format,
    title="Hate Classifier Demo App",
)
demo.launch(share=True)  # share=True exposes a public Gradio link