the model classifier now returns a probability distribution over all the labels
app.py CHANGED
@@ -1,6 +1,7 @@
 from transformers import AutoModelForSequenceClassification,AutoTokenizer
-
+from torch.nn.functional import softmax
 import torch
+import gradio as gr
 
 model_name="nebiyu29/hate_classifier"
 tokenizer=AutoTokenizer.from_pretrained(model_name)
@@ -15,8 +16,11 @@ def model_classifier(text):
         return f"the input text is {text}"
     else:
         encoded_input=tokenizer(text) #this is where the encoding happens
-
-
+        logits=model(**encoded) #this is the logits of the labels
+        probs_label=softmax(logits,dim=-1) #turning the probability distribution into normalize form
+        id2label=model.config.id2label
+        return_probs={id2label[i]:probs.item() for i,probs in enumerate(probs_label[0])}
+        return return_probs
 
 
 
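As committed, the new branch would still fail at run time: the tokenizer output is bound to encoded_input but the forward pass reads the undefined name encoded, the model call returns an output object rather than a bare logits tensor, and the tokenizer is not asked for PyTorch tensors. A minimal corrected sketch of model_classifier follows; the condition guarding the early return and the line that loads model are not visible in the hunks shown, so both are assumptions here.

import torch
from torch.nn.functional import softmax
from transformers import AutoModelForSequenceClassification, AutoTokenizer

model_name = "nebiyu29/hate_classifier"
tokenizer = AutoTokenizer.from_pretrained(model_name)
# Assumption: the model is loaded elsewhere in app.py, outside the hunks shown.
model = AutoModelForSequenceClassification.from_pretrained(model_name)

def model_classifier(text):
    # Assumed guard: the diff does not show the original early-return condition.
    if not text or not text.strip():
        return f"the input text is {text}"
    # Encode to PyTorch tensors so the model can consume the batch directly.
    encoded_input = tokenizer(text, return_tensors="pt", truncation=True)
    with torch.no_grad():
        # The forward pass returns a SequenceClassifierOutput; take its logits tensor.
        logits = model(**encoded_input).logits
    probs_label = softmax(logits, dim=-1)  # normalize the logits into a probability distribution
    id2label = model.config.id2label
    # Map every label id to its probability, as the commit message describes.
    return {id2label[i]: prob.item() for i, prob in enumerate(probs_label[0])}

Because the function now returns a dict mapping each label to its probability, a gr.Label output component can display the whole distribution. The actual interface code is outside the hunks shown, so the wiring below is only a guess at how the added import gradio as gr is used.

import gradio as gr

demo = gr.Interface(
    fn=model_classifier,
    inputs=gr.Textbox(label="text"),
    outputs=gr.Label(label="label probabilities"),  # gr.Label renders a {label: probability} dict
)

if __name__ == "__main__":
    demo.launch()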