Mauricio Caicedo Palacio committed on
Commit f78daac · 1 Parent(s): cb8620a

First commit with the first version of my demo

Files changed (2)
  1. app.py +37 -0
  2. requirements.txt +3 -0
app.py ADDED
@@ -0,0 +1,37 @@
+ import gradio as gr
+ import torch
+ from transformers import AutoTokenizer, AutoModelForSequenceClassification
+
+ model_name = "macapa/emotion-classifier"
+ tokenizer = AutoTokenizer.from_pretrained("distilbert-base-uncased")
+ model = AutoModelForSequenceClassification.from_pretrained(model_name)
+
+ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+ model.to(device)
+
+ labels = {0: 'sadness',
+           1: 'joy',
+           2: 'love',
+           3: 'anger',
+           4: 'fear',
+           5: 'surprise'}
+
+ def predict(text):
+     inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
+     inputs = inputs.to(device)
+     outputs = model(**inputs)
+     predictions = torch.argmax(outputs.logits, dim=1)
+     label = labels[predictions.item()]
+     return label
+
+ # Create the Gradio interface
+ iface = gr.Interface(
+     fn=predict,
+     inputs=gr.Textbox(lines=2, placeholder="Enter text here..."),
+     outputs="textbox",
+     title="Emotion Classification",
+     description="Enter some text and the model will predict the emotion.",
+ )
+
+ # Launch the interface
+ iface.launch()
requirements.txt ADDED
@@ -0,0 +1,3 @@
+ transformers==4.47.0
+ gradio
+ torch
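
Once the dependencies in requirements.txt are installed, the demo can be started with python app.py and exercised from a separate process. The snippet below is a minimal smoke test, not part of this commit; it assumes the app is reachable at Gradio's default local address (http://127.0.0.1:7860), that the single-function Interface is exposed under the default "/predict" route, and that gradio_client (installed as a dependency of recent gradio releases) is available.

# Hypothetical smoke test for the running demo (not part of this commit).
from gradio_client import Client

# Connect to the locally launched interface at Gradio's default address.
client = Client("http://127.0.0.1:7860")

# A single-function gr.Interface is exposed under the default "/predict" route.
result = client.predict("I just got some wonderful news!", api_name="/predict")
print(result)  # expected: one of the six labels, e.g. "joy"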