nebiyu29 committed on
Commit
011a88c
1 Parent(s): c96e178

changed the inputs into tensors and added the maximum amount of padding elements

Browse files
Files changed (1) hide show
  1. app.py +5 -1
app.py CHANGED
@@ -16,9 +16,13 @@ def model_classifier(text):
16
  if len(text)==0:
17
  return f"the input text is {text}"
18
  else:
19
- encoded_input=tokenizer(text,return_tensors="pt",truncation=True,padding=True) #this is where the encoding happens
20
  input_ids=encoded_input["input_ids"]
21
  attention_mask=encoded_input["attention_mask"]
 
 
 
 
22
 
23
  logits=model(input_ids,attention_mask).logits #this is the logits of the labels
24
  probs_label=softmax(logits,dim=-1) #turning the probability distribution into normalize form
 
16
  if len(text)==0:
17
  return f"the input text is {text}"
18
  else:
19
+ encoded_input=tokenizer(text,return_tensors="pt",truncation=True,padding=True,max_length=512) #this is where the encoding happens
20
  input_ids=encoded_input["input_ids"]
21
  attention_mask=encoded_input["attention_mask"]
22
+
23
+ #turning the inputs into tensors
24
+ inputs_ids=torch.tensor(input_ids)
25
+ attention_mask=torch.tensor(attention_mask)
26
 
27
  logits=model(input_ids,attention_mask).logits #this is the logits of the labels
28
  probs_label=softmax(logits,dim=-1) #turning the probability distribution into normalize form