Spaces:
Sleeping
Sleeping
Limited the number of decimal places output
Browse files
app.py
CHANGED
@@ -2,6 +2,8 @@ import streamlit as st
|
|
2 |
from transformers import AutoTokenizer, AutoModelForSequenceClassification
|
3 |
import torch
|
4 |
import re
|
|
|
|
|
5 |
# Load the model and tokenizer
|
6 |
tokenizer = AutoTokenizer.from_pretrained("nebiyu29/fintunned-v2-roberta_GA")
|
7 |
model = AutoModelForSequenceClassification.from_pretrained("nebiyu29/fintunned-v2-roberta_GA")
|
@@ -104,10 +106,15 @@ st.write("Enter some text, and the model will classify it.")
|
|
104 |
text_input = st.text_input("Text Input")
|
105 |
#if st.button("Classify"):
|
106 |
predictions = classify(text_input)
|
|
|
107 |
labels_str=",".join(predictions["top_labels"])
|
108 |
probs_ints=",".join(map(str,predictions["top_probabilities"]))
|
|
|
|
|
|
|
|
|
109 |
#for prediction in predictions:
|
110 |
# st.write(f"Segment Text: {prediction['segment_text']}")
|
111 |
|
112 |
st.write(f"Label: {labels_str}")
|
113 |
-
st.write(f"Probability: {
|
|
|
2 |
from transformers import AutoTokenizer, AutoModelForSequenceClassification
|
3 |
import torch
|
4 |
import re
|
5 |
+
import pandas as pd
|
6 |
+
|
7 |
# Load the model and tokenizer
|
8 |
tokenizer = AutoTokenizer.from_pretrained("nebiyu29/fintunned-v2-roberta_GA")
|
9 |
model = AutoModelForSequenceClassification.from_pretrained("nebiyu29/fintunned-v2-roberta_GA")
|
|
|
106 |
text_input = st.text_input("Text Input")
|
107 |
#if st.button("Classify"):
|
108 |
predictions = classify(text_input)
|
109 |
+
|
110 |
labels_str=",".join(predictions["top_labels"])
|
111 |
probs_ints=",".join(map(str,predictions["top_probabilities"]))
|
112 |
+
|
113 |
+
df=pd.DataFrame({'probabilities: ',probs_ints})
|
114 |
+
formated_df=df.styled.format("{:.2f}").to_dict('list')
|
115 |
+
|
116 |
#for prediction in predictions:
|
117 |
# st.write(f"Segment Text: {prediction['segment_text']}")
|
118 |
|
119 |
st.write(f"Label: {labels_str}")
|
120 |
+
st.write(f"Probability: {formated_df["probabilities"][0]}")
|