Update app.py
app.py CHANGED
@@ -18,8 +18,6 @@ def load_model():
 @st.cache(suppress_st_warning=True, hash_funcs={tokenizers.Tokenizer: lambda _: None})
 def predict(title, summary, tokenizer, model):
     text = title + "\n" + summary
-    if len(text) < 20:
-        return 'error'
     tokens = tokenizer.encode(text)
     with torch.no_grad():
         logits = model(torch.as_tensor([tokens]))[0]
@@ -60,7 +58,7 @@ button = st.button('Run')
 if button:
     prediction, prediction_probs = predict(title, summary, tokenizer, model)
     ans = get_results(prediction, prediction_probs)
-    if
+    if len(title + "\n" + summary) < 20:
         st.error("Your input is too short. It is probably not a real article, please try again.")
     else:
         st.subheader('Results:')
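
For context, a minimal sketch of how the Run-button flow reads after this commit: the input-length check now lives in the Streamlit handler and shows st.error directly, instead of predict() returning an 'error' sentinel. Only predict, get_results, st.button('Run') and the error message come from the diff above; the widget names and the stub bodies below are assumptions added for illustration.

# Sketch of the post-commit control flow, with the model-specific helpers
# stubbed out so the snippet runs standalone with `streamlit run`.
import streamlit as st

def predict(title, summary, tokenizer, model):
    # stand-in for the real tokenizer/model call shown in the diff
    return "placeholder-label", [0.5, 0.5]

def get_results(prediction, prediction_probs):
    # stand-in for the real post-processing in app.py
    return {"label": prediction, "probs": prediction_probs}

tokenizer, model = None, None            # the real app gets these from load_model()

title = st.text_input('Title')           # assumed widget names, not shown in the diff
summary = st.text_area('Summary')
button = st.button('Run')

if button:
    prediction, prediction_probs = predict(title, summary, tokenizer, model)
    ans = get_results(prediction, prediction_probs)
    # the length check now happens in the UI handler instead of inside predict()
    if len(title + "\n" + summary) < 20:
        st.error("Your input is too short. It is probably not a real article, please try again.")
    else:
        st.subheader('Results:')
        st.write(ans)                    # assumed way of displaying get_results output

One consequence of this placement is that the error is reported through the normal Streamlit UI path, while predict() stays a pure model call that the @st.cache decorator can memoize without special-casing a sentinel return value.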