Spaces:
Sleeping
Sleeping
File size: 507 Bytes
Commit: 1bb6412
"""Streamlit app: summarize user-provided text with a seq2seq model."""
import streamlit as st
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# Single source of truth for the repo id. The original loaded the tokenizer
# from "summarization_model" but the model from the misspelled
# "summarizsation_model" — tokenizer and model must come from the same repo.
MODEL_NAME = "madanagrawal/summarization_model"


@st.cache_resource
def _load_model():
    """Load tokenizer and model once and reuse across Streamlit reruns."""
    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
    model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)
    return tokenizer, model


st.title('Text Summarization')

tokenizer, model = _load_model()

# Bug fix: the original did `text = st.text_input` (no call), assigning the
# function object itself, which then crashed inside the tokenizer.
text = st.text_input("Enter text to summarize")

# Only run inference once the user has typed something (empty string is falsy).
if text:
    inputs = tokenizer(text, return_tensors="pt").input_ids
    outputs = model.generate(inputs, max_new_tokens=100, do_sample=False)
    summary = tokenizer.decode(outputs[0], skip_special_tokens=True)
    # Bug fix: the original decoded the summary but never displayed it.
    st.write(summary)