# Hugging Face Spaces page header captured during scraping (Space status: "Sleeping").
import streamlit as st
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# Single source of truth for the checkpoint: the original loaded the tokenizer
# from "summarization_model" but the model from "summarizsation_model" (typo);
# both must come from the same repo for the vocab to match.
MODEL_NAME = "madanagrawal/summarization_model"

st.title('Text Summarization')

# st.text_input must be CALLED to render the widget and return the user's
# string — the original assigned the function object itself, so the tokenizer
# would have received a function instead of text.
text = st.text_input("Enter text to summarize")

# Only run the (expensive) model once the user has actually typed something.
if text:
    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
    model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)

    # Tokenize to input ids, generate deterministically (greedy decoding),
    # then decode and actually display the result — the original computed
    # the summary but never showed it.
    input_ids = tokenizer(text, return_tensors="pt").input_ids
    output_ids = model.generate(input_ids, max_new_tokens=100, do_sample=False)
    summary = tokenizer.decode(output_ids[0], skip_special_tokens=True)
    st.write(summary)