import gradio as gr
import spaces
import torch
from transformers import PegasusForConditionalGeneration, PegasusTokenizer

# Load the fine-tuned Pegasus summarization checkpoint once at startup
# and move it to the GPU.
device = "cuda"
model_name = "ailm/pegsus-text-summarization"
model = PegasusForConditionalGeneration.from_pretrained(model_name).to(device)
tokenizer = PegasusTokenizer.from_pretrained(model_name)
# ZeroGPU: a GPU is attached only while this function runs.
@spaces.GPU
def summarize(text):
    # Tokenize the input, generate a summary, and decode it back to text.
    tokens = tokenizer(text, truncation=True, padding="longest", return_tensors="pt").to(device)
    summary = model.generate(**tokens)
    return tokenizer.decode(summary[0], skip_special_tokens=True)
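
# Note: generate() above relies on the model's default generation settings.
# If you want to tune the output, standard transformers generate() arguments
# such as num_beams or max_length can be passed; the call below is an
# illustrative sketch, not part of the original Space:
#   summary = model.generate(**tokens, num_beams=4, max_length=128)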

# Simple text-in/text-out Gradio UI.
iface = gr.Interface(fn=summarize, inputs="text", outputs="text")
iface.launch()