import gradio as gr
import spaces
import torch
from transformers import PegasusForConditionalGeneration, PegasusTokenizer

device = "cuda"

# Load the fine-tuned Pegasus summarization model and its tokenizer once at startup.
model_name = "ailm/pegsus-text-summarization"
model = PegasusForConditionalGeneration.from_pretrained(model_name).to(device)
tokenizer = PegasusTokenizer.from_pretrained(model_name)

@spaces.GPU  # request a ZeroGPU slot for the duration of each call
def summarize(text):
    # Tokenize the input, truncating it to the model's maximum input length.
    tokens = tokenizer(text, truncation=True, padding="longest", return_tensors="pt").to(device)
    # Generate the summary and decode it back to plain text.
    summary = model.generate(**tokens)
    return tokenizer.decode(summary[0], skip_special_tokens=True)

# Simple Gradio UI: one text input, one text output.
iface = gr.Interface(fn=summarize, inputs="text", outputs="text")
iface.launch()