from transformers import AutoTokenizer, pipeline
import gradio as gr

# Load the mT5 tokenizer and the fine-tuned Sinhala generation model
tokenizer = AutoTokenizer.from_pretrained('google/mt5-base')
generator3 = pipeline('text2text-generation', model='Suchinthana/MT5-Sinhala-Wikigen-Experimental', tokenizer=tokenizer)
# Generation function used by Gradio: prepend the "writeWiki:" task prefix
# to the topic and return the sampled continuation
def generate_sinhala_text(prompt):
    generated_text = generator3("writeWiki:" + prompt, do_sample=True, max_length=75)[0]['generated_text']
    return generated_text
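
# Optional sanity check (a sketch, commented out so it does not run on every
# Space start; the sample topic is only an illustration). The text2text
# pipeline returns a list of dicts with a 'generated_text' key, which
# generate_sinhala_text unwraps:
# print(generate_sinhala_text("ශ්‍රී ලංකාව"))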
# Build the Gradio interface. Gradio displays its own progress indicator while
# generate_sinhala_text runs, so no separate loading callback is needed
# (gr.Interface's `live` parameter expects a boolean, not a function).
iface = gr.Interface(
    fn=generate_sinhala_text,
    inputs=gr.Textbox(label="Topic Here"),
    outputs=gr.Textbox(lines=3, placeholder="Result Comes Here...", label="Output"),
)
iface.launch()
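# launch() serves the app on the local host (the Space runtime here). When run
# outside Spaces, launch(share=True) can be used for a temporary public link;
# that flag is optional and not needed on Hugging Face Spaces.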