from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import gradio as gr

# Load the fine-tuned Urdu-news GPT-2 checkpoint and its tokenizer.
tokenizer = AutoTokenizer.from_pretrained("Imran1/gpt2-urdu-news")
model = AutoModelForCausalLM.from_pretrained("Imran1/gpt2-urdu-news")
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

def text_generate(text):
    # do_sample=True is needed for top_k / top_p / temperature to take effect.
    generated = pipe(text, max_length=200, do_sample=True, top_k=200, top_p=0.95, temperature=0.7)
    return generated[0]["generated_text"]

# Example Urdu prompts (roughly: "in import prices ...", "said in a ruling that ...").
exmp = ["درمدی قیمتوں میں", "ایک فیصلے میں کہا کہ"]

gr.Interface(fn=text_generate, inputs="text", outputs="text", examples=exmp,
             title="Fine-Tuned GPT-2 for Urdu News Text Generation").launch()
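
For reference, roughly equivalent output can be produced without the pipeline wrapper or the Gradio UI by calling model.generate directly. The sketch below is an assumption-based illustration, not part of the original card: it reuses the same checkpoint and sampling settings, enables do_sample explicitly so they take effect, and feeds in one of the example prompts above.

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("Imran1/gpt2-urdu-news")
model = AutoModelForCausalLM.from_pretrained("Imran1/gpt2-urdu-news")

# Encode one of the example prompts and sample a continuation.
inputs = tokenizer("ایک فیصلے میں کہا کہ", return_tensors="pt")
output_ids = model.generate(
    **inputs,
    max_length=200,
    do_sample=True,
    top_k=200,
    top_p=0.95,
    temperature=0.7,
    pad_token_id=tokenizer.eos_token_id,
)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))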