# app.py
"""Gradio app: generate Persian questions from a free-text context.

Uses a fine-tuned mT5 model (``SinaRp/Question_generator_persian``) through
the Hugging Face ``text2text-generation`` pipeline.
"""
from functools import lru_cache

import gradio as gr
from transformers import pipeline


@lru_cache(maxsize=1)
def _load_pipeline():
    """Build the text2text-generation pipeline exactly once (model load is slow)."""
    # num_return_sequences is intentionally NOT set here: it is supplied per
    # generation call, so one cached pipeline serves every requested count.
    return pipeline(
        "text2text-generation",
        model="SinaRp/Question_generator_persian",
        do_sample=True,
        top_k=5,
    )


def load_question_generator(num_questions=None):
    """Return the (cached) question-generation pipeline.

    Args:
        num_questions: Accepted for backward compatibility with older callers
            but ignored — the number of questions is chosen at generation
            time, so the expensive model load happens only once per process.
    """
    return _load_pipeline()


def generate_questions(context, num_questions):
    """Generate questions from *context* and join them for display.

    Args:
        context: Input text to generate questions from.
        num_questions: How many questions to produce (may arrive as a float
            from the Gradio slider; coerced to int).

    Returns:
        The generated questions separated by blank lines, or an error
        message string if generation fails.
    """
    try:
        generator = load_question_generator()
        outputs = generator(
            context,
            max_length=64,
            num_return_sequences=int(num_questions),
        )
        return "\n\n".join(item["generated_text"] for item in outputs)
    # Broad catch is deliberate at this UI boundary: show the error in the
    # output box instead of crashing the app.
    except Exception as e:
        return f"Error generating questions: {str(e)}"


iface = gr.Interface(
    fn=generate_questions,
    inputs=[
        gr.Textbox(lines=5, label="Enter your text context"),
        gr.Slider(minimum=1, maximum=5, value=3, step=1, label="Number of questions"),
    ],
    outputs=gr.Textbox(label="Generated Questions"),
    title="Persian Question Generator",
    description="Generate questions from your text using Fine-tuned mT5 model",
)

if __name__ == "__main__":
    iface.launch()