import gradio as gr
from transformers import pipeline

model_name = "IProject-10/roberta-base-finetuned-squad2"

# Build an extractive question-answering pipeline from the fine-tuned SQuAD 2.0 checkpoint.
nlp = pipeline("question-answering", model=model_name, tokenizer=model_name)


def predict(context, question):
    """Return the answer span extracted from `context` for the given `question`."""
    res = nlp({"question": question, "context": context})
    return res["answer"]


# Markdown description rendered under the interface title (left empty here).
md = """
"""

# Default values pre-filled in the demo's input boxes.
context = "The Amazon rainforest, also known in English as Amazonia or the Amazon Jungle, is a moist broadleaf forest that covers most of the Amazon basin of South America..."
question = "Which continent is the Amazon rainforest in?"

gr.Interface(
    predict,
    inputs=[
        gr.Textbox(lines=7, value=context, label="Context Paragraph"),
        gr.Textbox(lines=2, value=question, label="Question"),
    ],
    outputs=gr.Textbox(label="Answer"),
    title="Question Answering System",
    description=md,
).launch()