# NOTE(review): the original first three lines ("Spaces:" / "Sleeping" /
# "Sleeping") were Hugging Face Spaces page-header residue from scraping,
# not code; converted to this comment so the file is valid Python.
import gradio as gr
from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline

# RoBERTa-base checkpoint fine-tuned on SQuAD 2.0 for extractive QA.
model_name = "IProject-10/roberta-base-finetuned-squad2"

# Build the question-answering pipeline once at import time so every
# request served by the Gradio app reuses the same loaded weights.
nlp = pipeline("question-answering", model=model_name, tokenizer=model_name)
def predict(context: str, question: str) -> str:
    """Answer a natural-language question from a context passage.

    Parameters
    ----------
    context : str
        The paragraph of text the answer should be extracted from.
    question : str
        The question to answer.

    Returns
    -------
    str
        The answer span extracted by the QA pipeline.
    """
    result = nlp({"question": question, "context": context})
    return result["answer"]
# Project description shown under the app title (Markdown).
md = """In this project work we build a Text Retrieval Question-Answering system using BERT model. QA system is an important NLP task in which the user asks a question in natural language to the model as input and the model provides the answer in natural language as output.
The language representation model BERT stands for Bidirectional Encoder Representations from Transformers. The model is based on the Devlin et al. paper: [BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding](https://arxiv.org/abs/1810.04805).
Dataset used is SQuAD 2.0 [Stanford Question Answering Dataset 2.0](https://rajpurkar.github.io/SQuAD-explorer/). It is a reading comprehension dataset which consists of question-answer pairs derived from Wikipedia articles written by crowdworkers. The answer to all the questions is in the form of a span of text.
"""

# Default values pre-filled in the input textboxes.
context = "The Amazon rainforest, also known in English as Amazonia or the Amazon Jungle, is a moist broadleaf forest that covers most of the Amazon basin of South America..."
question = "Which continent is the Amazon rainforest in?"

# Example pair shown in the Gradio "Examples" panel.
apple_context = "An apple is an edible fruit produced by an apple tree (Malus domestica)..."
apple_question = "How many years have apples been grown for?"
# Wire the demo UI: two text inputs (context paragraph, question) feeding
# predict(), one text output for the answer, plus a worked example and
# the project description, then start the Gradio server.
gr.Interface(
    predict,
    inputs=[
        gr.Textbox(lines=7, value=context, label="Context Paragraph"),
        gr.Textbox(lines=2, value=question, label="Question"),
    ],
    outputs=gr.Textbox(label="Answer"),
    examples=[[apple_context, apple_question]],
    title="Question & Answering with BERT using the SQuAD 2 dataset",
    description=md,
).launch()