# Benchmarks: NT, "Why is blood important?"
#model_name = "deepset/roberta-base-squad2"        # 180
#model_name = "deepset/deberta-v3-large-squad2"    # est. 4X
model_name = "deepset/tinyroberta-squad2"          # 86
#model_name = "deepset/minilm-uncased-squad2"      # 96
#model_name = "deepset/electra-base-squad2"        # 185 (nice wordy results)
# Install Dependencies
# Use my Conda qna environment, then you're all set
# !pip install transformers
# !pip install ipywidgets
# !pip install gradio  # see setup for installing gradio
import gradio as gr
from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline

# Build the question-answering pipeline from the selected model checkpoint.
nlp = pipeline('question-answering', model=model_name, tokenizer=model_name)
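# Optional sanity check (a sketch, not part of the original script): the
# question-answering pipeline accepts a dict with 'question' and 'context'
# keys and returns a dict containing 'answer', 'score', 'start', and 'end'.
# The context string below is a made-up illustration, not one of the bundled files.
# print(nlp({'question': 'Why is blood important?',
#            'context': 'Blood carries oxygen and nutrients through the body.'}))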
def question_answer(context_filename, question):
    """Produce an NLP answer based on the input text filename and question."""
    with open(context_filename) as f:
        context = f.read()
    nlp_input = {'question': question, 'context': context}
    result = nlp(nlp_input)
    return result['answer']
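# Example call (assumes 'world-nt.txt' sits next to this script, as the
# dropdown below expects):
# print(question_answer('world-nt.txt', 'Why is blood important?'))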
demo = gr.Interface(
    fn=question_answer,
    #inputs=gr.inputs.Textbox(lines=2, placeholder='Enter your question'),
    inputs=[
        gr.Dropdown([
            'spiderman.txt',
            'world-john.txt',
            'world-romans.txt',
            'world-nt.txt',
            'world-ot.txt']),  # 'lotr01.txt'
        "text"
    ],
    outputs="textbox")
demo.launch(share=False)
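# Note: share=False serves the demo on localhost only; passing share=True
# asks Gradio to create a temporary public URL instead.
# demo.launch(share=True)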