# app.py — Gradio demo serving a finetuned T5 medical question-answering model.
import gradio as gr
from transformers import T5ForConditionalGeneration, T5Tokenizer
from textwrap import fill
# Load the finetuned model and tokenizer
# NOTE(review): path is a local training checkpoint — presumably produced by a
# Kaggle fine-tuning run; confirm the checkpoint directory ships with the app.
last_checkpoint = "model/out/kaggle/working/results/checkpoint-1000"
# Model weights and tokenizer are both restored from the same checkpoint dir.
finetuned_model = T5ForConditionalGeneration.from_pretrained(last_checkpoint)
tokenizer = T5Tokenizer.from_pretrained(last_checkpoint)
def answer_question(question: str) -> str:
    """Generate a model answer for a medical question.

    Prepends the instruction prefix the model was finetuned with, runs
    greedy generation, and word-wraps the decoded answer to 80 columns
    for readable display in the Gradio textbox.

    Args:
        question: The user's medical question, as plain text.

    Returns:
        The model's decoded answer, wrapped at 80 characters.
    """
    # Same instruction prefix used during finetuning — keep it in sync
    # with the training prompt format.
    prompt = "Answer this question truthfully: " + question
    # Truncation guards against inputs longer than the model's max length.
    tokenized_inputs = tokenizer(
        prompt, return_tensors="pt", padding=True, truncation=True
    )
    outputs = finetuned_model.generate(**tokenized_inputs)
    answer = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return fill(answer, width=80)
# Create Gradio interface
# Simple single-textbox in / single-textbox out UI wired to answer_question.
iface = gr.Interface(
fn=answer_question,
inputs="text",
outputs="text",
title="Medical Question Answering",
description="Enter a medical question to get a truthful answer from the finetuned T5 model.",
# Each example is a list of input values (one per input component).
examples=[["What is the relationship between very low Mg2+ levels, PTH levels, and Ca2+ levels?"]]
)
# Launch the app
# Blocks and serves the web UI; default host/port unless overridden by env.
iface.launch()