File size: 1,960 Bytes
8e63ced 1ac767c 489ad9f 5149d00 43a91cc e4f30a4 5149d00 8e167cf 33c504d 1ac767c 0fb3df9 5149d00 0fb3df9 5149d00 0fb3df9 5149d00 d410c01 0fb3df9 d410c01 0fb3df9 5149d00 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 |
# Standard library
import os

# Third-party
import gradio as gr
from huggingface_hub import InferenceClient
from peft import PeftConfig, PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hugging Face Inference API client; the token is read from the environment
# (HF_TOKEN) rather than hard-coded.  The original line `impot os` was a
# syntax error, so `os` was never actually imported.
client = InferenceClient(api_key=os.getenv("HF_TOKEN"))

# Fine-tuned Bio-Mistral checkpoint and its tokenizer.
tokenizer = AutoTokenizer.from_pretrained("BeastGokul/Bio-Mistral-7B-finetuned")
base_model = AutoModelForCausalLM.from_pretrained("BeastGokul/Bio-Mistral-7B-finetuned")
# Keep the embedding matrix in sync with the tokenizer's vocabulary size
# (required if tokens were added during fine-tuning).
base_model.resize_token_embeddings(len(tokenizer))
# NOTE(review): `model` is loaded but never used below, and PeftModel /
# PeftConfig are imported but never applied — confirm whether a LoRA adapter
# merge onto the base Instruct model was intended here.
model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-Instruct-v0.3")
import os

from huggingface_hub import InferenceClient

# Set up the Hugging Face Inference API client.  The token comes from the
# environment instead of the original hard-coded "YOUR_HF_API_KEY"
# placeholder, which would fail authentication and risks leaking a real key
# if someone pastes one in.
client = InferenceClient(api_key=os.getenv("HF_TOKEN"))
def get_response(user_query):
    """Ask the hosted Mistral-Instruct model a biomedical question.

    Args:
        user_query: The user's question as plain text.

    Returns:
        The model's reply as a single string.
    """
    # Single-turn request: just the user's message, no system prompt.
    messages = [
        {
            "role": "user",
            "content": user_query,
        }
    ]
    # `chat_completion` is the documented InferenceClient method; the original
    # `client.chat_completions.create(...)` attribute does not exist.
    response = client.chat_completion(
        model="mistralai/Mistral-7B-Instruct-v0.3",
        messages=messages,
        max_tokens=500,
    )
    # This is a non-streaming call, so the full reply is in
    # choices[0].message.content.  (The original joined `chunk.choices[0].delta`
    # fragments, which only exist when stream=True is requested.)
    return response.choices[0].message.content
# Example biomedical questions offered as one-click prompts in the UI below.
example_queries = [
    "What are the symptoms of anemia?",
    "Explain the genetic basis of cystic fibrosis.",
    "What are the latest treatments for Alzheimer's disease?",
    "How does insulin affect blood sugar levels?",
    "Can you summarize recent advances in cancer immunotherapy?"
]
# Assemble the Gradio interface: a query box, a response box, clickable
# example prompts, and a submit button wired to get_response.
with gr.Blocks() as demo:
    gr.Markdown("# Biomedical Assistant")
    query_box = gr.Textbox(placeholder="Enter your biomedical query...", label="Your Query")
    answer_box = gr.Textbox(label="Response")
    # Registers the examples against the query box; the return value is unused.
    gr.Examples(examples=example_queries, inputs=query_box)
    ask_button = gr.Button("Get Response")
    ask_button.click(fn=get_response, inputs=query_box, outputs=answer_box)
demo.launch()
|