Update app.py
Browse files
app.py
CHANGED
@@ -10,41 +10,49 @@ base_model = AutoModelForCausalLM.from_pretrained("BeastGokul/Bio-Mistral-7B-fin
|
|
10 |
base_model.resize_token_embeddings(len(tokenizer))
|
11 |
model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-Instruct-v0.3")
|
12 |
|
13 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
14 |
messages = [
|
15 |
{
|
16 |
"role": "user",
|
17 |
"content": user_query
|
18 |
}
|
19 |
-
{
|
20 |
-
"role": "biomedical assistant",
|
21 |
-
"content": ""
|
22 |
-
}
|
23 |
]
|
24 |
|
25 |
-
|
26 |
response = client.chat_completions.create(
|
27 |
-
model=
|
28 |
messages=messages,
|
29 |
max_tokens=500
|
30 |
)
|
31 |
-
|
32 |
-
# Extract and return the response content
|
33 |
-
return response.choices[0].message['content']
|
34 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
35 |
with gr.Blocks() as demo:
|
36 |
-
gr.Markdown("# Biomedical
|
37 |
-
|
38 |
-
|
39 |
-
|
40 |
-
|
41 |
-
|
42 |
-
|
43 |
-
|
44 |
-
|
45 |
-
output = gr.Textbox(label="Response from Biomedical Assistant")
|
46 |
-
|
47 |
-
submit_button.click(get_model_response, inputs=user_input, outputs=output)
|
48 |
-
|
49 |
-
# Launch the app
|
50 |
demo.launch()
|
|
|
10 |
# Resize the embedding matrix so the fine-tuned model's vocabulary matches the
# tokenizer (the tokenizer has presumably gained extra/special tokens — confirm).
base_model.resize_token_embeddings(len(tokenizer))
# NOTE(review): this loads a *second* full model into memory; nothing visible in
# this file uses `model` afterwards — the Gradio app below answers via the remote
# InferenceClient. Verify `model` (and `base_model`) are actually needed here.
model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-Instruct-v0.3")

import gradio as gr
from huggingface_hub import InferenceClient

# Set up the Hugging Face Inference API client.
# SECURITY NOTE(review): never commit a real token — load it from the
# environment (e.g. os.environ["HF_API_KEY"]) before deploying.
client = InferenceClient(api_key="YOUR_HF_API_KEY")
19 |
+
def get_response(user_query):
    """Send a biomedical question to the hosted Mistral model and return its answer.

    Parameters:
        user_query (str): The question typed by the user in the Gradio textbox.

    Returns:
        str: The assistant's full reply text.
    """
    # Single-turn chat: the model only sees the current question, no history.
    messages = [
        {
            "role": "user",
            "content": user_query
        }
    ]

    # BUG FIX: InferenceClient exposes the OpenAI-compatible path
    # `client.chat.completions.create`; the original called the nonexistent
    # `client.chat_completions.create`, raising AttributeError on every query.
    response = client.chat.completions.create(
        model="mistralai/Mistral-7B-Instruct-v0.3",
        messages=messages,
        max_tokens=500
    )

    # BUG FIX: without stream=True this call returns a single completion
    # object, not an iterable of chunks, so the original
    # `"".join(chunk.choices[0].delta.content for chunk in response)`
    # would fail. Read the complete message directly instead.
    return response.choices[0].message.content
|
38 |
+
|
39 |
+
# Sample biomedical questions rendered as clickable examples under the
# query box in the Gradio UI (consumed by gr.Examples below).
example_queries = [
    "What are the symptoms of anemia?",
    "Explain the genetic basis of cystic fibrosis.",
    "What are the latest treatments for Alzheimer's disease?",
    "How does insulin affect blood sugar levels?",
    "Can you summarize recent advances in cancer immunotherapy?"
]
|
47 |
+
|
48 |
+
# Assemble the Gradio front end for the biomedical Q&A assistant and start it.
with gr.Blocks() as demo:
    gr.Markdown("# Biomedical Assistant")

    # Input and output widgets.
    query_box = gr.Textbox(placeholder="Enter your biomedical query...", label="Your Query")
    answer_box = gr.Textbox(label="Response")

    # Clickable sample questions that pre-fill the query box.
    samples = gr.Examples(examples=example_queries, inputs=query_box)

    # Wire the submit button to the remote-inference handler.
    ask_button = gr.Button("Get Response")
    ask_button.click(fn=get_response, inputs=query_box, outputs=answer_box)

demo.launch()
|