ASaboor commited on
Commit
36b4a9d
·
verified ·
1 Parent(s): e3aacf9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +43 -0
app.py CHANGED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import streamlit as st
from transformers import pipeline, BartTokenizer, BartForConditionalGeneration

# Hugging Face model repository path for the QnA model.
model_repo_path_qna = 'ASaboor/Bart_Therapy'


@st.cache_resource
def _load_qna_pipeline(repo_path):
    """Load the BART model/tokenizer once and build a generation pipeline.

    Streamlit re-executes this script on every widget interaction;
    ``@st.cache_resource`` prevents the expensive ``from_pretrained`` calls
    from repeating on each rerun.
    """
    model = BartForConditionalGeneration.from_pretrained(repo_path)
    tokenizer = BartTokenizer.from_pretrained(repo_path)
    # BUG FIX: BartForConditionalGeneration is a seq2seq (generative) model
    # and is incompatible with the extractive 'question-answering' pipeline,
    # which requires a span-prediction head (e.g. AutoModelForQuestionAnswering).
    # The matching pipeline task for this model class is 'text2text-generation'.
    return pipeline('text2text-generation', model=model, tokenizer=tokenizer)


# Streamlit app layout — set_page_config must be the first st.* command executed.
st.set_page_config(page_title="QnA App", page_icon=":memo:", layout="wide")

st.title("Question and Answer App")
st.write("""
This app uses a fine-tuned BART model to answer questions based on the provided context.
Enter the context and your question below, then click "Get Answer" to see the result.
""")

# Load (or fetch from cache) the QnA pipeline.
qna_pipeline = _load_qna_pipeline(model_repo_path_qna)

# User input for QnA.
context_input = st.text_area("Enter context for QnA", height=300, placeholder="Paste your context here...")
question_input = st.text_input("Enter question", placeholder="Type your question here...")

# Generate the answer.
if st.button("Get Answer"):
    if context_input and question_input:
        with st.spinner("Generating answer..."):
            try:
                # NOTE(review): the prompt format assumes a T5/BART-style
                # "question: ... context: ..." fine-tune — confirm against the
                # ASaboor/Bart_Therapy model card.
                prompt = f"question: {question_input} context: {context_input}"
                result = qna_pipeline(prompt)

                # text2text-generation returns a list of dicts keyed by
                # 'generated_text' (not 'answer' as in the extractive task).
                st.subheader("Answer")
                st.write(result[0]['generated_text'])
            except Exception as e:
                # Surface inference failures in the UI rather than crashing the app.
                st.error(f"An error occurred during QnA: {e}")
    else:
        st.warning("Please enter both context and question for QnA.")