AbdulHadi806 committed
Commit d9e831a · verified · 1 Parent(s): 1117424

Update app.py

Files changed (1)
  1. app.py +21 -16
app.py CHANGED
@@ -1,16 +1,20 @@
-"""app.py"""
-
 import streamlit as st
-from transformers import pipeline, GPT2LMHeadModel, GPT2Tokenizer
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+model_name = "Llama-2-7b-finetuned-with-QLoRa"
+
+# Load model and tokenizer
+@st.cache_resource
+def load_model_and_tokenizer(model_name):
+    model = AutoModelForCausalLM.from_pretrained(model_name)
+    tokenizer = AutoTokenizer.from_pretrained(model_name)
+    return model, tokenizer
 
-# Load pre-trained GPT-2 model and tokenizer
-model_name = "gpt2"
-model = GPT2LMHeadModel.from_pretrained(model_name)
-tokenizer = GPT2Tokenizer.from_pretrained(model_name)
+model, tokenizer = load_model_and_tokenizer(model_name)
 
-# Define function to generate blog post
-def generate_blogpost(topic):
-    input_text = f"Blog post about {topic}:"
+# Function to generate response
+def generate_response(topic):
+    input_text = f"Response about {topic}:"
     input_ids = tokenizer.encode(input_text, return_tensors="pt")
 
     # Generate text
@@ -22,16 +26,17 @@ def generate_blogpost(topic):
 
 # Streamlit app
 def main():
-    st.title("Blog Post Generator")
+    st.title("Llama 2 Fine-Tuned Demo with QLoRa")
 
     # Sidebar input for topic
-    topic = st.sidebar.text_input("Enter topic for the blog post", "a crazy person driving a car")
+    topic = st.sidebar.text_input("Enter your topic", "a crazy person driving a car")
 
     # Generate button
-    if st.sidebar.button("Generate Blog Post"):
-        blogpost = generate_blogpost(topic)
-        st.subheader(f"Generated Blog Post on {topic}:")
-        st.write(blogpost)
+    if st.sidebar.button("Generate Response"):
+        with st.spinner("Generating response..."):
+            response = generate_response(topic)
+        st.subheader(f"Generated response on '{topic}':")
+        st.write(response)
 
 if __name__ == "__main__":
     main()
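
Note: the diff collapses the body of the generation step (new lines 19-25), so the actual model.generate call is not shown above. As a rough, hypothetical sketch of how a generate_response of this shape typically finishes, assuming sampled decoding and an arbitrary length cap (none of these parameter values come from this commit):

# Hypothetical continuation of generate_response (not taken from this commit)
def generate_response(topic):
    input_text = f"Response about {topic}:"
    input_ids = tokenizer.encode(input_text, return_tensors="pt")

    # Generate text (max_length, do_sample, and top_p are illustrative assumptions)
    output_ids = model.generate(
        input_ids,
        max_length=200,
        do_sample=True,
        top_p=0.9,
        pad_token_id=tokenizer.eos_token_id,
    )

    # Decode the generated token ids back into a string
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)

The app would be launched with: streamlit run app.py. The name Llama-2-7b-finetuned-with-QLoRa must resolve to a local directory or a reachable Hub repository for from_pretrained to succeed.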