Hugging Face Spaces — commit "Update app.py" (Space status: Runtime error). Diff of app.py follows (file CHANGED).
@@ -99,29 +99,27 @@ tokenizer.save_pretrained("/fine_tuned_model")
|
|
99 |
|
100 |
|
101 |
import gradio as gr
|
|
|
102 |
|
|
|
103 |
model_dir= "/fine_tuned_model"
|
104 |
-
tokenizer =
|
105 |
-
model =
|
106 |
-
|
107 |
-
#
|
108 |
-
|
109 |
-
|
110 |
-
# Define function
|
111 |
-
def
|
112 |
-
|
113 |
-
|
114 |
-
|
115 |
-
|
116 |
-
|
117 |
-
|
118 |
-
|
119 |
-
|
120 |
-
|
121 |
-
|
122 |
-
|
123 |
-
|
124 |
-
gr.Interface(fn=answer_question, inputs="text", outputs="text").launch()
|
125 |
-
|
126 |
"""## Deploy the Gradio Interface in a Huggingface Space"""
|
127 |
|
|
|
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# --- Model initialisation -------------------------------------------------
# Load the fine-tuned model and its tokenizer from the local directory.
# NOTE(review): "/fine_tuned_model" is an absolute path — presumably the
# save location from the earlier fine-tuning step; confirm it exists in
# the Space container at runtime.
model_dir = "/fine_tuned_model"
tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForCausalLM.from_pretrained(model_dir)

# Wrap model + tokenizer in a ready-to-call text-generation pipeline.
text_gen = pipeline("text-generation", model=model, tokenizer=tokenizer)
# Define the question-answering function used by the Gradio interface.
def question_answer(question):
    """Generate an answer to *question* with the fine-tuned model.

    Parameters
    ----------
    question : str
        The user's question; passed verbatim as the generation prompt.

    Returns
    -------
    str
        The model's generated continuation, with the echoed prompt
        removed and surrounding whitespace stripped.
    """
    # The pipeline returns [{'generated_text': prompt + continuation}, ...].
    generated = text_gen(question, max_length=200, num_return_sequences=1)
    # Remove the echoed prompt so only the model's continuation remains.
    answer = generated[0]['generated_text'].replace(question, "")
    # BUG FIX: the original code split this *decoded text* into words,
    # called int(word) on each, and fed the result to tokenizer.decode().
    # Pipeline output is already text, not token ids, so int() raised
    # ValueError on virtually every input (the Space's "Runtime error").
    # The cleaned text is the answer — return it directly.
    return answer.strip()
119 |
+
|
120 |
+
# Set up gradio interface
|
121 |
+
iface = gr.Interface(fn = question_answer, inputs='text', outputs='text', title='Fine-tuned Enron Question Answering',
|
122 |
+
description='Ask a question regarding the Enron case')
|
123 |
+
iface.launch()
|
|
|
|
|
|
|
|
|
124 |
"""## Deploy the Gradio Interface in a Huggingface Space"""
|
125 |
|