Spaces:
Runtime error
Runtime error
Update app.py
Browse filesGen AI text generation with GPT2!
app.py
CHANGED
@@ -131,19 +131,51 @@
|
|
131 |
# However, in contrast with zero-shot classification, few-shot classification makes use of very few labeled samples during the training process.
|
132 |
# The implementation of the few-shot classification methods can be found in OpenAI, where the GPT3 classifier is a well-known example of a few-shot classifier.
|
133 |
|
134 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
135 |
import gradio as grad
|
136 |
|
137 |
-
|
|
|
138 |
|
139 |
-
def
|
140 |
-
|
141 |
-
|
142 |
-
response =
|
143 |
return response
|
144 |
|
145 |
-
txt=grad.Textbox(lines=1, label="English", placeholder="
|
146 |
-
|
147 |
-
out=grad.Textbox(lines=1, label="Classification")
|
148 |
|
149 |
-
grad.Interface(
|
|
|
131 |
# However, in contrast with zero-shot classification, few-shot classification makes use of very few labeled samples during the training process.
|
132 |
# The implementation of the few-shot classification methods can be found in OpenAI, where the GPT3 classifier is a well-known example of a few-shot classifier.
|
133 |
|
134 |
+
# Deploying the following code works but comes with a warning: "No model was supplied, defaulted to facebook/bart-large-mnli and revision c626438 (https://huggingface.co/facebook/bart-large-mnli).
|
135 |
+
# Using a pipeline without specifying a model name and revision in production is not recommended."
|
136 |
+
|
137 |
+
# from transformers import pipeline
|
138 |
+
# import gradio as grad
|
139 |
+
|
140 |
+
# zero_shot_classifier = pipeline("zero-shot-classification")
|
141 |
+
|
142 |
+
# def classify(text,labels):
|
143 |
+
# classifer_labels = labels.split(",")
|
144 |
+
# #["software", "politics", "love", "movies", "emergency", "advertisement","sports"]
|
145 |
+
# response = zero_shot_classifier(text,classifer_labels)
|
146 |
+
# return response
|
147 |
+
|
148 |
+
# txt=grad.Textbox(lines=1, label="English", placeholder="text to be classified")
|
149 |
+
# labels=grad.Textbox(lines=1, label="Labels", placeholder="comma separated labels")
|
150 |
+
# out=grad.Textbox(lines=1, label="Classification")
|
151 |
+
|
152 |
+
# grad.Interface(classify, inputs=[txt,labels], outputs=out).launch()
|
153 |
+
|
154 |
+
#-----------------------------------------------------------------------------------
|
155 |
+
# 8. Text Generation Task/Models
|
156 |
+
# The earliest text generation models were based on Markov chains. Markov chains are like a state machine wherein
|
157 |
+
# using only the previous state, the next state is predicted. This is similar also to what we studied in bigrams.
|
158 |
+
|
159 |
+
# Post the Markov chains, recurrent neural networks (RNNs) , which were capable of retaining a greater context of the text, were introduced.
|
160 |
+
# They are based on neural network architectures that are recurrent in nature. RNNs are able to retain a greater context of the text that was introduced.
|
161 |
+
# Nevertheless, the amount of information that these kinds of networks are able to remember is constrained, and it is also difficult to train them,
|
162 |
+
# which means that they are not effective at generating lengthy texts. To counter this issue with RNNs, LSTM architectures were evolved,
|
163 |
+
# which could capture long-term dependencies in text. Finally, we came to transformers, whose decoder architecture became popular for generative models
|
164 |
+
# used for generating text as an example.
|
165 |
+
|
166 |
+
# Load GPT-2 for causal text generation.
# GPT2LMHeadModel is the decoder-only transformer with a language-modeling
# head; GPT2Tokenizer maps raw text to the token ids the model expects.
from transformers import GPT2LMHeadModel, GPT2Tokenizer
import gradio as grad

# Pretrained weights and vocabulary are fetched from the Hugging Face hub
# (cached locally after the first download).
mdl = GPT2LMHeadModel.from_pretrained('gpt2')
gpt2_tkn = GPT2Tokenizer.from_pretrained('gpt2')
|
171 |
|
172 |
+
def generate(starting_text):
    """Run GPT-2 generation on *starting_text*.

    The prompt is encoded to a PyTorch tensor of token ids, fed to the
    model's ``generate`` method, and the resulting token-id tensor is
    returned as-is (the UI deliberately displays raw tensors at this
    stage of the tutorial, not decoded text).
    """
    prompt_ids = gpt2_tkn.encode(starting_text, return_tensors='pt')
    return mdl.generate(prompt_ids)
|
177 |
|
178 |
+
# Minimal Gradio front end: one single-line text input feeding generate(),
# one output box showing the raw generated tensor.
txt = grad.Textbox(lines=1, label="English", placeholder="English Text here")
out = grad.Textbox(lines=1, label="Generated Tensors")

# launch() starts the local web server and blocks until it is stopped.
grad.Interface(generate, inputs=txt, outputs=out).launch()
|