run480 committed on
Commit 7cf499d
1 Parent(s): 03af3c3

Update app.py


T5 task 3: English-to-German translation

Files changed (1)
  1. app.py +42 -22
app.py CHANGED
@@ -222,7 +222,7 @@
  # grad.Interface(generate, inputs=txt, outputs=out).launch()

  #-----------------------------------------------------------------------------------
- # 10. Text-to-Text Generation using the T5 model - first use case generates a question given some context.
+ # 10. Text-to-Text Generation using the T5 model - Task 1 generates a question given some context.

  # A transformer-based architecture that takes a text-to-text approach is referred to as T5, which stands for Text-to-Text Transfer Transformer.

@@ -247,35 +247,55 @@

  # context=grad.Textbox(lines=10, label="English", placeholder="Context")
  # ans=grad.Textbox(lines=1, label="Answer")
- # out=grad.Textbox(lines=1, label="Genereated Question")
+ # out=grad.Textbox(lines=1, label="Generated Question")

  # grad.Interface(text2text, inputs=[context,ans], outputs=out).launch()

  #-----------------------------------------------------------------------------------
- # 11. Text-to-Text Generation using the T5 model - second use case summarizes a paragraph of text.
+ # 11. Text-to-Text Generation using the T5 model - Task 2 summarizes a paragraph of text.

- from transformers import AutoTokenizer, AutoModelWithLMHead
+ # from transformers import AutoTokenizer, AutoModelWithLMHead
+ # import gradio as grad
+
+ # text2text_tkn = AutoTokenizer.from_pretrained("deep-learning-analytics/wikihow-t5-small")
+ # mdl = AutoModelWithLMHead.from_pretrained("deep-learning-analytics/wikihow-t5-small")
+
+ # def text2text_summary(para):
+ #     initial_txt = para.strip().replace("\n","")
+ #     tkn_text = text2text_tkn.encode(initial_txt, return_tensors="pt")
+ #     tkn_ids = mdl.generate(
+ #         tkn_text,
+ #         max_length=250,
+ #         num_beams=5,
+ #         repetition_penalty=2.5,
+ #         early_stopping=True
+ #     )
+ #     response = text2text_tkn.decode(tkn_ids[0], skip_special_tokens=True)
+ #     return response
+
+ # para=grad.Textbox(lines=10, label="Paragraph", placeholder="Copy paragraph")
+ # out=grad.Textbox(lines=1, label="Summary")
+
+ # grad.Interface(text2text_summary, inputs=para, outputs=out).launch()
+
+ #-----------------------------------------------------------------------------------
+ # 12. Text-to-Text Generation using the T5 model - Task 3 English-to-German translation.
+
+ from transformers import T5ForConditionalGeneration, T5Tokenizer
  import gradio as grad

- text2text_tkn = AutoTokenizer.from_pretrained("deep-learning-analytics/wikihow-t5-small")
- mdl = AutoModelWithLMHead.from_pretrained("deep-learning-analytics/wikihow-t5-small")
-
- def text2text_summary(para):
-     initial_txt = para.strip().replace("\n","")
-     tkn_text = text2text_tkn.encode(initial_txt, return_tensors="pt")
-     tkn_ids = mdl.generate(
-         tkn_text,
-         max_length=250,
-         num_beams=5,
-         repetition_penalty=2.5,
-         early_stopping=True
-     )
-     response = text2text_tkn.decode(tkn_ids[0], skip_special_tokens=True)
-     return response
-
- para=grad.Textbox(lines=10, label="Paragraph", placeholder="Copy paragraph")
- out=grad.Textbox(lines=1, label="Summary")
-
- grad.Interface(text2text_summary, inputs=para, outputs=out).launch()
+ text2text_tkn = T5Tokenizer.from_pretrained("t5-small")
+ mdl = T5ForConditionalGeneration.from_pretrained("t5-small")

+ def text2text_translation(text):
+     inp = "translate English to German: " + text
+     enc = text2text_tkn(inp, return_tensors="pt")
+     tokens = mdl.generate(**enc)
+     response = text2text_tkn.batch_decode(tokens)
+     return response

+ para = grad.Textbox(lines=1, label="English Text", placeholder="Text in English")
+ out = grad.Textbox(lines=1, label="German Translation")

+ grad.Interface(text2text_translation, inputs=para, outputs=out).launch()
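
For reference, a minimal standalone sketch of the new task-3 path (not part of the commit). It reuses the same t5-small checkpoint and "translate English to German: " task prefix as the committed code, but decodes with skip_special_tokens=True so the result is a plain string rather than a list that still carries the <pad> and </s> markers a bare batch_decode leaves in place. The helper name translate_en_to_de and the max_new_tokens cap are illustrative assumptions, not names from app.py.

# Standalone sketch under the assumptions stated above; not part of app.py.
from transformers import T5ForConditionalGeneration, T5Tokenizer

tokenizer = T5Tokenizer.from_pretrained("t5-small")
model = T5ForConditionalGeneration.from_pretrained("t5-small")

def translate_en_to_de(text: str) -> str:
    # T5 selects the task through a plain-text prefix; a single colon
    # ("translate English to German: ") is the conventional form.
    enc = tokenizer("translate English to German: " + text, return_tensors="pt")
    tokens = model.generate(**enc, max_new_tokens=64)  # 64 is an illustrative cap
    # skip_special_tokens=True strips <pad> and </s> from the decoded output,
    # and [0] unwraps the single-element batch into a plain string.
    return tokenizer.batch_decode(tokens, skip_special_tokens=True)[0]

print(translate_en_to_de("How old are you?"))

Greedy decoding is usually adequate for single sentences; if parity with the summarization task were wanted, num_beams and the other generation arguments could be passed to generate() in the same way.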