ThomasBlumet committed
Commit aa5f698 · 1 Parent(s): 7d5f4fe

update app

Files changed (1):
  app.py +9 -1
app.py CHANGED
@@ -1,4 +1,6 @@
 from transformers import BartForConditionalGeneration, BartTokenizer
+
+
 import gradio as gr
 
 # Load the BART model and the tokenizer
@@ -12,7 +14,13 @@ def generate_text(prompt):
     summary_ids = model.generate(inputs["input_ids"], max_length=150, min_length=40, length_penalty=2.0, num_beams=4, early_stopping=True)
     return tokenizer.decode(summary_ids[0], skip_special_tokens=True)
 
+# For training the model after the data is collected
+#model.save_pretrained("model")
+#tokenizer.save_pretrained("model")
+
 # Create an input interface with Gradio
 interface = gr.Interface(fn=generate_text, inputs="text", outputs="text", title="TeLLMyStory", description="Enter your story idea and the model will generate the story based on it.")
+
+
 # Launch the interface
-interface.launch()
+interface.launch(share=True)
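
For reference, a minimal sketch of how app.py should read after this commit. The diff elides the model-loading block between the two hunks and the first lines of generate_text, so the checkpoint name (facebook/bart-large-cnn) and the tokenizer call inside the function are assumptions for illustration, not part of the commit.

from transformers import BartForConditionalGeneration, BartTokenizer


import gradio as gr

# Load the BART model and the tokenizer
# (checkpoint name assumed; this block is not shown in the diff)
model_name = "facebook/bart-large-cnn"
model = BartForConditionalGeneration.from_pretrained(model_name)
tokenizer = BartTokenizer.from_pretrained(model_name)

def generate_text(prompt):
    # Tokenize the prompt (assumed call) and generate a continuation with beam search
    inputs = tokenizer(prompt, return_tensors="pt", truncation=True)
    summary_ids = model.generate(inputs["input_ids"], max_length=150, min_length=40, length_penalty=2.0, num_beams=4, early_stopping=True)
    return tokenizer.decode(summary_ids[0], skip_special_tokens=True)

# For training the model after the data is collected
#model.save_pretrained("model")
#tokenizer.save_pretrained("model")

# Create an input interface with Gradio
interface = gr.Interface(fn=generate_text, inputs="text", outputs="text", title="TeLLMyStory", description="Enter your story idea and the model will generate the story based on it.")


# Launch the interface
interface.launch(share=True)

Note on the one functional change: share=True asks Gradio to open a temporary public *.gradio.live link in addition to the local server. When the app runs as a Hugging Face Space it is already served publicly at the Space URL, and recent Gradio versions skip the share link there (typically with a warning).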