# NOTE(review): the three lines below were file-viewer residue (byte size,
# git blob hashes, and a line-number gutter) accidentally captured with the
# source; they are not Python and are commented out to keep the file valid.
from transformers import BartForConditionalGeneration, BartTokenizer, pipeline, AutoTokenizer, AutoModelForSeq2SeqLM
from transformers.utils import logging
import gradio as gr
# Logger instance scoped to the transformers library (reuses its handlers/levels).
logger = logging.get_logger("transformers")
# Text-to-text generation pipeline backed by a small seq2seq model.
# (A causal LM such as "gpt2" was previously tried — see trailing comment.)
chatbot = pipeline("text2text-generation", model="google/flan-t5-small") #model= "gpt2"
def respond(prompt):
    """Generate a reply for *prompt* with the flan-t5 text2text pipeline.

    Asks the pipeline for a single candidate (capped at 50 tokens) and
    returns its generated text.
    """
    outputs = chatbot(prompt, max_length=50, num_return_sequences=1)
    first_candidate = outputs[0]
    return first_candidate['generated_text']
# Load the tokenizer and model explicitly. This must happen BEFORE
# interface.launch(): launch() blocks a script until the server stops,
# so statements placed after it effectively never run.
model_name = "google/flan-t5-small"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

# Build the Gradio UI around `respond` and start serving it.
# Keep launch() as the last statement in the script (it blocks).
interface = gr.Interface(fn=respond, inputs="text", outputs="text")
interface.launch()
# # Load the BART model and the tokenizer
# model_name = "facebook/bart-large-cnn"
# tokenizer = BartTokenizer.from_pretrained(model_name)
# model = BartForConditionalGeneration.from_pretrained(model_name)
# # Function to generate text
# def generate_text(prompt):
# inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=512)
# summary_ids = model.generate(inputs["input_ids"], max_length=150, min_length=40, length_penalty=2.0, num_beams=4, early_stopping=True)
# return tokenizer.decode(summary_ids[0], skip_special_tokens=True)
# #for training the model after the data is collected
# #model.save_pretrained("model")
# #tokenizer.save_pretrained("model")
# #for the app functions
# def clear_save_textbox(message):
# return " ", message
# def show_input_text(message,history:list[tuple[str,str]]):
# history.append((message,""))
# story = generate_text(message)
# history[-1] = (message,story)
# return history
# def delete_previous_text(history:list[tuple[str,str]]):
# try:
# message, _ = history.pop()
# except IndexError:
# message = " "
# return history, message
# # Create an input interface with Gradio
# interface = gr.Interface(fn=generate_text, inputs="text", outputs="text",title="TeLLMyStory",description="Enter your story idea and the model will generate the story based on it.")
# with gr.Blocks() as demo:
# gr.Markdown("TeLLMyStory chatbot")
# #input_text = blocks.text(name="input_text", label="Enter your story idea here", default="Once upon a time, there was")
# with gr.Row():
# input_text = gr.Textbox(label="Enter your story idea here")
# #clear_button = gr.Button("Clear",variant="secondary")
# #clear_button.click(fn=clear_save_textbox, inputs=[input_text])
# #retry_button = gr.Button("Retry", fn=delete_previous_text, inputs=[input_text],variants=["secondary"])
# with gr.Row():
# gr.Markdown("History of your story ideas")
# gen_story = gr.Textbox(label="History")
# #send_button = gr.Button(name="send_button", label="Send", fn=show_input_text, inputs=[input_text],outputs=[gen_story],variants=["primary"])
# # Launch the interface
# interface.launch()