# app.py — Gradio demo for the Aityz/Aityz_model_eli5 causal language model.
# (Provenance from the Hugging Face Spaces page: author Aityz, commit 19d7d4f,
# "Create app.py", 858 bytes — page chrome removed so this file parses as Python.)
import gradio as gr
from transformers import AutoTokenizer
from transformers import AutoModelForCausalLM
import torch
# Load the tokenizer and fine-tuned ELI5 model once at module import, so every
# Gradio request reuses them (first run downloads the weights from the Hub).
tokenizer = AutoTokenizer.from_pretrained("Aityz/Aityz_model_eli5")
model = AutoModelForCausalLM.from_pretrained("Aityz/Aityz_model_eli5")
# Left over from a CLI version of this script; the max-token count now comes
# from the Gradio slider instead of stdin.
# maxtokens = int(input('What would you like the max tokens to be (default: 100) '))
def aityz(input, maxtokens):
    """Generate a sampled continuation of *input* with the Aityz ELI5 model.

    Parameters
    ----------
    input : str
        Prompt text from the Gradio textbox. (The name shadows the builtin
        ``input``; kept unchanged for interface compatibility.)
    maxtokens : int | float
        Maximum number of new tokens to generate. The Gradio slider can
        deliver a float, so it is coerced to ``int`` before generation.

    Returns
    -------
    str
        The decoded output sequence (the full sequence is decoded, so it
        includes the prompt text).
    """
    input_ids = tokenizer(input, return_tensors="pt").input_ids
    # max_new_tokens must be an integer; the slider value may arrive as a float.
    outputs = model.generate(
        input_ids,
        max_new_tokens=int(maxtokens),
        do_sample=True,
        top_k=50,
        top_p=0.95,
    )
    # batch_decode returns one string per generated sequence; join into one result.
    return "".join(tokenizer.batch_decode(outputs, skip_special_tokens=True))
# Wire the generator into a minimal UI: a prompt textbox plus a slider
# (range 1-1000, default 100) that supplies the maxtokens argument.
demo = gr.Interface(fn=aityz, inputs=["textbox", gr.Slider(1, 1000, value=100)], outputs="textbox")
demo.launch() # pass share=True when running outside a Hugging Face Space