import gradio as gr

from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the fine-tuned Aityz/reviews_model checkpoint and its tokenizer from the Hugging Face Hub.
tokenizer = AutoTokenizer.from_pretrained("Aityz/reviews_model")
model = AutoModelForCausalLM.from_pretrained("Aityz/reviews_model")

def aityz(Input, Tokens, TopK, TopP):
    """Generate a review continuation for the given prompt."""
    # Tokenize the prompt and sample a continuation.
    inputs = tokenizer(Input, return_tensors="pt").input_ids
    outputs = model.generate(
        inputs,
        max_new_tokens=int(Tokens),
        do_sample=True,
        top_k=int(TopK),
        top_p=float(TopP),
    )
    # Decode the generated token ids back into text, dropping special tokens.
    output = tokenizer.batch_decode(outputs, skip_special_tokens=True)
    return "".join(output)
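
# A minimal sketch of calling the generator directly, outside the Gradio UI,
# assuming the model weights download successfully; the prompt text is illustrative:
#   print(aityz("This product is", 100, 50, 0.95))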

demo = gr.Interface(
    fn=aityz,
    inputs=[
        gr.Textbox(label="Prompt"),
        gr.Slider(1, 1000, value=100, label="Max new tokens"),
        gr.Number(value=50, label="Top-k"),
        gr.Number(value=0.95, label="Top-p"),
    ],
    outputs="textbox",
)

demo.launch()