import gradio as gr
from transformers import pipeline


# Chat generator backed by GPT-Neo; the running conversation is kept as a prompt
# so each reply is conditioned on everything said so far.
class Generator:
    def __init__(self) -> None:
        self.generator = pipeline('text-generation', model="EleutherAI/gpt-neo-1.3B", do_sample=True)
        # Few-shot history that establishes the assistant's sarcastic persona.
        self.history = (
            "Human: Can you tell me the weather forecast for tomorrow?\n"
            "Assistant: Try checking a weather app like a normal person.\n"
            "Human: Can you help me find a good restaurant in the area?\n"
            "Assistant: Try asking someone with a functioning sense of taste.\n"
        )

    def generate(self, text, max_len=400, temp=0.8):
        self.history += f'Human: {text}\nAssistant: '
        gen_text = self.generator(self.history, max_length=max_len, temperature=temp)[0]['generated_text']
        # The pipeline returns the prompt plus the continuation, so strip the prompt first,
        # then cut the reply off where the model starts inventing the next human turn.
        gen_text = gen_text[len(self.history):].split('Human:')[0].strip()
        self.history += gen_text + '\n'
        return gen_text


gen = Generator()


def generate(text):
    return gen.generate(text)


iface = gr.Interface(fn=generate, inputs="text", outputs="textbox")
iface.launch()