# app.py: Gradio demo Space for the LingoIITGN/ganga-1b model
import spaces
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
# Load the tokenizer and model weights from the Hugging Face Hub.
tokenizer = AutoTokenizer.from_pretrained("LingoIITGN/ganga-1b")
model = AutoModelForCausalLM.from_pretrained("LingoIITGN/ganga-1b")
# Request a ZeroGPU allocation of up to 120 seconds per call.
@spaces.GPU(duration=120)
def greet(input_text):
    # Tokenize the prompt, sample a continuation, and decode it back to text.
    input_token = tokenizer.encode(input_text, return_tensors="pt")
    output = model.generate(input_token, max_new_tokens=100, num_return_sequences=1, do_sample=True, top_k=50, top_p=0.95, temperature=0.7)
    output_text = tokenizer.batch_decode(output)[0]
    return output_text
# Simple text-in / text-out Gradio interface wrapping the generation function.
demo = gr.Interface(fn=greet, inputs=["text"], outputs=["text"])
demo.launch()
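# A minimal sketch of calling this demo from a separate script with gradio_client.
# The Space id below and the default "/predict" endpoint name are assumptions,
# not something stated in this file; adjust them to the actual deployed Space.
#
#   from gradio_client import Client
#   client = Client("LingoIITGN/ganga-1b")  # assumed Space id
#   result = client.predict("Hello", api_name="/predict")
#   print(result)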