# Hugging Face Space: Arabic-Orpo-Llama-3-8B-Instruct text-generation demo.
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Model checkpoint served by this demo.
model_name = 'MohamedRashad/Arabic-Orpo-Llama-3-8B-Instruct'

# Resolve the compute device once; prefer GPU when available.
device = "cuda" if torch.cuda.is_available() else "cpu"

# Load the tokenizer and model once at import time so each request
# only pays for generation, not loading.
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name).to(device)

# Llama-3 checkpoints ship without a dedicated pad token; reuse EOS so
# the tokenizer's padding=True path works.
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token
def generate_response(input_text):
    """Generate a completion for *input_text* and return it as a string.

    The prompt is tokenized (truncated to 512 tokens), moved to the
    model's device, run through generation, and only the newly generated
    tokens are decoded so the prompt is not echoed back to the user.
    """
    inputs = tokenizer(input_text, return_tensors='pt', padding=True,
                       truncation=True, max_length=512)
    # Keep all input tensors on the same device the model lives on.
    inputs = {key: value.to(model.device) for key, value in inputs.items()}
    with torch.no_grad():
        outputs = model.generate(
            inputs['input_ids'],
            attention_mask=inputs['attention_mask'],
            # max_new_tokens bounds only the completion; the original
            # max_length=50 counted the prompt too and could be shorter
            # than the (up to 512-token) prompt itself.
            max_new_tokens=50,
            pad_token_id=tokenizer.pad_token_id,
        )
    # Slice off the prompt tokens so only the continuation is returned.
    generated = outputs[0][inputs['input_ids'].shape[1]:]
    return tokenizer.decode(generated, skip_special_tokens=True)
# Minimal web UI: one text box in, one text box out.
interface = gr.Interface(fn=generate_response, inputs="text", outputs="text")
# share=True publishes a temporary public gradio.live URL in addition to
# the local server (useful when running outside Hugging Face Spaces).
interface.launch(share=True)