import torch
import gradio as gr
from transformers import MarianMTModel, MarianTokenizer

# Load the fine-tuned MarianMT model and tokenizer
model_dir = '/content/drive/MyDrive/fine_tuned_marian'  # Replace with the path to your model directory
model = MarianMTModel.from_pretrained(model_dir)
tokenizer = MarianTokenizer.from_pretrained(model_dir)

# Move the model to the GPU if one is available and switch to inference mode
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model.to(device)
model.eval()

# Function to translate text
def translate_arabic_to_english(arabic_text):
    # Tokenize the input text
    inputs = tokenizer(arabic_text, return_tensors="pt", padding=True, truncation=True, max_length=128)

    # Move the inputs to the same device as the model
    inputs = {k: v.to(device) for k, v in inputs.items()}

    # Generate translation
    with torch.no_grad():
        translated_ids = model.generate(**inputs)

    # Decode the translated text
    translated_text = tokenizer.decode(translated_ids[0], skip_special_tokens=True)
    return translated_text
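
# Optional sanity check (illustrative; assumes the fine-tuned weights exist at model_dir):
# print(translate_arabic_to_english("مرحبا بالعالم"))  # sample Arabic input meaning "Hello, world"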

# Create the Gradio interface
iface = gr.Interface(
    fn=translate_arabic_to_english,
    inputs=gr.Textbox(lines=5, placeholder="Enter Arabic text here..."),
    outputs="text",
    title="Arabic to English Machine Translation",
    description="Translate Arabic text to English with a fine-tuned MarianMT model.",
)

# Launch the interface
iface.launch()
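
# Note: when running in Colab, iface.launch(share=True) can be used instead to get a temporary public URL.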