# -*- coding: utf-8 -*-
"""TurjumanDemo

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1VVJ7uPEYD8Q1pR-IINWWAQVpqyP1XnzD
"""

# Install dependencies
!pip install gradio
!pip install turjuman transformers
!git clone https://huggingface.co/spaces/ahmedoumar/TurjumanDemo

# Import our modules
import logging
import os

import gradio as gr
from transformers import AutoTokenizer
from turjuman import turjuman

# Configure logging for the Turjuman translator
logging.basicConfig(
    format="%(asctime)s | %(levelname)s | %(name)s | %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
    level=os.environ.get("LOGLEVEL", "INFO").upper(),
)
logger = logging.getLogger("turjuman.translate")

cache_dir = "/content/mycache"

# Build the Turjuman translator and the tokenizer used to decode its outputs
turj = turjuman.turjuman(logger, cache_dir)
tokenizer = AutoTokenizer.from_pretrained("UBC-NLP/AraT5-base-title-generation")


def translate(sent):
    """Translate a sentence into Arabic with beam search and return the decoded text."""
    beam_options = {
        "search_method": "beam",
        "seq_length": 300,
        "num_beams": 5,
        "no_repeat_ngram_size": 2,
        "max_outputs": 1,
    }
    targets = turj.translate(sent, **beam_options)
    ans = ""
    for target in targets:
        # Decode the generated token IDs back into readable text
        target = tokenizer.decode(
            target, skip_special_tokens=True, clean_up_tokenization_spaces=True
        )
        ans += target
    return ans


# Quick sanity check: "Здравствуй, друг" is Russian for "Hello, friend"
print(translate("Здравствуй, друг"))

# Launch the Gradio demo with a simple text-in, text-out interface
gr.Interface(fn=translate, inputs=["text"], outputs=["text"]).launch(
    width=1000, height=1000
)
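
# Optional usage sketch (not part of the generated notebook): reuse the
# translate() helper defined above on a couple of illustrative sentences and
# print each source/translation pair. The sample sentences below are arbitrary
# examples chosen here, not inputs from the original demo.
samples = [
    "Hello, my friend.",
    "How are you today?",
]
for sample in samples:
    print(f"{sample} -> {translate(sample)}")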