Files changed (1)
  1. README.md +1 -0
README.md CHANGED
@@ -489,6 +489,7 @@ tokenizer = SentencePieceProcessor()
 tokenizer.load(f"{model_path}/sentencepiece.model")
 translator = ctranslate2.Translator(model_path)

+target_language="pt"
 input_text = "I love pizza!"
 input_tokens = tokenizer.encode(f"<2{target_language}> {input_text}", out_type=str)
 results = translator.translate_batch(
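
For context, here is a minimal, self-contained sketch of the README snippet after this change. Only lines 489-495 appear in the diff, so the imports, the model_path value, and the decoding step at the end are assumptions, not part of the patched README.

# Sketch only: imports, model_path, and the decode step are assumed,
# since the diff covers just README lines 489-495.
import ctranslate2
from sentencepiece import SentencePieceProcessor

model_path = "ct2_model"  # hypothetical path to the converted CTranslate2 model

tokenizer = SentencePieceProcessor()
tokenizer.load(f"{model_path}/sentencepiece.model")
translator = ctranslate2.Translator(model_path)

target_language = "pt"  # the variable this PR adds; the prefix token below relies on it
input_text = "I love pizza!"
input_tokens = tokenizer.encode(f"<2{target_language}> {input_text}", out_type=str)

# translate_batch takes a list of token sequences and returns one result per input
results = translator.translate_batch([input_tokens])
output_tokens = results[0].hypotheses[0]
print(tokenizer.decode(output_tokens))

Without the added line, the f-string in input_tokens would raise a NameError because target_language was never defined in the snippet.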