{
  "clean_up_tokenization_spaces": true,
  "model_max_length": 1000000000000000019884624838656,
  "special_tokens": [
    "<s>",
    "</s>",
    "<unk>",
    "<sep>",
    "<pad>",
    "<cls>",
    "<mask>"
  ],
  "tokenizer_class": "PreTrainedTokenizerFast"
}
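
A minimal sketch of how a config like this is typically consumed, assuming it sits in a local directory (the "./my_tokenizer" path is hypothetical) next to a matching tokenizer.json holding the actual vocabulary:

    from transformers import AutoTokenizer

    # Hypothetical directory containing this tokenizer_config.json plus
    # the matching tokenizer.json; AutoTokenizer reads "tokenizer_class"
    # and dispatches loading to PreTrainedTokenizerFast.
    tokenizer = AutoTokenizer.from_pretrained("./my_tokenizer")

    # model_max_length here is a "no limit" sentinel (~1e30), so no
    # length warning fires; clean_up_tokenization_spaces tidies the
    # output of decode().
    ids = tokenizer("Hello world!")["input_ids"]
    print(tokenizer.decode(ids))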