{
  "added_tokens_decoder": {},
  "auto_map": {
    "AutoTokenizer": [
      "tokenizer.AlphabetTokenizer",
      null
    ]
  },
  "clean_up_tokenization_spaces": true,
  "model_max_length": 2048,
  "tokenizer_class": "AlphabetTokenizer"
}
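The "auto_map" entry tells transformers' AutoTokenizer where to find the custom class: the first element, "tokenizer.AlphabetTokenizer", points at the slow tokenizer defined in tokenizer.py inside the same repository, and the second element (null) means no fast variant is provided. Below is a minimal sketch of how a config like this is typically consumed; the repository id is a placeholder, and trust_remote_code=True is needed because the class lives in repo-local code rather than in the transformers library.

from transformers import AutoTokenizer

# Placeholder repo id (or a local directory containing tokenizer_config.json
# and tokenizer.py); trust_remote_code=True lets transformers import the
# custom AlphabetTokenizer class referenced by "auto_map".
tokenizer = AutoTokenizer.from_pretrained(
    "user/alphabet-model",
    trust_remote_code=True,
)

# model_max_length (2048) is read from the config, so truncation with no
# explicit max_length should cap inputs at 2048 tokens.
ids = tokenizer("abcdef", truncation=True)["input_ids"]
print(ids)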