roberta_base_en / tokenizer.json
{
    "module": "keras_hub.src.models.roberta.roberta_tokenizer",
    "class_name": "RobertaTokenizer",
    "config": {
        "name": "roberta_tokenizer",
        "trainable": true,
        "dtype": {
            "module": "keras",
            "class_name": "DTypePolicy",
            "config": {
                "name": "int32"
            },
            "registered_name": null
        },
        "config_file": "tokenizer.json",
        "sequence_length": null,
        "add_prefix_space": false,
        "unsplittable_tokens": [
            "<s>",
            "<pad>",
            "</s>",
            "<mask>"
        ]
    },
    "registered_name": "keras_hub>RobertaTokenizer"
}
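
A minimal usage sketch, assuming the keras_hub package and the "roberta_base_en" preset this config belongs to are available; the example sentence is illustrative only.

import keras_hub

# Load the tokenizer described by this config from its preset.
# sequence_length is null above, so outputs are unpadded by default;
# the unsplittable_tokens (<s>, <pad>, </s>, <mask>) map to single ids.
tokenizer = keras_hub.models.RobertaTokenizer.from_preset("roberta_base_en")

# Tokenize a raw string into int32 token ids (matching the DTypePolicy above).
token_ids = tokenizer("The quick brown fox.")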