gpt2-fa / tokenizer_config.json
{
  "unk_token": {
    "content": "<|endoftext|>",
    "single_word": false,
    "lstrip": false,
    "rstrip": false,
    "normalized": true,
    "__type": "AddedToken"
  },
  "bos_token": {
    "content": "<|endoftext|>",
    "single_word": false,
    "lstrip": false,
    "rstrip": false,
    "normalized": true,
    "__type": "AddedToken"
  },
  "eos_token": {
    "content": "<|endoftext|>",
    "single_word": false,
    "lstrip": false,
    "rstrip": false,
    "normalized": true,
    "__type": "AddedToken"
  },
  "add_prefix_space": false,
  "special_tokens_map_file": null,
  "errors": "replace"
}
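
This config mirrors the original GPT-2 setup: the unk, bos, and eos tokens all map to <|endoftext|>, add_prefix_space is disabled, and undecodable bytes are replaced on decode ("errors": "replace"). Below is a minimal sketch of how these settings are consumed when the tokenizer is loaded with the transformers library; the repo id m3hrdadfi/gpt2-fa is an assumption inferred from the file path above.

# Minimal sketch: load the tokenizer so this tokenizer_config.json is applied.
# The repo id "m3hrdadfi/gpt2-fa" is assumed from the path shown above.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("m3hrdadfi/gpt2-fa")

# All three special tokens resolve to "<|endoftext|>", as declared in the config.
print(tokenizer.unk_token, tokenizer.bos_token, tokenizer.eos_token)

# "add_prefix_space": false means no leading space is inserted before the first
# word, so a word at the start of a sentence can tokenize differently than the
# same word appearing mid-sentence.
ids = tokenizer("سلام دنیا")["input_ids"]
print(tokenizer.convert_ids_to_tokens(ids))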