glm-350M-chinese / tokenizer_config.json
{
  "name_or_path": "BAAI/glm-large-chinese",
  "eos_token": "<|endoftext|>",
  "pad_token": "<|endoftext|>",
  "cls_token": "<|endoftext|>",
  "mask_token": "[MASK]",
  "unk_token": "[UNK]",
  "sep_token": "[SEP]",
  "additional_special_tokens": ["<|startofpiece|>", "<|endofpiece|>", "[gMASK]", "[sMASK]"],
  "add_prefix_space": false,
  "tokenizer_class": "GLMChineseTokenizer",
  "use_fast": false,
  "auto_map": {
    "AutoTokenizer": [
      "tokenization_glm.GLMChineseTokenizer",
      null
    ]
  }
}
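
A minimal sketch of how this config is consumed. The "auto_map" entry routes AutoTokenizer to the custom GLMChineseTokenizer class in the repo's tokenization_glm.py, and the null second entry means no fast (Rust) tokenizer is provided; loading custom tokenizer code requires trust_remote_code=True. The repo id "sunzeyeah/glm-350M-chinese" is assumed from the page path, not confirmed by this file.

    # Sketch, not the repo's own usage docs; repo id is assumed.
    from transformers import AutoTokenizer

    # trust_remote_code=True lets transformers import the custom
    # GLMChineseTokenizer named in "auto_map" above.
    tokenizer = AutoTokenizer.from_pretrained(
        "sunzeyeah/glm-350M-chinese",
        trust_remote_code=True,
    )

    # The special tokens declared in this config become attributes.
    print(tokenizer.eos_token)  # "<|endoftext|>"
    print(tokenizer.sep_token)  # "[SEP]"

    # Tokenize a short Chinese string ("Hello, world").
    enc = tokenizer("你好，世界")
    print(enc.input_ids)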