zh-tw-llm-ta01-pythia-1b-ta8000-v1-b_1_embeddings_and_attention-a100-t02-713b8e / special_tokens_map.json
{
  "bos_token": "<|padding|>",
  "eos_token": "<|endoftext|>",
  "pad_token": "<|padding|>",
  "unk_token": "<|padding|>"
}
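
This file is read automatically when the tokenizer is loaded with the Hugging Face transformers library, which uses it to populate the tokenizer's special-token attributes. A minimal sketch follows, assuming the repository name above resolves as a Hub ID or a local directory containing this file:

# Sketch: inspect the special tokens declared in special_tokens_map.json.
# Assumption: the repo name above is loadable as-is (Hub ID or local path).
from transformers import AutoTokenizer

repo_id = "zh-tw-llm-ta01-pythia-1b-ta8000-v1-b_1_embeddings_and_attention-a100-t02-713b8e"
tokenizer = AutoTokenizer.from_pretrained(repo_id)

# These attributes are filled in from special_tokens_map.json.
print(tokenizer.bos_token)  # <|padding|>
print(tokenizer.eos_token)  # <|endoftext|>
print(tokenizer.pad_token)  # <|padding|>
print(tokenizer.unk_token)  # <|padding|>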