andykcheng committed
Commit 1d61f5e
1 Parent(s): 0bd6c76

Upload tokenizer

Files changed (3)
  1. special_tokens_map.json +3 -3
  2. tokenizer.json +0 -27
  3. tokenizer_config.json +4 -28
special_tokens_map.json CHANGED
@@ -14,10 +14,10 @@
     "single_word": false
   },
   "pad_token": {
-    "content": "[PAD]",
-    "lstrip": true,
+    "content": "</s>",
+    "lstrip": false,
     "normalized": false,
-    "rstrip": true,
+    "rstrip": false,
     "single_word": false
   },
   "unk_token": {
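
The pad token now reuses the existing </s> (EOS) token in place of the dedicated [PAD] entry, with its lstrip/rstrip flags reset to false to match the other special tokens. Reusing </s> for padding is a common convention for Llama-style checkpoints that ship without a dedicated pad token; the attention mask ensures padded positions are ignored. A minimal sketch for verifying the change after loading the tokenizer with transformers; "path/to/checkpoint" is a placeholder, not the actual repo id:

from transformers import AutoTokenizer

# Hypothetical path; substitute the real repo id or local directory.
tok = AutoTokenizer.from_pretrained("path/to/checkpoint")

# After this commit, padding reuses the EOS token, so the two ids coincide.
print(tok.pad_token)  # "</s>"
assert tok.pad_token_id == tok.eos_token_id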
tokenizer.json CHANGED
@@ -29,33 +29,6 @@
       "rstrip": false,
       "normalized": false,
       "special": true
-    },
-    {
-      "id": 32000,
-      "content": "[PAD]",
-      "single_word": false,
-      "lstrip": true,
-      "rstrip": true,
-      "normalized": false,
-      "special": true
-    },
-    {
-      "id": 32001,
-      "content": "<|im_start|>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
-    },
-    {
-      "id": 32002,
-      "content": "<|im_end|>",
-      "single_word": false,
-      "lstrip": false,
-      "rstrip": false,
-      "normalized": false,
-      "special": true
     }
   ],
   "normalizer": {
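
Dropping the three added-token entries (ids 32000-32002) removes [PAD], <|im_start|>, and <|im_end|> from the vocabulary, so the tokenizer shrinks back to the base vocabulary and no longer requires a resized embedding matrix. A hedged sketch, assuming the base vocab is the standard 32,000-entry Llama vocabulary and a placeholder checkpoint path:

from transformers import AutoTokenizer

# Hypothetical path; substitute the real checkpoint.
tok = AutoTokenizer.from_pretrained("path/to/checkpoint")

# 32,000 assumes the standard Llama base vocab; ids 32000-32002 are gone.
print(len(tok))

# Without dedicated entries, these strings now tokenize as ordinary text
# rather than as single special tokens.
for s in ["[PAD]", "<|im_start|>", "<|im_end|>"]:
    print(s, tok.tokenize(s))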
tokenizer_config.json CHANGED
@@ -23,42 +23,18 @@
       "rstrip": false,
       "single_word": false,
       "special": true
-    },
-    "32000": {
-      "content": "[PAD]",
-      "lstrip": true,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32001": {
-      "content": "<|im_start|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "32002": {
-      "content": "<|im_end|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
     }
   },
-  "additional_special_tokens": [],
   "bos_token": "<s>",
+  "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": false,
-  "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "[PAD]",
+  "model_max_length": 2048,
+  "pad_token": "</s>",
   "padding_side": "right",
   "sp_model_kwargs": {},
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>",
-  "use_default_system_prompt": true
+  "use_default_system_prompt": false
 }
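
The new chat_template is the Zephyr-style <|system|>/<|user|>/<|assistant|> format with each turn terminated by the EOS token, and model_max_length drops from the unbounded sentinel value to 2048. A minimal sketch of rendering a conversation with the template, assuming a transformers version that supports apply_chat_template (4.34 or later) and a placeholder checkpoint path:

from transformers import AutoTokenizer

# Hypothetical path; substitute the real repo id.
tok = AutoTokenizer.from_pretrained("path/to/checkpoint")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Summarize this commit."},
]

# add_generation_prompt=True triggers the template's loop.last branch,
# appending the trailing '<|assistant|>' cue for the model to complete.
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)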