beamaia committed (verified)
Commit 8bd3b6f · Parent(s): 01b2a9b

Upload tokenizer

special_tokens_map.json CHANGED
@@ -13,13 +13,7 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
+  "pad_token": "</s>",
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
tokenizer.json CHANGED
@@ -71,6 +71,12 @@
       "id": "A",
       "type_id": 0
     }
+  },
+  {
+    "SpecialToken": {
+      "id": "</s>",
+      "type_id": 0
+    }
   }
 ],
 "pair": [
@@ -86,6 +92,12 @@
       "type_id": 0
     }
   },
+  {
+    "SpecialToken": {
+      "id": "</s>",
+      "type_id": 0
+    }
+  },
   {
     "SpecialToken": {
       "id": "<s>",
@@ -97,9 +109,24 @@
       "id": "B",
       "type_id": 1
     }
+  },
+  {
+    "SpecialToken": {
+      "id": "</s>",
+      "type_id": 1
+    }
   }
 ],
 "special_tokens": {
+  "</s>": {
+    "id": "</s>",
+    "ids": [
+      2
+    ],
+    "tokens": [
+      "</s>"
+    ]
+  },
   "<s>": {
     "id": "<s>",
     "ids": [
tokenizer_config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "add_bos_token": true,
-  "add_eos_token": false,
+  "add_eos_token": true,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
@@ -39,8 +39,10 @@
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": false,
+  "max_lenght": 2048,
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "<unk>",
+  "pad_token": "</s>",
+  "padding": true,
   "sp_model_kwargs": {},
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>",