Playingyoyo committed
Commit bb59315 · verified · 1 Parent(s): 1957f05

Upload tokenizer

Files changed (3)
  1. special_tokens_map.json +3 -1
  2. tokenizer.json +11 -1
  3. tokenizer_config.json +12 -1
special_tokens_map.json CHANGED
@@ -1 +1,3 @@
-{}
+{
+  "pad_token": "<|endoftext|>"
+}
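
Setting "pad_token" in special_tokens_map.json is what lets transformers pad batches with this tokenizer. A minimal sketch of the effect, assuming the files are loaded through AutoTokenizer; the repo id below is a placeholder, not the actual repository name:

from transformers import AutoTokenizer

# Hypothetical repo id; substitute the repository this commit belongs to.
tok = AutoTokenizer.from_pretrained("Playingyoyo/tokenizer")
# Before this commit, padding raised "Asking to pad but the tokenizer does
# not have a padding token"; with pad_token set, <|endoftext|> is used.
batch = tok(["short", "a somewhat longer input"], padding=True)
print(tok.pad_token, tok.pad_token_id)  # <|endoftext|> 0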
tokenizer.json CHANGED
@@ -2,7 +2,17 @@
   "version": "1.0",
   "truncation": null,
   "padding": null,
-  "added_tokens": [],
+  "added_tokens": [
+    {
+      "id": 0,
+      "content": "<|endoftext|>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
+    }
+  ],
   "normalizer": {
     "type": "Sequence",
     "normalizers": [
tokenizer_config.json CHANGED
@@ -1,6 +1,17 @@
 {
-  "added_tokens_decoder": {},
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
   "clean_up_tokenization_spaces": false,
   "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<|endoftext|>",
+  "padding_side": "right",
   "tokenizer_class": "PreTrainedTokenizerFast"
 }
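
tokenizer_config.json carries the same information for the transformers wrapper: "added_tokens_decoder" maps token ids back to their token definitions, while "pad_token" and "padding_side" control how batches are padded. A short sketch of the resulting behavior, again with a placeholder repo id:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Playingyoyo/tokenizer")  # hypothetical id
enc = tok(["a", "a b c"], padding=True)
# padding_side "right" extends the shorter sequence on the right with
# pad_token_id (the id of <|endoftext|>); the attention mask marks the
# padded positions with 0.
print(enc["input_ids"])
print(enc["attention_mask"])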