Around6827 committed on
Commit
88d1834
1 Parent(s): 16d0297

Upload tokenizer

Browse files
Files changed (2) hide show
  1. tokenizer.json +0 -0
  2. tokenizer_config.json +10 -16
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,28 +1,26 @@
1
  {
2
- "add_bos_token": true,
3
- "add_eos_token": false,
4
  "added_tokens_decoder": {
5
  "0": {
6
  "content": "<unk>",
7
- "lstrip": true,
8
- "normalized": false,
9
- "rstrip": true,
10
  "single_word": false,
11
  "special": true
12
  },
13
  "1": {
14
  "content": "<s>",
15
- "lstrip": true,
16
- "normalized": false,
17
- "rstrip": true,
18
  "single_word": false,
19
  "special": true
20
  },
21
  "2": {
22
  "content": "</s>",
23
- "lstrip": true,
24
- "normalized": false,
25
- "rstrip": true,
26
  "single_word": false,
27
  "special": true
28
  }
@@ -35,11 +33,7 @@
35
  "model_max_length": 1000000000000000019884624838656,
36
  "pad_token": "<unk>",
37
  "sp_model_kwargs": {},
38
- "spaces_between_special_tokens": false,
39
  "tokenizer_class": "LlamaTokenizer",
40
- "tokenizer_file": "/N/u/oleykin/BigRed200/.cache/huggingface/hub/models--NousResearch--Llama-2-7b-hf/snapshots/dacdfcde31297e34b19ee0e7532f29586d2c17bc/tokenizer.json",
41
- "trust_remote_code": false,
42
  "unk_token": "<unk>",
43
- "use_default_system_prompt": true,
44
- "use_fast": true
45
  }
 
1
  {
 
 
2
  "added_tokens_decoder": {
3
  "0": {
4
  "content": "<unk>",
5
+ "lstrip": false,
6
+ "normalized": true,
7
+ "rstrip": false,
8
  "single_word": false,
9
  "special": true
10
  },
11
  "1": {
12
  "content": "<s>",
13
+ "lstrip": false,
14
+ "normalized": true,
15
+ "rstrip": false,
16
  "single_word": false,
17
  "special": true
18
  },
19
  "2": {
20
  "content": "</s>",
21
+ "lstrip": false,
22
+ "normalized": true,
23
+ "rstrip": false,
24
  "single_word": false,
25
  "special": true
26
  }
 
33
  "model_max_length": 1000000000000000019884624838656,
34
  "pad_token": "<unk>",
35
  "sp_model_kwargs": {},
 
36
  "tokenizer_class": "LlamaTokenizer",
 
 
37
  "unk_token": "<unk>",
38
+ "use_default_system_prompt": true
 
39
  }