ThiloteE committed
Commit cb003e4
1 Parent(s): b907a71

Add config files

Customized config/config.json ADDED
@@ -0,0 +1,138 @@
+ {
+   "_name_or_path": "Phi-3-mini-128k-instruct",
+   "architectures": [
+     "Phi3ForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "auto_map": {
+     "AutoConfig": "configuration_phi3.Phi3Config",
+     "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
+   },
+   "bos_token_id": 1,
+   "embd_pdrop": 0.0,
+   "eos_token_id": 32007,
+   "hidden_act": "silu",
+   "hidden_size": 3072,
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "max_position_embeddings": 131072,
+   "model_type": "phi3",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 32,
+   "original_max_position_embeddings": 4096,
+   "pad_token_id": 32007,
+   "resid_pdrop": 0.0,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "long_factor": [
+       1.0700000524520874,
+       1.1200000047683716,
+       1.149999976158142,
+       1.4199999570846558,
+       1.5699999332427979,
+       1.7999999523162842,
+       2.129999876022339,
+       2.129999876022339,
+       3.009999990463257,
+       5.910000324249268,
+       6.950000286102295,
+       9.070000648498535,
+       9.930000305175781,
+       10.710000038146973,
+       11.130000114440918,
+       14.609999656677246,
+       15.409998893737793,
+       19.809999465942383,
+       37.279998779296875,
+       38.279998779296875,
+       38.599998474121094,
+       40.12000274658203,
+       46.20000457763672,
+       50.940006256103516,
+       53.66000747680664,
+       54.9373893737793,
+       56.89738845825195,
+       57.28738784790039,
+       59.98738479614258,
+       60.86738586425781,
+       60.887386322021484,
+       61.71739196777344,
+       62.91739273071289,
+       62.957393646240234,
+       63.41739273071289,
+       63.8173942565918,
+       63.83739471435547,
+       63.897396087646484,
+       63.93739700317383,
+       64.06739807128906,
+       64.11434936523438,
+       64.12435150146484,
+       64.15435028076172,
+       64.19435119628906,
+       64.24435424804688,
+       64.57435607910156,
+       64.69000244140625,
+       64.76000213623047
+     ],
+     "short_factor": [
+       1.1,
+       1.1,
+       1.1,
+       1.3000000000000003,
+       1.3500000000000003,
+       1.3500000000000003,
+       1.4000000000000004,
+       1.5500000000000005,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.0500000000000007,
+       2.0500000000000007,
+       2.0500000000000007,
+       2.0500000000000007,
+       2.0500000000000007,
+       2.0500000000000007,
+       2.1000000000000005,
+       2.1000000000000005,
+       2.1500000000000004,
+       2.25,
+       2.25,
+       2.25,
+       2.25,
+       2.25,
+       2.3999999999999995,
+       2.4499999999999993,
+       2.499999999999999,
+       2.6999999999999984,
+       2.6999999999999984,
+       2.7499999999999982,
+       2.799999999999998,
+       2.8999999999999977,
+       3.049999999999997
+     ],
+     "type": "longrope"
+   },
+   "rope_theta": 10000.0,
+   "sliding_window": 262144,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.40.2",
+   "use_cache": true,
+   "attention_bias": false,
+   "vocab_size": 32064
+ }
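
The values above can be inspected with Python's standard json module. A minimal sketch, assuming the file sits locally under the path shown in this commit (the path is illustrative):

```python
import json

# Path is illustrative; adjust to wherever the commit is checked out.
with open("Customized config/config.json") as f:
    cfg = json.load(f)

print(cfg["eos_token_id"])             # 32007, the <|end|> token
print(cfg["pad_token_id"])             # 32007 as well in this customized file
print(cfg["max_position_embeddings"])  # 131072 (128k context)
print(cfg["rope_scaling"]["type"])     # "longrope"

# Each scaling list has 48 entries, which matches head_dim / 2:
# hidden_size / num_attention_heads = 3072 / 32 = 96, and rotary embeddings
# use one scaling factor per pair of dimensions (96 / 2 = 48).
assert len(cfg["rope_scaling"]["long_factor"]) == 48
assert len(cfg["rope_scaling"]["short_factor"]) == 48
```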
Customized config/generation_config.json ADDED
@@ -0,0 +1,11 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": [
+     32000,
+     32001,
+     32007
+   ],
+   "pad_token_id": 32007,
+   "transformers_version": "4.41.2"
+ }
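
Since eos_token_id here is a list, generation is expected to stop on any of the three ids. A minimal sketch of that stop check, using only the standard library (the path is illustrative):

```python
import json

with open("Customized config/generation_config.json") as f:
    gen = json.load(f)

# Per tokenizer_config.json below: 32000 = <|endoftext|>,
# 32001 = <|assistant|>, 32007 = <|end|>.
stop_ids = set(gen["eos_token_id"])

def should_stop(token_id: int) -> bool:
    """Mimics the stop condition a generation loop would apply."""
    return token_id in stop_ids

assert should_stop(32007) and not should_stop(42)
```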
Customized config/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
Customized config/tokenizer_config.json ADDED
@@ -0,0 +1,130 @@
+ {
+   "add_bos_token": false,
+   "add_eos_token": false,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": false
+     },
+     "32000": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "32001": {
+       "content": "<|assistant|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32002": {
+       "content": "<|placeholder1|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32003": {
+       "content": "<|placeholder2|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32004": {
+       "content": "<|placeholder3|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32005": {
+       "content": "<|placeholder4|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32006": {
+       "content": "<|system|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32007": {
+       "content": "<|end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32008": {
+       "content": "<|placeholder5|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32009": {
+       "content": "<|placeholder6|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32010": {
+       "content": "<|user|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<s>",
+   "chat_template": "{% for message in messages %}{% if message['role'] == 'system' %}{{'<|system|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'user' %}{{'<|user|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'assistant' %}{{'<|assistant|>\n' + message['content'] + '<|end|>\n'}}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|assistant|>\n' }}{% else %}{{ eos_token }}{% endif %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|end|>",
+   "legacy": false,
+   "model_max_length": 131072,
+   "pad_token": "<|endoftext|>",
+   "padding_side": "left",
+   "sp_model_kwargs": {},
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": "<unk>",
+   "use_default_system_prompt": false
+ }
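
The chat_template above is a Jinja template that transformers' apply_chat_template renders. A minimal sketch of the prompt it produces, assuming a recent transformers (>= 4.34, which introduced chat templates) and that this folder loads as a tokenizer; the local path is illustrative:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Customized config")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]
prompt = tok.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
# <|system|>
# You are a helpful assistant.<|end|>
# <|user|>
# Hello!<|end|>
# <|assistant|>
```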
Original config/config.json ADDED
@@ -0,0 +1,138 @@
+ {
+   "_name_or_path": "Phi-3-mini-128k-instruct",
+   "architectures": [
+     "Phi3ForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "auto_map": {
+     "AutoConfig": "configuration_phi3.Phi3Config",
+     "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
+   },
+   "bos_token_id": 1,
+   "embd_pdrop": 0.0,
+   "eos_token_id": 32000,
+   "hidden_act": "silu",
+   "hidden_size": 3072,
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "max_position_embeddings": 131072,
+   "model_type": "phi3",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 32,
+   "original_max_position_embeddings": 4096,
+   "pad_token_id": 32000,
+   "resid_pdrop": 0.0,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "long_factor": [
+       1.0700000524520874,
+       1.1200000047683716,
+       1.149999976158142,
+       1.4199999570846558,
+       1.5699999332427979,
+       1.7999999523162842,
+       2.129999876022339,
+       2.129999876022339,
+       3.009999990463257,
+       5.910000324249268,
+       6.950000286102295,
+       9.070000648498535,
+       9.930000305175781,
+       10.710000038146973,
+       11.130000114440918,
+       14.609999656677246,
+       15.409998893737793,
+       19.809999465942383,
+       37.279998779296875,
+       38.279998779296875,
+       38.599998474121094,
+       40.12000274658203,
+       46.20000457763672,
+       50.940006256103516,
+       53.66000747680664,
+       54.9373893737793,
+       56.89738845825195,
+       57.28738784790039,
+       59.98738479614258,
+       60.86738586425781,
+       60.887386322021484,
+       61.71739196777344,
+       62.91739273071289,
+       62.957393646240234,
+       63.41739273071289,
+       63.8173942565918,
+       63.83739471435547,
+       63.897396087646484,
+       63.93739700317383,
+       64.06739807128906,
+       64.11434936523438,
+       64.12435150146484,
+       64.15435028076172,
+       64.19435119628906,
+       64.24435424804688,
+       64.57435607910156,
+       64.69000244140625,
+       64.76000213623047
+     ],
+     "short_factor": [
+       1.1,
+       1.1,
+       1.1,
+       1.3000000000000003,
+       1.3500000000000003,
+       1.3500000000000003,
+       1.4000000000000004,
+       1.5500000000000005,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.0500000000000007,
+       2.0500000000000007,
+       2.0500000000000007,
+       2.0500000000000007,
+       2.0500000000000007,
+       2.0500000000000007,
+       2.1000000000000005,
+       2.1000000000000005,
+       2.1500000000000004,
+       2.25,
+       2.25,
+       2.25,
+       2.25,
+       2.25,
+       2.3999999999999995,
+       2.4499999999999993,
+       2.499999999999999,
+       2.6999999999999984,
+       2.6999999999999984,
+       2.7499999999999982,
+       2.799999999999998,
+       2.8999999999999977,
+       3.049999999999997
+     ],
+     "type": "longrope"
+   },
+   "rope_theta": 10000.0,
+   "sliding_window": 262144,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.40.2",
+   "use_cache": true,
+   "attention_bias": false,
+   "vocab_size": 32064
+ }
Original config/generation_config.json ADDED
@@ -0,0 +1,11 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": [
+     32000,
+     32001,
+     32007
+   ],
+   "pad_token_id": 32000,
+   "transformers_version": "4.41.2"
+ }
Original config/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
Original config/tokenizer_config.json ADDED
@@ -0,0 +1,130 @@
+ {
+   "add_bos_token": false,
+   "add_eos_token": false,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": false
+     },
+     "32000": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "32001": {
+       "content": "<|assistant|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32002": {
+       "content": "<|placeholder1|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32003": {
+       "content": "<|placeholder2|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32004": {
+       "content": "<|placeholder3|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32005": {
+       "content": "<|placeholder4|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32006": {
+       "content": "<|system|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32007": {
+       "content": "<|end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32008": {
+       "content": "<|placeholder5|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32009": {
+       "content": "<|placeholder6|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     },
+     "32010": {
+       "content": "<|user|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<s>",
+   "chat_template": "{% for message in messages %}{% if message['role'] == 'system' %}{{'<|system|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'user' %}{{'<|user|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'assistant' %}{{'<|assistant|>\n' + message['content'] + '<|end|>\n'}}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|assistant|>\n' }}{% else %}{{ eos_token }}{% endif %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|endoftext|>",
+   "legacy": false,
+   "model_max_length": 131072,
+   "pad_token": "<|endoftext|>",
+   "padding_side": "left",
+   "sp_model_kwargs": {},
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": "<unk>",
+   "use_default_system_prompt": false
+ }
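
Taken together, the "Customized config" and "Original config" variants in this commit differ only in which token ends and pads a sequence: the customized files move eos_token_id and pad_token_id from 32000 (<|endoftext|>) to 32007 (<|end|>), and set the tokenizer's eos_token to "<|end|>" accordingly. A minimal sketch that surfaces those differences (paths are illustrative):

```python
import json

def load(path: str) -> dict:
    with open(path) as f:
        return json.load(f)

orig = load("Original config/config.json")
cust = load("Customized config/config.json")

# Print every top-level key whose value changed between the two variants.
for key in sorted(orig):
    if orig[key] != cust.get(key):
        print(f"{key}: {orig[key]} -> {cust.get(key)}")
# Expected output:
# eos_token_id: 32000 -> 32007
# pad_token_id: 32000 -> 32007
```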