justinthelaw committed
Commit 200681f
1 Parent(s): 91f0494
Files changed (3)
  1. config.json +155 -0
  2. model.safetensors +3 -0
  3. quantize_config.json +17 -0
config.json ADDED
@@ -0,0 +1,155 @@
+ {
+   "_name_or_path": "microsoft/Phi-3-mini-128k-instruct",
+   "architectures": [
+     "Phi3ForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "auto_map": {
+     "AutoConfig": "microsoft/Phi-3-mini-128k-instruct--configuration_phi3.Phi3Config",
+     "AutoModelForCausalLM": "microsoft/Phi-3-mini-128k-instruct--modeling_phi3.Phi3ForCausalLM"
+   },
+   "bos_token_id": 1,
+   "embd_pdrop": 0.0,
+   "eos_token_id": 32000,
+   "hidden_act": "silu",
+   "hidden_size": 3072,
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "max_position_embeddings": 131072,
+   "model_type": "phi3",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 32,
+   "original_max_position_embeddings": 4096,
+   "pad_token_id": 32000,
+   "quantization_config": {
+     "bits": 4,
+     "checkpoint_format": "gptq",
+     "damp_percent": 0.01,
+     "desc_act": true,
+     "group_size": 128,
+     "lm_head": false,
+     "meta": {
+       "quantizer": "gptqmodel:0.9.9"
+     },
+     "model_file_base_name": null,
+     "model_name_or_path": null,
+     "quant_method": "gptq",
+     "static_groups": false,
+     "sym": true,
+     "true_sequential": true
+   },
+   "resid_pdrop": 0.0,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "long_factor": [
+       1.0700000524520874,
+       1.1200000047683716,
+       1.149999976158142,
+       1.4199999570846558,
+       1.5699999332427979,
+       1.7999999523162842,
+       2.129999876022339,
+       2.129999876022339,
+       3.009999990463257,
+       5.910000324249268,
+       6.950000286102295,
+       9.070000648498535,
+       9.930000305175781,
+       10.710000038146973,
+       11.130000114440918,
+       14.609999656677246,
+       15.409998893737793,
+       19.809999465942383,
+       37.279998779296875,
+       38.279998779296875,
+       38.599998474121094,
+       40.12000274658203,
+       46.20000457763672,
+       50.940006256103516,
+       53.66000747680664,
+       54.9373893737793,
+       56.89738845825195,
+       57.28738784790039,
+       59.98738479614258,
+       60.86738586425781,
+       60.887386322021484,
+       61.71739196777344,
+       62.91739273071289,
+       62.957393646240234,
+       63.41739273071289,
+       63.8173942565918,
+       63.83739471435547,
+       63.897396087646484,
+       63.93739700317383,
+       64.06739807128906,
+       64.11434936523438,
+       64.12435150146484,
+       64.15435028076172,
+       64.19435119628906,
+       64.24435424804688,
+       64.57435607910156,
+       64.69000244140625,
+       64.76000213623047
+     ],
+     "short_factor": [
+       1.1,
+       1.1,
+       1.1,
+       1.3000000000000003,
+       1.3500000000000003,
+       1.3500000000000003,
+       1.4000000000000004,
+       1.5500000000000005,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.0500000000000007,
+       2.0500000000000007,
+       2.0500000000000007,
+       2.0500000000000007,
+       2.0500000000000007,
+       2.0500000000000007,
+       2.1000000000000005,
+       2.1000000000000005,
+       2.1500000000000004,
+       2.25,
+       2.25,
+       2.25,
+       2.25,
+       2.25,
+       2.3999999999999995,
+       2.4499999999999993,
+       2.499999999999999,
+       2.6999999999999984,
+       2.6999999999999984,
+       2.7499999999999982,
+       2.799999999999998,
+       2.8999999999999977,
+       3.049999999999997
+     ],
+     "type": "longrope"
+   },
+   "rope_theta": 10000.0,
+   "sliding_window": 262144,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.43.3",
+   "use_cache": true,
+   "vocab_size": 32064
+ }
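
This config embeds the GPTQ parameters inline (4-bit, group size 128, act-order) and carries Phi-3's LongRoPE scaling: each factor array has 48 entries, i.e. half the per-head dimension, (3072 / 32) / 2 = 48. The `short_factor` values apply up to the `original_max_position_embeddings` of 4,096 tokens, and `long_factor` takes over beyond that, out to 131,072. A minimal loading sketch follows; `QUANT_DIR` is a placeholder for wherever these files are hosted (not a path from this commit), and a GPTQ backend such as gptqmodel or optimum + auto-gptq is assumed to be installed:

```python
# Minimal loading sketch -- QUANT_DIR is a placeholder (local dir or hub repo
# id), not a path recorded in this commit. Requires a GPTQ backend
# (gptqmodel, or optimum + auto-gptq) so transformers can run the 4-bit layers.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

QUANT_DIR = "path/to/Phi-3-mini-128k-instruct-4bit-128g"  # placeholder

tokenizer = AutoTokenizer.from_pretrained(QUANT_DIR, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    QUANT_DIR,
    device_map="auto",           # dispatch quantized layers to available GPUs
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in config.json
    trust_remote_code=True,      # auto_map points at Phi-3's custom modeling code
)

# Phi-3's chat format: <|user|> ... <|end|> <|assistant|>
prompt = "<|user|>\nSummarize GPTQ in one sentence.<|end|>\n<|assistant|>\n"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
out = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(out[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True))
```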
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:273077aa6a96667f64220dfec1c2f47330a97b87bc23cfb0cb7f1ecf24a6a259
+ size 2279413888
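
Note that what is committed here is a Git LFS pointer, not the weights themselves: the roughly 2.28 GB safetensors file is fetched separately and can be checked against the recorded digest and byte size. A small verification sketch, assuming `model.safetensors` on disk is the resolved weights file rather than the 3-line pointer:

```python
# Verify a downloaded model.safetensors against the LFS pointer's oid and size.
import hashlib
import os

PATH = "model.safetensors"  # the resolved weights file, not the pointer
EXPECTED_OID = "273077aa6a96667f64220dfec1c2f47330a97b87bc23cfb0cb7f1ecf24a6a259"
EXPECTED_SIZE = 2279413888  # bytes, roughly 2.28 GB

assert os.path.getsize(PATH) == EXPECTED_SIZE, "size mismatch"

digest = hashlib.sha256()
with open(PATH, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        digest.update(chunk)

assert digest.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("model.safetensors matches its LFS pointer")
```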
quantize_config.json ADDED
@@ -0,0 +1,17 @@
+ {
+   "bits": 4,
+   "group_size": 128,
+   "desc_act": true,
+   "static_groups": false,
+   "sym": true,
+   "lm_head": false,
+   "damp_percent": 0.01,
+   "true_sequential": true,
+   "model_name_or_path": "Phi-3-mini-128k-instruct-4bit-128g",
+   "model_file_base_name": "model",
+   "quant_method": "gptq",
+   "checkpoint_format": "gptq",
+   "meta": {
+     "quantizer": "gptqmodel:0.9.9"
+   }
+ }
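
quantize_config.json repeats the `quantization_config` block from config.json and additionally names the output checkpoint (`model_name_or_path`) and file base name `model`, which is why the weights file is `model.safetensors`. Below is a hedged sketch of how gptqmodel 0.9.9 might produce such a checkpoint; it follows that library's AutoGPTQ-style interface, but the exact signatures can differ by version, and the calibration texts are stand-ins since the commit does not record what data was used:

```python
# Hedged reproduction sketch -- gptqmodel 0.9.x followed AutoGPTQ's
# from_pretrained / quantize / save_quantized flow; verify the API against
# the installed version. Calibration data below is a stand-in (real runs
# use hundreds of samples), not what this commit's author used.
from gptqmodel import GPTQModel, QuantizeConfig
from transformers import AutoTokenizer

BASE_ID = "microsoft/Phi-3-mini-128k-instruct"
OUT_DIR = "Phi-3-mini-128k-instruct-4bit-128g"

quant_config = QuantizeConfig(
    bits=4,                # 4-bit weights
    group_size=128,        # one scale/zero-point per 128-weight group
    desc_act=True,         # act-order: quantize columns by activation magnitude
    static_groups=False,
    sym=True,              # symmetric quantization grid
    damp_percent=0.01,     # Hessian dampening for numerical stability
    true_sequential=True,  # quantize layer-by-layer, propagating errors
)

tokenizer = AutoTokenizer.from_pretrained(BASE_ID, trust_remote_code=True)
calibration = [tokenizer(t) for t in [  # stand-in calibration samples
    "Quantization trades a little accuracy for a much smaller model.",
    "Phi-3-mini is a 3.8B-parameter instruction-tuned language model.",
]]

model = GPTQModel.from_pretrained(BASE_ID, quant_config)
model.quantize(calibration)
model.save_quantized(OUT_DIR)  # writes model.safetensors + quantize_config.json
```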