yechen committed on
Commit 20310f5
1 Parent(s): 2d51623
Files changed (2)
  1. config.json +27 -18
  2. tf_model.h5 +2 -2
config.json CHANGED
@@ -1,21 +1,30 @@
 {
+  "_name_or_path": "/data/yechen/bert/bert-large-chinese",
   "architectures": [
     "BertForMaskedLM"
   ],
   "attention_probs_dropout_prob": 0.1,
   "directionality": "bidi",
+  "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 1024,
   "initializer_range": 0.02,
   "intermediate_size": 4096,
+  "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
+  "model_type": "bert",
   "num_attention_heads": 16,
   "num_hidden_layers": 24,
+  "pad_token_id": 0,
   "pooler_fc_size": 768,
   "pooler_num_attention_heads": 12,
   "pooler_num_fc_layers": 3,
   "pooler_size_per_head": 128,
   "pooler_type": "first_token_transform",
+  "position_embedding_type": "absolute",
+  "transformers_version": "4.3.3",
   "type_vocab_size": 2,
-  "vocab_size": 21128}
+  "use_cache": true,
+  "vocab_size": 21128
+}
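The added "model_type", "pad_token_id", and "transformers_version" fields make the config loadable directly by recent transformers releases. A minimal sanity-check sketch, assuming the repo has been cloned locally so that config.json sits in the current directory:

    from transformers import BertConfig

    # Load the updated config from a local clone of the repo
    # (the local path is an assumption, not part of this commit).
    config = BertConfig.from_json_file("config.json")

    # Spot-check the values added/kept in this commit.
    assert config.model_type == "bert"
    assert config.hidden_size == 1024
    assert config.num_hidden_layers == 24
    assert config.vocab_size == 21128
    print(config)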
tf_model.h5 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2594fba1861e40f8a19d0505aceee88c766da31ef64704f61efb0af8cc8d0c42
-size 3919167104
+oid sha256:313050cd80734a801bda2c1caf1259022873c1aae07d6e285f953bdab8ba285f
+size 1393494464
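Only this small Git LFS pointer lives in the repo; the oid and size identify the actual weight file, which this commit replaces (roughly 3.9 GB down to roughly 1.4 GB). A minimal sketch for verifying that a downloaded tf_model.h5 matches the updated pointer, assuming the file has been fetched into the current directory:

    import hashlib
    import os

    # Values taken from the updated LFS pointer above.
    EXPECTED_SHA256 = "313050cd80734a801bda2c1caf1259022873c1aae07d6e285f953bdab8ba285f"
    EXPECTED_SIZE = 1393494464

    path = "tf_model.h5"  # assumed local download location
    assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

    # Hash the file in 1 MiB chunks to avoid loading it all into memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    assert h.hexdigest() == EXPECTED_SHA256, "hash mismatch"
    print("tf_model.h5 matches the LFS pointer")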