ccore committed
Commit 95b995d · verified · 1 Parent(s): 7f36591

Training in progress, step 400

Files changed (3)
  1. config.json +9 -6
  2. model.safetensors +2 -2
  3. training_args.bin +1 -1
config.json CHANGED
@@ -1,5 +1,7 @@
 {
+  "_name_or_path": "facebook/opt-125m",
   "_remove_final_layer_norm": false,
+  "activation_dropout": 0.0,
   "activation_function": "relu",
   "architectures": [
     "OPTForCausalLM"
@@ -10,19 +12,20 @@
   "dropout": 0.1,
   "enable_bias": true,
   "eos_token_id": 2,
-  "ffn_dim": 512,
-  "hidden_size": 512,
+  "ffn_dim": 3072,
+  "hidden_size": 768,
   "init_std": 0.02,
   "layer_norm_elementwise_affine": true,
   "layerdrop": 0.0,
   "max_position_embeddings": 2048,
   "model_type": "opt",
-  "num_attention_heads": 2,
-  "num_hidden_layers": 1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
   "pad_token_id": 1,
+  "prefix": "</s>",
   "torch_dtype": "float32",
   "transformers_version": "4.42.4",
   "use_cache": true,
-  "vocab_size": 50265,
-  "word_embed_proj_dim": 512
+  "vocab_size": 50272,
+  "word_embed_proj_dim": 768
 }
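The updated values match the stock facebook/opt-125m architecture, whereas the previous config described a much smaller 1-layer, 512-dim variant. As a rough check, here is a minimal sketch (assuming the transformers library, as pinned by transformers_version 4.42.4) that rebuilds the new config and prints the parameter count it implies; fields not passed explicitly keep OPTConfig defaults, which already agree with the diff above:

```python
from transformers import OPTConfig, OPTForCausalLM

# Values taken from the updated config.json in this commit.
config = OPTConfig(
    vocab_size=50272,
    hidden_size=768,
    ffn_dim=3072,
    num_hidden_layers=12,
    num_attention_heads=12,
    word_embed_proj_dim=768,
    activation_function="relu",
)
model = OPTForCausalLM(config)
print(f"{sum(p.numel() for p in model.parameters()):,}")  # roughly 125M parameters
```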
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:40a53ebed8dd89fc2c4f6cb6c0a373650454884f91ece046278453e113559da0
-size 113459440
+oid sha256:5ae8d759d4ada99aef5934a5b42b46745dc10a32a364ce05b447f79544c1c938
+size 217446424
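These entries are Git LFS pointer files: the repository itself stores only the object's SHA-256 and byte size, while the weights live in LFS storage. A minimal sketch (hypothetical local path, assuming the checkpoint has already been downloaded) for checking a file against the pointer recorded above:

```python
import hashlib
import os

EXPECTED_OID = "5ae8d759d4ada99aef5934a5b42b46745dc10a32a364ce05b447f79544c1c938"
EXPECTED_SIZE = 217_446_424

def verify_lfs_object(path: str) -> bool:
    """Compare a downloaded file against the oid/size from its LFS pointer."""
    if os.path.getsize(path) != EXPECTED_SIZE:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            digest.update(chunk)
    return digest.hexdigest() == EXPECTED_OID

print(verify_lfs_object("model.safetensors"))
```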
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8a1f3b79f703c8a3fb21de91388dd6842387489de9fd8c556eb9a2448f97d062
+oid sha256:80648db8d67445acad0e576fa124f071c50ea896247a73464339d0feb9d88ca3
 size 5112
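training_args.bin is the pickled TrainingArguments object that the transformers Trainer writes next to each checkpoint; here only its oid changed while the size stays at 5112 bytes. A minimal sketch for inspecting it locally (assumes transformers is importable; weights_only=False is needed on recent PyTorch releases where torch.load defaults to weights_only=True, since this file is a full pickle rather than a tensor archive):

```python
import torch

# Full (unsafe) unpickling is required for a pickled TrainingArguments object;
# only do this for checkpoints you trust.
args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)  # e.g. TrainingArguments
print(args.learning_rate, args.per_device_train_batch_size, args.save_steps)
```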