duyphu committed
Commit 77c7e80 · verified · 1 Parent(s): ec98c85

Training in progress, step 13

adapter_config.json CHANGED
@@ -20,13 +20,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "up_proj",
-    "down_proj",
+    "v_proj",
     "k_proj",
+    "up_proj",
     "gate_proj",
-    "v_proj",
-    "q_proj",
-    "o_proj"
+    "down_proj",
+    "o_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:73ef7e9a7fc69deea4b3ad235f971ca8a922a7d241b65b0fe2c2c769c8822fc3
+oid sha256:bfcae81e7c68f112bfcdf173a75285d2adc544401909b30f12c6a3c1000eeb66
 size 15182728
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9abb2b1ec3ce81e6f2148d07bc1bc65e00c38955c3b82eee24048f74df234cab
+oid sha256:2093762adc0d1dee31d5feefc26fe7e28cb29c7bd4d2cdccfeb9b0462524c836
 size 6776
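Both binary files above are stored as Git LFS pointers: only the oid (the SHA-256 of the file contents) changes at this checkpoint, while the sizes stay the same. A quick way to check that a locally downloaded file matches its pointer is to hash it yourself; the sketch below assumes the file has already been pulled into the working directory, and the expected oid is taken from the new adapter_model.safetensors pointer.

# Sketch: verify a downloaded file against the sha256 oid in its LFS pointer.
# The path below is an assumption about your local checkout layout.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "bfcae81e7c68f112bfcdf173a75285d2adc544401909b30f12c6a3c1000eeb66"
actual = sha256_of("adapter_model.safetensors")
print("match" if actual == expected else f"mismatch: {actual}")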