HimashaJ96 committed
Commit f93eb4b · 1 Parent(s): 22f5186

Training in progress, step 4

adapter_config.json CHANGED
@@ -15,14 +15,14 @@
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
- "r": 64,
+ "r": 32,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
+ "k_proj",
  "o_proj",
  "q_proj",
- "v_proj",
- "k_proj"
+ "v_proj"
  ],
  "task_type": "CAUSAL_LM"
  }
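
For reference, the updated values map onto a PEFT LoraConfig roughly as sketched below (a minimal sketch, assuming the standard peft Python API; lora_alpha, lora_dropout, and the get_peft_model call are illustrative and not part of this diff):

# Minimal sketch (not taken from this repo's training code): a peft LoraConfig
# mirroring the new adapter_config.json values for r, target_modules and task_type.
from peft import LoraConfig, get_peft_model

lora_config = LoraConfig(
    r=32,                          # rank lowered from 64 to 32 in this commit
    target_modules=["k_proj", "o_proj", "q_proj", "v_proj"],
    task_type="CAUSAL_LM",
    lora_alpha=16,                 # illustrative placeholder, not shown in this diff
    lora_dropout=0.05,             # illustrative placeholder, not shown in this diff
)
# model = get_peft_model(base_model, lora_config)  # attach the adapter to a base causal LM
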
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:fd3c0e7df5875a69148ef7e94d11d2ab8f171292ca399f844777ceace73ad430
- size 218138576
+ oid sha256:6f691ef602151d39fe0d1db63a959e47cef2c26963fad1912c05c980d96d3b5e
+ size 109086416
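
The adapter checkpoint shrinks from 218,138,576 to 109,086,416 bytes, which is consistent with the rank change above: LoRA parameter count scales linearly with r, so halving r from 64 to 32 roughly halves the file (218,138,576 / 2 = 109,069,288, with the small remainder plausibly accounted for by the safetensors header and metadata).
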
tokenizer.json CHANGED
@@ -1,11 +1,6 @@
  {
  "version": "1.0",
- "truncation": {
- "direction": "Left",
- "max_length": 512,
- "strategy": "LongestFirst",
- "stride": 0
- },
+ "truncation": null,
  "padding": null,
  "added_tokens": [
  {
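
With "truncation" set to null, the serialized tokenizer no longer truncates inputs by default; the previous left-direction, 512-token LongestFirst setting is dropped. If the old behavior is needed at load time, it could be restored along these lines (a hedged sketch using the tokenizers library; the file path is a placeholder):

# Sketch: re-enable the truncation settings that this commit removes from tokenizer.json.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")  # placeholder path
tok.enable_truncation(max_length=512, stride=0, strategy="longest_first", direction="left")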