silviasapora committed
Commit c28c5bd · verified · 1 Parent(s): 36c3c2a

Training in progress, step 315

adapter_config.json CHANGED
@@ -3,6 +3,8 @@
   "auto_mapping": null,
   "base_model_name_or_path": "google/gemma-7b",
   "bias": "none",
+  "eva_config": null,
+  "exclude_modules": null,
   "fan_in_fan_out": false,
   "inference_mode": true,
   "init_lora_weights": true,
@@ -11,6 +13,7 @@
   "layers_to_transform": null,
   "loftq_config": {},
   "lora_alpha": 256,
+  "lora_bias": false,
   "lora_dropout": 0.05,
   "megatron_config": null,
   "megatron_core": "megatron.core",
@@ -20,13 +23,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "up_proj",
     "q_proj",
-    "k_proj",
     "o_proj",
-    "down_proj",
-    "v_proj",
     "gate_proj",
-    "up_proj"
+    "k_proj",
+    "down_proj",
+    "v_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ec905e4a3250231596f1b061df61a468a923da16721d08834b470627d94e98c5
-size 800117240
+oid sha256:79d358b33505feb06a01ce5107282b29fff74e987350c1c9b905fcfc32067962
+size 1600180696
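
The adapter weight file roughly doubles here (800117240 → 1600180696 bytes), which would be consistent with the tensors being serialized in a wider dtype (e.g. float32 instead of float16), though the LFS pointer alone does not confirm the cause. A minimal sketch to check, assuming the safetensors library and a local copy of the file:

```python
# Minimal sketch to inspect the adapter checkpoint's dtypes and byte
# counts, assuming the file has been downloaded from the Hub (the LFS
# pointer above records only its hash and size).
from collections import Counter
from safetensors import safe_open

with safe_open("adapter_model.safetensors", framework="pt") as f:
    bytes_per_dtype = Counter()
    for name in f.keys():
        t = f.get_tensor(name)
        bytes_per_dtype[str(t.dtype)] += t.numel() * t.element_size()

for dtype, nbytes in bytes_per_dtype.items():
    print(f"{dtype}: {nbytes / 1e6:.1f} MB")
```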
runs/Jan29_01-59-17_b4d476355ce0/events.out.tfevents.1738115969.b4d476355ce0.183528.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aa6cf34f0465add081579aadf9aefbd6d8ee3d70899f7f6e0e90cee65724fa4c
+size 23014
runs/Jan29_03-02-43_b4d476355ce0/events.out.tfevents.1738119775.b4d476355ce0.191289.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:08498f735c1f0b59946ae019a2a3dcd0044f37bd9073eea668f02aa9d2bcb9c7
+size 9892
runs/Jan29_03-23-59_b4d476355ce0/events.out.tfevents.1738121051.b4d476355ce0.191981.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3fba0ecdd90ce3b223cdfea47d1d109aa3c0fa301c8483435ccdf09de2762d49
+size 17911
runs/Jan29_03-35-31_b4d476355ce0/events.out.tfevents.1738121743.b4d476355ce0.192529.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e7addcd0768a1e3e2bcda52fe9a122f2b65ca8e66bf63b23f673023a2aaae11e
+size 5518
runs/Jan29_04-09-34_b4d476355ce0/events.out.tfevents.1738123787.b4d476355ce0.194293.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:df6fbcfa88a4e7395bcd6bf1e4081bb1579615b5e785b041995652221c889f19
+size 7078
runs/Jan29_04-17-55_b4d476355ce0/events.out.tfevents.1738124287.b4d476355ce0.194990.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:28cd60f325aa219a55af6df26eefc52f6644681bbeeb6a2bc0f41d96e0b484fa
+size 12538
runs/Jan29_04-44-07_b4d476355ce0/events.out.tfevents.1738125859.b4d476355ce0.196096.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6f19426483de0389223f99f6930d6551efa9e16e4c689a457e261edfbf0a0fca
+size 12538
runs/Jan29_05-10-16_b4d476355ce0/events.out.tfevents.1738127429.b4d476355ce0.197192.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b852ddb288e340c89612b23bb68a2fb12a17de6a12b1bba165cc7151d59cf452
+size 5518
runs/Jan29_05-18-58_b4d476355ce0/events.out.tfevents.1738127949.b4d476355ce0.198265.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9a770f692b2557965e78504428917ca7580b79eb2ee9ff8af9eaef3859570154
+size 5518
runs/Jan29_05-20-37_b4d476355ce0/events.out.tfevents.1738128049.b4d476355ce0.199318.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d6e5d8a7587efcefdb378aca35d31112509101326e646b674650220c2501b93c
+size 55544
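
The added runs/*.tfevents files are TensorBoard logs, one per training launch on host b4d476355ce0. A minimal sketch for reading the logged scalars out of one run, assuming the repo is cloned locally with git-lfs so the event files are materialized:

```python
# Minimal sketch: read scalar metrics from one of the added
# TensorBoard event files. The run directory below is one of those
# added in this commit.
from tensorboard.backend.event_processing.event_accumulator import (
    EventAccumulator,
)

ea = EventAccumulator("runs/Jan29_05-20-37_b4d476355ce0")
ea.Reload()

# Print every logged scalar tag (e.g. train/loss) and its last value.
for tag in ea.Tags()["scalars"]:
    events = ea.Scalars(tag)
    print(tag, "step", events[-1].step, "value", events[-1].value)
```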
tokenizer_config.json CHANGED
@@ -1747,6 +1747,7 @@
   "chat_template": "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{{ raise_exception('System role not supported') }}{% endif %}{% for message in messages %}{% if (message['role'] == 'assistant') %}{% set role = 'model' %}{% else %}{% set role = message['role'] %}{% endif %}{{ '<start_of_turn>' + role + '\n' + message['content'] | trim + '<end_of_turn>\n' }}{% endfor %}{% if add_generation_prompt %}{{'<start_of_turn>model\n'}}{% endif %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<eos>",
+  "extra_special_tokens": {},
   "model_max_length": 2048,
   "pad_token": "<pad>",
   "sp_model_kwargs": {},
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ea7cabbd6032336503a5e258108a592943f23279762dd5aca5a10cb9b6183408
-size 5624
+oid sha256:9ff4504ad66aee5983ddf1ca471563c519b16f9e464afd1698227604d43588c8
+size 5688
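
training_args.bin is a pickled transformers.TrainingArguments object; the small size change (5624 → 5688 bytes) reflects changed argument fields rather than weights. A minimal sketch to inspect it, assuming a local download from a source you trust (unpickling can execute arbitrary code):

```python
# Minimal sketch to inspect the pickled TrainingArguments in
# training_args.bin. weights_only=False is required because the file
# holds a full Python object, not just tensors; only unpickle files
# from trusted sources.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)   # e.g. TrainingArguments
print(args.learning_rate, args.per_device_train_batch_size)
```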