ebsmothers
committed on
Upload folder using huggingface_hub
- adapter_0.pt +3 -0
- adapter_config.json +1 -0
- adapter_model.bin +3 -0
- config.json +1 -0
- hf_model_0001_0.pt +3 -0
- hf_model_0002_0.pt +3 -0
adapter_0.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:193fb462bc21599d2b05176b031b8de28d624f9a632ff97194df0b6b537ba536
+size 16819322
adapter_config.json
ADDED
@@ -0,0 +1 @@
+{"r": 8, "lora_alpha": 16, "target_modules": ["q_proj", "v_proj"], "peft_type": "LORA"}
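
The adapter_config.json above describes a PEFT-style LoRA adapter (rank 8, alpha 16, applied to the q_proj and v_proj projections). Assuming the paired adapter_model.bin follows the PEFT adapter layout, the adapter could be loaded onto the base model roughly as in the sketch below; the placeholder repo id and the use of AutoModelForCausalLM are assumptions, not something this commit documents.

# Minimal sketch: applying the uploaded LoRA adapter with the PEFT library.
# Assumes adapter_config.json / adapter_model.bin follow the PEFT layout;
# "<this-repo-id>" is a placeholder for the actual Hub repository id.
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-hf")
model = PeftModel.from_pretrained(base, "<this-repo-id>")  # merges in the LoRA weights at inference time
model.eval()
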
adapter_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9bfd92581e86d57a7f86204f5cd67f7e5b6cd6eb788be47d3f8ba2bae30d3b75
+size 16823434
config.json
ADDED
@@ -0,0 +1 @@
+{"_name_or_path": "meta-llama/Llama-2-7b-hf", "architectures": ["LlamaForCausalLM"], "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 11008, "max_position_embeddings": 4096, "model_type": "llama", "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "tie_word_embeddings": false, "torch_dtype": "float16", "transformers_version": "4.31.0.dev0", "use_cache": true, "vocab_size": 32000}
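
config.json is the standard transformers configuration for Llama-2-7B (32 layers, 32 attention heads, hidden size 4096, vocabulary size 32000). As a sketch, the same architecture description can be expressed in code; every field value below is copied from the JSON above, nothing new is assumed.

# Sketch: the config.json above expressed as a transformers LlamaConfig.
from transformers import LlamaConfig

config = LlamaConfig(
    vocab_size=32000,
    hidden_size=4096,
    intermediate_size=11008,
    num_hidden_layers=32,
    num_attention_heads=32,
    num_key_value_heads=32,
    hidden_act="silu",
    max_position_embeddings=4096,
    rms_norm_eps=1e-05,
    tie_word_embeddings=False,
    torch_dtype="float16",
)
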
hf_model_0001_0.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:756dbbb0acb398af2da84d2506318f74e1f125aae492ec7889a3771b9c826548
+size 19953163444
hf_model_0002_0.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ac220bb5e717b2e5762ea029cf790f82681376bc593c4cbc811e91405701bcf1
+size 7000596392
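
The two hf_model_000X_0.pt files are large LFS-tracked checkpoint shards (roughly 20 GB and 7 GB). Assuming each .pt file is an ordinary torch.save()'d state dict, as the extension suggests, they could be inspected as sketched below; this is an assumption, not a loading path documented by this commit.

# Sketch: inspecting the sharded checkpoint files, assuming each .pt file
# holds a plain state dict saved with torch.save (an assumption).
import torch

shards = ["hf_model_0001_0.pt", "hf_model_0002_0.pt"]
state_dict = {}
for shard in shards:
    part = torch.load(shard, map_location="cpu")  # keep the weights on CPU
    state_dict.update(part)

print(f"{len(state_dict)} tensors loaded")
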