qklent committed (verified)
Commit 8bb74af · Parent(s): a3967d1

Upload folder using huggingface_hub

.ipynb_checkpoints/config-checkpoint.json ADDED
@@ -0,0 +1,25 @@
+ {
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "bos_token_id": 128000,
+   "eos_token_id": 128001,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 14336,
+   "max_position_embeddings": 2048,
+   "model_type": "llama",
+   "num_attention_heads": 32,
+   "num_key_value_heads": 8,
+   "num_hidden_layers": 1,
+   "pad_token_id": 0,
+   "rms_norm_eps": 1e-05,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.28.1",
+   "use_cache": true,
+   "vocab_size": 128256,
+   "rope_theta": 500000.0,
+   "bias": false
+ }
config.json ADDED
@@ -0,0 +1 @@
+ {"model_type": "eagle", "model": {"architectures": ["LlamaForCausalLM"], "bos_token_id": 128000, "eos_token_id": 128001, "hidden_act": "silu", "hidden_size": 4096, "initializer_range": 0.02, "intermediate_size": 14336, "max_position_embeddings": 2048, "model_type": "llama", "num_attention_heads": 32, "num_key_value_heads": 8, "num_hidden_layers": 1, "pad_token_id": 0, "rms_norm_eps": 1e-05, "tie_word_embeddings": false, "torch_dtype": "float16", "transformers_version": "4.28.1", "use_cache": true, "vocab_size": 128256, "rope_theta": 500000.0, "bias": false}}
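A minimal sketch (not part of this commit) of inspecting the uploaded config.json with transformers. The outer {"model_type": "eagle", "model": {...}} wrapper is unwrapped by hand, since "eagle" is not a model type transformers resolves on its own; the inner dict is a plain Llama config. File path and printed comments are assumptions based on this diff.

```python
# Sketch: load the wrapped EAGLE draft-model config shown above.
import json

from transformers import LlamaConfig

with open("config.json") as f:
    raw = json.load(f)

assert raw["model_type"] == "eagle"   # outer wrapper marks an EAGLE-style draft model
cfg = LlamaConfig(**raw["model"])     # extra keys such as "bias" are kept as attributes

print(cfg.num_hidden_layers)   # 1 -> a single-layer draft head
print(cfg.hidden_size)         # 4096
print(cfg.vocab_size)          # 128256 (Llama-3-style vocabulary)
```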
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b26b1e1a306da62703b3c3ee0117a72863314f47479deb83654511105aea7cf
+ size 4158686424
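What was committed as model.safetensors is a Git LFS pointer, not the weights themselves: a three-line text file recording the LFS spec version, the blob's SHA-256, and its size in bytes. A minimal sketch (not part of this commit) of parsing that pointer and checking a locally downloaded blob against it; the paths are illustrative.

```python
# Sketch: verify a downloaded blob against the LFS pointer shown above.
import hashlib
import os


def read_lfs_pointer(path: str) -> dict:
    """Parse the 'key value' lines of a Git LFS pointer file."""
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields


def matches_pointer(pointer_path: str, blob_path: str) -> bool:
    fields = read_lfs_pointer(pointer_path)
    if os.path.getsize(blob_path) != int(fields["size"]):
        return False
    digest = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)
    return "sha256:" + digest.hexdigest() == fields["oid"]


# Usage (hypothetical paths):
# matches_pointer("model.safetensors", "/tmp/model.safetensors")
```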