Svak committed on
Commit 708a1d1 · verified · 1 parent: 0c1ab5b

Upload 11 files

config.json ADDED
@@ -0,0 +1,43 @@
+ {
+   "_name_or_path": "TheDrummer/Nautilus-70B-v0.1",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 128000,
+   "eos_token_id": 128009,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 8192,
+   "initializer_range": 0.02,
+   "intermediate_size": 28672,
+   "max_position_embeddings": 131072,
+   "mlp_bias": false,
+   "model_type": "llama",
+   "num_attention_heads": 64,
+   "num_hidden_layers": 80,
+   "num_key_value_heads": 8,
+   "pretraining_tp": 1,
+   "quantization_config": {
+     "activation_scheme": "dynamic",
+     "ignored_layers": [
+       "lm_head"
+     ],
+     "quant_method": "fp8"
+   },
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "factor": 8.0,
+     "high_freq_factor": 4.0,
+     "low_freq_factor": 1.0,
+     "original_max_position_embeddings": 8192,
+     "rope_type": "llama3"
+   },
+   "rope_theta": 500000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.46.0",
+   "use_cache": false,
+   "vocab_size": 128256
+ }
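The file above is a standard Llama-3.1-70B-class config (80 layers, hidden size 8192, GQA with 8 KV heads, Llama-3 RoPE scaling from 8192 to 131072 positions) with an FP8 dynamic-activation quantization block that leaves lm_head unquantized. A minimal sketch for inspecting it with transformers' AutoConfig follows; "<this-repo-id>" is a placeholder for wherever this commit lands, not a name taken from the diff.

# Minimal sketch: read the uploaded config.json via AutoConfig.
# "<this-repo-id>" is a placeholder; substitute the actual Hub repo for this commit.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("<this-repo-id>")

# Llama-3-style 70B geometry: 80 layers, 8192 hidden size, 64 heads with 8 KV heads (GQA).
print(config.num_hidden_layers, config.hidden_size,
      config.num_attention_heads, config.num_key_value_heads)

# Weights are stored as FP8 with dynamic activation scales; lm_head is skipped.
print(config.quantization_config)   # e.g. {'quant_method': 'fp8', 'activation_scheme': 'dynamic', ...}

# Llama-3 RoPE scaling stretches the 8192-token pretraining window to 131072 positions.
print(config.rope_scaling)          # e.g. {'rope_type': 'llama3', 'factor': 8.0, ...}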
generation_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "bos_token_id": 128000,
+   "do_sample": true,
+   "eos_token_id": [
+     128001,
+     128008,
+     128009
+   ],
+   "transformers_version": "4.46.0"
+ }
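generation_config.json only turns sampling on and registers three end-of-sequence ids; generation stops at whichever of them appears first. A small sketch of reading those defaults, again with a placeholder repo id:

# Minimal sketch: load the generation defaults above with GenerationConfig.
# "<this-repo-id>" is a placeholder for the Hub repo this commit belongs to.
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("<this-repo-id>")
print(gen_cfg.do_sample)      # True  -> sampling is on by default
print(gen_cfg.eos_token_id)   # [128001, 128008, 128009] -> any of these ends generation
print(gen_cfg.bos_token_id)   # 128000

# model.generate() picks this file up automatically when the model is loaded from the
# same repo, or it can be passed explicitly: model.generate(**inputs, generation_config=gen_cfg)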
model-00001-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:efdeb28de138e68f20376a38422e5a710d0ddfaf8a10cfcb51f9ce134e788cc3
+ size 4819359900
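The safetensors entries in this commit are Git LFS pointers rather than the weights themselves: each pointer records the SHA-256 ("oid") and byte size of the real shard. A minimal verification sketch for the first shard; the local filename is an assumption about where the downloaded shard sits.

# Minimal sketch: check a downloaded shard against its LFS pointer's oid and size.
import hashlib
from pathlib import Path

EXPECTED_OID = "efdeb28de138e68f20376a38422e5a710d0ddfaf8a10cfcb51f9ce134e788cc3"
EXPECTED_SIZE = 4819359900

shard = Path("model-00001-of-00015.safetensors")  # the downloaded shard, not the pointer file

h = hashlib.sha256()
with shard.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

assert shard.stat().st_size == EXPECTED_SIZE, "size mismatch"
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("shard matches its LFS pointer")

The same check applies to the remaining shards below, substituting each pointer's oid and size.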
model-00002-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:609299c1afd187c51dff2d114ac0dc79a4bdd4719b5d9472cfa0b8014cd60559
+ size 4983039376
model-00003-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4c69b4b0704e1adbde3a50d56e624df1ea24911884b5bce26c476aa51446c864
+ size 4899121052
model-00004-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:217a6a79901c88e310cfaad513883423e81e106ff8aab7a43823f529fc61bfef
+ size 4899154060
model-00005-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cefecab868f24a5b2c66aa45a0112c30f63cdf5edba78799aa452e87c6c6c395
+ size 4899154060
model-00006-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c4d221836d8343b89a5d9f3790479160f870ab7d4e73e9555c25b984ae927247
+ size 4983039464
model-00007-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:25b79d9a86ead07145e5f100e96eb82a2797063158911cb1d80414b10877742f
+ size 4899121068
model-00008-of-00015.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b27c5a6e12dafc12b0eecaba81571f170be4543b0efd42400fc865b7682a97e
+ size 4899154060
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
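The index itself is not rendered above, so the sketch below assumes only the generic safetensors index layout (a "weight_map" from tensor names to shard files, plus optional "metadata"), not the specific contents of this file.

# Minimal sketch of how a model.safetensors.index.json is typically consumed;
# the keys below illustrate the general safetensors index format, not this exact file.
import json

with open("model.safetensors.index.json") as f:
    index = json.load(f)

# "weight_map" maps each tensor name to the shard that stores it, e.g.
# "model.layers.0.self_attn.q_proj.weight" -> "model-00001-of-00015.safetensors".
weight_map = index["weight_map"]
shards = sorted(set(weight_map.values()))
print(f"{len(weight_map)} tensors spread over {len(shards)} shards")
print("total bytes:", index.get("metadata", {}).get("total_size"))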