gizemgg committed on
Commit
100fb02
1 Parent(s): 34d9b1d

(Trained with Unsloth)

Browse files
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "unsloth/mistral-7b-v0.3-bnb-4bit",
3
  "architectures": [
4
  "MistralForCausalLM"
5
  ],
@@ -16,15 +16,15 @@
16
  "num_attention_heads": 32,
17
  "num_hidden_layers": 32,
18
  "num_key_value_heads": 8,
19
- "pad_token_id": 770,
20
  "rms_norm_eps": 1e-05,
21
  "rope_scaling": null,
22
- "rope_theta": 1000000.0,
23
- "sliding_window": null,
24
  "tie_word_embeddings": false,
25
  "torch_dtype": "float16",
26
  "transformers_version": "4.42.4",
27
  "unsloth_version": "2024.7",
28
  "use_cache": true,
29
- "vocab_size": 32768
30
  }
 
1
  {
2
+ "_name_or_path": "unsloth/mistral-7b-bnb-4bit",
3
  "architectures": [
4
  "MistralForCausalLM"
5
  ],
 
16
  "num_attention_heads": 32,
17
  "num_hidden_layers": 32,
18
  "num_key_value_heads": 8,
19
+ "pad_token_id": 2,
20
  "rms_norm_eps": 1e-05,
21
  "rope_scaling": null,
22
+ "rope_theta": 10000.0,
23
+ "sliding_window": 4096,
24
  "tie_word_embeddings": false,
25
  "torch_dtype": "float16",
26
  "transformers_version": "4.42.4",
27
  "unsloth_version": "2024.7",
28
  "use_cache": true,
29
+ "vocab_size": 32000
30
  }
model-00001-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:68891d03739a9a3d42586342ee2465b99b1b6089e12627756935f59ce06facda
3
- size 4949453696
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5707c2b0b6d5f391aaa82e49cf705d704c4186ddef769561353182a31e407a3a
3
+ size 4943162240
model-00002-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:967df32d725be01614033eb4a1c0d7a9dd87e522413cd4abdd5734edeee79a71
3
  size 4999819232
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:669d3c5dc665f3dd3652c8d2eb4e66f1aaba0e310cfa5c1de2816d37983fca5a
3
  size 4999819232
model-00003-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e4216a7e0da4478608e9a0c5f032dfbcb733679efce91c54dcada4a42fa2a6c5
3
- size 4546807712
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c7fb9c6bf3e804b3129ab527a3d84a31cdc04ac0c1aa7358522d2edd096e417d
3
+ size 4540516256
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "metadata": {
3
- "total_size": 14496047104
4
  },
5
  "weight_map": {
6
  "lm_head.weight": "model-00003-of-00003.safetensors",
 
1
  {
2
  "metadata": {
3
+ "total_size": 14483464192
4
  },
5
  "weight_map": {
6
  "lm_head.weight": "model-00003-of-00003.safetensors",
special_tokens_map.json CHANGED
@@ -14,7 +14,7 @@
14
  "single_word": false
15
  },
16
  "pad_token": {
17
- "content": "[control_768]",
18
  "lstrip": false,
19
  "normalized": false,
20
  "rstrip": false,
 
14
  "single_word": false
15
  },
16
  "pad_token": {
17
+ "content": "<unk>",
18
  "lstrip": false,
19
  "normalized": false,
20
  "rstrip": false,
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:37f00374dea48658ee8f5d0f21895b9bc55cb0103939607c8185bfd1c6ca1f89
3
- size 587404
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
3
+ size 493443
tokenizer_config.json CHANGED
The diff for this file is too large to render. See raw diff