riczhou committed
Commit d5a15db · verified · 1 Parent(s): 65d42a0

Upload folder using huggingface_hub

mlc-chat-config.json ADDED
@@ -0,0 +1,76 @@
+ {
+   "version": "0.1.0",
+   "model_type": "qwen2",
+   "quantization": "q4f32_1",
+   "model_config": {
+     "hidden_act": "silu",
+     "hidden_size": 1536,
+     "intermediate_size": 8960,
+     "num_attention_heads": 12,
+     "num_hidden_layers": 28,
+     "num_key_value_heads": 2,
+     "rms_norm_eps": 1e-06,
+     "rope_theta": 10000,
+     "vocab_size": 151936,
+     "tie_word_embeddings": false,
+     "context_window_size": 131072,
+     "prefill_chunk_size": 8192,
+     "tensor_parallel_shards": 1,
+     "head_dim": 128,
+     "dtype": "float32",
+     "max_batch_size": 128
+   },
+   "vocab_size": 151936,
+   "context_window_size": 131072,
+   "sliding_window_size": -1,
+   "prefill_chunk_size": 8192,
+   "attention_sink_size": -1,
+   "tensor_parallel_shards": 1,
+   "pipeline_parallel_stages": 1,
+   "temperature": 1.0,
+   "presence_penalty": 0.0,
+   "frequency_penalty": 0.0,
+   "repetition_penalty": 1.0,
+   "top_p": 1.0,
+   "tokenizer_files": [
+     "tokenizer.json",
+     "tokenizer_config.json"
+   ],
+   "tokenizer_info": {
+     "token_postproc_method": "byte_level",
+     "prepend_space_in_encode": false,
+     "strip_space_in_decode": false
+   },
+   "conv_template": {
+     "name": "deepseek_v3",
+     "system_template": "<\uff5cbegin\u2581of\u2581sentence\uff5c>{system_message}",
+     "system_message": "You are a helpful assistant.",
+     "system_prefix_token_ids": null,
+     "add_role_after_system_message": true,
+     "roles": {
+       "user": "<\uff5cUser\uff5c>",
+       "assistant": "<\uff5cAssistant\uff5c>"
+     },
+     "role_templates": {
+       "user": "{user_message}",
+       "assistant": "{assistant_message}",
+       "tool": "{tool_message}"
+     },
+     "messages": [],
+     "seps": [
+       "",
+       "<\uff5cend\u2581of\u2581sentence\uff5c>"
+     ],
+     "role_content_sep": "",
+     "role_empty_sep": "",
+     "stop_str": [],
+     "stop_token_ids": [
+       151643
+     ],
+     "function_string": "",
+     "use_function_calling": false
+   },
+   "pad_token_id": 0,
+   "bos_token_id": 151643,
+   "eos_token_id": 151643
+ }
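The config above wires a qwen2-architecture model (28 layers, hidden size 1536, q4f32_1 quantization) to the deepseek_v3 conversation template and stop token 151643. A minimal usage sketch, assuming the folder is consumed through the mlc_llm Python package's MLCEngine; the HF:// repo id below is a placeholder, not the actual path of this upload.

```python
# Sketch: chat with an MLC-compiled model folder like this one via mlc_llm.
from mlc_llm import MLCEngine

model = "HF://some-user/some-qwen2-q4f32_1-MLC"  # hypothetical placeholder repo id
engine = MLCEngine(model)

# OpenAI-style chat completion; the engine applies the conv_template from
# mlc-chat-config.json (system prompt, user/assistant role markers, stop ids).
for response in engine.chat.completions.create(
    messages=[{"role": "user", "content": "Hello!"}],
    model=model,
    stream=True,
):
    for choice in response.choices:
        print(choice.delta.content or "", end="", flush=True)
print()

engine.terminate()
```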
ndarray-cache-b16.json ADDED
The diff for this file is too large to render. See raw diff
 
ndarray-cache.json ADDED
The diff for this file is too large to render. See raw diff
 
params_shard_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a4d1fa25bab93bdee27531e61adba6dc2caea6073c31a2fd544f63dda2b338af
+ size 116686848
params_shard_1.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:29ab837f64b3ac1937cbbe3dfd24c9958bc8208f9d3bf96e27c64164ba5ed5e7
+ size 116686848
params_shard_10.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0b9804d09b1e919ed3b75a68ce2ee69ee3d710337b06af79a9bef033208ad4a3
+ size 26331136
params_shard_11.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db2a5b10f92bf55dea115a922c3d0542a8f4d37a504a2decd7f96d9b67bf8dd5
+ size 26331136
params_shard_12.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:24b30d6f4efacbf535ef4c4178e181ce6f9f1b68e9e5b60568afd1f534dbad91
+ size 26331136
params_shard_13.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:30467c7f05df29c3117913518d89d634dea9a6d38b6c487a3b3e74d88fc3a583
+ size 26331136
params_shard_14.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c066570bc414c818ed3a7ae2feefc9c154f83bb3e5a14eb134e409b2b03c353
+ size 26331136
params_shard_15.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:02f86898b41c5670edf82e3afa5a91760c1b33d88b30e8fbd664a6e87ce42c4b
+ size 26331136
params_shard_16.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:29c070a0392cf4a29855b381d126fa1dff21567162b0a9305fe7244a29060b21
+ size 26331136
params_shard_17.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a3f37453bfee2894fd2225c75bcecf3b8ac63a1a732b90c0f7a387c824812476
+ size 26331136
params_shard_18.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f52432700ffb5c2506d6939133fc0e1366842bfc5b6398830498e0695ee7460e
+ size 26331136
params_shard_19.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c3692ddfabe072c1376ed61209bcb42d2cb4f430ee01911cac98ad541212c7ad
+ size 26331136
params_shard_2.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0168a8b8f6459dad35d5fc9956219d31414b7ec246b3a019bfb29d92968c496e
+ size 29174784
params_shard_20.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d30e8dc087c3801f92bf962d514c99b6e54e07e3cd4e726b371df4090c03f861
+ size 26331136
params_shard_21.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:befae26ea25cc5919bb9646fea6496b78e7100b836db9c52439fd5910cea3703
+ size 26331136
params_shard_22.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:65c5c86b043d62bac22d6d7f591e345568eaffe9a0484f2abdaefac15cae57df
+ size 26331136
params_shard_23.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:00b663c5bb64030d233a6f0c449fa5343c61b036e432d1d6928544cd85b49b7f
+ size 26331136
params_shard_24.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:677d1df1179512e30f372dc91f90684ee0af2ad0e3a4f63094f2a73ba6221b05
+ size 26331136
params_shard_25.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:83d0ff8801ee8dce27cf701d7d2362672e1bb5f740ef7b38b74018fa42e6c129
+ size 26331136
params_shard_26.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:616c37ad221a4b3d79b8bdad4e204507469e858a7724ab1d5da293134b21eca4
+ size 26331136
params_shard_27.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fae875a6701fb126172d6b67d46269c97c3a58902143dd5c2f3b5f17f41a08dc
+ size 26331136
params_shard_28.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e9f0b0eeee4dcbfe3b1bafbe1d006a8203ebc43c15b8a17a6694d7a588d203c0
+ size 26331136
params_shard_29.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:70694802b48fa31110c5b12bfb64c3f8d93dedc0c835197c72dad01026b92d54
+ size 26331136
params_shard_3.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3340731366f5eef347332610f751ebb53a41d6c6058294f867daa6e7f09a3f14
+ size 33212416
params_shard_30.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86074eb00eb1ba19a4b9843af5d0921f40a0ea0465a23683365ad97da549e0e5
+ size 18589696
params_shard_4.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:35c1be0353e94b955b7d6b2040984634eaf25bfd184c2d97fc2ed23e37e83048
+ size 27191296
params_shard_5.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:03a0ce57941633fa5f6a2e1d8faae013278e1fd22caf5d029cf0686da836a3ec
+ size 26331136
params_shard_6.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dcea40b18bd7ef899c2aff644d66770baa7000e8fd5659ab28941d5e2bb2cc80
+ size 26331136
params_shard_7.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:720ad30c2c64af45206f7c52a25091a5b35b463d0d86a31d584dbf525eabbe27
+ size 26331136
params_shard_8.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d71409e44c67ea5ff80283a9cdc44e72d2b14bcc1977991fe1a36e7d981e3f09
+ size 26331136
params_shard_9.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:65d2fac827f0f40496b8ea02ff1e9d560d1fe01e69b4d60156889d9cd38a75fd
+ size 26331136
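Each params_shard_*.bin entry above is a Git LFS pointer (spec version, sha256 oid, byte size) rather than the weight blob itself. Below is a self-contained sketch for checking a downloaded shard against its pointer text; the file paths are hypothetical.

```python
# Sketch: verify a downloaded parameter shard against its Git LFS pointer.
import hashlib
from pathlib import Path


def parse_lfs_pointer(text: str) -> dict:
    """Turn the 'key value' lines of a Git LFS pointer into a dict."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields


def verify_shard(pointer_path: str, blob_path: str) -> bool:
    fields = parse_lfs_pointer(Path(pointer_path).read_text())
    expected_oid = fields["oid"].split(":", 1)[1]  # strip the "sha256:" prefix
    expected_size = int(fields["size"])
    data = Path(blob_path).read_bytes()
    return len(data) == expected_size and hashlib.sha256(data).hexdigest() == expected_oid


# Hypothetical local paths: a saved pointer file next to the fetched binary.
# print(verify_shard("params_shard_0.pointer", "params_shard_0.bin"))
```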
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,35 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "bos_token": {
+     "__type": "AddedToken",
+     "content": "<|begin▁of▁sentence|>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "clean_up_tokenization_spaces": false,
+   "eos_token": {
+     "__type": "AddedToken",
+     "content": "<|end▁of▁sentence|>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "legacy": true,
+   "model_max_length": 16384,
+   "pad_token": {
+     "__type": "AddedToken",
+     "content": "<|end▁of▁sentence|>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "sp_model_kwargs": {},
+   "unk_token": null,
+   "tokenizer_class": "LlamaTokenizerFast",
+   "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='') %}{%- for message in messages %}{%- if message['role'] == 'system' %}{% set ns.system_prompt = message['content'] %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'<|Assistant|><|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- set ns.is_first = true -%}{%- else %}{{'\\n' + '<|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'<|tool▁outputs▁end|>' + message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '</think>' in content %}{% set content = content.split('</think>')[-1] %}{% endif %}{{'<|Assistant|>' + content + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'<|tool▁outputs▁begin|><|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\\n<|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool %}{{'<|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'<|Assistant|>'}}{% endif %}"
+ }
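The chat_template above is a Jinja template that Hugging Face transformers can render directly, which is a quick way to inspect the exact prompt string (BOS, system prompt, user/assistant markers) the model expects. A minimal sketch, assuming tokenizer.json and tokenizer_config.json have been downloaded into a local directory; the path is a placeholder.

```python
# Sketch: render the chat template from this repo's tokenizer files.
from transformers import AutoTokenizer

# Placeholder path to a local copy of tokenizer.json + tokenizer_config.json.
tokenizer = AutoTokenizer.from_pretrained("./local-model-dir")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# tokenize=False returns the rendered prompt string; add_generation_prompt=True
# appends the assistant role marker so the model starts its reply.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
```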