yujuanqin committed
Commit 66400da
Parent: b7d0f1b

Upload 35 files
Qwen1.5-1.8B-Chat-q4f16_1-android.tar ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:66189d304a5834d43e84ecac49dccd58376371433a30d64c214210a3f4ba2b01
+ size 397537
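
These three lines are a Git LFS pointer, not the archive itself: `oid` is the SHA-256 of the real payload and `size` is its byte count; `git lfs pull` swaps the pointer for the binary. A minimal sketch (Python stdlib only, with the oid and size copied from the pointer above) to check a pulled file:

```python
# Minimal sketch: verify a pulled LFS payload against its pointer's oid/size.
# EXPECTED_* values are copied from the pointer shown above.
import hashlib
import os

PATH = "Qwen1.5-1.8B-Chat-q4f16_1-android.tar"  # after `git lfs pull`
EXPECTED_OID = "66189d304a5834d43e84ecac49dccd58376371433a30d64c214210a3f4ba2b01"
EXPECTED_SIZE = 397537

h = hashlib.sha256()
with open(PATH, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(PATH) == EXPECTED_SIZE, "size mismatch"
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("payload matches its LFS pointer")
```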
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
mlc-chat-config.json ADDED
@@ -0,0 +1,82 @@
+ {
+ "model_type": "qwen2",
+ "quantization": "q4f16_1",
+ "model_config": {
+ "hidden_act": "silu",
+ "hidden_size": 2048,
+ "intermediate_size": 5504,
+ "num_attention_heads": 16,
+ "num_hidden_layers": 24,
+ "num_key_value_heads": 16,
+ "rms_norm_eps": 1e-06,
+ "rope_theta": 1000000.0,
+ "vocab_size": 151936,
+ "context_window_size": 768,
+ "prefill_chunk_size": 768,
+ "tensor_parallel_shards": 1,
+ "head_dim": 128,
+ "dtype": "float32"
+ },
+ "vocab_size": 151936,
+ "context_window_size": 768,
+ "sliding_window_size": -1,
+ "prefill_chunk_size": 768,
+ "attention_sink_size": -1,
+ "tensor_parallel_shards": 1,
+ "mean_gen_len": 128,
+ "max_gen_len": 512,
+ "shift_fill_factor": 0.3,
+ "temperature": 0.7,
+ "presence_penalty": 0.0,
+ "frequency_penalty": 0.0,
+ "repetition_penalty": 1.1,
+ "top_p": 0.8,
+ "conv_template": {
+ "name": "llama-2",
+ "system_template": "[INST] <<SYS>>\n{system_message}\n<</SYS>>\n\n",
+ "system_message": "You are a helpful, respectful and honest assistant.",
+ "system_prefix_token_ids": [
+ 1
+ ],
+ "add_role_after_system_message": false,
+ "roles": {
+ "user": "<s>[INST]",
+ "assistant": "[/INST]",
+ "tool": "[INST]"
+ },
+ "role_templates": {
+ "user": "{user_message}",
+ "assistant": "{assistant_message}",
+ "tool": "{tool_message}"
+ },
+ "messages": [],
+ "seps": [
+ " ",
+ " </s>"
+ ],
+ "role_content_sep": " ",
+ "role_empty_sep": " ",
+ "stop_str": [
+ "[INST]"
+ ],
+ "stop_token_ids": [
+ 2
+ ],
+ "function_string": "",
+ "use_function_calling": false
+ },
+ "pad_token_id": 151643,
+ "bos_token_id": 151643,
+ "eos_token_id": [
+ 151645,
+ 151643
+ ],
+ "tokenizer_files": [
+ "tokenizer.json",
+ "vocab.json",
+ "merges.txt",
+ "tokenizer_config.json"
+ ],
+ "token_table_postproc_method": "byte_level",
+ "version": "0.1.0"
+ }
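
The `conv_template` block is what the runtime uses to turn chat messages into prompt text. Below is a minimal sketch of one reading of those fields for a single user turn; it is illustrative only, since the actual assembly logic lives inside the MLC LLM runtime:

```python
# Minimal sketch: compose a single-turn prompt from the conv_template fields.
# Illustrative reading of the config; MLC LLM's runtime is the source of truth.
import json

with open("mlc-chat-config.json") as f:
    conv = json.load(f)["conv_template"]

# "[INST] <<SYS>>\n{system_message}\n<</SYS>>\n\n" filled with the default message.
prompt = conv["system_template"].format(system_message=conv["system_message"])

# add_role_after_system_message is false here, so the first user turn does not
# repeat the "<s>[INST]" role tag; its content follows the system block directly.
if conv["add_role_after_system_message"]:
    prompt += conv["roles"]["user"] + conv["role_content_sep"]
prompt += conv["role_templates"]["user"].format(user_message="Hello!")

prompt += conv["seps"][0]             # " " separates the user turn from the reply
prompt += conv["roles"]["assistant"]  # "[/INST]" cues the model to generate
print(prompt)
# Decoding would stop on stop_str "[INST]" or stop_token_ids [2].
```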
ndarray-cache.json ADDED
The diff for this file is too large to render. See raw diff
 
params_shard_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:be8dd1cdfb916be84e78092e7b3328b7f2b4de9cc7979abab8b649a6d069174d
+ size 155582464
params_shard_1.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7d3731778271ebff14f532c83b6dc7e58967abd5c6aa314f40d283789a3de27f
+ size 155582464
params_shard_10.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3eb2e5fb98962bc1a43182a05a7a9f722babe5f70973bf51f3f5648f4c0e323b
+ size 28479488
params_shard_11.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8037268a7a2ccbe68eda9de358d681389113217261935e6b7c49fc58253c4c5a
+ size 28479488
params_shard_12.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:22e430dfa18d28b09d965e86a41932282b6a0d183c8fa927c5275874f55f97d1
+ size 28479488
params_shard_13.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1a7514c5748c88a425760fb5ece1374830cbc0f4650d9e1583a76bf3aacf1737
+ size 28479488
params_shard_14.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b0ef2e39fed6857dca2cbcf9c444137a0b240f9ec02e9a25a5bd4eb7476d3142
+ size 28479488
params_shard_15.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e8594f6bdf205afb8cc38bd9ed225215e826326190b559114c69e499bc47cc89
+ size 28479488
params_shard_16.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d12432729ea4feb190ff688434d7336627715f1c56432191f180bdcb5df522f4
+ size 28479488
params_shard_17.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fdb22e39f58c1edfd79a786333c255b407bb71204e6815f5f0d34102bbc3d373
+ size 28479488
params_shard_18.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dffe2aa29a836915390348cb69f1bb8fcdb3bef22536f8e45dbae412a2a6084b
+ size 28479488
params_shard_19.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bbd3fd35e5bd5cdd500d302e137a22d232c0990311301384ad5ef5da0492b497
+ size 28479488
params_shard_2.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:92d28e00d872e03fdca57076746f2440a921e43d383f0174808cb2319cfe5c8f
+ size 19447808
params_shard_20.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c6fd2522ba59ca50aba1e2ea0365a5683ebd58d138d24c87c51d507b47652f23
+ size 28479488
params_shard_21.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aa1998cd78c7ede8ad38073e8b967fba447b939141be707d8ebd6667a168d881
+ size 28479488
params_shard_22.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c77a37e181d157d2e4cc4f4bc36343d0f3164a78b9bba802b028df74b8ce57dc
+ size 28479488
params_shard_23.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0228a664994dd2b66987336ca01ebe7dfbc7b91e2b24cd05c308cdc7012b187b
+ size 28479488
params_shard_24.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7b323cffd30e605b921302ef86afc08908f3c0760301e23b711c4e5238d58c1f
+ size 28479488
params_shard_25.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:99d08fe5821ee317454594a6ca6e494025e82055492bbee795333c47f6fd5d66
+ size 28479488
params_shard_26.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:860ba348155ce869b60606b130944d315dbcedf8aedd76752001d99552001b92
+ size 28479488
params_shard_27.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:13f617ff376a20ad9c0db1ffae5d5d3e641f6c4a39db3bc0173a0acc46fc2912
+ size 22138880
params_shard_3.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:614907f6492bbbc5ce20be53c75843f09b9e90f69908e03fcade9ec386a3f5c8
+ size 25792512
params_shard_4.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5902ef375ba879fffa2afeef20f42a70c667fc54ce5ab859a7512a15dd3fd97b
+ size 28479488
params_shard_5.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cfddb751d92f8c4e8b9eb97b16fae9f86d5f5df4f7beb581d143f3102bc9c3a8
+ size 28479488
params_shard_6.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1e0a642a6c3f04b68724c731ae174a4056b777922a08176ea5552adef708fc08
+ size 28479488
params_shard_7.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5adfe29eba24411affacd634edc459c25bccc7e05570a751518cd07f772f1ad6
+ size 28479488
params_shard_8.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dee38eb4aa7fdee6d21bb6dd0454f7d735a2e0fa43654c2f7bc4ad7021ee99a4
+ size 28479488
params_shard_9.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:15697da00ce8c66d7467ff768c9f85df1c1980fd749f46a3d3839b0e52fa659b
+ size 28479488
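
`ndarray-cache.json` (not rendered above) is the manifest that maps parameter tensors into these `params_shard_*.bin` files. Before `git lfs pull`, the shard files are still small text pointers, so the total weight payload can be read straight off their `size` lines. A minimal sketch, assuming it runs in the repository root:

```python
# Minimal sketch: total the `size` fields of the params_shard_*.bin LFS
# pointers. Run in a clone *before* `git lfs pull`, while they are still text.
import glob

total = 0
for path in sorted(glob.glob("params_shard_*.bin")):
    with open(path) as f:
        for line in f:
            if line.startswith("size "):
                total += int(line.split()[1])

print(f"{total / 2**20:.0f} MiB of quantized weights")  # ~986 MiB for this commit
```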
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,40 @@
+ {
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "151643": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151644": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151645": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "additional_special_tokens": ["<|im_start|>", "<|im_end|>"],
+ "bos_token": null,
+ "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "errors": "replace",
+ "model_max_length": 32768,
+ "pad_token": "<|endoftext|>",
+ "split_special_tokens": false,
+ "tokenizer_class": "Qwen2Tokenizer",
+ "unk_token": null
+ }
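
The `chat_template` above is a Jinja template that emits ChatML turns (note that `mlc-chat-config.json` earlier in this commit instead pins the `llama-2` conversation template for the compiled build). A minimal sketch rendering it with Jinja2, the engine Hugging Face tokenizers use for chat templates; the message list is illustrative:

```python
# Minimal sketch: render the chat_template from tokenizer_config.json with
# Jinja2 to see the ChatML prompt it produces. Messages are illustrative.
import json

from jinja2 import Template

with open("tokenizer_config.json") as f:
    chat_template = json.load(f)["chat_template"]

messages = [{"role": "user", "content": "Hello!"}]
print(Template(chat_template).render(messages=messages, add_generation_prompt=True))
# <|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# Hello!<|im_end|>
# <|im_start|>assistant
```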
vocab.json ADDED
The diff for this file is too large to render. See raw diff