{
  "_name_or_path": "/root/autodl-tmp/chatglm3-6b",
  "add_bias_linear": false,
  "add_qkv_bias": true,
  "apply_query_key_layer_scaling": true,
  "apply_residual_connection_post_layernorm": false,
  "architectures": [
    "ChatGLMForConditionalGeneration"
  ],
  "attention_dropout": 0.0,
  "attention_softmax_in_fp32": true,
  "auto_map": {
    "AutoConfig": "configuration_chatglm.ChatGLMConfig",
    "AutoModel": "modeling_chatglm.ChatGLMForConditionalGeneration",
    "AutoModelForCausalLM": "modeling_chatglm.ChatGLMForConditionalGeneration",
    "AutoModelForSeq2SeqLM": "modeling_chatglm.ChatGLMForConditionalGeneration",
    "AutoModelForSequenceClassification": "modeling_chatglm.ChatGLMForSequenceClassification"
  },
  "bias_dropout_fusion": true,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "ffn_hidden_size": 13696,
  "fp32_residual_connection": false,
  "hidden_dropout": 0.0,
  "hidden_size": 4096,
  "kv_channels": 128,
  "layernorm_epsilon": 1e-05,
  "model_type": "chatglm",
  "multi_query_attention": true,
  "multi_query_group_num": 2,
  "num_attention_heads": 32,
  "num_layers": 28,
  "original_rope": true,
  "pad_token_id": 0,
  "padded_vocab_size": 65024,
  "post_layer_norm": true,
  "pre_seq_len": null,
  "prefix_projection": false,
  "quantization_bit": 0,
  "quantization_config": {
    "batch_size": 1,
    "bits": 4,
    "block_name_to_quantize": null,
    "cache_block_outputs": true,
    "damp_percent": 0.1,
    "dataset": [
      "\u65b0\u98a8\u7cfb\u7d71\u662f\u900f\u904e\u7cfb\u7d71\u8a2d\u8a08\u9001\u98a8\u548c\u6392\u98a8\u4f7f\u5ba4\u5167\u7a7a\u6c23\u5b58\u5728\u4e00\u5b9a\u7684\u58d3\u5dee",
      "\u5411\u5ba4\u5167\u63d0\u4f9b\u8db3\u5920\u7684\u65b0\u98a8\u4e26\u6392\u51fa\u5ba4\u5167\u6c59\u6fc1\u7a7a\u6c23 ",
      "\u7121\u9700\u958b\u7a97\u5168\u5929\u6301\u7e8c\u4e0d\u65b7\u6709\u7d44\u7e54\u7684\u5411\u5ba4\u5167\u5f15\u5165\u65b0\u98a8",
      "\u70ba\u5ba4\u5167\u4eba\u54e1\u547c\u5438\u4ee3\u8b1d\u63d0\u4f9b\u6240\u9700\u6c27\u6c23",
      "\u4f7f\u7528\u8d85\u8fc72.4\u4e07\u4ebftokens\u7684\u6570\u636e\u8fdb\u884c\u9884\u8bad\u7ec3, \u5305\u542b\u9ad8\u8d28\u91cf\u4e2d\u3001\u82f1\u3001\u591a\u8bed\u8a00\u3001\u4ee3\u7801\u3001\u6570\u5b66\u7b49\u6570\u636e\uff0c\u6db5\u76d6\u901a\u7528\u53ca\u4e13\u4e1a\u9886\u57df\u7684\u8bad\u7ec3\u8bed\u6599\u3002\u901a\u8fc7\u5927\u91cf\u5bf9\u6bd4\u5b9e\u9a8c\u5bf9\u9884\u8bad\u7ec3\u8bed\u6599\u5206\u5e03\u8fdb\u884c\u4e86\u4f18\u5316\u76f8\u6bd4\u76ee\u524d\u4ee5\u4e2d\u82f1\u8bcd\u8868\u4e3a\u4e3b\u7684\u5f00\u6e90\u6a21\u578b, Qwen-7B\u4f7f\u7528\u4e86\u7ea615\u4e07\u5927\u5c0f\u7684\u8bcd\u8868\u3002\u8be5\u8bcd\u8868\u5bf9\u591a\u8bed\u8a00\u66f4\u52a0\u53cb\u597d, \u65b9\u4fbf\u7528\u6237\u5728\u4e0d\u6269\u5c55\u8bcd\u8868\u7684\u60c5\u51b5\u4e0b\u5bf9\u90e8\u5206\u8bed\u79cd\u8fdb\u884c\u80fd\u529b\u589e\u5f3a\u548c\u6269\u5c55\u3002"
    ],
    "desc_act": false,
    "exllama_config": {
      "version": 1
    },
    "group_size": 128,
    "max_input_length": null,
    "model_seqlen": null,
    "module_name_preceding_first_block": null,
    "pad_token_id": null,
    "quant_method": "gptq",
    "sym": true,
    "tokenizer": null,
    "true_sequential": true,
    "use_cuda_fp16": false,
    "use_exllama": true
  },
  "rmsnorm": true,
  "seq_length": 8192,
  "tie_word_embeddings": false,
  "torch_dtype": "float16",
  "transformers_version": "4.36.2",
  "use_cache": true,
  "vocab_size": 65024
}