{
  "quant_method": "QUiP",
  "rescale_WH": false,
  "use_rand": true,
  "codebook": "E8P12RVQ3B",
  "codesz": 8,
  "idx_dtype": "torch.int32",
  "merge_suv": false,
  "per_channel": false,
  "opt_resid_scale": -1,
  "modules_to_not_convert": null
}
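
A minimal sketch of how a config like this could be read in Python, using only the standard library. The QuipQuantConfig dataclass and the file name "quantize_config.json" are illustrative assumptions, not an API defined by this repository; the field names simply mirror the keys in the JSON above.

# Hypothetical loader for the QuIP quantization config shown above.
import json
from dataclasses import dataclass
from typing import List, Optional

@dataclass
class QuipQuantConfig:               # illustrative container, not a real library class
    quant_method: str                # "QUiP"
    rescale_WH: bool
    use_rand: bool
    codebook: str                    # e.g. "E8P12RVQ3B"
    codesz: int                      # codebook vector dimension (8 for the E8-based codebooks)
    idx_dtype: str                   # e.g. "torch.int32"
    merge_suv: bool
    per_channel: bool
    opt_resid_scale: float           # -1 disables residual-scale optimization in this config
    modules_to_not_convert: Optional[List[str]]

with open("quantize_config.json") as f:   # assumed file name
    cfg = QuipQuantConfig(**json.load(f))

assert cfg.quant_method == "QUiP"
assert cfg.codesz == 8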