{
"bos_token": "[BOS]",
"eos_token": "[|endofturn|]",
"layer_norm_epsilon": 1e-05,
"multi_query_attention": true,
"quantization_bits": null,
"quantization_group_size": null,
"quantization_type": 0,
"unk_token": "[UNK]"
}
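
The fields above (special tokens, layer norm epsilon, multi-query attention flag, quantization settings) are the metadata the CTranslate2 converter records for a converted decoder-only model; quantization_bits is null, so the stored weights are unquantized. Below is a minimal sketch of loading such a model directory and generating text with it. The directory path and the use of a Hugging Face tokenizer are assumptions for illustration only and are not part of this file; the "[|endofturn|]" end token is taken from the config above.

# Minimal sketch, assuming a local CTranslate2 model directory and a
# Hugging Face tokenizer shipped alongside it (both are assumptions).
import ctranslate2
from transformers import AutoTokenizer

model_dir = "exaone-ct2"  # assumed path to the converted model directory

# The config stores no quantization, but compute_type can still request
# on-the-fly quantization when the model is loaded.
generator = ctranslate2.Generator(model_dir, device="cpu", compute_type="int8")

# Tokenizer source is an assumption; CTranslate2 does not bundle one.
tokenizer = AutoTokenizer.from_pretrained(model_dir)

prompt = "Hello, how are you?"
tokens = tokenizer.convert_ids_to_tokens(tokenizer.encode(prompt))

results = generator.generate_batch(
    [tokens],
    max_length=128,
    sampling_topk=40,
    sampling_temperature=0.7,
    include_prompt_in_result=False,
    end_token="[|endofturn|]",  # matches eos_token in the config above
)

output_ids = results[0].sequences_ids[0]
print(tokenizer.decode(output_ids))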