abhinavkulkarni committed
Commit 818a87e • 1 Parent(s): 97e604d
Upload config

config.json CHANGED (+3 -3)
@@ -6,7 +6,7 @@
   "attn_config": {
     "alibi": true,
     "alibi_bias_max": 8,
-    "attn_impl": "
+    "attn_impl": "torch",
     "attn_pdrop": 0,
     "attn_type": "multihead_attention",
     "attn_uses_sequence_id": false,
@@ -45,8 +45,8 @@
   "norm_type": "low_precision_layernorm",
   "resid_pdrop": 0,
   "tokenizer_name": "EleutherAI/gpt-neox-20b",
-  "torch_dtype": "
-  "transformers_version": "4.
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.33.1",
   "use_cache": false,
   "verbose": 0,
   "vocab_size": 50432
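
For context, a minimal sketch (not part of the commit) of how the updated fields are picked up at load time with transformers. This assumes transformers >= 4.33.1 and that the repo ships MPT-style custom modeling code loaded via trust_remote_code; the repo id below is a placeholder, not taken from this commit.

# Sketch only: repo id is a hypothetical placeholder, not from the commit.
import torch
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "abhinavkulkarni/<this-model-repo>"  # hypothetical repo id

# The config loader converts the "torch_dtype" string into a torch.dtype
# and keeps "attn_config" as a nested dict on MPT-style configs.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.attn_config["attn_impl"])  # "torch" after this commit
print(config.torch_dtype)               # torch.bfloat16

model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16"
    trust_remote_code=True,      # MPT uses custom modeling code on the Hub
)

Switching "attn_impl" to "torch" selects the pure-PyTorch attention path, which avoids a dependency on a Triton attention kernel at inference time.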