Muennighoff committed
Commit fdd9eac
Parent(s): fb716d3

Update config.json (#17)
- Update config.json (1c8ef235738d73ee34dae38a945ad6b049312dae)
- config.json +0 -1
config.json
CHANGED
@@ -21,7 +21,6 @@
   "num_attention_heads": 32,
   "offset_alibi": 100,
   "pretraining_tp": 1,
-  "seq_length": 2048,
   "skip_bias_add": true,
   "skip_bias_add_qkv": false,
   "transformers_version": "4.20.0",
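
For reference, a minimal sketch of how to check the effect of this change once the updated config is pulled with transformers. The repository id below is a placeholder assumption, not taken from this page; substitute the repository this commit belongs to.

```python
# Minimal sketch: confirm that "seq_length" is no longer exposed by the published config.
# Assumption: "bigscience/bloom" stands in for the actual repository of this commit.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("bigscience/bloom")

# With the key removed from config.json, the attribute only exists if the
# model's config class defines a default for it.
print(getattr(config, "seq_length", "seq_length not set in config"))
```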