Change use_cache to True which significantly speeds up inference (#2)
Browse files- Change use_cache to True which significantly speeds up inference (721f3de37aa143265cc344fe3a6df08e371282e2)
Co-authored-by: Tom Jobbins <[email protected]>
- config.json +1 -1
config.json
CHANGED
@@ -18,6 +18,6 @@
|
|
18 |
"tie_word_embeddings": false,
|
19 |
"torch_dtype": "float16",
|
20 |
"transformers_version": "4.29.0.dev0",
|
21 |
-
"use_cache": false,
|
22 |
"vocab_size": 32001
|
23 |
}
|
|
|
18 |
"tie_word_embeddings": false,
|
19 |
"torch_dtype": "float16",
|
20 |
"transformers_version": "4.29.0.dev0",
|
21 |
+
"use_cache": true,
|
22 |
"vocab_size": 32001
|
23 |
}
|