remove max_new_tokens parameter
#3
by
jannalu
- opened
- generation_config.json +11 -12
generation_config.json
CHANGED
@@ -1,13 +1,12 @@
|
|
1 |
-
{
|
2 |
-
"chat_format": "chatml",
|
3 |
-
"bos_token_id": 0,
|
4 |
-
"eos_token_id": 0,
|
5 |
-
"pad_token_id": 0,
|
6 |
-
"max_window_size": 2147483647,
|
7 |
-
"max_new_tokens": …,  (value truncated in this view — removed by this PR)
|
8 |
-
"do_sample": true,
|
9 |
-
"top_k": 65536,
|
10 |
-
"top_p": 1.0,
|
11 |
-
"temperature": 1.0,
|
-
"transformers_version": "4.48.0"
|
13 |
}
|
|
|
1 |
+
{
|
2 |
+
"chat_format": "chatml",
|
3 |
+
"bos_token_id": 0,
|
4 |
+
"eos_token_id": 0,
|
5 |
+
"pad_token_id": 0,
|
6 |
+
"max_window_size": 2147483647,
|
7 |
+
"do_sample": true,
|
8 |
+
"top_k": 65536,
|
9 |
+
"top_p": 1.0,
|
10 |
+
"temperature": 1.0,
|
11 |
+
"transformers_version": "4.48.0"
|
|
|
12 |
}
|