remove max_new_tokens parameter

#3
by jannalu - opened
Files changed (1) hide show
  1. generation_config.json +11 -12
generation_config.json CHANGED
@@ -1,13 +1,12 @@
- {
- "chat_format": "chatml",
- "bos_token_id": 0,
- "eos_token_id": 0,
- "pad_token_id": 0,
- "max_window_size": 2147483647,
- "max_new_tokens": 4096,
- "do_sample": true,
- "top_k": 65536,
- "top_p": 1.0,
- "temperature": 1.0,
- "transformers_version": "4.48.0"
+ {
+ "chat_format": "chatml",
+ "bos_token_id": 0,
+ "eos_token_id": 0,
+ "pad_token_id": 0,
+ "max_window_size": 2147483647,
+ "do_sample": true,
+ "top_k": 65536,
+ "top_p": 1.0,
+ "temperature": 1.0,
+ "transformers_version": "4.48.0"
  }