ankner committed
Commit 592069b · verified · Parent: 6c017b3

Upload LlamaForCausalLM

Files changed (2):
  1. config.json +1 -1
  2. generation_config.json +2 -5
config.json CHANGED
@@ -6,7 +6,7 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
-  "eos_token_id": 128009,
+  "eos_token_id": 128001,
   "head_dim": 128,
   "hidden_act": "silu",
   "hidden_size": 3072,
generation_config.json CHANGED
@@ -1,11 +1,8 @@
 {
+  "_from_model_config": true,
   "bos_token_id": 128000,
   "do_sample": true,
-  "eos_token_id": [
-    128001,
-    128008,
-    128009
-  ],
+  "eos_token_id": 128001,
   "temperature": 0.6,
   "top_p": 0.9,
   "transformers_version": "4.46.3"