yujiepan committed
Commit: 03acaae
Parent: 25f8965

Upload folder using huggingface_hub

Files changed (3):
  1. config.json +1 -1
  2. generation_config.json +9 -3
  3. tokenizer.json +1 -0
config.json CHANGED
@@ -22,7 +22,7 @@
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.38.2",
+  "transformers_version": "4.40.1",
   "use_cache": true,
   "vocab_size": 128256
 }
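
The only change here is the transformers_version stamp that the library writes into config.json when a model is saved; the model hyperparameters are untouched. A minimal sketch, not part of this commit, that reads the field back, assuming a local checkout of this repository in the working directory:

# Minimal sketch (assumption: run from a local checkout of this repository).
from transformers import AutoConfig

config = AutoConfig.from_pretrained(".")
print(config.transformers_version)  # expected: "4.40.1" after this commit
print(config.rope_theta)            # 500000.0 (unchanged)
print(config.vocab_size)            # 128256 (unchanged)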
generation_config.json CHANGED
@@ -1,6 +1,12 @@
 {
-  "_from_model_config": true,
   "bos_token_id": 128000,
-  "eos_token_id": 128001,
-  "transformers_version": "4.38.2"
+  "do_sample": true,
+  "eos_token_id": [
+    128001,
+    128009
+  ],
+  "max_length": 4096,
+  "temperature": 0.6,
+  "top_p": 0.9,
+  "transformers_version": "4.40.1"
 }
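
The new generation_config.json switches the default decoding to sampling and registers a second end-of-sequence token (128009, the <|eot_id|> token in Llama-3-style tokenizers). A minimal sketch, not part of this commit, of how these defaults read back through transformers, again assuming a local checkout of this repository:

# Minimal sketch (assumption: run from a local checkout of this repository).
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained(".")

print(gen_config.do_sample)     # True  -> generate() samples by default
print(gen_config.temperature)   # 0.6
print(gen_config.top_p)         # 0.9
print(gen_config.eos_token_id)  # [128001, 128009] -> generation stops on either id
print(gen_config.max_length)    # 4096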
tokenizer.json CHANGED
@@ -2407,6 +2407,7 @@
   "end_of_word_suffix": null,
   "fuse_unk": false,
   "byte_fallback": false,
+  "ignore_merges": true,
   "vocab": {
     "!": 0,
     "\"": 1,