danielhanchen committed
Commit 7a052bc · verified · 1 Parent(s): 22b4117

Upload Qwen2ForCausalLM

Files changed (2):
  1. config.json +1 -1
  2. generation_config.json +1 -1
config.json CHANGED
@@ -23,7 +23,7 @@
   "sliding_window": null,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.48.0",
+  "transformers_version": "4.48.1",
   "unsloth_fixed": true,
   "use_cache": true,
   "use_sliding_window": false,
generation_config.json CHANGED
@@ -4,5 +4,5 @@
   "eos_token_id": 151643,
   "max_length": 131072,
   "pad_token_id": 151654,
-  "transformers_version": "4.48.0"
+  "transformers_version": "4.48.1"
 }
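
Both hunks touch only the transformers_version field, which is serialization metadata: it records which version of 🤗 Transformers wrote the file, not a runtime requirement. A minimal sketch of how the field can be inspected after downloading the model; the repo id below is a hypothetical placeholder, since the commit page does not name one:

    from transformers import AutoConfig, GenerationConfig

    # Hypothetical repo id for illustration; substitute the actual model repo.
    repo_id = "unsloth/<model-name>"

    # transformers_version comes from config.json; it reflects the library
    # version that serialized the file, not a minimum version to load it.
    config = AutoConfig.from_pretrained(repo_id)
    print(config.transformers_version)       # "4.48.1" after this commit

    # generation_config.json carries its own copy of the same field.
    gen_config = GenerationConfig.from_pretrained(repo_id)
    print(gen_config.transformers_version)   # "4.48.1" as well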