danielhanchen committed on
Commit
4c77876
1 Parent(s): 4202058

Upload Qwen2ForCausalLM

Browse files
Files changed (1) hide show
  1. config.json +1 -0
config.json CHANGED
@@ -24,6 +24,7 @@
24
  "tie_word_embeddings": true,
25
  "torch_dtype": "bfloat16",
26
  "transformers_version": "4.46.1",
 
27
  "use_cache": true,
28
  "use_mrope": false,
29
  "use_sliding_window": false,
 
24
  "tie_word_embeddings": true,
25
  "torch_dtype": "bfloat16",
26
  "transformers_version": "4.46.1",
27
+ "unsloth_fixed": true,
28
  "use_cache": true,
29
  "use_mrope": false,
30
  "use_sliding_window": false,