abhinavkulkarni committed
Commit c6647e8
1 Parent(s): 7dcae5a

Upload RWForCausalLM

Files changed (3)
  1. config.json +1 -1
  2. generation_config.json +3 -3
  3. pytorch_model.bin +2 -2
config.json CHANGED
@@ -23,7 +23,7 @@
  "n_layer": 32,
  "parallel_attn": true,
  "torch_dtype": "float16",
- "transformers_version": "4.30.0.dev0",
+ "transformers_version": "4.31.0",
  "use_cache": true,
  "vocab_size": 65024
  }
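The only change in config.json is the transformers version, bumped from the 4.30.0 development build to the 4.31.0 release. A minimal sketch of loading the updated config, assuming a hypothetical placeholder repo id (the actual repo id is not shown in this diff); RW-architecture (Falcon-style) checkpoints of this era typically need trust_remote_code:

```python
# Minimal sketch: load the config committed above.
# "abhinavkulkarni/model-repo" is a hypothetical placeholder repo id.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained(
    "abhinavkulkarni/model-repo", trust_remote_code=True
)
print(cfg.torch_dtype)  # torch.float16 (stored as "float16" in the diff above)
print(cfg.vocab_size)   # 65024
```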
generation_config.json CHANGED
@@ -1,6 +1,6 @@
  {
  "_from_model_config": true,
- "bos_token_id": 1,
- "eos_token_id": 2,
- "transformers_version": "4.30.0.dev0"
+ "bos_token_id": 11,
+ "eos_token_id": 11,
+ "transformers_version": "4.31.0"
  }
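Beyond the version bump, this commit repoints both bos_token_id and eos_token_id from the generic defaults 1 and 2 to token 11, which matches the Falcon-family tokenizer layout (where id 11 is the end-of-text token). A minimal sketch to confirm the shipped generation defaults, again with a hypothetical placeholder repo id:

```python
# Minimal sketch: confirm the generation defaults committed above.
# "abhinavkulkarni/model-repo" is a hypothetical placeholder repo id.
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("abhinavkulkarni/model-repo")
assert gen_cfg.bos_token_id == 11  # was 1 before this commit
assert gen_cfg.eos_token_id == 11  # was 2 before this commit
```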
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:17fd0dcc49adf167c6e9af7990a36442636900be1aa80158af97b6c405eb5498
- size 4755605457
+ oid sha256:93c005075a7b9ab04bd2768b9e814594a51ef069a789a61170973e1deff21081
+ size 4164666457
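The weights are tracked with Git LFS, so only the pointer file changes here; the new checkpoint is about 4.16 GB, roughly 0.59 GB smaller than the previous upload. A minimal sketch to verify a downloaded pytorch_model.bin against the pointer in this commit, with the expected oid and size copied from the diff above:

```python
# Minimal sketch: verify a downloaded pytorch_model.bin against the Git LFS
# pointer committed above (expected oid and size taken from the diff).
import hashlib

EXPECTED_OID = "93c005075a7b9ab04bd2768b9e814594a51ef069a789a61170973e1deff21081"
EXPECTED_SIZE = 4_164_666_457

h, size = hashlib.sha256(), 0
with open("pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        h.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"size mismatch: {size}"
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer in commit c6647e8")
```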