abhinavkulkarni committed
Commit: 840e9c2
Parent: f2ce496

Upload RWForCausalLM

Files changed (2):
  1. config.json (+3, -3)
  2. generation_config.json (+3, -3)
config.json CHANGED
@@ -7,8 +7,8 @@
   ],
   "attention_dropout": 0.0,
   "auto_map": {
-    "AutoConfig": "configuration_RW.RWConfig",
-    "AutoModelForCausalLM": "modelling_RW.RWForCausalLM"
+    "AutoConfig": "tiiuae/falcon-7b-instruct--configuration_RW.RWConfig",
+    "AutoModelForCausalLM": "tiiuae/falcon-7b-instruct--modelling_RW.RWForCausalLM"
   },
   "bias": false,
   "bos_token_id": 11,
@@ -23,7 +23,7 @@
   "n_layer": 32,
   "parallel_attn": true,
   "torch_dtype": "float16",
-  "transformers_version": "4.28.1",
+  "transformers_version": "4.30.0.dev0",
   "use_cache": true,
   "vocab_size": 65024
 }
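
The `auto_map` entries now use the `owner/repo--module.Class` form, which tells transformers to download and execute `configuration_RW.py` / `modelling_RW.py` from the upstream tiiuae/falcon-7b-instruct repository instead of requiring local copies of those files in this repo. A minimal sketch of how a load would resolve this; the repo id below is a placeholder, since the actual id of this repo is not shown in the diff, and `trust_remote_code=True` is required for any custom-code model:

```python
# Sketch: loading a checkpoint whose auto_map points at remote code.
# "this-user/this-repo" is a hypothetical repo id; the real one is not
# visible in this diff.
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "this-user/this-repo"  # hypothetical

# trust_remote_code=True lets transformers fetch and run the RWConfig /
# RWForCausalLM classes from tiiuae/falcon-7b-instruct, as directed by
# the "tiiuae/falcon-7b-instruct--..." auto_map values above.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,
    torch_dtype="auto",  # config.json pins "torch_dtype": "float16"
)
print(type(model).__name__)  # expected: RWForCausalLM
```

Before this change, the bare `configuration_RW.RWConfig` form required those `.py` files to live in this repo itself; pointing at the upstream repo avoids shipping duplicate copies of the model code.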
generation_config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "_from_model_config": true,
-  "bos_token_id": 11,
-  "eos_token_id": 11,
-  "transformers_version": "4.28.1"
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "transformers_version": "4.30.0.dev0"
 }
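
After this change, `generate()` will stop on token id 2 rather than 11, since generation defaults are read from generation_config.json when it exists; note that config.json in this same commit still carries `"bos_token_id": 11`, so the two files now disagree. A quick check of the new defaults, reusing the hypothetical repo id from the sketch above:

```python
# Sketch: inspecting the generation defaults after this commit.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("this-user/this-repo")  # hypothetical id
print(gen_config.bos_token_id)  # 1 after this commit (was 11)
print(gen_config.eos_token_id)  # 2 after this commit (was 11)
```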