Update README.md
README.md CHANGED
@@ -49,8 +49,8 @@ python3 preprocess.py --corpus_path corpora/cluecorpussmall.txt \
 ```
 python3 pretrain.py --dataset_path cluecorpussmall_lm_seq128_dataset.pt \
                     --vocab_path models/google_zh_vocab.txt \
-                    --output_model_path models/cluecorpussmall_gpt2_seq128_model.bin \
                     --config_path models/gpt2/config.json \
+                    --output_model_path models/cluecorpussmall_gpt2_seq128_model.bin \
                     --world_size 8 --gpu_ranks 0 1 2 3 4 5 6 7 \
                     --total_steps 1000000 --save_checkpoint_steps 100000 --report_steps 50000 \
                     --learning_rate 1e-4 --batch_size 64 \
@@ -72,8 +72,8 @@ python3 preprocess.py --corpus_path corpora/cluecorpussmall.txt \
 python3 pretrain.py --dataset_path cluecorpussmall_lm_seq1024_dataset.pt \
                     --pretrained_model_path models/cluecorpussmall_gpt2_seq128_model.bin-1000000 \
                     --vocab_path models/google_zh_vocab.txt \
-                    --output_model_path models/cluecorpussmall_gpt2_seq1024_model.bin \
                     --config_path models/gpt2/config.json \
+                    --output_model_path models/cluecorpussmall_gpt2_seq1024_model.bin \
                     --world_size 8 --gpu_ranks 0 1 2 3 4 5 6 7 \
                     --total_steps 250000 --save_checkpoint_steps 50000 --report_steps 10000 \
                     --learning_rate 5e-5 --batch_size 16 \