AlexN committed on
Commit
f25a924
1 Parent(s): d7c1a0a

End of training

.ipynb_checkpoints/eval_results-checkpoint.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "epoch": 2.0,
+ "eval_loss": 0.23875188827514648,
+ "eval_runtime": 294.1776,
+ "eval_samples": 5792,
+ "eval_samples_per_second": 19.689,
+ "eval_steps_per_second": 0.309,
+ "eval_wer": 0.3680797679950471
+ }
.ipynb_checkpoints/run-checkpoint.sh CHANGED
@@ -36,6 +36,6 @@ python run_speech_recognition_ctc.py \
  --fp16 \
  --group_by_length \
  --preprocessing_num_workers="64" \
- --do_train --do_eval \
+ --do_eval \
  --load_best_model_at_end \
  --push_to_hub
all_results.json CHANGED
@@ -1,11 +1,11 @@
  {
  "epoch": 2.0,
- "eval_loss": 0.23875188827514648,
- "eval_runtime": 294.1776,
- "eval_samples": 5792,
- "eval_samples_per_second": 19.689,
- "eval_steps_per_second": 0.309,
- "eval_wer": 0.3680797679950471,
+ "eval_loss": 21.909679412841797,
+ "eval_runtime": 271.2636,
+ "eval_samples": 4469,
+ "eval_samples_per_second": 16.475,
+ "eval_steps_per_second": 0.258,
+ "eval_wer": 1.0,
  "train_loss": 1.442369053426242,
  "train_runtime": 53680.5392,
  "train_samples": 442265,
eval_results.json CHANGED
@@ -1,9 +1,8 @@
  {
- "epoch": 2.0,
- "eval_loss": 0.23875188827514648,
- "eval_runtime": 294.1776,
- "eval_samples": 5792,
- "eval_samples_per_second": 19.689,
- "eval_steps_per_second": 0.309,
- "eval_wer": 0.3680797679950471
+ "eval_loss": 21.909679412841797,
+ "eval_runtime": 271.2636,
+ "eval_samples": 4469,
+ "eval_samples_per_second": 16.475,
+ "eval_steps_per_second": 0.258,
+ "eval_wer": 1.0
  }
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:8f9c6a7fe50ee84c94a3ae149eea45203dc369fa4e7bcf46fbc4367f2385e4d3
+ oid sha256:6afb0187b51e7ce5d940ad3d192ccb5384c6c92b414d047431d6ebb6a0b4729b
  size 1262821553
special_tokens_map.json CHANGED
@@ -1 +1 @@
- {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "additional_special_tokens": [{"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<pad>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}]}
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "additional_special_tokens": [{"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<pad>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<pad>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}]}
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ea11d65b74bc35cdbdd475c519fd3f6ccaa32fbbc7747d0d176d1fc6825c659e
+ oid sha256:1f3a67b3f8a9b2a576d3736f6fe41f32790252519745bd9adf7f9fc9090a03b0
  size 3055
wandb/debug-internal.log CHANGED
@@ -1 +1 @@
- run-20220130_191954-1mtmnz5y/logs/debug-internal.log
+ run-20220201_103004-1yfj7vwy/logs/debug-internal.log
wandb/debug.log CHANGED
@@ -1 +1 @@
- run-20220130_191954-1mtmnz5y/logs/debug.log
+ run-20220201_103004-1yfj7vwy/logs/debug.log
wandb/latest-run CHANGED
@@ -1 +1 @@
- run-20220130_191954-1mtmnz5y
+ run-20220201_103004-1yfj7vwy
wandb/run-20220130_191954-1mtmnz5y/files/config.yaml CHANGED
@@ -4829,7 +4829,13 @@ _wandb:
  - 1
  - 5
  - 11
+ 2:
+ - 1
+ - 5
+ - 11
  3:
+ - 1
+ - 7
  - 13
  4: 3.8.8
  5: 0.12.9
wandb/run-20220130_191954-1mtmnz5y/files/output.log CHANGED
@@ -32070,3 +32070,22 @@ Loading best model from ./checkpoint-18000 (score: 0.22597630321979523).
  Saving model checkpoint to ./ckpoint-18000 (score: 0.22597630321979523).
  Saving model checkpoint to ./rch_model.bin (score: 0.22597630321979523).
  Model weights saved in ./pytorch_model.bin (score: 0.22597630321979523).
+ Model weights saved in ./pytorch_model.bin (score: 0.22597630321979523).
+ 02/01/2022 02:46:43 - WARNING - huggingface_hub.repository - Several commits (52) will be pushed upstream.
+ Several commits (52) will be pushed upstream.core: 0.22597630321979523).
+ Upload file wandb/run-20220130_191954-1mtmnz5y/run-1mtmnz5y.wandb: 0%| | 3.39k/200M [00:00<?, ?B/s]
+ Upload file wandb/run-20220130_191954-1mtmnz5y/run-1mtmnz5y.wandb: 9%|████████▎ | 17.6M/200M [00:02<00:17, 10.7MB/s]
+ Upload file wandb/run-20220130_191954-1mtmnz5y/run-1mtmnz5y.wandb: 23%|█████████████████████▍ | 45.6M/200M [00:03<00:08, 18.8MB/s]
+ Upload file wandb/run-20220130_191954-1mtmnz5y/run-1mtmnz5y.wandb: 52%|█████████████████████████████████████████████████ | 103M/200M [00:05<00:04, 25.0MB/s]
+ Upload file wandb/run-20220130_191954-1mtmnz5y/run-1mtmnz5y.wandb: 52%|█████████████████████████████████████████████████ | 103M/200M [00:05<00:04, 25.0MB/s]
+ Upload file wandb/run-20220130_191954-1mtmnz5y/run-1mtmnz5y.wandb: 52%|█████████████████████████████████████████████████ | 103M/200M [00:05<00:04, 25.0MB/s]
+ 02/01/2022 02:46:56 - WARNING - huggingface_hub.repository - To https://huggingface.co/AlexN/xls-r-300m-fr
+ ! [rejected] main -> main (fetch first)
+ error: failed to push some refs to 'https://huggingface.co/AlexN/xls-r-300m-fr'
+ hint: Updates were rejected because the remote contains work that you do
+ hint: not have locally. This is usually caused by another repository pushing
+ hint: to the same ref. You may want to first integrate the remote changes
+ hint: (e.g., 'git pull ...') before pushing again.
+ hint: See the 'Note about fast-forwards' in 'git push --help' for details.'origin', 'main']' returned non-zero exit status 1.██████████████████████████████████████████| 200M/200M [00:09<00:00, 22.6MB/s]
+ hint: See the 'Note about fast-forwards' in 'git push --help' for details.'origin', 'main']' returned non-zero exit status 1.██████████████████████████████████████████| 200M/200M [00:09<00:00, 22.6MB/s]
+ hint: See the 'Note about fast-forwards' in 'git push --help' for details.'origin', 'main']' returned non-zero exit status 1.██████████████████████████████████████████| 200M/200M [00:09<00:00, 22.6MB/s]
wandb/run-20220130_191954-1mtmnz5y/files/wandb-summary.json CHANGED
The diff for this file is too large to render. See raw diff
 
wandb/run-20220130_191954-1mtmnz5y/logs/debug-internal.log CHANGED
@@ -52999,3 +52999,131 @@
  2022-02-01 02:46:13,940 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: stop_status
  2022-02-01 02:46:29,111 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: stop_status
  2022-02-01 02:46:29,111 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: stop_status
53002
+ 2022-02-01 02:46:41,749 DEBUG SenderThread:55777 [sender.py:send():234] send: stats
53003
+ 2022-02-01 02:46:44,280 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: stop_status
53004
+ 2022-02-01 02:46:44,281 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: stop_status
53005
+ 2022-02-01 02:46:44,486 INFO Thread-8 :55777 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/output.log
53006
+ 2022-02-01 02:46:48,489 INFO Thread-8 :55777 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/output.log
53007
+ 2022-02-01 02:46:50,491 INFO Thread-8 :55777 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/output.log
53008
+ 2022-02-01 02:46:52,493 INFO Thread-8 :55777 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/output.log
53009
+ 2022-02-01 02:46:54,494 INFO Thread-8 :55777 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/output.log
53010
+ 2022-02-01 02:46:56,495 INFO Thread-8 :55777 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/output.log
53011
+ 2022-02-01 02:46:58,497 INFO Thread-8 :55777 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/output.log
53012
+ 2022-02-01 02:46:59,808 DEBUG SenderThread:55777 [sender.py:send():234] send: telemetry
53013
+ 2022-02-01 02:46:59,809 DEBUG SenderThread:55777 [sender.py:send():234] send: exit
53014
+ 2022-02-01 02:46:59,809 INFO SenderThread:55777 [sender.py:send_exit():366] handling exit code: 1
53015
+ 2022-02-01 02:46:59,809 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: poll_exit
53016
+ 2022-02-01 02:46:59,810 INFO SenderThread:55777 [sender.py:send_exit():368] handling runtime: 113223
53017
+ 2022-02-01 02:46:59,847 INFO SenderThread:55777 [sender.py:_save_file():939] saving file wandb-summary.json with policy end
53018
+ 2022-02-01 02:46:59,848 INFO SenderThread:55777 [sender.py:send_exit():374] send defer
53019
+ 2022-02-01 02:46:59,848 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: poll_exit
53020
+ 2022-02-01 02:46:59,849 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: defer
53021
+ 2022-02-01 02:46:59,849 INFO HandlerThread:55777 [handler.py:handle_request_defer():147] handle defer: 0
53022
+ 2022-02-01 02:46:59,849 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: defer
53023
+ 2022-02-01 02:46:59,849 INFO SenderThread:55777 [sender.py:send_request_defer():383] handle sender defer: 0
53024
+ 2022-02-01 02:46:59,849 INFO SenderThread:55777 [sender.py:transition_state():387] send defer: 1
53025
+ 2022-02-01 02:46:59,850 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: defer
53026
+ 2022-02-01 02:46:59,850 INFO HandlerThread:55777 [handler.py:handle_request_defer():147] handle defer: 1
53027
+ 2022-02-01 02:46:59,911 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: defer
53028
+ 2022-02-01 02:46:59,911 INFO SenderThread:55777 [sender.py:send_request_defer():383] handle sender defer: 1
53029
+ 2022-02-01 02:46:59,911 INFO SenderThread:55777 [sender.py:transition_state():387] send defer: 2
53030
+ 2022-02-01 02:46:59,912 DEBUG SenderThread:55777 [sender.py:send():234] send: stats
53031
+ 2022-02-01 02:46:59,913 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: defer
53032
+ 2022-02-01 02:46:59,913 INFO HandlerThread:55777 [handler.py:handle_request_defer():147] handle defer: 2
53033
+ 2022-02-01 02:46:59,913 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: defer
53034
+ 2022-02-01 02:46:59,913 INFO SenderThread:55777 [sender.py:send_request_defer():383] handle sender defer: 2
53035
+ 2022-02-01 02:46:59,914 INFO SenderThread:55777 [sender.py:transition_state():387] send defer: 3
53036
+ 2022-02-01 02:46:59,914 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: defer
53037
+ 2022-02-01 02:46:59,914 INFO HandlerThread:55777 [handler.py:handle_request_defer():147] handle defer: 3
53038
+ 2022-02-01 02:46:59,955 DEBUG SenderThread:55777 [sender.py:send():234] send: summary
53039
+ 2022-02-01 02:46:59,957 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: poll_exit
53040
+ 2022-02-01 02:47:00,012 INFO SenderThread:55777 [sender.py:_save_file():939] saving file wandb-summary.json with policy end
53041
+ 2022-02-01 02:47:00,012 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: defer
53042
+ 2022-02-01 02:47:00,013 INFO SenderThread:55777 [sender.py:send_request_defer():383] handle sender defer: 3
53043
+ 2022-02-01 02:47:00,013 INFO SenderThread:55777 [sender.py:transition_state():387] send defer: 4
53044
+ 2022-02-01 02:47:00,013 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: poll_exit
53045
+ 2022-02-01 02:47:00,014 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: defer
53046
+ 2022-02-01 02:47:00,014 INFO HandlerThread:55777 [handler.py:handle_request_defer():147] handle defer: 4
53047
+ 2022-02-01 02:47:00,014 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: defer
53048
+ 2022-02-01 02:47:00,014 INFO SenderThread:55777 [sender.py:send_request_defer():383] handle sender defer: 4
53049
+ 2022-02-01 02:47:00,116 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: poll_exit
53050
+ 2022-02-01 02:47:00,499 INFO Thread-8 :55777 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/wandb-summary.json
53051
+ 2022-02-01 02:47:00,499 INFO Thread-8 :55777 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/output.log
53052
+ 2022-02-01 02:47:00,818 INFO SenderThread:55777 [sender.py:transition_state():387] send defer: 5
53053
+ 2022-02-01 02:47:00,819 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: poll_exit
53054
+ 2022-02-01 02:47:00,820 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: defer
53055
+ 2022-02-01 02:47:00,820 INFO HandlerThread:55777 [handler.py:handle_request_defer():147] handle defer: 5
53056
+ 2022-02-01 02:47:00,820 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: defer
53057
+ 2022-02-01 02:47:00,820 INFO SenderThread:55777 [sender.py:send_request_defer():383] handle sender defer: 5
53058
+ 2022-02-01 02:47:00,820 INFO SenderThread:55777 [dir_watcher.py:finish():283] shutting down directory watcher
53059
+ 2022-02-01 02:47:00,922 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: poll_exit
53060
+ 2022-02-01 02:47:01,501 INFO SenderThread:55777 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/config.yaml
53061
+ 2022-02-01 02:47:01,501 INFO SenderThread:55777 [dir_watcher.py:finish():313] scan: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files
53062
+ 2022-02-01 02:47:01,502 INFO SenderThread:55777 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/output.log output.log
53063
+ 2022-02-01 02:47:01,502 INFO SenderThread:55777 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/wandb-metadata.json wandb-metadata.json
53064
+ 2022-02-01 02:47:01,502 INFO SenderThread:55777 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/config.yaml config.yaml
53065
+ 2022-02-01 02:47:01,503 INFO SenderThread:55777 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/conda-environment.yaml conda-environment.yaml
53066
+ 2022-02-01 02:47:01,509 INFO SenderThread:55777 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/wandb-summary.json wandb-summary.json
53067
+ 2022-02-01 02:47:01,509 INFO SenderThread:55777 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/requirements.txt requirements.txt
53068
+ 2022-02-01 02:47:01,510 INFO SenderThread:55777 [sender.py:transition_state():387] send defer: 6
53069
+ 2022-02-01 02:47:01,510 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: poll_exit
53070
+ 2022-02-01 02:47:01,511 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: defer
53071
+ 2022-02-01 02:47:01,511 INFO HandlerThread:55777 [handler.py:handle_request_defer():147] handle defer: 6
53072
+ 2022-02-01 02:47:01,512 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: defer
53073
+ 2022-02-01 02:47:01,512 INFO SenderThread:55777 [sender.py:send_request_defer():383] handle sender defer: 6
53074
+ 2022-02-01 02:47:01,513 INFO SenderThread:55777 [file_pusher.py:finish():177] shutting down file pusher
53075
+ 2022-02-01 02:47:01,613 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: poll_exit
53076
+ 2022-02-01 02:47:01,613 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: poll_exit
53077
+ 2022-02-01 02:47:01,716 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: poll_exit
53078
+ 2022-02-01 02:47:01,719 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: poll_exit
53079
+ 2022-02-01 02:47:01,827 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: poll_exit
53080
+ 2022-02-01 02:47:01,827 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: poll_exit
53081
+ 2022-02-01 02:47:01,930 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: poll_exit
53082
+ 2022-02-01 02:47:01,930 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: poll_exit
53083
+ 2022-02-01 02:47:02,033 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: poll_exit
53084
+ 2022-02-01 02:47:02,034 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: poll_exit
53085
+ 2022-02-01 02:47:02,136 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: poll_exit
53086
+ 2022-02-01 02:47:02,136 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: poll_exit
53087
+ 2022-02-01 02:47:02,191 INFO Thread-15 :55777 [upload_job.py:push():137] Uploaded file /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/requirements.txt
53088
+ 2022-02-01 02:47:02,239 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: poll_exit
53089
+ 2022-02-01 02:47:02,239 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: poll_exit
53090
+ 2022-02-01 02:47:02,294 INFO Thread-13 :55777 [upload_job.py:push():137] Uploaded file /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/config.yaml
53091
+ 2022-02-01 02:47:02,342 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: poll_exit
53092
+ 2022-02-01 02:47:02,343 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: poll_exit
53093
+ 2022-02-01 02:47:02,375 INFO Thread-12 :55777 [upload_job.py:push():137] Uploaded file /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/output.log
53094
+ 2022-02-01 02:47:02,408 INFO Thread-14 :55777 [upload_job.py:push():137] Uploaded file /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/files/wandb-summary.json
53095
+ 2022-02-01 02:47:02,445 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: poll_exit
53096
+ 2022-02-01 02:47:02,445 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: poll_exit
53097
+ 2022-02-01 02:47:02,547 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: poll_exit
53098
+ 2022-02-01 02:47:02,547 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: poll_exit
53099
+ 2022-02-01 02:47:02,609 INFO Thread-7 :55777 [sender.py:transition_state():387] send defer: 7
53100
+ 2022-02-01 02:47:02,610 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: defer
53101
+ 2022-02-01 02:47:02,610 INFO HandlerThread:55777 [handler.py:handle_request_defer():147] handle defer: 7
53102
+ 2022-02-01 02:47:02,610 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: defer
53103
+ 2022-02-01 02:47:02,610 INFO SenderThread:55777 [sender.py:send_request_defer():383] handle sender defer: 7
53104
+ 2022-02-01 02:47:02,649 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: poll_exit
53105
+ 2022-02-01 02:47:03,641 INFO SenderThread:55777 [sender.py:transition_state():387] send defer: 8
53106
+ 2022-02-01 02:47:03,642 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: poll_exit
53107
+ 2022-02-01 02:47:03,643 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: defer
53108
+ 2022-02-01 02:47:03,643 INFO HandlerThread:55777 [handler.py:handle_request_defer():147] handle defer: 8
53109
+ 2022-02-01 02:47:03,644 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: defer
53110
+ 2022-02-01 02:47:03,644 INFO SenderThread:55777 [sender.py:send_request_defer():383] handle sender defer: 8
53111
+ 2022-02-01 02:47:03,644 INFO SenderThread:55777 [sender.py:transition_state():387] send defer: 9
53112
+ 2022-02-01 02:47:03,646 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: defer
53113
+ 2022-02-01 02:47:03,646 DEBUG SenderThread:55777 [sender.py:send():234] send: final
53114
+ 2022-02-01 02:47:03,647 INFO HandlerThread:55777 [handler.py:handle_request_defer():147] handle defer: 9
53115
+ 2022-02-01 02:47:03,647 DEBUG SenderThread:55777 [sender.py:send():234] send: footer
53116
+ 2022-02-01 02:47:03,647 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: defer
53117
+ 2022-02-01 02:47:03,647 INFO SenderThread:55777 [sender.py:send_request_defer():383] handle sender defer: 9
53118
+ 2022-02-01 02:47:03,745 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: poll_exit
53119
+ 2022-02-01 02:47:03,746 DEBUG SenderThread:55777 [sender.py:send_request():248] send_request: poll_exit
53120
+ 2022-02-01 02:47:03,746 INFO SenderThread:55777 [file_pusher.py:join():182] waiting for file pusher
53121
+ 2022-02-01 02:47:04,051 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: get_summary
53122
+ 2022-02-01 02:47:04,134 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: sampled_history
53123
+ 2022-02-01 02:47:04,141 DEBUG HandlerThread:55777 [handler.py:handle_request():130] handle_request: shutdown
53124
+ 2022-02-01 02:47:04,141 INFO HandlerThread:55777 [handler.py:finish():731] shutting down handler
53125
+ 2022-02-01 02:47:04,646 INFO WriterThread:55777 [datastore.py:close():281] close: /workspace/xls-r-300m-fr/wandb/run-20220130_191954-1mtmnz5y/run-1mtmnz5y.wandb
53126
+ 2022-02-01 02:47:05,049 INFO SenderThread:55777 [sender.py:finish():1070] shutting down sender
53127
+ 2022-02-01 02:47:05,049 INFO SenderThread:55777 [file_pusher.py:finish():177] shutting down file pusher
53128
+ 2022-02-01 02:47:05,050 INFO SenderThread:55777 [file_pusher.py:join():182] waiting for file pusher
53129
+ 2022-02-01 02:47:05,057 INFO MainThread:55777 [internal.py:handle_exit():77] Internal process exited
wandb/run-20220130_191954-1mtmnz5y/logs/debug.log CHANGED
@@ -22,3 +22,141 @@ config: {}
  2022-01-30 19:19:56,771 INFO MainThread:26084 [wandb_init.py:init():633] run started, returning control to user process
  2022-01-30 19:19:56,774 INFO MainThread:26084 [wandb_run.py:_config_callback():956] config_cb None None {'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'architectures': ['Wav2Vec2ForPreTraining'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': 218, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'facebook/wav2vec2-xls-r-300m', 'transformers_version': '4.17.0.dev0', 'feat_extract_dropout': 0.0, 'model_type': 'wav2vec2', 'num_feat_extract_layers': 7, 'hidden_size': 1024, 'feat_extract_norm': 'layer', 'feat_extract_activation': 'gelu', 'conv_dim': [512, 512, 512, 512, 512, 512, 512], 'conv_stride': [5, 2, 2, 2, 2, 2, 2], 'conv_kernel': [10, 3, 3, 3, 3, 2, 2], 'conv_bias': True, 'num_conv_pos_embeddings': 128, 'num_conv_pos_embedding_groups': 16, 'num_hidden_layers': 24, 'intermediate_size': 4096, 'hidden_act': 'gelu', 'num_attention_heads': 16, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.05, 'feat_proj_dropout': 0.0, 'final_dropout': 0.0, 'layerdrop': 0.0, 'layer_norm_eps': 1e-05, 'initializer_range': 0.02, 'vocab_size': 219, 'do_stable_layer_norm': True, 'use_weighted_layer_sum': False, 'apply_spec_augment': True, 'mask_time_prob': 0.6, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.25, 'mask_feature_length': 10, 'mask_feature_min_masks': 0, 'num_codevectors_per_group': 320, 'num_codevector_groups': 2, 'contrastive_logits_temperature': 0.1, 'feat_quantizer_dropout': 0.0, 'num_negatives': 100, 'codevector_dim': 768, 'proj_codevector_dim': 768, 'diversity_loss_weight': 0.1, 'ctc_loss_reduction': 'mean', 'ctc_zero_infinity': False, 'add_adapter': False, 'adapter_kernel_size': 3, 'adapter_stride': 2, 'num_adapter_layers': 3, 'output_hidden_size': 1024, 'classifier_proj_size': 256, 'tdnn_dim': [512, 512, 512, 512, 1500], 'tdnn_kernel': [5, 3, 3, 1, 1], 'tdnn_dilation': [1, 2, 3, 1, 1], 'xvector_output_dim': 512, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': True, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 64, 'per_device_eval_batch_size': 64, 'per_gpu_train_batch_size': 'None', 'per_gpu_eval_batch_size': 'None', 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': 'None', 'learning_rate': 0.0001, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 4.0, 'max_steps': -1, 'lr_scheduler_type': 'linear', 'warmup_ratio': 
0.0, 'warmup_steps': 2700, 'log_level': -1, 'log_level_replica': -1, 'log_on_each_node': True, 'logging_dir': './runs/Jan30_18-03-27_job-1abccd0a-3293-4ffe-8274-9e8f841f653f', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 500, 'save_total_limit': 2, 'save_on_each_node': False, 'no_cuda': False, 'seed': 42, 'bf16': False, 'fp16': True, 'fp16_opt_level': 'O1', 'half_precision_backend': 'amp', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': 'None', 'local_rank': -1, 'xpu_backend': 'None', 'tpu_num_cores': 'None', 'tpu_metrics_debug': False, 'debug': '[]', 'dataloader_drop_last': False, 'eval_steps': 500, 'dataloader_num_workers': 0, 'past_index': -1, 'run_name': 'xls-r-300m-fr', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': 'None', 'load_best_model_at_end': True, 'metric_for_best_model': 'loss', 'greater_is_better': False, 'ignore_data_skip': False, 'sharded_ddp': '[]', 'deepspeed': 'None', 'label_smoothing_factor': 0.0, 'optim': 'adamw_hf', 'adafactor': False, 'group_by_length': True, 'length_column_name': 'input_length', 'report_to': "['wandb']", 'ddp_find_unused_parameters': 'None', 'ddp_bucket_cap_mb': 'None', 'dataloader_pin_memory': True, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': 'None', 'hub_model_id': 'None', 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'gradient_checkpointing': True, 'fp16_backend': 'auto', 'push_to_hub_model_id': 'None', 'push_to_hub_organization': 'None', 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', '_n_gpu': 1, 'mp_parameters': '', 'train_batch_size': 64, 'eval_batch_size': 64}
  2022-01-30 19:19:56,778 INFO MainThread:26084 [wandb_watch.py:watch():43] Watching
25
+ 2022-02-01 02:46:57,430 INFO MainThread:26084 [wandb_run.py:_atexit_cleanup():1780] got exitcode: 1
26
+ 2022-02-01 02:46:57,433 INFO MainThread:26084 [wandb_run.py:_restore():1752] restore
27
+ 2022-02-01 02:46:59,849 INFO MainThread:26084 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
28
+ wandb_count: 1
29
+ }
30
+ pusher_stats {
31
+ uploaded_bytes: 2180
32
+ total_bytes: 2180
33
+ }
34
+
35
+ 2022-02-01 02:47:00,014 INFO MainThread:26084 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
36
+ wandb_count: 1
37
+ }
38
+ pusher_stats {
39
+ uploaded_bytes: 2180
40
+ total_bytes: 2180
41
+ }
42
+
43
+ 2022-02-01 02:47:00,820 INFO MainThread:26084 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
44
+ wandb_count: 1
45
+ }
46
+ pusher_stats {
47
+ uploaded_bytes: 2180
48
+ total_bytes: 2180
49
+ }
50
+
51
+ 2022-02-01 02:47:01,511 INFO MainThread:26084 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
52
+ wandb_count: 2
53
+ }
54
+ pusher_stats {
55
+ uploaded_bytes: 2180
56
+ total_bytes: 2925431
57
+ }
58
+
59
+ 2022-02-01 02:47:01,614 INFO MainThread:26084 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
60
+ wandb_count: 5
61
+ }
62
+ pusher_stats {
63
+ uploaded_bytes: 2180
64
+ total_bytes: 3863613
65
+ }
66
+
67
+ 2022-02-01 02:47:01,725 INFO MainThread:26084 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
68
+ wandb_count: 5
69
+ }
70
+ pusher_stats {
71
+ uploaded_bytes: 2180
72
+ total_bytes: 3863613
73
+ }
74
+
75
+ 2022-02-01 02:47:01,828 INFO MainThread:26084 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
76
+ wandb_count: 5
77
+ }
78
+ pusher_stats {
79
+ uploaded_bytes: 3863613
80
+ total_bytes: 3863613
81
+ }
82
+
83
+ 2022-02-01 02:47:01,932 INFO MainThread:26084 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
84
+ wandb_count: 5
85
+ }
86
+ pusher_stats {
87
+ uploaded_bytes: 3863613
88
+ total_bytes: 3863613
89
+ }
90
+
91
+ 2022-02-01 02:47:02,035 INFO MainThread:26084 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
92
+ wandb_count: 5
93
+ }
94
+ pusher_stats {
95
+ uploaded_bytes: 3863613
96
+ total_bytes: 3863613
97
+ }
98
+
99
+ 2022-02-01 02:47:02,137 INFO MainThread:26084 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
100
+ wandb_count: 5
101
+ }
102
+ pusher_stats {
103
+ uploaded_bytes: 3863613
104
+ total_bytes: 3863613
105
+ }
106
+
107
+ 2022-02-01 02:47:02,241 INFO MainThread:26084 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
108
+ wandb_count: 5
109
+ }
110
+ pusher_stats {
111
+ uploaded_bytes: 3863613
112
+ total_bytes: 3863613
113
+ }
114
+
115
+ 2022-02-01 02:47:02,344 INFO MainThread:26084 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
116
+ wandb_count: 5
117
+ }
118
+ pusher_stats {
119
+ uploaded_bytes: 3863613
120
+ total_bytes: 3863613
121
+ }
122
+
123
+ 2022-02-01 02:47:02,446 INFO MainThread:26084 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
124
+ wandb_count: 5
125
+ }
126
+ pusher_stats {
127
+ uploaded_bytes: 3863613
128
+ total_bytes: 3863613
129
+ }
130
+
131
+ 2022-02-01 02:47:02,548 INFO MainThread:26084 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
132
+ wandb_count: 5
133
+ }
134
+ pusher_stats {
135
+ uploaded_bytes: 3863613
136
+ total_bytes: 3863613
137
+ }
138
+
139
+ 2022-02-01 02:47:03,643 INFO MainThread:26084 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
140
+ wandb_count: 5
141
+ }
142
+ pusher_stats {
143
+ uploaded_bytes: 3863613
144
+ total_bytes: 3863613
145
+ }
146
+
147
+ 2022-02-01 02:47:04,050 INFO MainThread:26084 [wandb_run.py:_wait_for_finish():1912] got exit ret: done: true
148
+ exit_result {
149
+ }
150
+ file_counts {
151
+ wandb_count: 5
152
+ }
153
+ pusher_stats {
154
+ uploaded_bytes: 3863613
155
+ total_bytes: 3863613
156
+ }
157
+ local_info {
158
+ }
159
+
160
+ 2022-02-01 02:47:05,238 INFO MainThread:26084 [wandb_run.py:_append_history():2130] rendering history
161
+ 2022-02-01 02:47:05,240 INFO MainThread:26084 [wandb_run.py:_append_summary():2085] rendering summary
162
+ 2022-02-01 02:47:05,242 INFO MainThread:26084 [wandb_run.py:_append_files():2180] logging synced files
wandb/run-20220130_191954-1mtmnz5y/run-1mtmnz5y.wandb CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:7f5b593fdaefa910ae05bc5af97f6bc8d25ada9af2e6ec67735515b11ed2b47e
- size 209609416
+ oid sha256:b4a39ee3a8244cf084758be4c81d3388cff41451e299f174a171843449221b2e
+ size 210409571
wandb/run-20220201_103004-1yfj7vwy/files/conda-environment.yaml ADDED
File without changes
wandb/run-20220201_103004-1yfj7vwy/files/config.yaml ADDED
@@ -0,0 +1,668 @@
1
+ wandb_version: 1
2
+
3
+ _n_gpu:
4
+ desc: null
5
+ value: 1
6
+ _name_or_path:
7
+ desc: null
8
+ value: facebook/wav2vec2-xls-r-300m
9
+ _wandb:
10
+ desc: null
11
+ value:
12
+ cli_version: 0.12.9
13
+ framework: huggingface
14
+ huggingface_version: 4.17.0.dev0
15
+ is_jupyter_run: false
16
+ is_kaggle_kernel: false
17
+ m:
18
+ - 1: train/global_step
19
+ 6:
20
+ - 3
21
+ - 1: eval/loss
22
+ 5: 1
23
+ 6:
24
+ - 1
25
+ - 1: eval/wer
26
+ 5: 1
27
+ 6:
28
+ - 1
29
+ - 1: eval/runtime
30
+ 5: 1
31
+ 6:
32
+ - 1
33
+ - 1: eval/samples_per_second
34
+ 5: 1
35
+ 6:
36
+ - 1
37
+ - 1: eval/steps_per_second
38
+ 5: 1
39
+ 6:
40
+ - 1
41
+ python_version: 3.8.8
42
+ start_time: 1643711404
43
+ t:
44
+ 1:
45
+ - 1
46
+ - 5
47
+ - 11
48
+ 3:
49
+ - 13
50
+ 4: 3.8.8
51
+ 5: 0.12.9
52
+ 6: 4.17.0.dev0
53
+ 8:
54
+ - 5
55
+ activation_dropout:
56
+ desc: null
57
+ value: 0.05
58
+ adafactor:
59
+ desc: null
60
+ value: false
61
+ adam_beta1:
62
+ desc: null
63
+ value: 0.9
64
+ adam_beta2:
65
+ desc: null
66
+ value: 0.999
67
+ adam_epsilon:
68
+ desc: null
69
+ value: 1.0e-08
70
+ adapter_kernel_size:
71
+ desc: null
72
+ value: 3
73
+ adapter_stride:
74
+ desc: null
75
+ value: 2
76
+ add_adapter:
77
+ desc: null
78
+ value: false
79
+ add_cross_attention:
80
+ desc: null
81
+ value: false
82
+ apply_spec_augment:
83
+ desc: null
84
+ value: true
85
+ architectures:
86
+ desc: null
87
+ value:
88
+ - Wav2Vec2ForPreTraining
89
+ attention_dropout:
90
+ desc: null
91
+ value: 0.0
92
+ bad_words_ids:
93
+ desc: null
94
+ value: null
95
+ bf16:
96
+ desc: null
97
+ value: false
98
+ bf16_full_eval:
99
+ desc: null
100
+ value: false
101
+ bos_token_id:
102
+ desc: null
103
+ value: 1
104
+ chunk_size_feed_forward:
105
+ desc: null
106
+ value: 0
107
+ classifier_proj_size:
108
+ desc: null
109
+ value: 256
110
+ codevector_dim:
111
+ desc: null
112
+ value: 768
113
+ contrastive_logits_temperature:
114
+ desc: null
115
+ value: 0.1
116
+ conv_bias:
117
+ desc: null
118
+ value: true
119
+ conv_dim:
120
+ desc: null
121
+ value:
122
+ - 512
123
+ - 512
124
+ - 512
125
+ - 512
126
+ - 512
127
+ - 512
128
+ - 512
129
+ conv_kernel:
130
+ desc: null
131
+ value:
132
+ - 10
133
+ - 3
134
+ - 3
135
+ - 3
136
+ - 3
137
+ - 2
138
+ - 2
139
+ conv_stride:
140
+ desc: null
141
+ value:
142
+ - 5
143
+ - 2
144
+ - 2
145
+ - 2
146
+ - 2
147
+ - 2
148
+ - 2
149
+ cross_attention_hidden_size:
150
+ desc: null
151
+ value: null
152
+ ctc_loss_reduction:
153
+ desc: null
154
+ value: mean
155
+ ctc_zero_infinity:
156
+ desc: null
157
+ value: false
158
+ dataloader_drop_last:
159
+ desc: null
160
+ value: false
161
+ dataloader_num_workers:
162
+ desc: null
163
+ value: 0
164
+ dataloader_pin_memory:
165
+ desc: null
166
+ value: true
167
+ ddp_bucket_cap_mb:
168
+ desc: null
169
+ value: None
170
+ ddp_find_unused_parameters:
171
+ desc: null
172
+ value: None
173
+ debug:
174
+ desc: null
175
+ value: '[]'
176
+ decoder_start_token_id:
177
+ desc: null
178
+ value: null
179
+ deepspeed:
180
+ desc: null
181
+ value: None
182
+ disable_tqdm:
183
+ desc: null
184
+ value: false
185
+ diversity_loss_weight:
186
+ desc: null
187
+ value: 0.1
188
+ diversity_penalty:
189
+ desc: null
190
+ value: 0.0
191
+ do_eval:
192
+ desc: null
193
+ value: true
194
+ do_predict:
195
+ desc: null
196
+ value: false
197
+ do_sample:
198
+ desc: null
199
+ value: false
200
+ do_stable_layer_norm:
201
+ desc: null
202
+ value: true
203
+ do_train:
204
+ desc: null
205
+ value: false
206
+ early_stopping:
207
+ desc: null
208
+ value: false
209
+ encoder_no_repeat_ngram_size:
210
+ desc: null
211
+ value: 0
212
+ eos_token_id:
213
+ desc: null
214
+ value: 2
215
+ eval_accumulation_steps:
216
+ desc: null
217
+ value: None
218
+ eval_batch_size:
219
+ desc: null
220
+ value: 64
221
+ eval_steps:
222
+ desc: null
223
+ value: 500
224
+ evaluation_strategy:
225
+ desc: null
226
+ value: steps
227
+ feat_extract_activation:
228
+ desc: null
229
+ value: gelu
230
+ feat_extract_dropout:
231
+ desc: null
232
+ value: 0.0
233
+ feat_extract_norm:
234
+ desc: null
235
+ value: layer
236
+ feat_proj_dropout:
237
+ desc: null
238
+ value: 0.0
239
+ feat_quantizer_dropout:
240
+ desc: null
241
+ value: 0.0
242
+ final_dropout:
243
+ desc: null
244
+ value: 0.0
245
+ finetuning_task:
246
+ desc: null
247
+ value: null
248
+ forced_bos_token_id:
249
+ desc: null
250
+ value: null
251
+ forced_eos_token_id:
252
+ desc: null
253
+ value: null
254
+ fp16:
255
+ desc: null
256
+ value: true
257
+ fp16_backend:
258
+ desc: null
259
+ value: auto
260
+ fp16_full_eval:
261
+ desc: null
262
+ value: false
263
+ fp16_opt_level:
264
+ desc: null
265
+ value: O1
266
+ gradient_accumulation_steps:
267
+ desc: null
268
+ value: 1
269
+ gradient_checkpointing:
270
+ desc: null
271
+ value: true
272
+ greater_is_better:
273
+ desc: null
274
+ value: false
275
+ group_by_length:
276
+ desc: null
277
+ value: true
278
+ half_precision_backend:
279
+ desc: null
280
+ value: amp
281
+ hidden_act:
282
+ desc: null
283
+ value: gelu
284
+ hidden_dropout:
285
+ desc: null
286
+ value: 0.0
287
+ hidden_size:
288
+ desc: null
289
+ value: 1024
290
+ hub_model_id:
291
+ desc: null
292
+ value: None
293
+ hub_strategy:
294
+ desc: null
295
+ value: every_save
296
+ hub_token:
297
+ desc: null
298
+ value: <HUB_TOKEN>
299
+ id2label:
300
+ desc: null
301
+ value:
302
+ '0': LABEL_0
303
+ '1': LABEL_1
304
+ ignore_data_skip:
305
+ desc: null
306
+ value: false
307
+ initializer_range:
308
+ desc: null
309
+ value: 0.02
310
+ intermediate_size:
311
+ desc: null
312
+ value: 4096
313
+ is_decoder:
314
+ desc: null
315
+ value: false
316
+ is_encoder_decoder:
317
+ desc: null
318
+ value: false
319
+ label2id:
320
+ desc: null
321
+ value:
322
+ LABEL_0: 0
323
+ LABEL_1: 1
324
+ label_names:
325
+ desc: null
326
+ value: None
327
+ label_smoothing_factor:
328
+ desc: null
329
+ value: 0.0
330
+ layer_norm_eps:
331
+ desc: null
332
+ value: 1.0e-05
333
+ layerdrop:
334
+ desc: null
335
+ value: 0.0
336
+ learning_rate:
337
+ desc: null
338
+ value: 0.0001
339
+ length_column_name:
340
+ desc: null
341
+ value: input_length
342
+ length_penalty:
343
+ desc: null
344
+ value: 1.0
345
+ load_best_model_at_end:
346
+ desc: null
347
+ value: true
348
+ local_rank:
349
+ desc: null
350
+ value: -1
351
+ log_level:
352
+ desc: null
353
+ value: -1
354
+ log_level_replica:
355
+ desc: null
356
+ value: -1
357
+ log_on_each_node:
358
+ desc: null
359
+ value: true
360
+ logging_dir:
361
+ desc: null
362
+ value: ./runs/Feb01_10-24-12_job-1abccd0a-3293-4ffe-8274-9e8f841f653f
363
+ logging_first_step:
364
+ desc: null
365
+ value: false
366
+ logging_nan_inf_filter:
367
+ desc: null
368
+ value: true
369
+ logging_steps:
370
+ desc: null
371
+ value: 100
372
+ logging_strategy:
373
+ desc: null
374
+ value: steps
375
+ lr_scheduler_type:
376
+ desc: null
377
+ value: linear
378
+ mask_feature_length:
379
+ desc: null
380
+ value: 10
381
+ mask_feature_min_masks:
382
+ desc: null
383
+ value: 0
384
+ mask_feature_prob:
385
+ desc: null
386
+ value: 0.25
387
+ mask_time_length:
388
+ desc: null
389
+ value: 10
390
+ mask_time_min_masks:
391
+ desc: null
392
+ value: 2
393
+ mask_time_prob:
394
+ desc: null
395
+ value: 0.6
396
+ max_grad_norm:
397
+ desc: null
398
+ value: 1.0
399
+ max_length:
400
+ desc: null
401
+ value: 20
402
+ max_steps:
403
+ desc: null
404
+ value: -1
405
+ metric_for_best_model:
406
+ desc: null
407
+ value: loss
408
+ min_length:
409
+ desc: null
410
+ value: 0
411
+ model_type:
412
+ desc: null
413
+ value: wav2vec2
414
+ mp_parameters:
415
+ desc: null
416
+ value: ''
417
+ no_cuda:
418
+ desc: null
419
+ value: false
420
+ no_repeat_ngram_size:
421
+ desc: null
422
+ value: 0
423
+ num_adapter_layers:
424
+ desc: null
425
+ value: 3
426
+ num_attention_heads:
427
+ desc: null
428
+ value: 16
429
+ num_beam_groups:
430
+ desc: null
431
+ value: 1
432
+ num_beams:
433
+ desc: null
434
+ value: 1
435
+ num_codevector_groups:
436
+ desc: null
437
+ value: 2
438
+ num_codevectors_per_group:
439
+ desc: null
440
+ value: 320
441
+ num_conv_pos_embedding_groups:
442
+ desc: null
443
+ value: 16
444
+ num_conv_pos_embeddings:
445
+ desc: null
446
+ value: 128
447
+ num_feat_extract_layers:
448
+ desc: null
449
+ value: 7
450
+ num_hidden_layers:
451
+ desc: null
452
+ value: 24
453
+ num_negatives:
454
+ desc: null
455
+ value: 100
456
+ num_return_sequences:
457
+ desc: null
458
+ value: 1
459
+ num_train_epochs:
460
+ desc: null
461
+ value: 4.0
462
+ optim:
463
+ desc: null
464
+ value: adamw_hf
465
+ output_attentions:
466
+ desc: null
467
+ value: false
468
+ output_dir:
469
+ desc: null
470
+ value: ./
471
+ output_hidden_size:
472
+ desc: null
473
+ value: 1024
474
+ output_hidden_states:
475
+ desc: null
476
+ value: false
477
+ output_scores:
478
+ desc: null
479
+ value: false
480
+ overwrite_output_dir:
481
+ desc: null
482
+ value: true
483
+ pad_token_id:
484
+ desc: null
485
+ value: 218
486
+ past_index:
487
+ desc: null
488
+ value: -1
489
+ per_device_eval_batch_size:
490
+ desc: null
491
+ value: 64
492
+ per_device_train_batch_size:
493
+ desc: null
494
+ value: 64
495
+ per_gpu_eval_batch_size:
496
+ desc: null
497
+ value: None
498
+ per_gpu_train_batch_size:
499
+ desc: null
500
+ value: None
501
+ prediction_loss_only:
502
+ desc: null
503
+ value: false
504
+ prefix:
505
+ desc: null
506
+ value: null
507
+ problem_type:
508
+ desc: null
509
+ value: null
510
+ proj_codevector_dim:
511
+ desc: null
512
+ value: 768
513
+ pruned_heads:
514
+ desc: null
515
+ value: {}
516
+ push_to_hub:
517
+ desc: null
518
+ value: true
519
+ push_to_hub_model_id:
520
+ desc: null
521
+ value: None
522
+ push_to_hub_organization:
523
+ desc: null
524
+ value: None
525
+ push_to_hub_token:
526
+ desc: null
527
+ value: <PUSH_TO_HUB_TOKEN>
528
+ remove_invalid_values:
529
+ desc: null
530
+ value: false
531
+ remove_unused_columns:
532
+ desc: null
533
+ value: true
534
+ repetition_penalty:
535
+ desc: null
536
+ value: 1.0
537
+ report_to:
538
+ desc: null
539
+ value: '[''wandb'']'
540
+ resume_from_checkpoint:
541
+ desc: null
542
+ value: None
543
+ return_dict:
544
+ desc: null
545
+ value: true
546
+ return_dict_in_generate:
547
+ desc: null
548
+ value: false
549
+ run_name:
550
+ desc: null
551
+ value: xls-r-300m-fr
552
+ save_on_each_node:
553
+ desc: null
554
+ value: false
555
+ save_steps:
556
+ desc: null
557
+ value: 500
558
+ save_strategy:
559
+ desc: null
560
+ value: steps
561
+ save_total_limit:
562
+ desc: null
563
+ value: 2
564
+ seed:
565
+ desc: null
566
+ value: 42
567
+ sep_token_id:
568
+ desc: null
569
+ value: null
570
+ sharded_ddp:
571
+ desc: null
572
+ value: '[]'
573
+ skip_memory_metrics:
574
+ desc: null
575
+ value: true
576
+ task_specific_params:
577
+ desc: null
578
+ value: null
579
+ tdnn_dilation:
580
+ desc: null
581
+ value:
582
+ - 1
583
+ - 2
584
+ - 3
585
+ - 1
586
+ - 1
587
+ tdnn_dim:
588
+ desc: null
589
+ value:
590
+ - 512
591
+ - 512
592
+ - 512
593
+ - 512
594
+ - 1500
595
+ tdnn_kernel:
596
+ desc: null
597
+ value:
598
+ - 5
599
+ - 3
600
+ - 3
601
+ - 1
602
+ - 1
603
+ temperature:
604
+ desc: null
605
+ value: 1.0
606
+ tf32:
607
+ desc: null
608
+ value: None
609
+ tie_encoder_decoder:
610
+ desc: null
611
+ value: false
612
+ tie_word_embeddings:
613
+ desc: null
614
+ value: true
615
+ tokenizer_class:
616
+ desc: null
617
+ value: null
618
+ top_k:
619
+ desc: null
620
+ value: 50
621
+ top_p:
622
+ desc: null
623
+ value: 1.0
624
+ torch_dtype:
625
+ desc: null
626
+ value: float32
627
+ torchscript:
628
+ desc: null
629
+ value: false
630
+ tpu_metrics_debug:
631
+ desc: null
632
+ value: false
633
+ tpu_num_cores:
634
+ desc: null
635
+ value: None
636
+ train_batch_size:
637
+ desc: null
638
+ value: 64
639
+ transformers_version:
640
+ desc: null
641
+ value: 4.17.0.dev0
642
+ use_bfloat16:
643
+ desc: null
644
+ value: false
645
+ use_legacy_prediction_loop:
646
+ desc: null
647
+ value: false
648
+ use_weighted_layer_sum:
649
+ desc: null
650
+ value: false
651
+ vocab_size:
652
+ desc: null
653
+ value: 219
654
+ warmup_ratio:
655
+ desc: null
656
+ value: 0.0
657
+ warmup_steps:
658
+ desc: null
659
+ value: 2700
660
+ weight_decay:
661
+ desc: null
662
+ value: 0.0
663
+ xpu_backend:
664
+ desc: null
665
+ value: None
666
+ xvector_output_dim:
667
+ desc: null
668
+ value: 512
wandb/run-20220201_103004-1yfj7vwy/files/output.log ADDED
@@ -0,0 +1,11 @@
+ ***** eval metrics *****
+ eval_loss = 21.9097
+ eval_runtime = 0:04:31.26
+ eval_samples = 4469
+ eval_samples_per_second = 16.475
+ eval_steps_per_second = 0.258
+ eval_wer = 1.0
+ 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 70/70 [04:30<00:00, 3.87s/it]
+ Saving model checkpoint to ./
+ Configuration saved in ./config.json
+ Model weights saved in ./pytorch_model.bin
wandb/run-20220201_103004-1yfj7vwy/files/requirements.txt ADDED
@@ -0,0 +1,180 @@
+ aiohttp==3.8.1
+ aiosignal==1.2.0
+ analytics-python==1.4.0
+ anyio==3.5.0
+ appdirs==1.4.4
+ argon2-cffi-bindings==21.2.0
+ argon2-cffi==21.3.0
+ asgiref==3.5.0
+ asttokens==2.0.5
+ async-timeout==4.0.2
+ attrs==21.4.0
+ audioread==2.1.9
+ backcall==0.2.0
+ backoff==1.10.0
+ bcrypt==3.2.0
+ beautifulsoup4==4.9.3
+ black==21.12b0
+ bleach==4.1.0
+ brotlipy==0.7.0
+ certifi==2020.12.5
+ cffi==1.14.3
+ chardet==3.0.4
+ charset-normalizer==2.0.10
+ click==8.0.3
+ conda-build==3.21.4
+ conda-package-handling==1.7.2
+ conda==4.9.2
+ configparser==5.2.0
+ cryptography==3.2.1
+ cycler==0.11.0
+ datasets==1.18.2.dev0
+ debugpy==1.5.1
+ decorator==4.4.2
+ defusedxml==0.7.1
+ dill==0.3.4
+ dnspython==2.1.0
+ docker-pycreds==0.4.0
+ entrypoints==0.3
+ executing==0.8.2
+ fastapi==0.73.0
+ ffmpy==0.3.0
+ filelock==3.0.12
+ fonttools==4.29.0
+ frozenlist==1.3.0
+ fsspec==2022.1.0
+ gitdb==4.0.9
+ gitpython==3.1.26
+ glob2==0.7
+ gradio==2.7.5.2
+ h11==0.13.0
+ huggingface-hub==0.4.0
+ idna==2.10
+ importlib-resources==5.4.0
+ ipykernel==6.7.0
+ ipython-genutils==0.2.0
+ ipython==8.0.1
+ ipywidgets==7.6.3
+ jedi==0.17.0
+ jinja2==2.11.3
+ jiwer==2.3.0
+ joblib==1.1.0
+ json5==0.9.6
+ jsonschema==4.4.0
+ jupyter-client==7.1.2
+ jupyter-core==4.9.1
+ jupyterlab-pygments==0.1.2
+ jupyterlab-server==1.2.0
+ jupyterlab-widgets==1.0.2
+ jupyterlab==2.2.9
+ kiwisolver==1.3.2
+ libarchive-c==2.9
+ librosa==0.8.1
+ llvmlite==0.38.0
+ markdown2==2.4.2
+ markupsafe==1.1.1
+ matplotlib-inline==0.1.3
+ matplotlib==3.5.1
+ mistune==0.8.4
+ mkl-fft==1.3.0
+ mkl-random==1.1.1
+ mkl-service==2.3.0
+ monotonic==1.6
+ multidict==6.0.2
+ multiprocess==0.70.12.2
+ mypy-extensions==0.4.3
+ nano==0.10.0
+ nbclient==0.5.10
+ nbconvert==6.4.1
+ nbformat==5.1.3
+ nest-asyncio==1.5.4
+ notebook==6.4.8
+ numba==0.55.1
+ numpy==1.19.2
+ olefile==0.46
+ packaging==21.3
+ pandas==1.4.0
+ pandocfilters==1.5.0
+ paramiko==2.9.2
+ parso==0.8.1
+ pathspec==0.9.0
+ pathtools==0.1.2
+ pexpect==4.8.0
+ pickleshare==0.7.5
+ pillow==8.1.2
+ pip==21.3.1
+ pkginfo==1.7.0
+ platformdirs==2.4.1
+ pooch==1.6.0
+ prometheus-client==0.13.0
+ promise==2.3
+ prompt-toolkit==3.0.8
+ protobuf==3.19.4
+ psutil==5.8.0
+ ptyprocess==0.7.0
+ pure-eval==0.2.2
+ pyarrow==6.0.1
+ pycosat==0.6.3
+ pycparser==2.20
+ pycryptodome==3.13.0
+ pydantic==1.9.0
+ pydub==0.25.1
+ pygments==2.8.0
+ pynacl==1.5.0
+ pyopenssl==19.1.0
+ pyparsing==3.0.7
+ pyrsistent==0.18.1
+ pysocks==1.7.1
+ python-dateutil==2.8.2
+ python-etcd==0.4.5
+ python-levenshtein==0.12.2
+ python-multipart==0.0.5
+ pytz==2021.1
+ pyyaml==5.4.1
+ pyzmq==22.3.0
+ regex==2022.1.18
+ requests==2.24.0
+ resampy==0.2.2
+ ruamel-yaml==0.15.87
+ sacremoses==0.0.47
+ scikit-learn==1.0.2
+ scipy==1.7.3
+ send2trash==1.8.0
+ sentry-sdk==1.5.4
+ setuptools==50.3.1.post20201107
+ shortuuid==1.0.8
+ six==1.15.0
+ smmap==5.0.0
+ sniffio==1.2.0
+ soundfile==0.10.3.post1
+ soupsieve==2.2
+ stack-data==0.1.4
+ starlette==0.17.1
+ subprocess32==3.5.4
+ termcolor==1.1.0
+ terminado==0.13.1
+ testpath==0.5.0
+ threadpoolctl==3.0.0
+ tokenizers==0.11.4
+ tomli==1.2.3
+ torch==1.10.2
+ torchaudio==0.10.2
+ torchelastic==0.2.2
+ torchtext==0.9.1
+ torchvision==0.9.1
+ tornado==6.1
+ tqdm==4.62.3
+ traitlets==5.1.1
+ transformers==4.17.0.dev0
+ typing-extensions==4.0.1
+ urllib3==1.25.11
+ uvicorn==0.17.1
+ wandb==0.12.9
+ wcwidth==0.2.5
+ webencodings==0.5.1
+ wheel==0.35.1
+ widgetsnbextension==3.5.2
+ xxhash==2.0.2
+ yarl==1.7.2
+ yaspin==2.1.0
+ zipp==3.7.0
wandb/run-20220201_103004-1yfj7vwy/files/wandb-metadata.json ADDED
@@ -0,0 +1,64 @@
+ {
+ "os": "Linux-4.15.0-151-generic-x86_64-with-glibc2.10",
+ "python": "3.8.8",
+ "heartbeatAt": "2022-02-01T10:30:05.909307",
+ "startedAt": "2022-02-01T10:30:04.544710",
+ "docker": null,
+ "gpu": "Tesla V100S-PCIE-32GB",
+ "gpu_count": 1,
+ "cpu_count": 60,
+ "cuda": null,
+ "args": [
+ "--dataset_name=mozilla-foundation/common_voice_8_0",
+ "--model_name_or_path=facebook/wav2vec2-xls-r-300m",
+ "--dataset_config_name=fr",
+ "--tokenizer_name_or_path=./",
+ "--output_dir=./",
+ "--overwrite_output_dir",
+ "--num_train_epochs=4",
+ "--per_device_train_batch_size=64",
+ "--per_device_eval_batch_size=64",
+ "--gradient_accumulation_steps=1",
+ "--learning_rate=1e-4",
+ "--warmup_steps=2700",
+ "--length_column_name=input_length",
+ "--evaluation_strategy=steps",
+ "--text_column_name=sentence",
+ "--save_steps=500",
+ "--eval_steps=500",
+ "--logging_steps=100",
+ "--layerdrop=0.0",
+ "--activation_dropout=0.05",
+ "--save_total_limit=2",
+ "--freeze_feature_encoder",
+ "--feat_proj_dropout=0.0",
+ "--mask_time_prob=0.6",
+ "--mask_time_length=10",
+ "--mask_feature_prob=0.25",
+ "--mask_feature_length=10",
+ "--gradient_checkpointing",
+ "--report_to=wandb",
+ "--run_name=xls-r-300m-fr",
+ "--max_eval_samples=4500",
+ "--max_duration_in_seconds=10",
+ "--use_auth_token",
+ "--fp16",
+ "--group_by_length",
+ "--preprocessing_num_workers=64",
+ "--do_eval",
+ "--load_best_model_at_end",
+ "--push_to_hub"
+ ],
+ "state": "running",
+ "program": "run_speech_recognition_ctc.py",
+ "codePath": "run_speech_recognition_ctc.py",
+ "git": {
+ "remote": "https://huggingface.co/AlexN/xls-r-300m-fr",
+ "commit": "d7c1a0a82eae9cfa6aaf8f9e1c39c342f17e40a8"
+ },
+ "email": "[email protected]",
+ "root": "/workspace/xls-r-300m-fr",
+ "host": "job-1abccd0a-3293-4ffe-8274-9e8f841f653f",
+ "username": "ovh",
+ "executable": "/opt/conda/bin/python"
+ }
wandb/run-20220201_103004-1yfj7vwy/files/wandb-summary.json ADDED
@@ -0,0 +1 @@
+ {"eval/loss": 21.909679412841797, "eval/wer": 1.0, "eval/runtime": 271.2636, "eval/samples_per_second": 16.475, "eval/steps_per_second": 0.258, "train/global_step": 0, "_runtime": 2, "_timestamp": 1643711406, "_step": 0}
wandb/run-20220201_103004-1yfj7vwy/logs/debug-internal.log ADDED
@@ -0,0 +1,58 @@
+ 2022-02-01 10:30:05,624 INFO MainThread:61045 [internal.py:wandb_internal():87] W&B internal server running at pid: 61045, started at: 2022-02-01 10:30:05.624191
+ 2022-02-01 10:30:05,628 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: check_version
+ 2022-02-01 10:30:05,628 INFO WriterThread:61045 [datastore.py:open_for_write():77] open: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/run-1yfj7vwy.wandb
+ 2022-02-01 10:30:05,632 DEBUG SenderThread:61045 [sender.py:send():234] send: header
+ 2022-02-01 10:30:05,632 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: check_version
+ 2022-02-01 10:30:05,706 DEBUG SenderThread:61045 [sender.py:send():234] send: run
+ 2022-02-01 10:30:05,896 INFO SenderThread:61045 [dir_watcher.py:__init__():169] watching files in: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files
+ 2022-02-01 10:30:05,896 INFO SenderThread:61045 [sender.py:_start_run_threads():804] run started: 1yfj7vwy with start time 1643711404
+ 2022-02-01 10:30:05,896 DEBUG SenderThread:61045 [sender.py:send():234] send: summary
+ 2022-02-01 10:30:05,897 INFO SenderThread:61045 [sender.py:_save_file():939] saving file wandb-summary.json with policy end
+ 2022-02-01 10:30:05,898 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: run_start
+ 2022-02-01 10:30:05,908 DEBUG HandlerThread:61045 [meta.py:__init__():40] meta init
+ 2022-02-01 10:30:05,909 DEBUG HandlerThread:61045 [meta.py:__init__():54] meta init done
+ 2022-02-01 10:30:05,909 DEBUG HandlerThread:61045 [meta.py:probe():214] probe
+ 2022-02-01 10:30:05,917 DEBUG HandlerThread:61045 [meta.py:_setup_git():204] setup git
+ 2022-02-01 10:30:05,952 DEBUG HandlerThread:61045 [meta.py:_setup_git():211] setup git done
+ 2022-02-01 10:30:05,953 DEBUG HandlerThread:61045 [meta.py:_save_pip():58] save pip
+ 2022-02-01 10:30:05,954 DEBUG HandlerThread:61045 [meta.py:_save_pip():72] save pip done
+ 2022-02-01 10:30:05,954 DEBUG HandlerThread:61045 [meta.py:_save_conda():79] save conda
+ 2022-02-01 10:30:06,494 DEBUG HandlerThread:61045 [meta.py:_save_conda():89] save conda done
+ 2022-02-01 10:30:06,495 DEBUG HandlerThread:61045 [meta.py:probe():252] probe done
+ 2022-02-01 10:30:06,500 DEBUG SenderThread:61045 [sender.py:send():234] send: files
+ 2022-02-01 10:30:06,501 INFO SenderThread:61045 [sender.py:_save_file():939] saving file wandb-metadata.json with policy now
+ 2022-02-01 10:30:06,511 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: stop_status
+ 2022-02-01 10:30:06,512 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: stop_status
+ 2022-02-01 10:30:06,673 DEBUG SenderThread:61045 [sender.py:send():234] send: config
+ 2022-02-01 10:30:06,675 DEBUG SenderThread:61045 [sender.py:send():234] send: metric
+ 2022-02-01 10:30:06,676 DEBUG SenderThread:61045 [sender.py:send():234] send: metric
+ 2022-02-01 10:30:06,676 WARNING SenderThread:61045 [sender.py:send_metric():897] Seen metric with glob (shouldnt happen)
+ 2022-02-01 10:30:06,676 DEBUG SenderThread:61045 [sender.py:send():234] send: metric
+ 2022-02-01 10:30:06,676 DEBUG SenderThread:61045 [sender.py:send():234] send: metric
+ 2022-02-01 10:30:06,676 DEBUG SenderThread:61045 [sender.py:send():234] send: metric
+ 2022-02-01 10:30:06,677 DEBUG SenderThread:61045 [sender.py:send():234] send: metric
+ 2022-02-01 10:30:06,677 DEBUG SenderThread:61045 [sender.py:send():234] send: metric
+ 2022-02-01 10:30:06,677 DEBUG SenderThread:61045 [sender.py:send():234] send: history
+ 2022-02-01 10:30:06,677 DEBUG SenderThread:61045 [sender.py:send():234] send: summary
+ 2022-02-01 10:30:06,678 INFO SenderThread:61045 [sender.py:_save_file():939] saving file wandb-summary.json with policy end
+ 2022-02-01 10:30:06,898 INFO Thread-8 :61045 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/conda-environment.yaml
+ 2022-02-01 10:30:06,898 INFO Thread-8 :61045 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/output.log
+ 2022-02-01 10:30:06,898 INFO Thread-8 :61045 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/wandb-summary.json
+ 2022-02-01 10:30:06,898 INFO Thread-8 :61045 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/wandb-metadata.json
+ 2022-02-01 10:30:06,899 INFO Thread-8 :61045 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/requirements.txt
+ 2022-02-01 10:30:07,139 INFO Thread-11 :61045 [upload_job.py:push():137] Uploaded file /tmp/tmpzeoerhazwandb/25qe2qjl-wandb-metadata.json
+ 2022-02-01 10:30:08,898 INFO Thread-8 :61045 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/output.log
+ 2022-02-01 10:30:10,899 INFO Thread-8 :61045 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/output.log
+ 2022-02-01 10:30:21,675 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: stop_status
+ 2022-02-01 10:30:21,676 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: stop_status
+ 2022-02-01 10:30:34,180 DEBUG SenderThread:61045 [sender.py:send():234] send: stats
+ 2022-02-01 10:30:36,839 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: stop_status
+ 2022-02-01 10:30:36,839 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: stop_status
+ 2022-02-01 10:30:36,921 INFO Thread-8 :61045 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/config.yaml
+ 2022-02-01 10:30:52,000 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: stop_status
+ 2022-02-01 10:30:52,001 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: stop_status
+ 2022-02-01 10:31:04,514 DEBUG SenderThread:61045 [sender.py:send():234] send: stats
+ 2022-02-01 10:31:07,162 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: stop_status
+ 2022-02-01 10:31:07,163 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: stop_status
+ 2022-02-01 10:31:22,319 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: stop_status
+ 2022-02-01 10:31:22,320 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: stop_status
wandb/run-20220201_103004-1yfj7vwy/logs/debug.log ADDED
@@ -0,0 +1,24 @@
+ 2022-02-01 10:30:04,548 INFO MainThread:58652 [wandb_setup.py:_flush():71] setting env: {}
+ 2022-02-01 10:30:04,549 INFO MainThread:58652 [wandb_setup.py:_flush():71] setting login settings: {}
+ 2022-02-01 10:30:04,549 INFO MainThread:58652 [wandb_init.py:_log_setup():371] Logging user logs to /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/logs/debug.log
+ 2022-02-01 10:30:04,549 INFO MainThread:58652 [wandb_init.py:_log_setup():372] Logging internal logs to /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/logs/debug-internal.log
+ 2022-02-01 10:30:04,549 INFO MainThread:58652 [wandb_init.py:init():404] calling init triggers
+ 2022-02-01 10:30:04,550 INFO MainThread:58652 [wandb_init.py:init():409] wandb.init called with sweep_config: {}
+ config: {}
+ 2022-02-01 10:30:04,550 INFO MainThread:58652 [wandb_init.py:init():460] starting backend
+ 2022-02-01 10:30:04,550 INFO MainThread:58652 [backend.py:_multiprocessing_setup():99] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
+ 2022-02-01 10:30:04,647 INFO MainThread:58652 [backend.py:ensure_launched():216] starting backend process...
+ 2022-02-01 10:30:04,775 INFO MainThread:58652 [backend.py:ensure_launched():221] started backend process with pid: 61045
+ 2022-02-01 10:30:04,778 INFO MainThread:58652 [wandb_init.py:init():469] backend started and connected
+ 2022-02-01 10:30:04,787 INFO MainThread:58652 [wandb_init.py:init():533] updated telemetry
+ 2022-02-01 10:30:05,012 INFO MainThread:58652 [wandb_init.py:init():563] communicating current version
+ 2022-02-01 10:30:05,703 INFO MainThread:58652 [wandb_init.py:init():568] got version response
+ 2022-02-01 10:30:05,703 INFO MainThread:58652 [wandb_init.py:init():578] communicating run to backend with 30 second timeout
+ 2022-02-01 10:30:05,897 INFO MainThread:58652 [wandb_init.py:init():606] starting run threads in backend
+ 2022-02-01 10:30:06,508 INFO MainThread:58652 [wandb_run.py:_console_start():1810] atexit reg
+ 2022-02-01 10:30:06,509 INFO MainThread:58652 [wandb_run.py:_redirect():1684] redirect: SettingsConsole.REDIRECT
+ 2022-02-01 10:30:06,510 INFO MainThread:58652 [wandb_run.py:_redirect():1689] Redirecting console.
+ 2022-02-01 10:30:06,516 INFO MainThread:58652 [wandb_run.py:_redirect():1745] Redirects installed.
+ 2022-02-01 10:30:06,517 INFO MainThread:58652 [wandb_init.py:init():633] run started, returning control to user process
+ 2022-02-01 10:30:06,519 INFO MainThread:58652 [wandb_run.py:_config_callback():956] config_cb None None {'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'architectures': ['Wav2Vec2ForPreTraining'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': 218, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'facebook/wav2vec2-xls-r-300m', 'transformers_version': '4.17.0.dev0', 'feat_extract_dropout': 0.0, 'model_type': 'wav2vec2', 'num_feat_extract_layers': 7, 'hidden_size': 1024, 'feat_extract_norm': 'layer', 'feat_extract_activation': 'gelu', 'conv_dim': [512, 512, 512, 512, 512, 512, 512], 'conv_stride': [5, 2, 2, 2, 2, 2, 2], 'conv_kernel': [10, 3, 3, 3, 3, 2, 2], 'conv_bias': True, 'num_conv_pos_embeddings': 128, 'num_conv_pos_embedding_groups': 16, 'num_hidden_layers': 24, 'intermediate_size': 4096, 'hidden_act': 'gelu', 'num_attention_heads': 16, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.05, 'feat_proj_dropout': 0.0, 'final_dropout': 0.0, 'layerdrop': 0.0, 'layer_norm_eps': 1e-05, 'initializer_range': 0.02, 'vocab_size': 219, 'do_stable_layer_norm': True, 'use_weighted_layer_sum': False, 'apply_spec_augment': True, 'mask_time_prob': 0.6, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.25, 'mask_feature_length': 10, 'mask_feature_min_masks': 0, 'num_codevectors_per_group': 320, 'num_codevector_groups': 2, 'contrastive_logits_temperature': 0.1, 'feat_quantizer_dropout': 0.0, 'num_negatives': 100, 'codevector_dim': 768, 'proj_codevector_dim': 768, 'diversity_loss_weight': 0.1, 'ctc_loss_reduction': 'mean', 'ctc_zero_infinity': False, 'add_adapter': False, 'adapter_kernel_size': 3, 'adapter_stride': 2, 'num_adapter_layers': 3, 'output_hidden_size': 1024, 'classifier_proj_size': 256, 'tdnn_dim': [512, 512, 512, 512, 1500], 'tdnn_kernel': [5, 3, 3, 1, 1], 'tdnn_dilation': [1, 2, 3, 1, 1], 'xvector_output_dim': 512, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': False, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 64, 'per_device_eval_batch_size': 64, 'per_gpu_train_batch_size': 'None', 'per_gpu_eval_batch_size': 'None', 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': 'None', 'learning_rate': 0.0001, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 4.0, 'max_steps': -1, 'lr_scheduler_type': 'linear', 
'warmup_ratio': 0.0, 'warmup_steps': 2700, 'log_level': -1, 'log_level_replica': -1, 'log_on_each_node': True, 'logging_dir': './runs/Feb01_10-24-12_job-1abccd0a-3293-4ffe-8274-9e8f841f653f', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 500, 'save_total_limit': 2, 'save_on_each_node': False, 'no_cuda': False, 'seed': 42, 'bf16': False, 'fp16': True, 'fp16_opt_level': 'O1', 'half_precision_backend': 'amp', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': 'None', 'local_rank': -1, 'xpu_backend': 'None', 'tpu_num_cores': 'None', 'tpu_metrics_debug': False, 'debug': '[]', 'dataloader_drop_last': False, 'eval_steps': 500, 'dataloader_num_workers': 0, 'past_index': -1, 'run_name': 'xls-r-300m-fr', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': 'None', 'load_best_model_at_end': True, 'metric_for_best_model': 'loss', 'greater_is_better': False, 'ignore_data_skip': False, 'sharded_ddp': '[]', 'deepspeed': 'None', 'label_smoothing_factor': 0.0, 'optim': 'adamw_hf', 'adafactor': False, 'group_by_length': True, 'length_column_name': 'input_length', 'report_to': "['wandb']", 'ddp_find_unused_parameters': 'None', 'ddp_bucket_cap_mb': 'None', 'dataloader_pin_memory': True, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': 'None', 'hub_model_id': 'None', 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'gradient_checkpointing': True, 'fp16_backend': 'auto', 'push_to_hub_model_id': 'None', 'push_to_hub_organization': 'None', 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', '_n_gpu': 1, 'mp_parameters': '', 'train_batch_size': 64, 'eval_batch_size': 64}
+ 2022-02-01 10:30:06,524 INFO MainThread:58652 [wandb_watch.py:watch():43] Watching
wandb/run-20220201_103004-1yfj7vwy/run-1yfj7vwy.wandb ADDED
Binary file (5.75 kB)