AlexN committed on
Commit
4641f67
1 Parent(s): a37375f

End of training

Browse files
Files changed (38) hide show
  1. .ipynb_checkpoints/run-checkpoint.sh +3 -3
  2. .ipynb_checkpoints/run_speech_recognition_ctc-checkpoint.py +3 -4
  3. all_results.json +5 -5
  4. config.json +2 -2
  5. eval_results.json +5 -5
  6. pytorch_model.bin +1 -1
  7. run.sh +3 -4
  8. run_speech_recognition_ctc.py +1 -1
  9. special_tokens_map.json +1 -1
  10. training_args.bin +1 -1
  11. wandb/debug-internal.log +1 -1
  12. wandb/debug.log +1 -1
  13. wandb/latest-run +1 -1
  14. wandb/run-20220130_163246-20w0bl60/files/.ipynb_checkpoints/requirements-checkpoint.txt +180 -0
  15. wandb/run-20220201_103004-1yfj7vwy/files/config.yaml +6 -0
  16. wandb/run-20220201_103004-1yfj7vwy/files/output.log +5 -0
  17. wandb/run-20220201_103004-1yfj7vwy/files/wandb-summary.json +1 -1
  18. wandb/run-20220201_103004-1yfj7vwy/logs/debug-internal.log +112 -0
  19. wandb/run-20220201_103004-1yfj7vwy/logs/debug.log +106 -0
  20. wandb/run-20220201_103004-1yfj7vwy/run-1yfj7vwy.wandb +0 -0
  21. wandb/run-20220201_115151-3ujx6xdv/files/conda-environment.yaml +0 -0
  22. wandb/run-20220201_115151-3ujx6xdv/files/config.yaml +0 -0
  23. wandb/run-20220201_115151-3ujx6xdv/files/output.log +243 -0
  24. wandb/run-20220201_115151-3ujx6xdv/files/requirements.txt +180 -0
  25. wandb/run-20220201_115151-3ujx6xdv/files/wandb-metadata.json +65 -0
  26. wandb/run-20220201_115151-3ujx6xdv/files/wandb-summary.json +0 -0
  27. wandb/run-20220201_115151-3ujx6xdv/logs/debug-internal.log +0 -0
  28. wandb/run-20220201_115151-3ujx6xdv/logs/debug.log +162 -0
  29. wandb/run-20220201_115151-3ujx6xdv/run-3ujx6xdv.wandb +0 -0
  30. wandb/run-20220201_142447-3hqii6h6/files/conda-environment.yaml +0 -0
  31. wandb/run-20220201_142447-3hqii6h6/files/config.yaml +668 -0
  32. wandb/run-20220201_142447-3hqii6h6/files/output.log +11 -0
  33. wandb/run-20220201_142447-3hqii6h6/files/requirements.txt +180 -0
  34. wandb/run-20220201_142447-3hqii6h6/files/wandb-metadata.json +65 -0
  35. wandb/run-20220201_142447-3hqii6h6/files/wandb-summary.json +1 -0
  36. wandb/run-20220201_142447-3hqii6h6/logs/debug-internal.log +58 -0
  37. wandb/run-20220201_142447-3hqii6h6/logs/debug.log +24 -0
  38. wandb/run-20220201_142447-3hqii6h6/run-3hqii6h6.wandb +0 -0
.ipynb_checkpoints/run-checkpoint.sh CHANGED
@@ -6,7 +6,7 @@ python run_speech_recognition_ctc.py \
6
  --tokenizer_name_or_path="./" \
7
  --output_dir="./" \
8
  --overwrite_output_dir \
9
- --num_train_epochs="4" \
10
  --per_device_train_batch_size="64" \
11
  --per_device_eval_batch_size="64" \
12
  --gradient_accumulation_steps="1" \
@@ -23,7 +23,7 @@ python run_speech_recognition_ctc.py \
23
  --save_total_limit="2" \
24
  --freeze_feature_encoder \
25
  --feat_proj_dropout="0.0" \
26
- --mask_time_prob="0.6" \
27
  --mask_time_length="10" \
28
  --mask_feature_prob="0.25" \
29
  --mask_feature_length="10" \
@@ -36,6 +36,6 @@ python run_speech_recognition_ctc.py \
36
  --fp16 \
37
  --group_by_length \
38
  --preprocessing_num_workers="64" \
39
- --do_train --do_eval \
40
  --load_best_model_at_end \
41
  --push_to_hub
 
6
  --tokenizer_name_or_path="./" \
7
  --output_dir="./" \
8
  --overwrite_output_dir \
9
+ --num_train_epochs="1" \
10
  --per_device_train_batch_size="64" \
11
  --per_device_eval_batch_size="64" \
12
  --gradient_accumulation_steps="1" \
 
23
  --save_total_limit="2" \
24
  --freeze_feature_encoder \
25
  --feat_proj_dropout="0.0" \
26
+ --mask_time_prob="0.75" \
27
  --mask_time_length="10" \
28
  --mask_feature_prob="0.25" \
29
  --mask_feature_length="10" \
 
36
  --fp16 \
37
  --group_by_length \
38
  --preprocessing_num_workers="64" \
39
+ --do_train=False --do_eval \
40
  --load_best_model_at_end \
41
  --push_to_hub
.ipynb_checkpoints/run_speech_recognition_ctc-checkpoint.py CHANGED
@@ -642,8 +642,7 @@ def main():
642
 
643
  pred.label_ids[pred.label_ids == -100] = tokenizer.pad_token_id
644
 
645
- pred_str = tokenizer.batch_decode(pred_ids)#, skip_special_tokens=True)#being sure to remove <s> from the output
646
- print(pred_str)
647
  # we do not want to group tokens when computing the metrics
648
  label_str = tokenizer.batch_decode(pred.label_ids, group_tokens=False)
649
 
@@ -687,7 +686,7 @@ def main():
687
  # 8. Finally, we can start training
688
 
689
  # Training
690
- if training_args.do_train:
691
 
692
  # use last checkpoint if exist
693
  if last_checkpoint is not None:
@@ -730,7 +729,7 @@ def main():
730
  kwargs = {
731
  "finetuned_from": model_args.model_name_or_path,
732
  "tasks": "speech-recognition",
733
- "tags": ["automatic-speech-recognition", data_args.dataset_name],
734
  "dataset_args": f"Config: {config_name}, Training split: {data_args.train_split_name}, Eval split: {data_args.eval_split_name}",
735
  "dataset": f"{data_args.dataset_name.upper()} - {config_name.upper()}",
736
  }
 
642
 
643
  pred.label_ids[pred.label_ids == -100] = tokenizer.pad_token_id
644
 
645
+ pred_str = tokenizer.batch_decode(pred_ids, skip_special_tokens=True)#being sure to remove <s> from the output
 
646
  # we do not want to group tokens when computing the metrics
647
  label_str = tokenizer.batch_decode(pred.label_ids, group_tokens=False)
648
 
 
686
  # 8. Finally, we can start training
687
 
688
  # Training
689
+ if training_args.do_train and 1 == 2:
690
 
691
  # use last checkpoint if exist
692
  if last_checkpoint is not None:
 
729
  kwargs = {
730
  "finetuned_from": model_args.model_name_or_path,
731
  "tasks": "speech-recognition",
732
+ "tags": ["automatic-speech-recognition", "robust-speech-event", data_args.dataset_name],
733
  "dataset_args": f"Config: {config_name}, Training split: {data_args.train_split_name}, Eval split: {data_args.eval_split_name}",
734
  "dataset": f"{data_args.dataset_name.upper()} - {config_name.upper()}",
735
  }
all_results.json CHANGED
@@ -1,11 +1,11 @@
1
  {
2
  "epoch": 2.0,
3
- "eval_loss": 21.909679412841797,
4
- "eval_runtime": 271.2636,
5
  "eval_samples": 4469,
6
- "eval_samples_per_second": 16.475,
7
- "eval_steps_per_second": 0.258,
8
- "eval_wer": 1.0,
9
  "train_loss": 1.442369053426242,
10
  "train_runtime": 53680.5392,
11
  "train_samples": 442265,
 
1
  {
2
  "epoch": 2.0,
3
+ "eval_loss": 0.22597630321979523,
4
+ "eval_runtime": 276.6037,
5
  "eval_samples": 4469,
6
+ "eval_samples_per_second": 16.157,
7
+ "eval_steps_per_second": 0.253,
8
+ "eval_wer": 0.2948882012145466,
9
  "train_loss": 1.442369053426242,
10
  "train_runtime": 53680.5392,
11
  "train_samples": 442265,
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "facebook/wav2vec2-xls-r-300m",
3
  "activation_dropout": 0.05,
4
  "adapter_kernel_size": 3,
5
  "adapter_stride": 2,
@@ -64,7 +64,7 @@
64
  "mask_feature_prob": 0.25,
65
  "mask_time_length": 10,
66
  "mask_time_min_masks": 2,
67
- "mask_time_prob": 0.6,
68
  "model_type": "wav2vec2",
69
  "num_adapter_layers": 3,
70
  "num_attention_heads": 16,
 
1
  {
2
+ "_name_or_path": "./checkpoint-18000",
3
  "activation_dropout": 0.05,
4
  "adapter_kernel_size": 3,
5
  "adapter_stride": 2,
 
64
  "mask_feature_prob": 0.25,
65
  "mask_time_length": 10,
66
  "mask_time_min_masks": 2,
67
+ "mask_time_prob": 0.75,
68
  "model_type": "wav2vec2",
69
  "num_adapter_layers": 3,
70
  "num_attention_heads": 16,
eval_results.json CHANGED
@@ -1,8 +1,8 @@
1
  {
2
- "eval_loss": 21.909679412841797,
3
- "eval_runtime": 271.2636,
4
  "eval_samples": 4469,
5
- "eval_samples_per_second": 16.475,
6
- "eval_steps_per_second": 0.258,
7
- "eval_wer": 1.0
8
  }
 
1
  {
2
+ "eval_loss": 0.22597630321979523,
3
+ "eval_runtime": 276.6037,
4
  "eval_samples": 4469,
5
+ "eval_samples_per_second": 16.157,
6
+ "eval_steps_per_second": 0.253,
7
+ "eval_wer": 0.2948882012145466
8
  }
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:6afb0187b51e7ce5d940ad3d192ccb5384c6c92b414d047431d6ebb6a0b4729b
3
  size 1262821553
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8f9c6a7fe50ee84c94a3ae149eea45203dc369fa4e7bcf46fbc4367f2385e4d3
3
  size 1262821553
run.sh CHANGED
@@ -6,7 +6,7 @@ python run_speech_recognition_ctc.py \
6
  --tokenizer_name_or_path="./" \
7
  --output_dir="./" \
8
  --overwrite_output_dir \
9
- --num_train_epochs="4" \
10
  --per_device_train_batch_size="64" \
11
  --per_device_eval_batch_size="64" \
12
  --gradient_accumulation_steps="1" \
@@ -23,19 +23,18 @@ python run_speech_recognition_ctc.py \
23
  --save_total_limit="2" \
24
  --freeze_feature_encoder \
25
  --feat_proj_dropout="0.0" \
26
- --mask_time_prob="0.6" \
27
  --mask_time_length="10" \
28
  --mask_feature_prob="0.25" \
29
  --mask_feature_length="10" \
30
  --gradient_checkpointing \
31
  --report_to="wandb" \
32
  --run_name="xls-r-300m-fr" \
33
- --max_eval_samples="4500" \
34
  --max_duration_in_seconds="10" \
35
  --use_auth_token \
36
  --fp16 \
37
  --group_by_length \
38
  --preprocessing_num_workers="64" \
39
- --do_train --do_eval \
40
  --load_best_model_at_end \
41
  --push_to_hub
 
6
  --tokenizer_name_or_path="./" \
7
  --output_dir="./" \
8
  --overwrite_output_dir \
9
+ --num_train_epochs="1" \
10
  --per_device_train_batch_size="64" \
11
  --per_device_eval_batch_size="64" \
12
  --gradient_accumulation_steps="1" \
 
23
  --save_total_limit="2" \
24
  --freeze_feature_encoder \
25
  --feat_proj_dropout="0.0" \
26
+ --mask_time_prob="0.75" \
27
  --mask_time_length="10" \
28
  --mask_feature_prob="0.25" \
29
  --mask_feature_length="10" \
30
  --gradient_checkpointing \
31
  --report_to="wandb" \
32
  --run_name="xls-r-300m-fr" \
 
33
  --max_duration_in_seconds="10" \
34
  --use_auth_token \
35
  --fp16 \
36
  --group_by_length \
37
  --preprocessing_num_workers="64" \
38
+ --do_train=False --do_eval \
39
  --load_best_model_at_end \
40
  --push_to_hub
run_speech_recognition_ctc.py CHANGED
@@ -686,7 +686,7 @@ def main():
686
  # 8. Finally, we can start training
687
 
688
  # Training
689
- if training_args.do_train:
690
 
691
  # use last checkpoint if exist
692
  if last_checkpoint is not None:
 
686
  # 8. Finally, we can start training
687
 
688
  # Training
689
+ if training_args.do_train and 1 == 2:
690
 
691
  # use last checkpoint if exist
692
  if last_checkpoint is not None:
special_tokens_map.json CHANGED
@@ -1 +1 @@
1
- {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "additional_special_tokens": [{"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<pad>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<pad>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}]}
 
1
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "additional_special_tokens": [{"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<pad>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<pad>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<pad>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "</s>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}, {"content": "<pad>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true}]}
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:1f3a67b3f8a9b2a576d3736f6fe41f32790252519745bd9adf7f9fc9090a03b0
3
  size 3055
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:acd1210a48013d18dc58e4f6ea5c766d8c7c54f299c6e4b1a4f46aa0e01bcec9
3
  size 3055
wandb/debug-internal.log CHANGED
@@ -1 +1 @@
1
- run-20220201_103004-1yfj7vwy/logs/debug-internal.log
 
1
+ run-20220201_142447-3hqii6h6/logs/debug-internal.log
wandb/debug.log CHANGED
@@ -1 +1 @@
1
- run-20220201_103004-1yfj7vwy/logs/debug.log
 
1
+ run-20220201_142447-3hqii6h6/logs/debug.log
wandb/latest-run CHANGED
@@ -1 +1 @@
1
- run-20220201_103004-1yfj7vwy
 
1
+ run-20220201_142447-3hqii6h6
wandb/run-20220130_163246-20w0bl60/files/.ipynb_checkpoints/requirements-checkpoint.txt ADDED
@@ -0,0 +1,180 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ aiohttp==3.8.1
2
+ aiosignal==1.2.0
3
+ analytics-python==1.4.0
4
+ anyio==3.5.0
5
+ appdirs==1.4.4
6
+ argon2-cffi-bindings==21.2.0
7
+ argon2-cffi==21.3.0
8
+ asgiref==3.5.0
9
+ asttokens==2.0.5
10
+ async-timeout==4.0.2
11
+ attrs==21.4.0
12
+ audioread==2.1.9
13
+ backcall==0.2.0
14
+ backoff==1.10.0
15
+ bcrypt==3.2.0
16
+ beautifulsoup4==4.9.3
17
+ black==21.12b0
18
+ bleach==4.1.0
19
+ brotlipy==0.7.0
20
+ certifi==2020.12.5
21
+ cffi==1.14.3
22
+ chardet==3.0.4
23
+ charset-normalizer==2.0.10
24
+ click==8.0.3
25
+ conda-build==3.21.4
26
+ conda-package-handling==1.7.2
27
+ conda==4.9.2
28
+ configparser==5.2.0
29
+ cryptography==3.2.1
30
+ cycler==0.11.0
31
+ datasets==1.18.2.dev0
32
+ debugpy==1.5.1
33
+ decorator==4.4.2
34
+ defusedxml==0.7.1
35
+ dill==0.3.4
36
+ dnspython==2.1.0
37
+ docker-pycreds==0.4.0
38
+ entrypoints==0.3
39
+ executing==0.8.2
40
+ fastapi==0.73.0
41
+ ffmpy==0.3.0
42
+ filelock==3.0.12
43
+ fonttools==4.29.0
44
+ frozenlist==1.3.0
45
+ fsspec==2022.1.0
46
+ gitdb==4.0.9
47
+ gitpython==3.1.26
48
+ glob2==0.7
49
+ gradio==2.7.5.2
50
+ h11==0.13.0
51
+ huggingface-hub==0.4.0
52
+ idna==2.10
53
+ importlib-resources==5.4.0
54
+ ipykernel==6.7.0
55
+ ipython-genutils==0.2.0
56
+ ipython==8.0.1
57
+ ipywidgets==7.6.3
58
+ jedi==0.17.0
59
+ jinja2==2.11.3
60
+ jiwer==2.3.0
61
+ joblib==1.1.0
62
+ json5==0.9.6
63
+ jsonschema==4.4.0
64
+ jupyter-client==7.1.2
65
+ jupyter-core==4.9.1
66
+ jupyterlab-pygments==0.1.2
67
+ jupyterlab-server==1.2.0
68
+ jupyterlab-widgets==1.0.2
69
+ jupyterlab==2.2.9
70
+ kiwisolver==1.3.2
71
+ libarchive-c==2.9
72
+ librosa==0.8.1
73
+ llvmlite==0.38.0
74
+ markdown2==2.4.2
75
+ markupsafe==1.1.1
76
+ matplotlib-inline==0.1.3
77
+ matplotlib==3.5.1
78
+ mistune==0.8.4
79
+ mkl-fft==1.3.0
80
+ mkl-random==1.1.1
81
+ mkl-service==2.3.0
82
+ monotonic==1.6
83
+ multidict==6.0.2
84
+ multiprocess==0.70.12.2
85
+ mypy-extensions==0.4.3
86
+ nano==0.10.0
87
+ nbclient==0.5.10
88
+ nbconvert==6.4.1
89
+ nbformat==5.1.3
90
+ nest-asyncio==1.5.4
91
+ notebook==6.4.8
92
+ numba==0.55.1
93
+ numpy==1.19.2
94
+ olefile==0.46
95
+ packaging==21.3
96
+ pandas==1.4.0
97
+ pandocfilters==1.5.0
98
+ paramiko==2.9.2
99
+ parso==0.8.1
100
+ pathspec==0.9.0
101
+ pathtools==0.1.2
102
+ pexpect==4.8.0
103
+ pickleshare==0.7.5
104
+ pillow==8.1.2
105
+ pip==21.3.1
106
+ pkginfo==1.7.0
107
+ platformdirs==2.4.1
108
+ pooch==1.6.0
109
+ prometheus-client==0.13.0
110
+ promise==2.3
111
+ prompt-toolkit==3.0.8
112
+ protobuf==3.19.4
113
+ psutil==5.8.0
114
+ ptyprocess==0.7.0
115
+ pure-eval==0.2.2
116
+ pyarrow==6.0.1
117
+ pycosat==0.6.3
118
+ pycparser==2.20
119
+ pycryptodome==3.13.0
120
+ pydantic==1.9.0
121
+ pydub==0.25.1
122
+ pygments==2.8.0
123
+ pynacl==1.5.0
124
+ pyopenssl==19.1.0
125
+ pyparsing==3.0.7
126
+ pyrsistent==0.18.1
127
+ pysocks==1.7.1
128
+ python-dateutil==2.8.2
129
+ python-etcd==0.4.5
130
+ python-levenshtein==0.12.2
131
+ python-multipart==0.0.5
132
+ pytz==2021.1
133
+ pyyaml==5.4.1
134
+ pyzmq==22.3.0
135
+ regex==2022.1.18
136
+ requests==2.24.0
137
+ resampy==0.2.2
138
+ ruamel-yaml==0.15.87
139
+ sacremoses==0.0.47
140
+ scikit-learn==1.0.2
141
+ scipy==1.7.3
142
+ send2trash==1.8.0
143
+ sentry-sdk==1.5.4
144
+ setuptools==50.3.1.post20201107
145
+ shortuuid==1.0.8
146
+ six==1.15.0
147
+ smmap==5.0.0
148
+ sniffio==1.2.0
149
+ soundfile==0.10.3.post1
150
+ soupsieve==2.2
151
+ stack-data==0.1.4
152
+ starlette==0.17.1
153
+ subprocess32==3.5.4
154
+ termcolor==1.1.0
155
+ terminado==0.13.1
156
+ testpath==0.5.0
157
+ threadpoolctl==3.0.0
158
+ tokenizers==0.11.4
159
+ tomli==1.2.3
160
+ torch==1.10.2
161
+ torchaudio==0.10.2
162
+ torchelastic==0.2.2
163
+ torchtext==0.9.1
164
+ torchvision==0.9.1
165
+ tornado==6.1
166
+ tqdm==4.62.3
167
+ traitlets==5.1.1
168
+ transformers==4.17.0.dev0
169
+ typing-extensions==4.0.1
170
+ urllib3==1.25.11
171
+ uvicorn==0.17.1
172
+ wandb==0.12.9
173
+ wcwidth==0.2.5
174
+ webencodings==0.5.1
175
+ wheel==0.35.1
176
+ widgetsnbextension==3.5.2
177
+ xxhash==2.0.2
178
+ yarl==1.7.2
179
+ yaspin==2.1.0
180
+ zipp==3.7.0
wandb/run-20220201_103004-1yfj7vwy/files/config.yaml CHANGED
@@ -45,7 +45,13 @@ _wandb:
45
  - 1
46
  - 5
47
  - 11
 
 
 
 
48
  3:
 
 
49
  - 13
50
  4: 3.8.8
51
  5: 0.12.9
 
45
  - 1
46
  - 5
47
  - 11
48
+ 2:
49
+ - 1
50
+ - 5
51
+ - 11
52
  3:
53
+ - 1
54
+ - 7
55
  - 13
56
  4: 3.8.8
57
  5: 0.12.9
wandb/run-20220201_103004-1yfj7vwy/files/output.log CHANGED
@@ -28,3 +28,8 @@ Upload file pytorch_model.bin: 100%|██████████████
28
  Upload file training_args.bin: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████| 2.98k/2.98k [01:12<?, ?B/s]
29
  Upload file wandb/run-20220130_191954-1mtmnz5y/run-1mtmnz5y.wandb: 100%|█████████████████████████████████████████████████████████████████████| 201M/201M [01:12<00:00, 2.89MB/s]
30
 
 
 
 
 
 
 
28
  Upload file training_args.bin: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████| 2.98k/2.98k [01:12<?, ?B/s]
29
  Upload file wandb/run-20220130_191954-1mtmnz5y/run-1mtmnz5y.wandb: 100%|█████████████████████████████████████████████████████████████████████| 201M/201M [01:12<00:00, 2.89MB/s]
30
 
31
+ {'dataset': {'name': 'MOZILLA-FOUNDATION/COMMON_VOICE_8_0 - FR', 'type': 'common_voice', 'args': 'Config: fr, Training split: train+validation, Eval split: test'}}00, 1.02MB/s]
32
+ 02/01/2022 10:32:47 - WARNING - huggingface_hub.repository - To https://huggingface.co/AlexN/xls-r-300m-fr
33
+ f25a924..a37375f main -> main
34
+ To https://huggingface.co/AlexN/xls-r-300m-fr
35
+ f25a924..a37375f main -> main
wandb/run-20220201_103004-1yfj7vwy/files/wandb-summary.json CHANGED
@@ -1 +1 @@
1
- {"eval/loss": 21.909679412841797, "eval/wer": 1.0, "eval/runtime": 271.2636, "eval/samples_per_second": 16.475, "eval/steps_per_second": 0.258, "train/global_step": 0, "_runtime": 2, "_timestamp": 1643711406, "_step": 0}
 
1
+ {"eval/loss": 21.909679412841797, "eval/wer": 1.0, "eval/runtime": 271.2636, "eval/samples_per_second": 16.475, "eval/steps_per_second": 0.258, "train/global_step": 0, "_runtime": 2, "_timestamp": 1643711406, "_step": 0, "_wandb": {"runtime": 167}}
wandb/run-20220201_103004-1yfj7vwy/logs/debug-internal.log CHANGED
@@ -78,3 +78,115 @@
78
  2022-02-01 10:32:38,165 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: stop_status
79
  2022-02-01 10:32:42,989 INFO Thread-8 :61045 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/output.log
80
  2022-02-01 10:32:44,990 INFO Thread-8 :61045 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/output.log
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
78
  2022-02-01 10:32:38,165 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: stop_status
79
  2022-02-01 10:32:42,989 INFO Thread-8 :61045 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/output.log
80
  2022-02-01 10:32:44,990 INFO Thread-8 :61045 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/output.log
81
+ 2022-02-01 10:32:48,993 INFO Thread-8 :61045 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/output.log
82
+ 2022-02-01 10:32:52,987 DEBUG SenderThread:61045 [sender.py:send():234] send: telemetry
83
+ 2022-02-01 10:32:52,988 DEBUG SenderThread:61045 [sender.py:send():234] send: exit
84
+ 2022-02-01 10:32:52,988 INFO SenderThread:61045 [sender.py:send_exit():366] handling exit code: 0
85
+ 2022-02-01 10:32:52,988 INFO SenderThread:61045 [sender.py:send_exit():368] handling runtime: 167
86
+ 2022-02-01 10:32:52,989 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: poll_exit
87
+ 2022-02-01 10:32:52,990 INFO SenderThread:61045 [sender.py:_save_file():939] saving file wandb-summary.json with policy end
88
+ 2022-02-01 10:32:52,990 INFO SenderThread:61045 [sender.py:send_exit():374] send defer
89
+ 2022-02-01 10:32:52,990 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: poll_exit
90
+ 2022-02-01 10:32:52,991 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: defer
91
+ 2022-02-01 10:32:52,992 INFO HandlerThread:61045 [handler.py:handle_request_defer():147] handle defer: 0
92
+ 2022-02-01 10:32:52,992 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: defer
93
+ 2022-02-01 10:32:52,992 INFO SenderThread:61045 [sender.py:send_request_defer():383] handle sender defer: 0
94
+ 2022-02-01 10:32:52,992 INFO SenderThread:61045 [sender.py:transition_state():387] send defer: 1
95
+ 2022-02-01 10:32:52,993 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: defer
96
+ 2022-02-01 10:32:52,993 INFO HandlerThread:61045 [handler.py:handle_request_defer():147] handle defer: 1
97
+ 2022-02-01 10:32:52,996 INFO Thread-8 :61045 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/output.log
98
+ 2022-02-01 10:32:52,996 INFO Thread-8 :61045 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/wandb-summary.json
99
+ 2022-02-01 10:32:53,088 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: defer
100
+ 2022-02-01 10:32:53,088 INFO SenderThread:61045 [sender.py:send_request_defer():383] handle sender defer: 1
101
+ 2022-02-01 10:32:53,088 INFO SenderThread:61045 [sender.py:transition_state():387] send defer: 2
102
+ 2022-02-01 10:32:53,088 DEBUG SenderThread:61045 [sender.py:send():234] send: stats
103
+ 2022-02-01 10:32:53,089 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: defer
104
+ 2022-02-01 10:32:53,089 INFO HandlerThread:61045 [handler.py:handle_request_defer():147] handle defer: 2
105
+ 2022-02-01 10:32:53,090 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: defer
106
+ 2022-02-01 10:32:53,090 INFO SenderThread:61045 [sender.py:send_request_defer():383] handle sender defer: 2
107
+ 2022-02-01 10:32:53,090 INFO SenderThread:61045 [sender.py:transition_state():387] send defer: 3
108
+ 2022-02-01 10:32:53,090 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: defer
109
+ 2022-02-01 10:32:53,090 INFO HandlerThread:61045 [handler.py:handle_request_defer():147] handle defer: 3
110
+ 2022-02-01 10:32:53,091 DEBUG SenderThread:61045 [sender.py:send():234] send: summary
111
+ 2022-02-01 10:32:53,092 INFO SenderThread:61045 [sender.py:_save_file():939] saving file wandb-summary.json with policy end
112
+ 2022-02-01 10:32:53,092 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: defer
113
+ 2022-02-01 10:32:53,092 INFO SenderThread:61045 [sender.py:send_request_defer():383] handle sender defer: 3
114
+ 2022-02-01 10:32:53,092 INFO SenderThread:61045 [sender.py:transition_state():387] send defer: 4
115
+ 2022-02-01 10:32:53,093 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: defer
116
+ 2022-02-01 10:32:53,093 INFO HandlerThread:61045 [handler.py:handle_request_defer():147] handle defer: 4
117
+ 2022-02-01 10:32:53,093 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: defer
118
+ 2022-02-01 10:32:53,093 INFO SenderThread:61045 [sender.py:send_request_defer():383] handle sender defer: 4
119
+ 2022-02-01 10:32:53,103 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: poll_exit
120
+ 2022-02-01 10:32:53,344 INFO SenderThread:61045 [sender.py:transition_state():387] send defer: 5
121
+ 2022-02-01 10:32:53,344 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: poll_exit
122
+ 2022-02-01 10:32:53,345 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: defer
123
+ 2022-02-01 10:32:53,345 INFO HandlerThread:61045 [handler.py:handle_request_defer():147] handle defer: 5
124
+ 2022-02-01 10:32:53,345 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: defer
125
+ 2022-02-01 10:32:53,345 INFO SenderThread:61045 [sender.py:send_request_defer():383] handle sender defer: 5
126
+ 2022-02-01 10:32:53,345 INFO SenderThread:61045 [dir_watcher.py:finish():283] shutting down directory watcher
127
+ 2022-02-01 10:32:53,448 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: poll_exit
128
+ 2022-02-01 10:32:53,997 INFO Thread-8 :61045 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/config.yaml
129
+ 2022-02-01 10:32:53,998 INFO SenderThread:61045 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/wandb-summary.json
130
+ 2022-02-01 10:32:53,999 INFO SenderThread:61045 [dir_watcher.py:finish():313] scan: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files
131
+ 2022-02-01 10:32:53,999 INFO SenderThread:61045 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/output.log output.log
132
+ 2022-02-01 10:32:54,000 INFO SenderThread:61045 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/wandb-metadata.json wandb-metadata.json
133
+ 2022-02-01 10:32:54,001 INFO SenderThread:61045 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/config.yaml config.yaml
134
+ 2022-02-01 10:32:54,007 INFO SenderThread:61045 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/conda-environment.yaml conda-environment.yaml
135
+ 2022-02-01 10:32:54,014 INFO SenderThread:61045 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/wandb-summary.json wandb-summary.json
136
+ 2022-02-01 10:32:54,015 INFO SenderThread:61045 [dir_watcher.py:finish():327] scan save: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/requirements.txt requirements.txt
137
+ 2022-02-01 10:32:54,019 INFO SenderThread:61045 [sender.py:transition_state():387] send defer: 6
138
+ 2022-02-01 10:32:54,020 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: poll_exit
139
+ 2022-02-01 10:32:54,026 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: defer
140
+ 2022-02-01 10:32:54,026 INFO HandlerThread:61045 [handler.py:handle_request_defer():147] handle defer: 6
141
+ 2022-02-01 10:32:54,026 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: defer
142
+ 2022-02-01 10:32:54,027 INFO SenderThread:61045 [sender.py:send_request_defer():383] handle sender defer: 6
143
+ 2022-02-01 10:32:54,027 INFO SenderThread:61045 [file_pusher.py:finish():177] shutting down file pusher
144
+ 2022-02-01 10:32:54,122 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: poll_exit
145
+ 2022-02-01 10:32:54,123 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: poll_exit
146
+ 2022-02-01 10:32:54,225 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: poll_exit
147
+ 2022-02-01 10:32:54,226 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: poll_exit
148
+ 2022-02-01 10:32:54,328 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: poll_exit
149
+ 2022-02-01 10:32:54,329 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: poll_exit
150
+ 2022-02-01 10:32:54,431 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: poll_exit
151
+ 2022-02-01 10:32:54,432 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: poll_exit
152
+ 2022-02-01 10:32:54,524 INFO Thread-14 :61045 [upload_job.py:push():137] Uploaded file /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/wandb-summary.json
153
+ 2022-02-01 10:32:54,531 INFO Thread-12 :61045 [upload_job.py:push():137] Uploaded file /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/output.log
154
+ 2022-02-01 10:32:54,534 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: poll_exit
155
+ 2022-02-01 10:32:54,534 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: poll_exit
156
+ 2022-02-01 10:32:54,542 INFO Thread-15 :61045 [upload_job.py:push():137] Uploaded file /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/requirements.txt
157
+ 2022-02-01 10:32:54,556 INFO Thread-13 :61045 [upload_job.py:push():137] Uploaded file /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/files/config.yaml
158
+ 2022-02-01 10:32:54,636 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: poll_exit
159
+ 2022-02-01 10:32:54,637 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: poll_exit
160
+ 2022-02-01 10:32:54,739 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: poll_exit
161
+ 2022-02-01 10:32:54,739 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: poll_exit
162
+ 2022-02-01 10:32:54,757 INFO Thread-7 :61045 [sender.py:transition_state():387] send defer: 7
163
+ 2022-02-01 10:32:54,758 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: defer
164
+ 2022-02-01 10:32:54,759 INFO HandlerThread:61045 [handler.py:handle_request_defer():147] handle defer: 7
165
+ 2022-02-01 10:32:54,759 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: defer
166
+ 2022-02-01 10:32:54,759 INFO SenderThread:61045 [sender.py:send_request_defer():383] handle sender defer: 7
167
+ 2022-02-01 10:32:54,842 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: poll_exit
168
+ 2022-02-01 10:32:55,205 INFO SenderThread:61045 [sender.py:transition_state():387] send defer: 8
169
+ 2022-02-01 10:32:55,206 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: poll_exit
170
+ 2022-02-01 10:32:55,206 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: defer
171
+ 2022-02-01 10:32:55,207 INFO HandlerThread:61045 [handler.py:handle_request_defer():147] handle defer: 8
172
+ 2022-02-01 10:32:55,208 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: defer
173
+ 2022-02-01 10:32:55,208 INFO SenderThread:61045 [sender.py:send_request_defer():383] handle sender defer: 8
174
+ 2022-02-01 10:32:55,208 INFO SenderThread:61045 [sender.py:transition_state():387] send defer: 9
175
+ 2022-02-01 10:32:55,209 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: defer
176
+ 2022-02-01 10:32:55,210 INFO HandlerThread:61045 [handler.py:handle_request_defer():147] handle defer: 9
177
+ 2022-02-01 10:32:55,210 DEBUG SenderThread:61045 [sender.py:send():234] send: final
178
+ 2022-02-01 10:32:55,210 DEBUG SenderThread:61045 [sender.py:send():234] send: footer
179
+ 2022-02-01 10:32:55,210 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: defer
180
+ 2022-02-01 10:32:55,210 INFO SenderThread:61045 [sender.py:send_request_defer():383] handle sender defer: 9
181
+ 2022-02-01 10:32:55,309 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: poll_exit
182
+ 2022-02-01 10:32:55,310 DEBUG SenderThread:61045 [sender.py:send_request():248] send_request: poll_exit
183
+ 2022-02-01 10:32:55,310 INFO SenderThread:61045 [file_pusher.py:join():182] waiting for file pusher
184
+ 2022-02-01 10:32:55,613 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: get_summary
185
+ 2022-02-01 10:32:55,615 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: sampled_history
186
+ 2022-02-01 10:32:55,617 DEBUG HandlerThread:61045 [handler.py:handle_request():130] handle_request: shutdown
187
+ 2022-02-01 10:32:55,617 INFO HandlerThread:61045 [handler.py:finish():731] shutting down handler
188
+ 2022-02-01 10:32:56,210 INFO WriterThread:61045 [datastore.py:close():281] close: /workspace/xls-r-300m-fr/wandb/run-20220201_103004-1yfj7vwy/run-1yfj7vwy.wandb
189
+ 2022-02-01 10:32:56,611 INFO SenderThread:61045 [sender.py:finish():1070] shutting down sender
190
+ 2022-02-01 10:32:56,611 INFO SenderThread:61045 [file_pusher.py:finish():177] shutting down file pusher
191
+ 2022-02-01 10:32:56,611 INFO SenderThread:61045 [file_pusher.py:join():182] waiting for file pusher
192
+ 2022-02-01 10:32:56,615 INFO MainThread:61045 [internal.py:handle_exit():77] Internal process exited
wandb/run-20220201_103004-1yfj7vwy/logs/debug.log CHANGED
@@ -22,3 +22,109 @@ config: {}
22
  2022-02-01 10:30:06,517 INFO MainThread:58652 [wandb_init.py:init():633] run started, returning control to user process
23
  2022-02-01 10:30:06,519 INFO MainThread:58652 [wandb_run.py:_config_callback():956] config_cb None None {'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'architectures': ['Wav2Vec2ForPreTraining'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': 218, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'facebook/wav2vec2-xls-r-300m', 'transformers_version': '4.17.0.dev0', 'feat_extract_dropout': 0.0, 'model_type': 'wav2vec2', 'num_feat_extract_layers': 7, 'hidden_size': 1024, 'feat_extract_norm': 'layer', 'feat_extract_activation': 'gelu', 'conv_dim': [512, 512, 512, 512, 512, 512, 512], 'conv_stride': [5, 2, 2, 2, 2, 2, 2], 'conv_kernel': [10, 3, 3, 3, 3, 2, 2], 'conv_bias': True, 'num_conv_pos_embeddings': 128, 'num_conv_pos_embedding_groups': 16, 'num_hidden_layers': 24, 'intermediate_size': 4096, 'hidden_act': 'gelu', 'num_attention_heads': 16, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.05, 
'feat_proj_dropout': 0.0, 'final_dropout': 0.0, 'layerdrop': 0.0, 'layer_norm_eps': 1e-05, 'initializer_range': 0.02, 'vocab_size': 219, 'do_stable_layer_norm': True, 'use_weighted_layer_sum': False, 'apply_spec_augment': True, 'mask_time_prob': 0.6, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.25, 'mask_feature_length': 10, 'mask_feature_min_masks': 0, 'num_codevectors_per_group': 320, 'num_codevector_groups': 2, 'contrastive_logits_temperature': 0.1, 'feat_quantizer_dropout': 0.0, 'num_negatives': 100, 'codevector_dim': 768, 'proj_codevector_dim': 768, 'diversity_loss_weight': 0.1, 'ctc_loss_reduction': 'mean', 'ctc_zero_infinity': False, 'add_adapter': False, 'adapter_kernel_size': 3, 'adapter_stride': 2, 'num_adapter_layers': 3, 'output_hidden_size': 1024, 'classifier_proj_size': 256, 'tdnn_dim': [512, 512, 512, 512, 1500], 'tdnn_kernel': [5, 3, 3, 1, 1], 'tdnn_dilation': [1, 2, 3, 1, 1], 'xvector_output_dim': 512, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': False, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 64, 'per_device_eval_batch_size': 64, 'per_gpu_train_batch_size': 'None', 'per_gpu_eval_batch_size': 'None', 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': 'None', 'learning_rate': 0.0001, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 4.0, 'max_steps': -1, 'lr_scheduler_type': 'linear', 'warmup_ratio': 0.0, 'warmup_steps': 2700, 'log_level': -1, 'log_level_replica': -1, 'log_on_each_node': True, 'logging_dir': './runs/Feb01_10-24-12_job-1abccd0a-3293-4ffe-8274-9e8f841f653f', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 500, 'save_total_limit': 2, 'save_on_each_node': False, 'no_cuda': False, 'seed': 42, 'bf16': False, 'fp16': 
True, 'fp16_opt_level': 'O1', 'half_precision_backend': 'amp', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': 'None', 'local_rank': -1, 'xpu_backend': 'None', 'tpu_num_cores': 'None', 'tpu_metrics_debug': False, 'debug': '[]', 'dataloader_drop_last': False, 'eval_steps': 500, 'dataloader_num_workers': 0, 'past_index': -1, 'run_name': 'xls-r-300m-fr', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': 'None', 'load_best_model_at_end': True, 'metric_for_best_model': 'loss', 'greater_is_better': False, 'ignore_data_skip': False, 'sharded_ddp': '[]', 'deepspeed': 'None', 'label_smoothing_factor': 0.0, 'optim': 'adamw_hf', 'adafactor': False, 'group_by_length': True, 'length_column_name': 'input_length', 'report_to': "['wandb']", 'ddp_find_unused_parameters': 'None', 'ddp_bucket_cap_mb': 'None', 'dataloader_pin_memory': True, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': 'None', 'hub_model_id': 'None', 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'gradient_checkpointing': True, 'fp16_backend': 'auto', 'push_to_hub_model_id': 'None', 'push_to_hub_organization': 'None', 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', '_n_gpu': 1, 'mp_parameters': '', 'train_batch_size': 64, 'eval_batch_size': 64}
24
  2022-02-01 10:30:06,524 INFO MainThread:58652 [wandb_watch.py:watch():43] Watching
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22
  2022-02-01 10:30:06,517 INFO MainThread:58652 [wandb_init.py:init():633] run started, returning control to user process
23
  2022-02-01 10:30:06,519 INFO MainThread:58652 [wandb_run.py:_config_callback():956] config_cb None None {'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'architectures': ['Wav2Vec2ForPreTraining'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': 218, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'facebook/wav2vec2-xls-r-300m', 'transformers_version': '4.17.0.dev0', 'feat_extract_dropout': 0.0, 'model_type': 'wav2vec2', 'num_feat_extract_layers': 7, 'hidden_size': 1024, 'feat_extract_norm': 'layer', 'feat_extract_activation': 'gelu', 'conv_dim': [512, 512, 512, 512, 512, 512, 512], 'conv_stride': [5, 2, 2, 2, 2, 2, 2], 'conv_kernel': [10, 3, 3, 3, 3, 2, 2], 'conv_bias': True, 'num_conv_pos_embeddings': 128, 'num_conv_pos_embedding_groups': 16, 'num_hidden_layers': 24, 'intermediate_size': 4096, 'hidden_act': 'gelu', 'num_attention_heads': 16, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.05, 
'feat_proj_dropout': 0.0, 'final_dropout': 0.0, 'layerdrop': 0.0, 'layer_norm_eps': 1e-05, 'initializer_range': 0.02, 'vocab_size': 219, 'do_stable_layer_norm': True, 'use_weighted_layer_sum': False, 'apply_spec_augment': True, 'mask_time_prob': 0.6, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.25, 'mask_feature_length': 10, 'mask_feature_min_masks': 0, 'num_codevectors_per_group': 320, 'num_codevector_groups': 2, 'contrastive_logits_temperature': 0.1, 'feat_quantizer_dropout': 0.0, 'num_negatives': 100, 'codevector_dim': 768, 'proj_codevector_dim': 768, 'diversity_loss_weight': 0.1, 'ctc_loss_reduction': 'mean', 'ctc_zero_infinity': False, 'add_adapter': False, 'adapter_kernel_size': 3, 'adapter_stride': 2, 'num_adapter_layers': 3, 'output_hidden_size': 1024, 'classifier_proj_size': 256, 'tdnn_dim': [512, 512, 512, 512, 1500], 'tdnn_kernel': [5, 3, 3, 1, 1], 'tdnn_dilation': [1, 2, 3, 1, 1], 'xvector_output_dim': 512, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': False, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 64, 'per_device_eval_batch_size': 64, 'per_gpu_train_batch_size': 'None', 'per_gpu_eval_batch_size': 'None', 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': 'None', 'learning_rate': 0.0001, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 4.0, 'max_steps': -1, 'lr_scheduler_type': 'linear', 'warmup_ratio': 0.0, 'warmup_steps': 2700, 'log_level': -1, 'log_level_replica': -1, 'log_on_each_node': True, 'logging_dir': './runs/Feb01_10-24-12_job-1abccd0a-3293-4ffe-8274-9e8f841f653f', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 500, 'save_total_limit': 2, 'save_on_each_node': False, 'no_cuda': False, 'seed': 42, 'bf16': False, 'fp16': 
True, 'fp16_opt_level': 'O1', 'half_precision_backend': 'amp', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': 'None', 'local_rank': -1, 'xpu_backend': 'None', 'tpu_num_cores': 'None', 'tpu_metrics_debug': False, 'debug': '[]', 'dataloader_drop_last': False, 'eval_steps': 500, 'dataloader_num_workers': 0, 'past_index': -1, 'run_name': 'xls-r-300m-fr', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': 'None', 'load_best_model_at_end': True, 'metric_for_best_model': 'loss', 'greater_is_better': False, 'ignore_data_skip': False, 'sharded_ddp': '[]', 'deepspeed': 'None', 'label_smoothing_factor': 0.0, 'optim': 'adamw_hf', 'adafactor': False, 'group_by_length': True, 'length_column_name': 'input_length', 'report_to': "['wandb']", 'ddp_find_unused_parameters': 'None', 'ddp_bucket_cap_mb': 'None', 'dataloader_pin_memory': True, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': 'None', 'hub_model_id': 'None', 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'gradient_checkpointing': True, 'fp16_backend': 'auto', 'push_to_hub_model_id': 'None', 'push_to_hub_organization': 'None', 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', '_n_gpu': 1, 'mp_parameters': '', 'train_batch_size': 64, 'eval_batch_size': 64}
24
  2022-02-01 10:30:06,524 INFO MainThread:58652 [wandb_watch.py:watch():43] Watching
25
+ 2022-02-01 10:32:50,469 INFO MainThread:58652 [wandb_run.py:_atexit_cleanup():1780] got exitcode: 0
26
+ 2022-02-01 10:32:50,473 INFO MainThread:58652 [wandb_run.py:_restore():1752] restore
27
+ 2022-02-01 10:32:52,992 INFO MainThread:58652 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
28
+ wandb_count: 1
29
+ }
30
+ pusher_stats {
31
+ uploaded_bytes: 2158
32
+ total_bytes: 2158
33
+ }
34
+
35
+ 2022-02-01 10:32:53,346 INFO MainThread:58652 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
36
+ wandb_count: 1
37
+ }
38
+ pusher_stats {
39
+ uploaded_bytes: 2158
40
+ total_bytes: 2158
41
+ }
42
+
43
+ 2022-02-01 10:32:54,021 INFO MainThread:58652 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
44
+ wandb_count: 5
45
+ }
46
+ pusher_stats {
47
+ uploaded_bytes: 2158
48
+ total_bytes: 18907
49
+ }
50
+
51
+ 2022-02-01 10:32:54,124 INFO MainThread:58652 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
52
+ wandb_count: 5
53
+ }
54
+ pusher_stats {
55
+ uploaded_bytes: 2158
56
+ total_bytes: 18907
57
+ }
58
+
59
+ 2022-02-01 10:32:54,227 INFO MainThread:58652 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
60
+ wandb_count: 5
61
+ }
62
+ pusher_stats {
63
+ uploaded_bytes: 2158
64
+ total_bytes: 18907
65
+ }
66
+
67
+ 2022-02-01 10:32:54,330 INFO MainThread:58652 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
68
+ wandb_count: 5
69
+ }
70
+ pusher_stats {
71
+ uploaded_bytes: 18907
72
+ total_bytes: 18907
73
+ }
74
+
75
+ 2022-02-01 10:32:54,432 INFO MainThread:58652 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
76
+ wandb_count: 5
77
+ }
78
+ pusher_stats {
79
+ uploaded_bytes: 18907
80
+ total_bytes: 18907
81
+ }
82
+
83
+ 2022-02-01 10:32:54,535 INFO MainThread:58652 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
84
+ wandb_count: 5
85
+ }
86
+ pusher_stats {
87
+ uploaded_bytes: 18907
88
+ total_bytes: 18907
89
+ }
90
+
91
+ 2022-02-01 10:32:54,637 INFO MainThread:58652 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
92
+ wandb_count: 5
93
+ }
94
+ pusher_stats {
95
+ uploaded_bytes: 18907
96
+ total_bytes: 18907
97
+ }
98
+
99
+ 2022-02-01 10:32:54,740 INFO MainThread:58652 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
100
+ wandb_count: 5
101
+ }
102
+ pusher_stats {
103
+ uploaded_bytes: 18907
104
+ total_bytes: 18907
105
+ }
106
+
107
+ 2022-02-01 10:32:55,208 INFO MainThread:58652 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
108
+ wandb_count: 5
109
+ }
110
+ pusher_stats {
111
+ uploaded_bytes: 18907
112
+ total_bytes: 18907
113
+ }
114
+
115
+ 2022-02-01 10:32:55,612 INFO MainThread:58652 [wandb_run.py:_wait_for_finish():1912] got exit ret: done: true
116
+ exit_result {
117
+ }
118
+ file_counts {
119
+ wandb_count: 5
120
+ }
121
+ pusher_stats {
122
+ uploaded_bytes: 18907
123
+ total_bytes: 18907
124
+ }
125
+ local_info {
126
+ }
127
+
128
+ 2022-02-01 10:32:56,753 INFO MainThread:58652 [wandb_run.py:_append_history():2130] rendering history
129
+ 2022-02-01 10:32:56,754 INFO MainThread:58652 [wandb_run.py:_append_summary():2085] rendering summary
130
+ 2022-02-01 10:32:56,754 INFO MainThread:58652 [wandb_run.py:_append_files():2180] logging synced files
wandb/run-20220201_103004-1yfj7vwy/run-1yfj7vwy.wandb CHANGED
Binary files a/wandb/run-20220201_103004-1yfj7vwy/run-1yfj7vwy.wandb and b/wandb/run-20220201_103004-1yfj7vwy/run-1yfj7vwy.wandb differ
 
wandb/run-20220201_115151-3ujx6xdv/files/conda-environment.yaml ADDED
File without changes
wandb/run-20220201_115151-3ujx6xdv/files/config.yaml ADDED
The diff for this file is too large to render. See raw diff
 
wandb/run-20220201_115151-3ujx6xdv/files/output.log ADDED
@@ -0,0 +1,243 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ 0%| | 0/27860 [00:00<?, ?it/s]
3
+
4
+
5
+
6
+
7
+
8
+
9
+
10
+
11
+
12
+
13
+
14
+
15
+
16
+
17
+
18
+
19
+
20
+
21
+
22
+
23
+
24
+
25
+
26
+
27
+
28
+
29
+
30
+
31
+
32
+
33
+
34
+
35
+
36
+
37
+
38
+
39
+
40
+
41
+
42
+
43
+
44
+
45
+
46
+
47
+
48
+
49
+
50
+
51
+
52
+
53
+
54
+
55
+
56
+
57
+
58
+
59
+
60
+
61
+
62
+
63
+
64
+
65
+
66
+
67
+
68
+
69
+
70
+
71
+
72
+
73
+
74
+
75
+
76
+
77
+
78
+
79
+
80
+
81
+
82
+
83
+
84
+
85
+
86
+
87
+
88
+
89
+
90
+
91
+
92
+
93
+
94
+
95
+
96
+
97
+
98
+
99
+
100
+ 65%|███████████████████████████████████████████████████████████████████████████████████▏ | 18100/27860 [2:16:21<10:06:40, 3.73s/it]
101
+
102
+
103
+
104
+
105
+
106
+
107
+
108
+
109
+
110
+
111
+
112
+
113
+
114
+
115
+
116
+
117
+
118
+
119
+
120
+
121
+
122
+
123
+
124
+
125
+
126
+
127
+
128
+
129
+
130
+
131
+
132
+
133
+
134
+
135
+
136
+
137
+
138
+
139
+
140
+
141
+
142
+
143
+
144
+
145
+
146
+
147
+
148
+
149
+
150
+
151
+
152
+
153
+
154
+
155
+
156
+
157
+
158
+
159
+
160
+
161
+
162
+
163
+
164
+
165
+
166
+
167
+
168
+
169
+
170
+
171
+
172
+
173
+
174
+
175
+
176
+
177
+
178
+
179
+
180
+
181
+
182
+
183
+
184
+
185
+
186
+
187
+
188
+
189
+
190
+
191
+
192
+
193
+
194
+
195
+
196
+
197
+
198
+
199
+ 65%|███████████████████████████████████████████████████████████████████████████████████▌ | 18200/27860 [2:22:00<10:24:13, 3.88s/it]
200
+
201
+
202
+
203
+
204
+
205
+
206
+
207
+
208
+
209
+
210
+
211
+
212
+
213
+
214
+
215
+
216
+
217
+
218
+
219
+
220
+
221
+
222
+
223
+
224
+
225
+
226
+
227
+
228
+
229
+ File "run_speech_recognition_ctc.py", line 748, in <module>████████████████████████████▍ | 18230/27860 [2:23:19<4:36:39, 1.72s/it]
230
+ main()
231
+ File "run_speech_recognition_ctc.py", line 699, in main
232
+ train_result = trainer.train(resume_from_checkpoint=checkpoint)
233
+ File "/opt/conda/lib/python3.8/site-packages/transformers/trainer.py", line 1373, in train
234
+ tr_loss_step = self.training_step(model, inputs)
235
+ File "/opt/conda/lib/python3.8/site-packages/transformers/trainer.py", line 1948, in training_step
236
+ loss = self.compute_loss(model, inputs)
237
+ File "/opt/conda/lib/python3.8/site-packages/transformers/trainer.py", line 1980, in compute_loss
238
+ outputs = model(**inputs)
239
+ File "/opt/conda/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1102, in _call_impl
240
+ return forward_call(*input, **kwargs)
241
+ File "/opt/conda/lib/python3.8/site-packages/transformers/models/wav2vec2/modeling_wav2vec2.py", line 1758, in forward
242
+ if labels.max() >= self.config.vocab_size:
243
+ KeyboardInterrupt
wandb/run-20220201_115151-3ujx6xdv/files/requirements.txt ADDED
@@ -0,0 +1,180 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ aiohttp==3.8.1
2
+ aiosignal==1.2.0
3
+ analytics-python==1.4.0
4
+ anyio==3.5.0
5
+ appdirs==1.4.4
6
+ argon2-cffi-bindings==21.2.0
7
+ argon2-cffi==21.3.0
8
+ asgiref==3.5.0
9
+ asttokens==2.0.5
10
+ async-timeout==4.0.2
11
+ attrs==21.4.0
12
+ audioread==2.1.9
13
+ backcall==0.2.0
14
+ backoff==1.10.0
15
+ bcrypt==3.2.0
16
+ beautifulsoup4==4.9.3
17
+ black==21.12b0
18
+ bleach==4.1.0
19
+ brotlipy==0.7.0
20
+ certifi==2020.12.5
21
+ cffi==1.14.3
22
+ chardet==3.0.4
23
+ charset-normalizer==2.0.10
24
+ click==8.0.3
25
+ conda-build==3.21.4
26
+ conda-package-handling==1.7.2
27
+ conda==4.9.2
28
+ configparser==5.2.0
29
+ cryptography==3.2.1
30
+ cycler==0.11.0
31
+ datasets==1.18.2.dev0
32
+ debugpy==1.5.1
33
+ decorator==4.4.2
34
+ defusedxml==0.7.1
35
+ dill==0.3.4
36
+ dnspython==2.1.0
37
+ docker-pycreds==0.4.0
38
+ entrypoints==0.3
39
+ executing==0.8.2
40
+ fastapi==0.73.0
41
+ ffmpy==0.3.0
42
+ filelock==3.0.12
43
+ fonttools==4.29.0
44
+ frozenlist==1.3.0
45
+ fsspec==2022.1.0
46
+ gitdb==4.0.9
47
+ gitpython==3.1.26
48
+ glob2==0.7
49
+ gradio==2.7.5.2
50
+ h11==0.13.0
51
+ huggingface-hub==0.4.0
52
+ idna==2.10
53
+ importlib-resources==5.4.0
54
+ ipykernel==6.7.0
55
+ ipython-genutils==0.2.0
56
+ ipython==8.0.1
57
+ ipywidgets==7.6.3
58
+ jedi==0.17.0
59
+ jinja2==2.11.3
60
+ jiwer==2.3.0
61
+ joblib==1.1.0
62
+ json5==0.9.6
63
+ jsonschema==4.4.0
64
+ jupyter-client==7.1.2
65
+ jupyter-core==4.9.1
66
+ jupyterlab-pygments==0.1.2
67
+ jupyterlab-server==1.2.0
68
+ jupyterlab-widgets==1.0.2
69
+ jupyterlab==2.2.9
70
+ kiwisolver==1.3.2
71
+ libarchive-c==2.9
72
+ librosa==0.8.1
73
+ llvmlite==0.38.0
74
+ markdown2==2.4.2
75
+ markupsafe==1.1.1
76
+ matplotlib-inline==0.1.3
77
+ matplotlib==3.5.1
78
+ mistune==0.8.4
79
+ mkl-fft==1.3.0
80
+ mkl-random==1.1.1
81
+ mkl-service==2.3.0
82
+ monotonic==1.6
83
+ multidict==6.0.2
84
+ multiprocess==0.70.12.2
85
+ mypy-extensions==0.4.3
86
+ nano==0.10.0
87
+ nbclient==0.5.10
88
+ nbconvert==6.4.1
89
+ nbformat==5.1.3
90
+ nest-asyncio==1.5.4
91
+ notebook==6.4.8
92
+ numba==0.55.1
93
+ numpy==1.19.2
94
+ olefile==0.46
95
+ packaging==21.3
96
+ pandas==1.4.0
97
+ pandocfilters==1.5.0
98
+ paramiko==2.9.2
99
+ parso==0.8.1
100
+ pathspec==0.9.0
101
+ pathtools==0.1.2
102
+ pexpect==4.8.0
103
+ pickleshare==0.7.5
104
+ pillow==8.1.2
105
+ pip==21.3.1
106
+ pkginfo==1.7.0
107
+ platformdirs==2.4.1
108
+ pooch==1.6.0
109
+ prometheus-client==0.13.0
110
+ promise==2.3
111
+ prompt-toolkit==3.0.8
112
+ protobuf==3.19.4
113
+ psutil==5.8.0
114
+ ptyprocess==0.7.0
115
+ pure-eval==0.2.2
116
+ pyarrow==6.0.1
117
+ pycosat==0.6.3
118
+ pycparser==2.20
119
+ pycryptodome==3.13.0
120
+ pydantic==1.9.0
121
+ pydub==0.25.1
122
+ pygments==2.8.0
123
+ pynacl==1.5.0
124
+ pyopenssl==19.1.0
125
+ pyparsing==3.0.7
126
+ pyrsistent==0.18.1
127
+ pysocks==1.7.1
128
+ python-dateutil==2.8.2
129
+ python-etcd==0.4.5
130
+ python-levenshtein==0.12.2
131
+ python-multipart==0.0.5
132
+ pytz==2021.1
133
+ pyyaml==5.4.1
134
+ pyzmq==22.3.0
135
+ regex==2022.1.18
136
+ requests==2.24.0
137
+ resampy==0.2.2
138
+ ruamel-yaml==0.15.87
139
+ sacremoses==0.0.47
140
+ scikit-learn==1.0.2
141
+ scipy==1.7.3
142
+ send2trash==1.8.0
143
+ sentry-sdk==1.5.4
144
+ setuptools==50.3.1.post20201107
145
+ shortuuid==1.0.8
146
+ six==1.15.0
147
+ smmap==5.0.0
148
+ sniffio==1.2.0
149
+ soundfile==0.10.3.post1
150
+ soupsieve==2.2
151
+ stack-data==0.1.4
152
+ starlette==0.17.1
153
+ subprocess32==3.5.4
154
+ termcolor==1.1.0
155
+ terminado==0.13.1
156
+ testpath==0.5.0
157
+ threadpoolctl==3.0.0
158
+ tokenizers==0.11.4
159
+ tomli==1.2.3
160
+ torch==1.10.2
161
+ torchaudio==0.10.2
162
+ torchelastic==0.2.2
163
+ torchtext==0.9.1
164
+ torchvision==0.9.1
165
+ tornado==6.1
166
+ tqdm==4.62.3
167
+ traitlets==5.1.1
168
+ transformers==4.17.0.dev0
169
+ typing-extensions==4.0.1
170
+ urllib3==1.25.11
171
+ uvicorn==0.17.1
172
+ wandb==0.12.9
173
+ wcwidth==0.2.5
174
+ webencodings==0.5.1
175
+ wheel==0.35.1
176
+ widgetsnbextension==3.5.2
177
+ xxhash==2.0.2
178
+ yarl==1.7.2
179
+ yaspin==2.1.0
180
+ zipp==3.7.0
wandb/run-20220201_115151-3ujx6xdv/files/wandb-metadata.json ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "os": "Linux-4.15.0-151-generic-x86_64-with-glibc2.10",
3
+ "python": "3.8.8",
4
+ "heartbeatAt": "2022-02-01T11:51:53.049589",
5
+ "startedAt": "2022-02-01T11:51:51.626030",
6
+ "docker": null,
7
+ "gpu": "Tesla V100S-PCIE-32GB",
8
+ "gpu_count": 1,
9
+ "cpu_count": 60,
10
+ "cuda": null,
11
+ "args": [
12
+ "--dataset_name=mozilla-foundation/common_voice_8_0",
13
+ "--model_name_or_path=./checkpoint-18000",
14
+ "--dataset_config_name=fr",
15
+ "--tokenizer_name_or_path=./",
16
+ "--output_dir=./",
17
+ "--overwrite_output_dir",
18
+ "--num_train_epochs=4",
19
+ "--per_device_train_batch_size=64",
20
+ "--per_device_eval_batch_size=64",
21
+ "--gradient_accumulation_steps=1",
22
+ "--learning_rate=1e-4",
23
+ "--warmup_steps=2700",
24
+ "--length_column_name=input_length",
25
+ "--evaluation_strategy=steps",
26
+ "--text_column_name=sentence",
27
+ "--save_steps=500",
28
+ "--eval_steps=500",
29
+ "--logging_steps=100",
30
+ "--layerdrop=0.0",
31
+ "--activation_dropout=0.05",
32
+ "--save_total_limit=2",
33
+ "--freeze_feature_encoder",
34
+ "--feat_proj_dropout=0.0",
35
+ "--mask_time_prob=0.6",
36
+ "--mask_time_length=10",
37
+ "--mask_feature_prob=0.25",
38
+ "--mask_feature_length=10",
39
+ "--gradient_checkpointing",
40
+ "--report_to=wandb",
41
+ "--run_name=xls-r-300m-fr",
42
+ "--max_eval_samples=4500",
43
+ "--max_duration_in_seconds=10",
44
+ "--use_auth_token",
45
+ "--fp16",
46
+ "--group_by_length",
47
+ "--preprocessing_num_workers=64",
48
+ "--do_train",
49
+ "--do_eval",
50
+ "--load_best_model_at_end",
51
+ "--push_to_hub"
52
+ ],
53
+ "state": "running",
54
+ "program": "run_speech_recognition_ctc.py",
55
+ "codePath": "run_speech_recognition_ctc.py",
56
+ "git": {
57
+ "remote": "https://huggingface.co/AlexN/xls-r-300m-fr",
58
+ "commit": "a37375f05121cad39aba4409ef22f6f71610efc6"
59
+ },
60
+ "email": "[email protected]",
61
+ "root": "/workspace/xls-r-300m-fr",
62
+ "host": "job-1abccd0a-3293-4ffe-8274-9e8f841f653f",
63
+ "username": "ovh",
64
+ "executable": "/opt/conda/bin/python"
65
+ }
wandb/run-20220201_115151-3ujx6xdv/files/wandb-summary.json ADDED
The diff for this file is too large to render. See raw diff
 
wandb/run-20220201_115151-3ujx6xdv/logs/debug-internal.log ADDED
The diff for this file is too large to render. See raw diff
 
wandb/run-20220201_115151-3ujx6xdv/logs/debug.log ADDED
@@ -0,0 +1,162 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2022-02-01 11:51:51,630 INFO MainThread:1674 [wandb_setup.py:_flush():71] setting env: {}
2
+ 2022-02-01 11:51:51,630 INFO MainThread:1674 [wandb_setup.py:_flush():71] setting login settings: {}
3
+ 2022-02-01 11:51:51,630 INFO MainThread:1674 [wandb_init.py:_log_setup():371] Logging user logs to /workspace/xls-r-300m-fr/wandb/run-20220201_115151-3ujx6xdv/logs/debug.log
4
+ 2022-02-01 11:51:51,630 INFO MainThread:1674 [wandb_init.py:_log_setup():372] Logging internal logs to /workspace/xls-r-300m-fr/wandb/run-20220201_115151-3ujx6xdv/logs/debug-internal.log
5
+ 2022-02-01 11:51:51,631 INFO MainThread:1674 [wandb_init.py:init():404] calling init triggers
6
+ 2022-02-01 11:51:51,631 INFO MainThread:1674 [wandb_init.py:init():409] wandb.init called with sweep_config: {}
7
+ config: {}
8
+ 2022-02-01 11:51:51,631 INFO MainThread:1674 [wandb_init.py:init():460] starting backend
9
+ 2022-02-01 11:51:51,631 INFO MainThread:1674 [backend.py:_multiprocessing_setup():99] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
10
+ 2022-02-01 11:51:51,727 INFO MainThread:1674 [backend.py:ensure_launched():216] starting backend process...
11
+ 2022-02-01 11:51:51,860 INFO MainThread:1674 [backend.py:ensure_launched():221] started backend process with pid: 32352
12
+ 2022-02-01 11:51:51,865 INFO MainThread:1674 [wandb_init.py:init():469] backend started and connected
13
+ 2022-02-01 11:51:51,874 INFO MainThread:1674 [wandb_init.py:init():533] updated telemetry
14
+ 2022-02-01 11:51:52,115 INFO MainThread:1674 [wandb_init.py:init():563] communicating current version
15
+ 2022-02-01 11:51:52,852 INFO MainThread:1674 [wandb_init.py:init():568] got version response
16
+ 2022-02-01 11:51:52,852 INFO MainThread:1674 [wandb_init.py:init():578] communicating run to backend with 30 second timeout
17
+ 2022-02-01 11:51:53,041 INFO MainThread:1674 [wandb_init.py:init():606] starting run threads in backend
18
+ 2022-02-01 11:51:53,693 INFO MainThread:1674 [wandb_run.py:_console_start():1810] atexit reg
19
+ 2022-02-01 11:51:53,694 INFO MainThread:1674 [wandb_run.py:_redirect():1684] redirect: SettingsConsole.REDIRECT
20
+ 2022-02-01 11:51:53,695 INFO MainThread:1674 [wandb_run.py:_redirect():1689] Redirecting console.
21
+ 2022-02-01 11:51:53,701 INFO MainThread:1674 [wandb_run.py:_redirect():1745] Redirects installed.
22
+ 2022-02-01 11:51:53,701 INFO MainThread:1674 [wandb_init.py:init():633] run started, returning control to user process
23
+ 2022-02-01 11:51:53,703 INFO MainThread:1674 [wandb_run.py:_config_callback():956] config_cb None None {'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'architectures': ['Wav2Vec2ForCTC'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': 218, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': './checkpoint-18000', 'transformers_version': '4.17.0.dev0', 'feat_extract_dropout': 0.0, 'model_type': 'wav2vec2', 'num_feat_extract_layers': 7, 'hidden_size': 1024, 'feat_extract_norm': 'layer', 'feat_extract_activation': 'gelu', 'conv_dim': [512, 512, 512, 512, 512, 512, 512], 'conv_stride': [5, 2, 2, 2, 2, 2, 2], 'conv_kernel': [10, 3, 3, 3, 3, 2, 2], 'conv_bias': True, 'num_conv_pos_embeddings': 128, 'num_conv_pos_embedding_groups': 16, 'num_hidden_layers': 24, 'intermediate_size': 4096, 'hidden_act': 'gelu', 'num_attention_heads': 16, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.05, 'feat_proj_dropout': 
0.0, 'final_dropout': 0.0, 'layerdrop': 0.0, 'layer_norm_eps': 1e-05, 'initializer_range': 0.02, 'vocab_size': 219, 'do_stable_layer_norm': True, 'use_weighted_layer_sum': False, 'apply_spec_augment': True, 'mask_time_prob': 0.6, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.25, 'mask_feature_length': 10, 'mask_feature_min_masks': 0, 'num_codevectors_per_group': 320, 'num_codevector_groups': 2, 'contrastive_logits_temperature': 0.1, 'feat_quantizer_dropout': 0.0, 'num_negatives': 100, 'codevector_dim': 768, 'proj_codevector_dim': 768, 'diversity_loss_weight': 0.1, 'ctc_loss_reduction': 'mean', 'ctc_zero_infinity': False, 'add_adapter': False, 'adapter_kernel_size': 3, 'adapter_stride': 2, 'num_adapter_layers': 3, 'output_hidden_size': 1024, 'classifier_proj_size': 256, 'tdnn_dim': [512, 512, 512, 512, 1500], 'tdnn_kernel': [5, 3, 3, 1, 1], 'tdnn_dilation': [1, 2, 3, 1, 1], 'xvector_output_dim': 512, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': True, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 64, 'per_device_eval_batch_size': 64, 'per_gpu_train_batch_size': 'None', 'per_gpu_eval_batch_size': 'None', 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': 'None', 'learning_rate': 0.0001, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 4.0, 'max_steps': -1, 'lr_scheduler_type': 'linear', 'warmup_ratio': 0.0, 'warmup_steps': 2700, 'log_level': -1, 'log_level_replica': -1, 'log_on_each_node': True, 'logging_dir': './runs/Feb01_10-33-03_job-1abccd0a-3293-4ffe-8274-9e8f841f653f', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 500, 'save_total_limit': 2, 'save_on_each_node': False, 'no_cuda': False, 'seed': 42, 'bf16': False, 'fp16': True, 'fp16_opt_level': 
'O1', 'half_precision_backend': 'amp', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': 'None', 'local_rank': -1, 'xpu_backend': 'None', 'tpu_num_cores': 'None', 'tpu_metrics_debug': False, 'debug': '[]', 'dataloader_drop_last': False, 'eval_steps': 500, 'dataloader_num_workers': 0, 'past_index': -1, 'run_name': 'xls-r-300m-fr', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': 'None', 'load_best_model_at_end': True, 'metric_for_best_model': 'loss', 'greater_is_better': False, 'ignore_data_skip': False, 'sharded_ddp': '[]', 'deepspeed': 'None', 'label_smoothing_factor': 0.0, 'optim': 'adamw_hf', 'adafactor': False, 'group_by_length': True, 'length_column_name': 'input_length', 'report_to': "['wandb']", 'ddp_find_unused_parameters': 'None', 'ddp_bucket_cap_mb': 'None', 'dataloader_pin_memory': True, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': 'None', 'hub_model_id': 'None', 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'gradient_checkpointing': True, 'fp16_backend': 'auto', 'push_to_hub_model_id': 'None', 'push_to_hub_organization': 'None', 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', '_n_gpu': 1, 'mp_parameters': '', 'train_batch_size': 64, 'eval_batch_size': 64}
24
+ 2022-02-01 11:51:53,707 INFO MainThread:1674 [wandb_watch.py:watch():43] Watching
25
+ 2022-02-01 14:15:18,365 INFO MainThread:1674 [wandb_run.py:_atexit_cleanup():1780] got exitcode: 255
26
+ 2022-02-01 14:15:18,367 INFO MainThread:1674 [wandb_run.py:_restore():1752] restore
27
+ 2022-02-01 14:15:20,545 INFO MainThread:1674 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
28
+ wandb_count: 1
29
+ }
30
+ pusher_stats {
31
+ uploaded_bytes: 2170
32
+ total_bytes: 2170
33
+ }
34
+
35
+ 2022-02-01 14:15:20,670 INFO MainThread:1674 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
36
+ wandb_count: 1
37
+ }
38
+ pusher_stats {
39
+ uploaded_bytes: 2170
40
+ total_bytes: 2170
41
+ }
42
+
43
+ 2022-02-01 14:15:21,400 INFO MainThread:1674 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
44
+ wandb_count: 1
45
+ }
46
+ pusher_stats {
47
+ uploaded_bytes: 2170
48
+ total_bytes: 2170
49
+ }
50
+
51
+ 2022-02-01 14:15:21,882 INFO MainThread:1674 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
52
+ wandb_count: 4
53
+ }
54
+ pusher_stats {
55
+ uploaded_bytes: 2170
56
+ total_bytes: 875506
57
+ }
58
+
59
+ 2022-02-01 14:15:21,985 INFO MainThread:1674 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
60
+ wandb_count: 5
61
+ }
62
+ pusher_stats {
63
+ uploaded_bytes: 2170
64
+ total_bytes: 878639
65
+ }
66
+
67
+ 2022-02-01 14:15:22,092 INFO MainThread:1674 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
68
+ wandb_count: 5
69
+ }
70
+ pusher_stats {
71
+ uploaded_bytes: 2170
72
+ total_bytes: 878639
73
+ }
74
+
75
+ 2022-02-01 14:15:22,196 INFO MainThread:1674 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
76
+ wandb_count: 5
77
+ }
78
+ pusher_stats {
79
+ uploaded_bytes: 878639
80
+ total_bytes: 878639
81
+ }
82
+
83
+ 2022-02-01 14:15:22,299 INFO MainThread:1674 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
84
+ wandb_count: 5
85
+ }
86
+ pusher_stats {
87
+ uploaded_bytes: 878639
88
+ total_bytes: 878639
89
+ }
90
+
91
+ 2022-02-01 14:15:22,401 INFO MainThread:1674 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
92
+ wandb_count: 5
93
+ }
94
+ pusher_stats {
95
+ uploaded_bytes: 878639
96
+ total_bytes: 878639
97
+ }
98
+
99
+ 2022-02-01 14:15:22,504 INFO MainThread:1674 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
100
+ wandb_count: 5
101
+ }
102
+ pusher_stats {
103
+ uploaded_bytes: 878639
104
+ total_bytes: 878639
105
+ }
106
+
107
+ 2022-02-01 14:15:22,606 INFO MainThread:1674 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
108
+ wandb_count: 5
109
+ }
110
+ pusher_stats {
111
+ uploaded_bytes: 878639
112
+ total_bytes: 878639
113
+ }
114
+
115
+ 2022-02-01 14:15:22,709 INFO MainThread:1674 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
116
+ wandb_count: 5
117
+ }
118
+ pusher_stats {
119
+ uploaded_bytes: 878639
120
+ total_bytes: 878639
121
+ }
122
+
123
+ 2022-02-01 14:15:22,812 INFO MainThread:1674 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
124
+ wandb_count: 5
125
+ }
126
+ pusher_stats {
127
+ uploaded_bytes: 878639
128
+ total_bytes: 878639
129
+ }
130
+
131
+ 2022-02-01 14:15:22,915 INFO MainThread:1674 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
132
+ wandb_count: 5
133
+ }
134
+ pusher_stats {
135
+ uploaded_bytes: 878639
136
+ total_bytes: 878639
137
+ }
138
+
139
+ 2022-02-01 14:15:24,234 INFO MainThread:1674 [wandb_run.py:_wait_for_finish():1912] got exit ret: file_counts {
140
+ wandb_count: 5
141
+ }
142
+ pusher_stats {
143
+ uploaded_bytes: 878639
144
+ total_bytes: 878639
145
+ }
146
+
147
+ 2022-02-01 14:15:24,652 INFO MainThread:1674 [wandb_run.py:_wait_for_finish():1912] got exit ret: done: true
148
+ exit_result {
149
+ }
150
+ file_counts {
151
+ wandb_count: 5
152
+ }
153
+ pusher_stats {
154
+ uploaded_bytes: 878639
155
+ total_bytes: 878639
156
+ }
157
+ local_info {
158
+ }
159
+
160
+ 2022-02-01 14:15:25,813 INFO MainThread:1674 [wandb_run.py:_append_history():2130] rendering history
161
+ 2022-02-01 14:15:25,813 INFO MainThread:1674 [wandb_run.py:_append_summary():2085] rendering summary
162
+ 2022-02-01 14:15:25,814 INFO MainThread:1674 [wandb_run.py:_append_files():2180] logging synced files
wandb/run-20220201_115151-3ujx6xdv/run-3ujx6xdv.wandb ADDED
Binary file (2.37 MB). View file
 
wandb/run-20220201_142447-3hqii6h6/files/conda-environment.yaml ADDED
File without changes
wandb/run-20220201_142447-3hqii6h6/files/config.yaml ADDED
@@ -0,0 +1,668 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ wandb_version: 1
2
+
3
+ _n_gpu:
4
+ desc: null
5
+ value: 1
6
+ _name_or_path:
7
+ desc: null
8
+ value: ./checkpoint-18000
9
+ _wandb:
10
+ desc: null
11
+ value:
12
+ cli_version: 0.12.9
13
+ framework: huggingface
14
+ huggingface_version: 4.17.0.dev0
15
+ is_jupyter_run: false
16
+ is_kaggle_kernel: false
17
+ m:
18
+ - 1: train/global_step
19
+ 6:
20
+ - 3
21
+ - 1: eval/loss
22
+ 5: 1
23
+ 6:
24
+ - 1
25
+ - 1: eval/wer
26
+ 5: 1
27
+ 6:
28
+ - 1
29
+ - 1: eval/runtime
30
+ 5: 1
31
+ 6:
32
+ - 1
33
+ - 1: eval/samples_per_second
34
+ 5: 1
35
+ 6:
36
+ - 1
37
+ - 1: eval/steps_per_second
38
+ 5: 1
39
+ 6:
40
+ - 1
41
+ python_version: 3.8.8
42
+ start_time: 1643725488
43
+ t:
44
+ 1:
45
+ - 1
46
+ - 5
47
+ - 11
48
+ 3:
49
+ - 13
50
+ 4: 3.8.8
51
+ 5: 0.12.9
52
+ 6: 4.17.0.dev0
53
+ 8:
54
+ - 5
55
+ activation_dropout:
56
+ desc: null
57
+ value: 0.05
58
+ adafactor:
59
+ desc: null
60
+ value: false
61
+ adam_beta1:
62
+ desc: null
63
+ value: 0.9
64
+ adam_beta2:
65
+ desc: null
66
+ value: 0.999
67
+ adam_epsilon:
68
+ desc: null
69
+ value: 1.0e-08
70
+ adapter_kernel_size:
71
+ desc: null
72
+ value: 3
73
+ adapter_stride:
74
+ desc: null
75
+ value: 2
76
+ add_adapter:
77
+ desc: null
78
+ value: false
79
+ add_cross_attention:
80
+ desc: null
81
+ value: false
82
+ apply_spec_augment:
83
+ desc: null
84
+ value: true
85
+ architectures:
86
+ desc: null
87
+ value:
88
+ - Wav2Vec2ForCTC
89
+ attention_dropout:
90
+ desc: null
91
+ value: 0.0
92
+ bad_words_ids:
93
+ desc: null
94
+ value: null
95
+ bf16:
96
+ desc: null
97
+ value: false
98
+ bf16_full_eval:
99
+ desc: null
100
+ value: false
101
+ bos_token_id:
102
+ desc: null
103
+ value: 1
104
+ chunk_size_feed_forward:
105
+ desc: null
106
+ value: 0
107
+ classifier_proj_size:
108
+ desc: null
109
+ value: 256
110
+ codevector_dim:
111
+ desc: null
112
+ value: 768
113
+ contrastive_logits_temperature:
114
+ desc: null
115
+ value: 0.1
116
+ conv_bias:
117
+ desc: null
118
+ value: true
119
+ conv_dim:
120
+ desc: null
121
+ value:
122
+ - 512
123
+ - 512
124
+ - 512
125
+ - 512
126
+ - 512
127
+ - 512
128
+ - 512
129
+ conv_kernel:
130
+ desc: null
131
+ value:
132
+ - 10
133
+ - 3
134
+ - 3
135
+ - 3
136
+ - 3
137
+ - 2
138
+ - 2
139
+ conv_stride:
140
+ desc: null
141
+ value:
142
+ - 5
143
+ - 2
144
+ - 2
145
+ - 2
146
+ - 2
147
+ - 2
148
+ - 2
149
+ cross_attention_hidden_size:
150
+ desc: null
151
+ value: null
152
+ ctc_loss_reduction:
153
+ desc: null
154
+ value: mean
155
+ ctc_zero_infinity:
156
+ desc: null
157
+ value: false
158
+ dataloader_drop_last:
159
+ desc: null
160
+ value: false
161
+ dataloader_num_workers:
162
+ desc: null
163
+ value: 0
164
+ dataloader_pin_memory:
165
+ desc: null
166
+ value: true
167
+ ddp_bucket_cap_mb:
168
+ desc: null
169
+ value: None
170
+ ddp_find_unused_parameters:
171
+ desc: null
172
+ value: None
173
+ debug:
174
+ desc: null
175
+ value: '[]'
176
+ decoder_start_token_id:
177
+ desc: null
178
+ value: null
179
+ deepspeed:
180
+ desc: null
181
+ value: None
182
+ disable_tqdm:
183
+ desc: null
184
+ value: false
185
+ diversity_loss_weight:
186
+ desc: null
187
+ value: 0.1
188
+ diversity_penalty:
189
+ desc: null
190
+ value: 0.0
191
+ do_eval:
192
+ desc: null
193
+ value: true
194
+ do_predict:
195
+ desc: null
196
+ value: false
197
+ do_sample:
198
+ desc: null
199
+ value: false
200
+ do_stable_layer_norm:
201
+ desc: null
202
+ value: true
203
+ do_train:
204
+ desc: null
205
+ value: false
206
+ early_stopping:
207
+ desc: null
208
+ value: false
209
+ encoder_no_repeat_ngram_size:
210
+ desc: null
211
+ value: 0
212
+ eos_token_id:
213
+ desc: null
214
+ value: 2
215
+ eval_accumulation_steps:
216
+ desc: null
217
+ value: None
218
+ eval_batch_size:
219
+ desc: null
220
+ value: 64
221
+ eval_steps:
222
+ desc: null
223
+ value: 500
224
+ evaluation_strategy:
225
+ desc: null
226
+ value: steps
227
+ feat_extract_activation:
228
+ desc: null
229
+ value: gelu
230
+ feat_extract_dropout:
231
+ desc: null
232
+ value: 0.0
233
+ feat_extract_norm:
234
+ desc: null
235
+ value: layer
236
+ feat_proj_dropout:
237
+ desc: null
238
+ value: 0.0
239
+ feat_quantizer_dropout:
240
+ desc: null
241
+ value: 0.0
242
+ final_dropout:
243
+ desc: null
244
+ value: 0.0
245
+ finetuning_task:
246
+ desc: null
247
+ value: null
248
+ forced_bos_token_id:
249
+ desc: null
250
+ value: null
251
+ forced_eos_token_id:
252
+ desc: null
253
+ value: null
254
+ fp16:
255
+ desc: null
256
+ value: true
257
+ fp16_backend:
258
+ desc: null
259
+ value: auto
260
+ fp16_full_eval:
261
+ desc: null
262
+ value: false
263
+ fp16_opt_level:
264
+ desc: null
265
+ value: O1
266
+ gradient_accumulation_steps:
267
+ desc: null
268
+ value: 1
269
+ gradient_checkpointing:
270
+ desc: null
271
+ value: true
272
+ greater_is_better:
273
+ desc: null
274
+ value: false
275
+ group_by_length:
276
+ desc: null
277
+ value: true
278
+ half_precision_backend:
279
+ desc: null
280
+ value: amp
281
+ hidden_act:
282
+ desc: null
283
+ value: gelu
284
+ hidden_dropout:
285
+ desc: null
286
+ value: 0.0
287
+ hidden_size:
288
+ desc: null
289
+ value: 1024
290
+ hub_model_id:
291
+ desc: null
292
+ value: None
293
+ hub_strategy:
294
+ desc: null
295
+ value: every_save
296
+ hub_token:
297
+ desc: null
298
+ value: <HUB_TOKEN>
299
+ id2label:
300
+ desc: null
301
+ value:
302
+ '0': LABEL_0
303
+ '1': LABEL_1
304
+ ignore_data_skip:
305
+ desc: null
306
+ value: false
307
+ initializer_range:
308
+ desc: null
309
+ value: 0.02
310
+ intermediate_size:
311
+ desc: null
312
+ value: 4096
313
+ is_decoder:
314
+ desc: null
315
+ value: false
316
+ is_encoder_decoder:
317
+ desc: null
318
+ value: false
319
+ label2id:
320
+ desc: null
321
+ value:
322
+ LABEL_0: 0
323
+ LABEL_1: 1
324
+ label_names:
325
+ desc: null
326
+ value: None
327
+ label_smoothing_factor:
328
+ desc: null
329
+ value: 0.0
330
+ layer_norm_eps:
331
+ desc: null
332
+ value: 1.0e-05
333
+ layerdrop:
334
+ desc: null
335
+ value: 0.0
336
+ learning_rate:
337
+ desc: null
338
+ value: 0.0001
339
+ length_column_name:
340
+ desc: null
341
+ value: input_length
342
+ length_penalty:
343
+ desc: null
344
+ value: 1.0
345
+ load_best_model_at_end:
346
+ desc: null
347
+ value: true
348
+ local_rank:
349
+ desc: null
350
+ value: -1
351
+ log_level:
352
+ desc: null
353
+ value: -1
354
+ log_level_replica:
355
+ desc: null
356
+ value: -1
357
+ log_on_each_node:
358
+ desc: null
359
+ value: true
360
+ logging_dir:
361
+ desc: null
362
+ value: ./runs/Feb01_14-18-51_job-1abccd0a-3293-4ffe-8274-9e8f841f653f
363
+ logging_first_step:
364
+ desc: null
365
+ value: false
366
+ logging_nan_inf_filter:
367
+ desc: null
368
+ value: true
369
+ logging_steps:
370
+ desc: null
371
+ value: 100
372
+ logging_strategy:
373
+ desc: null
374
+ value: steps
375
+ lr_scheduler_type:
376
+ desc: null
377
+ value: linear
378
+ mask_feature_length:
379
+ desc: null
380
+ value: 10
381
+ mask_feature_min_masks:
382
+ desc: null
383
+ value: 0
384
+ mask_feature_prob:
385
+ desc: null
386
+ value: 0.25
387
+ mask_time_length:
388
+ desc: null
389
+ value: 10
390
+ mask_time_min_masks:
391
+ desc: null
392
+ value: 2
393
+ mask_time_prob:
394
+ desc: null
395
+ value: 0.75
396
+ max_grad_norm:
397
+ desc: null
398
+ value: 1.0
399
+ max_length:
400
+ desc: null
401
+ value: 20
402
+ max_steps:
403
+ desc: null
404
+ value: -1
405
+ metric_for_best_model:
406
+ desc: null
407
+ value: loss
408
+ min_length:
409
+ desc: null
410
+ value: 0
411
+ model_type:
412
+ desc: null
413
+ value: wav2vec2
414
+ mp_parameters:
415
+ desc: null
416
+ value: ''
417
+ no_cuda:
418
+ desc: null
419
+ value: false
420
+ no_repeat_ngram_size:
421
+ desc: null
422
+ value: 0
423
+ num_adapter_layers:
424
+ desc: null
425
+ value: 3
426
+ num_attention_heads:
427
+ desc: null
428
+ value: 16
429
+ num_beam_groups:
430
+ desc: null
431
+ value: 1
432
+ num_beams:
433
+ desc: null
434
+ value: 1
435
+ num_codevector_groups:
436
+ desc: null
437
+ value: 2
438
+ num_codevectors_per_group:
439
+ desc: null
440
+ value: 320
441
+ num_conv_pos_embedding_groups:
442
+ desc: null
443
+ value: 16
444
+ num_conv_pos_embeddings:
445
+ desc: null
446
+ value: 128
447
+ num_feat_extract_layers:
448
+ desc: null
449
+ value: 7
450
+ num_hidden_layers:
451
+ desc: null
452
+ value: 24
453
+ num_negatives:
454
+ desc: null
455
+ value: 100
456
+ num_return_sequences:
457
+ desc: null
458
+ value: 1
459
+ num_train_epochs:
460
+ desc: null
461
+ value: 1.0
462
+ optim:
463
+ desc: null
464
+ value: adamw_hf
465
+ output_attentions:
466
+ desc: null
467
+ value: false
468
+ output_dir:
469
+ desc: null
470
+ value: ./
471
+ output_hidden_size:
472
+ desc: null
473
+ value: 1024
474
+ output_hidden_states:
475
+ desc: null
476
+ value: false
477
+ output_scores:
478
+ desc: null
479
+ value: false
480
+ overwrite_output_dir:
481
+ desc: null
482
+ value: true
483
+ pad_token_id:
484
+ desc: null
485
+ value: 218
486
+ past_index:
487
+ desc: null
488
+ value: -1
489
+ per_device_eval_batch_size:
490
+ desc: null
491
+ value: 64
492
+ per_device_train_batch_size:
493
+ desc: null
494
+ value: 64
495
+ per_gpu_eval_batch_size:
496
+ desc: null
497
+ value: None
498
+ per_gpu_train_batch_size:
499
+ desc: null
500
+ value: None
501
+ prediction_loss_only:
502
+ desc: null
503
+ value: false
504
+ prefix:
505
+ desc: null
506
+ value: null
507
+ problem_type:
508
+ desc: null
509
+ value: null
510
+ proj_codevector_dim:
511
+ desc: null
512
+ value: 768
513
+ pruned_heads:
514
+ desc: null
515
+ value: {}
516
+ push_to_hub:
517
+ desc: null
518
+ value: true
519
+ push_to_hub_model_id:
520
+ desc: null
521
+ value: None
522
+ push_to_hub_organization:
523
+ desc: null
524
+ value: None
525
+ push_to_hub_token:
526
+ desc: null
527
+ value: <PUSH_TO_HUB_TOKEN>
528
+ remove_invalid_values:
529
+ desc: null
530
+ value: false
531
+ remove_unused_columns:
532
+ desc: null
533
+ value: true
534
+ repetition_penalty:
535
+ desc: null
536
+ value: 1.0
537
+ report_to:
538
+ desc: null
539
+ value: '[''wandb'']'
540
+ resume_from_checkpoint:
541
+ desc: null
542
+ value: None
543
+ return_dict:
544
+ desc: null
545
+ value: true
546
+ return_dict_in_generate:
547
+ desc: null
548
+ value: false
549
+ run_name:
550
+ desc: null
551
+ value: xls-r-300m-fr
552
+ save_on_each_node:
553
+ desc: null
554
+ value: false
555
+ save_steps:
556
+ desc: null
557
+ value: 500
558
+ save_strategy:
559
+ desc: null
560
+ value: steps
561
+ save_total_limit:
562
+ desc: null
563
+ value: 2
564
+ seed:
565
+ desc: null
566
+ value: 42
567
+ sep_token_id:
568
+ desc: null
569
+ value: null
570
+ sharded_ddp:
571
+ desc: null
572
+ value: '[]'
573
+ skip_memory_metrics:
574
+ desc: null
575
+ value: true
576
+ task_specific_params:
577
+ desc: null
578
+ value: null
579
+ tdnn_dilation:
580
+ desc: null
581
+ value:
582
+ - 1
583
+ - 2
584
+ - 3
585
+ - 1
586
+ - 1
587
+ tdnn_dim:
588
+ desc: null
589
+ value:
590
+ - 512
591
+ - 512
592
+ - 512
593
+ - 512
594
+ - 1500
595
+ tdnn_kernel:
596
+ desc: null
597
+ value:
598
+ - 5
599
+ - 3
600
+ - 3
601
+ - 1
602
+ - 1
603
+ temperature:
604
+ desc: null
605
+ value: 1.0
606
+ tf32:
607
+ desc: null
608
+ value: None
609
+ tie_encoder_decoder:
610
+ desc: null
611
+ value: false
612
+ tie_word_embeddings:
613
+ desc: null
614
+ value: true
615
+ tokenizer_class:
616
+ desc: null
617
+ value: null
618
+ top_k:
619
+ desc: null
620
+ value: 50
621
+ top_p:
622
+ desc: null
623
+ value: 1.0
624
+ torch_dtype:
625
+ desc: null
626
+ value: float32
627
+ torchscript:
628
+ desc: null
629
+ value: false
630
+ tpu_metrics_debug:
631
+ desc: null
632
+ value: false
633
+ tpu_num_cores:
634
+ desc: null
635
+ value: None
636
+ train_batch_size:
637
+ desc: null
638
+ value: 64
639
+ transformers_version:
640
+ desc: null
641
+ value: 4.17.0.dev0
642
+ use_bfloat16:
643
+ desc: null
644
+ value: false
645
+ use_legacy_prediction_loop:
646
+ desc: null
647
+ value: false
648
+ use_weighted_layer_sum:
649
+ desc: null
650
+ value: false
651
+ vocab_size:
652
+ desc: null
653
+ value: 219
654
+ warmup_ratio:
655
+ desc: null
656
+ value: 0.0
657
+ warmup_steps:
658
+ desc: null
659
+ value: 2700
660
+ weight_decay:
661
+ desc: null
662
+ value: 0.0
663
+ xpu_backend:
664
+ desc: null
665
+ value: None
666
+ xvector_output_dim:
667
+ desc: null
668
+ value: 512
wandb/run-20220201_142447-3hqii6h6/files/output.log ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ***** eval metrics *****
2
+ eval_loss = 0.226
3
+ eval_runtime = 0:04:36.60
4
+ eval_samples = 4469
5
+ eval_samples_per_second = 16.157
6
+ eval_steps_per_second = 0.253
7
+ eval_wer = 0.2949
8
+ 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 70/70 [04:36<00:00, 3.95s/it]
9
+ Saving model checkpoint to ./
10
+ Configuration saved in ./config.json
11
+ Model weights saved in ./pytorch_model.bin
wandb/run-20220201_142447-3hqii6h6/files/requirements.txt ADDED
@@ -0,0 +1,180 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ aiohttp==3.8.1
2
+ aiosignal==1.2.0
3
+ analytics-python==1.4.0
4
+ anyio==3.5.0
5
+ appdirs==1.4.4
6
+ argon2-cffi-bindings==21.2.0
7
+ argon2-cffi==21.3.0
8
+ asgiref==3.5.0
9
+ asttokens==2.0.5
10
+ async-timeout==4.0.2
11
+ attrs==21.4.0
12
+ audioread==2.1.9
13
+ backcall==0.2.0
14
+ backoff==1.10.0
15
+ bcrypt==3.2.0
16
+ beautifulsoup4==4.9.3
17
+ black==21.12b0
18
+ bleach==4.1.0
19
+ brotlipy==0.7.0
20
+ certifi==2020.12.5
21
+ cffi==1.14.3
22
+ chardet==3.0.4
23
+ charset-normalizer==2.0.10
24
+ click==8.0.3
25
+ conda-build==3.21.4
26
+ conda-package-handling==1.7.2
27
+ conda==4.9.2
28
+ configparser==5.2.0
29
+ cryptography==3.2.1
30
+ cycler==0.11.0
31
+ datasets==1.18.2.dev0
32
+ debugpy==1.5.1
33
+ decorator==4.4.2
34
+ defusedxml==0.7.1
35
+ dill==0.3.4
36
+ dnspython==2.1.0
37
+ docker-pycreds==0.4.0
38
+ entrypoints==0.3
39
+ executing==0.8.2
40
+ fastapi==0.73.0
41
+ ffmpy==0.3.0
42
+ filelock==3.0.12
43
+ fonttools==4.29.0
44
+ frozenlist==1.3.0
45
+ fsspec==2022.1.0
46
+ gitdb==4.0.9
47
+ gitpython==3.1.26
48
+ glob2==0.7
49
+ gradio==2.7.5.2
50
+ h11==0.13.0
51
+ huggingface-hub==0.4.0
52
+ idna==2.10
53
+ importlib-resources==5.4.0
54
+ ipykernel==6.7.0
55
+ ipython-genutils==0.2.0
56
+ ipython==8.0.1
57
+ ipywidgets==7.6.3
58
+ jedi==0.17.0
59
+ jinja2==2.11.3
60
+ jiwer==2.3.0
61
+ joblib==1.1.0
62
+ json5==0.9.6
63
+ jsonschema==4.4.0
64
+ jupyter-client==7.1.2
65
+ jupyter-core==4.9.1
66
+ jupyterlab-pygments==0.1.2
67
+ jupyterlab-server==1.2.0
68
+ jupyterlab-widgets==1.0.2
69
+ jupyterlab==2.2.9
70
+ kiwisolver==1.3.2
71
+ libarchive-c==2.9
72
+ librosa==0.8.1
73
+ llvmlite==0.38.0
74
+ markdown2==2.4.2
75
+ markupsafe==1.1.1
76
+ matplotlib-inline==0.1.3
77
+ matplotlib==3.5.1
78
+ mistune==0.8.4
79
+ mkl-fft==1.3.0
80
+ mkl-random==1.1.1
81
+ mkl-service==2.3.0
82
+ monotonic==1.6
83
+ multidict==6.0.2
84
+ multiprocess==0.70.12.2
85
+ mypy-extensions==0.4.3
86
+ nano==0.10.0
87
+ nbclient==0.5.10
88
+ nbconvert==6.4.1
89
+ nbformat==5.1.3
90
+ nest-asyncio==1.5.4
91
+ notebook==6.4.8
92
+ numba==0.55.1
93
+ numpy==1.19.2
94
+ olefile==0.46
95
+ packaging==21.3
96
+ pandas==1.4.0
97
+ pandocfilters==1.5.0
98
+ paramiko==2.9.2
99
+ parso==0.8.1
100
+ pathspec==0.9.0
101
+ pathtools==0.1.2
102
+ pexpect==4.8.0
103
+ pickleshare==0.7.5
104
+ pillow==8.1.2
105
+ pip==21.3.1
106
+ pkginfo==1.7.0
107
+ platformdirs==2.4.1
108
+ pooch==1.6.0
109
+ prometheus-client==0.13.0
110
+ promise==2.3
111
+ prompt-toolkit==3.0.8
112
+ protobuf==3.19.4
113
+ psutil==5.8.0
114
+ ptyprocess==0.7.0
115
+ pure-eval==0.2.2
116
+ pyarrow==6.0.1
117
+ pycosat==0.6.3
118
+ pycparser==2.20
119
+ pycryptodome==3.13.0
120
+ pydantic==1.9.0
121
+ pydub==0.25.1
122
+ pygments==2.8.0
123
+ pynacl==1.5.0
124
+ pyopenssl==19.1.0
125
+ pyparsing==3.0.7
126
+ pyrsistent==0.18.1
127
+ pysocks==1.7.1
128
+ python-dateutil==2.8.2
129
+ python-etcd==0.4.5
130
+ python-levenshtein==0.12.2
131
+ python-multipart==0.0.5
132
+ pytz==2021.1
133
+ pyyaml==5.4.1
134
+ pyzmq==22.3.0
135
+ regex==2022.1.18
136
+ requests==2.24.0
137
+ resampy==0.2.2
138
+ ruamel-yaml==0.15.87
139
+ sacremoses==0.0.47
140
+ scikit-learn==1.0.2
141
+ scipy==1.7.3
142
+ send2trash==1.8.0
143
+ sentry-sdk==1.5.4
144
+ setuptools==50.3.1.post20201107
145
+ shortuuid==1.0.8
146
+ six==1.15.0
147
+ smmap==5.0.0
148
+ sniffio==1.2.0
149
+ soundfile==0.10.3.post1
150
+ soupsieve==2.2
151
+ stack-data==0.1.4
152
+ starlette==0.17.1
153
+ subprocess32==3.5.4
154
+ termcolor==1.1.0
155
+ terminado==0.13.1
156
+ testpath==0.5.0
157
+ threadpoolctl==3.0.0
158
+ tokenizers==0.11.4
159
+ tomli==1.2.3
160
+ torch==1.10.2
161
+ torchaudio==0.10.2
162
+ torchelastic==0.2.2
163
+ torchtext==0.9.1
164
+ torchvision==0.9.1
165
+ tornado==6.1
166
+ tqdm==4.62.3
167
+ traitlets==5.1.1
168
+ transformers==4.17.0.dev0
169
+ typing-extensions==4.0.1
170
+ urllib3==1.25.11
171
+ uvicorn==0.17.1
172
+ wandb==0.12.9
173
+ wcwidth==0.2.5
174
+ webencodings==0.5.1
175
+ wheel==0.35.1
176
+ widgetsnbextension==3.5.2
177
+ xxhash==2.0.2
178
+ yarl==1.7.2
179
+ yaspin==2.1.0
180
+ zipp==3.7.0
wandb/run-20220201_142447-3hqii6h6/files/wandb-metadata.json ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "os": "Linux-4.15.0-151-generic-x86_64-with-glibc2.10",
3
+ "python": "3.8.8",
4
+ "heartbeatAt": "2022-02-01T14:24:49.335786",
5
+ "startedAt": "2022-02-01T14:24:47.897859",
6
+ "docker": null,
7
+ "gpu": "Tesla V100S-PCIE-32GB",
8
+ "gpu_count": 1,
9
+ "cpu_count": 60,
10
+ "cuda": null,
11
+ "args": [
12
+ "--dataset_name=mozilla-foundation/common_voice_8_0",
13
+ "--model_name_or_path=./checkpoint-18000",
14
+ "--dataset_config_name=fr",
15
+ "--tokenizer_name_or_path=./",
16
+ "--output_dir=./",
17
+ "--overwrite_output_dir",
18
+ "--num_train_epochs=1",
19
+ "--per_device_train_batch_size=64",
20
+ "--per_device_eval_batch_size=64",
21
+ "--gradient_accumulation_steps=1",
22
+ "--learning_rate=1e-4",
23
+ "--warmup_steps=2700",
24
+ "--length_column_name=input_length",
25
+ "--evaluation_strategy=steps",
26
+ "--text_column_name=sentence",
27
+ "--save_steps=500",
28
+ "--eval_steps=500",
29
+ "--logging_steps=100",
30
+ "--layerdrop=0.0",
31
+ "--activation_dropout=0.05",
32
+ "--save_total_limit=2",
33
+ "--freeze_feature_encoder",
34
+ "--feat_proj_dropout=0.0",
35
+ "--mask_time_prob=0.75",
36
+ "--mask_time_length=10",
37
+ "--mask_feature_prob=0.25",
38
+ "--mask_feature_length=10",
39
+ "--gradient_checkpointing",
40
+ "--report_to=wandb",
41
+ "--run_name=xls-r-300m-fr",
42
+ "--max_eval_samples=4500",
43
+ "--max_duration_in_seconds=10",
44
+ "--use_auth_token",
45
+ "--fp16",
46
+ "--group_by_length",
47
+ "--preprocessing_num_workers=64",
48
+ "--do_train=False",
49
+ "--do_eval",
50
+ "--load_best_model_at_end",
51
+ "--push_to_hub"
52
+ ],
53
+ "state": "running",
54
+ "program": "run_speech_recognition_ctc.py",
55
+ "codePath": "run_speech_recognition_ctc.py",
56
+ "git": {
57
+ "remote": "https://huggingface.co/AlexN/xls-r-300m-fr",
58
+ "commit": "a37375f05121cad39aba4409ef22f6f71610efc6"
59
+ },
60
+ "email": "[email protected]",
61
+ "root": "/workspace/xls-r-300m-fr",
62
+ "host": "job-1abccd0a-3293-4ffe-8274-9e8f841f653f",
63
+ "username": "ovh",
64
+ "executable": "/opt/conda/bin/python"
65
+ }
wandb/run-20220201_142447-3hqii6h6/files/wandb-summary.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"eval/loss": 0.22597630321979523, "eval/wer": 0.2948882012145466, "eval/runtime": 276.6037, "eval/samples_per_second": 16.157, "eval/steps_per_second": 0.253, "train/global_step": 0, "_runtime": 2, "_timestamp": 1643725490, "_step": 0}
wandb/run-20220201_142447-3hqii6h6/logs/debug-internal.log ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2022-02-01 14:24:48,994 INFO MainThread:29947 [internal.py:wandb_internal():87] W&B internal server running at pid: 29947, started at: 2022-02-01 14:24:48.994283
2
+ 2022-02-01 14:24:48,998 INFO WriterThread:29947 [datastore.py:open_for_write():77] open: /workspace/xls-r-300m-fr/wandb/run-20220201_142447-3hqii6h6/run-3hqii6h6.wandb
3
+ 2022-02-01 14:24:48,999 DEBUG HandlerThread:29947 [handler.py:handle_request():130] handle_request: check_version
4
+ 2022-02-01 14:24:49,002 DEBUG SenderThread:29947 [sender.py:send():234] send: header
5
+ 2022-02-01 14:24:49,002 DEBUG SenderThread:29947 [sender.py:send_request():248] send_request: check_version
6
+ 2022-02-01 14:24:49,081 DEBUG SenderThread:29947 [sender.py:send():234] send: run
7
+ 2022-02-01 14:24:49,273 INFO SenderThread:29947 [dir_watcher.py:__init__():169] watching files in: /workspace/xls-r-300m-fr/wandb/run-20220201_142447-3hqii6h6/files
8
+ 2022-02-01 14:24:49,273 INFO SenderThread:29947 [sender.py:_start_run_threads():804] run started: 3hqii6h6 with start time 1643725488
9
+ 2022-02-01 14:24:49,273 DEBUG SenderThread:29947 [sender.py:send():234] send: summary
10
+ 2022-02-01 14:24:49,274 INFO SenderThread:29947 [sender.py:_save_file():939] saving file wandb-summary.json with policy end
11
+ 2022-02-01 14:24:49,276 DEBUG HandlerThread:29947 [handler.py:handle_request():130] handle_request: run_start
12
+ 2022-02-01 14:24:49,335 DEBUG HandlerThread:29947 [meta.py:__init__():40] meta init
13
+ 2022-02-01 14:24:49,335 DEBUG HandlerThread:29947 [meta.py:__init__():54] meta init done
14
+ 2022-02-01 14:24:49,335 DEBUG HandlerThread:29947 [meta.py:probe():214] probe
15
+ 2022-02-01 14:24:49,342 DEBUG HandlerThread:29947 [meta.py:_setup_git():204] setup git
16
+ 2022-02-01 14:24:49,374 DEBUG HandlerThread:29947 [meta.py:_setup_git():211] setup git done
17
+ 2022-02-01 14:24:49,374 DEBUG HandlerThread:29947 [meta.py:_save_pip():58] save pip
18
+ 2022-02-01 14:24:49,375 DEBUG HandlerThread:29947 [meta.py:_save_pip():72] save pip done
19
+ 2022-02-01 14:24:49,375 DEBUG HandlerThread:29947 [meta.py:_save_conda():79] save conda
20
+ 2022-02-01 14:24:49,989 DEBUG HandlerThread:29947 [meta.py:_save_conda():89] save conda done
21
+ 2022-02-01 14:24:49,989 DEBUG HandlerThread:29947 [meta.py:probe():252] probe done
22
+ 2022-02-01 14:24:49,998 DEBUG SenderThread:29947 [sender.py:send():234] send: files
23
+ 2022-02-01 14:24:49,999 INFO SenderThread:29947 [sender.py:_save_file():939] saving file wandb-metadata.json with policy now
24
+ 2022-02-01 14:24:50,010 DEBUG HandlerThread:29947 [handler.py:handle_request():130] handle_request: stop_status
25
+ 2022-02-01 14:24:50,011 DEBUG SenderThread:29947 [sender.py:send_request():248] send_request: stop_status
26
+ 2022-02-01 14:24:50,173 DEBUG SenderThread:29947 [sender.py:send():234] send: config
27
+ 2022-02-01 14:24:50,176 DEBUG SenderThread:29947 [sender.py:send():234] send: metric
28
+ 2022-02-01 14:24:50,176 DEBUG SenderThread:29947 [sender.py:send():234] send: metric
29
+ 2022-02-01 14:24:50,177 WARNING SenderThread:29947 [sender.py:send_metric():897] Seen metric with glob (shouldnt happen)
30
+ 2022-02-01 14:24:50,177 DEBUG SenderThread:29947 [sender.py:send():234] send: metric
31
+ 2022-02-01 14:24:50,177 DEBUG SenderThread:29947 [sender.py:send():234] send: metric
32
+ 2022-02-01 14:24:50,177 DEBUG SenderThread:29947 [sender.py:send():234] send: metric
33
+ 2022-02-01 14:24:50,177 DEBUG SenderThread:29947 [sender.py:send():234] send: metric
34
+ 2022-02-01 14:24:50,177 DEBUG SenderThread:29947 [sender.py:send():234] send: metric
35
+ 2022-02-01 14:24:50,178 DEBUG SenderThread:29947 [sender.py:send():234] send: history
36
+ 2022-02-01 14:24:50,178 DEBUG SenderThread:29947 [sender.py:send():234] send: summary
37
+ 2022-02-01 14:24:50,178 INFO SenderThread:29947 [sender.py:_save_file():939] saving file wandb-summary.json with policy end
38
+ 2022-02-01 14:24:50,276 INFO Thread-8 :29947 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220201_142447-3hqii6h6/files/output.log
39
+ 2022-02-01 14:24:50,277 INFO Thread-8 :29947 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220201_142447-3hqii6h6/files/requirements.txt
40
+ 2022-02-01 14:24:50,277 INFO Thread-8 :29947 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220201_142447-3hqii6h6/files/conda-environment.yaml
41
+ 2022-02-01 14:24:50,278 INFO Thread-8 :29947 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220201_142447-3hqii6h6/files/wandb-metadata.json
42
+ 2022-02-01 14:24:50,278 INFO Thread-8 :29947 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220201_142447-3hqii6h6/files/wandb-summary.json
43
+ 2022-02-01 14:24:50,617 INFO Thread-11 :29947 [upload_job.py:push():137] Uploaded file /tmp/tmpp56x1syiwandb/1qh281ys-wandb-metadata.json
44
+ 2022-02-01 14:24:52,275 INFO Thread-8 :29947 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220201_142447-3hqii6h6/files/output.log
45
+ 2022-02-01 14:24:54,276 INFO Thread-8 :29947 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220201_142447-3hqii6h6/files/output.log
46
+ 2022-02-01 14:25:05,175 DEBUG HandlerThread:29947 [handler.py:handle_request():130] handle_request: stop_status
47
+ 2022-02-01 14:25:05,176 DEBUG SenderThread:29947 [sender.py:send_request():248] send_request: stop_status
48
+ 2022-02-01 14:25:17,592 DEBUG SenderThread:29947 [sender.py:send():234] send: stats
49
+ 2022-02-01 14:25:20,294 INFO Thread-8 :29947 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220201_142447-3hqii6h6/files/config.yaml
50
+ 2022-02-01 14:25:20,335 DEBUG HandlerThread:29947 [handler.py:handle_request():130] handle_request: stop_status
51
+ 2022-02-01 14:25:20,335 DEBUG SenderThread:29947 [sender.py:send_request():248] send_request: stop_status
52
+ 2022-02-01 14:25:35,486 DEBUG HandlerThread:29947 [handler.py:handle_request():130] handle_request: stop_status
53
+ 2022-02-01 14:25:35,487 DEBUG SenderThread:29947 [sender.py:send_request():248] send_request: stop_status
54
+ 2022-02-01 14:25:47,927 DEBUG SenderThread:29947 [sender.py:send():234] send: stats
55
+ 2022-02-01 14:25:50,662 DEBUG HandlerThread:29947 [handler.py:handle_request():130] handle_request: stop_status
56
+ 2022-02-01 14:25:50,663 DEBUG SenderThread:29947 [sender.py:send_request():248] send_request: stop_status
57
+ 2022-02-01 14:26:05,830 DEBUG HandlerThread:29947 [handler.py:handle_request():130] handle_request: stop_status
58
+ 2022-02-01 14:26:05,830 DEBUG SenderThread:29947 [sender.py:send_request():248] send_request: stop_status
wandb/run-20220201_142447-3hqii6h6/logs/debug.log ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2022-02-01 14:24:47,902 INFO MainThread:27520 [wandb_setup.py:_flush():71] setting env: {}
2
+ 2022-02-01 14:24:47,902 INFO MainThread:27520 [wandb_setup.py:_flush():71] setting login settings: {}
3
+ 2022-02-01 14:24:47,902 INFO MainThread:27520 [wandb_init.py:_log_setup():371] Logging user logs to /workspace/xls-r-300m-fr/wandb/run-20220201_142447-3hqii6h6/logs/debug.log
4
+ 2022-02-01 14:24:47,902 INFO MainThread:27520 [wandb_init.py:_log_setup():372] Logging internal logs to /workspace/xls-r-300m-fr/wandb/run-20220201_142447-3hqii6h6/logs/debug-internal.log
5
+ 2022-02-01 14:24:47,903 INFO MainThread:27520 [wandb_init.py:init():404] calling init triggers
6
+ 2022-02-01 14:24:47,903 INFO MainThread:27520 [wandb_init.py:init():409] wandb.init called with sweep_config: {}
7
+ config: {}
8
+ 2022-02-01 14:24:47,903 INFO MainThread:27520 [wandb_init.py:init():460] starting backend
9
+ 2022-02-01 14:24:47,903 INFO MainThread:27520 [backend.py:_multiprocessing_setup():99] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
10
+ 2022-02-01 14:24:48,004 INFO MainThread:27520 [backend.py:ensure_launched():216] starting backend process...
11
+ 2022-02-01 14:24:48,099 INFO MainThread:27520 [backend.py:ensure_launched():221] started backend process with pid: 29947
12
+ 2022-02-01 14:24:48,102 INFO MainThread:27520 [wandb_init.py:init():469] backend started and connected
13
+ 2022-02-01 14:24:48,111 INFO MainThread:27520 [wandb_init.py:init():533] updated telemetry
14
+ 2022-02-01 14:24:48,319 INFO MainThread:27520 [wandb_init.py:init():563] communicating current version
15
+ 2022-02-01 14:24:49,079 INFO MainThread:27520 [wandb_init.py:init():568] got version response
16
+ 2022-02-01 14:24:49,079 INFO MainThread:27520 [wandb_init.py:init():578] communicating run to backend with 30 second timeout
17
+ 2022-02-01 14:24:49,275 INFO MainThread:27520 [wandb_init.py:init():606] starting run threads in backend
18
+ 2022-02-01 14:24:50,008 INFO MainThread:27520 [wandb_run.py:_console_start():1810] atexit reg
19
+ 2022-02-01 14:24:50,009 INFO MainThread:27520 [wandb_run.py:_redirect():1684] redirect: SettingsConsole.REDIRECT
20
+ 2022-02-01 14:24:50,010 INFO MainThread:27520 [wandb_run.py:_redirect():1689] Redirecting console.
21
+ 2022-02-01 14:24:50,018 INFO MainThread:27520 [wandb_run.py:_redirect():1745] Redirects installed.
22
+ 2022-02-01 14:24:50,018 INFO MainThread:27520 [wandb_init.py:init():633] run started, returning control to user process
23
+ 2022-02-01 14:24:50,021 INFO MainThread:27520 [wandb_run.py:_config_callback():956] config_cb None None {'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'architectures': ['Wav2Vec2ForCTC'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': 218, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': './checkpoint-18000', 'transformers_version': '4.17.0.dev0', 'feat_extract_dropout': 0.0, 'model_type': 'wav2vec2', 'num_feat_extract_layers': 7, 'hidden_size': 1024, 'feat_extract_norm': 'layer', 'feat_extract_activation': 'gelu', 'conv_dim': [512, 512, 512, 512, 512, 512, 512], 'conv_stride': [5, 2, 2, 2, 2, 2, 2], 'conv_kernel': [10, 3, 3, 3, 3, 2, 2], 'conv_bias': True, 'num_conv_pos_embeddings': 128, 'num_conv_pos_embedding_groups': 16, 'num_hidden_layers': 24, 'intermediate_size': 4096, 'hidden_act': 'gelu', 'num_attention_heads': 16, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.05, 'feat_proj_dropout': 
0.0, 'final_dropout': 0.0, 'layerdrop': 0.0, 'layer_norm_eps': 1e-05, 'initializer_range': 0.02, 'vocab_size': 219, 'do_stable_layer_norm': True, 'use_weighted_layer_sum': False, 'apply_spec_augment': True, 'mask_time_prob': 0.75, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.25, 'mask_feature_length': 10, 'mask_feature_min_masks': 0, 'num_codevectors_per_group': 320, 'num_codevector_groups': 2, 'contrastive_logits_temperature': 0.1, 'feat_quantizer_dropout': 0.0, 'num_negatives': 100, 'codevector_dim': 768, 'proj_codevector_dim': 768, 'diversity_loss_weight': 0.1, 'ctc_loss_reduction': 'mean', 'ctc_zero_infinity': False, 'add_adapter': False, 'adapter_kernel_size': 3, 'adapter_stride': 2, 'num_adapter_layers': 3, 'output_hidden_size': 1024, 'classifier_proj_size': 256, 'tdnn_dim': [512, 512, 512, 512, 1500], 'tdnn_kernel': [5, 3, 3, 1, 1], 'tdnn_dilation': [1, 2, 3, 1, 1], 'xvector_output_dim': 512, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': False, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 64, 'per_device_eval_batch_size': 64, 'per_gpu_train_batch_size': 'None', 'per_gpu_eval_batch_size': 'None', 'gradient_accumulation_steps': 1, 'eval_accumulation_steps': 'None', 'learning_rate': 0.0001, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 1.0, 'max_steps': -1, 'lr_scheduler_type': 'linear', 'warmup_ratio': 0.0, 'warmup_steps': 2700, 'log_level': -1, 'log_level_replica': -1, 'log_on_each_node': True, 'logging_dir': './runs/Feb01_14-18-51_job-1abccd0a-3293-4ffe-8274-9e8f841f653f', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 500, 'save_total_limit': 2, 'save_on_each_node': False, 'no_cuda': False, 'seed': 42, 'bf16': False, 'fp16': True, 
'fp16_opt_level': 'O1', 'half_precision_backend': 'amp', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': 'None', 'local_rank': -1, 'xpu_backend': 'None', 'tpu_num_cores': 'None', 'tpu_metrics_debug': False, 'debug': '[]', 'dataloader_drop_last': False, 'eval_steps': 500, 'dataloader_num_workers': 0, 'past_index': -1, 'run_name': 'xls-r-300m-fr', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': 'None', 'load_best_model_at_end': True, 'metric_for_best_model': 'loss', 'greater_is_better': False, 'ignore_data_skip': False, 'sharded_ddp': '[]', 'deepspeed': 'None', 'label_smoothing_factor': 0.0, 'optim': 'adamw_hf', 'adafactor': False, 'group_by_length': True, 'length_column_name': 'input_length', 'report_to': "['wandb']", 'ddp_find_unused_parameters': 'None', 'ddp_bucket_cap_mb': 'None', 'dataloader_pin_memory': True, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': 'None', 'hub_model_id': 'None', 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'gradient_checkpointing': True, 'fp16_backend': 'auto', 'push_to_hub_model_id': 'None', 'push_to_hub_organization': 'None', 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', '_n_gpu': 1, 'mp_parameters': '', 'train_batch_size': 64, 'eval_batch_size': 64}
24
+ 2022-02-01 14:24:50,026 INFO MainThread:27520 [wandb_watch.py:watch():43] Watching
wandb/run-20220201_142447-3hqii6h6/run-3hqii6h6.wandb ADDED
Binary file (5.73 kB). View file