jhpassion0621 committed
Commit 4da4871
Parent: 0e8b3b4

Training in progress, step 5000

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "jhpassion0621/kp-umt5-base",
+  "_name_or_path": "google/umt5-base",
   "architectures": [
     "UMT5ForConditionalGeneration"
   ],
@@ -21,7 +21,7 @@
   "num_heads": 12,
   "num_layers": 12,
   "output_past": true,
-  "pad_token_id": 1,
+  "pad_token_id": 0,
   "relative_attention_max_distance": 128,
   "relative_attention_num_buckets": 32,
   "scalable_attention": true,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:40d92a0d12268d1d432379540d09866dc042c93d7a45ecce48a823bc6d51550a
+oid sha256:1fb4b7f78a909e67393594c7bcb32085cc14bba5b9c8f1af80df9d47ee170d54
 size 2368210808
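The weight file is stored through Git LFS, so only the pointer changes here: a new sha256 object id with an identical byte size. A small sketch, using only the standard library, for checking that a locally downloaded `model.safetensors` matches the pointer; the local path is a placeholder.

```python
# Sketch: hash a downloaded model.safetensors and compare it with the oid in
# the Git LFS pointer above. "model.safetensors" is a placeholder path.
import hashlib

EXPECTED = "1fb4b7f78a909e67393594c7bcb32085cc14bba5b9c8f1af80df9d47ee170d54"

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

print(sha256_of("model.safetensors") == EXPECTED)
```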
runs/Feb02_19-00-59_c32d5226e6c1/events.out.tfevents.1706900464.c32d5226e6c1.4626.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a1939b5710756a46fb329bdd2c0406abc7e42e4e3edfe89bf3c3ff991f3e5fe2
+size 4954
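The newly added `events.out.tfevents.*` file holds the TensorBoard scalars the `Trainer` has logged so far. A sketch for inspecting it offline, assuming the `tensorboard` package is installed and the `runs/` directory has been downloaded locally:

```python
# Sketch: list the scalar series stored in the added tfevents file.
# Assumes the run directory from this commit has been downloaded locally
# and that the tensorboard package is available.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Feb02_19-00-59_c32d5226e6c1")
acc.Reload()

for tag in acc.Tags()["scalars"]:
    points = acc.Scalars(tag)
    print(tag, [(p.step, p.value) for p in points[:3]])
```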
special_tokens_map.json CHANGED
@@ -301,32 +301,8 @@
     "<extra_id_298>",
     "<extra_id_299>"
   ],
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
-    "content": "<pad>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
+  "bos_token": "<s>",
+  "eos_token": "</s>",
+  "pad_token": "<pad>",
+  "unk_token": "<unk>"
 }
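The four special tokens are now stored as plain strings instead of full token objects; `transformers` accepts either form, and the tokenizer exposes the same strings at runtime. A minimal sketch, using the upstream `google/umt5-base` repo as a stand-in (requires the `sentencepiece` package):

```python
# Sketch: the simplified special_tokens_map.json still resolves to the same
# special-token strings once the tokenizer is loaded. google/umt5-base is a
# stand-in for the repo this commit belongs to.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("google/umt5-base")
print(tok.special_tokens_map)
# e.g. {'eos_token': '</s>', 'unk_token': '<unk>', 'pad_token': '<pad>', ...}
```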
tokenizer_config.json CHANGED
@@ -2739,14 +2739,10 @@
   "clean_up_tokenization_spaces": true,
   "eos_token": "</s>",
   "extra_ids": 300,
-  "max_length": 256,
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<pad>",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
-  "stride": 0,
   "tokenizer_class": "T5Tokenizer",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
   "unk_token": "<unk>"
 }
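The dropped keys (`max_length`, `stride`, `truncation_side`, `truncation_strategy`) describe per-call truncation behaviour rather than the tokenizer itself, so the same settings can still be passed when encoding text. A hedged sketch, again using `google/umt5-base` as a stand-in repo:

```python
# Sketch: the removed tokenizer_config keys map onto call-time arguments;
# the old behaviour can still be requested explicitly when encoding.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("google/umt5-base", truncation_side="right")
enc = tok(
    "some long input text ...",
    max_length=256,              # old "max_length": 256
    truncation="longest_first",  # old "truncation_strategy"
    stride=0,                    # old "stride"
)
print(len(enc["input_ids"]))
```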
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:db6b6323ef2deb8a7324687d20f3bb1198efb837f06b2d0018a076d233a53fc1
+oid sha256:dbdd3b4f9b491e964ffb3607f4e802236ffa8c879e02ab5ed71ff01cc7fbcd53
 size 4792