jiuhai committed
Commit 70f4ed0 · verified · 1 Parent(s): f318f27

Upload folder using huggingface_hub

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. .gitattributes +2 -0
  2. checkpoint-160000/added_tokens.json +27 -0
  3. checkpoint-160000/config.json +69 -0
  4. checkpoint-160000/generation_config.json +15 -0
  5. checkpoint-160000/latest +1 -0
  6. checkpoint-160000/merges.txt +0 -0
  7. checkpoint-160000/model-00001-of-00006.safetensors +3 -0
  8. checkpoint-160000/model-00002-of-00006.safetensors +3 -0
  9. checkpoint-160000/model-00003-of-00006.safetensors +3 -0
  10. checkpoint-160000/model-00004-of-00006.safetensors +3 -0
  11. checkpoint-160000/model-00005-of-00006.safetensors +3 -0
  12. checkpoint-160000/model-00006-of-00006.safetensors +3 -0
  13. checkpoint-160000/model.safetensors.index.json +0 -0
  14. checkpoint-160000/rng_state_0.pth +3 -0
  15. checkpoint-160000/rng_state_1.pth +3 -0
  16. checkpoint-160000/rng_state_10.pth +3 -0
  17. checkpoint-160000/rng_state_100.pth +3 -0
  18. checkpoint-160000/rng_state_101.pth +3 -0
  19. checkpoint-160000/rng_state_102.pth +3 -0
  20. checkpoint-160000/rng_state_103.pth +3 -0
  21. checkpoint-160000/rng_state_104.pth +3 -0
  22. checkpoint-160000/rng_state_105.pth +3 -0
  23. checkpoint-160000/rng_state_106.pth +3 -0
  24. checkpoint-160000/rng_state_107.pth +3 -0
  25. checkpoint-160000/rng_state_108.pth +3 -0
  26. checkpoint-160000/rng_state_109.pth +3 -0
  27. checkpoint-160000/rng_state_11.pth +3 -0
  28. checkpoint-160000/rng_state_110.pth +3 -0
  29. checkpoint-160000/rng_state_111.pth +3 -0
  30. checkpoint-160000/rng_state_112.pth +3 -0
  31. checkpoint-160000/rng_state_113.pth +3 -0
  32. checkpoint-160000/rng_state_114.pth +3 -0
  33. checkpoint-160000/rng_state_115.pth +3 -0
  34. checkpoint-160000/rng_state_116.pth +3 -0
  35. checkpoint-160000/rng_state_117.pth +3 -0
  36. checkpoint-160000/rng_state_118.pth +3 -0
  37. checkpoint-160000/rng_state_119.pth +3 -0
  38. checkpoint-160000/rng_state_12.pth +3 -0
  39. checkpoint-160000/rng_state_120.pth +3 -0
  40. checkpoint-160000/rng_state_121.pth +3 -0
  41. checkpoint-160000/rng_state_122.pth +3 -0
  42. checkpoint-160000/rng_state_123.pth +3 -0
  43. checkpoint-160000/rng_state_124.pth +3 -0
  44. checkpoint-160000/rng_state_125.pth +3 -0
  45. checkpoint-160000/rng_state_126.pth +3 -0
  46. checkpoint-160000/rng_state_127.pth +3 -0
  47. checkpoint-160000/rng_state_13.pth +3 -0
  48. checkpoint-160000/rng_state_14.pth +3 -0
  49. checkpoint-160000/rng_state_15.pth +3 -0
  50. checkpoint-160000/rng_state_16.pth +3 -0
.gitattributes CHANGED
@@ -48,3 +48,5 @@ checkpoint-140000/tokenizer.json filter=lfs diff=lfs merge=lfs -text
  checkpoint-140000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
  checkpoint-150000/tokenizer.json filter=lfs diff=lfs merge=lfs -text
  checkpoint-150000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ checkpoint-160000/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ checkpoint-160000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
checkpoint-160000/added_tokens.json ADDED
@@ -0,0 +1,27 @@
+ {
+   "</tool_call>": 151658,
+   "<image>": 151667,
+   "<tool_call>": 151657,
+   "<|box_end|>": 151649,
+   "<|box_start|>": 151648,
+   "<|endoftext|>": 151643,
+   "<|file_sep|>": 151664,
+   "<|fim_middle|>": 151660,
+   "<|fim_pad|>": 151662,
+   "<|fim_prefix|>": 151659,
+   "<|fim_suffix|>": 151661,
+   "<|im_end|>": 151645,
+   "<|im_start|>": 151644,
+   "<|image_pad|>": 151655,
+   "<|object_ref_end|>": 151647,
+   "<|object_ref_start|>": 151646,
+   "<|quad_end|>": 151651,
+   "<|quad_start|>": 151650,
+   "<|repo_name|>": 151663,
+   "<|video_pad|>": 151656,
+   "<|vision_end|>": 151653,
+   "<|vision_pad|>": 151654,
+   "<|vision_start|>": 151652,
+   "[/IMG]": 151666,
+   "[IMG]": 151665
+ }
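The added-token map above reserves IDs 151643–151667 for chat, vision, and image-generation markers, including the new `[IMG]`/`[/IMG]` and `<image>` tokens. A minimal sketch of checking these IDs against the checkpoint's tokenizer, assuming the checkpoint folder has been downloaded locally as `checkpoint-160000/` (a hypothetical path):

```python
from transformers import AutoTokenizer

# Hypothetical local path; adjust to wherever the checkpoint was downloaded.
tok = AutoTokenizer.from_pretrained("checkpoint-160000")

# Each ID should match the corresponding entry in added_tokens.json above.
for token in ["<image>", "[IMG]", "[/IMG]", "<|im_start|>", "<|im_end|>"]:
    print(token, tok.convert_tokens_to_ids(token))
```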
checkpoint-160000/config.json ADDED
@@ -0,0 +1,69 @@
+ {
+   "_name_or_path": "Qwen/Qwen2.5-VL-7B-Instruct",
+   "architectures": [
+     "LlavaQwenForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 151643,
+   "eos_token_id": 151645,
+   "freeze_mm_mlp_adapter": false,
+   "gen_hidden_size": 1792,
+   "gen_pooling": "early_pool2d_4",
+   "gen_vision_tower": "eva-clip-E-14-plus",
+   "hidden_act": "silu",
+   "hidden_size": 3584,
+   "image_aspect_ratio": "square",
+   "image_token_id": 151655,
+   "initializer_range": 0.02,
+   "intermediate_size": 18944,
+   "max_position_embeddings": 128000,
+   "max_window_layers": 28,
+   "mm_patch_merge_type": "flat",
+   "mm_projector_lr": null,
+   "mm_projector_type": "mlp2x_gelu",
+   "mm_use_im_patch_token": false,
+   "mm_use_im_start_end": false,
+   "mm_vision_select_feature": "patch",
+   "mm_vision_select_layer": -2,
+   "model_type": "llava_qwen",
+   "n_query": 64,
+   "num_attention_heads": 28,
+   "num_hidden_layers": 28,
+   "num_key_value_heads": 4,
+   "pad_token_id": 151643,
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": {
+     "mrope_section": [
+       16,
+       24,
+       24
+     ],
+     "rope_type": "default",
+     "type": "default"
+   },
+   "rope_theta": 1000000.0,
+   "sliding_window": 32768,
+   "tie_word_embeddings": false,
+   "tokenizer_model_max_length": 2048,
+   "tokenizer_padding_side": "right",
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.49.0.dev0",
+   "tune_mm_mlp_adapter": false,
+   "use_cache": false,
+   "use_mm_proj": true,
+   "use_sliding_window": false,
+   "video_token_id": 151656,
+   "vision_config": {
+     "hidden_size": 1280,
+     "in_chans": 3,
+     "model_type": "qwen2_5_vl",
+     "spatial_patch_size": 14,
+     "tokens_per_second": 2,
+     "torch_dtype": "bfloat16"
+   },
+   "vision_end_token_id": 151653,
+   "vision_start_token_id": 151652,
+   "vision_token_id": 151654,
+   "vision_tower_pretrained": null,
+   "vocab_size": 151668
+ }
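Since `model_type` is the custom `llava_qwen` (a LLaVA-style wrapper around Qwen2.5-VL-7B-Instruct), `AutoConfig` will only resolve it if the training codebase's model classes are installed and registered; a dependency-free way to inspect the checkpoint is to read the JSON directly. A minimal sketch, assuming the folder is available locally at a hypothetical `checkpoint-160000/` path:

```python
import json

# Hypothetical local path to the uploaded checkpoint folder.
with open("checkpoint-160000/config.json") as f:
    cfg = json.load(f)

# A few fields that characterize this checkpoint: the Qwen2.5-VL backbone,
# the EVA-CLIP generation tower, and the extended vocabulary size.
print(cfg["_name_or_path"], cfg["model_type"])
print(cfg["gen_vision_tower"], cfg["gen_hidden_size"], cfg["n_query"])
print(cfg["vocab_size"])
```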
checkpoint-160000/generation_config.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "attn_implementation": "flash_attention_2",
+   "bos_token_id": 151643,
+   "do_sample": true,
+   "eos_token_id": [
+     151645,
+     151643
+   ],
+   "pad_token_id": 151643,
+   "repetition_penalty": 1.05,
+   "temperature": 0.1,
+   "top_k": 1,
+   "top_p": 0.001,
+   "transformers_version": "4.49.0.dev0"
+ }
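These defaults (sampling enabled but with `temperature` 0.1, `top_k` 1, and `top_p` 0.001) make decoding effectively greedy with a mild repetition penalty. They can be loaded with the standard `transformers` API; a minimal sketch, assuming the checkpoint folder is available locally at a hypothetical `checkpoint-160000/` path:

```python
from transformers import GenerationConfig

# Hypothetical local path; GenerationConfig reads generation_config.json from it.
gen_cfg = GenerationConfig.from_pretrained("checkpoint-160000")
print(gen_cfg.temperature, gen_cfg.top_k, gen_cfg.top_p, gen_cfg.repetition_penalty)
```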
checkpoint-160000/latest ADDED
@@ -0,0 +1 @@
+ global_step160000
checkpoint-160000/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
checkpoint-160000/model-00001-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9785276491729e39ba577402c3b4c0baeb60ffb89df9fe97f52c52be110a852c
+ size 4965863624
checkpoint-160000/model-00002-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b1830db6908dcc76df3a71492acbcf2b8cac130114cf1f3c2d9edae8de8c6de3
3
+ size 4991495816
checkpoint-160000/model-00003-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:09c1807c6d00d7cab94f7db39d4c02ebb8537225ccde383861ac48db97945aa6
3
+ size 4932751040
checkpoint-160000/model-00004-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:55c4aaf28b2362b2fadb23efcc08914dbae2f3a1604a53c0aa820c4bb5e26f1c
3
+ size 4999900758
checkpoint-160000/model-00005-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:85caeb523023e201267f25d483f5b9aa32a41165fcdb2c4af0504892defc9302
3
+ size 4971582792
checkpoint-160000/model-00006-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f9eb53d377a698ed095f465913024fde162304954a2cd8a5e6403f78467d6ad7
3
+ size 3315894576
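Each `.safetensors` shard above (and each `rng_state_*.pth` file below) is stored as a Git LFS pointer: the three committed lines record only the spec version, the SHA-256 of the blob, and its byte size, while the actual data lives in LFS storage. After downloading a shard, the pointer can be used to verify it; a minimal sketch, assuming the first shard has been fetched to a hypothetical local path:

```python
import hashlib

# Hypothetical local path to the downloaded shard.
path = "checkpoint-160000/model-00001-of-00006.safetensors"

# SHA-256 recorded in the LFS pointer for this shard above.
expected = "9785276491729e39ba577402c3b4c0baeb60ffb89df9fe97f52c52be110a852c"

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        h.update(chunk)

print(h.hexdigest() == expected)
```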
checkpoint-160000/model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
checkpoint-160000/rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ba8fda2bfdf3bb66b69d0845609fa82feeff0cac39d526b9160ed46ea17c8f8c
+ size 15984
checkpoint-160000/rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e86cf5daf46af6efd6d497301d9e90dc833a863f9e8252ab62a9785a3c1e5958
+ size 15984
checkpoint-160000/rng_state_10.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b625cdd1837413e7a2462fafd8a3d9a8d34c7d6f6f0b8440dd06c004b5bdadf2
+ size 15997
checkpoint-160000/rng_state_100.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:11e5c40abae8c0e6ecd509f068b65428664eeb3e6ab77f7c8f7c155b4da480be
+ size 16010
checkpoint-160000/rng_state_101.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ebab00b70d504711cf43d0e1dffa0e105cd18445db0b21e6ecf35f3c8bb7221c
+ size 16010
checkpoint-160000/rng_state_102.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:953417e9c28995246ab02698db7eedde96fd205b9e51a289722823279dbcdffb
+ size 16010
checkpoint-160000/rng_state_103.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0e8aba328e0352d869572faa5b0daebd2f350dcc71ca1ba1d8a81efd2a93a750
+ size 16010
checkpoint-160000/rng_state_104.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3a5a8b39f1a321d7de0c559696b32c882f4b32b695b6d10561f4bcb502ee4423
+ size 16010
checkpoint-160000/rng_state_105.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:04dd0d693906e9a7e38620462635b1e6f3251907f012972eafefeee9a2444350
+ size 16010
checkpoint-160000/rng_state_106.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ea61fe11b9a03115c23036f122deff7a736f66ed280d53825d606a2805e05496
+ size 16010
checkpoint-160000/rng_state_107.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e232ce7cc2c3aece078f315bd7376e744b347c24893c7201e2d4d1a2de13a709
+ size 16010
checkpoint-160000/rng_state_108.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d846b8fd88619f8bbabd1132ba389aa60a1c9b5a634ac247984549264062f377
+ size 16010
checkpoint-160000/rng_state_109.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5b87e4483e008103b2da2b364a47329d3c02bc5a88998dea7865810c6255953
+ size 16010
checkpoint-160000/rng_state_11.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1610238aeba7e7b38d6589466f36249bb317c314e1945e4a5fb32635c93a59a9
+ size 15997
checkpoint-160000/rng_state_110.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1a55c8de3dfc329a4adfa206201a88b2bb6b5e86da425130acd4b1031957c762
+ size 16010
checkpoint-160000/rng_state_111.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5d7b9e1587ef70d2c643590c780131327462baa4751e01d6e9de92442fb381b7
+ size 16010
checkpoint-160000/rng_state_112.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:675aa656b6c39371d5d6cb47da8dafff1af5e27f863c521e7b9905c89ef04e72
+ size 16010
checkpoint-160000/rng_state_113.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c12f626b8f436e20e3ce5d50d9edf622e0a9435d0ce8e17a3ef2dcad7e905771
+ size 16010
checkpoint-160000/rng_state_114.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e35802ef3d5d9d7243fc631ef2ed23f2faf99dae7e123ac5a26eb6735c00c071
+ size 16010
checkpoint-160000/rng_state_115.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a2582dfd4978e2958ed86aac78b7d2f63c5955d6ac75d88def0ae4b8aec527e
+ size 16010
checkpoint-160000/rng_state_116.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4612bf34bc11671fbadf137bf75d752fab0ba7f49592d612d5b9aef136691a6b
+ size 16010
checkpoint-160000/rng_state_117.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:354e89c451ca3b329d76ff20e504e800a33406d2ac6577120c0651227ce02661
+ size 16010
checkpoint-160000/rng_state_118.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0b3cdb3905bc8e5fa41320675a82155eb6453cdd194d85be68b158231ff363c3
+ size 16010
checkpoint-160000/rng_state_119.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b4991bc083a773be4f6cc10ae6a7d78876c3a5aab363b4a6006617541e494f56
+ size 16010
checkpoint-160000/rng_state_12.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c196f890f4c1c8931944514ea45a0a50078cb6d9152187d399ba2423e8e32eb0
+ size 15997
checkpoint-160000/rng_state_120.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c830e70d62d1b144ce2644e4f9c49a3fe1bf6cabf64b7f2961fa445870bc8729
+ size 16010
checkpoint-160000/rng_state_121.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ca123131f9787785f6cc782906097c940acfc0525369c4149c84c0c77608a64d
+ size 16010
checkpoint-160000/rng_state_122.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:caa5eab8aa1ad809e152a219ebbb50867878d88877be2e6ca1796f4f4227ead4
+ size 16010
checkpoint-160000/rng_state_123.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a5f7bef60a1c2006deed4a5cafd577ac17b42fee6af9440758948b4f9df449e9
+ size 16010
checkpoint-160000/rng_state_124.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2ca75a3534fa08ccf56db6395e8e052c4f4739a17748f4f11c6f920372fd2514
+ size 16010
checkpoint-160000/rng_state_125.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:13a29003d59dad5b4568d819e224e10bb156a78232e613cd002a9bed3bead023
+ size 16010
checkpoint-160000/rng_state_126.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:319d8ba6bdc7fbdc7c610afc23a9474118ba952bdce991e04eeb276bc99ef6b4
+ size 16010
checkpoint-160000/rng_state_127.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f60efb4805c13da19639887328c192a49f1038cb907ab66152cfcf1abc15f16f
+ size 16010
checkpoint-160000/rng_state_13.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b9c613ef750d503586201681b8537152d7b74274d5674123273f2f038f89ae2e
+ size 15997
checkpoint-160000/rng_state_14.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9bee253b4ec5b5dfed8748e29b13251a657b04fede71765b4cfb167c5c72f8a8
+ size 15997
checkpoint-160000/rng_state_15.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ebddd8df57413b3eca5ab3df5971a5d13496673701792b4886629a3b90eb5eb4
+ size 15997
checkpoint-160000/rng_state_16.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b27fa8f9479ad92c02d0df117860f7cbb3d9f89b07fb53bda3dc9ebd1f6460b
+ size 15997