aliak1401 committed
Commit edd1c02 · 1 Parent(s): ce977eb

Upload tokenizer

Files changed (4)
  1. added_tokens.json +5 -0
  2. special_tokens_map.json +29 -0
  3. tokenizer_config.json +13 -0
  4. vocab.json +144 -0
added_tokens.json ADDED
@@ -0,0 +1,5 @@
+ {
+ "</s>": 143,
+ "<s>": 142,
+ "[PAD]": 144
+ }
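
Note: these three tokens sit on top of the base character vocabulary in vocab.json (ids 0-141), so they take ids 142-144. Below is a minimal sketch of how transformers appends ids for special tokens that are missing from the base vocabulary; the tiny vocabulary is illustrative only, not the one from this commit.

```python
import json, os, tempfile
from transformers import Wav2Vec2CTCTokenizer

# Illustrative stand-in vocabulary; the real vocab.json in this commit has 142 entries (ids 0-141).
vocab = {"|": 0, "a": 1, "b": 2, "c": 3, "d": 4}
vocab_path = os.path.join(tempfile.mkdtemp(), "vocab.json")
with open(vocab_path, "w", encoding="utf-8") as f:
    json.dump(vocab, f, ensure_ascii=False)

tok = Wav2Vec2CTCTokenizer(vocab_path, bos_token="<s>", eos_token="</s>",
                           unk_token="[UNK]", pad_token="[PAD]")

# On recent transformers versions, special tokens absent from vocab.json are appended
# after the last base id; added_tokens.json records exactly those appended ids
# (142-144 in this repository).
print(tok.convert_tokens_to_ids(["<s>", "</s>", "[PAD]"]))  # ids land past the base vocabulary
```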
special_tokens_map.json ADDED
@@ -0,0 +1,29 @@
+ {
+ "additional_special_tokens": [
+ {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ },
+ {
+ "content": "[PAD]",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false
+ }
+ ],
+ "bos_token": "<s>",
+ "eos_token": "</s>",
+ "pad_token": "[PAD]",
+ "unk_token": "[UNK]"
+ }
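
Note: each entry under additional_special_tokens mirrors the fields of a transformers AddedToken (whitespace stripping, normalization, single-word matching). As a sketch, the "<s>" entry above corresponds to:

```python
from transformers import AddedToken

# Equivalent AddedToken for the "<s>" entry: no whitespace stripping, normalization
# enabled, and not restricted to single-word matches.
bos = AddedToken("<s>", lstrip=False, rstrip=False, normalized=True, single_word=False)
```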
tokenizer_config.json ADDED
@@ -0,0 +1,13 @@
+ {
+ "bos_token": "<s>",
+ "do_lower_case": false,
+ "eos_token": "</s>",
+ "name_or_path": "./",
+ "pad_token": "[PAD]",
+ "replace_word_delimiter_char": " ",
+ "special_tokens_map_file": null,
+ "target_lang": "fas",
+ "tokenizer_class": "Wav2Vec2CTCTokenizer",
+ "unk_token": "[UNK]",
+ "word_delimiter_token": "|"
+ }
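
Note: the config selects Wav2Vec2CTCTokenizer, a character-level CTC tokenizer, with "|" as the word-delimiter token and target_lang set to "fas" (Persian). A minimal, self-contained sketch of how a tokenizer with these settings behaves; the toy vocabulary below is an assumption for illustration, not this repository's files.

```python
import json, os, tempfile
from transformers import Wav2Vec2CTCTokenizer

# Toy character vocabulary for illustration only.
vocab = {"[PAD]": 0, "[UNK]": 1, "|": 2, "س": 3, "ل": 4, "ا": 5, "م": 6}
vocab_path = os.path.join(tempfile.mkdtemp(), "vocab.json")
with open(vocab_path, "w", encoding="utf-8") as f:
    json.dump(vocab, f, ensure_ascii=False)

tok = Wav2Vec2CTCTokenizer(
    vocab_path,
    unk_token="[UNK]",
    pad_token="[PAD]",
    word_delimiter_token="|",         # spaces are replaced by "|" during tokenization
    replace_word_delimiter_char=" ",  # and turned back into spaces when decoding
    do_lower_case=False,
)

ids = tok("سلام سلام").input_ids  # one id per character, "|" marking the word boundary
print(ids)                        # e.g. [3, 4, 5, 6, 2, 3, 4, 5, 6]
print(tok.decode(ids))            # "سلام سلام"
```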
vocab.json ADDED
@@ -0,0 +1,144 @@
+ {
+ " ": 0,
+ "#": 1,
+ "$": 2,
+ "&": 3,
+ "(": 4,
+ ")": 5,
+ "/": 6,
+ "0": 7,
+ "1": 8,
+ "2": 9,
+ "3": 10,
+ "4": 11,
+ "5": 12,
+ "6": 13,
+ "7": 14,
+ "8": 15,
+ "9": 16,
+ "[": 17,
+ "]": 18,
+ "_": 19,
+ "a": 20,
+ "b": 21,
+ "c": 22,
+ "d": 23,
+ "e": 24,
+ "f": 25,
+ "g": 26,
+ "h": 27,
+ "i": 28,
+ "k": 29,
+ "m": 30,
+ "n": 31,
+ "o": 32,
+ "p": 33,
+ "q": 34,
+ "s": 35,
+ "t": 36,
+ "u": 37,
+ "y": 38,
+ "z": 39,
+ "«": 40,
+ "»": 41,
+ "،": 42,
+ "؛": 43,
+ "؟": 44,
+ "ء": 45,
+ "آ": 46,
+ "أ": 47,
+ "ؤ": 48,
+ "ئ": 49,
+ "ا": 50,
+ "ب": 51,
+ "ت": 52,
+ "ث": 53,
+ "ج": 54,
+ "ح": 55,
+ "خ": 56,
+ "د": 57,
+ "ذ": 58,
+ "ر": 59,
+ "ز": 60,
+ "س": 61,
+ "ش": 62,
+ "ص": 63,
+ "ض": 64,
+ "ط": 65,
+ "ظ": 66,
+ "ع": 67,
+ "غ": 68,
+ "ـ": 69,
+ "ف": 70,
+ "ق": 71,
+ "ك": 72,
+ "ل": 73,
+ "م": 74,
+ "ن": 75,
+ "ه": 76,
+ "و": 77,
+ "ى": 78,
+ "ي": 79,
+ "ً": 80,
+ "ٌ": 81,
+ "َ": 82,
+ "ُ": 83,
+ "ِ": 84,
+ "ّ": 85,
+ "ٔ": 86,
+ "٠": 87,
+ "١": 88,
+ "٢": 89,
+ "٣": 90,
+ "٤": 91,
+ "٥": 92,
+ "٦": 93,
+ "٧": 94,
+ "٨": 95,
+ "٪": 96,
+ "٫": 97,
+ "٬": 98,
+ "پ": 99,
+ "چ": 100,
+ "ژ": 101,
+ "ک": 102,
+ "گ": 103,
+ "ی": 104,
+ "۰": 105,
+ "۱": 106,
+ "۲": 107,
+ "۳": 108,
+ "۴": 109,
+ "۵": 110,
+ "۶": 111,
+ "۷": 112,
+ "۸": 113,
+ "۹": 114,
+ "…": 115,
+ "ﮐ": 116,
+ "ﮔ": 117,
+ "ﯾ": 118,
+ "ﯿ": 119,
+ "ﺍ": 120,
+ "ﺎ": 121,
+ "ﺑ": 122,
+ "ﺒ": 123,
+ "ﺖ": 124,
+ "ﺘ": 125,
+ "ﺧ": 126,
+ "ﺩ": 127,
+ "ﺪ": 128,
+ "ﺭ": 129,
+ "ﺮ": 130,
+ "ﺴ": 131,
+ "ﺷ": 132,
+ "ﺸ": 133,
+ "ﻋ": 134,
+ "ﻌ": 135,
+ "ﻢ": 136,
+ "ﻤ": 137,
+ "ﻥ": 138,
+ "ﻧ": 139,
+ "ﻭ": 140,
+ "ﻮ": 141
+ }
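
Note: the vocabulary is a flat character-to-id map (space, Latin letters, Persian/Arabic letters, diacritics, digits, punctuation, and a few Arabic presentation forms), which is the label set a CTC acoustic model predicts over. At decode time repeated ids are collapsed and the [PAD] id acts as the CTC blank. A small self-contained sketch under a toy-vocabulary assumption, not this repository's files:

```python
import json, os, tempfile
from transformers import Wav2Vec2CTCTokenizer

# Toy stand-in vocabulary (the real table above has 142 characters plus the added tokens).
vocab = {"[PAD]": 0, "|": 1, "س": 2, "ل": 3, "ا": 4, "م": 5}
path = os.path.join(tempfile.mkdtemp(), "vocab.json")
with open(path, "w", encoding="utf-8") as f:
    json.dump(vocab, f, ensure_ascii=False)
tok = Wav2Vec2CTCTokenizer(path, pad_token="[PAD]", word_delimiter_token="|")

# A CTC acoustic model emits one id per audio frame, so characters repeat and the
# blank ([PAD]) id separates them; decode() collapses the repeats and drops the blanks.
frame_ids = [2, 2, 0, 3, 3, 0, 4, 4, 0, 5, 5]
print(tok.decode(frame_ids))  # -> "سلام"
```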