set tokenizer to same repo
Browse files — tokenizer_config.json: +2 −5
tokenizer_config.json
CHANGED
@@ -2051,7 +2051,7 @@
   },
   "auto_map": {
     "AutoTokenizer": [
-      "
+      "hivata/functionary-small-v3.2--tokenization_functionary.FunctionaryTokenizer",
       null
     ]
   },
@@ -2060,10 +2060,7 @@
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|eot_id|>",
   "legacy": true,
-  "model_input_names": [
-    "input_ids",
-    "attention_mask"
-  ],
+  "model_input_names": ["input_ids", "attention_mask"],
   "model_max_length": 16384,
   "pad_token": "<|eot_id|>",
   "padding_side": "right",