Spaces:
Sleeping
Sleeping
Update infer.py
Browse files
infer.py
CHANGED
@@ -26,8 +26,8 @@ def tokenize_function(example, tokenizer):
     words_lengths = [len(item) for item in question_sub_words_ids + context_sub_words_ids]

     return {
-        "input_ids": input_ids,
-        "words_lengths": words_lengths,
+        "input_ids": input_ids[:tokenizer.max_len_single_sentence],
+        "words_lengths": words_lengths[:tokenizer.max_len_single_sentence],
         "valid": valid
     }