Varun Wadhwa committed
app.py CHANGED

@@ -99,11 +99,11 @@ def tokenize_function(examples):
                              max_length=512)
     for _, labels in enumerate(examples['mbert_token_classes']):
         new_labels.append(align_labels_with_tokens(labels))
-    print("Printing input with tokenized output")
-    print(inputs)
-    print(inputs.tokens())
-    print(inputs.word_ids())
-    print(new_labels)
+    print("Printing partial input with tokenized output")
+    print(inputs[:100])
+    print(inputs[:100].tokens())
+    print(inputs[:100].word_ids())
+    print(new_labels[:100])
     inputs["labels"] = new_labels
     return inputs
 
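For reference, below is a minimal sketch of the debug pattern this commit trims: printing only a small sample of the tokenized output instead of the full batch. It is not the Space's app.py; it assumes a fast multilingual BERT tokenizer and pre-split word lists, the sample data, model name, and loop bound are illustrative, and the Space's align_labels_with_tokens helper is left out.

# Hedged sketch, not the Space's code: inspect only the first few tokenized
# examples using BatchEncoding.tokens(i) / word_ids(i), which take a batch
# index on fast tokenizers.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("bert-base-multilingual-cased")

# Illustrative stand-in for the `examples` dict a mapped function receives;
# column names mirror the diff, the values are made up.
examples = {
    "tokens": [["Hello", "world"], ["Privacy", "matters", "here"]],
    "mbert_token_classes": [[0, 0], [1, 0, 0]],  # hypothetical label ids
}

inputs = tokenizer(
    examples["tokens"],
    is_split_into_words=True,
    truncation=True,
    max_length=512,
)

# Print a bounded sample (here 2 examples) rather than the whole batch.
for i in range(min(2, len(examples["tokens"]))):
    print(inputs.tokens(i))    # wordpiece tokens for example i
    print(inputs.word_ids(i))  # word index each token maps back to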