# coding=utf-8
# Copyright 2021 The IDEA Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import TYPE_CHECKING

from transformers.file_utils import _LazyModule, is_torch_available
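
# Map each submodule to the public names it exports. _LazyModule uses this
# mapping to defer the real imports until one of the attributes is first accessed.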
_import_structure = {
    "configuration_longformer": ["LongformerConfig"],
    "tokenization_longformer": ["LongformerTokenizer"],
}
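
# The modeling classes depend on PyTorch, so they are only registered when
# torch is available.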
if is_torch_available():
    _import_structure["modeling_longformer"] = [
        "LongformerModel",
        "LongformerForMaskedLM",
        "LongformerForMultipleChoice",
        "LongformerPreTrainedModel",
        "LongformerForQuestionAnswering",
        "LongformerForSequenceClassification",
        "LongformerForTokenClassification",
    ]
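
# For static type checkers and IDEs, import everything eagerly; at runtime the
# module is replaced by a _LazyModule that imports on first attribute access.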
if TYPE_CHECKING:
    from .configuration_longformer import LongformerConfig
    from .tokenization_longformer import LongformerTokenizer

    if is_torch_available():
        from .modeling_longformer import (
            LongformerModel,
            LongformerForMaskedLM,
            LongformerForMultipleChoice,
            LongformerPreTrainedModel,
            LongformerForQuestionAnswering,
            LongformerForSequenceClassification,
            LongformerForTokenClassification,
        )
else:
    import sys

    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure)