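# __init__.py for the Codify model package, following the transformers
# lazy-import convention: public names are declared in _import_structure and
# loaded on demand via _LazyModule, with heavy optional dependencies
# (tokenizers, torch) guarded so the package still imports without them.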
from typing import TYPE_CHECKING

from transformers.utils import (
    OptionalDependencyNotAvailable,
    _LazyModule,
    is_tokenizers_available,
    is_torch_available,
)

_import_structure = {
    "configuration_codify": ["CODIFY_PRETRAINED_CONFIG_ARCHIVE_MAP", "CodifyConfig", "CodifyOnnxConfig"],
}
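
# Register optional submodules only when their dependencies are installed;
# the OptionalDependencyNotAvailable guard silently skips them otherwise.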
try:
    if not is_tokenizers_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["tokenization_codify_fast"] = ["CodifyTokenizerFast"]

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_codify"] = [
        "CODIFY_PRETRAINED_MODEL_ARCHIVE_LIST",
        "CodifyForCausalLM",
        "CodifyModel",
        "CodifyPreTrainedModel",
    ]
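
# Static type checkers follow the real imports below; at runtime the module
# is swapped for a _LazyModule so submodules load on first attribute access.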
if TYPE_CHECKING:
    from .configuration_codify import CODIFY_PRETRAINED_CONFIG_ARCHIVE_MAP, CodifyConfig, CodifyOnnxConfig

    try:
        if not is_tokenizers_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_codify_fast import CodifyTokenizerFast

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_codify import (
            CODIFY_PRETRAINED_MODEL_ARCHIVE_LIST,
            CodifyForCausalLM,
            CodifyModel,
            CodifyPreTrainedModel,
        )
else:
    import sys

    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
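
# A minimal usage sketch (assuming this file is the __init__.py of a package
# importable as `codify`; the names come from _import_structure above):
#
#     from codify import CodifyConfig, CodifyForCausalLM
#
#     config = CodifyConfig()
#     model = CodifyForCausalLM(config)  # first attribute access triggers
#                                        # the lazy import of modeling_codify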