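# BARThez tokenizer package initializer: builds a lazy import structure so that the
# slow and fast tokenizers are only imported when their optional dependencies
# (sentencepiece, tokenizers) are available and an attribute is actually accessed.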
from typing import TYPE_CHECKING

from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_sentencepiece_available, is_tokenizers_available


_import_structure = {}

# Expose the slow (SentencePiece-based) tokenizer only when sentencepiece is installed.
try:
    if not is_sentencepiece_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["tokenization_barthez"] = ["BarthezTokenizer"]

# Expose the fast (Rust-backed) tokenizer only when the tokenizers library is installed.
try:
    if not is_tokenizers_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["tokenization_barthez_fast"] = ["BarthezTokenizerFast"]

if TYPE_CHECKING:
    # For static type checkers, import the tokenizer classes directly, guarded by the
    # same optional-dependency checks as the lazy import structure above.
    try:
        if not is_sentencepiece_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_barthez import BarthezTokenizer

    try:
        if not is_tokenizers_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_barthez_fast import BarthezTokenizerFast

else:
    import sys

    # At runtime, replace this module with a lazy proxy that only imports a submodule
    # when one of its attributes is first accessed.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
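# Usage sketch (illustrative, not part of this module): with the transformers package
# and sentencepiece installed, the lazy module resolves the class on first access, e.g.
#
#     from transformers.models.barthez import BarthezTokenizer
#     tokenizer = BarthezTokenizer.from_pretrained("moussaKam/barthez")  # example checkpoint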