Commit
Avoid TensorFlow import in Trainer
sgugger committed May 21, 2021
1 parent e2c1dd0 commit b8697bc
Showing 1 changed file with 3 additions and 1 deletion.
src/transformers/modelcard.py: 3 additions & 1 deletion
@@ -40,7 +40,6 @@
     is_tokenizers_available,
     is_torch_available,
 )
-from .models.auto.configuration_auto import ALL_PRETRAINED_CONFIG_ARCHIVE_MAP
 from .training_args import ParallelMode
 from .utils import logging

@@ -145,6 +144,9 @@ def from_pretrained(cls, pretrained_model_name_or_path, **kwargs):
             modelcard = ModelCard.from_pretrained('bert-base-uncased', output_attentions=True, foo=False)
         """
+        # This imports every model so let's do it dynamically here.
+        from transformers.models.auto.configuration_auto import ALL_PRETRAINED_CONFIG_ARCHIVE_MAP
+
         cache_dir = kwargs.pop("cache_dir", None)
         proxies = kwargs.pop("proxies", None)
         find_from_standard_name = kwargs.pop("find_from_standard_name", True)
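
The fix moves the import of ALL_PRETRAINED_CONFIG_ARCHIVE_MAP from module scope into the body of from_pretrained, so merely importing transformers.modelcard (and anything that imports it, such as the Trainer) no longer pulls in every model configuration, including the TensorFlow ones. Below is a minimal, self-contained sketch of the same deferred-import pattern; build_report and the use of json as a stand-in for the expensive dependency are illustrative assumptions, not part of the commit.

def build_report(data):
    # Deferred import: json is only loaded the first time this function
    # runs, so importing the module that defines build_report() stays
    # cheap. In the commit, the expensive dependency is the auto
    # configuration module that pulls in every model; json merely stands
    # in for such a heavy import here.
    import json

    return json.dumps(data, indent=2)


if __name__ == "__main__":
    print(build_report({"model": "bert-base-uncased"}))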
