Skip to content

Commit 4975002

Browse files
authored
Reorganize file utils (#16264)
* Split file_utils in several submodules * Fixes * Add back more objects * More fixes * Who exactly decided to import that from there? * Second suggestion to code with code review * Revert wrong move * Fix imports * Adapt all imports * Adapt all imports everywhere * Revert this import, will fix in a separate commit
1 parent 7135603 commit 4975002

File tree

583 files changed

+4666
-4510
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

583 files changed

+4666
-4510
lines changed

ISSUES.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,7 @@ You are not required to read the following guidelines before opening an issue. H
7171
File "/transformers/src/transformers/__init__.py", line 34, in <module>
7272
from . import dependency_versions_check
7373
File "/transformers/src/transformers/dependency_versions_check.py", line 34, in <module>
74-
from .file_utils import is_tokenizers_available
74+
from .utils import is_tokenizers_available
7575
File "/transformers/src/transformers/file_utils.py", line 40, in <module>
7676
from tqdm.auto import tqdm
7777
ModuleNotFoundError: No module named 'tqdm.auto'
@@ -124,7 +124,7 @@ You are not required to read the following guidelines before opening an issue. H
124124
File "/transformers/src/transformers/__init__.py", line 34, in <module>
125125
from . import dependency_versions_check
126126
File "/transformers/src/transformers/dependency_versions_check.py", line 34, in <module>
127-
from .file_utils import is_tokenizers_available
127+
from .utils import is_tokenizers_available
128128
File "/transformers/src/transformers/file_utils.py", line 40, in <module>
129129
from tqdm.auto import tqdm
130130
ModuleNotFoundError: No module named 'tqdm.auto'

examples/flax/image-captioning/run_image_captioning_flax.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@
5252
HfArgumentParser,
5353
is_tensorboard_available,
5454
)
55-
from transformers.file_utils import get_full_repo_name, is_offline_mode
55+
from transformers.utils import get_full_repo_name, is_offline_mode
5656

5757

5858
logger = logging.getLogger(__name__)

examples/flax/language-modeling/run_clm_flax.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -57,8 +57,8 @@
5757
is_tensorboard_available,
5858
set_seed,
5959
)
60-
from transformers.file_utils import get_full_repo_name
6160
from transformers.testing_utils import CaptureLogger
61+
from transformers.utils import get_full_repo_name
6262

6363

6464
logger = logging.getLogger(__name__)

examples/flax/language-modeling/run_mlm_flax.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@
5858
is_tensorboard_available,
5959
set_seed,
6060
)
61-
from transformers.file_utils import get_full_repo_name
61+
from transformers.utils import get_full_repo_name
6262

6363

6464
MODEL_CONFIG_CLASSES = list(FLAX_MODEL_FOR_MASKED_LM_MAPPING.keys())

examples/flax/language-modeling/run_t5_mlm_flax.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -56,8 +56,8 @@
5656
is_tensorboard_available,
5757
set_seed,
5858
)
59-
from transformers.file_utils import get_full_repo_name
6059
from transformers.models.t5.modeling_flax_t5 import shift_tokens_right
60+
from transformers.utils import get_full_repo_name
6161

6262

6363
MODEL_CONFIG_CLASSES = list(FLAX_MODEL_FOR_MASKED_LM_MAPPING.keys())

examples/flax/question-answering/run_qa.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -53,8 +53,7 @@
5353
PreTrainedTokenizerFast,
5454
is_tensorboard_available,
5555
)
56-
from transformers.file_utils import get_full_repo_name
57-
from transformers.utils import check_min_version
56+
from transformers.utils import check_min_version, get_full_repo_name
5857
from utils_qa import postprocess_qa_predictions
5958

6059

examples/flax/summarization/run_summarization_flax.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@
5454
HfArgumentParser,
5555
is_tensorboard_available,
5656
)
57-
from transformers.file_utils import get_full_repo_name, is_offline_mode
57+
from transformers.utils import get_full_repo_name, is_offline_mode
5858

5959

6060
logger = logging.getLogger(__name__)

examples/flax/text-classification/run_flax_glue.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -48,8 +48,7 @@
4848
TrainingArguments,
4949
is_tensorboard_available,
5050
)
51-
from transformers.file_utils import get_full_repo_name
52-
from transformers.utils import check_min_version
51+
from transformers.utils import check_min_version, get_full_repo_name
5352

5453

5554
logger = logging.getLogger(__name__)

examples/flax/token-classification/run_flax_ner.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -47,8 +47,7 @@
4747
HfArgumentParser,
4848
is_tensorboard_available,
4949
)
50-
from transformers.file_utils import get_full_repo_name
51-
from transformers.utils import check_min_version
50+
from transformers.utils import check_min_version, get_full_repo_name
5251
from transformers.utils.versions import require_version
5352

5453

examples/flax/vision/run_image_classification.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@
5353
is_tensorboard_available,
5454
set_seed,
5555
)
56-
from transformers.file_utils import get_full_repo_name
56+
from transformers.utils import get_full_repo_name
5757

5858

5959
logger = logging.getLogger(__name__)

examples/legacy/seq2seq/old_test_tatoeba_conversion.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -16,9 +16,9 @@
1616
import tempfile
1717
import unittest
1818

19-
from transformers.file_utils import cached_property
2019
from transformers.models.marian.convert_marian_tatoeba_to_pytorch import DEFAULT_REPO, TatoebaConverter
2120
from transformers.testing_utils import slow
21+
from transformers.utils import cached_property
2222

2323

2424
@unittest.skipUnless(os.path.exists(DEFAULT_REPO), "Tatoeba directory does not exist.")

examples/legacy/seq2seq/seq2seq_trainer.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@
1919
from torch.utils.data import DistributedSampler, RandomSampler
2020

2121
from transformers import PreTrainedModel, Trainer, logging
22-
from transformers.file_utils import is_torch_tpu_available
2322
from transformers.integrations import is_fairscale_available
2423
from transformers.models.fsmt.configuration_fsmt import FSMTConfig
2524
from transformers.optimization import (
@@ -34,6 +33,7 @@
3433
)
3534
from transformers.trainer_pt_utils import get_tpu_sampler
3635
from transformers.training_args import ParallelMode
36+
from transformers.utils import is_torch_tpu_available
3737

3838

3939
if is_fairscale_available():

examples/legacy/seq2seq/utils.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -34,8 +34,8 @@
3434

3535
from sentence_splitter import add_newline_to_end_of_each_sentence
3636
from transformers import BartTokenizer, EvalPrediction, PreTrainedTokenizer, T5Tokenizer
37-
from transformers.file_utils import cached_property
3837
from transformers.models.bart.modeling_bart import shift_tokens_right
38+
from transformers.utils import cached_property
3939

4040

4141
try:

examples/pytorch/language-modeling/run_clm_no_trainer.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@
5151
get_scheduler,
5252
set_seed,
5353
)
54-
from transformers.file_utils import get_full_repo_name
54+
from transformers.utils import get_full_repo_name
5555
from transformers.utils.versions import require_version
5656

5757

examples/pytorch/language-modeling/run_mlm_no_trainer.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@
5151
get_scheduler,
5252
set_seed,
5353
)
54-
from transformers.file_utils import get_full_repo_name
54+
from transformers.utils import get_full_repo_name
5555
from transformers.utils.versions import require_version
5656

5757

examples/pytorch/multiple-choice/run_swag.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -41,10 +41,9 @@
4141
default_data_collator,
4242
set_seed,
4343
)
44-
from transformers.file_utils import PaddingStrategy
4544
from transformers.tokenization_utils_base import PreTrainedTokenizerBase
4645
from transformers.trainer_utils import get_last_checkpoint
47-
from transformers.utils import check_min_version
46+
from transformers.utils import PaddingStrategy, check_min_version
4847

4948

5049
# Will error if the minimal version of Transformers is not installed. Remove at your own risks.

examples/pytorch/multiple-choice/run_swag_no_trainer.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@
5050
get_scheduler,
5151
set_seed,
5252
)
53-
from transformers.file_utils import PaddingStrategy, get_full_repo_name
53+
from transformers.utils import PaddingStrategy, get_full_repo_name
5454

5555

5656
logger = logging.getLogger(__name__)

examples/pytorch/question-answering/run_qa_beam_search_no_trainer.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -47,8 +47,7 @@
4747
get_scheduler,
4848
set_seed,
4949
)
50-
from transformers.file_utils import get_full_repo_name
51-
from transformers.utils import check_min_version
50+
from transformers.utils import check_min_version, get_full_repo_name
5251
from transformers.utils.versions import require_version
5352
from utils_qa import postprocess_qa_predictions_with_beam_search
5453

examples/pytorch/question-answering/run_qa_no_trainer.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -49,8 +49,7 @@
4949
get_scheduler,
5050
set_seed,
5151
)
52-
from transformers.file_utils import get_full_repo_name
53-
from transformers.utils import check_min_version
52+
from transformers.utils import check_min_version, get_full_repo_name
5453
from transformers.utils.versions import require_version
5554
from utils_qa import postprocess_qa_predictions
5655

examples/pytorch/speech-pretraining/run_wav2vec2_pretraining_no_trainer.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -42,8 +42,8 @@
4242
is_wandb_available,
4343
set_seed,
4444
)
45-
from transformers.file_utils import get_full_repo_name
4645
from transformers.models.wav2vec2.modeling_wav2vec2 import _compute_mask_indices, _sample_negative_indices
46+
from transformers.utils import get_full_repo_name
4747

4848

4949
logger = logging.getLogger(__name__)

examples/pytorch/summarization/run_summarization.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -45,9 +45,8 @@
4545
Seq2SeqTrainingArguments,
4646
set_seed,
4747
)
48-
from transformers.file_utils import is_offline_mode
4948
from transformers.trainer_utils import get_last_checkpoint
50-
from transformers.utils import check_min_version
49+
from transformers.utils import check_min_version, is_offline_mode
5150
from transformers.utils.versions import require_version
5251

5352

examples/pytorch/summarization/run_summarization_no_trainer.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@
4949
get_scheduler,
5050
set_seed,
5151
)
52-
from transformers.file_utils import get_full_repo_name, is_offline_mode
52+
from transformers.utils import get_full_repo_name, is_offline_mode
5353
from transformers.utils.versions import require_version
5454

5555

examples/pytorch/test_pytorch_examples.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,8 @@
2525
import torch
2626

2727
from transformers import ViTMAEForPreTraining, Wav2Vec2ForPreTraining
28-
from transformers.file_utils import is_apex_available
2928
from transformers.testing_utils import CaptureLogger, TestCasePlus, get_gpu_count, slow, torch_device
29+
from transformers.utils import is_apex_available
3030

3131

3232
SRC_DIRS = [

examples/pytorch/text-classification/run_glue_no_trainer.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@
4040
get_scheduler,
4141
set_seed,
4242
)
43-
from transformers.file_utils import get_full_repo_name
43+
from transformers.utils import get_full_repo_name
4444
from transformers.utils.versions import require_version
4545

4646

examples/pytorch/token-classification/run_ner_no_trainer.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@
4848
get_scheduler,
4949
set_seed,
5050
)
51-
from transformers.file_utils import get_full_repo_name
51+
from transformers.utils import get_full_repo_name
5252
from transformers.utils.versions import require_version
5353

5454

examples/pytorch/translation/run_translation_no_trainer.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@
5050
get_scheduler,
5151
set_seed,
5252
)
53-
from transformers.file_utils import get_full_repo_name
53+
from transformers.utils import get_full_repo_name
5454
from transformers.utils.versions import require_version
5555

5656

examples/tensorflow/multiple-choice/run_swag.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -43,9 +43,8 @@
4343
create_optimizer,
4444
set_seed,
4545
)
46-
from transformers.file_utils import PaddingStrategy
4746
from transformers.tokenization_utils_base import PreTrainedTokenizerBase
48-
from transformers.utils import check_min_version
47+
from transformers.utils import PaddingStrategy, check_min_version
4948

5049

5150
# Will error if the minimal version of Transformers is not installed. Remove at your own risks.

examples/tensorflow/question-answering/run_qa.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -41,8 +41,7 @@
4141
TFTrainingArguments,
4242
set_seed,
4343
)
44-
from transformers.file_utils import CONFIG_NAME, TF2_WEIGHTS_NAME
45-
from transformers.utils import check_min_version
44+
from transformers.utils import CONFIG_NAME, TF2_WEIGHTS_NAME, check_min_version
4645
from utils_qa import postprocess_qa_predictions
4746

4847

examples/tensorflow/summarization/run_summarization.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -43,9 +43,8 @@
4343
create_optimizer,
4444
set_seed,
4545
)
46-
from transformers.file_utils import is_offline_mode
4746
from transformers.trainer_utils import get_last_checkpoint
48-
from transformers.utils import check_min_version
47+
from transformers.utils import check_min_version, is_offline_mode
4948
from transformers.utils.versions import require_version
5049

5150

examples/tensorflow/text-classification/run_text_classification.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@
3737
TFTrainingArguments,
3838
set_seed,
3939
)
40-
from transformers.file_utils import CONFIG_NAME, TF2_WEIGHTS_NAME
40+
from transformers.utils import CONFIG_NAME, TF2_WEIGHTS_NAME
4141

4242

4343
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "1" # Reduce the amount of console output from TF

0 commit comments

Comments (0)