
Commit a3c5883

Rename file for consistency.
1 parent daf8beb commit a3c5883

16 files changed (+15 −15 lines)
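The change is mechanical: move tests/tokenization_tests_commons.py to tests/test_tokenization_common.py, then point every test module's import at the new name. Not part of the commit, but as an illustration, here is a minimal Python sketch (assumed to run from the repository root) of how such a rename and import rewrite could be scripted:

import pathlib
import subprocess

OLD = "tokenization_tests_commons"
NEW = "test_tokenization_common"

# Rename the module itself so git records it as a rename.
subprocess.run(["git", "mv", f"tests/{OLD}.py", f"tests/{NEW}.py"], check=True)

# Rewrite every remaining reference to the old module name in the
# test suite and in the new-model templates.
for root in ("tests", "templates"):
    for path in pathlib.Path(root).rglob("*.py"):
        text = path.read_text()
        if OLD in text:
            path.write_text(text.replace(OLD, NEW))

Run in one pass, this reproduces the +15/−15 import edits shown below.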

templates/adding_a_new_model/tests/test_tokenization_xxx.py (+1 −1)

@@ -19,7 +19,7 @@

from transformers.tokenization_bert import VOCAB_FILES_NAMES, XxxTokenizer

-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases


class XxxTokenizationTest(CommonTestCases.CommonTokenizerTester):

tests/test_configuration_common.py (+1 −1)

@@ -17,7 +17,7 @@
import json
import os

-from .tokenization_tests_commons import TemporaryDirectory
+from .test_tokenization_common import TemporaryDirectory


class ConfigTester(object):

tests/test_model_card.py (+1 −1)

@@ -20,7 +20,7 @@

from transformers.modelcard import ModelCard

-from .tokenization_tests_commons import TemporaryDirectory
+from .test_tokenization_common import TemporaryDirectory


class ModelCardTester(unittest.TestCase):

tests/test_optimization.py (+1 −1)

@@ -19,7 +19,7 @@

from transformers import is_torch_available

-from .tokenization_tests_commons import TemporaryDirectory
+from .test_tokenization_common import TemporaryDirectory
from .utils import require_torch

tests/test_tokenization_albert.py (+1 −1)

@@ -18,7 +18,7 @@

from transformers.tokenization_albert import AlbertTokenizer

-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases


SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures/spiece.model")

tests/test_tokenization_bert.py (+1 −1)

@@ -27,7 +27,7 @@
    _is_whitespace,
)

-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
from .utils import slow

tests/test_tokenization_bert_japanese.py (+1 −1)

@@ -25,7 +25,7 @@
    MecabTokenizer,
)

-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
from .utils import custom_tokenizers, slow

tests/tokenization_tests_commons.py → tests/test_tokenization_common.py

File renamed without changes.

tests/test_tokenization_ctrl.py (+1 −1)

@@ -19,7 +19,7 @@

from transformers.tokenization_ctrl import VOCAB_FILES_NAMES, CTRLTokenizer

-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases


class CTRLTokenizationTest(CommonTestCases.CommonTokenizerTester):

tests/test_tokenization_gpt2.py (+1 −1)

@@ -20,7 +20,7 @@

from transformers.tokenization_gpt2 import VOCAB_FILES_NAMES, GPT2Tokenizer

-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases


class GPT2TokenizationTest(CommonTestCases.CommonTokenizerTester):

tests/test_tokenization_openai.py (+1 −1)

@@ -19,7 +19,7 @@

from transformers.tokenization_openai import VOCAB_FILES_NAMES, OpenAIGPTTokenizer

-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases


class OpenAIGPTTokenizationTest(CommonTestCases.CommonTokenizerTester):

tests/test_tokenization_roberta.py (+1 −1)

@@ -20,7 +20,7 @@

from transformers.tokenization_roberta import VOCAB_FILES_NAMES, RobertaTokenizer

-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
from .utils import slow

tests/test_tokenization_t5.py (+1 −1)

@@ -19,7 +19,7 @@
from transformers.tokenization_t5 import T5Tokenizer
from transformers.tokenization_xlnet import SPIECE_UNDERLINE

-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases


SAMPLE_VOCAB = os.path.join(os.path.dirname(os.path.abspath(__file__)), "fixtures/test_sentencepiece.model")

tests/test_tokenization_transfo_xl.py (+1 −1)

@@ -19,7 +19,7 @@

from transformers import is_torch_available

-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
from .utils import require_torch

tests/test_tokenization_xlm.py (+1 −1)

@@ -19,7 +19,7 @@

from transformers.tokenization_xlm import VOCAB_FILES_NAMES, XLMTokenizer

-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
from .utils import slow

tests/test_tokenization_xlnet.py (+1 −1)

@@ -18,7 +18,7 @@

from transformers.tokenization_xlnet import SPIECE_UNDERLINE, XLNetTokenizer

-from .tokenization_tests_commons import CommonTestCases
+from .test_tokenization_common import CommonTestCases
from .utils import slow
