Mirror of https://github.com/huggingface/transformers.git (synced 2025-10-20 17:13:56 +08:00)

Compare commits: 740f952218...circleci_d (6 commits)

- df13f454b8
- 960c80576d
- 9024333794
- da2b1346c2
- ddadd29c55
- a5ae82d1cf

.circleci/config.yml
@@ -191,9 +191,6 @@ workflows:
        - equal: [<<pipeline.project.git_url>>, https://github.com/huggingface/transformers]
        - not: <<pipeline.parameters.nightly>>
    jobs:
      - check_circleci_user
      - check_code_quality
      - check_repository_consistency
      - fetch_tests

  setup_and_quality_2:

.circleci/create_circleci_config.py
@@ -127,7 +127,7 @@ class CircleCIJob:
        timeout_cmd = f"timeout {self.command_timeout} " if self.command_timeout else ""
        marker_cmd = f"-m '{self.marker}'" if self.marker is not None else ""
        additional_flags = f" -p no:warning -o junit_family=xunit1 --junitxml=test-results/junit.xml"
-        parallel = f' << pipeline.parameters.{self.job_name}_parallelism >> '
+        parallel = 1
        steps = [
            "checkout",
            {"attach_workspace": {"at": "test_preparation"}},

.circleci/create_circleci_config.py
@@ -152,10 +152,24 @@ class CircleCIJob:
                    "command": f"TESTS=$(circleci tests split --split-by=timings {self.job_name}_test_list.txt) && echo $TESTS > splitted_tests.txt && echo $TESTS | tr ' ' '\n'" if self.parallelism else f"awk '{{printf \"%s \", $0}}' {self.job_name}_test_list.txt > splitted_tests.txt"
                }
            },
+            {"run": "pip install -U pytest"},
+            {"run": "pip install pytest-flakefinder"},
+
+            # {"run": {
+            #     "name": "Run tests",
+            #     "command": f"({timeout_cmd} python3 -m pytest {marker_cmd} -n 1 {additional_flags} {' '.join(pytest_flags)} tests/models/flaubert/test_modeling_flaubert.py::FlaubertModelTest::test_batching_equivalence | tee tests_output.txt)"}
+            # },
+
+            # {"run": {
+            #     "name": "Run tests",
+            #     "command": f"({timeout_cmd} python3 -m pytest @pytest.txt | tee tests_output.txt)"}
+            # },
+
            {"run": {
                "name": "Run tests",
-                "command": f"({timeout_cmd} python3 -m pytest {marker_cmd} -n {self.pytest_num_workers} {additional_flags} {' '.join(pytest_flags)} $(cat splitted_tests.txt) | tee tests_output.txt)"}
+                "command": f"({timeout_cmd} python3 -m pytest -v -n 8 --make-reports=tests_torch --flake-finder --flake-runs=2000 tests/models/timm_backbone/test_modeling_timm_backbone.py::TimmBackboneModelTest::test_batching_equivalence | tee tests_output.txt)"}
            },
+
            {"run": {"name": "Expand to show skipped tests", "when": "always", "command": f"python3 .circleci/parse_test_outputs.py --file tests_output.txt --skip"}},
            {"run": {"name": "Failed tests: show reasons", "when": "always", "command": f"python3 .circleci/parse_test_outputs.py --file tests_output.txt --fail"}},
            {"run": {"name": "Errors", "when": "always", "command": f"python3 .circleci/parse_test_outputs.py --file tests_output.txt --errors"}},
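
The hardcoded flake-finder command above can also be reproduced outside CircleCI; the sketch below assumes `pytest-xdist` and `pytest-flakefinder` are installed and that it is run from the repository root. The `--make-reports` flag, which appears to come from the repository's own pytest tooling, is omitted, and `--flake-runs` is lowered from 2000 so a local run stays short.

```python
# Minimal local repro sketch for the flaky test targeted by these commits.
import sys

import pytest

if __name__ == "__main__":
    sys.exit(
        pytest.main(
            [
                "-v",
                "-n", "8",           # requires pytest-xdist, as in the CI docker image
                "--flake-finder",    # duplicate the selected test ...
                "--flake-runs=50",   # ... this many times in one session (CI uses 2000)
                "tests/models/timm_backbone/test_modeling_timm_backbone.py"
                "::TimmBackboneModelTest::test_batching_equivalence",
            ]
        )
    )
```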

.circleci/create_circleci_config.py
@@ -198,7 +212,7 @@ torch_job = CircleCIJob(
    "torch",
    docker_image=[{"image": "huggingface/transformers-torch-light"}],
    marker="not generate",
-    parallelism=6,
+    parallelism=1,
)

generate_job = CircleCIJob(
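
For context on the `parallelism` change: CircleCI starts that many containers for the job, exposes `CIRCLE_NODE_INDEX`/`CIRCLE_NODE_TOTAL` to each, and `circleci tests split` hands every container its own slice of the test list (balanced by recorded timings with `--split-by=timings`). With `parallelism=1` the "split" is simply the whole list. The snippet below is only a conceptual sketch of that slicing, using a hypothetical `split_tests` helper and plain round-robin instead of timing-based balancing.

```python
# Conceptual sketch of CircleCI test splitting; `split_tests` is a hypothetical helper,
# and the real `circleci tests split --split-by=timings` balances by historical timings.
def split_tests(test_files: list[str], node_total: int, node_index: int) -> list[str]:
    # Round-robin assignment: container `node_index` out of `node_total` containers
    # gets every node_total-th file starting at its own index.
    return [t for i, t in enumerate(test_files) if i % node_total == node_index]


tests = [f"tests/models/model_{i}/test_modeling_model_{i}.py" for i in range(10)]

print(split_tests(tests, node_total=6, node_index=0))  # one slice per container at parallelism=6
print(split_tests(tests, node_total=1, node_index=0))  # the whole list at parallelism=1
```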

.circleci/create_circleci_config.py
@@ -358,7 +372,8 @@ EXAMPLES_TESTS = [examples_torch_job, examples_tensorflow_job]
PIPELINE_TESTS = [pipelines_torch_job, pipelines_tf_job]
REPO_UTIL_TESTS = [repo_utils_job]
DOC_TESTS = [doc_test_job]
-ALL_TESTS = REGULAR_TESTS + EXAMPLES_TESTS + PIPELINE_TESTS + REPO_UTIL_TESTS + DOC_TESTS + [custom_tokenizers_job] + [exotic_models_job] # fmt: skip
+# ALL_TESTS = REGULAR_TESTS + EXAMPLES_TESTS + PIPELINE_TESTS + REPO_UTIL_TESTS + DOC_TESTS + [custom_tokenizers_job] + [exotic_models_job] # fmt: skip
+ALL_TESTS = [torch_job]


def create_circleci_config(folder=None):

tests/models/timm_backbone/test_modeling_timm_backbone.py
@@ -115,11 +115,11 @@ class TimmBackboneModelTest(ModelTesterMixin, BackboneTesterMixin, PipelineTeste
    def test_config(self):
        self.config_tester.run_common_tests()

-    @is_flaky(
-        description="`TimmBackbone` has no `_init_weights`. Timm's way of weight init. seems to give larger magnitude in the intermediate values during `forward`."
-    )
-    def test_batching_equivalence(self):
-        super().test_batching_equivalence()
+    # @is_flaky(
+    #     description="`TimmBackbone` has no `_init_weights`. Timm's way of weight init. seems to give larger magnitude in the intermediate values during `forward`."
+    # )
+    def test_batching_equivalence(self, atol=1e-4, rtol=1e-4):
+        super().test_batching_equivalence(atol=atol, rtol=rtol)

    def test_timm_transformer_backbone_equivalence(self):
        timm_checkpoint = "resnet18"

tests/test_modeling_common.py
@@ -768,7 +768,7 @@ class ModelTesterMixin:
        else:
            check_determinism(first, second)

-    def test_batching_equivalence(self):
+    def test_batching_equivalence(self, atol=1e-5, rtol=1e-5):
        """
        Tests that the model supports batching and that the output is the nearly the same for the same input in
        different batch sizes.

tests/test_modeling_common.py
@@ -812,7 +812,7 @@ class ModelTesterMixin:
                torch.isinf(single_row_object).any(), f"Single row output has `inf` in {model_name} for key={key}"
            )
            try:
-                torch.testing.assert_close(batched_row, single_row_object, atol=1e-5, rtol=1e-5)
+                torch.testing.assert_close(batched_row, single_row_object, atol=atol, rtol=rtol)
            except AssertionError as e:
                msg = f"Batched and Single row outputs are not equal in {model_name} for key={key}.\n\n"
                msg += str(e)
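
Threading `atol`/`rtol` through matters because `torch.testing.assert_close` treats two tensors as close when `|actual - expected| <= atol + rtol * |expected|` elementwise, so a subclass can now loosen the bound instead of retrying the whole test. A small self-contained illustration with arbitrarily chosen values:

```python
import torch

expected = torch.tensor([1.0, 2.0, 3.0])
actual = expected + 3e-5  # perturbation just above the default tolerances for the first element

# Fails under the previously hardcoded tolerances: 3e-5 > 1e-5 + 1e-5 * 1.0.
try:
    torch.testing.assert_close(actual, expected, atol=1e-5, rtol=1e-5)
except AssertionError:
    print("not close under atol=rtol=1e-5")

# Passes once the caller widens the tolerances, as the TimmBackbone override does with 1e-4.
torch.testing.assert_close(actual, expected, atol=1e-4, rtol=1e-4)
print("close under atol=rtol=1e-4")
```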

utils/tests_fetcher.py
@@ -1134,23 +1134,23 @@ def parse_commit_message(commit_message: str) -> Dict[str, bool]:


JOB_TO_TEST_FILE = {
-    "tests_torch_and_tf": r"tests/models/.*/test_modeling_(?:tf_|(?!flax)).*",
-    "tests_torch_and_flax": r"tests/models/.*/test_modeling_(?:flax|(?!tf)).*",
-    "tests_tf": r"tests/models/.*/test_modeling_tf_.*",
+    # "tests_torch_and_tf": r"tests/models/.*/test_modeling_(?:tf_|(?!flax)).*",
+    # "tests_torch_and_flax": r"tests/models/.*/test_modeling_(?:flax|(?!tf)).*",
+    # "tests_tf": r"tests/models/.*/test_modeling_tf_.*",
    "tests_torch": r"tests/models/.*/test_modeling_(?!(?:flax_|tf_)).*",
-    "tests_generate": r"tests/models/.*/test_modeling_(?!(?:flax_|tf_)).*",
-    "tests_tokenization": r"tests/models/.*/test_tokenization.*",
-    "tests_processors": r"tests/models/.*/test_(?!(?:modeling_|tokenization_)).*", # takes feature extractors, image processors, processors
-    "examples_torch": r"examples/pytorch/.*test_.*",
-    "examples_tensorflow": r"examples/tensorflow/.*test_.*",
-    "tests_exotic_models": r"tests/models/.*(?=layoutlmv|nat|deta|udop|nougat).*",
-    "tests_custom_tokenizers": r"tests/models/.*/test_tokenization_(?=bert_japanese|openai|clip).*",
-    # "repo_utils": r"tests/[^models].*test.*", TODO later on we might want to do
-    "pipelines_tf": r"tests/models/.*/test_modeling_tf_.*",
-    "pipelines_torch": r"tests/models/.*/test_modeling_(?!(?:flax_|tf_)).*",
-    "tests_hub": r"tests/.*",
-    "tests_onnx": r"tests/models/.*/test_modeling_(?:tf_|(?!flax)).*",
-    "tests_non_model": r"tests/[^/]*?/test_.*\.py",
+    # "tests_generate": r"tests/models/.*/test_modeling_(?!(?:flax_|tf_)).*",
+    # "tests_tokenization": r"tests/models/.*/test_tokenization.*",
+    # "tests_processors": r"tests/models/.*/test_(?!(?:modeling_|tokenization_)).*", # takes feature extractors, image processors, processors
+    # "examples_torch": r"examples/pytorch/.*test_.*",
+    # "examples_tensorflow": r"examples/tensorflow/.*test_.*",
+    # "tests_exotic_models": r"tests/models/.*(?=layoutlmv|nat|deta|udop|nougat).*",
+    # "tests_custom_tokenizers": r"tests/models/.*/test_tokenization_(?=bert_japanese|openai|clip).*",
+    # # "repo_utils": r"tests/[^models].*test.*", TODO later on we might want to do
+    # "pipelines_tf": r"tests/models/.*/test_modeling_tf_.*",
+    # "pipelines_torch": r"tests/models/.*/test_modeling_(?!(?:flax_|tf_)).*",
+    # "tests_hub": r"tests/.*",
+    # "tests_onnx": r"tests/models/.*/test_modeling_(?:tf_|(?!flax)).*",
+    # "tests_non_model": r"tests/[^/]*?/test_.*\.py",
}
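
For context on how these patterns are consumed: each key names a CI job and each value is a regex over test-file paths that the fetcher logic uses to decide which collected tests belong to that job, so with everything but `tests_torch` commented out only torch modeling tests remain routable. A quick check of that one remaining pattern (its negative lookahead skips the `flax_`/`tf_` variants):

```python
import re

# The one pattern left uncommented above: torch modeling tests, excluding flax/tf variants.
TESTS_TORCH = r"tests/models/.*/test_modeling_(?!(?:flax_|tf_)).*"

paths = [
    "tests/models/bert/test_modeling_bert.py",        # matches -> routed to tests_torch
    "tests/models/bert/test_modeling_tf_bert.py",     # excluded by the lookahead
    "tests/models/bert/test_modeling_flax_bert.py",   # excluded by the lookahead
    "tests/models/bert/test_tokenization_bert.py",    # not a modeling test, no match
]

for path in paths:
    print(path, "->", bool(re.match(TESTS_TORCH, path)))
```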