Compare commits

...

8 Commits

Author SHA1 Message Date
009aa74c4e {' '.join(pytest_flags)} 2025-09-01 20:02:09 +02:00
7f12363ceb check crash 2 2025-09-01 18:35:10 +02:00
8b7a8d0d39 check crash 2025-09-01 18:31:36 +02:00
6ae8c9c666 check crash 2025-09-01 18:21:56 +02:00
6d455ae4ae check crash 2025-09-01 18:20:30 +02:00
ded3ad8087 check crash 2025-09-01 18:14:04 +02:00
c7963e479a update 2025-09-01 18:09:04 +02:00
4e267ebb1a update 2025-09-01 18:09:04 +02:00
5 changed files with 10 additions and 22 deletions

View File

@@ -200,9 +200,6 @@ workflows:
- equal: [<<pipeline.project.git_url>>, https://github.com/huggingface/transformers]
- not: <<pipeline.parameters.nightly>>
jobs:
- check_circleci_user
- check_code_quality
- check_repository_consistency
- fetch_tests
setup_and_quality_2:

View File

@@ -111,7 +111,7 @@ class CircleCIJob:
if self.install_steps is None:
self.install_steps = ["uv pip install ."]
# Use a custom patched pytest to force exit the process at the end, to avoid `Too long with no output (exceeded 10m0s): context deadline exceeded`
self.install_steps.append("uv pip install git+https://github.com/ydshieh/pytest.git@8.4.1-ydshieh")
#vself.install_steps.append("uv pip install git+https://github.com/ydshieh/pytest.git@8.4.1-ydshieh")
if self.pytest_options is None:
self.pytest_options = {}
if isinstance(self.tests_to_run, str):
@@ -180,7 +180,8 @@ class CircleCIJob:
{"run": {"name": "fetch hub objects before pytest", "command": "python3 utils/fetch_hub_objects_for_ci.py"}},
{"run": {
"name": "Run tests",
"command": f"({timeout_cmd} python3 -m pytest {marker_cmd} -n {self.pytest_num_workers} {junit_flags} {repeat_on_failure_flags} {' '.join(pytest_flags)} $(cat splitted_tests.txt) | tee tests_output.txt)"}
# "command": f"({timeout_cmd} python3 -m pytest {marker_cmd} -n {self.pytest_num_workers} {junit_flags} {repeat_on_failure_flags} {' '.join(pytest_flags)} $(cat splitted_tests.txt) | tee tests_output.txt)"}
"command": f"({timeout_cmd} python3 -m pytest {marker_cmd} -n {self.pytest_num_workers} {' '.join(pytest_flags)} $(cat tests.txt) | tee tests_output.txt)"}
},
{"run":
{
@@ -204,7 +205,7 @@ class CircleCIJob:
{"store_test_results": {"path": "test-results"}},
{"store_artifacts": {"path": "test-results/junit.xml"}},
{"store_artifacts": {"path": "reports"}},
{"store_artifacts": {"path": "tests.txt"}},
{"store_artifacts": {"path": "tests_output.txt"}},
{"store_artifacts": {"path": "splitted_tests.txt"}},
{"store_artifacts": {"path": "installed.txt"}},
]
@@ -245,7 +246,7 @@ tokenization_job = CircleCIJob(
processor_job = CircleCIJob(
"processors",
docker_image=[{"image": "huggingface/transformers-torch-light"}],
parallelism=8,
parallelism=1,
)
pipelines_torch_job = CircleCIJob(
@@ -355,7 +356,7 @@ EXAMPLES_TESTS = [examples_torch_job]
PIPELINE_TESTS = [pipelines_torch_job]
REPO_UTIL_TESTS = [repo_utils_job]
DOC_TESTS = [doc_test_job]
ALL_TESTS = REGULAR_TESTS + EXAMPLES_TESTS + PIPELINE_TESTS + REPO_UTIL_TESTS + DOC_TESTS + [custom_tokenizers_job] + [exotic_models_job] # fmt: skip
ALL_TESTS = [processor_job]
def create_circleci_config(folder=None):

1
tests.txt Normal file
View File

@@ -0,0 +1 @@
tests/models/deformable_detr/test_modeling_deformable_detr.py tests/models/rt_detr/test_modeling_rt_detr.py tests/models/xcodec/test_modeling_xcodec.py tests/models/rag/test_modeling_rag.py tests/models/vision_encoder_decoder/test_modeling_vision_encoder_decoder.py tests/models/encoder_decoder/test_modeling_encoder_decoder.py tests/models/llava/test_modeling_llava.py tests/models/seamless_m4t_v2/test_modeling_seamless_m4t_v2.py tests/models/olmo/test_modeling_olmo.py tests/models/distilbert/test_modeling_distilbert.py tests/models/flava/test_modeling_flava.py tests/models/moshi/test_modeling_moshi.py tests/models/bamba/test_modeling_bamba.py tests/models/gptj/test_modeling_gptj.py tests/models/depth_pro/test_modeling_depth_pro.py tests/models/dpt/test_modeling_dpt_hybrid.py tests/models/levit/test_modeling_levit.py tests/models/resnet/test_modeling_resnet.py tests/models/bloom/test_modeling_bloom.py tests/models/zamba/test_modeling_zamba.py tests/models/yolos/test_modeling_yolos.py tests/models/clipseg/test_modeling_clipseg.py tests/models/mgp_str/test_modeling_mgp_str.py tests/models/siglip/test_modeling_siglip.py tests/models/diffllama/test_modeling_diffllama.py tests/models/gpt_neo/test_modeling_gpt_neo.py tests/models/wavlm/test_modeling_wavlm.py tests/models/kosmos2_5/test_modeling_kosmos2_5.py tests/models/vit_mae/test_modeling_vit_mae.py tests/models/dac/test_modeling_dac.py tests/models/vitpose_backbone/test_modeling_vitpose_backbone.py tests/models/bit/test_modeling_bit.py tests/models/regnet/test_modeling_regnet.py tests/models/glm4v/test_modeling_glm4v.py tests/models/gemma3/test_modeling_gemma3.py tests/models/xlm_roberta_xl/test_modeling_xlm_roberta_xl.py tests/models/electra/test_modeling_electra.py tests/models/layoutlm/test_modeling_layoutlm.py tests/models/bitnet/test_modeling_bitnet.py tests/models/esm/test_modeling_esm.py tests/models/csm/test_modeling_csm.py tests/models/megatron_bert/test_modeling_megatron_bert.py 
tests/models/deberta_v2/test_modeling_deberta_v2.py tests/models/decision_transformer/test_modeling_decision_transformer.py tests/models/fuyu/test_modeling_fuyu.py tests/models/mistral/test_modeling_mistral.py tests/models/layoutlmv2/test_modeling_layoutlmv2.py tests/models/dinat/test_modeling_dinat.py tests/models/glm4/test_modeling_glm4.py tests/models/jetmoe/test_modeling_jetmoe.py tests/models/phimoe/test_modeling_phimoe.py

View File

@@ -23,8 +23,8 @@ import math
import os
MAX_PARALLEL_NODES = 8 # TODO create a mapping!
AVERAGE_TESTS_PER_NODES = 5
MAX_PARALLEL_NODES = 5 # TODO create a mapping!
AVERAGE_TESTS_PER_NODES = 1
def count_lines(filepath):
@@ -41,7 +41,7 @@ def compute_parallel_nodes(line_count, max_tests_per_node=10):
num_nodes = math.ceil(line_count / AVERAGE_TESTS_PER_NODES)
if line_count < 4:
return 1
return min(MAX_PARALLEL_NODES, num_nodes)
return MAX_PARALLEL_NODES
def process_artifacts(input_file, output_file):

View File

@@ -1105,18 +1105,7 @@ def parse_commit_message(commit_message: str) -> dict[str, bool]:
JOB_TO_TEST_FILE = {
"tests_torch": r"tests/models/.*/test_modeling_(?!(?:flax_|tf_)).*",
"tests_generate": r"tests/models/.*/test_modeling_(?!(?:flax_|tf_)).*",
"tests_tokenization": r"tests/(?:models/.*/test_tokenization.*|test_tokenization_mistral_common\.py)",
"tests_processors": r"tests/models/.*/test_(?!(?:modeling_|tokenization_)).*", # takes feature extractors, image processors, processors
"examples_torch": r"examples/pytorch/.*test_.*",
"tests_exotic_models": r"tests/models/.*(?=layoutlmv|nat|deta|udop|nougat).*",
"tests_custom_tokenizers": r"tests/models/.*/test_tokenization_(?=bert_japanese|openai|clip).*",
# "repo_utils": r"tests/[^models].*test.*", TODO later on we might want to do
"pipelines_torch": r"tests/models/.*/test_modeling_(?!(?:flax_|tf_)).*",
"tests_hub": r"tests/.*",
"tests_onnx": r"tests/models/.*/test_modeling_(?:tf_|(?!flax)).*",
"tests_non_model": r"tests/[^/]*?/test_.*\.py",
}