--- a/src/transformers/testing_utils.py
+++ b/src/transformers/testing_utils.py
@@ -15,7 +15,6 @@
 import ast
 import collections
 import contextlib
-import copy
 import doctest
 import functools
 import gc
@@ -1598,58 +1597,6 @@ def assert_screenout(out, what):
     assert match_str != -1, f"expecting to find {what} in output: f{out_pr}"


-def set_model_tester_for_less_flaky_test(test_case):
-    # NOTE: this function edits the config object, which may lead to hard-to-debug side-effects. Use with caution.
-    # Do not use in tests/models where objects behave very differently based on the config's hidden layer settings
-    # (e.g. KV caches, sliding window attention, ...)
-
-    # TODO (if possible): Avoid exceptional cases
-    exceptional_classes = [
-        "ZambaModelTester",
-        "Zamba2ModelTester",
-        "RwkvModelTester",
-        "AriaVisionText2TextModelTester",
-        "GPTNeoModelTester",
-        "DPTModelTester",
-        "Qwen3NextModelTester",
-    ]
-    if test_case.model_tester.__class__.__name__ in exceptional_classes:
-        return
-
-    target_num_hidden_layers = 1
-    if hasattr(test_case.model_tester, "out_features") or hasattr(test_case.model_tester, "out_indices"):
-        target_num_hidden_layers = None
-
-    if hasattr(test_case.model_tester, "num_hidden_layers") and target_num_hidden_layers is not None:
-        test_case.model_tester.num_hidden_layers = target_num_hidden_layers
-    if (
-        hasattr(test_case.model_tester, "vision_config")
-        and "num_hidden_layers" in test_case.model_tester.vision_config
-        and target_num_hidden_layers is not None
-    ):
-        test_case.model_tester.vision_config = copy.deepcopy(test_case.model_tester.vision_config)
-        if isinstance(test_case.model_tester.vision_config, dict):
-            test_case.model_tester.vision_config["num_hidden_layers"] = 1
-        else:
-            test_case.model_tester.vision_config.num_hidden_layers = 1
-    if (
-        hasattr(test_case.model_tester, "text_config")
-        and "num_hidden_layers" in test_case.model_tester.text_config
-        and target_num_hidden_layers is not None
-    ):
-        test_case.model_tester.text_config = copy.deepcopy(test_case.model_tester.text_config)
-        if isinstance(test_case.model_tester.text_config, dict):
-            test_case.model_tester.text_config["num_hidden_layers"] = 1
-        else:
-            test_case.model_tester.text_config.num_hidden_layers = 1
-
-    # A few model class specific handling
-
-    # For Albert
-    if hasattr(test_case.model_tester, "num_hidden_groups"):
-        test_case.model_tester.num_hidden_groups = test_case.model_tester.num_hidden_layers
-
-
 def set_config_for_less_flaky_test(config):
     target_attrs = [
         "rms_norm_eps",
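For context, a minimal sketch (not part of the diff) of what the deleted helper did: it clamped the tester's layer counts to a single hidden layer so the tiny test models build faster and have fewer layers to drift numerically. `DummyModelTester` and `DummyTestCase` are hypothetical stand-ins for a real `ModelTesterMixin` setup.

```python
import copy


class DummyModelTester:  # hypothetical stand-in for a real model tester
    num_hidden_layers = 5
    vision_config = {"num_hidden_layers": 4, "hidden_size": 32}


class DummyTestCase:  # hypothetical stand-in for the unittest.TestCase
    model_tester = DummyModelTester()


case = DummyTestCase()
tester = case.model_tester

# Mirroring the deleted helper's core effect: clamp layer counts to 1,
# deep-copying nested configs so class-level dicts are not mutated in place.
if hasattr(tester, "num_hidden_layers"):
    tester.num_hidden_layers = 1
if hasattr(tester, "vision_config") and "num_hidden_layers" in tester.vision_config:
    tester.vision_config = copy.deepcopy(tester.vision_config)
    tester.vision_config["num_hidden_layers"] = 1

assert tester.num_hidden_layers == 1
assert tester.vision_config["num_hidden_layers"] == 1
```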
--- a/tests/models/efficientloftr/test_modeling_efficientloftr.py
+++ b/tests/models/efficientloftr/test_modeling_efficientloftr.py
@@ -23,7 +23,6 @@ from transformers.testing_utils import (
     require_vision,
     set_config_for_less_flaky_test,
     set_model_for_less_flaky_test,
-    set_model_tester_for_less_flaky_test,
     slow,
     torch_device,
 )
@@ -360,8 +359,6 @@ class EfficientLoFTRModelTest(ModelTesterMixin, unittest.TestCase):
                     msg += str(e)
                     raise AssertionError(msg)

-        set_model_tester_for_less_flaky_test(self)
-
         config, batched_input = self.model_tester.prepare_config_and_inputs_for_common()
         set_config_for_less_flaky_test(config)

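With the tester-level helper gone, a test keeps only the config- and model-level helpers. Below is a hedged sketch of the remaining pattern, assuming the standard `ModelTesterMixin` attributes (`model_tester`, `all_model_classes`); `_run_stable_forward` is an illustrative name, not a method from this file.

```python
from transformers.testing_utils import (
    set_config_for_less_flaky_test,
    set_model_for_less_flaky_test,
)


def _run_stable_forward(self):
    # Illustrative only: relax config-level attributes (e.g. eps-style values
    # such as rms_norm_eps), then apply model-level tweaks to each instance.
    config, batched_input = self.model_tester.prepare_config_and_inputs_for_common()
    set_config_for_less_flaky_test(config)
    for model_class in self.all_model_classes:
        model = model_class(config)
        set_model_for_less_flaky_test(model)
        model.eval()
```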