Mirror of https://github.com/pytorch/pytorch.git, synced 2025-10-20 21:14:14 +08:00
Revert "test fixing benchmarks (#162503)"
This reverts commit 484c4093a87a3e6767e55ed553f95db8fc137442. Reverted https://github.com/pytorch/pytorch/pull/162503 on behalf of https://github.com/huydhn due to Sorry for reverting your change but it regresses CPU perf smoke test ([comment](https://github.com/pytorch/pytorch/pull/162503#issuecomment-3273554680))
```diff
@@ -386,10 +386,10 @@ elif [[ $TEST_CONFIG == *"perf_hf"* ]]; then
  test_hf_perf
elif [[ $TEST_CONFIG == *"perf_timm"* ]]; then
  test_timm_perf
elif [[ $TEST_CONFIG == *"aot_inductor_perf_smoketest"* ]]; then
  test_aoti_torchbench_smoketest "${SHARD_NUMBER}"
elif [[ $TEST_CONFIG == *"perf_smoketest"* ]]; then
  test_torchbench_smoketest "${SHARD_NUMBER}"
elif [[ $TEST_CONFIG == *"aot_inductor_perf_smoketest"* ]]; then
  test_aoti_torchbench_smoketest "${SHARD_NUMBER}"
elif [[ $TEST_CONFIG == *"mps"* ]]; then
  test_python_mps
elif [[ $NUM_TEST_SHARDS -gt 1 ]]; then
```
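One detail worth noting about this `elif` chain: in bash, `[[ $TEST_CONFIG == *"perf_smoketest"* ]]` is a substring-style glob, so it also matches `aot_inductor_perf_smoketest`; whichever branch is evaluated first decides which smoke test runs. A minimal sketch of the same matching semantics using Python's `fnmatch` (the config strings are taken from the hunk above, everything else is illustrative):

```python
from fnmatch import fnmatch

# Config strings taken from the elif chain above.
configs = ["perf_smoketest", "aot_inductor_perf_smoketest", "mps"]

for cfg in configs:
    aoti = fnmatch(cfg, "*aot_inductor_perf_smoketest*")
    broad = fnmatch(cfg, "*perf_smoketest*")
    print(f"{cfg}: aoti={aoti} perf_smoketest={broad}")

# "aot_inductor_perf_smoketest" matches both patterns, so the order of
# the elif branches determines whether the AOTI smoke test or the
# default torchbench smoke test is selected.
```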
```diff
@@ -1424,7 +1424,7 @@ class AOTInductorModelCache:
                torch.hpu.max_memory_allocated() - pre_clone_memory_used
            ) / 1e9

        inductor_configs = {"aot_inductor.package_constants_in_so": False}
        inductor_configs = {}
        if mode == "max-autotune":
            inductor_configs["max_autotune"] = True
        ep = torch.export.export(
@@ -1439,14 +1439,8 @@ class AOTInductorModelCache:
            ep, inductor_configs=inductor_configs
        )  # type: ignore[arg-type]

        compiled = torch._inductor.aoti_load_package(package_path)
        compiled.load_constants(
            {**ep.state_dict, **ep.constants},
            check_full_update=False,
            user_managed=True,
        )
        cls.cache[key] = (
            compiled,
            torch._inductor.aoti_load_package(package_path),
            clone_memory_used,
        )
```
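The cache touched by this hunk is built around the AOTInductor packaging flow: export the model, compile it into a package, then load the package back as a callable. A hedged, self-contained sketch of that flow under the `torch.export` / `torch._inductor` APIs used above (`TinyModel` and the inputs are placeholders, and the exact `inductor_configs` keys the benchmark sets may differ):

```python
import torch


class TinyModel(torch.nn.Module):
    def forward(self, x):
        return torch.relu(x) + 1


model = TinyModel().eval()
example_inputs = (torch.randn(8, 8),)

# Export to an ExportedProgram, as the cache does before compiling.
ep = torch.export.export(model, example_inputs)

# Optional Inductor knobs; the hunk above sets
# inductor_configs["max_autotune"] = True in "max-autotune" mode.
inductor_configs = {}

# Compile the exported program into an AOTI package on disk ...
package_path = torch._inductor.aoti_compile_and_package(
    ep, inductor_configs=inductor_configs
)

# ... and load it back as a callable, which is what ends up in the cache.
compiled = torch._inductor.aoti_load_package(package_path)
print(compiled(*example_inputs).shape)
```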
```diff
@@ -721,15 +721,8 @@ void AOTIModelPackageLoader::load_constants(
  for (const auto& it : constants_map) {
    if (fqn_to_constant_name.find(it.first) != fqn_to_constant_name.end()) {
      updated_constants_map.emplace(fqn_to_constant_name[it.first], it.second);
    } else if (check_full_update) {
      std::string constant_fqns = "";
      for (const auto& it2 : fqn_to_constant_name) {
        constant_fqns += it2.first + ", ";
      }
      throw std::runtime_error(
          "The constant with FQN " + it.first +
          " was not found in the model. The available constants are: " +
          constant_fqns);
    } else {
      throw std::runtime_error("Constant not found: " + it.first);
    }
  }
```
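The loop in this C++ hunk remaps user-facing FQNs to the loader's internal constant names and reports keys the model does not know about. A tiny Python analogue of that remapping, purely illustrative (the function, its arguments, and the example dictionary are made up; only the error texts come from the hunk above):

```python
def remap_constants(constants_map, fqn_to_constant_name):
    """Translate user-facing FQNs to internal constant names, raising
    for keys the model does not have, mirroring the loader's errors."""
    updated = {}
    for fqn, value in constants_map.items():
        if fqn not in fqn_to_constant_name:
            available = ", ".join(fqn_to_constant_name)
            raise RuntimeError(
                f"The constant with FQN {fqn} was not found in the model. "
                f"The available constants are: {available}"
            )
        updated[fqn_to_constant_name[fqn]] = value
    return updated


# Example: one known FQN remaps cleanly; an unknown one would raise.
mapping = {"model.linear.weight": "constant_0"}
print(remap_constants({"model.linear.weight": "w"}, mapping))
# {'constant_0': 'w'}
```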