Remove dtype from gpt-fast micro benchmark experiments model name (#128789)

Per comments on https://github.com/pytorch/test-infra/pull/5344, we already have a dtype column with the same information, so keeping the dtype in the experiment name is redundant.
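
For illustration, here is a minimal sketch of the idea, assuming the benchmark stores each result as an Experiment-like record; the field names and values below are illustrative, not the exact benchmark code:

from dataclasses import dataclass

@dataclass
class Experiment:
    # Illustrative fields only; the real benchmark record may differ.
    name: str    # e.g. "mlp_layer_norm_gelu" (no dtype suffix)
    metric: str  # e.g. "flops_utilization"
    target: float
    actual: str
    dtype: str   # dedicated dtype column, e.g. "bfloat16"

# Before: the dtype was baked into the name, duplicating the dtype column.
before = Experiment("mlp_layer_norm_gelu_bfloat16", "flops_utilization", 0.8, "0.79", "bfloat16")
# After: the name stays stable across dtypes; the dtype lives only in its own column.
after = Experiment("mlp_layer_norm_gelu", "flops_utilization", 0.8, "0.79", "bfloat16")

With the dtype kept only in its own column, results for different dtypes share a single experiment name and differ only in that column.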

Pull Request resolved: https://github.com/pytorch/pytorch/pull/128789
Approved by: https://github.com/yanboliang
Author: Huy Do
Date: 2024-06-18 01:26:45 +00:00
Committed by: PyTorch MergeBot
Parent: fbc7559ceb
Commit: 9e8443b56f


@@ -76,7 +76,7 @@ def run_mlp_layer_norm_gelu(device: str = "cuda"):
         dtype_str = str(dtype).replace("torch.", "")
         results.append(
             Experiment(
-                f"mlp_layer_norm_gelu_{dtype_str}",
+                "mlp_layer_norm_gelu",
                 "flops_utilization",
                 expected_flops_utilization,
                 f"{flops_utilization:.02f}",
@@ -113,7 +113,7 @@ def run_layer_norm(device: str = "cuda"):
         dtype_str = str(dtype).replace("torch.", "")
         results.append(
             Experiment(
-                f"layer_norm_{dtype_str}",
+                "layer_norm",
                 "memory_bandwidth(GB/s)",
                 expected_memory_bandwidth,
                 f"{memory_bandwidth:.02f}",
@@ -156,7 +156,7 @@ def run_gather_gemv(device: str = "cuda"):
         dtype_str = str(dtype).replace("torch.", "")
         results.append(
             Experiment(
-                f"gather_gemv_{dtype_str}",
+                "gather_gemv",
                 "memory_bandwidth(GB/s)",
                 expected_memory_bandwidth,
                 f"{memory_bandwidth:.02f}",
@@ -197,7 +197,7 @@ def run_gemv(device: str = "cuda"):
         dtype_str = str(dtype).replace("torch.", "")
         results.append(
             Experiment(
-                f"gemv_{dtype_str}",
+                "gemv",
                 "memory_bandwidth(GB/s)",
                 expected_memory_bandwidth,
                 f"{memory_bandwidth:.02f}",