Compare commits

...

5 Commits

SHA1 Message Date
9b07e852b8 tc 2025-10-23 13:34:11 -07:00
f74c7d3220 improvements to printed results 2025-10-23 13:34:11 -07:00
cab289016c tc 2025-10-23 13:34:11 -07:00
5efe0dfd92 tc 2025-10-23 13:34:11 -07:00
84652c2fc0 tc 2025-10-23 13:34:11 -07:00


@@ -16,6 +16,9 @@ from typing import Any, NamedTuple, TYPE_CHECKING
 from yaml import dump, load
+
+REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent
+
 if TYPE_CHECKING:
     from collections.abc import Iterable
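
The new REPO_ROOT constant walks four directories up from the linter's own file. A minimal sketch of that resolution, assuming (hypothetically) that the script lives four levels below the checkout root, e.g. at tools/linter/adapters/workflow_consistency_linter.py:

    from pathlib import Path

    # Hypothetical script location, four levels below the repo root.
    script = Path("/repo/tools/linter/adapters/workflow_consistency_linter.py")
    repo_root = script.parent.parent.parent.parent  # adapters -> linter -> tools -> /repo
    assert repo_root == Path("/repo")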
@@ -59,7 +62,13 @@ def is_workflow(yaml: Any) -> bool:
     return yaml.get("jobs") is not None
 
 
-def print_lint_message(path: Path, job: dict[str, Any], sync_tag: str) -> None:
+def print_lint_message(
+    path: Path,
+    job: dict[str, Any],
+    sync_tag: str,
+    baseline_path: Path,
+    baseline_job_id: str,
+) -> None:
     job_id = next(iter(job.keys()))
     with open(path) as f:
         lines = f.readlines()
@@ -77,11 +86,30 @@ def print_lint_message(path: Path, job: dict[str, Any], sync_tag: str) -> None:
         name="workflow-inconsistency",
         original=None,
         replacement=None,
-        description=f"Job doesn't match other jobs with sync-tag: '{sync_tag}'",
+        description=f"Job doesn't match job '{baseline_job_id}' in file {baseline_path} with sync-tag: '{sync_tag}'",
     )
     print(json.dumps(lint_message._asdict()), flush=True)
 
 
+def get_jobs_with_sync_tag(
+    job_id: str,
+    job: dict[str, Any],
+) -> tuple[str, str, dict[str, Any]] | None:
+    sync_tag = job.get("with", {}).get("sync-tag")
+    if sync_tag is None:
+        return None
+
+    # Remove the "if" field, which we allow to be different between jobs
+    # (since you might have different triggering conditions on pull vs.
+    # trunk, say).
+    if "if" in job:
+        del job["if"]
+
+    # The same is true for ['with']['test-matrix'].
+    if "test-matrix" in job.get("with", {}):
+        del job["with"]["test-matrix"]
+
+    return (sync_tag, job_id, job)
+
+
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description="workflow consistency linter.",
@@ -94,41 +122,39 @@ if __name__ == "__main__":
     )
     args = parser.parse_args()
 
-    # Go through the provided files, aggregating jobs with the same sync tag
+    # Go through all workflow files, aggregating jobs with the same sync tag
     tag_to_jobs = defaultdict(list)
+    for path in REPO_ROOT.glob(".github/workflows/*"):
+        if not path.is_file() or path.suffix not in {".yml", ".yaml"}:
+            continue
+        workflow = load_yaml(path)
+        if not is_workflow(workflow):
+            continue
+        clean_path = path.relative_to(REPO_ROOT)
+        jobs = workflow.get("jobs", {})
+        for job_id, job in jobs.items():
+            res = get_jobs_with_sync_tag(job_id, job)
+            if res is None:
+                continue
+            sync_tag, job_id, job_dict = res
+            tag_to_jobs[sync_tag].append((clean_path, job_id, job_dict))
+
+    # Check the files passed as arguments
     for path in args.filenames:
         workflow = load_yaml(Path(path))
         jobs = workflow["jobs"]
         for job_id, job in jobs.items():
-            try:
-                sync_tag = job["with"]["sync-tag"]
-            except KeyError:
+            res = get_jobs_with_sync_tag(job_id, job)
+            if res is None:
                 continue
-
-            # remove the "if" field, which we allow to be different between jobs
-            # (since you might have different triggering conditions on pull vs.
-            # trunk, say.)
-            if "if" in job:
-                del job["if"]
-
-            # same is true for ['with']['test-matrix']
-            if "test-matrix" in job.get("with", {}):
-                del job["with"]["test-matrix"]
-
-            tag_to_jobs[sync_tag].append((path, {job_id: job}))
-
-    # For each sync tag, check that all the jobs have the same code.
-    for sync_tag, path_and_jobs in tag_to_jobs.items():
-        baseline_path, baseline_dict = path_and_jobs.pop()
-        baseline_str = dump(baseline_dict)
-
-        printed_baseline = False
-
-        for path, job_dict in path_and_jobs:
+            sync_tag, job_id, job_dict = res
             job_str = dump(job_dict)
-            if baseline_str != job_str:
-                print_lint_message(path, job_dict, sync_tag)
-
-                if not printed_baseline:
-                    print_lint_message(baseline_path, baseline_dict, sync_tag)
-                    printed_baseline = True
+            # Check that this job matches every other job carrying the
+            # same sync tag.
+            for baseline_path, baseline_job_id, baseline_dict in tag_to_jobs[sync_tag]:
+                baseline_str = dump(baseline_dict)
+                if job_id != baseline_job_id or job_str != baseline_str:
+                    # print_lint_message reads the job id from the dict's key,
+                    # so re-wrap the job body under its id.
+                    print_lint_message(
+                        Path(path), {job_id: job_dict}, sync_tag,
+                        baseline_path, baseline_job_id,
+                    )
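
Because the final comparison is on dump() output, two jobs sharing a sync-tag must serialize to identical YAML once the "if" and "test-matrix" fields are stripped. A minimal sketch of that property (the job bodies are invented):

    from yaml import dump

    baseline = {
        "uses": "./.github/workflows/_linux-build.yml",
        "with": {"sync-tag": "build", "runner": "linux.2xlarge"},
    }
    candidate = {
        "uses": "./.github/workflows/_linux-build.yml",
        "with": {"sync-tag": "build", "runner": "linux.4xlarge"},
    }

    # PyYAML sorts mapping keys by default, so key order never causes a
    # spurious mismatch; only real content differences trigger a lint message.
    assert dump({"b": 1, "a": 2}) == dump({"a": 2, "b": 1})
    assert dump(baseline) != dump(candidate)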