Mirror of https://github.com/pytorch/pytorch.git, synced 2025-10-23 06:34:55 +08:00
[GHF] Better check for internal diffs (#104344)
During a revert, use the title of the "Meta Internal-Only Changes Check" check run to determine whether an internal diff is associated with the PR. When a PR is merged or closed, the status of "Meta Internal-Only Changes Check" is always success, but the title message can differ:
- "There is no internal Diff connected, this can be merged now" means that there is no internal change associated with the PR (or it was landed via the GitHub First workflow).
- "The internal Diff has landed, this can be merged now" means that the PR has an associated internal diff, and the OSS and internal reverts must happen in sync using internal tooling (or a revert PR can be authored in OSS).

Add a regression test for https://github.com/pytorch/pytorch/pull/100652, which originated from an internal diff but was merged as an OSS PR.

Fixes https://github.com/pytorch/pytorch/issues/104232

Pull Request resolved: https://github.com/pytorch/pytorch/pull/104344
Approved by: https://github.com/bigfootjon, https://github.com/huydhn
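To illustrate the check this PR relies on, here is a minimal, standalone sketch of the title-based decision. It mirrors the INTERNAL_CHANGES_CHECKRUN_NAME and HAS_NO_CONNECTED_DIFF_TITLE constants and the JobCheckState shape added in trymerge.py below; the simplified CheckState class and the example at the bottom are illustrative assumptions, not code from this PR.

from typing import Mapping, NamedTuple, Optional

INTERNAL_CHANGES_CHECKRUN_NAME = "Meta Internal-Only Changes Check"
HAS_NO_CONNECTED_DIFF_TITLE = "There is no internal Diff connected, this can be merged now"


class CheckState(NamedTuple):
    # Simplified stand-in for trymerge.py's JobCheckState
    status: Optional[str]
    title: Optional[str]


def has_no_connected_diff(checks: Mapping[str, CheckState]) -> bool:
    # On a merged/closed PR the check's status is always SUCCESS, so only
    # the title distinguishes "no internal diff" from "internal diff landed".
    check = checks.get(INTERNAL_CHANGES_CHECKRUN_NAME)
    return check is not None and check.title == HAS_NO_CONNECTED_DIFF_TITLE


# Example: a PR merged without a connected internal diff can be reverted in OSS alone.
checks = {INTERNAL_CHANGES_CHECKRUN_NAME: CheckState("SUCCESS", HAS_NO_CONNECTED_DIFF_TITLE)}
assert has_no_connected_diff(checks)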
Commit 8464a6a165 (parent 998c07799f), committed by PyTorch MergeBot
.github/scripts/gql_mocks.json (generated, vendored): 11632 lines changed; file diff suppressed because it is too large.
.github/scripts/test_trymerge.py (vendored): 18 lines changed
@@ -453,13 +453,7 @@ class TestTryMerge(TestCase):
     def test_revert_codev_fails(self, *args: Any) -> None:
         pr = GitHubPR("pytorch", "pytorch", 91340)
 
-        class GitRepoCoDev(GitRepo):
-            def __init__(self) -> None:
-                super().__init__(get_git_repo_dir(), get_git_remote_name())
-
-            def commits_resolving_gh_pr(self, pr_num: int) -> List[str]:
-                return ["FakeCommitSha"]
-
+        class GitRepoCoDev(DummyGitRepo):
             def commit_message(self, ref: str) -> str:
                 return pr.get_body()
 
@@ -470,6 +464,16 @@ class TestTryMerge(TestCase):
             lambda: validate_revert(repo, pr, comment_id=1372496233),
         )
 
+    def test_revert_codev_abandoned_diff_succeeds(self, *args: Any) -> None:
+        pr = GitHubPR("pytorch", "pytorch", 100652)
+
+        class GitRepoCoDev(DummyGitRepo):
+            def commit_message(self, ref: str) -> str:
+                return pr.get_body()
+
+        repo = GitRepoCoDev()
+        validate_revert(repo, pr, comment_id=1588195237)
+
     def test_pr_changed_submodule_detection(self, *args: Any) -> None:
         # Updates submodule during dev-cycle but reverts it later
         pr = GitHubPR("pytorch", "pytorch", 95045)
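The new test stubs out git access so that validate_revert runs purely against the recorded GraphQL mocks for PR 100652: GitRepoCoDev only overrides commit_message to return the PR body, so the diff-revision lookup works without a real checkout. It relies on DummyGitRepo, a helper defined elsewhere in test_trymerge.py whose body is not part of this diff; a plausible minimal shape for such a stub, inferred from the GitRepoCoDev code removed above and stated here only as an assumption (including the usual gitutils imports used by these scripts), would be:

from typing import List

from gitutils import GitRepo, get_git_repo_dir, get_git_remote_name


class DummyGitRepo(GitRepo):
    # Assumed sketch of the test helper: a GitRepo that never needs a real checkout.
    def __init__(self) -> None:
        super().__init__(get_git_repo_dir(), get_git_remote_name())

    def commits_resolving_gh_pr(self, pr_num: int) -> List[str]:
        return ["FakeCommitSha"]

    def commit_message(self, ref: str) -> str:
        return "[Dummy] commit message"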
.github/scripts/trymerge.py (vendored): 52 lines changed
@@ -59,6 +59,7 @@ class JobCheckState(NamedTuple):
     status: Optional[str]
     classification: Optional[str]
     job_id: Optional[int]
+    title: Optional[str]
 
 
 JobNameToStateDict = Dict[str, JobCheckState]
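Downstream construction sites (shown in the hunks below) now pass classification, job_id, and the new title field as keyword arguments. A small hedged example of building the tuple, with made-up values and an import that assumes the sketch is run from .github/scripts:

from trymerge import JobCheckState  # assumes .github/scripts is on the path

state = JobCheckState(
    "Meta Internal-Only Changes Check",  # name
    "https://hud.example/details",       # url (illustrative)
    "SUCCESS",                           # status
    classification=None,
    job_id=None,
    title="There is no internal Diff connected, this can be merged now",
)
print(state.title)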
@@ -126,6 +127,7 @@ fragment PRCheckSuites on CheckSuiteConnection {
           conclusion
           detailsUrl
           databaseId
+          title
         }
         pageInfo {
           endCursor
@@ -317,6 +319,7 @@ query ($owner: String!, $name: String!, $number: Int!, $cs_cursor: String, $cr_c
               conclusion
               detailsUrl
               databaseId
+              title
             }
             pageInfo {
               endCursor
@@ -441,6 +444,10 @@ MERGE_RULE_PATH = Path(".github") / "merge_rules.yaml"
 ROCKSET_MERGES_COLLECTION = "merges"
 ROCKSET_MERGES_WORKSPACE = "commons"
 REMOTE_MAIN_BRANCH = "origin/main"
+INTERNAL_CHANGES_CHECKRUN_NAME = "Meta Internal-Only Changes Check"
+HAS_NO_CONNECTED_DIFF_TITLE = (
+    "There is no internal Diff connected, this can be merged now"
+)
 
 
 def gh_graphql(query: str, **kwargs: Any) -> Dict[str, Any]:
@@ -544,8 +551,9 @@ def add_workflow_conclusions(
                     checkrun_name,
                     checkrun_node["detailsUrl"],
                     checkrun_node["conclusion"],
-                    None,
-                    checkrun_node["databaseId"],
+                    classification=None,
+                    job_id=checkrun_node["databaseId"],
+                    title=checkrun_node["title"],
                 )
 
             if bool(checkruns["pageInfo"]["hasNextPage"]):
@@ -573,8 +581,9 @@ def add_workflow_conclusions(
             workflow.name,
             workflow.url,
             workflow.status,
-            None,
-            None,
+            classification=None,
+            job_id=None,
+            title=None,
         )
     for job_name, job in no_workflow_obj.jobs.items():
         res[job_name] = job
@@ -879,8 +888,9 @@ class GitHubPR:
                     name,
                     status["targetUrl"],
                     status["state"],
-                    None,
-                    None,
+                    classification=None,
+                    job_id=None,
+                    title=None,
                 )
 
         return self.conclusions
@@ -977,7 +987,7 @@ class GitHubPR:
         return rc.group(1) if rc is not None else None
 
     def has_internal_changes(self) -> bool:
-        checkrun_name = "Meta Internal-Only Changes Check"
+        checkrun_name = INTERNAL_CHANGES_CHECKRUN_NAME
         if self.get_diff_revision() is None:
             return False
         checks = self.get_checkrun_conclusions()
@@ -985,6 +995,13 @@ class GitHubPR:
             return False
         return checks[checkrun_name].status != "SUCCESS"
 
+    def has_no_connected_diff(self) -> bool:
+        checkrun_name = INTERNAL_CHANGES_CHECKRUN_NAME
+        checks = self.get_checkrun_conclusions()
+        if checks is None or checkrun_name not in checks:
+            return False
+        return checks[checkrun_name].title == HAS_NO_CONNECTED_DIFF_TITLE
+
     def merge_ghstack_into(
         self,
         repo: GitRepo,
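has_no_connected_diff complements the existing has_internal_changes: both read the same check run, but one keys off the status and the other off the title. A purely illustrative helper (not part of trymerge.py, import assumes .github/scripts is on the path) showing how a caller might read the two signals together:

from trymerge import GitHubPR


def describe_internal_state(pr: GitHubPR) -> str:
    # Illustrative only: summarize what the check run says about a PR's internal diff.
    if pr.has_no_connected_diff():
        return "no internal diff connected; an OSS revert is enough"
    if pr.has_internal_changes():
        return "internal diff attached; revert OSS and internal changes in sync"
    return "no internal signal; treat as a regular OSS PR"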
@@ -1507,11 +1524,17 @@ def get_classifications(
                 check.status,
                 "IGNORE_CURRENT_CHECK",
                 check.job_id,
+                check.title,
             )
             continue
         if "unstable" in name:
             checks_with_classifications[name] = JobCheckState(
-                check.name, check.url, check.status, "UNSTABLE", check.job_id
+                check.name,
+                check.url,
+                check.status,
+                "UNSTABLE",
+                check.job_id,
+                check.title,
             )
             continue
         head_sha_job = head_sha_jobs.get(name)
@@ -1523,11 +1546,16 @@ def get_classifications(
             and head_sha_job["failure_captures"] == merge_base_job["failure_captures"]
         ):
             checks_with_classifications[name] = JobCheckState(
-                check.name, check.url, check.status, "BROKEN_TRUNK", check.job_id
+                check.name,
+                check.url,
+                check.status,
+                "BROKEN_TRUNK",
+                check.job_id,
+                check.title,
             )
         elif any(rule.matches(head_sha_job) for rule in flaky_rules):
             checks_with_classifications[name] = JobCheckState(
-                check.name, check.url, check.status, "FLAKY", check.job_id
+                check.name, check.url, check.status, "FLAKY", check.job_id, check.title
             )
     return checks_with_classifications
 
@@ -1563,6 +1591,10 @@ def validate_revert(
     )
     skip_internal_checks = can_skip_internal_checks(pr, comment_id)
 
+    # Ignore associated diff if the PR does not have internal changes
+    if pr.has_no_connected_diff():
+        skip_internal_checks = True
+
     # Raises exception if matching rule is not found, but ignores all status checks
     find_matching_merge_rule(
         pr, repo, skip_mandatory_checks=True, skip_internal_checks=skip_internal_checks