Revert "Issue-88098: extract utils from check labels (#94597)"

This reverts commit 2c76838d7ff96cc7aa3a30cae54fded70e0bccc5.

Reverted https://github.com/pytorch/pytorch/pull/94597 on behalf of https://github.com/jeanschmidt due to internal breakages: https://fburl.com/sandcastle/3ukij9xp
Author: PyTorch MergeBot
Date: 2023-02-13 20:19:50 +00:00
Parent: 1f7448eeda
Commit: 7c3fc2c7f0
8 changed files with 187 additions and 313 deletions


@@ -18,8 +18,11 @@ from typing import (
Optional,
Pattern,
Tuple,
Union,
cast,
)
from urllib.error import HTTPError
from urllib.request import Request, urlopen
from warnings import warn
from pathlib import Path
@@ -30,14 +33,6 @@ from gitutils import (
get_git_repo_dir,
patterns_to_regex,
)
from github_utils import (
GitHubComment,
gh_fetch_json_list,
gh_fetch_url,
gh_post_commit_comment,
gh_post_pr_comment,
)
from label_utils import gh_add_labels
from trymerge_explainer import (
TryMergeExplainer,
get_revert_message,
@@ -445,8 +440,67 @@ CIFLOW_TRUNK_LABEL = re.compile(r"^ciflow/trunk")
MERGE_RULE_PATH = Path(".github") / "merge_rules.yaml"
def _fetch_url(url: str, *,
headers: Optional[Dict[str, str]] = None,
data: Optional[Dict[str, Any]] = None,
method: Optional[str] = None,
reader: Callable[[Any], Any] = lambda x: x.read()) -> Any:
if headers is None:
headers = {}
token = os.environ.get("GITHUB_TOKEN")
if token is not None and url.startswith('https://api.github.com/'):
headers['Authorization'] = f'token {token}'
data_ = json.dumps(data).encode() if data is not None else None
try:
with urlopen(Request(url, headers=headers, data=data_, method=method)) as conn:
return reader(conn)
except HTTPError as err:
if err.code == 403 and all(key in err.headers for key in ['X-RateLimit-Limit', 'X-RateLimit-Used']):
print(f"Rate limit exceeded: {err.headers['X-RateLimit-Used']}/{err.headers['X-RateLimit-Limit']}")
raise
def _fetch_json_any(
url: str,
params: Optional[Dict[str, Any]] = None,
data: Optional[Dict[str, Any]] = None
) -> Any:
headers = {'Accept': 'application/vnd.github.v3+json'}
if params is not None and len(params) > 0:
url += '?' + '&'.join(f"{name}={urllib.parse.quote(str(val))}" for name, val in params.items())
return _fetch_url(url, headers=headers, data=data, reader=json.load)
def fetch_json_list(url: str,
params: Optional[Dict[str, Any]] = None,
data: Optional[Dict[str, Any]] = None) -> List[Dict[str, Any]]:
return cast(List[Dict[str, Any]], _fetch_json_any(url, params, data))
def fetch_json_dict(url: str,
params: Optional[Dict[str, Any]] = None,
data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
return cast(Dict[str, Any], _fetch_json_any(url, params, data))
def _gh_post_comment(url: str, comment: str, dry_run: bool = False) -> List[Dict[str, Any]]:
if dry_run:
print(comment)
return []
return fetch_json_list(url, data={"body": comment})
def gh_post_pr_comment(org: str, project: str, pr_num: int, comment: str, dry_run: bool = False) -> List[Dict[str, Any]]:
return _gh_post_comment(f'https://api.github.com/repos/{org}/{project}/issues/{pr_num}/comments', comment, dry_run)
def gh_post_commit_comment(org: str, project: str, sha: str, comment: str, dry_run: bool = False) -> List[Dict[str, Any]]:
return _gh_post_comment(f'https://api.github.com/repos/{org}/{project}/commits/{sha}/comments', comment, dry_run)
def gh_add_labels(org: str, project: str, pr_num: int, labels: Union[str, List[str]]) -> None:
fetch_json_list(f'https://api.github.com/repos/{org}/{project}/issues/{pr_num}/labels',
data={"labels": labels})
def gh_graphql(query: str, **kwargs: Any) -> Dict[str, Any]:
- rc = gh_fetch_url("https://api.github.com/graphql", data={"query": query, "variables": kwargs}, reader=json.load)
+ rc = _fetch_url("https://api.github.com/graphql", data={"query": query, "variables": kwargs}, reader=json.load)
if "errors" in rc:
raise RuntimeError(f"GraphQL query {query}, args {kwargs} failed: {rc['errors']}")
return cast(Dict[str, Any], rc)
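
For orientation, a minimal usage sketch of the restored helpers follows. It is hypothetical and not part of the diff: it assumes the functions above are in scope (for example, imported from the script this commit touches) and that GITHUB_TOKEN is exported so _fetch_url can attach the Authorization header; the repository and GraphQL query below are placeholders.

# Hypothetical usage of the restored REST and GraphQL helpers (not in this diff).
# Assumes fetch_json_list and gh_graphql are in scope and GITHUB_TOKEN is set.
issues = fetch_json_list(
    "https://api.github.com/repos/pytorch/pytorch/issues",
    params={"state": "open", "per_page": 5},
)
print(f"Fetched {len(issues)} open issues")

resp = gh_graphql(
    "query($login: String!) { repositoryOwner(login: $login) { login } }",
    login="pytorch",
)
print(resp["data"]["repositoryOwner"]["login"])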
@@ -623,6 +677,15 @@ def get_ghstack_prs(repo: GitRepo, pr: "GitHubPR") -> List[Tuple["GitHubPR", str
)
return entire_stack
@dataclass
class GitHubComment:
body_text: str
created_at: str
author_login: str
author_association: str
editor_login: Optional[str]
database_id: int
class GitHubPR:
def __init__(self, org: str, project: str, pr_num: int) -> None:
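
As a small illustration of the re-inlined GitHubComment dataclass, the sketch below builds one from a REST issue-comment payload. The field mapping is an assumption based on the GitHub REST schema; the mapping GitHubPR actually uses lives outside this hunk.

# Hypothetical constructor from a REST issue-comment payload (not in this diff).
# editor_login is left as None because the REST payload does not expose the last editor.
def comment_from_rest_payload(payload: Dict[str, Any]) -> GitHubComment:
    return GitHubComment(
        body_text=payload["body"],
        created_at=payload["created_at"],
        author_login=payload["user"]["login"],
        author_association=payload["author_association"],
        editor_login=None,
        database_id=payload["id"],
    )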
@@ -1076,7 +1139,7 @@ def gen_new_issue_link(
def read_merge_rules(repo: Optional[GitRepo], org: str, project: str) -> List[MergeRule]:
repo_relative_rules_path = MERGE_RULE_PATH
if repo is None:
- json_data = gh_fetch_url(
+ json_data = _fetch_url(
f"https://api.github.com/repos/{org}/{project}/contents/{repo_relative_rules_path}",
headers={'Accept': 'application/vnd.github.v3+json'},
reader=json.load,
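
When the rules file is fetched through the contents endpoint as above, it arrives base64-encoded inside a JSON envelope. A hedged sketch of the decoding step, assuming the standard contents-API response shape; turning the result into MergeRule objects happens outside this hunk.

# Hypothetical decoding of the contents-API response (not in this diff):
# the payload looks like {"content": "<base64>", "encoding": "base64", ...}.
import base64
import yaml  # PyYAML

rules_text = base64.b64decode(json_data["content"]).decode("utf-8")
rules = yaml.safe_load(rules_text)
print(f"Loaded {len(rules)} merge rules")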
@@ -1261,7 +1324,7 @@ def checks_to_markdown_bullets(checks: List[Tuple[str, Optional[str]]]) -> List[
def _get_flaky_rules(url: str, num_retries: int = 3) -> List[FlakyRule]:
try:
- return [FlakyRule(**rule) for rule in gh_fetch_json_list(url)]
+ return [FlakyRule(**rule) for rule in fetch_json_list(url)]
except Exception as e:
print(f"Could not download {url} because: {e}.")
if num_retries > 0:
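
The branch above retries the download while num_retries is still positive; the recursive tail of that branch falls outside the hunk. For clarity, a self-contained sketch of the same retry-then-fall-back pattern, with placeholder names:

# Standalone sketch of the retry pattern (placeholder names, not code from this diff).
import time
from typing import Any, Callable, List

def fetch_with_retries(fetch: Callable[[], List[Any]], num_retries: int = 3) -> List[Any]:
    try:
        return fetch()
    except Exception as e:
        print(f"Fetch failed: {e}")
        if num_retries > 0:
            time.sleep(1)  # brief pause before retrying
            return fetch_with_retries(fetch, num_retries - 1)
        return []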
@@ -1446,7 +1509,7 @@ def check_for_sev(org: str, project: str, skip_mandatory_checks: bool) -> None:
return
response = cast(
Dict[str, Any],
- gh_fetch_json_list(
+ fetch_json_list(
"https://api.github.com/search/issues",
params={"q": f'repo:{org}/{project} is:open is:issue label:"ci: sev"'},
),
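
The /search/issues endpoint returns a single JSON object of the form {"total_count": ..., "items": [...]} rather than a list, which is why the fetch_json_list result is cast to a Dict here. Below is a hypothetical way to inspect that response; the keys come from the GitHub search API, and the check itself is only an illustration, not the function's actual behavior.

# Hypothetical inspection of the search response above (not in this diff).
def open_sev_count(search_response: Dict[str, Any]) -> int:
    # total_count covers every match; items holds only the current page.
    return int(search_response.get("total_count", 0))

if open_sev_count(response) > 0:
    print("An open 'ci: sev' issue exists; merges may be blocked.")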