diff --git a/.github/scripts/test_update_disabled_issues.py b/.github/scripts/test_update_disabled_issues.py
deleted file mode 100644
index f1d4d4d43f..0000000000
--- a/.github/scripts/test_update_disabled_issues.py
+++ /dev/null
@@ -1,346 +0,0 @@
-from unittest import main, mock, TestCase
-
-from update_disabled_issues import (
- condense_disable_jobs,
- filter_disable_issues,
- get_disable_issues,
- get_disabled_tests,
- OWNER,
- REPO,
- UNSTABLE_PREFIX,
-)
-
-
-MOCK_DATA = [
- {
- "url": "https://github.com/pytorch/pytorch/issues/32644",
- "number": 32644,
- "title": "DISABLED test_quantized_rnn (test_quanization.PostTrainingDynamicQuantTest)",
- "body": "Platforms: linux, rocm\n\nThis test was disabled because it is failing in CI.",
- },
- {
- "url": "https://github.com/pytorch/pytorch/issues/67289",
- "number": 67289,
- "title": "DISABLED test_zero_redundancy_optimizer (__main__.TestZeroRedundancyOptimizerDistributed)",
- "body": "",
- },
- {
- "url": "https://github.com/pytorch/pytorch/issues/94861",
- "number": 94861,
- "title": "DISABLED pull / linux-bionic-py3.8-clang9 / test (dynamo)",
- "author": {
- "login": "mock-user",
- },
- },
- {
- "url": "https://github.com/pytorch/pytorch/issues/42345",
- "number": 42345,
- "title": "DISABLED pull / linux-bionic-py3.8-clang9",
- "author": {
- "login": "mock-user",
- },
- },
- {
- "url": "https://github.com/pytorch/pytorch/issues/32132",
- "number": 32132,
- "title": "DISABLED pull",
- "author": {
- "login": "mock-user",
- },
- },
- {
- "url": "https://github.com/pytorch/pytorch/issues/53457",
- "url": "https://api.github.com/repos/pytorch/pytorch/issues/53457",
- "number": 53457,
- "title": "Not a DISABLED issue, but has the disabled keyword",
- },
-]
-
-MOCK_UNSTABLE_DATA = [
- {
- "url": "https://github.com/pytorch/pytorch/issues/102299",
- "number": 102299,
- "title": "UNSTABLE trunk / macos-12-py3-arm64",
- "author": {
- "login": "mock-user",
- },
- },
- {
- "url": "https://github.com/pytorch/pytorch/issues/102300",
- "number": 102300,
- "title": "UNSTABLE windows-binary-libtorch-release",
- "author": {
- "login": "mock-user",
- },
- },
- {
- "url": "https://github.com/pytorch/pytorch/issues/53457",
- "number": 53457,
- "title": "Not a UNSTABLE issue, but has the unstable keyword",
- },
-]
-
-
-@mock.patch("test_update_disabled_issues.get_disable_issues")
-class TestUpdateDisabledIssues(TestCase):
- def test_filter_disable_issues(self, mock_get_disable_issues):
- mock_get_disable_issues.return_value = sorted(
- MOCK_DATA, key=lambda x: x["number"]
- )
-
- disabled_issues = get_disable_issues("dummy token")
-
- disabled_tests, disabled_jobs = filter_disable_issues(disabled_issues)
- self.assertListEqual(
- [item["number"] for item in disabled_tests], [32644, 67289]
- )
- self.assertListEqual(
- [item["number"] for item in disabled_jobs], [32132, 42345, 94861]
- )
-
- def test_get_disable_tests(self, mock_get_disable_issues):
- mock_get_disable_issues.return_value = MOCK_DATA
-
- disabled_issues = get_disable_issues("dummy token")
-
- disabled_tests, _ = filter_disable_issues(disabled_issues)
- results = get_disabled_tests(disabled_tests)
-
- self.assertDictEqual(
- {
- "test_quantized_rnn (test_quanization.PostTrainingDynamicQuantTest)": (
- "32644",
- "https://github.com/pytorch/pytorch/issues/32644",
- [
- "linux",
- "rocm",
- ],
- ),
- "test_zero_redundancy_optimizer (__main__.TestZeroRedundancyOptimizerDistributed)": (
- "67289",
- "https://github.com/pytorch/pytorch/issues/67289",
- [],
- ),
- },
- results,
- )
-
- def test_get_disable_tests_aggregate_issue(self, mock_get_disable_issues):
- # Test that the function can read aggregate issues
- self.maxDiff = None
- mock_data = [
- {
- "url": "https://github.com/pytorch/pytorch/issues/32644",
- "number": 32644,
- "title": "DISABLED MULTIPLE dummy test",
- "body": "disable the following tests:\n```\ntest_quantized_nn (test_quantization.PostTrainingDynamicQuantTest): mac, win\ntest_zero_redundancy_optimizer (__main__.TestZeroRedundancyOptimizerDistributed)\n```",
- }
- ]
- disabled_tests = get_disabled_tests(mock_data)
- self.assertDictEqual(
- {
- "test_quantized_nn (test_quantization.PostTrainingDynamicQuantTest)": (
- str(mock_data[0]["number"]),
- mock_data[0]["url"],
- ["mac", "win"],
- ),
- "test_zero_redundancy_optimizer (__main__.TestZeroRedundancyOptimizerDistributed)": (
- str(mock_data[0]["number"]),
- mock_data[0]["url"],
- [],
- ),
- },
- disabled_tests,
- )
-
- def test_get_disable_tests_merge_issues(self, mock_get_disable_issues):
- # Test that the function can merge multiple issues with the same test
- # name
- self.maxDiff = None
- mock_data = [
- {
- "url": "https://github.com/pytorch/pytorch/issues/32644",
- "number": 32644,
- "title": "DISABLED MULTIPLE dummy test",
- "body": "disable the following tests:\n```\ntest_2 (abc.ABC): mac, win\ntest_3 (DEF)\n```",
- },
- {
- "url": "https://github.com/pytorch/pytorch/issues/32645",
- "number": 32645,
- "title": "DISABLED MULTIPLE dummy test",
- "body": "disable the following tests:\n```\ntest_2 (abc.ABC): mac, win, linux\ntest_3 (DEF): mac\n```",
- },
- {
- "url": "https://github.com/pytorch/pytorch/issues/32646",
- "number": 32646,
- "title": "DISABLED test_1 (__main__.Test1)",
- "body": "platforms: linux",
- },
- {
- "url": "https://github.com/pytorch/pytorch/issues/32647",
- "number": 32647,
- "title": "DISABLED test_2 (abc.ABC)",
- "body": "platforms: dynamo",
- },
- ]
- disabled_tests = get_disabled_tests(mock_data)
- self.assertDictEqual(
- {
- "test_2 (abc.ABC)": (
- str(mock_data[3]["number"]),
- mock_data[3]["url"],
- ["dynamo", "linux", "mac", "win"],
- ),
- "test_3 (DEF)": (
- str(mock_data[1]["number"]),
- mock_data[1]["url"],
- [],
- ),
- "test_1 (__main__.Test1)": (
- str(mock_data[2]["number"]),
- mock_data[2]["url"],
- ["linux"],
- ),
- },
- disabled_tests,
- )
-
- def test_condense_disable_jobs(self, mock_get_disable_issues):
- mock_get_disable_issues.return_value = MOCK_DATA
-
- disabled_issues = get_disable_issues("dummy token")
-
- _, disabled_jobs = filter_disable_issues(disabled_issues)
-
- with mock.patch(
- "update_disabled_issues.can_disable_jobs"
- ) as mock_can_disable_jobs:
- mock_can_disable_jobs.return_value = True
- results = condense_disable_jobs(
- disable_issues=disabled_jobs,
- owner=OWNER,
- repo=REPO,
- token="dummy token",
- )
-
- self.assertDictEqual(
- {
- "pull": (
- "mock-user",
- "32132",
- "https://github.com/pytorch/pytorch/issues/32132",
- "pull",
- "",
- "",
- ),
- "pull / linux-bionic-py3.8-clang9": (
- "mock-user",
- "42345",
- "https://github.com/pytorch/pytorch/issues/42345",
- "pull",
- "linux-bionic-py3.8-clang9",
- "",
- ),
- "pull / linux-bionic-py3.8-clang9 / test (dynamo)": (
- "mock-user",
- "94861",
- "https://github.com/pytorch/pytorch/issues/94861",
- "pull",
- "linux-bionic-py3.8-clang9",
- "test (dynamo)",
- ),
- },
- results,
- )
-
- def test_unstable_jobs(self, mock_get_disable_issues):
- mock_get_disable_issues.return_value = MOCK_UNSTABLE_DATA
-
- unstable_issues = get_disable_issues("dummy token", prefix=UNSTABLE_PREFIX)
-
- _, unstable_jobs = filter_disable_issues(
- unstable_issues, prefix=UNSTABLE_PREFIX
- )
-
- with mock.patch(
- "update_disabled_issues.can_disable_jobs"
- ) as mock_can_disable_jobs:
- mock_can_disable_jobs.return_value = True
- results = condense_disable_jobs(
- unstable_jobs,
- owner=OWNER,
- repo=REPO,
- token="dummy token",
- prefix=UNSTABLE_PREFIX,
- )
-
- self.assertDictEqual(
- {
- "trunk / macos-12-py3-arm64": (
- "mock-user",
- "102299",
- "https://github.com/pytorch/pytorch/issues/102299",
- "trunk",
- "macos-12-py3-arm64",
- "",
- ),
- "windows-binary-libtorch-release": (
- "mock-user",
- "102300",
- "https://github.com/pytorch/pytorch/issues/102300",
- "windows-binary-libtorch-release",
- "",
- "",
- ),
- },
- results,
- )
-
- def test_unauthorized_condense_disable_jobs(self, mock_get_disable_issues):
- mock_get_disable_issues.return_value = MOCK_DATA
-
- disabled_issues = get_disable_issues("dummy token")
-
- _, disabled_jobs = filter_disable_issues(disabled_issues)
-
- with mock.patch(
- "update_disabled_issues.can_disable_jobs"
- ) as mock_can_disable_jobs:
- mock_can_disable_jobs.return_value = False
- results = condense_disable_jobs(
- disable_issues=disabled_jobs,
- owner=OWNER,
- repo=REPO,
- token="dummy token",
- )
-
- # Nothing should be disabled here because of the lack of permission
- self.assertFalse(results)
-
- def test_unauthorized_unstable_jobs(self, mock_get_disable_issues):
- mock_get_disable_issues.return_value = MOCK_UNSTABLE_DATA
-
- unstable_issues = get_disable_issues("dummy token", MOCK_UNSTABLE_DATA)
-
- _, unstable_jobs = filter_disable_issues(
- unstable_issues, prefix=UNSTABLE_PREFIX
- )
-
- with mock.patch(
- "update_disabled_issues.can_disable_jobs"
- ) as mock_can_disable_jobs:
- mock_can_disable_jobs.return_value = False
- results = condense_disable_jobs(
- unstable_jobs,
- owner=OWNER,
- repo=REPO,
- token="dummy token",
- prefix=UNSTABLE_PREFIX,
- )
-
- # Nothing should be masked as unstable here because of the lack of permission
- self.assertFalse(results)
-
-
-if __name__ == "__main__":
- main()
diff --git a/.github/scripts/update_disabled_issues.py b/.github/scripts/update_disabled_issues.py
index 6de57c89a1..3f713cf540 100755
--- a/.github/scripts/update_disabled_issues.py
+++ b/.github/scripts/update_disabled_issues.py
@@ -1,291 +1,16 @@
#!/usr/bin/env python3
"""
-Query for the DISABLED and UNSTABLE issues and check:
- * if they are still flaky for disabled tests
- * if they are to disable workflow jobs
- * if they are to mark workflow jobs as unstable
+Query for the DISABLED and UNSTABLE issues and generate the JSON files that go
+in the stats folder of the generated-stats branch.
"""
-import argparse
import json
import os
-import re
-import urllib
-from functools import lru_cache
-from typing import Any, cast, Dict, List, Optional, Tuple
+from typing import Any, Dict
from urllib.request import Request, urlopen
-DISABLED_PREFIX = "DISABLED"
-UNSTABLE_PREFIX = "UNSTABLE"
-DISABLED_TEST_ISSUE_TITLE = re.compile(r"DISABLED\s*test_.+\s*\(.+\)")
-DISABLED_TEST_MULTI_ISSUE_TITLE = re.compile(r"DISABLED MULTIPLE")
-JOB_NAME_MAXSPLIT = 2
-
-OWNER = "pytorch"
-REPO = "pytorch"
-
-PERMISSIONS_TO_DISABLE_JOBS = {"admin", "write"}
-
-GRAPHQL_QUERY = """
-query ($q: String!, $cursor: String) {
- search(query: $q, type: ISSUE, first: 100, after: $cursor) {
- issueCount
- pageInfo {
- hasNextPage
- endCursor
- }
- nodes {
- ... on Issue {
- number
- title
- body
- url
- author {
- login
- }
- }
- }
- }
-}
-"""
-
-
-def github_api_request(
- url: str,
- data: Optional[Dict[str, Any]] = None,
- token: Optional[str] = None,
-) -> Any:
- headers = {"Accept": "application/vnd.github.v3+json"}
- if token is not None:
- headers["Authorization"] = f"token {token}"
-
- _data = json.dumps(data).encode() if data is not None else None
- try:
- with urlopen(Request(url, headers=headers, data=_data)) as conn:
- return json.load(conn)
- except Exception as err:
- print(f"Failed to get {url}: {err}")
-
-
-def gh_graphql(query: str, token: str, **kwargs: Any) -> Dict[str, Any]:
- rc = github_api_request(
- "https://api.github.com/graphql",
- data={"query": query, "variables": kwargs},
- token=token,
- )
- if "errors" in rc:
- raise RuntimeError(
- f"GraphQL query {query}, args {kwargs} failed: {rc['errors']}"
- )
- return cast(Dict[str, Any], rc)
-
-
-@lru_cache()
-def get_disable_issues(
- token: str, prefix: str = DISABLED_PREFIX
-) -> List[Dict[str, Any]]:
- q = f"is:issue is:open repo:{OWNER}/{REPO} in:title {prefix}"
- cursor = None
- has_next_page = True
- res = []
- total_count = None
- while has_next_page:
- rc = gh_graphql(GRAPHQL_QUERY, token, q=q, cursor=cursor)
- has_next_page = rc["data"]["search"]["pageInfo"]["hasNextPage"]
- cursor = rc["data"]["search"]["pageInfo"]["endCursor"]
- if total_count is None:
- total_count = rc["data"]["search"]["issueCount"]
- else:
- assert (
- total_count == rc["data"]["search"]["issueCount"]
- ), "total_count changed"
- res.extend(rc["data"]["search"]["nodes"])
-
- assert (
- len(res) == total_count
- ), f"len(items)={len(res)} but total_count={total_count}"
- res = sorted(res, key=lambda x: x["url"])
- return res
-
-
-def filter_disable_issues(
- issues: List[Dict[str, Any]], prefix: str = DISABLED_PREFIX
-) -> Tuple[List[Any], List[Any]]:
- """
- Return the list of disabled test and disabled job issues
- """
- disable_test_issues = []
- disable_job_issues = []
-
- for issue in issues:
- title = issue["title"]
- if not title or not title.startswith(prefix):
- continue
-
- if DISABLED_TEST_ISSUE_TITLE.match(
- title
- ) or DISABLED_TEST_MULTI_ISSUE_TITLE.match(title):
- disable_test_issues.append(issue)
- else:
- disable_job_issues.append(issue)
-
- return disable_test_issues, disable_job_issues
-
-
-def get_disabled_tests(issues: List[Dict[str, Any]]) -> Dict[str, Tuple]:
- def get_platforms_to_skip(body: str, prefix: str) -> List[str]:
- # Empty list = all platforms should skip the test
- platforms_to_skip = []
- if body is not None:
- for line in body.splitlines():
- line = line.lower()
- if line.startswith(prefix):
- platforms_to_skip.extend(
- [x.strip() for x in line[len(prefix) :].split(",") if x.strip()]
- )
- return platforms_to_skip
-
- disabled_tests = {}
-
- def update_disabled_tests(
- key: str, number: str, url: str, platforms_to_skip: List[str]
- ):
- # merge the list of platforms to skip if the test is disabled by
- # multiple issues. This results in some urls being wrong
- if key not in disabled_tests:
- disabled_tests[key] = (number, url, platforms_to_skip)
- else:
- original_platforms = disabled_tests[key][2]
- if len(original_platforms) == 0 or len(platforms_to_skip) == 0:
- platforms = []
- else:
- platforms = sorted(set(original_platforms + platforms_to_skip))
- disabled_tests[key] = (
- number,
- url,
- platforms,
- )
-
- test_name_regex = re.compile(r"(test_[a-zA-Z0-9-_\.]+)\s+\(([a-zA-Z0-9-_\.]+)\)")
-
- def parse_test_name(s: str) -> Optional[str]:
- test_name_match = test_name_regex.match(s)
- if test_name_match:
- return f"{test_name_match.group(1)} ({test_name_match.group(2)})"
- return None
-
- for issue in issues:
- try:
- url = issue["url"]
- number = url.split("/")[-1]
- title = issue["title"].strip()
- body = issue["body"]
-
- test_name = parse_test_name(title[len("DISABLED") :].strip())
- if test_name is not None:
- update_disabled_tests(
- test_name, number, url, get_platforms_to_skip(body, "platforms:")
- )
- elif DISABLED_TEST_MULTI_ISSUE_TITLE.match(title):
- # This is a multi-test issue
- start = body.lower().find("disable the following tests:")
- # Format for disabling tests:
- # Title: DISABLED MULTIPLE anything
- # disable the following tests:
- # ```
- # test_name1 (test_suite1): mac, windows
- # test_name2 (test_suite2): mac, windows
- # ```
- for line in body[start:].splitlines()[2:]:
- if "```" in line:
- break
- split_by_colon = line.split(":")
-
- test_name = parse_test_name(split_by_colon[0].strip())
- if test_name is None:
- continue
- update_disabled_tests(
- test_name,
- number,
- url,
- get_platforms_to_skip(
- split_by_colon[1].strip()
- if len(split_by_colon) > 1
- else "",
- "",
- ),
- )
- else:
- print(f"Unknown disable issue type: {title}")
- except Exception as e:
- print(f"Failed to parse issue {issue['url']}: {e}")
- continue
-
- return disabled_tests
-
-
-@lru_cache()
-def can_disable_jobs(owner: str, repo: str, username: str, token: str) -> bool:
- url = f"https://api.github.com/repos/{owner}/{repo}/collaborators/{username}/permission"
-
- try:
- perm = github_api_request(url=url, token=token)
- except urllib.error.HTTPError as error:
- print(f"Failed to get {owner}/{repo} permission for {username}: {error}")
- return False
-
- if not perm:
- return False
-
- return perm and perm.get("permission", "").lower() in PERMISSIONS_TO_DISABLE_JOBS
-
-
-def condense_disable_jobs(
- disable_issues: List[Any],
- owner: str,
- repo: str,
- token: str,
- prefix: str = DISABLED_PREFIX,
-) -> Dict[str, Tuple]:
- disabled_job_from_issues = {}
- for item in disable_issues:
- issue_url = item["url"]
- issue_number = issue_url.split("/")[-1]
-
- title = item["title"]
- job_name = title[len(prefix) :].strip()
-
- if not job_name:
- continue
-
- username = item["author"]["login"]
- # To keep the CI safe, we will only allow author with write permission
- # to the repo to disable jobs
- if not username or not can_disable_jobs(
- owner=owner, repo=repo, username=username, token=token
- ):
- continue
-
- parts = job_name.split("/", JOB_NAME_MAXSPLIT)
- # Split the job name into workflow, platform, and configuration names
- # For example, pull / linux-bionic-py3.8-clang9 / test (dynamo) name
- # include the following 3 parts: pull (job name), linux-bionic-py3.8-clang9
- # (platform name), and test (dynamo) (configuration name)
- workflow_name = parts[0].strip() if parts else ""
- platform_name = parts[1].strip() if len(parts) >= 2 else ""
- config_name = parts[2].strip() if len(parts) >= 3 else ""
-
- disabled_job_from_issues[job_name] = (
- username,
- issue_number,
- issue_url,
- workflow_name,
- platform_name,
- config_name,
- )
-
- return disabled_job_from_issues
+HUD_URL = "https://hud.pytorch.org"
def dump_json(data: Dict[str, Any], filename: str):
@@ -294,50 +19,20 @@ def dump_json(data: Dict[str, Any], filename: str):
def main() -> None:
- parser = argparse.ArgumentParser(description="Update the list of disabled tests")
- parser.add_argument(
- "--owner",
- default=OWNER,
- help="Set the repo owner to query the issues from",
- )
- parser.add_argument(
- "--repo",
- default=REPO,
- help="Set the repo to query the issues from",
- )
- args = parser.parse_args()
- token = os.getenv("GITHUB_TOKEN")
- if not token:
- raise RuntimeError("The GITHUB_TOKEN environment variable is required")
-
- # Get the list of disabled issues and sort them
- disable_issues = get_disable_issues(token)
-
- disable_test_issues, disable_job_issues = filter_disable_issues(disable_issues)
- # Create the list of disabled tests taken into account the list of disabled issues
- # and those that are not flaky anymore
- dump_json(get_disabled_tests(disable_test_issues), "disabled-tests-condensed.json")
- dump_json(
- condense_disable_jobs(disable_job_issues, args.owner, args.repo, token),
- "disabled-jobs.json",
- )
+ with urlopen(
+ Request(
+ f"{HUD_URL}/api/flaky-tests/getDisabledTestsAndJobs",
+ headers={"Authorization": os.environ["FLAKY_TEST_BOT_KEY"]},
+ )
+ ) as result:
+ if result.status != 200:
+ raise RuntimeError(f"Failed to fetch data: {result.status} {result.reason}")
- # Also handle UNSTABLE issues that mark CI jobs as unstable
- unstable_issues = get_disable_issues(token, prefix=UNSTABLE_PREFIX)
+        json_data = json.loads(result.read().decode("utf-8"))
- _, unstable_job_issues = filter_disable_issues(
- unstable_issues, prefix=UNSTABLE_PREFIX
- )
- dump_json(
- condense_disable_jobs(
- unstable_job_issues,
- args.owner,
- args.repo,
- token,
- prefix=UNSTABLE_PREFIX,
- ),
- "unstable-jobs.json",
- )
+ dump_json(json_data["disabledTests"], "disabled-tests-condensed.json")
+ dump_json(json_data["disabledJobs"], "disabled-jobs.json")
+ dump_json(json_data["unstableJobs"], "unstable-jobs.json")
if __name__ == "__main__":
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 310db1a554..f0097aa01b 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -56,23 +56,3 @@ jobs:
# Test aws lambda
pytest -v aws/lambda/tests
-
- test-github-scripts:
- name: Test github scripts
- if: ${{ github.repository == 'pytorch/test-infra' }}
- uses: ./.github/workflows/linux_job_v2.yml
- with:
- docker-image: python:3.11.0-slim-bullseye
- runner: linux.large
- script: |
- # Environment setup
- echo ::group::setup Python environment
- python -m venv .venv/
- source .venv/bin/activate
- pip install pip==23.0.1 pytest==7.2.0 \
- jsonschema==4.17.3 numpy==1.24.1 pandas==2.1.4 boto3==1.19.12 \
- clickhouse-connect==0.8.14
- echo ::endgroup::
-
- # Test tools
- pytest -v .github/scripts/test_*.py
diff --git a/.github/workflows/update_disabled_tests.yml b/.github/workflows/update_disabled_tests.yml
index 906d5fb69e..e0019057cd 100644
--- a/.github/workflows/update_disabled_tests.yml
+++ b/.github/workflows/update_disabled_tests.yml
@@ -30,7 +30,7 @@ jobs:
# PyTorch bot token is the most obvious choice. Outside of the
# environment, we do not have access to this token so fall back to the
# GITHUB_TOKEN.
- GITHUB_TOKEN: ${{ github.ref == 'refs/heads/main' && secrets.GH_MERGEBOT_TOKEN || secrets.GITHUB_TOKEN }}
+ FLAKY_TEST_BOT_KEY: ${{ secrets.FLAKY_TEST_BOT_KEY }}
run: |
python3 .github/scripts/update_disabled_issues.py
diff --git a/torchci/lib/flakyBot/aggregateDisableIssue.ts b/torchci/lib/flakyBot/aggregateDisableIssue.ts
index ea4f77a351..d26ee870cd 100644
--- a/torchci/lib/flakyBot/aggregateDisableIssue.ts
+++ b/torchci/lib/flakyBot/aggregateDisableIssue.ts
@@ -15,6 +15,7 @@ import {
getTestOwnerLabels,
getWorkflowJobNames,
NUM_HOURS_NOT_UPDATED_BEFORE_CLOSING,
+ parseTestName,
supportedPlatforms,
} from "./utils";
@@ -182,7 +183,7 @@ function parsePlatformsFromString(s: string) {
* - invalidPlatformMapping: a map of test name to invalid platforms
* - failedToParse: a list of tests that failed to parse
*/
-function parseBody(body: string) {
+export function parseBody(body: string) {
const start = body.toLowerCase().search("disable the following tests:");
const platformMapping = new Map();
const invalidPlatformMapping = new Map();
@@ -197,17 +198,16 @@ function parseBody(body: string) {
}
const codeBlock = body.substring(start).split("```")[1];
- const testRegex = new RegExp("(test_[a-zA-Z0-9_]+) \\(([a-zA-Z0-9\\._]+)\\)");
const possibleTests = codeBlock
.split("\n")
.filter((line) => line.trim().length > 0);
for (const test of possibleTests) {
- const match = test.match(testRegex);
- if (match) {
+ const splitTest = test.split(":");
+ const key = parseTestName(splitTest[0]);
+ if (key) {
const platforms = parsePlatformsFromString(
- test.split(":").length > 1 ? test.split(":")[1] : ""
+ splitTest.length > 1 ? splitTest[1] : ""
);
- const key = `${match[1]} (${match[2]})`;
const [validPlatforms, invalidPlatforms] = _.partition(
platforms,
(platform) => supportedPlatforms.has(platform)
diff --git a/torchci/lib/flakyBot/singleDisableIssue.ts b/torchci/lib/flakyBot/singleDisableIssue.ts
index 851ff71d76..3f3c83f251 100644
--- a/torchci/lib/flakyBot/singleDisableIssue.ts
+++ b/torchci/lib/flakyBot/singleDisableIssue.ts
@@ -12,6 +12,7 @@ import {
getTestOwnerLabels,
getWorkflowJobNames,
NUM_HOURS_NOT_UPDATED_BEFORE_CLOSING,
+ parseTestName,
supportedPlatforms,
} from "./utils";
@@ -286,30 +287,11 @@ export const parseBody = _.memoize((body: string) => {
// MARK: validation
-const disabledTestIssueTitle = new RegExp("test.+\\s*\\(.+\\)");
-
-function testNameIsExpected(testName: string): boolean {
- if (!disabledTestIssueTitle.test(testName)) {
- return false;
- }
-
- const split = testName.trim().split(/\s+/);
- if (split.length !== 2) {
- return false;
- }
-
- const testSuite = split[1].split(".");
- if (testSuite.length < 2) {
- return false;
- }
- return true;
-}
-
export function isSingleIssue(title: string): boolean {
const prefix = "DISABLED ";
return (
title.startsWith(prefix) &&
- testNameIsExpected(title.substring(prefix.length))
+ parseTestName(title.substring(prefix.length)) !== undefined
);
}
@@ -319,7 +301,7 @@ export function formValidationComment(
): string {
const username = issue.user.login;
const { platformsToSkip, invalidPlatforms } = parseBody(issue.body || "");
- const testName = issue.title.slice("DISABLED ".length);
+ const testName = parseTestName(issue.title.slice("DISABLED ".length));
const platformMsg =
platformsToSkip.length === 0
? "none parsed, defaulting to ALL platforms"
@@ -429,4 +411,5 @@ export const __forTesting__ = {
parseBody,
getExpectedLabels,
isSingleIssue,
+ parseTestName,
};
diff --git a/torchci/lib/flakyBot/utils.ts b/torchci/lib/flakyBot/utils.ts
index 562355e33f..d4509b4d05 100644
--- a/torchci/lib/flakyBot/utils.ts
+++ b/torchci/lib/flakyBot/utils.ts
@@ -131,6 +131,19 @@ export function getWorkflowJobNames(test: FlakyTestData): string[] {
);
}
+const disabledTestIssueTitle = new RegExp(
+ "^\\s*(test_[a-zA-Z0-9-_\\.]+)\\s+\\(([a-zA-Z0-9-_\\.]+)\\)\\s*$"
+);
+
+export function parseTestName(testName: string): string | undefined {
+ const parsed = testName.trim().match(disabledTestIssueTitle);
+ if (parsed === null) {
+ return undefined;
+ }
+
+ return `${parsed[1]} (${parsed[2]})`;
+}
+
// MARK: validation
export function genInvalidPlatformsValidationSection(
diff --git a/torchci/pages/api/flaky-tests/getDisabledTestsAndJobs.ts b/torchci/pages/api/flaky-tests/getDisabledTestsAndJobs.ts
new file mode 100644
index 0000000000..2d2aa060f0
--- /dev/null
+++ b/torchci/pages/api/flaky-tests/getDisabledTestsAndJobs.ts
@@ -0,0 +1,256 @@
+import { parseTitle } from "lib/bot/verifyDisableTestIssueBot";
+import * as aggregateDisableIssue from "lib/flakyBot/aggregateDisableIssue";
+import * as singleDisableIssue from "lib/flakyBot/singleDisableIssue";
+import { parseTestName } from "lib/flakyBot/utils";
+import { hasWritePermissionsUsingOctokit } from "lib/GeneralUtils";
+import { getOctokit } from "lib/github";
+import _ from "lodash";
+import type { NextApiRequest, NextApiResponse } from "next";
+import { Octokit } from "octokit";
+
+const PYTORCH = "pytorch";
+
+const GRAPHQL_QUERY = `
+query ($q: String!, $cursor: String) {
+ search(query: $q, type: ISSUE, first: 100, after: $cursor) {
+ issueCount
+ pageInfo {
+ hasNextPage
+ endCursor
+ }
+ nodes {
+ ... on Issue {
+ number
+ title
+ body
+ url
+ author {
+ login
+ }
+ }
+ }
+ }
+}
+`;
+
+interface IssueData {
+ number: number;
+ title: string;
+ body: string;
+ url: string;
+ author: { login: string };
+}
+
+interface GraphQLResponse {
+ search: {
+ issueCount: number;
+ pageInfo: { hasNextPage: boolean; endCursor: string };
+ nodes: IssueData[];
+ };
+}
+
+export default async function handler(
+ req: NextApiRequest,
+ res: NextApiResponse
+) {
+ const authorization = req.headers.authorization;
+  if (authorization && authorization === process.env.FLAKY_TEST_BOT_KEY) {
+ const octokit = await getOctokit(PYTORCH, PYTORCH);
+ res.status(200).json(await getDisabledTestsAndJobs(octokit));
+ } else {
+ res.status(403).end();
+ }
+}
+
+async function getDisabledTestsAndJobs(octokit: Octokit) {
+ const disableIssues = await getIssues(octokit, "DISABLED");
+ const unstableIssues = await getIssues(octokit, "UNSTABLE");
+ const { disableTestIssues, disableJobIssues } =
+ filterDisableIssues(disableIssues);
+
+ return {
+ disabledTests: getDisabledTests(disableTestIssues),
+ disabledJobs: await condenseJobs(octokit, disableJobIssues, "DISABLED"),
+ unstableJobs: await condenseJobs(octokit, unstableIssues, "UNSTABLE"),
+ };
+}
+
+async function getIssues(octokit: Octokit, prefix: string) {
+ const issues: IssueData[] = [];
+ let cursor: string | null = null;
+ let totalCount = undefined;
+
+ do {
+ const res: GraphQLResponse = await octokit.graphql(
+ GRAPHQL_QUERY,
+ {
+ q: `is:issue is:open repo:${PYTORCH}/${PYTORCH} in:title ${prefix}`,
+ cursor,
+ }
+ );
+ totalCount = res.search.issueCount;
+
+ issues.push(...res.search.nodes);
+ cursor = res.search.pageInfo.hasNextPage
+ ? res.search.pageInfo.endCursor
+ : null;
+ } while (cursor);
+
+ if (issues.length !== totalCount) {
+ console.warn(
+ `Expected ${totalCount} issues with prefix "${prefix}", but found ${issues.length}.`
+ );
+ }
+
+ return issues.sort((a, b) => a.url.localeCompare(b.url));
+}
+
+function filterDisableIssues(issues: IssueData[]) {
+ const disableTestIssues = [];
+ const disableJobIssues = [];
+
+ for (const issue of issues) {
+ if (
+ singleDisableIssue.isSingleIssue(issue.title) ||
+ aggregateDisableIssue.isAggregateIssue(issue.title)
+ ) {
+ disableTestIssues.push(issue);
+ } else {
+ disableJobIssues.push(issue);
+ }
+ }
+ return { disableTestIssues, disableJobIssues };
+}
+
+function getDisabledTests(issues: IssueData[]) {
+ interface ParsedDisableTestInfo {
+ number: number;
+ url: string;
+ platforms: string[];
+ }
+ const disabledTests = new Map();
+
+ function updateMap(
+ name: string,
+ number: number,
+ url: string,
+ platformsToSkip: string[]
+ ) {
+ const existing = disabledTests.get(name);
+ if (existing === undefined) {
+ disabledTests.set(name, { number, url, platforms: platformsToSkip });
+ } else if (platformsToSkip.length === 0) {
+ existing.platforms = [];
+ } else if (existing.platforms.length !== 0) {
+ existing.platforms.push(...platformsToSkip);
+ }
+ }
+ for (const issue of issues) {
+ if (singleDisableIssue.isSingleIssue(issue.title)) {
+ const { platformsToSkip } = singleDisableIssue.parseBody(issue.body);
+ const name = parseTestName(issue.title.substring("DISABLED ".length));
+ if (name === undefined) {
+ console.warn(`Failed to parse test name from issue: ${issue.title}`);
+ continue;
+ }
+ updateMap(name, issue.number, issue.url, platformsToSkip);
+ } else if (aggregateDisableIssue.isAggregateIssue(issue.title)) {
+ const { platformMapping } = aggregateDisableIssue.parseBody(issue.body);
+ for (const [test, platforms] of platformMapping.entries()) {
+ const name = parseTestName(test);
+ if (name === undefined) {
+ console.warn(`Failed to parse test name from issue: ${issue.title}`);
+ continue;
+ }
+ updateMap(name, issue.number, issue.url, platforms);
+ }
+ }
+ }
+
+ // Convert to object
+ disabledTests.forEach((info) => {
+ info.platforms = Array.from(new Set(info.platforms)).sort();
+ });
+
+ return Object.fromEntries(
+ [...disabledTests.entries()].map(([name, info]) => [
+ name,
+ [info.number.toString(), info.url, info.platforms],
+ ])
+ );
+}
+
+const hasPermission = _.memoize(async (username: string, octokit: Octokit) => {
+ // Check if the user has write permissions to the repository
+ return await hasWritePermissionsUsingOctokit(
+ octokit,
+ username,
+ PYTORCH,
+ PYTORCH
+ );
+});
+
+async function condenseJobs(
+ octokit: Octokit,
+ issues: IssueData[],
+ prefix: "DISABLED" | "UNSTABLE"
+) {
+ const jobs = new Map<
+ string,
+ {
+ username: string;
+ number: number;
+ url: string;
+ workflowName: string;
+ platformName: string;
+ configName: string;
+ }
+ >();
+ for (const issue of issues) {
+ if (issue.title.startsWith(prefix)) {
+ const jobName = parseTitle(issue.title, prefix);
+ if (jobName === undefined) {
+ console.warn(`Failed to parse job name from issue: ${issue.title}`);
+ continue;
+ }
+
+ // Check if the author is the bot or has permission
+ if (
+ issue.author.login !== "pytorch-bot" &&
+ !(await hasPermission(issue.author.login, octokit))
+ ) {
+ continue;
+ }
+
+ const parts = jobName.split("/");
+ jobs.set(jobName, {
+ username: issue.author.login,
+ number: issue.number,
+ url: issue.url,
+ workflowName: parts[0].trim(),
+ platformName: (parts[1] || "").trim(),
+ configName: parts.slice(2).join("/").trim(),
+ });
+ }
+ }
+
+ // Convert to object
+
+ return Object.fromEntries(
+ [...jobs.entries()].map(([name, info]) => [
+ name,
+ [
+ info.username,
+ info.number.toString(),
+ info.url,
+ info.workflowName,
+ info.platformName,
+ info.configName,
+ ],
+ ])
+ );
+}
+
+export const __forTesting__ = {
+ getDisabledTestsAndJobs,
+};
diff --git a/torchci/test/flakyBotTests/flakyBotIntegration.test.ts b/torchci/test/flakyBotTests/flakyBotIntegration.test.ts
index d04d0b32d1..ead5171c47 100644
--- a/torchci/test/flakyBotTests/flakyBotIntegration.test.ts
+++ b/torchci/test/flakyBotTests/flakyBotIntegration.test.ts
@@ -1,7 +1,6 @@
import dayjs from "dayjs";
-import { __forTesting__ as aggregateDisableIssue } from "lib/flakyBot/aggregateDisableIssue";
import * as flakyBotUtils from "lib/flakyBot/utils";
-import { FlakyTestData, IssueData } from "lib/types";
+import { IssueData } from "lib/types";
import nock from "nock";
import { __forTesting__ as disableFlakyTestBot } from "pages/api/flaky-tests/disable";
import { deepCopy, handleScope } from "../common";
@@ -9,6 +8,9 @@ import * as utils from "../utils";
import {
flakyTestA,
flakyTestB,
+ genAggIssueFor,
+ genAggTests,
+ genSingleIssueFor,
genValidFlakyTest,
mockGetRawTestFile,
nonFlakyTestA,
@@ -54,22 +56,6 @@ describe("Disable Flaky Test Integration Tests", () => {
});
describe("Single Test Issue", () => {
- function genSingleIssueFor(
- test: FlakyTestData,
-    input: Partial<IssueData>
- ): IssueData {
- return {
- number: 1,
- title: `DISABLED ${test.name} (__main__.${test.suite})`,
- html_url: "test url",
- state: "open" as "open" | "closed",
- body: `Platforms: ${flakyBotUtils.getPlatformsAffected(test.jobNames)}`,
- updated_at: dayjs().subtract(4, "hour").toString(),
- author_association: "MEMBER",
- labels: [],
- ...input,
- };
- }
describe("Create/update issues", () => {
test("Create new issue", async () => {
const flakyTest = { ...flakyTestA };
@@ -384,33 +370,6 @@ describe("Disable Flaky Test Integration Tests", () => {
});
describe("Aggregate Test Issue", () => {
- function genAggTests(test: FlakyTestData) {
- return Array.from({ length: 11 }, (_, i) =>
- genValidFlakyTest({
- ...test,
-
- name: `test_${i}`,
- suite: `suite_${i}`,
- })
- );
- }
- function genAggIssueFor(
- tests: FlakyTestData[],
-    input: Partial<IssueData>
- ): IssueData {
- return {
- number: 1,
- title: aggregateDisableIssue.getTitle(tests[0]),
- html_url: "test url",
- state: "open" as "open" | "closed",
- body: aggregateDisableIssue.getBody(tests),
- updated_at: dayjs().subtract(4, "hour").toString(),
- author_association: "MEMBER",
- labels: [],
- ...input,
- };
- }
-
describe("Create/update issues", () => {
test("Create new issue", async () => {
const tests = genAggTests(flakyTestA);
diff --git a/torchci/test/flakyBotTests/flakyBotTestsUtils.ts b/torchci/test/flakyBotTests/flakyBotTestsUtils.ts
index af10d9851b..e2be4d1073 100644
--- a/torchci/test/flakyBotTests/flakyBotTestsUtils.ts
+++ b/torchci/test/flakyBotTests/flakyBotTestsUtils.ts
@@ -1,5 +1,7 @@
import dayjs from "dayjs";
-import { FlakyTestData } from "lib/types";
+import { __forTesting__ as aggregateDisableIssue } from "lib/flakyBot/aggregateDisableIssue";
+import { getPlatformsAffected } from "lib/flakyBot/utils";
+import { FlakyTestData, IssueData } from "lib/types";
import nock from "nock";
// This file contains utils and mock data for flaky bot tests. I think if you
@@ -126,3 +128,48 @@ export function mockGetRawTestFile(file: string, content: string) {
.get(`/pytorch/pytorch/main/test/${file}`)
.reply(200, Buffer.from(content));
}
+
+export function genSingleIssueFor(
+ test: FlakyTestData,
+  input: Partial<IssueData>
+): IssueData {
+ return {
+ number: 1,
+ title: `DISABLED ${test.name} (__main__.${test.suite})`,
+ html_url: "test url",
+ state: "open" as "open" | "closed",
+ body: `Platforms: ${getPlatformsAffected(test.jobNames)}`,
+ updated_at: dayjs().subtract(4, "hour").toString(),
+ author_association: "MEMBER",
+ labels: [],
+ ...input,
+ };
+}
+
+export function genAggTests(test: FlakyTestData) {
+ return Array.from({ length: 11 }, (_, i) =>
+ genValidFlakyTest({
+ ...test,
+
+ name: `test_${i}`,
+ suite: `suite_${i}`,
+ })
+ );
+}
+
+export function genAggIssueFor(
+ tests: FlakyTestData[],
+  input: Partial<IssueData>
+): IssueData {
+ return {
+ number: 1,
+ title: aggregateDisableIssue.getTitle(tests[0]),
+ html_url: "test url",
+ state: "open" as "open" | "closed",
+ body: aggregateDisableIssue.getBody(tests),
+ updated_at: dayjs().subtract(4, "hour").toString(),
+ author_association: "MEMBER",
+ labels: [],
+ ...input,
+ };
+}
diff --git a/torchci/test/flakyBotTests/flakySingleIssue.test.ts b/torchci/test/flakyBotTests/flakySingleIssue.test.ts
index 006a5b6b20..72cd15738d 100644
--- a/torchci/test/flakyBotTests/flakySingleIssue.test.ts
+++ b/torchci/test/flakyBotTests/flakySingleIssue.test.ts
@@ -86,6 +86,7 @@ describe("Flaky Test Bot Single Issue Unit Tests", () => {
const areSingleIssues = [
"DISABLED test_a (__main__.suite_a)",
"DISABLED test_a (t.test_a)",
+ "DISABLED test_a (t.test_a) ",
"DISABLED test_a (t.test_a.TestLinAlgCPU)",
"DISABLED test_aDFSOIDJ (t.test_a.TestLinAlgCPU)",
];
diff --git a/torchci/test/flakyBotTests/getDisabledTestsAndJobs.test.ts b/torchci/test/flakyBotTests/getDisabledTestsAndJobs.test.ts
new file mode 100644
index 0000000000..5a5c907678
--- /dev/null
+++ b/torchci/test/flakyBotTests/getDisabledTestsAndJobs.test.ts
@@ -0,0 +1,420 @@
+import nock from "nock";
+import { __forTesting__ as getDisabledTestsAndJobs } from "pages/api/flaky-tests/getDisabledTestsAndJobs";
+import { handleScope } from "../common";
+import * as utils from "../utils";
+import {
+ flakyTestA,
+ genAggIssueFor,
+ genAggTests,
+ genSingleIssueFor,
+} from "./flakyBotTestsUtils";
+
+nock.disableNetConnect();
+
+function mockGraphQLQuery(
+ issues: {
+ number: number;
+ title: string;
+ body: string;
+ url: string;
+ authorLogin: string;
+ }[]
+) {
+ return nock("https://api.github.com")
+ .post("/graphql", (body) => {
+ return body.query.includes("search");
+ })
+ .reply(200, {
+ data: {
+ search: {
+ issueCount: issues.length,
+ pageInfo: { hasNextPage: false, endCursor: "" },
+ nodes: issues.map((issue) => ({
+ number: issue.number,
+ title: issue.title,
+ body: issue.body,
+ url: issue.url,
+ author: { login: issue.authorLogin },
+ })),
+ },
+ },
+ });
+}
+
+describe("Get disable/unstable job/test jsons", () => {
+ const octokit = utils.testOctokit();
+ beforeEach(() => {});
+
+ afterEach(async () => {
+ nock.cleanAll();
+ jest.restoreAllMocks();
+ });
+ test("Sanity check no results", async () => {
+ const scope = [mockGraphQLQuery([]), mockGraphQLQuery([])];
+
+ const result = await getDisabledTestsAndJobs.getDisabledTestsAndJobs(
+ octokit
+ );
+ expect(result).toEqual({
+ disabledTests: {},
+ disabledJobs: {},
+ unstableJobs: {},
+ });
+
+ handleScope(scope);
+ });
+
+ test("One test", async () => {
+ const issue = genSingleIssueFor(flakyTestA, {});
+ const scope = [
+ mockGraphQLQuery([
+ {
+ number: issue.number,
+ title: issue.title,
+ body: issue.body,
+ url: "url",
+ authorLogin: "pytorch-bot",
+ },
+ ]),
+ mockGraphQLQuery([]),
+ ];
+
+ const result = await getDisabledTestsAndJobs.getDisabledTestsAndJobs(
+ octokit
+ );
+ expect(result).toEqual({
+ disabledTests: {
+ "test_a (__main__.suite_a)": ["1", "url", ["win"]],
+ },
+ disabledJobs: {},
+ unstableJobs: {},
+ });
+
+ handleScope(scope);
+ });
+
+ test("Two tests merge platforms", async () => {
+ const issue = genSingleIssueFor(flakyTestA, {});
+ const scope = [
+ mockGraphQLQuery([
+ {
+ number: issue.number,
+ title: issue.title,
+ body: issue.body,
+ url: "url",
+ authorLogin: "pytorch-bot",
+ },
+ {
+ number: 2,
+ title: issue.title,
+ body: "Platforms: linux",
+ url: "url2",
+ authorLogin: "pytorch-bot",
+ },
+ ]),
+ mockGraphQLQuery([]),
+ ];
+
+ const result = await getDisabledTestsAndJobs.getDisabledTestsAndJobs(
+ octokit
+ );
+ expect(result).toEqual({
+ disabledTests: {
+ "test_a (__main__.suite_a)": ["1", "url", ["linux", "win"]],
+ },
+ disabledJobs: {},
+ unstableJobs: {},
+ });
+
+ handleScope(scope);
+ });
+
+ test("Many tests merge platforms: all", async () => {
+ const issue = genSingleIssueFor(flakyTestA, {});
+ const scope = [
+ mockGraphQLQuery([
+ {
+ number: issue.number,
+ title: issue.title,
+ body: issue.body,
+ url: "url",
+ authorLogin: "pytorch-bot",
+ },
+ {
+ number: 2,
+ title: issue.title,
+ body: "Platforms:",
+ url: "url2",
+ authorLogin: "pytorch-bot",
+ },
+ {
+ number: 3,
+ title: issue.title,
+ body: "Platforms: linux",
+ url: "url2",
+ authorLogin: "pytorch-bot",
+ },
+ {
+ number: 4,
+ title: issue.title,
+ body: "Platforms: mac",
+ url: "url2",
+ authorLogin: "pytorch-bot",
+ },
+ ]),
+ mockGraphQLQuery([]),
+ ];
+
+ const result = await getDisabledTestsAndJobs.getDisabledTestsAndJobs(
+ octokit
+ );
+ expect(result).toEqual({
+ disabledTests: {
+ "test_a (__main__.suite_a)": ["1", "url", []],
+ },
+ disabledJobs: {},
+ unstableJobs: {},
+ });
+
+ handleScope(scope);
+ });
+
+ test("Malformed test -> job", async () => {
+ const issue = genSingleIssueFor(flakyTestA, {});
+ const scope = [
+ mockGraphQLQuery([
+ {
+ number: issue.number,
+ title: issue.title + " 2",
+ body: issue.body + " 2",
+ url: "url2",
+ authorLogin: "pytorch-bot",
+ },
+ ]),
+ mockGraphQLQuery([]),
+ ];
+
+ const result = await getDisabledTestsAndJobs.getDisabledTestsAndJobs(
+ octokit
+ );
+ expect(result).toEqual({
+ disabledTests: {},
+ disabledJobs: {
+ "test_a (__main__.suite_a) 2": [
+ "pytorch-bot",
+ "1",
+ "url2",
+ "test_a (__main__.suite_a) 2",
+ "",
+ "",
+ ],
+ },
+ unstableJobs: {},
+ });
+
+ handleScope(scope);
+ });
+
+ test("disabled job", async () => {
+ const scope = [
+ mockGraphQLQuery([
+ {
+ number: 1,
+ title: "DISABLED Lint / Link checks / lint-urls / linux-job",
+ body: "",
+ url: "url",
+ authorLogin: "pytorch-bot",
+ },
+ ]),
+ mockGraphQLQuery([]),
+ ];
+
+ const result = await getDisabledTestsAndJobs.getDisabledTestsAndJobs(
+ octokit
+ );
+ expect(result).toEqual({
+ disabledTests: {},
+ disabledJobs: {
+ "Lint / Link checks / lint-urls / linux-job": [
+ "pytorch-bot",
+ "1",
+ "url",
+ "Lint",
+ "Link checks",
+ "lint-urls / linux-job",
+ ],
+ },
+ unstableJobs: {},
+ });
+
+ handleScope(scope);
+ });
+
+ test("unstable job", async () => {
+ const scope = [
+ mockGraphQLQuery([]),
+ mockGraphQLQuery([
+ {
+ number: 1,
+ title: "UNSTABLE Lint / Link checks / lint-urls / linux-job",
+ body: "",
+ url: "url",
+ authorLogin: "pytorch-bot",
+ },
+ ]),
+ ];
+
+ const result = await getDisabledTestsAndJobs.getDisabledTestsAndJobs(
+ octokit
+ );
+ expect(result).toEqual({
+ disabledTests: {},
+ disabledJobs: {},
+ unstableJobs: {
+ "Lint / Link checks / lint-urls / linux-job": [
+ "pytorch-bot",
+ "1",
+ "url",
+ "Lint",
+ "Link checks",
+ "lint-urls / linux-job",
+ ],
+ },
+ });
+
+ handleScope(scope);
+ });
+
+ test("unstable/disable mix up", async () => {
+ const scope = [
+ mockGraphQLQuery([
+ {
+ number: 1,
+ title: "UNSTABLE Lint / Link checks / lint-urls / linux-job",
+ body: "",
+ url: "url",
+ authorLogin: "pytorch-bot",
+ },
+ ]),
+ mockGraphQLQuery([
+ {
+ number: 1,
+ title: "DISABLED Lint / Link checks / lint-urls / linux-job",
+ body: "",
+ url: "url",
+ authorLogin: "pytorch-bot",
+ },
+ ]),
+ ];
+
+ const result = await getDisabledTestsAndJobs.getDisabledTestsAndJobs(
+ octokit
+ );
+ expect(result).toEqual({
+ disabledTests: {},
+ disabledJobs: {},
+ unstableJobs: {},
+ });
+
+ handleScope(scope);
+ });
+
+ test("aggregate issue", async () => {
+ const issue = genAggIssueFor(genAggTests(flakyTestA), {});
+ const scope = [
+ mockGraphQLQuery([
+ {
+ number: 1,
+ title: issue.title,
+ body: issue.body,
+ url: "url",
+ authorLogin: "pytorch-bot",
+ },
+ ]),
+ mockGraphQLQuery([]),
+ ];
+
+ const result = await getDisabledTestsAndJobs.getDisabledTestsAndJobs(
+ octokit
+ );
+ expect(Object.keys(result.disabledTests).length).toBe(11);
+ expect(result.disabledTests["test_5 (__main__.suite_5)"]).toEqual([
+ "1",
+ "url",
+ ["win"],
+ ]);
+ handleScope(scope);
+ });
+
+ describe("aggregate issue tests", () => {
+ const aggTests = genAggTests(flakyTestA);
+ const aggregateIssue = genAggIssueFor(aggTests, {});
+ const singleIssue = genSingleIssueFor(aggTests[5], {});
+
+ test("aggregate issue and single issue", async () => {
+ const scope = [
+ mockGraphQLQuery([
+ {
+ number: 1,
+ title: aggregateIssue.title,
+ body: aggregateIssue.body,
+ url: "url",
+ authorLogin: "pytorch-bot",
+ },
+ {
+ number: 2,
+ title: singleIssue.title,
+ body: "Platforms: linux",
+ url: "url2",
+ authorLogin: "pytorch-bot",
+ },
+ ]),
+ mockGraphQLQuery([]),
+ ];
+
+ const result = await getDisabledTestsAndJobs.getDisabledTestsAndJobs(
+ octokit
+ );
+ expect(Object.keys(result.disabledTests).length).toBe(11);
+ expect(result.disabledTests["test_5 (__main__.suite_5)"]).toEqual([
+ "1",
+ "url",
+ ["linux", "win"],
+ ]);
+ handleScope(scope);
+ });
+
+ test("aggregate issue and single issue, all platforms", async () => {
+ const scope = [
+ mockGraphQLQuery([
+ {
+ number: 1,
+ title: aggregateIssue.title,
+ body: aggregateIssue.body,
+ url: "url",
+ authorLogin: "pytorch-bot",
+ },
+ {
+ number: 2,
+ title: singleIssue.title,
+ body: "Platforms:",
+ url: "url2",
+ authorLogin: "pytorch-bot",
+ },
+ ]),
+ mockGraphQLQuery([]),
+ ];
+
+ const result = await getDisabledTestsAndJobs.getDisabledTestsAndJobs(
+ octokit
+ );
+ expect(Object.keys(result.disabledTests).length).toBe(11);
+ expect(result.disabledTests["test_5 (__main__.suite_5)"]).toEqual([
+ "1",
+ "url",
+ [],
+ ]);
+ handleScope(scope);
+ });
+ });
+});
diff --git a/torchci/test/utils.ts b/torchci/test/utils.ts
index 33218c7cd4..2052a31ab0 100644
--- a/torchci/test/utils.ts
+++ b/torchci/test/utils.ts
@@ -48,8 +48,8 @@ export function mockConfig(
.reply(200, content);
}
-export function mockAccessToken(): void {
- nock("https://api.github.com")
+export function mockAccessToken() {
+ return nock("https://api.github.com")
.post("/app/installations/2/access_tokens")
.reply(200, { token: "test" });
}
diff --git a/torchci/test/verifyDisableTestIssue.test.ts b/torchci/test/verifyDisableTestIssue.test.ts
index 46c7fb39dc..e6051cacf0 100644
--- a/torchci/test/verifyDisableTestIssue.test.ts
+++ b/torchci/test/verifyDisableTestIssue.test.ts
@@ -113,7 +113,7 @@ describe("Verify disable issues integration tests", () => {
labels?: string[];
}) {
const payload = requireDeepCopy("./fixtures/issues.opened.json");
- payload.issue.title = "DISABLED testMethodName (testClass.TestSuite)";
+ payload.issue.title = "DISABLED test_method_name (testClass.TestSuite)";
payload.issue.user.id = pytorchBotId;
payload.issue.labels = [];
@@ -332,7 +332,7 @@ describe("Verify disable issues integration tests", () => {
number,
[
"",
- "~15 minutes, `testMethodName (testClass.TestSuite)` will be disabled",
+ "~15 minutes, `test_method_name (testClass.TestSuite)` will be disabled",
"these platforms: asan, rocm, win.",
],
["don't have permission", "ERROR", "WARNING"]
@@ -357,7 +357,7 @@ describe("Verify disable issues integration tests", () => {
number,
[
"",
- "~15 minutes, `testMethodName (testClass.TestSuite)` will be disabled",
+ "~15 minutes, `test_method_name (testClass.TestSuite)` will be disabled",
"all platforms.",
],
["don't have permission", "ERROR", "WARNING"]
@@ -399,7 +399,7 @@ describe("Verify disable issues integration tests", () => {
body: "Platforms: win",
labels: ["module: windows", "random label"],
title:
- "DISABLED testMethodName (quantization.core.test_workflow_ops.TestFakeQuantizeOps)",
+ "DISABLED test_method_name (quantization.core.test_workflow_ops.TestFakeQuantizeOps)",
});
const scope = [
@@ -411,7 +411,7 @@ describe("Verify disable issues integration tests", () => {
[
"",
"these platforms: win.",
- "testMethodName (quantization.core.test_workflow_ops.TestFakeQuantizeOps)",
+ "test_method_name (quantization.core.test_workflow_ops.TestFakeQuantizeOps)",
],
["don't have permission", "ERROR", "WARNING"]
),
@@ -446,7 +446,7 @@ describe("Verify disable issues integration tests", () => {
const { payload, owner, repo, number } = defaultE2ETestInputs({
body: "whatever\nPlatforms:\nyay",
labels: ["module: windows", "random label"],
- title: "DISABLED testMethodName cuz it borked ",
+ title: "DISABLED test_method_name cuz it borked ",
});
const scope = [