This PR moves PyTorch CI capacity from MI300 to a new, larger MI325 cluster. Both GPUs share the same architecture (gfx942), and our testing plans do not change within an architecture, so this PR also pools them under a single runner label, `linux.rocm.gpu.gfx942.<#gpus>`, to reduce overhead and confusion.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/159059
Approved by: https://github.com/jithunnair-amd, https://github.com/atalman
Co-authored-by: deedongala <deekshitha.dongala@amd.com>
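For example, the distributed test shards in the workflow below request 4-GPU runners through the pooled label:

```yaml
{ config: "distributed", shard: 1, num_shards: 3, runner: "linux.rocm.gpu.gfx942.4", owners: ["module:rocm", "oncall:distributed"] }
```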
name: periodic-rocm-mi300

on:
  schedule:
    # We have several schedules so jobs can check github.event.schedule to activate only for a fraction of the runs.
    # Also run less frequently on weekends.
    - cron: 45 0,8,16 * * 1-5
    - cron: 45 4 * * 0,6
    - cron: 45 4,12,20 * * 1-5
    - cron: 45 12 * * 0,6
    - cron: 29 8 * * *  # about 1:29am PDT, for mem leak check and rerun disabled tests
  push:
    tags:
      - ciflow/periodic-rocm-mi300/*
    branches:
      - release/*
  workflow_dispatch:

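# Each cron string gets its own concurrency group, so the staggered schedules do not
# cancel one another; a newer run in the same group cancels any still-in-progress run.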
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref_name }}-${{ github.ref_type == 'branch' && github.sha }}-${{ github.event_name == 'workflow_dispatch' }}-${{ github.event_name == 'schedule' }}-${{ github.event.schedule }}
  cancel-in-progress: true

permissions: read-all

jobs:
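  # Target determination (test prioritization) runs ahead of the test job below.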
  llm-td:
    if: github.repository_owner == 'pytorch'
    name: before-test
    uses: ./.github/workflows/llm_td_retrieval.yml
    permissions:
      id-token: write
      contents: read

  target-determination:
    name: before-test
    uses: ./.github/workflows/target_determination.yml
    needs: llm-td
    permissions:
      id-token: write
      contents: read

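  # Computes the runner label prefix consumed by the build job below.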
  get-label-type:
    name: get-label-type
    uses: pytorch/pytorch/.github/workflows/_runner-determinator.yml@main
    if: (github.event_name != 'schedule' || github.repository == 'pytorch/pytorch') && github.repository_owner == 'pytorch'
    with:
      triggering_actor: ${{ github.triggering_actor }}
      issue_owner: ${{ github.event.pull_request.user.login || github.event.issue.user.login }}
      curr_branch: ${{ github.head_ref || github.ref_name }}
      curr_ref_type: ${{ github.ref_type }}

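  # Distributed tests are sharded 3 ways across pooled 4-GPU gfx942 runners (MI300/MI325).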
  linux-jammy-rocm-py3_10-build:
    name: linux-jammy-rocm-py3.10-mi300
    uses: ./.github/workflows/_linux-build.yml
    needs: get-label-type
    with:
      runner_prefix: "${{ needs.get-label-type.outputs.label-type }}"
      build-environment: linux-jammy-rocm-py3.10-mi300
      docker-image-name: ci-image:pytorch-linux-jammy-rocm-n-py3
      test-matrix: |
        { include: [
          { config: "distributed", shard: 1, num_shards: 3, runner: "linux.rocm.gpu.gfx942.4", owners: ["module:rocm", "oncall:distributed"] },
          { config: "distributed", shard: 2, num_shards: 3, runner: "linux.rocm.gpu.gfx942.4", owners: ["module:rocm", "oncall:distributed"] },
          { config: "distributed", shard: 3, num_shards: 3, runner: "linux.rocm.gpu.gfx942.4", owners: ["module:rocm", "oncall:distributed"] },
        ]}
    secrets: inherit

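  # Runs the test matrix built above on the ROCm runners.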
  linux-jammy-rocm-py3_10-test:
    permissions:
      id-token: write
      contents: read
    name: linux-jammy-rocm-py3.10-mi300
    uses: ./.github/workflows/_rocm-test.yml
    needs:
      - linux-jammy-rocm-py3_10-build
      - target-determination
    with:
      build-environment: linux-jammy-rocm-py3.10-mi300
      docker-image: ${{ needs.linux-jammy-rocm-py3_10-build.outputs.docker-image }}
      test-matrix: ${{ needs.linux-jammy-rocm-py3_10-build.outputs.test-matrix }}
    secrets: inherit