[ci] remove IN_CI env var
The conventional env var to set is CI. Both CircleCI and GHA set it, so IN_CI is unnecessary.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/79229
Approved by: https://github.com/janeyx99
Committed by: PyTorch MergeBot
Parent: f51d5233f2
Commit: c978b609f7
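
For context, a minimal sketch of the convention this change leans on: both CircleCI and GitHub Actions export a CI variable in every job, so scripts can branch on CI instead of a hand-rolled IN_CI flag. The snippet below is only an illustration of that convention and is not part of this diff:

    #!/usr/bin/env bash
    # Illustrative only: CircleCI and GitHub Actions both set CI,
    # so no custom IN_CI export is needed to detect a CI environment.
    if [[ -n "${CI:-}" ]]; then
      echo "running under CI"
    else
      echo "running locally"
    fi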

.circleci/config.yml (generated): 6 changes

@@ -512,7 +512,6 @@ jobs:
 no_output_timeout: "1h"
 command: |
 set -e
-export IN_CI=1
 export CROSS_COMPILE_ARM64=1
 export JOB_BASE_NAME=$CIRCLE_JOB

@@ -550,7 +549,6 @@ jobs:
 no_output_timeout: "1h"
 command: |
 set -e
-export IN_CI=1
 export JOB_BASE_NAME=$CIRCLE_JOB

 # Install sccache
@@ -587,7 +585,6 @@ jobs:
 no_output_timeout: "1h"
 command: |
 set -e
-export IN_CI=1
 export JOB_BASE_NAME=$CIRCLE_JOB

 chmod a+x .jenkins/pytorch/macos-test.sh
@@ -600,7 +597,6 @@ jobs:
 source /Users/distiller/workspace/miniconda3/bin/activate
 python3 -m pip install boto3==1.19.12

-export IN_CI=1
 export JOB_BASE_NAME=$CIRCLE_JOB

 # Using the same IAM user to write stats to our OSS bucket
@@ -626,7 +622,6 @@ jobs:
 no_output_timeout: "1h"
 command: |
 set -e
-export IN_CI=1
 export BUILD_LITE_INTERPRETER=1
 export JOB_BASE_NAME=$CIRCLE_JOB
 chmod a+x ${HOME}/project/.jenkins/pytorch/macos-lite-interpreter-build-test.sh
@@ -830,7 +825,6 @@ jobs:
 no_output_timeout: "1h"
 command: |
 set -e
-export IN_CI=1
 WORKSPACE=/Users/distiller/workspace
 PROJ_ROOT=/Users/distiller/project
 export TCLLIBPATH="/usr/local/lib"

(file path not shown)

@@ -66,7 +66,6 @@ add_to_env_file() {
 esac
 }

-add_to_env_file IN_CI 1
 add_to_env_file CI_MASTER "${CI_MASTER:-}"
 add_to_env_file COMMIT_SOURCE "${CIRCLE_BRANCH:-}"
 add_to_env_file BUILD_ENVIRONMENT "${BUILD_ENVIRONMENT}"

(file path not shown)

@@ -37,7 +37,6 @@
 no_output_timeout: "1h"
 command: |
 set -e
-export IN_CI=1
 export CROSS_COMPILE_ARM64=1
 export JOB_BASE_NAME=$CIRCLE_JOB

@@ -75,7 +74,6 @@
 no_output_timeout: "1h"
 command: |
 set -e
-export IN_CI=1
 export JOB_BASE_NAME=$CIRCLE_JOB

 # Install sccache
@@ -112,7 +110,6 @@
 no_output_timeout: "1h"
 command: |
 set -e
-export IN_CI=1
 export JOB_BASE_NAME=$CIRCLE_JOB

 chmod a+x .jenkins/pytorch/macos-test.sh
@@ -125,7 +122,6 @@
 source /Users/distiller/workspace/miniconda3/bin/activate
 python3 -m pip install boto3==1.19.12

-export IN_CI=1
 export JOB_BASE_NAME=$CIRCLE_JOB

 # Using the same IAM user to write stats to our OSS bucket
@@ -151,7 +147,6 @@
 no_output_timeout: "1h"
 command: |
 set -e
-export IN_CI=1
 export BUILD_LITE_INTERPRETER=1
 export JOB_BASE_NAME=$CIRCLE_JOB
 chmod a+x ${HOME}/project/.jenkins/pytorch/macos-lite-interpreter-build-test.sh
@@ -355,7 +350,6 @@
 no_output_timeout: "1h"
 command: |
 set -e
-export IN_CI=1
 WORKSPACE=/Users/distiller/workspace
 PROJ_ROOT=/Users/distiller/project
 export TCLLIBPATH="/usr/local/lib"

.github/actions/setup-linux/action.yml (vendored): 1 change

@@ -45,3 +45,4 @@ runs:
 shell: bash
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"

.github/actions/setup-rocm/action.yml (vendored): 1 change

@@ -50,6 +50,7 @@ runs:
 shell: bash
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"

 - name: ROCm set GPU_FLAG
 shell: bash

.github/templates/common.yml.j2 (vendored): 1 change

@@ -195,6 +195,7 @@ on:
 - name: Preserve github env variables for use in docker
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"
 {%- endmacro -%}

 {%- macro teardown_ec2_linux(pytorch_directory="") -%}

(file paths not shown)

@@ -37,7 +37,6 @@ env:
 BUILD_ENVIRONMENT: !{{ build_environment }}
 BUILDER_ROOT: /builder
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 PYTORCH_FINAL_PACKAGE_DIR: /artifacts

@@ -47,7 +47,6 @@ env:
 AWS_DEFAULT_REGION: us-east-1
 BUILD_ENVIRONMENT: !{{ build_environment }}
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 SKIP_ALL_TESTS: 1

@@ -47,7 +47,6 @@ env:
 AWS_DEFAULT_REGION: us-east-1
 BUILD_ENVIRONMENT: !{{ build_environment }}
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 SHA1: ${{ github.event.pull_request.head.sha || github.sha }}

.github/workflows/_android-build-test.yml (vendored): 1 change

@@ -13,7 +13,6 @@ on:
 description: Name of the base docker image to build with.

 env:
-IN_CI: 1 # TODO delete in favor of GITHUB_ACTIONS
 GIT_DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}

 jobs:

(file path not shown)

@@ -30,7 +30,6 @@ on:
 required: true

 env:
-IN_CI: 1 # TODO delete in favor of GITHUB_ACTIONS
 GIT_DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}

 jobs:

.github/workflows/_bazel-build-test.yml (vendored): 2 changes

@@ -13,7 +13,6 @@ on:
 description: Name of the base docker image to build with.

 env:
-IN_CI: 1 # TODO delete in favor of GITHUB_ACTIONS
 GIT_DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}

 jobs:
@@ -123,7 +122,6 @@ jobs:
 -e CUSTOM_TEST_ARTIFACT_BUILD_DIR \
 -e GITHUB_ACTIONS \
 -e GIT_DEFAULT_BRANCH="$GIT_DEFAULT_BRANCH" \
--e IN_CI \
 -e SHARD_NUMBER \
 -e NUM_TEST_SHARDS \
 -e JOB_BASE_NAME \

.github/workflows/_docs.yml (vendored): 4 changes

@@ -22,9 +22,6 @@ on:
 required: false
 description: Permissions for pushing to the docs site.

-env:
-IN_CI: 1 # TODO delete in favor of GITHUB_ACTIONS
-
 jobs:
 build-docs:
 # Don't run on forked repos.
@@ -86,7 +83,6 @@ jobs:
 container_name=$(docker run \
 -e BUILD_ENVIRONMENT \
 -e CUSTOM_TEST_ARTIFACT_BUILD_DIR \
--e IN_CI \
 -e MAX_JOBS="$(nproc --ignore=2)" \
 -e SHA1="$GITHUB_SHA" \
 -e DOCS_VERSION="${target}" \

.github/workflows/_ios-build-test.yml (vendored): 1 change

@@ -31,7 +31,6 @@ on:
 description: ios cert

 env:
-IN_CI: 1
 GIT_DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
 BUILD_ENVIRONMENT: ${{ inputs.build-environment }}
 IOS_PLATFORM: ${{ inputs.ios-platform }}

.github/workflows/_linux-build.yml (vendored): 3 changes

@@ -27,9 +27,6 @@ on:
 value: ${{ jobs.build.outputs.docker-image }}
 description: The docker image containing the built PyTorch.

-env:
-IN_CI: 1 # TODO delete in favor of GITHUB_ACTIONS
-
 jobs:
 build:
 # Don't run on forked repos.

.github/workflows/_linux-test.yml (vendored): 2 changes

@@ -17,7 +17,6 @@ on:
 description: Docker image to run in.

 env:
-IN_CI: 1 # TODO delete in favor of GITHUB_ACTIONS
 GIT_DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}

 jobs:
@@ -111,7 +110,6 @@ jobs:
 -e PR_NUMBER \
 -e CUSTOM_TEST_ARTIFACT_BUILD_DIR \
 -e GITHUB_ACTIONS \
--e IN_CI \
 -e BRANCH \
 -e SHA1 \
 -e AWS_DEFAULT_REGION \

.github/workflows/_mac-build.yml (vendored): 3 changes

@@ -29,9 +29,6 @@ on:
 required: true
 description: Secret for S3 bucket for macOS sccache.

-env:
-IN_CI: 1 # TODO delete in favor of GITHUB_ACTIONS
-
 # For setup-miniconda, see https://github.com/conda-incubator/setup-miniconda/issues/179
 defaults:
 run:

.github/workflows/_mac-test-arm64.yml (vendored): 4 changes

@@ -53,5 +53,9 @@ jobs:
 # shellcheck disable=SC1090
 . ~/miniconda3/etc/profile.d/conda.sh
+set -ex
+# TODO(https://github.com/pytorch/pytorch/issues/79293)
+# This step currently fails if we actually run as if we're in CI.
+unset CI

 conda run --cwd test -p "${ENV_NAME}" python3 test_mps.py -v
 conda env remove -p "${ENV_NAME}"

.github/workflows/_mac-test.yml (vendored): 3 changes

@@ -20,9 +20,6 @@ on:
 required: true
 description: secret acess key for test stats upload

-env:
-IN_CI: 1 # TODO delete in favor of GITHUB_ACTIONS
-
 # For setup-miniconda, see https://github.com/conda-incubator/setup-miniconda/issues/179
 defaults:
 run:

.github/workflows/_rocm-test.yml (vendored): 2 changes

@@ -29,7 +29,6 @@ on:
 description: secret acess key for test stats upload

 env:
-IN_CI: 1 # TODO delete in favor of GITHUB_ACTIONS
 GIT_DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}

 jobs:
@@ -108,7 +107,6 @@ jobs:
 -e PR_NUMBER \
 -e CUSTOM_TEST_ARTIFACT_BUILD_DIR \
 -e GITHUB_ACTIONS \
--e IN_CI \
 -e BRANCH \
 -e SHA1 \
 -e AWS_DEFAULT_REGION \

.github/workflows/_win-build.yml (vendored): 1 change

@@ -18,7 +18,6 @@ on:
 description: If set, build in debug mode.

 env:
-IN_CI: 1 # TODO delete in favor of GITHUB_ACTIONS
 GIT_DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}

 jobs:

.github/workflows/_win-test.yml (vendored): 1 change

@@ -17,7 +17,6 @@ on:
 description: JSON description of what test configs to run.

 env:
-IN_CI: 1 # TODO delete in favor of GITHUB_ACTIONS
 GIT_DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}

 jobs:

.github/workflows/generated-linux-binary-conda-nightly.yml (generated, vendored): 1 change

@@ -26,7 +26,6 @@ env:
 BUILD_ENVIRONMENT: linux-binary-conda
 BUILDER_ROOT: /builder
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 PYTORCH_FINAL_PACKAGE_DIR: /artifacts

.github/workflows/generated-linux-binary-libtorch-cxx11-abi-master.yml (generated, vendored): 1 change

@@ -21,7 +21,6 @@ env:
 BUILD_ENVIRONMENT: linux-binary-libtorch-cxx11-abi
 BUILDER_ROOT: /builder
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 PYTORCH_FINAL_PACKAGE_DIR: /artifacts

.github/workflows/generated-linux-binary-libtorch-cxx11-abi-nightly.yml (generated, vendored): 5 changes

@@ -26,7 +26,6 @@ env:
 BUILD_ENVIRONMENT: linux-binary-libtorch-cxx11-abi
 BUILDER_ROOT: /builder
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 PYTORCH_FINAL_PACKAGE_DIR: /artifacts
@@ -5816,6 +5815,7 @@ jobs:
 - name: Preserve github env variables for use in docker
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"
 - uses: seemethere/download-artifact-s3@v4
 name: Download Build Artifacts
 with:
@@ -6166,6 +6166,7 @@ jobs:
 - name: Preserve github env variables for use in docker
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"
 - uses: seemethere/download-artifact-s3@v4
 name: Download Build Artifacts
 with:
@@ -6516,6 +6517,7 @@ jobs:
 - name: Preserve github env variables for use in docker
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"
 - uses: seemethere/download-artifact-s3@v4
 name: Download Build Artifacts
 with:
@@ -6866,6 +6868,7 @@ jobs:
 - name: Preserve github env variables for use in docker
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"
 - uses: seemethere/download-artifact-s3@v4
 name: Download Build Artifacts
 with:

.github/workflows/generated-linux-binary-libtorch-pre-cxx11-master.yml (generated, vendored): 1 change

@@ -21,7 +21,6 @@ env:
 BUILD_ENVIRONMENT: linux-binary-libtorch-pre-cxx11
 BUILDER_ROOT: /builder
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 PYTORCH_FINAL_PACKAGE_DIR: /artifacts

.github/workflows/generated-linux-binary-libtorch-pre-cxx11-nightly.yml (generated, vendored): 5 changes

@@ -26,7 +26,6 @@ env:
 BUILD_ENVIRONMENT: linux-binary-libtorch-pre-cxx11
 BUILDER_ROOT: /builder
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 PYTORCH_FINAL_PACKAGE_DIR: /artifacts
@@ -5816,6 +5815,7 @@ jobs:
 - name: Preserve github env variables for use in docker
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"
 - uses: seemethere/download-artifact-s3@v4
 name: Download Build Artifacts
 with:
@@ -6166,6 +6166,7 @@ jobs:
 - name: Preserve github env variables for use in docker
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"
 - uses: seemethere/download-artifact-s3@v4
 name: Download Build Artifacts
 with:
@@ -6516,6 +6517,7 @@ jobs:
 - name: Preserve github env variables for use in docker
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"
 - uses: seemethere/download-artifact-s3@v4
 name: Download Build Artifacts
 with:
@@ -6866,6 +6868,7 @@ jobs:
 - name: Preserve github env variables for use in docker
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"
 - uses: seemethere/download-artifact-s3@v4
 name: Download Build Artifacts
 with:

.github/workflows/generated-linux-binary-manywheel-master.yml (generated, vendored): 1 change

@@ -21,7 +21,6 @@ env:
 BUILD_ENVIRONMENT: linux-binary-manywheel
 BUILDER_ROOT: /builder
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 PYTORCH_FINAL_PACKAGE_DIR: /artifacts

.github/workflows/generated-linux-binary-manywheel-nightly.yml (generated, vendored): 9 changes

@@ -26,7 +26,6 @@ env:
 BUILD_ENVIRONMENT: linux-binary-manywheel
 BUILDER_ROOT: /builder
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 PYTORCH_FINAL_PACKAGE_DIR: /artifacts
@@ -1602,6 +1601,7 @@ jobs:
 - name: Preserve github env variables for use in docker
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"
 - uses: seemethere/download-artifact-s3@v4
 name: Download Build Artifacts
 with:
@@ -1949,6 +1949,7 @@ jobs:
 - name: Preserve github env variables for use in docker
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"
 - uses: seemethere/download-artifact-s3@v4
 name: Download Build Artifacts
 with:
@@ -3684,6 +3685,7 @@ jobs:
 - name: Preserve github env variables for use in docker
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"
 - uses: seemethere/download-artifact-s3@v4
 name: Download Build Artifacts
 with:
@@ -4031,6 +4033,7 @@ jobs:
 - name: Preserve github env variables for use in docker
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"
 - uses: seemethere/download-artifact-s3@v4
 name: Download Build Artifacts
 with:
@@ -5766,6 +5769,7 @@ jobs:
 - name: Preserve github env variables for use in docker
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"
 - uses: seemethere/download-artifact-s3@v4
 name: Download Build Artifacts
 with:
@@ -6113,6 +6117,7 @@ jobs:
 - name: Preserve github env variables for use in docker
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"
 - uses: seemethere/download-artifact-s3@v4
 name: Download Build Artifacts
 with:
@@ -7848,6 +7853,7 @@ jobs:
 - name: Preserve github env variables for use in docker
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"
 - uses: seemethere/download-artifact-s3@v4
 name: Download Build Artifacts
 with:
@@ -8195,6 +8201,7 @@ jobs:
 - name: Preserve github env variables for use in docker
 run: |
 env | grep '^GITHUB' > "/tmp/github_env_${GITHUB_RUN_ID}"
+env | grep '^CI' > "/tmp/github_env_${GITHUB_RUN_ID}"
 - uses: seemethere/download-artifact-s3@v4
 name: Download Build Artifacts
 with:

.github/workflows/generated-macos-arm64-binary-conda-nightly.yml (generated, vendored): 1 change

@@ -25,7 +25,6 @@ env:
 AWS_DEFAULT_REGION: us-east-1
 BUILD_ENVIRONMENT: macos-arm64-binary-conda
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 SKIP_ALL_TESTS: 1

.github/workflows/generated-macos-arm64-binary-wheel-nightly.yml (generated, vendored): 1 change

@@ -25,7 +25,6 @@ env:
 AWS_DEFAULT_REGION: us-east-1
 BUILD_ENVIRONMENT: macos-arm64-binary-wheel
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 SKIP_ALL_TESTS: 1

.github/workflows/generated-macos-binary-conda-nightly.yml (generated, vendored): 1 change

@@ -25,7 +25,6 @@ env:
 AWS_DEFAULT_REGION: us-east-1
 BUILD_ENVIRONMENT: macos-binary-conda
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 SKIP_ALL_TESTS: 1

.github/workflows/generated-macos-binary-libtorch-cxx11-abi-nightly.yml (generated, vendored): 1 change

@@ -25,7 +25,6 @@ env:
 AWS_DEFAULT_REGION: us-east-1
 BUILD_ENVIRONMENT: macos-binary-libtorch-cxx11-abi
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 SKIP_ALL_TESTS: 1

.github/workflows/generated-macos-binary-libtorch-pre-cxx11-nightly.yml (generated, vendored): 1 change

@@ -25,7 +25,6 @@ env:
 AWS_DEFAULT_REGION: us-east-1
 BUILD_ENVIRONMENT: macos-binary-libtorch-pre-cxx11
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 SKIP_ALL_TESTS: 1

.github/workflows/generated-macos-binary-wheel-nightly.yml (generated, vendored): 1 change

@@ -25,7 +25,6 @@ env:
 AWS_DEFAULT_REGION: us-east-1
 BUILD_ENVIRONMENT: macos-binary-wheel
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 SKIP_ALL_TESTS: 1

.github/workflows/generated-windows-binary-conda-nightly.yml (generated, vendored): 1 change

@@ -24,7 +24,6 @@ env:
 AWS_DEFAULT_REGION: us-east-1
 BUILD_ENVIRONMENT: windows-binary-conda
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 SHA1: ${{ github.event.pull_request.head.sha || github.sha }}

.github/workflows/generated-windows-binary-libtorch-debug-master.yml (generated, vendored): 1 change

@@ -19,7 +19,6 @@ env:
 AWS_DEFAULT_REGION: us-east-1
 BUILD_ENVIRONMENT: windows-binary-libtorch-debug
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 SHA1: ${{ github.event.pull_request.head.sha || github.sha }}

.github/workflows/generated-windows-binary-libtorch-debug-nightly.yml (generated, vendored): 1 change

@@ -24,7 +24,6 @@ env:
 AWS_DEFAULT_REGION: us-east-1
 BUILD_ENVIRONMENT: windows-binary-libtorch-debug
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 SHA1: ${{ github.event.pull_request.head.sha || github.sha }}

.github/workflows/generated-windows-binary-libtorch-release-master.yml (generated, vendored): 1 change

@@ -19,7 +19,6 @@ env:
 AWS_DEFAULT_REGION: us-east-1
 BUILD_ENVIRONMENT: windows-binary-libtorch-release
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 SHA1: ${{ github.event.pull_request.head.sha || github.sha }}

.github/workflows/generated-windows-binary-libtorch-release-nightly.yml (generated, vendored): 1 change

@@ -24,7 +24,6 @@ env:
 AWS_DEFAULT_REGION: us-east-1
 BUILD_ENVIRONMENT: windows-binary-libtorch-release
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 SHA1: ${{ github.event.pull_request.head.sha || github.sha }}

.github/workflows/generated-windows-binary-wheel-master.yml (generated, vendored): 1 change

@@ -19,7 +19,6 @@ env:
 AWS_DEFAULT_REGION: us-east-1
 BUILD_ENVIRONMENT: windows-binary-wheel
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 SHA1: ${{ github.event.pull_request.head.sha || github.sha }}

.github/workflows/generated-windows-binary-wheel-nightly.yml (generated, vendored): 1 change

@@ -24,7 +24,6 @@ env:
 AWS_DEFAULT_REGION: us-east-1
 BUILD_ENVIRONMENT: windows-binary-wheel
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-IN_CI: 1
 PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
 PR_NUMBER: ${{ github.event.pull_request.number }}
 SHA1: ${{ github.event.pull_request.head.sha || github.sha }}

(remaining file paths not shown)

@@ -107,7 +107,7 @@ if [[ $BUILD_ENVIRONMENT == *cuda* ]]; then
 export PATH="/usr/local/cuda/bin:$PATH"
 fi
 if [[ $BUILD_ENVIRONMENT == *rocm* ]]; then
-if [[ -n "$IN_CI" && -z "$PYTORCH_ROCM_ARCH" ]]; then
+if [[ -n "$CI" && -z "$PYTORCH_ROCM_ARCH" ]]; then
 # Set ROCM_ARCH to gfx900 and gfx906 for CI builds, if user doesn't override.
 echo "Limiting PYTORCH_ROCM_ARCH to gfx90[06] for CI builds"
 export PYTORCH_ROCM_ARCH="gfx900;gfx906"

@@ -37,6 +37,9 @@ gcc --version
 echo "CMake version:"
 cmake --version

+echo "Environment variables:"
+env
+
 if [[ "$BUILD_ENVIRONMENT" == *cuda* ]]; then
 echo "NVCC version:"
 nvcc --version

@@ -139,7 +142,7 @@ if [[ "$BUILD_ENVIRONMENT" == *rocm* ]]; then
 export MAX_JOBS=$(($(nproc) - 1))
 fi

-if [[ -n "$IN_CI" && -z "$PYTORCH_ROCM_ARCH" ]]; then
+if [[ -n "$CI" && -z "$PYTORCH_ROCM_ARCH" ]]; then
 # Set ROCM_ARCH to gfx900 and gfx906 for CI builds, if user doesn't override.
 echo "Limiting PYTORCH_ROCM_ARCH to gfx90[06] for CI builds"
 export PYTORCH_ROCM_ARCH="gfx900;gfx906"

@@ -34,11 +34,6 @@ fi
 # system; to find out more, grep for this string in ossci-job-dsl.
 echo "ENTERED_USER_LAND"

-# Previously IN_CI is only set in .circleci/scripts/setup_ci_environment.sh,
-# this means other CI system doesn't actually have this flag properly set.
-# Now we explicitly export IN_CI environment variable here.
-export IN_CI=1
-
 # compositional trap taken from https://stackoverflow.com/a/7287873/23845

 # note: printf is used instead of echo to avoid backslash

@@ -5,7 +5,7 @@
 source "$(dirname "${BASH_SOURCE[0]}")/macos-common.sh"

 # Build PyTorch
-if [ -z "${IN_CI}" ]; then
+if [ -z "${CI}" ]; then
 export DEVELOPER_DIR=/Applications/Xcode9.app/Contents/Developer
 fi

@@ -12,19 +12,19 @@ pip install -q hypothesis "expecttest==0.1.3" "librosa>=0.6.2" "numba<=0.49.1" p
 pip install "unittest-xml-reporting<=3.2.0,>=2.0.0" \
 pytest

-if [ -z "${IN_CI}" ]; then
+if [ -z "${CI}" ]; then
 rm -rf "${WORKSPACE_DIR}"/miniconda3/lib/python3.6/site-packages/torch*
 fi

 export CMAKE_PREFIX_PATH=${WORKSPACE_DIR}/miniconda3/

 # Test PyTorch
-if [ -z "${IN_CI}" ]; then
+if [ -z "${CI}" ]; then
 export DEVELOPER_DIR=/Applications/Xcode9.app/Contents/Developer
 fi

 # Download torch binaries in the test jobs
-if [ -z "${IN_CI}" ]; then
+if [ -z "${CI}" ]; then
 rm -rf "${WORKSPACE_DIR}"/miniconda3/lib/python3.6/site-packages/torch*
 aws s3 cp s3://ossci-macos-build/pytorch/"${IMAGE_COMMIT_TAG}".7z "${IMAGE_COMMIT_TAG}".7z
 7z x "${IMAGE_COMMIT_TAG}".7z -o"${WORKSPACE_DIR}/miniconda3/lib/python3.6/site-packages"

@@ -8,7 +8,7 @@
 source "$(dirname "${BASH_SOURCE[0]}")/common.sh"

 echo "Testing pytorch (distributed only)"
-if [ -n "${IN_CI}" ]; then
+if [ -n "${CI}" ]; then
 # TODO move this to docker
 # Pin unittest-xml-reporting to freeze printing test summary logic, related: https://github.com/pytorch/pytorch/issues/69014
 pip_install "unittest-xml-reporting<=3.2.0,>=2.0.0"

@@ -29,6 +29,9 @@ fi
 # shellcheck source=./common.sh
 source "$(dirname "${BASH_SOURCE[0]}")/common.sh"

+echo "Environment variables"
+env
+
 echo "Testing pytorch"

 export LANG=C.UTF-8

@@ -32,7 +32,7 @@ from torch.distributed.elastic.multiprocessing.api import (
 )
 from torch.distributed.elastic.multiprocessing.errors import ErrorHandler
 from torch.testing._internal.common_utils import (
-IS_IN_CI,
+IS_CI,
 IS_MACOS,
 IS_WINDOWS,
 NO_MULTIPROCESSING_SPAWN,

@@ -658,7 +658,7 @@ if not (TEST_WITH_DEV_DBG_ASAN or IS_WINDOWS or IS_MACOS):


 # tests incompatible with tsan or asan, the redirect functionality does not work on macos or windows
-if not (TEST_WITH_DEV_DBG_ASAN or IS_WINDOWS or IS_MACOS or IS_IN_CI):
+if not (TEST_WITH_DEV_DBG_ASAN or IS_WINDOWS or IS_MACOS or IS_CI):

 class StartProcessesNotCITest(StartProcessesTest):
 def test_wrap_bad(self):

@@ -10,7 +10,7 @@ if not dist.is_available():
 print("Distributed not available, skipping tests", file=sys.stderr)
 sys.exit(0)

-from torch.testing._internal.common_utils import IS_IN_CI, run_tests
+from torch.testing._internal.common_utils import IS_CI, run_tests
 from torch.testing._internal.distributed.rpc.faulty_rpc_agent_test_fixture import (
 FaultyRpcAgentTestFixture,
 )
@@ -22,7 +22,7 @@ from torch.testing._internal.distributed.rpc_utils import (

 # On CircleCI these tests are already run on CPU jobs, thus to save resources do
 # not run them on GPU jobs, since thet wouldn't provide additional test signal.
-if not (IS_IN_CI and torch.cuda.is_available()):
+if not (IS_CI and torch.cuda.is_available()):
 globals().update(
 generate_tests(
 "Faulty",

@@ -10,7 +10,7 @@ if not dist.is_available():
 print("Distributed not available, skipping tests", file=sys.stderr)
 sys.exit(0)

-from torch.testing._internal.common_utils import IS_IN_CI, run_tests
+from torch.testing._internal.common_utils import IS_CI, run_tests
 from torch.testing._internal.distributed.rpc.tensorpipe_rpc_agent_test_fixture import (
 TensorPipeRpcAgentTestFixture,
 )
@@ -23,7 +23,7 @@ from torch.testing._internal.distributed.rpc_utils import (

 # On CircleCI these tests are already run on CPU jobs, thus to save resources do
 # not run them on GPU jobs, since thet wouldn't provide additional test signal.
-if not (IS_IN_CI and torch.cuda.is_available()):
+if not (IS_CI and torch.cuda.is_available()):
 globals().update(
 generate_tests(
 "TensorPipe",

@@ -17,8 +17,8 @@ import tempfile
 import torch
 from torch.utils import cpp_extension
 from torch.testing._internal.common_utils import (
+IS_CI,
 FILE_SCHEMA,
-IS_IN_CI,
 TEST_WITH_ROCM,
 shell,
 set_cwd,
@@ -409,7 +409,7 @@ def run_test(
 # If using pytest, replace -f with equivalent -x
 if options.pytest:
 unittest_args = [arg if arg != "-f" else "-x" for arg in unittest_args]
-elif IS_IN_CI:
+elif IS_CI:
 # use the downloaded test cases configuration, not supported in pytest
 unittest_args.extend(["--import-slow-tests", "--import-disabled-tests"])

@@ -1032,7 +1032,7 @@ def main():
 # ]
 # sys.path.remove(test_directory)

-if IS_IN_CI:
+if IS_CI:
 selected_tests = get_reordered_tests(
 selected_tests, ENABLE_PR_HISTORY_REORDERING
 )

@@ -35,7 +35,7 @@ from torch.utils.data.datapipes.iter import IterableWrapper
 from torch.utils.data.datapipes.map import SequenceWrapper
 from torch._utils import ExceptionWrapper
 from torch.testing._internal.common_utils import (TestCase, run_tests, TEST_NUMPY, IS_WINDOWS,
-IS_IN_CI, NO_MULTIPROCESSING_SPAWN, skipIfRocm, slowTest,
+IS_CI, NO_MULTIPROCESSING_SPAWN, skipIfRocm, slowTest,
 load_tests, TEST_WITH_ASAN, TEST_WITH_TSAN, IS_SANDCASTLE,
 IS_MACOS)
@@ -47,7 +47,7 @@ except ImportError:
 HAS_PSUTIL = False
 err_msg = ("psutil not found. Some critical data loader tests relying on it "
 "(e.g., TestDataLoader.test_proper_exit) will not run.")
-if IS_IN_CI:
+if IS_CI:
 raise ImportError(err_msg) from None
 else:
 warnings.warn(err_msg)

@@ -5,7 +5,7 @@ import sys
 import unittest
 import pathlib

-from torch.testing._internal.common_utils import TestCase, run_tests, IS_LINUX, IS_IN_CI
+from torch.testing._internal.common_utils import TestCase, run_tests, IS_LINUX, IS_CI


 REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
@@ -33,7 +33,7 @@ class TestImportTime(TestCase):
 )

 @unittest.skipIf(not IS_LINUX, "Memory test is only implemented for Linux")
-@unittest.skipIf(not IS_IN_CI, "Memory test only runs in CI")
+@unittest.skipIf(not IS_CI, "Memory test only runs in CI")
 @unittest.skipIf(rds_write is None, "Cannot import rds_write from tools.stats.scribe")
 def test_peak_memory(self):
 def profile(module, name):
@@ -58,7 +58,7 @@ class TestImportTime(TestCase):


 if __name__ == "__main__":
-if register_rds_schema and IS_IN_CI:
+if register_rds_schema and IS_CI:
 try:
 register_rds_schema(
 "import_stats",

@@ -22,7 +22,7 @@ from torch.testing._internal.common_utils import (
 run_tests,
 IS_SANDCASTLE,
 clone_input_helper,
-IS_IN_CI,
+IS_CI,
 suppress_warnings,
 noncontiguous_like,
 TEST_WITH_ASAN,
@@ -103,7 +103,7 @@ class TestCommon(TestCase):
 def tearDownClass(cls):
 super().tearDownClass()

-if IS_IN_CI:
+if IS_CI:
 err_msg = (
 "The operator(s) below is(are) using dynamic_dtypes in the OpInfo entries."
 "This is OK for testing, but be sure to set the dtypes manually before landing your PR!"

@@ -481,7 +481,7 @@ if __name__ == '__main__':
 test_bases_count = len(get_device_type_test_bases())
 # Test without setting env var should run everything.
 env = dict(os.environ)
-for k in ['IN_CI', PYTORCH_TESTING_DEVICE_ONLY_FOR_KEY, PYTORCH_TESTING_DEVICE_EXCEPT_FOR_KEY]:
+for k in ['CI', PYTORCH_TESTING_DEVICE_ONLY_FOR_KEY, PYTORCH_TESTING_DEVICE_EXCEPT_FOR_KEY]:
 if k in env.keys():
 del env[k]
 _, stderr = TestCase.run_process_no_exception(test_filter_file_template, env=env)

@@ -79,8 +79,7 @@ FILE_SCHEMA = "file://"
 if sys.platform == 'win32':
 FILE_SCHEMA = "file:///"

-# Environment variable `IN_CI` is set in `.jenkins/common.sh`.
-IS_IN_CI = os.getenv('IN_CI') == '1'
+IS_CI = bool(os.getenv('CI'))
 IS_SANDCASTLE = os.getenv('SANDCASTLE') == '1' or os.getenv('TW_JOB_USER') == 'sandcastle'
 IS_FBCODE = os.getenv('PYTORCH_TEST_FBCODE') == '1'
 IS_REMOTE_GPU = os.getenv('PYTORCH_TEST_REMOTE_GPU') == '1'
@@ -463,7 +462,7 @@ parser.add_argument('--repeat', type=int, default=1)
 parser.add_argument('--test_bailouts', action='store_true')
 parser.add_argument('--save-xml', nargs='?', type=str,
 const=_get_test_report_path(),
-default=_get_test_report_path() if IS_IN_CI else None)
+default=_get_test_report_path() if IS_CI else None)
 parser.add_argument('--discover-tests', action='store_true')
 parser.add_argument('--log-suffix', type=str, default="")
 parser.add_argument('--run-parallel', type=int, default=1)
@@ -1439,7 +1438,7 @@ try:
 verbosity=hypothesis.Verbosity.verbose))

 hypothesis.settings.load_profile(
-"pytorch_ci" if IS_IN_CI else os.getenv('PYTORCH_HYPOTHESIS_PROFILE', 'dev')
+"pytorch_ci" if IS_CI else os.getenv('PYTORCH_HYPOTHESIS_PROFILE', 'dev')
 )
 except ImportError:
 print('Fail to import hypothesis in common_utils, tests are not derandomized')
@@ -1493,7 +1492,7 @@ def check_if_enable(test: unittest.TestCase):
 skip_msg = f"Test is disabled because an issue exists disabling it: {issue_url}" \
 f" for {'all' if platforms == [] else ''}platform(s) {', '.join(platforms)}. " \
 "If you're seeing this on your local machine and would like to enable this test, " \
-"please make sure IN_CI is not set and you are not using the flag --import-disabled-tests."
+"please make sure CI is not set and you are not using the flag --import-disabled-tests."
 raise unittest.SkipTest(skip_msg)
 if TEST_SKIP_FAST:
 if not getattr(test, test._testMethodName).__dict__.get('slow_test', False):
@@ -2518,10 +2517,10 @@ class TestCase(expecttest.TestCase):
 def runWithPytorchAPIUsageStderr(code):
 env = os.environ.copy()
 env["PYTORCH_API_USAGE_STDERR"] = "1"
-# remove IN_CI flag since this is a wrapped test process.
-# IN_CI flag should be set in the parent process only.
-if "IN_CI" in env.keys():
-del env["IN_CI"]
+# remove CI flag since this is a wrapped test process.
+# CI flag should be set in the parent process only.
+if "CI" in env.keys():
+del env["CI"]
 (stdout, stderr) = TestCase.run_process_no_exception(code, env=env)
 return stderr.decode('ascii')