Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-07-03 10:36:47 +00:00)
fix CI machine tag
commit c9cba49ab5 (parent fd56b22278)
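Every hunk in this commit makes the same change: each job's `runs-on` field is pointed at a self-hosted runner tagged `ubuntu-latest`, replacing either the GitHub-hosted `ubuntu-latest` runner, the `[self-hosted, gpu]` tag, or the unspaced `[self-hosted,ubuntu-latest]` spelling. A minimal sketch of the resulting job shape; the job name and steps are illustrative, not taken from the repository:

```yaml
jobs:
  example-job:                           # illustrative job name
    # Schedule onto a self-hosted runner registered with the ubuntu-latest label.
    runs-on: [self-hosted, ubuntu-latest]
    container:
      image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
      options: --gpus all --rm
    steps:
      - uses: actions/checkout@v4
      - run: nvidia-smi                  # illustrative step
```

A job written this way is only picked up by a self-hosted runner whose registration carries the matching labels, e.g. passing something like `--labels ubuntu-latest` when configuring the runner.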
.github/workflows/build_on_pr.yml (4 changes, vendored)
@@ -34,7 +34,7 @@ jobs:
       anyExtensionFileChanged: ${{ steps.find-extension-change.outputs.any_changed }}
       changedLibraryFiles: ${{ steps.find-lib-change.outputs.all_changed_files }}
       anyLibraryFileChanged: ${{ steps.find-lib-change.outputs.any_changed }}
-    runs-on: [self-hosted,ubuntu-latest]
+    runs-on: [self-hosted, ubuntu-latest]
     concurrency:
       group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-detect-change
       cancel-in-progress: true
@@ -87,7 +87,7 @@ jobs:
     name: Build and Test Colossal-AI
     needs: detect
     if: needs.detect.outputs.anyLibraryFileChanged == 'true'
-    runs-on: [self-hosted,ubuntu-latest]
+    runs-on: [self-hosted, ubuntu-latest]
     container:
       image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
       options: --gpus all --shm-size=2g --rm -v /dev/shm -v /data/scratch:/data/scratch
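The two hunks above show how the change-detection outputs are wired: the detect job exposes `any_changed`/`all_changed_files` from its steps as job outputs, and the build job gates on them through `needs`. A minimal sketch of that wiring, assuming a `tj-actions/changed-files`-style step as the source of `any_changed` (the action version, step id, and file pattern are assumptions, not copied from build_on_pr.yml):

```yaml
jobs:
  detect:
    runs-on: [self-hosted, ubuntu-latest]
    outputs:
      anyLibraryFileChanged: ${{ steps.find-lib-change.outputs.any_changed }}
      changedLibraryFiles: ${{ steps.find-lib-change.outputs.all_changed_files }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0                       # full history so the diff base is available
      - id: find-lib-change
        uses: tj-actions/changed-files@v44     # assumed action; any step exposing any_changed works
        with:
          files: colossalai/**                 # illustrative file pattern
  build:
    name: Build and Test Colossal-AI
    needs: detect
    if: needs.detect.outputs.anyLibraryFileChanged == 'true'
    runs-on: [self-hosted, ubuntu-latest]
    steps:
      - run: echo "library files changed; run the full build"
```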
.github/workflows/build_on_schedule.yml (2 changes, vendored)
@@ -10,7 +10,7 @@ jobs:
   build:
     name: Build and Test Colossal-AI
     if: github.repository == 'hpcaitech/ColossalAI'
-    runs-on: [self-hosted, gpu]
+    runs-on: [self-hosted, ubuntu-latest]
     container:
       image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
       options: --gpus all --rm -v /dev/shm -v /data/scratch/:/data/scratch/

@@ -10,7 +10,7 @@ jobs:
   matrix_preparation:
     name: Prepare Container List
     if: github.repository == 'hpcaitech/ColossalAI'
-    runs-on: [self-hosted,ubuntu-latest]
+    runs-on: [self-hosted, ubuntu-latest]
     outputs:
       matrix: ${{ steps.set-matrix.outputs.matrix }}
     steps:
.github/workflows/doc_check_on_pr.yml (2 changes, vendored)
@@ -15,7 +15,7 @@ jobs:
     if: |
       github.event.pull_request.draft == false &&
       github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
-    runs-on: ubuntu-latest
+    runs-on: [self-hosted, ubuntu-latest]
     concurrency:
       group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-check-i18n
       cancel-in-progress: true
.github/workflows/doc_test_on_pr.yml (2 changes, vendored)
@@ -54,7 +54,7 @@ jobs:
       needs.detect-changed-doc.outputs.any_changed == 'true'
     name: Test the changed Doc
     needs: detect-changed-doc
-    runs-on: [self-hosted, gpu]
+    runs-on: [self-hosted, ubuntu-latest]
     container:
       image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
       options: --gpus all --rm
.github/workflows/doc_test_on_schedule.yml (2 changes, vendored)
@@ -10,7 +10,7 @@ jobs:
     # Add this condition to avoid executing this job if the trigger event is workflow_dispatch.
     if: github.repository == 'hpcaitech/ColossalAI'
     name: Test the changed Doc
-    runs-on: [self-hosted, gpu]
+    runs-on: [self-hosted, ubuntu-latest]
     container:
       image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
       options: --gpus all --rm

@@ -40,7 +40,7 @@ jobs:
       github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
     name: Manually check example files
     needs: manual_check_matrix_preparation
-    runs-on: [self-hosted, gpu]
+    runs-on: [self-hosted, ubuntu-latest]
     strategy:
       fail-fast: false
       matrix: ${{fromJson(needs.manual_check_matrix_preparation.outputs.matrix)}}
.github/workflows/example_check_on_pr.yml (2 changes, vendored)
@@ -85,7 +85,7 @@ jobs:
       needs.detect-changed-example.outputs.anyChanged == 'true'
     name: Test the changed example
     needs: detect-changed-example
-    runs-on: [self-hosted, gpu]
+    runs-on: [self-hosted, ubuntu-latest]
     strategy:
       fail-fast: false
       matrix: ${{fromJson(needs.detect-changed-example.outputs.matrix)}}

@@ -10,7 +10,7 @@ jobs:
   matrix_preparation:
     if: github.repository == 'hpcaitech/ColossalAI'
     name: Prepare matrix for weekly check
-    runs-on: ubuntu-latest
+    runs-on: [self-hosted, ubuntu-latest]
     outputs:
       matrix: ${{ steps.setup-matrix.outputs.matrix }}
     steps:

@@ -29,7 +29,7 @@ jobs:
     if: github.repository == 'hpcaitech/ColossalAI'
     name: Weekly check all examples
     needs: matrix_preparation
-    runs-on: [self-hosted, gpu]
+    runs-on: [self-hosted, ubuntu-latest]
     strategy:
       fail-fast: false
       matrix: ${{fromJson(needs.matrix_preparation.outputs.matrix)}}
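Several of the workflows above pair a matrix_preparation job with a test job that expands `${{ fromJson(needs.matrix_preparation.outputs.matrix) }}`. A minimal sketch of that dynamic-matrix pattern; the matrix contents and step commands are illustrative, not copied from these workflows:

```yaml
jobs:
  matrix_preparation:
    runs-on: [self-hosted, ubuntu-latest]
    outputs:
      matrix: ${{ steps.setup-matrix.outputs.matrix }}
    steps:
      - id: setup-matrix
        # Emit the matrix as one JSON line; the container list is illustrative.
        run: echo 'matrix={"container":["pytorch-cuda:2.2.2-12.1.0"]}' >> "$GITHUB_OUTPUT"
  weekly_check:
    needs: matrix_preparation
    runs-on: [self-hosted, ubuntu-latest]
    strategy:
      fail-fast: false
      matrix: ${{ fromJson(needs.matrix_preparation.outputs.matrix) }}
    container:
      image: image-cloud.luchentech.com/hpcaitech/${{ matrix.container }}   # illustrative image path
    steps:
      - run: nvidia-smi
```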

@@ -9,7 +9,7 @@ jobs:
   release:
     name: Publish Docker Image to DockerHub
     if: github.repository == 'hpcaitech/ColossalAI'
-    runs-on: [self-hosted, gpu]
+    runs-on: [self-hosted, ubuntu-latest]
     container:
       image: "hpcaitech/docker-in-docker:latest"
       options: --gpus all --rm -v /var/run/docker.sock:/var/run/docker.sock

@@ -9,7 +9,7 @@ jobs:
   publish:
     if: github.repository == 'hpcaitech/ColossalAI'
     name: Build and publish Python 🐍 distributions 📦 to PyPI
-    runs-on: ubuntu-latest
+    runs-on: [self-hosted, ubuntu-latest]
     timeout-minutes: 20
     outputs:
       status: ${{ steps.publish.outcome }}

@@ -6,6 +6,10 @@ on:
       - 'version.txt'
 
+permissions:
+  id-token: write
+  contents: read
+
 jobs:
   build-n-publish:
     if: github.event_name == 'workflow_dispatch' || github.repository == 'hpcaitech/ColossalAI'
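The `permissions` block added in this hunk (`id-token: write`, `contents: read`) is the shape normally used for OIDC-based "trusted publishing" to PyPI, where the upload step requests a short-lived token instead of using a stored API key. A minimal sketch of how that permission is typically consumed; the build and publish steps are illustrative, not taken from this workflow:

```yaml
jobs:
  build-n-publish:
    runs-on: [self-hosted, ubuntu-latest]
    permissions:
      id-token: write   # lets the job request an OIDC token
      contents: read
    steps:
      - uses: actions/checkout@v4
      - run: pip install build && python -m build
      # Picks up the OIDC token automatically; no password/API-token input needed.
      - uses: pypa/gh-action-pypi-publish@release/v1
```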

@@ -10,7 +10,7 @@ jobs:
   generate-and-publish:
     if: github.repository == 'hpcaitech/ColossalAI'
     name: Generate leaderboard report and publish to Lark
-    runs-on: ubuntu-latest
+    runs-on: [self-hosted, ubuntu-latest]
     timeout-minutes: 20
     steps:
       - uses: actions/checkout@v2
.github/workflows/report_test_coverage.yml (2 changes, vendored)
@@ -8,7 +8,7 @@ on:
 
 jobs:
   report-test-coverage:
-    runs-on: ubuntu-latest
+    runs-on: [self-hosted, ubuntu-latest]
     if: ${{ github.event.workflow_run.conclusion == 'success' }}
     steps:
       - name: "Download artifact"
.github/workflows/run_chatgpt_examples.yml (2 changes, vendored)
@@ -17,7 +17,7 @@ jobs:
       github.event.pull_request.draft == false &&
       github.base_ref == 'main' &&
       github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
-    runs-on: [self-hosted, gpu]
+    runs-on: [self-hosted, ubuntu-latest]
     container:
       image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
       options: --gpus all --rm -v /data/scratch/examples-data:/data/scratch/examples-data --shm-size=10.24gb
.github/workflows/run_chatgpt_unit_tests.yml (2 changes, vendored)
@@ -17,7 +17,7 @@ jobs:
       github.event.pull_request.draft == false &&
       github.base_ref == 'main' &&
       github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
-    runs-on: [self-hosted, gpu]
+    runs-on: [self-hosted, ubuntu-latest]
     container:
       image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
       options: --gpus all --rm -v /data/scratch/examples-data:/data/scratch/examples-data

@@ -17,7 +17,7 @@ jobs:
       github.event.pull_request.draft == false &&
       github.base_ref == 'main' &&
       github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
-    runs-on: [self-hosted, gpu]
+    runs-on: [self-hosted, ubuntu-latest]
     container:
       image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
       volumes:
.github/workflows/submodule.yml (2 changes, vendored)
@@ -7,7 +7,7 @@ on:
 
 jobs:
   sync-submodule:
-    runs-on: ubuntu-latest
+    runs-on: [self-hosted, ubuntu-latest]
     if: github.repository == 'hpcaitech/ColossalAI'
     steps:
       - name: Checkout
.github/workflows/translate_comment.yml (2 changes, vendored)
@@ -7,7 +7,7 @@ on:
 
 jobs:
   build:
-    runs-on: ubuntu-latest
+    runs-on: [self-hosted, ubuntu-latest]
     steps:
       - uses: usthe/issues-translate-action@v2.7
         with: