mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-06-25 15:01:43 +00:00
fix
This commit is contained in:
parent
d3c40b9de4
commit
4afff92138
@ -2,11 +2,11 @@
|
|||||||
"build": [
|
"build": [
|
||||||
{
|
{
|
||||||
"torch_command": "pip install torch==2.3.0 torchvision==0.18.0 torchaudio==2.3.0 --index-url https://download.pytorch.org/whl/cu121",
|
"torch_command": "pip install torch==2.3.0 torchvision==0.18.0 torchaudio==2.3.0 --index-url https://download.pytorch.org/whl/cu121",
|
||||||
"cuda_image": "hpcaitech/cuda-conda:12.1"
|
"cuda_image": "image-cloud.luchentech.com/hpcaitech/cuda-conda:12.1"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"torch_command": "pip install torch==2.5.1 torchvision==0.20.1 torchaudio==2.5.1 --index-url https://download.pytorch.org/whl/cu124",
|
"torch_command": "pip install torch==2.5.1 torchvision==0.20.1 torchaudio==2.5.1 --index-url https://download.pytorch.org/whl/cu124",
|
||||||
"cuda_image": "hpcaitech/cuda-conda:12.4"
|
"cuda_image": "image-cloud.luchentech.com/hpcaitech/cuda-conda:12.4"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
2
.github/workflows/build_on_schedule.yml
vendored
2
.github/workflows/build_on_schedule.yml
vendored
@ -12,7 +12,7 @@ jobs:
|
|||||||
if: github.repository == 'hpcaitech/ColossalAI'
|
if: github.repository == 'hpcaitech/ColossalAI'
|
||||||
runs-on: [self-hosted, gpu]
|
runs-on: [self-hosted, gpu]
|
||||||
container:
|
container:
|
||||||
image: hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
||||||
options: --gpus all --rm -v /dev/shm -v /data/scratch/:/data/scratch/
|
options: --gpus all --rm -v /dev/shm -v /data/scratch/:/data/scratch/
|
||||||
timeout-minutes: 90
|
timeout-minutes: 90
|
||||||
steps:
|
steps:
|
||||||
|
2
.github/workflows/close_inactive.yml
vendored
2
.github/workflows/close_inactive.yml
vendored
@ -7,7 +7,7 @@ on:
|
|||||||
jobs:
|
jobs:
|
||||||
close-issues:
|
close-issues:
|
||||||
if: github.event.pull_request.draft == false && github.base_ref == 'main' && github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
|
if: github.event.pull_request.draft == false && github.base_ref == 'main' && github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
permissions:
|
permissions:
|
||||||
issues: write
|
issues: write
|
||||||
pull-requests: write
|
pull-requests: write
|
||||||
|
@ -15,7 +15,7 @@ on:
|
|||||||
jobs:
|
jobs:
|
||||||
matrix_preparation:
|
matrix_preparation:
|
||||||
name: Prepare Container List
|
name: Prepare Container List
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
outputs:
|
outputs:
|
||||||
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
||||||
steps:
|
steps:
|
||||||
@ -31,7 +31,7 @@ jobs:
|
|||||||
do
|
do
|
||||||
for cv in $CUDA_VERSIONS
|
for cv in $CUDA_VERSIONS
|
||||||
do
|
do
|
||||||
DOCKER_IMAGE+=("\"hpcaitech/pytorch-cuda:${tv}-${cv}\"")
|
DOCKER_IMAGE+=("\"image-cloud.luchentech.com/hpcaitech/pytorch-cuda:${tv}-${cv}\"")
|
||||||
done
|
done
|
||||||
done
|
done
|
||||||
|
|
||||||
@ -44,7 +44,7 @@ jobs:
|
|||||||
name: Test for PyTorch Compatibility
|
name: Test for PyTorch Compatibility
|
||||||
needs: matrix_preparation
|
needs: matrix_preparation
|
||||||
if: github.repository == 'hpcaitech/ColossalAI'
|
if: github.repository == 'hpcaitech/ColossalAI'
|
||||||
runs-on: [self-hosted, 8-gpu]
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix: ${{fromJson(needs.matrix_preparation.outputs.matrix)}}
|
matrix: ${{fromJson(needs.matrix_preparation.outputs.matrix)}}
|
||||||
|
@ -9,7 +9,7 @@ on:
|
|||||||
jobs:
|
jobs:
|
||||||
matrix_preparation:
|
matrix_preparation:
|
||||||
name: Prepare Container List
|
name: Prepare Container List
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
outputs:
|
outputs:
|
||||||
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
||||||
concurrency:
|
concurrency:
|
||||||
@ -23,7 +23,7 @@ jobs:
|
|||||||
DOCKER_IMAGE=()
|
DOCKER_IMAGE=()
|
||||||
|
|
||||||
while read tag; do
|
while read tag; do
|
||||||
DOCKER_IMAGE+=("\"hpcaitech/pytorch-cuda:${tag}\"")
|
DOCKER_IMAGE+=("\"image-cloud.luchentech.com/hpcaitech/pytorch-cuda:${tag}\"")
|
||||||
done <.compatibility
|
done <.compatibility
|
||||||
|
|
||||||
container=$( IFS=',' ; echo "${DOCKER_IMAGE[*]}" )
|
container=$( IFS=',' ; echo "${DOCKER_IMAGE[*]}" )
|
||||||
@ -35,7 +35,7 @@ jobs:
|
|||||||
name: Test for PyTorch Compatibility
|
name: Test for PyTorch Compatibility
|
||||||
needs: matrix_preparation
|
needs: matrix_preparation
|
||||||
if: github.repository == 'hpcaitech/ColossalAI'
|
if: github.repository == 'hpcaitech/ColossalAI'
|
||||||
runs-on: [self-hosted, 8-gpu]
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix: ${{fromJson(needs.matrix_preparation.outputs.matrix)}}
|
matrix: ${{fromJson(needs.matrix_preparation.outputs.matrix)}}
|
||||||
|
@ -9,7 +9,7 @@ on:
|
|||||||
jobs:
|
jobs:
|
||||||
matrix_preparation:
|
matrix_preparation:
|
||||||
name: Prepare Container List
|
name: Prepare Container List
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
outputs:
|
outputs:
|
||||||
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
||||||
steps:
|
steps:
|
||||||
@ -20,7 +20,7 @@ jobs:
|
|||||||
DOCKER_IMAGE=()
|
DOCKER_IMAGE=()
|
||||||
|
|
||||||
while read tag; do
|
while read tag; do
|
||||||
DOCKER_IMAGE+=("\"hpcaitech/pytorch-cuda:${tag}\"")
|
DOCKER_IMAGE+=("\"image-cloud.luchentech.com/hpcaitech/pytorch-cuda:${tag}\"")
|
||||||
done <.compatibility
|
done <.compatibility
|
||||||
|
|
||||||
container=$( IFS=',' ; echo "${DOCKER_IMAGE[*]}" )
|
container=$( IFS=',' ; echo "${DOCKER_IMAGE[*]}" )
|
||||||
@ -32,7 +32,7 @@ jobs:
|
|||||||
name: Test for PyTorch Compatibility
|
name: Test for PyTorch Compatibility
|
||||||
needs: matrix_preparation
|
needs: matrix_preparation
|
||||||
if: github.repository == 'hpcaitech/ColossalAI'
|
if: github.repository == 'hpcaitech/ColossalAI'
|
||||||
runs-on: [self-hosted, 8-gpu]
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix: ${{fromJson(needs.matrix_preparation.outputs.matrix)}}
|
matrix: ${{fromJson(needs.matrix_preparation.outputs.matrix)}}
|
||||||
|
@ -11,7 +11,7 @@ jobs:
|
|||||||
build-doc:
|
build-doc:
|
||||||
name: Trigger Documentation Build Workflow
|
name: Trigger Documentation Build Workflow
|
||||||
if: github.repository == 'hpcaitech/ColossalAI'
|
if: github.repository == 'hpcaitech/ColossalAI'
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
steps:
|
steps:
|
||||||
- name: trigger workflow in ColossalAI-Documentation
|
- name: trigger workflow in ColossalAI-Documentation
|
||||||
run: |
|
run: |
|
||||||
|
4
.github/workflows/doc_check_on_pr.yml
vendored
4
.github/workflows/doc_check_on_pr.yml
vendored
@ -15,7 +15,7 @@ jobs:
|
|||||||
if: |
|
if: |
|
||||||
github.event.pull_request.draft == false &&
|
github.event.pull_request.draft == false &&
|
||||||
github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
|
github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
concurrency:
|
concurrency:
|
||||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-check-i18n
|
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-check-i18n
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
@ -33,7 +33,7 @@ jobs:
|
|||||||
if: |
|
if: |
|
||||||
github.event.pull_request.draft == false &&
|
github.event.pull_request.draft == false &&
|
||||||
github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
|
github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
concurrency:
|
concurrency:
|
||||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-check-doc
|
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-check-doc
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
|
4
.github/workflows/doc_test_on_pr.yml
vendored
4
.github/workflows/doc_test_on_pr.yml
vendored
@ -15,7 +15,7 @@ jobs:
|
|||||||
if: |
|
if: |
|
||||||
github.event.pull_request.draft == false &&
|
github.event.pull_request.draft == false &&
|
||||||
github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI' && github.event_name == 'pull_request'
|
github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI' && github.event_name == 'pull_request'
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
outputs:
|
outputs:
|
||||||
any_changed: ${{ steps.changed-files.outputs.any_changed }}
|
any_changed: ${{ steps.changed-files.outputs.any_changed }}
|
||||||
changed_files: ${{ steps.changed-files.outputs.all_changed_files }}
|
changed_files: ${{ steps.changed-files.outputs.all_changed_files }}
|
||||||
@ -56,7 +56,7 @@ jobs:
|
|||||||
needs: detect-changed-doc
|
needs: detect-changed-doc
|
||||||
runs-on: [self-hosted, gpu]
|
runs-on: [self-hosted, gpu]
|
||||||
container:
|
container:
|
||||||
image: hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
||||||
options: --gpus all --rm
|
options: --gpus all --rm
|
||||||
timeout-minutes: 30
|
timeout-minutes: 30
|
||||||
defaults:
|
defaults:
|
||||||
|
2
.github/workflows/doc_test_on_schedule.yml
vendored
2
.github/workflows/doc_test_on_schedule.yml
vendored
@ -12,7 +12,7 @@ jobs:
|
|||||||
name: Test the changed Doc
|
name: Test the changed Doc
|
||||||
runs-on: [self-hosted, gpu]
|
runs-on: [self-hosted, gpu]
|
||||||
container:
|
container:
|
||||||
image: hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
||||||
options: --gpus all --rm
|
options: --gpus all --rm
|
||||||
timeout-minutes: 60
|
timeout-minutes: 60
|
||||||
steps:
|
steps:
|
||||||
|
@ -12,7 +12,7 @@ jobs:
|
|||||||
release:
|
release:
|
||||||
name: Draft Release Post
|
name: Draft Release Post
|
||||||
if: ( github.event_name == 'workflow_dispatch' || github.event.pull_request.merged == true ) && github.repository == 'hpcaitech/ColossalAI'
|
if: ( github.event_name == 'workflow_dispatch' || github.event.pull_request.merged == true ) && github.repository == 'hpcaitech/ColossalAI'
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
with:
|
with:
|
||||||
|
@ -14,7 +14,7 @@ jobs:
|
|||||||
github.base_ref == 'main' &&
|
github.base_ref == 'main' &&
|
||||||
github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
|
github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
|
||||||
name: Check the examples user want
|
name: Check the examples user want
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
outputs:
|
outputs:
|
||||||
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
||||||
steps:
|
steps:
|
||||||
@ -45,7 +45,7 @@ jobs:
|
|||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix: ${{fromJson(needs.manual_check_matrix_preparation.outputs.matrix)}}
|
matrix: ${{fromJson(needs.manual_check_matrix_preparation.outputs.matrix)}}
|
||||||
container:
|
container:
|
||||||
image: hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
||||||
options: --gpus all --rm -v /data/scratch/examples-data:/data/ -v /dev/shm
|
options: --gpus all --rm -v /data/scratch/examples-data:/data/ -v /dev/shm
|
||||||
timeout-minutes: 15
|
timeout-minutes: 15
|
||||||
steps:
|
steps:
|
||||||
|
4
.github/workflows/example_check_on_pr.yml
vendored
4
.github/workflows/example_check_on_pr.yml
vendored
@ -17,7 +17,7 @@ jobs:
|
|||||||
if: |
|
if: |
|
||||||
github.event.pull_request.draft == false &&
|
github.event.pull_request.draft == false &&
|
||||||
github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI' && github.event_name == 'pull_request'
|
github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI' && github.event_name == 'pull_request'
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
outputs:
|
outputs:
|
||||||
matrix: ${{ steps.setup-matrix.outputs.matrix }}
|
matrix: ${{ steps.setup-matrix.outputs.matrix }}
|
||||||
anyChanged: ${{ steps.setup-matrix.outputs.anyChanged }}
|
anyChanged: ${{ steps.setup-matrix.outputs.anyChanged }}
|
||||||
@ -90,7 +90,7 @@ jobs:
|
|||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix: ${{fromJson(needs.detect-changed-example.outputs.matrix)}}
|
matrix: ${{fromJson(needs.detect-changed-example.outputs.matrix)}}
|
||||||
container:
|
container:
|
||||||
image: hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
||||||
options: --gpus all --rm -v /data/scratch/examples-data:/data/ -v /dev/shm
|
options: --gpus all --rm -v /data/scratch/examples-data:/data/ -v /dev/shm
|
||||||
timeout-minutes: 30
|
timeout-minutes: 30
|
||||||
concurrency:
|
concurrency:
|
||||||
|
@ -10,7 +10,7 @@ jobs:
|
|||||||
matrix_preparation:
|
matrix_preparation:
|
||||||
if: github.repository == 'hpcaitech/ColossalAI'
|
if: github.repository == 'hpcaitech/ColossalAI'
|
||||||
name: Prepare matrix for weekly check
|
name: Prepare matrix for weekly check
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
outputs:
|
outputs:
|
||||||
matrix: ${{ steps.setup-matrix.outputs.matrix }}
|
matrix: ${{ steps.setup-matrix.outputs.matrix }}
|
||||||
steps:
|
steps:
|
||||||
@ -34,7 +34,7 @@ jobs:
|
|||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix: ${{fromJson(needs.matrix_preparation.outputs.matrix)}}
|
matrix: ${{fromJson(needs.matrix_preparation.outputs.matrix)}}
|
||||||
container:
|
container:
|
||||||
image: hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
||||||
options: --gpus all --rm -v /data/scratch/examples-data:/data/ -v /dev/shm
|
options: --gpus all --rm -v /data/scratch/examples-data:/data/ -v /dev/shm
|
||||||
timeout-minutes: 30
|
timeout-minutes: 30
|
||||||
steps:
|
steps:
|
||||||
|
@ -46,7 +46,7 @@ jobs:
|
|||||||
notify:
|
notify:
|
||||||
name: Notify Lark via webhook
|
name: Notify Lark via webhook
|
||||||
needs: release
|
needs: release
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
@ -9,7 +9,7 @@ jobs:
|
|||||||
publish:
|
publish:
|
||||||
if: github.repository == 'hpcaitech/ColossalAI'
|
if: github.repository == 'hpcaitech/ColossalAI'
|
||||||
name: Build and publish Python 🐍 distributions 📦 to PyPI
|
name: Build and publish Python 🐍 distributions 📦 to PyPI
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
outputs:
|
outputs:
|
||||||
status: ${{ steps.publish.outcome }}
|
status: ${{ steps.publish.outcome }}
|
||||||
@ -36,7 +36,7 @@ jobs:
|
|||||||
notify:
|
notify:
|
||||||
name: Notify Lark via webhook
|
name: Notify Lark via webhook
|
||||||
needs: publish
|
needs: publish
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
if: ${{ always() }} && github.repository == 'hpcaitech/ColossalAI'
|
if: ${{ always() }} && github.repository == 'hpcaitech/ColossalAI'
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
@ -12,7 +12,7 @@ jobs:
|
|||||||
build-n-publish:
|
build-n-publish:
|
||||||
if: github.event_name == 'workflow_dispatch' || github.repository == 'hpcaitech/ColossalAI' && github.event.pull_request.merged == true && github.base_ref == 'main'
|
if: github.event_name == 'workflow_dispatch' || github.repository == 'hpcaitech/ColossalAI' && github.event.pull_request.merged == true && github.base_ref == 'main'
|
||||||
name: Build and publish Python 🐍 distributions 📦 to PyPI
|
name: Build and publish Python 🐍 distributions 📦 to PyPI
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
@ -35,7 +35,7 @@ jobs:
|
|||||||
notify:
|
notify:
|
||||||
name: Notify Lark via webhook
|
name: Notify Lark via webhook
|
||||||
needs: build-n-publish
|
needs: build-n-publish
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
@ -9,7 +9,7 @@ jobs:
|
|||||||
build-n-publish:
|
build-n-publish:
|
||||||
if: github.event_name == 'workflow_dispatch' || github.repository == 'hpcaitech/ColossalAI'
|
if: github.event_name == 'workflow_dispatch' || github.repository == 'hpcaitech/ColossalAI'
|
||||||
name: Build and publish Python 🐍 distributions 📦 to Test PyPI
|
name: Build and publish Python 🐍 distributions 📦 to Test PyPI
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
@ -10,7 +10,7 @@ jobs:
|
|||||||
generate-and-publish:
|
generate-and-publish:
|
||||||
if: github.repository == 'hpcaitech/ColossalAI'
|
if: github.repository == 'hpcaitech/ColossalAI'
|
||||||
name: Generate leaderboard report and publish to Lark
|
name: Generate leaderboard report and publish to Lark
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
timeout-minutes: 20
|
timeout-minutes: 20
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
2
.github/workflows/report_test_coverage.yml
vendored
2
.github/workflows/report_test_coverage.yml
vendored
@ -8,7 +8,7 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
report-test-coverage:
|
report-test-coverage:
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
if: ${{ github.event.workflow_run.conclusion == 'success' }}
|
if: ${{ github.event.workflow_run.conclusion == 'success' }}
|
||||||
steps:
|
steps:
|
||||||
- name: "Download artifact"
|
- name: "Download artifact"
|
||||||
|
2
.github/workflows/run_chatgpt_examples.yml
vendored
2
.github/workflows/run_chatgpt_examples.yml
vendored
@ -19,7 +19,7 @@ jobs:
|
|||||||
github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
|
github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
|
||||||
runs-on: [self-hosted, gpu]
|
runs-on: [self-hosted, gpu]
|
||||||
container:
|
container:
|
||||||
image: hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
||||||
options: --gpus all --rm -v /data/scratch/examples-data:/data/scratch/examples-data --shm-size=10.24gb
|
options: --gpus all --rm -v /data/scratch/examples-data:/data/scratch/examples-data --shm-size=10.24gb
|
||||||
timeout-minutes: 60
|
timeout-minutes: 60
|
||||||
defaults:
|
defaults:
|
||||||
|
2
.github/workflows/run_chatgpt_unit_tests.yml
vendored
2
.github/workflows/run_chatgpt_unit_tests.yml
vendored
@ -19,7 +19,7 @@ jobs:
|
|||||||
github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
|
github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
|
||||||
runs-on: [self-hosted, gpu]
|
runs-on: [self-hosted, gpu]
|
||||||
container:
|
container:
|
||||||
image: hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
||||||
options: --gpus all --rm -v /data/scratch/examples-data:/data/scratch/examples-data
|
options: --gpus all --rm -v /data/scratch/examples-data:/data/scratch/examples-data
|
||||||
timeout-minutes: 30
|
timeout-minutes: 30
|
||||||
defaults:
|
defaults:
|
||||||
|
@ -19,7 +19,7 @@ jobs:
|
|||||||
github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
|
github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
|
||||||
runs-on: [self-hosted, gpu]
|
runs-on: [self-hosted, gpu]
|
||||||
container:
|
container:
|
||||||
image: hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
image: image-cloud.luchentech.com/hpcaitech/pytorch-cuda:2.2.2-12.1.0
|
||||||
volumes:
|
volumes:
|
||||||
- /data/scratch/test_data_colossalqa:/data/scratch/test_data_colossalqa
|
- /data/scratch/test_data_colossalqa:/data/scratch/test_data_colossalqa
|
||||||
- /data/scratch/llama-tiny:/data/scratch/llama-tiny
|
- /data/scratch/llama-tiny:/data/scratch/llama-tiny
|
||||||
|
2
.github/workflows/submodule.yml
vendored
2
.github/workflows/submodule.yml
vendored
@ -7,7 +7,7 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
sync-submodule:
|
sync-submodule:
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
if: github.repository == 'hpcaitech/ColossalAI'
|
if: github.repository == 'hpcaitech/ColossalAI'
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
|
2
.github/workflows/translate_comment.yml
vendored
2
.github/workflows/translate_comment.yml
vendored
@ -7,7 +7,7 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
runs-on: ubuntu-latest
|
runs-on: [self-hosted, ubuntu-latest]
|
||||||
steps:
|
steps:
|
||||||
- uses: usthe/issues-translate-action@v2.7
|
- uses: usthe/issues-translate-action@v2.7
|
||||||
with:
|
with:
|
||||||
|
Loading…
Reference in New Issue
Block a user