name: Run ChatGPT examples

on:
  pull_request:
    types: [synchronize, opened, reopened]
    paths:
      - "applications/Chat/coati/**"
      - "applications/Chat/requirements.txt"
      - "applications/Chat/setup.py"
      - "applications/Chat/examples/**"

jobs:
  tests:
    name: Run ChatGPT examples
    if: |
      github.event.pull_request.draft == false &&
      github.base_ref == 'main' &&
      github.event.pull_request.base.repo.full_name == 'hpcaitech/ColossalAI'
    runs-on: [self-hosted, gpu]
    container:
      image: hpcaitech/pytorch-cuda:1.12.0-11.3.0
      options: --gpus all --rm -v /data/scratch/github_actions/chat:/data/scratch/github_actions/chat --shm-size=10.24gb
    timeout-minutes: 30
    defaults:
      run:
        shell: bash
    steps:
      - name: Checkout ColossalAI
        uses: actions/checkout@v2

      - name: Install ChatGPT
        run: |
          cd applications/Chat
          pip install -v .
          pip install -r examples/requirements.txt

      - name: Install Transformers
        run: |
          pip install transformers==4.30.2

      - name: Execute Examples
        run: |
          cd applications/Chat
          rm -rf ~/.cache/colossalai
          ./tests/test_inference.sh
          ./tests/test_benchmarks.sh
          ./tests/test_train.sh
        env:
          NCCL_SHM_DISABLE: 1
          MAX_JOBS: 8
          SFT_DATASET: /data/scratch/github_actions/chat/data.json
          PROMPT_DATASET: /data/scratch/github_actions/chat/prompts_en.jsonl
          PRETRAIN_DATASET: /data/scratch/github_actions/chat/alpaca_data.json