name: Benchmark Nightly
on:
  workflow_dispatch:
  schedule:
    - cron: '0 1 * * *'
permissions:
  id-token: write
  contents: read
jobs:
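  # create-runners provisions a fresh self-hosted G6 GPU runner: it requests a
  # short-lived runner registration token from the GitHub API, passes it to
  # start_instance.sh, and exposes the resulting instance id so the
  # stop-g6-runners job at the bottom can terminate the machine when the run ends.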
  create-runners:
    runs-on: [ self-hosted, scheduler ]
    steps:
      - name: Create new G6 instance
        id: create_gpu
        run: |
          cd /home/ubuntu/djl_benchmark_script/scripts
          token=$( curl -X POST -H "Authorization: token ${{ secrets.ACTION_RUNNER_PERSONAL_TOKEN }}" \
            https://api.github.com/repos/deepjavalibrary/djl-serving/actions/runners/registration-token \
            --fail \
            | jq '.token' | tr -d '"' )
          ./start_instance.sh action_g6 $token djl-serving
    outputs:
      gpu_instance_id: ${{ steps.create_gpu.outputs.action_g6_instance_id }}
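  # The three g5-* jobs below fan out to the reusable instant_benchmark.yml
  # workflow. Each differs only in the benchmark template it runs and the EC2
  # instance type it targets; results are recorded to CloudWatch.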
  g5-2xl:
    uses: ./.github/workflows/instant_benchmark.yml
    secrets: inherit
    with:
      running_template: ./benchmark/nightly/g5-2xl.txt
      instance: g5.2xlarge
      record: cloudwatch
  g5-12xl:
    uses: ./.github/workflows/instant_benchmark.yml
    secrets: inherit
    with:
      running_template: ./benchmark/nightly/g5-12xl.txt
      instance: g5.12xlarge
      record: cloudwatch
  g5-48xl:
    uses: ./.github/workflows/instant_benchmark.yml
    secrets: inherit
    with:
      running_template: ./benchmark/nightly/g5-48xl.txt
      instance: g5.48xlarge
      record: cloudwatch
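  # handler-performance-test runs the pytest-based handler performance suite on
  # the G6 runner created by create-runners. The matrix currently has a single
  # entry (TestGPUHandlerPerformance on g6).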
  handler-performance-test:
    runs-on: [ self-hosted, g6 ]
    timeout-minutes: 60
    needs: create-runners
    strategy:
      fail-fast: false
      matrix:
        test:
          - test: TestGPUHandlerPerformance
            instance: g6
    steps:
      - uses: actions/checkout@v4
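      # Pre-test cleanup: reclaim disk by pruning Docker images/volumes and the
      # cached Corretto JDK, then wait for any in-flight apt/dpkg operations to
      # release their locks before continuing.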
      - name: Clean env
        run: |
          yes | docker system prune -a --volumes
          sudo rm -rf /home/ubuntu/actions-runner/_work/_tool/Java_Corretto_jdk/
          echo "wait dpkg lock..."
          while sudo fuser /var/{lib/{dpkg,apt/lists},cache/apt/archives}/lock >/dev/null 2>&1; do sleep 5; done
      - name: Set up Python3
        uses: actions/setup-python@v5
        with:
          python-version: '3.10.x'
      - name: Install pip dependencies
        run: pip3 install pytest requests "numpy<2" pillow huggingface_hub
      - name: Install torch
        # torch is used to query the CUDA capability of the current device so tests
        # can be run selectively; the exact torch version doesn't matter much.
        run: |
          pip3 install torch==2.3.0
      - name: Install awscurl
        working-directory: tests/integration
        run: |
          wget https://publish.djl.ai/awscurl/awscurl
          chmod +x awscurl
          mkdir outputs
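      # The Test step runs pytest filtered to the matrix test name with the nightly
      # DJL version selected, roughly equivalent to:
      #   TEST_DJL_VERSION=nightly python -m pytest -k TestGPUHandlerPerformance tests.py
      # awscurl (downloaded above) is assumed to be the benchmarking client the tests
      # drive; its results land in outputs/, which the On Failure step dumps for debugging.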
      - name: Test
        working-directory: tests/integration
        env:
          TEST_DJL_VERSION: nightly
        run: |
          python -m pytest -k ${{ matrix.test.test }} tests.py
      - name: Cleanup
        working-directory: tests/integration
        run: |
          rm -rf outputs
          rm awscurl
      - name: On Failure
        if: ${{ failure() }}
        working-directory: tests/integration
        run: |
          for file in outputs/*; do if [ -f "$file" ]; then echo "Contents of $file:"; cat "$file"; echo; fi; done
          sudo rm -rf outputs && sudo rm -rf models
          rm awscurl
          ./remove_container.sh
      - name: Upload test logs
        if: ${{ always() }}
        uses: actions/upload-artifact@v4
        with:
          name: test-${{ matrix.test.test }}-logs
          path: tests/integration/all_logs/
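  # stop-g6-runners always runs, even when the tests fail, and tears down the G6
  # instance created by create-runners using the instance id passed through the
  # job output.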
  stop-g6-runners:
    if: always()
    runs-on: [ self-hosted, scheduler ]
    needs: [ create-runners, handler-performance-test ]
    steps:
      - name: Stop g6 instances
        run: |
          cd /home/ubuntu/djl_benchmark_script/scripts
          instance_id=${{ needs.create-runners.outputs.gpu_instance_id }}
          ./stop_instance.sh $instance_id