ci(test_api_server): update workflow #94

Workflow file for this run
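# CI for whisper-api-server: build the wasm32-wasip1 binary, install WasmEdge
# with its WASI-NN whisper plugin, start the server against a whisper.cpp
# model, and exercise the /v1/audio endpoints with curl and Hurl on one Linux
# and three macOS runners.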

name: Test Whisper API Server
on:
  push:
    branches:
      - dev
      - main
      - release-*
      - feat-*
      - ci-*
      - refactor-*
      - fix-*
      - test-*
    paths:
      - '.github/workflows/test_api_server.yml'
      - '**/Cargo.toml'
      - '**/*.rs'
      - '**/*.sh'
      - '**/.cargo/config.toml'
      - 'tests/*.hurl'
  pull_request:
    branches:
      - dev
      - main
    types: [opened, synchronize, reopened]
    paths:
      - '.github/workflows/**'
      - '**/Cargo.toml'
      - '**/*.rs'
      - '**/*.sh'
      - '**/.cargo/config.toml'
      - 'tests/*.hurl'
jobs:
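  # The jobs are chained with `needs`, so they run one at a time:
  # ubuntu -> macos-13 -> macos-14 -> macos-15.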
  test-api-server-ubuntu:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        wasmedge_version: [0.14.1]
    steps:
      - name: Clone project
        id: checkout
        uses: actions/checkout@v3
      - name: Install Rust-nightly
        uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          toolchain: nightly
          target: wasm32-wasip1
          components: rustfmt, clippy
      - name: Install Rust-stable
        uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          target: wasm32-wasip1
      - name: Install WasmEdge
        run: |
          curl -sSf https://raw.githubusercontent.com/WasmEdge/WasmEdge/master/utils/install_v2.sh | bash -s -- -v ${{ matrix.wasmedge_version }}
          ls -al $HOME/.wasmedge/bin
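      # The WASI-NN whisper plugin provides the inference backend; it has to
      # end up in $HOME/.wasmedge/plugin/ so the WasmEdge runtime can load it.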
      - name: Deploy whisper plugin
        run: |
          curl -LO https://github.com/second-state/WasmEdge-plugin-registry/releases/download/wasmedge-0.14.1-plugin-lts-d259eed/WasmEdge-plugin-wasi_nn-whisper-0.14.1-ubuntu20.04_x86_64.tar.gz
          tar -xzvf WasmEdge-plugin-wasi_nn-whisper-0.14.1-ubuntu20.04_x86_64.tar.gz
          mv libwasmedgePluginWasiNN.so $HOME/.wasmedge/plugin/
          ls -al
          ls -al $HOME/.wasmedge/plugin/
      - name: Install Hurl
        run: |
          curl --location --remote-name https://github.com/Orange-OpenSource/hurl/releases/download/5.0.1/hurl_5.0.1_amd64.deb
          sudo apt update && sudo apt install ./hurl_5.0.1_amd64.deb
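      # Note: the wasm32-wasip1 target is assumed to be set in .cargo/config.toml
      # (it is listed in the trigger paths above); the cfg flags are presumably
      # what select the WasmEdge/tokio_unstable code paths in the crate.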
      - name: Build whisper-api-server on linux
        env:
          RUSTFLAGS: "--cfg wasmedge --cfg tokio_unstable"
        run: |
          cargo build --release
          cp target/wasm32-wasip1/release/whisper-api-server.wasm ./whisper-api-server.wasm
      - name: Download model and audio files
        run: |
          curl -LO https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v2-q5_0.bin
          ls -al
          cp ./data/*.wav ./tests/
          ls -al ./tests
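      # Run the server in the background; stdout/stderr go to the log file that
      # later steps cat for debugging. The sleep gives it time to load the
      # quantized large-v2 model before requests arrive.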
      - name: Start whisper-api-server
        run: |
          nohup $HOME/.wasmedge/bin/wasmedge --dir .:. whisper-api-server.wasm -m ggml-large-v2-q5_0.bin > ./start-llamaedge.log 2>&1 &
          sleep 5
          cat start-llamaedge.log
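      # Quick smoke test of /v1/audio/translations with a raw multipart request
      # before the declarative Hurl suites below.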
      - name: Test translations with curl
        run: |
          cd ./tests
          curl --location 'http://localhost:8080/v1/audio/translations' \
            --header 'Content-Type: multipart/form-data' \
            --form 'file=@"test_cn.wav"' \
            --form 'language="zh"'
          cd -
          cat start-llamaedge.log
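      # The .hurl files exercise the same endpoints declaratively; --max-time
      # bounds each request so a hung server fails the step instead of stalling
      # the job.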
      - name: Test translations endpoint
        run: |
          ls -al ./tests
          hurl --test --jobs 1 ./tests/translations.hurl --max-time 30s
      - name: Test transcriptions endpoint
        run: |
          ls -al ./tests
          hurl --test --jobs 1 ./tests/transcriptions.hurl --max-time 30s
      - name: Stop whisper-api-server
        run: |
          pkill -f wasmedge
  test-api-server-macos-13:
    runs-on: macos-13
    needs: test-api-server-ubuntu
    strategy:
      matrix:
        wasmedge_version: [0.14.1]
    steps:
      - name: Clone project
        id: checkout
        uses: actions/checkout@v3
      - name: Install Rust-nightly
        uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          toolchain: nightly
          target: wasm32-wasip1
          components: rustfmt, clippy
      - name: Install Rust-stable
        uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          target: wasm32-wasip1
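      # On macOS a WASI-capable clang/sysroot is needed to cross-compile the
      # C/C++ dependencies; wasi-sdk provides it (see the CC override in the
      # build step below).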
      - name: Download wasi-sdk for x86_64-macos
        run: |
          curl -LO https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-24/wasi-sdk-24.0-x86_64-macos.tar.gz
          tar -xzvf wasi-sdk-24.0-x86_64-macos.tar.gz
          mv wasi-sdk-24.0-x86_64-macos wasi-sdk-24.0
      - name: Install WasmEdge
        run: |
          curl -sSf https://raw.githubusercontent.com/WasmEdge/WasmEdge/master/utils/install_v2.sh | bash -s -- -v ${{ matrix.wasmedge_version }}
          ls -al $HOME/.wasmedge/bin
      - name: Deploy whisper plugin
        run: |
          curl -LO https://github.com/WasmEdge/WasmEdge/releases/download/${{ matrix.wasmedge_version }}/WasmEdge-plugin-wasi_nn-whisper-${{ matrix.wasmedge_version }}-darwin_x86_64.tar.gz
          tar -xzvf WasmEdge-plugin-wasi_nn-whisper-${{ matrix.wasmedge_version }}-darwin_x86_64.tar.gz
          mv libwasmedgePluginWasiNN.dylib $HOME/.wasmedge/plugin/
          ls -al
          ls -al $HOME/.wasmedge/plugin/
      - name: Install Hurl
        run: |
          brew install hurl
      - name: Build whisper-api-server on macos-13
        env:
          WASI_SDK_PATH: /Users/runner/work/whisper-api-server/whisper-api-server/wasi-sdk-24.0
          CC: "/Users/runner/work/whisper-api-server/whisper-api-server/wasi-sdk-24.0/bin/clang --sysroot=/Users/runner/work/whisper-api-server/whisper-api-server/wasi-sdk-24.0/share/wasi-sysroot"
          RUSTFLAGS: "--cfg wasmedge --cfg tokio_unstable"
        run: |
          cargo build --release
          cp target/wasm32-wasip1/release/whisper-api-server.wasm ./whisper-api-server.wasm
      - name: Download model and audio files
        run: |
          curl -LO https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v2-q5_0.bin
          ls -al
          cp ./data/*.wav ./tests/
          ls -al ./tests
      - name: Start whisper-api-server
        run: |
          nohup $HOME/.wasmedge/bin/wasmedge --dir .:. whisper-api-server.wasm -m ggml-large-v2-q5_0.bin > ./start-llamaedge.log 2>&1 &
          sleep 30
          cat start-llamaedge.log
      - name: Test translations endpoint
        run: |
          ls -al ./tests
          hurl --test --jobs 1 ./tests/translations.hurl
      - name: Test transcriptions endpoint
        run: |
          ls -al ./tests
          hurl --test --jobs 1 ./tests/transcriptions.hurl
      - name: Stop whisper-api-server
        run: |
          pkill -f wasmedge
  test-api-server-macos-14:
    runs-on: macos-14
    needs: test-api-server-macos-13
    strategy:
      matrix:
        wasmedge_version: [0.14.1]
    steps:
      - name: Clone project
        id: checkout
        uses: actions/checkout@v3
      - name: Install Rust-nightly
        uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          toolchain: nightly
          target: wasm32-wasip1
          components: rustfmt, clippy
      - name: Install Rust-stable
        uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          target: wasm32-wasip1
      - name: Download wasi-sdk for arm64-macos
        run: |
          curl -LO https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-24/wasi-sdk-24.0-arm64-macos.tar.gz
          tar -xzvf wasi-sdk-24.0-arm64-macos.tar.gz
          mv wasi-sdk-24.0-arm64-macos wasi-sdk-24.0
      - name: Install WasmEdge
        run: |
          curl -sSf https://raw.githubusercontent.com/WasmEdge/WasmEdge/master/utils/install_v2.sh | bash -s -- -v ${{ matrix.wasmedge_version }}
          ls -al $HOME/.wasmedge/bin
      - name: Deploy whisper plugin
        run: |
          curl -LO https://github.com/WasmEdge/WasmEdge/releases/download/${{ matrix.wasmedge_version }}/WasmEdge-plugin-wasi_nn-whisper-${{ matrix.wasmedge_version }}-darwin_arm64.tar.gz
          tar -xzvf WasmEdge-plugin-wasi_nn-whisper-${{ matrix.wasmedge_version }}-darwin_arm64.tar.gz
          mv libwasmedgePluginWasiNN.dylib $HOME/.wasmedge/plugin/
          ls -al
          ls -al $HOME/.wasmedge/plugin/
      - name: Install Hurl
        run: |
          brew install hurl
      - name: Build whisper-api-server on macos-14
        env:
          WASI_SDK_PATH: /Users/runner/work/whisper-api-server/whisper-api-server/wasi-sdk-24.0
          CC: "/Users/runner/work/whisper-api-server/whisper-api-server/wasi-sdk-24.0/bin/clang --sysroot=/Users/runner/work/whisper-api-server/whisper-api-server/wasi-sdk-24.0/share/wasi-sysroot"
          RUSTFLAGS: "--cfg wasmedge --cfg tokio_unstable"
        run: |
          cargo build --release
          cp target/wasm32-wasip1/release/whisper-api-server.wasm ./whisper-api-server.wasm
      - name: Download model and audio files
        run: |
          curl -LO https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v2-q5_0.bin
          ls -al
          cp ./data/*.wav ./tests/
          ls -al ./tests
      - name: Start whisper-api-server
        run: |
          nohup $HOME/.wasmedge/bin/wasmedge --dir .:. whisper-api-server.wasm -m ggml-large-v2-q5_0.bin > ./start-llamaedge.log 2>&1 &
          sleep 30
          cat start-llamaedge.log
      - name: Test translations endpoint
        run: |
          ls -al ./tests
          hurl --test --jobs 1 ./tests/translations.hurl
      - name: Test transcriptions endpoint
        run: |
          ls -al ./tests
          hurl --test --jobs 1 ./tests/transcriptions.hurl
      - name: Stop whisper-api-server
        run: |
          pkill -f wasmedge
  test-api-server-macos-15:
    runs-on: macos-15
    needs: test-api-server-macos-14
    strategy:
      matrix:
        wasmedge_version: [0.14.1]
    steps:
      - name: Clone project
        id: checkout
        uses: actions/checkout@v3
      - name: Install Rust-nightly
        uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          toolchain: nightly
          target: wasm32-wasip1
          components: rustfmt, clippy
      - name: Install Rust-stable
        uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          target: wasm32-wasip1
      - name: Download wasi-sdk for arm64-macos
        run: |
          curl -LO https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-24/wasi-sdk-24.0-arm64-macos.tar.gz
          tar -xzvf wasi-sdk-24.0-arm64-macos.tar.gz
          mv wasi-sdk-24.0-arm64-macos wasi-sdk-24.0
      - name: Install WasmEdge
        run: |
          curl -sSf https://raw.githubusercontent.com/WasmEdge/WasmEdge/master/utils/install_v2.sh | bash -s -- -v ${{ matrix.wasmedge_version }}
          ls -al $HOME/.wasmedge/bin
      - name: Deploy whisper plugin
        run: |
          curl -LO https://github.com/WasmEdge/WasmEdge/releases/download/${{ matrix.wasmedge_version }}/WasmEdge-plugin-wasi_nn-whisper-${{ matrix.wasmedge_version }}-darwin_arm64.tar.gz
          tar -xzvf WasmEdge-plugin-wasi_nn-whisper-${{ matrix.wasmedge_version }}-darwin_arm64.tar.gz
          mv libwasmedgePluginWasiNN.dylib $HOME/.wasmedge/plugin/
          ls -al
          ls -al $HOME/.wasmedge/plugin/
      - name: Install Hurl
        run: |
          brew install hurl
      - name: Build whisper-api-server on macos-15
        env:
          WASI_SDK_PATH: /Users/runner/work/whisper-api-server/whisper-api-server/wasi-sdk-24.0
          CC: "/Users/runner/work/whisper-api-server/whisper-api-server/wasi-sdk-24.0/bin/clang --sysroot=/Users/runner/work/whisper-api-server/whisper-api-server/wasi-sdk-24.0/share/wasi-sysroot"
          RUSTFLAGS: "--cfg wasmedge --cfg tokio_unstable"
        run: |
          cargo build --release
          cp target/wasm32-wasip1/release/whisper-api-server.wasm ./whisper-api-server.wasm
      - name: Download model and audio files
        run: |
          curl -LO https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-large-v2-q5_0.bin
          ls -al
          cp ./data/*.wav ./tests/
          ls -al ./tests
      - name: Start whisper-api-server
        run: |
          nohup $HOME/.wasmedge/bin/wasmedge --dir .:. whisper-api-server.wasm -m ggml-large-v2-q5_0.bin > ./start-llamaedge.log 2>&1 &
          sleep 30
          cat start-llamaedge.log
      - name: Test translations endpoint
        run: |
          ls -al ./tests
          hurl --test --jobs 1 ./tests/translations.hurl
      - name: Test transcriptions endpoint
        run: |
          ls -al ./tests
          hurl --test --jobs 1 ./tests/transcriptions.hurl
      - name: Stop whisper-api-server
        run: |
          pkill -f wasmedge