Migrate to Conan 2.x #2765

Workflow file for this run

name: osrm-backend CI
on:
push:
branches:
- master
tags:
- v[1-9]+.[0-9]+.[0-9]+
- v[1-9]+.[0-9]+.[0-9]+-[a-zA-Z]+.[0-9]+
- v[1-9]+.[0-9]+-[0-9a-zA-Z]+
pull_request:
branches:
- master
env:
CCACHE_TEMPDIR: /tmp/.ccache-temp
CCACHE_COMPRESS: 1
CASHER_TIME_OUT: 599 # one second less than 10m to avoid 10m timeout error: https://github.com/Project-OSRM/osrm-backend/issues/2742
CMAKE_VERSION: 3.21.2
ENABLE_NODE_BINDINGS: "ON"
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
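# one run per PR (or per ref for pushes); a newer run cancels the one still in progress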
jobs:
windows-release-node:
needs: format-taginfo-docs
runs-on: windows-2022
continue-on-error: false
env:
BUILD_TYPE: Release
steps:
- uses: actions/checkout@v4
- run: cmake --version
- uses: actions/setup-node@v4
with:
node-version: 18
- run: node --version
- run: npm --version
- name: Prepare environment
shell: bash
run: |
PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)")
echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV
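# PUBLISH computed above gates the 'Publish Node package' step below: it is 'On' only when the pushed tag matches the version in package.json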
- run: npm install --ignore-scripts
- run: npm link --ignore-scripts
- name: Build
shell: bash
run: |
mkdir build
cd build
python3 -m venv .venv
source .venv/Scripts/Activate
python3 -m pip install conan==2.7.1
conan profile detect --force
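# the venv keeps the pinned conan==2.7.1 separate from the runner's Python;
# 'conan profile detect --force' is the Conan 2.x way to (re)generate the default profile from the detected toolchain before CMake invokes Conan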
cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_CONAN=ON -DENABLE_NODE_BINDINGS=ON ..
cmake --build . --config Release
# TODO: MSVC runs out of memory when building our tests
# - name: Run tests
# shell: bash
# run: |
# cd build
# cmake --build . --config Release --target tests
# # TODO: run tests
# - name: Run node tests
# shell: bash
# run: |
# ./lib/binding/osrm-extract.exe -p profiles/car.lua test/data/monaco.osm.pbf
# mkdir -p test/data/ch
# cp test/data/monaco.osrm* test/data/ch/
# ./lib/binding/osrm-contract.exe test/data/ch/monaco.osrm
# ./lib/binding/osrm-datastore.exe test/data/ch/monaco.osrm
# node test/nodejs/index.js
- name: Build Node package
shell: bash
run: ./scripts/ci/node_package.sh
- name: Publish Node package
if: ${{ env.PUBLISH == 'On' }}
uses: ncipollo/release-action@v1
with:
allowUpdates: true
artifactErrorsFailBuild: true
artifacts: build/stage/**/*.tar.gz
omitBody: true
omitBodyDuringUpdate: true
omitName: true
omitNameDuringUpdate: true
replacesArtifacts: true
token: ${{ secrets.GITHUB_TOKEN }}
format-taginfo-docs:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- name: Use Node.js
uses: actions/setup-node@v4
with:
node-version: 18
- name: Enable Node.js cache
uses: actions/cache@v4
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
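# the npm cache key above is tied to the package-lock.json hash; restore-keys lets a run fall back to an older cache when the lockfile changes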
- name: Prepare environment
run: |
npm ci --ignore-scripts
clang-format-15 --version
- name: Run checks
run: |
./scripts/check_taginfo.py taginfo.json profiles/car.lua
./scripts/format.sh && ./scripts/error_on_dirty.sh
node ./scripts/validate_changelog.js
npm run docs && ./scripts/error_on_dirty.sh
npm audit --production
docker-image-matrix:
strategy:
matrix:
docker-base-image: ["debian", "alpine"]
needs: format-taginfo-docs
runs-on: ubuntu-22.04
continue-on-error: false
steps:
- name: Check out the repo
uses: actions/checkout@v4
- name: Enable osm.pbf cache
uses: actions/cache@v4
with:
path: berlin-latest.osm.pbf
key: v1-berlin-osm-pbf
restore-keys: |
v1-berlin-osm-pbf
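# the key never changes, so berlin-latest.osm.pbf is downloaded once and restored from the cache on later runs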
- name: Docker build
run: |
docker build -t osrm-backend-local -f docker/Dockerfile-${{ matrix.docker-base-image }} .
- name: Test Docker image
run: |
if [ ! -f "${PWD}/berlin-latest.osm.pbf" ]; then
wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf
fi
TAG=osrm-backend-local
# when `--memory-swap` equals `--memory`, the container is not allowed to use swap
# see https://docs.docker.com/config/containers/resource_constraints/#--memory-swap-details
MEMORY_ARGS="--memory=1g --memory-swap=1g"
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-extract --dump-nbg-graph -p /opt/car.lua /data/berlin-latest.osm.pbf
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-components /data/berlin-latest.osrm.nbg /data/berlin-latest.geojson
if [ ! -s "${PWD}/berlin-latest.geojson" ]
then
>&2 echo "No berlin-latest.geojson found"
exit 1
fi
# remove `.osrm.nbg` to check that the whole pipeline works without it
rm -rf "${PWD}/berlin-latest.osrm.nbg"
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-partition /data/berlin-latest.osrm
docker run $MEMORY_ARGS -t -v "${PWD}:/data" "${TAG}" osrm-customize /data/berlin-latest.osrm
docker run $MEMORY_ARGS --name=osrm-container -t -p 5000:5000 -v "${PWD}:/data" "${TAG}" osrm-routed --algorithm mld /data/berlin-latest.osrm &
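# the health check below polls until osrm-routed inside the container accepts connections;
# --retry-all-errors makes curl retry even on connection refused while the server is still starting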
curl --retry-delay 3 --retry 10 --retry-all-errors "http://127.0.0.1:5000/route/v1/driving/13.388860,52.517037;13.385983,52.496891?steps=true"
docker stop osrm-container
build-test-publish:
needs: format-taginfo-docs
strategy:
matrix:
include:
- name: gcc-13-debug-cov
continue-on-error: false
node: 20
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Debug
CCOMPILER: gcc-13
CUCUMBER_TIMEOUT: 20000
CXXCOMPILER: g++-13
ENABLE_COVERAGE: ON
- name: clang-18-debug-asan-ubsan
continue-on-error: false
node: 20
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Debug
CCOMPILER: clang-18
CUCUMBER_TIMEOUT: 20000
CXXCOMPILER: clang++-18
ENABLE_SANITIZER: ON
TARGET_ARCH: x86_64-asan-ubsan
OSRM_CONNECTION_RETRIES: 10
OSRM_CONNECTION_EXP_BACKOFF_COEF: 1.5
- name: clang-18-release
continue-on-error: false
node: 18
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CCOMPILER: clang-18
CXXCOMPILER: clang++-18
CUCUMBER_TIMEOUT: 60000
ENABLE_LTO: OFF
- name: clang-18-debug
continue-on-error: false
node: 18
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Debug
CCOMPILER: clang-18
CXXCOMPILER: clang++-18
CUCUMBER_TIMEOUT: 60000
ENABLE_LTO: OFF
- name: clang-18-debug-clang-tidy
continue-on-error: false
node: 18
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Debug
CCOMPILER: clang-18
CXXCOMPILER: clang++-18
CUCUMBER_TIMEOUT: 60000
ENABLE_CLANG_TIDY: ON
- name: clang-17-release
continue-on-error: false
node: 18
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CCOMPILER: clang-17
CXXCOMPILER: clang++-17
CUCUMBER_TIMEOUT: 60000
ENABLE_LTO: OFF
- name: clang-16-release
continue-on-error: false
node: 18
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CCOMPILER: clang-16
CXXCOMPILER: clang++-16
CUCUMBER_TIMEOUT: 60000
ENABLE_LTO: OFF
- name: conan-linux-debug-asan-ubsan
continue-on-error: false
node: 18
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CCOMPILER: clang-18
CXXCOMPILER: clang++-18
ENABLE_CONAN: ON
ENABLE_SANITIZER: ON
ENABLE_LTO: OFF
- name: conan-linux-release
continue-on-error: false
node: 18
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CCOMPILER: clang-18
CXXCOMPILER: clang++-18
ENABLE_CONAN: ON
ENABLE_LTO: OFF
- name: gcc-14-release
continue-on-error: false
node: 20
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CCOMPILER: gcc-14
CXXCOMPILER: g++-14
CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
- name: gcc-13-release
continue-on-error: false
node: 20
runs-on: ubuntu-24.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CCOMPILER: gcc-13
CXXCOMPILER: g++-13
CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
- name: gcc-12-release
continue-on-error: false
node: 20
runs-on: ubuntu-22.04
BUILD_TOOLS: ON
BUILD_TYPE: Release
CCOMPILER: gcc-12
CXXCOMPILER: g++-12
CXXFLAGS: '-Wno-array-bounds -Wno-uninitialized'
- name: conan-linux-release-node
build_node_package: true
continue-on-error: false
node: 20
runs-on: ubuntu-24.04
BUILD_TYPE: Release
CCOMPILER: clang-16
CXXCOMPILER: clang++-16
ENABLE_CONAN: ON
NODE_PACKAGE_TESTS_ONLY: ON
- name: conan-linux-debug-node
build_node_package: true
continue-on-error: false
node: 20
runs-on: ubuntu-24.04
BUILD_TYPE: Debug
CCOMPILER: clang-16
CXXCOMPILER: clang++-16
ENABLE_CONAN: ON
NODE_PACKAGE_TESTS_ONLY: ON
- name: conan-macos-x64-release-node
build_node_package: true
continue-on-error: true
node: 20
runs-on: macos-13 # x86_64
BUILD_TYPE: Release
CCOMPILER: clang
CXXCOMPILER: clang++
CUCUMBER_TIMEOUT: 60000
ENABLE_ASSERTIONS: ON
ENABLE_CONAN: ON
- name: conan-macos-arm64-release-node
build_node_package: true
continue-on-error: true
node: 20
runs-on: macos-14 # arm64
BUILD_TYPE: Release
CCOMPILER: clang
CXXCOMPILER: clang++
CUCUMBER_TIMEOUT: 60000
ENABLE_ASSERTIONS: ON
ENABLE_CONAN: ON
name: ${{ matrix.name }}
continue-on-error: ${{ matrix.continue-on-error }}
runs-on: ${{ matrix.runs-on }}
env:
BUILD_TOOLS: ${{ matrix.BUILD_TOOLS }}
BUILD_TYPE: ${{ matrix.BUILD_TYPE }}
BUILD_SHARED_LIBS: ${{ matrix.BUILD_SHARED_LIBS }}
CCOMPILER: ${{ matrix.CCOMPILER }}
CFLAGS: ${{ matrix.CFLAGS }}
CUCUMBER_TIMEOUT: ${{ matrix.CUCUMBER_TIMEOUT }}
CXXCOMPILER: ${{ matrix.CXXCOMPILER }}
CXXFLAGS: ${{ matrix.CXXFLAGS }}
ENABLE_ASSERTIONS: ${{ matrix.ENABLE_ASSERTIONS }}
ENABLE_CLANG_TIDY: ${{ matrix.ENABLE_CLANG_TIDY }}
ENABLE_COVERAGE: ${{ matrix.ENABLE_COVERAGE }}
ENABLE_CONAN: ${{ matrix.ENABLE_CONAN }}
ENABLE_SANITIZER: ${{ matrix.ENABLE_SANITIZER }}
NODE_PACKAGE_TESTS_ONLY: ${{ matrix.NODE_PACKAGE_TESTS_ONLY }}
TARGET_ARCH: ${{ matrix.TARGET_ARCH }}
OSRM_CONNECTION_RETRIES: ${{ matrix.OSRM_CONNECTION_RETRIES }}
OSRM_CONNECTION_EXP_BACKOFF_COEF: ${{ matrix.OSRM_CONNECTION_EXP_BACKOFF_COEF }}
ENABLE_LTO: ${{ matrix.ENABLE_LTO }}
steps:
- uses: actions/checkout@v4
- name: Build machine architecture
run: uname -m
- name: Use Node.js
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node }}
- name: Enable Node.js cache
uses: actions/cache@v4
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
- name: Enable compiler cache
uses: actions/cache@v4
with:
path: ~/.ccache
key: ccache-${{ matrix.name }}-${{ github.sha }}
restore-keys: |
ccache-${{ matrix.name }}-
- name: Enable Conan cache
uses: actions/cache@v4
with:
path: ~/.conan2
key: v10-conan-${{ matrix.name }}-${{ github.sha }}
restore-keys: |
v10-conan-${{ matrix.name }}-
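# ~/.conan2 is where Conan 2.x keeps its cache (Conan 1.x used ~/.conan); bump the v10- prefix to invalidate it manually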
- name: Enable test cache
uses: actions/cache@v4
with:
path: ${{github.workspace}}/test/cache
key: v4-test-${{ matrix.name }}-${{ github.sha }}
restore-keys: |
v4-test-${{ matrix.name }}-
- name: Prepare environment
run: |
echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV
mkdir -p $HOME/.ccache
PACKAGE_JSON_VERSION=$(node -e "console.log(require('./package.json').version)")
echo PUBLISH=$([[ "${GITHUB_REF:-}" == "refs/tags/v${PACKAGE_JSON_VERSION}" ]] && echo "On" || echo "Off") >> $GITHUB_ENV
echo "OSRM_INSTALL_DIR=${GITHUB_WORKSPACE}/install-osrm" >> $GITHUB_ENV
echo "OSRM_BUILD_DIR=${GITHUB_WORKSPACE}/build-osrm" >> $GITHUB_ENV
if [[ "$ENABLE_SANITIZER" == 'ON' ]]; then
# We can only set this after checkout once we know the workspace directory
echo "LSAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/leaksanitizer.conf" >> $GITHUB_ENV
echo "UBSAN_OPTIONS=symbolize=1:halt_on_error=1:print_stacktrace=1:suppressions=${GITHUB_WORKSPACE}/scripts/ci/undefinedsanitizer.conf" >> $GITHUB_ENV
echo "ASAN_OPTIONS=print_suppressions=0:suppressions=${GITHUB_WORKSPACE}/scripts/ci/addresssanitizer.conf" >> $GITHUB_ENV
fi
if [[ "${RUNNER_OS}" == "Linux" ]]; then
echo "JOBS=$((`nproc` + 1))" >> $GITHUB_ENV
elif [[ "${RUNNER_OS}" == "macOS" ]]; then
echo "JOBS=$((`sysctl -n hw.ncpu` + 1))" >> $GITHUB_ENV
fi
# See: https://github.com/actions/toolkit/issues/946#issuecomment-1590016041
# We need this so that tar can write to system folders while restoring the cached Boost below
- name: Give tar root ownership
if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON'
run: sudo chown root /bin/tar && sudo chmod u+s /bin/tar
- name: Cache Boost
if: runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON'
id: cache-boost
uses: actions/cache@v4
with:
path: |
/usr/local/include/boost
/usr/local/lib/libboost*
key: v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }}
restore-keys: |
v1-boost-${{ runner.os }}-${{ runner.arch }}-${{ matrix.runs-on }}
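# the cache-hit output of this step gates the 'Install Boost' step below, so Boost is only built from source on a cache miss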
- name: Install Boost
if: steps.cache-boost.outputs.cache-hit != 'true' && runner.os == 'Linux' && matrix.ENABLE_CONAN != 'ON'
run: |
BOOST_VERSION="1.85.0"
BOOST_VERSION_UNDERSCORE="${BOOST_VERSION//./_}"
wget -q https://boostorg.jfrog.io/artifactory/main/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_UNDERSCORE}.tar.gz
tar xzf boost_${BOOST_VERSION_UNDERSCORE}.tar.gz
cd boost_${BOOST_VERSION_UNDERSCORE}
sudo ./bootstrap.sh
sudo ./b2 install
cd ..
sudo rm -rf boost_${BOOST_VERSION_UNDERSCORE}*
- name: Install dev dependencies
run: |
# workaround: GitHub Actions no longer seems to add this directory to PATH after https://github.com/actions/runner-images/pull/6499,
# so without it CI cannot find the conan executable installed above
if [[ "${RUNNER_OS}" == "macOS" ]]; then
echo "/Library/Frameworks/Python.framework/Versions/Current/bin" >> $GITHUB_PATH
fi
# ccache
if [[ "${RUNNER_OS}" == "Linux" ]]; then
sudo apt-get update -y && sudo apt-get install ccache
elif [[ "${RUNNER_OS}" == "macOS" ]]; then
brew install ccache
fi
# Linux dev packages
if [ "${ENABLE_CONAN}" != "ON" ]; then
sudo apt-get update -y
sudo apt-get install -y libbz2-dev libxml2-dev libzip-dev liblua5.2-dev
if [[ "${CCOMPILER}" != clang-* ]]; then
sudo apt-get install -y ${CXXCOMPILER}
fi
if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then
sudo apt-get install -y lcov
fi
fi
# TBB
TBB_VERSION=2021.12.0
if [[ "${RUNNER_OS}" == "Linux" ]]; then
TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-lin.tgz"
elif [[ "${RUNNER_OS}" == "macOS" ]]; then
TBB_URL="https://github.com/oneapi-src/oneTBB/releases/download/v${TBB_VERSION}/oneapi-tbb-${TBB_VERSION}-mac.tgz"
fi
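# download the prebuilt oneTBB release and copy it into /usr/local so the build can find TBB headers and libraries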
wget --tries 5 ${TBB_URL} -O onetbb.tgz
tar zxvf onetbb.tgz
sudo cp -a oneapi-tbb-${TBB_VERSION}/lib/. /usr/local/lib/
sudo cp -a oneapi-tbb-${TBB_VERSION}/include/. /usr/local/include/
- name: Prepare build
run: |
mkdir ${OSRM_BUILD_DIR}
ccache --max-size=256M
npm ci --ignore-scripts
if [[ "${ENABLE_COVERAGE}" == "ON" ]]; then
lcov --directory . --zerocounters # clean cached files
fi
echo "CC=${CCOMPILER}" >> $GITHUB_ENV
echo "CXX=${CXXCOMPILER}" >> $GITHUB_ENV
if [[ "${RUNNER_OS}" == "macOS" ]]; then
# missing from GCC path, needed for conan builds of libiconv, for example.
sudo xcode-select --switch /Library/Developer/CommandLineTools
echo "LIBRARY_PATH=${LIBRARY_PATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib" >> $GITHUB_ENV
echo "CPATH=${CPATH}:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include" >> $GITHUB_ENV
fi
- name: Build and install OSRM
run: |
echo "Using ${JOBS} jobs"
pushd ${OSRM_BUILD_DIR}
if [[ "${ENABLE_CONAN}" == "ON" ]]; then
python3 -m venv .venv
source .venv/bin/activate
python3 -m pip install conan==2.7.1
conan profile detect --force
fi
ccache --zero-stats
cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
-DENABLE_CONAN=${ENABLE_CONAN:-OFF} \
-DENABLE_ASSERTIONS=${ENABLE_ASSERTIONS:-OFF} \
-DENABLE_CLANG_TIDY=${ENABLE_CLANG_TIDY:-OFF} \
-DBUILD_SHARED_LIBS=${BUILD_SHARED_LIBS:-OFF} \
-DENABLE_COVERAGE=${ENABLE_COVERAGE:-OFF} \
-DENABLE_NODE_BINDINGS=${ENABLE_NODE_BINDINGS:-OFF} \
-DENABLE_SANITIZER=${ENABLE_SANITIZER:-OFF} \
-DBUILD_TOOLS=${BUILD_TOOLS:-OFF} \
-DENABLE_CCACHE=ON \
-DENABLE_LTO=${ENABLE_LTO:-ON} \
-DCMAKE_INSTALL_PREFIX=${OSRM_INSTALL_DIR}
make --jobs=${JOBS}
if [[ "${NODE_PACKAGE_TESTS_ONLY}" != "ON" ]]; then
make tests --jobs=${JOBS}
make benchmarks --jobs=${JOBS}
sudo make install
if [[ "${RUNNER_OS}" == "Linux" ]]; then
echo "LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${OSRM_INSTALL_DIR}/lib" >> $GITHUB_ENV
fi
echo "PKG_CONFIG_PATH=${OSRM_INSTALL_DIR}/lib/pkgconfig" >> $GITHUB_ENV
fi
popd
- name: Build example
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' && matrix.ENABLE_CONAN != 'ON' }}
run: |
mkdir example/build && pushd example/build
cmake .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE}
make --jobs=${JOBS}
popd
- name: Run all tests
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY != 'ON' }}
run: |
make -C test/data benchmark
# macOS SIP strips the linker path. Reset this inside the running shell
if [[ "${ENABLE_CONAN}" == "OFF" ]]; then
export LD_LIBRARY_PATH=${{ env.LD_LIBRARY_PATH }}
./example/build/osrm-example test/data/mld/monaco.osrm
fi
# All tests assume they are run from the build directory
pushd ${OSRM_BUILD_DIR}
for i in ./unit_tests/*-tests ; do echo Running $i ; $i ; done
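# node binding tests are skipped when sanitizers are on, presumably because the Node binary itself is not sanitizer-instrumented (assumption)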
if [ -z "${ENABLE_SANITIZER}" ]; then
npm run nodejs-tests
fi
popd
npm test
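# for NODE_PACKAGE_TESTS_ONLY configurations the binding built above is re-tested against Node 18, 20 and latest to confirm the prebuilt package loads on each version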
- name: Use Node 18
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
uses: actions/setup-node@v4
with:
node-version: 18
- name: Run Node package tests on Node 18
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
run: |
node --version
npm run nodejs-tests
- name: Use Node 20
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
uses: actions/setup-node@v4
with:
node-version: 20
- name: Run Node package tests on Node 20
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
run: |
node --version
npm run nodejs-tests
- name: Use Node latest
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
uses: actions/setup-node@v4
with:
node-version: latest
- name: Run Node package tests on Node-latest
if: ${{ matrix.NODE_PACKAGE_TESTS_ONLY == 'ON' }}
run: |
node --version
npm run nodejs-tests
- name: Upload test logs
uses: actions/upload-artifact@v4
if: failure()
with:
name: logs
path: test/logs/
# - name: Generate code coverage
# if: ${{ matrix.ENABLE_COVERAGE == 'ON' }}
# run: |
# lcov --directory . --capture --output-file coverage.info # capture coverage info
# lcov --remove coverage.info '/usr/*' --output-file coverage.info # filter out system
# lcov --list coverage.info #debug info
# # Uploading report to CodeCov
# - name: Upload code coverage
# if: ${{ matrix.ENABLE_COVERAGE == 'ON' }}
# uses: codecov/codecov-action@v4
# with:
# files: coverage.info
# name: codecov-osrm-backend
# fail_ci_if_error: true
# verbose: true
- name: Build Node package
if: ${{ matrix.build_node_package }}
run: ./scripts/ci/node_package.sh
- name: Publish Node package
if: ${{ matrix.build_node_package && env.PUBLISH == 'On' }}
uses: ncipollo/release-action@v1
with:
allowUpdates: true
artifactErrorsFailBuild: true
artifacts: build/stage/**/*.tar.gz
omitBody: true
omitBodyDuringUpdate: true
omitName: true
omitNameDuringUpdate: true
replacesArtifacts: true
token: ${{ secrets.GITHUB_TOKEN }}
- name: Show CCache statistics
run: |
ccache -p
ccache -s
benchmarks:
if: github.event_name == 'pull_request'
needs: [format-taginfo-docs]
runs-on: self-hosted
env:
CCOMPILER: clang-16
CXXCOMPILER: clang++-16
CC: clang-16
CXX: clang++-16
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_NUMBER: ${{ github.event.pull_request.number }}
GITHUB_REPOSITORY: ${{ github.repository }}
RUN_BIG_BENCHMARK: ${{ contains(github.event.pull_request.labels.*.name, 'Performance') }}
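# RUN_BIG_BENCHMARK switches the benchmark dataset from Berlin to Poland when the PR carries the 'Performance' label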
steps:
- name: Checkout PR Branch
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
path: pr
- name: Activate virtualenv
run: |
python3 -m venv .venv
source .venv/bin/activate
echo PATH=$PATH >> $GITHUB_ENV
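# PATH (now including the venv bin dir) is persisted via GITHUB_ENV so conan and the pinned Python packages stay available in later steps of this job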
pip install "conan==2.7.1" "requests==2.31.0" "numpy==1.26.4"
- name: Prepare data
run: |
if [ "$RUN_BIG_BENCHMARK" = "true" ]; then
rm -rf ~/data.osm.pbf
wget http://download.geofabrik.de/europe/poland-latest.osm.pbf -O ~/data.osm.pbf --quiet
gunzip -c ./pr/test/data/poland_gps_traces.csv.gz > ~/gps_traces.csv
else
if [ ! -f "~/data.osm.pbf" ]; then
wget http://download.geofabrik.de/europe/germany/berlin-latest.osm.pbf -O ~/data.osm.pbf
else
echo "Using cached data.osm.pbf"
fi
gunzip -c ./pr/test/data/berlin_gps_traces.csv.gz > ~/gps_traces.csv
fi
- name: Prepare environment
run: |
echo "CCACHE_DIR=$HOME/.ccache" >> $GITHUB_ENV
mkdir -p $HOME/.ccache
ccache --zero-stats
ccache --max-size=256M
- name: Checkout Base Branch
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.base.ref }}
path: base
- name: Build Base Branch
run: |
cd base
npm ci --ignore-scripts
cd ..
mkdir base/build
cd base/build
cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON ..
make -j$(nproc)
make -j$(nproc) benchmarks
cd ..
make -C test/data
- name: Build PR Branch
run: |
cd pr
npm ci --ignore-scripts
cd ..
mkdir -p pr/build
cd pr/build
cmake -DENABLE_CONAN=ON -DCMAKE_BUILD_TYPE=Release -DENABLE_NODE_BINDINGS=ON ..
make -j$(nproc)
make -j$(nproc) benchmarks
cd ..
make -C test/data
# we run benchmarks in tmpfs to avoid the impact of disk IO
- name: Create folder for tmpfs
run: |
# if it was already mounted (e.g. because a previous job failed), unmount it first
sudo umount ~/benchmarks || true
rm -rf ~/benchmarks
mkdir -p ~/benchmarks
# see https://llvm.org/docs/Benchmarking.html
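# cset shield reserves CPUs 2-3 for the benchmark processes and the tmpfs mount removes disk IO noise, as recommended by the guide above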
- name: Run PR Benchmarks
run: |
sudo cset shield -c 2-3 -k on
sudo mount -t tmpfs -o size=4g none ~/benchmarks
cp -rf pr/build ~/benchmarks/build
cp -rf pr/lib ~/benchmarks/lib
mkdir -p ~/benchmarks/test
cp -rf pr/test/data ~/benchmarks/test/data
cp -rf pr/profiles ~/benchmarks/profiles
sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/pr_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv
sudo umount ~/benchmarks
sudo cset shield --reset
- name: Run Base Benchmarks
run: |
sudo cset shield -c 2-3 -k on
sudo mount -t tmpfs -o size=4g none ~/benchmarks
cp -rf base/build ~/benchmarks/build
cp -rf base/lib ~/benchmarks/lib
mkdir -p ~/benchmarks/test
cp -rf base/test/data ~/benchmarks/test/data
cp -rf base/profiles ~/benchmarks/profiles
# TODO: remove this once the base branch has this file at the needed location
if [ ! -f ~/benchmarks/test/data/portugal_to_korea.json ]; then
cp base/src/benchmarks/portugal_to_korea.json ~/benchmarks/test/data/portugal_to_korea.json
fi
# we intentionally use the scripts from the PR branch so they can be updated and the results seen in the same PR
sudo cset shield --exec -- ./pr/scripts/ci/run_benchmarks.sh -f ~/benchmarks -r $(pwd)/base_results -s $(pwd)/pr -b ~/benchmarks/build -o ~/data.osm.pbf -g ~/gps_traces.csv
sudo umount ~/benchmarks
sudo cset shield --reset
- name: Post Benchmark Results
run: |
python3 pr/scripts/ci/post_benchmark_results.py base_results pr_results
- name: Show CCache statistics
run: |
ccache -p
ccache -s
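# aggregator job, presumably used as the single required status check so branch protection does not have to list every matrix job above (assumption)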
ci-complete:
runs-on: ubuntu-22.04
needs: [build-test-publish, docker-image-matrix, windows-release-node, benchmarks]
steps:
- run: echo "CI complete"