Address CodeQL issues #135

Workflow file for this run

name: HDF5/develop
on:
  pull_request:
    branches: [ develop ]
  push:
    branches: [ develop ]
    paths-ignore:
      - '**.md'
      - '**.txt'
      - 'docs/**'
  # Allows this workflow to be run manually from the Actions tab
  workflow_dispatch:
jobs:
  vol-cache:
    runs-on: ubuntu-latest
    timeout-minutes: 20
    steps:
      - uses: actions/checkout@v4.1.1
      - name: Dependencies
        run: |
          sudo apt-get update
          # libtool, mpi, zlib, python3
          sudo apt-get install -y libtool libopenmpi-dev zlib1g-dev python3
          git config pull.rebase false
          # hdf5
          git clone https://github.com/HDFGroup/hdf5.git
          # async vol
          git clone https://github.com/hpc-io/vol-async.git
          # Argobots
          git clone https://github.com/pmodels/argobots.git
          # h5bench
          git clone https://github.com/zhenghh04/h5bench.git
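      # Build order in the next step: HDF5 and Argobots first, then the async
      # VOL connector (which needs both), then the cache VOL connector under
      # test, and finally h5bench to exercise the full stack.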
      - name: Installation
        run: |
          export mydir="$PWD"
          export EXAHDF5_ROOT=$mydir
          export SDK_DIR=$EXAHDF5_ROOT/soft/
          mkdir -p $SDK_DIR
          export HDF5_ROOT=$SDK_DIR/hdf5/
          mkdir -p $HDF5_ROOT
          export HDF5_HOME=$HDF5_ROOT
          export HDF5_DIR=$HDF5_ROOT
          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol/
          mkdir -p $HDF5_VOL_DIR
          mkdir -p $HDF5_VOL_DIR/lib/
          mkdir -p $HDF5_VOL_DIR/include/
          export ABT_DIR=$SDK_DIR/argobots/
          mkdir -p $ABT_DIR
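          # Everything installs under $PWD/soft; both VOL connectors share
          # $HDF5_VOL_DIR so the test steps can point HDF5_PLUGIN_PATH and
          # LD_LIBRARY_PATH at a single lib/ directory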
          # Compile HDF5
          mkdir -p hdf5/build
          cd hdf5/build
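          # Parallel (MPI) HDF5 combined with thread safety is an officially
          # unsupported configuration, hence HDF5_ALLOW_UNSUPPORTED; the async
          # VOL connector requires both features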
          cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$HDF5_DIR -DHDF5_ENABLE_PARALLEL:BOOL=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ALLOW_UNSUPPORTED:BOOL=ON ..
          make -j2 install
          cd -
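          # Compile Argobots, the lightweight-threading runtime the async VOL
          # uses to run I/O tasks in the background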
          cd argobots
          ./autogen.sh
          ./configure --prefix=$ABT_DIR
          make -j2 && make install
          cd -
          # Compile Asynchronous VOL connector
          mkdir -p vol-async/build
          cd vol-async/build
          cmake .. -DCMAKE_INSTALL_PREFIX=$HDF5_VOL_DIR -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx
          make -j2 all install
          cd -
          # Compile Cache VOL connector
          mkdir -p build
          cd build
          cmake .. -DCMAKE_INSTALL_PREFIX=$HDF5_VOL_DIR -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx
          make -j2 all install
          cd -
          # Compile h5bench
          mkdir -p h5bench/build
          cd h5bench/build
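          # WITH_CACHE_VOL/WITH_ASYNC_VOL build h5bench's VOL-aware benchmarks;
          # the extra -I/-L flags point the compiler at the connector headers
          # and libraries installed above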
          cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I$HDF5_VOL_DIR/include -L$HDF5_VOL_DIR/lib -g"
          make -j2 all install VERBOSE=1
          cd -
      - name: Test Vol-Cache-Node-Local
        run: |
          ulimit -d unlimited
          ulimit -s unlimited
          mkdir -p SSD
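          # The SSD/ directory under the workspace stands in for node-local
          # storage on the CI runner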
          export EXAHDF5_ROOT=$PWD
          export SDK_DIR=$EXAHDF5_ROOT/soft/
          export HDF5_ROOT=$SDK_DIR/hdf5
          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
          export ABT_DIR=$SDK_DIR/argobots/
          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
          export HDF5_CACHE_DEBUG=100
          printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" > cache_1.cfg
          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
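          # libabt is preloaded so Argobots symbols are already resolved when
          # HDF5 dlopens the connector plugins (presumably a workaround for the
          # plugins not carrying an RPATH to $ABT_DIR)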
          cat cache_1.cfg
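          # Prepare the input dataset through the native VOL only, with the
          # cache connector disabled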
          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
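          # Connector stack: cache_ext -> async VOL (registered connector
          # value 512) -> native VOL (value 0), so writes are staged in the
          # cache and flushed asynchronously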
          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
          for opt in 'yes' 'no'
          do
            echo "Testing with HDF5_CACHE_WR=$opt"
            HDF5_CACHE_WR=$opt mpirun -np 2 write_cache.exe
            HDF5_CACHE_RD=$opt mpirun -np 2 read_cache.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
          done
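      # The next three steps rerun the same tests with the write staging area
      # moved to memory, then to global storage, and finally with write fusion
      # enabled.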
      - name: Test Vol-Cache-MEMORY
        run: |
          ulimit -d unlimited
          ulimit -s unlimited
          mkdir -p SSD
          export EXAHDF5_ROOT=$PWD
          export SDK_DIR=$EXAHDF5_ROOT/soft/
          export HDF5_ROOT=$SDK_DIR/hdf5
          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
          export ABT_DIR=$SDK_DIR/argobots/
          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
          export HDF5_CACHE_DEBUG=100
          printf "HDF5_CACHE_STORAGE_TYPE: MEMORY\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 2147483648" > cache_1.cfg
          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
          cat cache_1.cfg
          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
          for opt in 'yes' 'no'
          do
            echo "Testing with HDF5_CACHE_WR=$opt"
            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 1 test_dataset.exe
            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 1 write_cache.exe
            HDF5_CACHE_RD=$opt mpirun -np 2 read_cache.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
          done
      - name: Test Vol-Cache-Global
        run: |
          mkdir -p SSD
          export EXAHDF5_ROOT=$PWD
          export SDK_DIR=$EXAHDF5_ROOT/soft/
          export HDF5_ROOT=$SDK_DIR/hdf5
          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
          export ABT_DIR=$SDK_DIR/argobots/
          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
          export HDF5_CACHE_DEBUG=100
          printf "HDF5_CACHE_STORAGE_TYPE: GLOBAL\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: GLOBAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" > cache_1.cfg
          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
          cat cache_1.cfg
          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
          for opt in 'yes' 'no'
          do
            echo "Testing with HDF5_CACHE_WR=$opt"
            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 2 write_cache.exe
            HDF5_CACHE_RD=$opt mpirun -np 2 read_cache.exe
            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
          done
      - name: Test Vol-Cache-Fusion
        run: |
          mkdir -p SSD
          export EXAHDF5_ROOT=$PWD
          export SDK_DIR=$EXAHDF5_ROOT/soft/
          export HDF5_ROOT=$SDK_DIR/hdf5
          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
          export ABT_DIR=$SDK_DIR/argobots/
          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
          export HDF5_CACHE_DEBUG=100
          printf "HDF5_CACHE_STORAGE_TYPE: MEMORY\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 2147483648\nHDF5_CACHE_FUSION_THRESHOLD: 16777216" > cache_1.cfg
          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
          cat cache_1.cfg
          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
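          # A small 16x16 write should stay well under the 16 MiB fusion
          # threshold, exercising the fusion path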
          HDF5_CACHE_WR=yes mpirun -np 2 write_cache.exe --dim 16 16