Fix kernel and line info in cmake (rapidsai#3941)
Closes rapidsai#3938

Authors:
  - Dante Gama Dessavre (https://github.com/dantegd)

Approvers:
  - Corey J. Nolet (https://github.com/cjnolet)
  - AJ Schmidt (https://github.com/ajschmidt8)

URL: rapidsai#3941
dantegd authored Jun 9, 2021
1 parent 5df32c8 commit 07254db
Showing 3 changed files with 14 additions and 9 deletions.
6 changes: 3 additions & 3 deletions ci/gpu/build.sh
@@ -53,7 +53,7 @@ gpuci_conda_retry install -c conda-forge -c rapidsai -c rapidsai-nightly -c nvid
    "libcumlprims=${MINOR_VERSION}" \
    "dask-cudf=${MINOR_VERSION}" \
    "dask-cuda=${MINOR_VERSION}" \
-   "ucx-py=0.20.*" \
+   "ucx-py=0.21" \
    "ucx-proc=*=gpu" \
    "xgboost=1.4.2dev.rapidsai${MINOR_VERSION}" \
    "rapids-build-env=${MINOR_VERSION}.*" \
@@ -195,8 +195,8 @@ else

gpuci_logger "Install the main version of dask and distributed"
set -x
-   pip install "git+https://github.com/dask/distributed.git@2021.05.1" --upgrade --no-deps
-   pip install "git+https://github.com/dask/dask.git@2021.05.1" --upgrade --no-deps
+   pip install "git+https://github.com/dask/distributed.git@main" --upgrade --no-deps
+   pip install "git+https://github.com/dask/dask.git@main" --upgrade --no-deps
set +x

gpuci_logger "Building cuml"
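Aside from the CMake fix, the CI script now pins ucx-py to 0.21 and switches dask and distributed to their main branches. A quick, hypothetical sanity check of the resulting environment (not part of the commit; package names are taken from the script above):

    # Confirm which versions the updated CI step actually installed
    # (assumes the job's conda environment is active).
    conda list ucx-py
    python -c "import dask, distributed; print(dask.__version__, distributed.__version__)"
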
7 changes: 4 additions & 3 deletions cpp/CMakeLists.txt
@@ -58,12 +58,12 @@ option(BUILD_CUML_BENCH "Build cuML C++ benchmark tests" ON)
option(BUILD_CUML_PRIMS_BENCH "Build ml-prims C++ benchmark tests" ON)
option(BUILD_CUML_STD_COMMS "Build the standard NCCL+UCX Communicator" ON)
option(BUILD_CUML_MPI_COMMS "Build the MPI+NCCL Communicator (used for testing)" OFF)
+option(CUDA_ENABLE_KERNEL_INFO "Enable kernel resource usage info" OFF)
+option(CUDA_ENABLE_LINE_INFO "Enable lineinfo in nvcc" OFF)
option(DETECT_CONDA_ENV "Enable detection of conda environment for dependencies" ON)
option(DISABLE_DEPRECATION_WARNINGS "Disable depreaction warnings " ON)
option(DISABLE_OPENMP "Disable OpenMP" OFF)
option(ENABLE_CUMLPRIMS_MG "Enable algorithms that use libcumlprims_mg" ON)
-option(KERNEL_INFO "Enable kernel resource usage info" OFF)
-option(LINE_INFO "Enable lineinfo in nvcc" OFF)
option(NVTX "Enable nvtx markers" OFF)
option(SINGLEGPU "Disable all mnmg components and comms libraries" OFF)
option(USE_CCACHE "Cache build artifacts with ccache" OFF)
@@ -82,7 +82,8 @@ message(VERBOSE "CUML: Enabling detection of conda environment for dependencies:
message(VERBOSE "CUML: Disabling OpenMP: ${DISABLE_OPENMP}")
message(VERBOSE "CUML: Enabling algorithms that use libcumlprims_mg: ${ENABLE_CUMLPRIMS_MG}")
message(VERBOSE "CUML: Enabling kernel resource usage info: ${KERNEL_INFO}")
-message(VERBOSE "CUML: Enabling lineinfo in nvcc: ${LINE_INFO}")
+message(VERBOSE "CUML: Enabling kernelinfo in nvcc: ${CUDA_ENABLE_KERNEL_INFO}")
+message(VERBOSE "CUML: Enabling lineinfo in nvcc: ${CUDA_ENABLE_LINE_INFO}")
message(VERBOSE "CUML: Enabling nvtx markers: ${NVTX}")
message(VERBOSE "CUML: Disabling all mnmg components and comms libraries: ${SINGLEGPU}")
message(VERBOSE "CUML: Cache build artifacts with ccache: ${USE_CCACHE}")
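With the options renamed to CUDA_ENABLE_KERNEL_INFO and CUDA_ENABLE_LINE_INFO, enabling them at configure time would look roughly like the sketch below. The cpp/ source directory comes from this diff; the build directory and any extra flags are assumptions, not part of the commit.

    # Hypothetical out-of-source configure with both diagnostics turned on;
    # add any other -D options your build normally requires.
    cmake -S cpp -B cpp/build \
          -DCUDA_ENABLE_KERNEL_INFO=ON \
          -DCUDA_ENABLE_LINE_INFO=ON
    cmake --build cpp/build
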
10 changes: 7 additions & 3 deletions cpp/cmake/modules/ConfigureCUDA.cmake
@@ -34,9 +34,13 @@ if(DISABLE_DEPRECATION_WARNING)
endif()

# Option to enable line info in CUDA device compilation to allow introspection when profiling / memchecking
-if(CUDA_ENABLE_LINEINFO)
-  list(APPEND CUML_CUDA_FLAGS -lineinfo)
-endif()
+if(CUDA_ENABLE_LINE_INFO)
+  list(APPEND CUML_CUDA_FLAGS -lineinfo)
+endif(LINE_INFO)
+
+if(CUDA_ENABLE_KERNEL_INFO)
+  list(APPEND CUML_CUDA_FLAGS -Xptxas=-v)
+endif(KERNEL_INFO)

# Debug options
if(CMAKE_BUILD_TYPE MATCHES Debug)
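Because ConfigureCUDA.cmake now appends -lineinfo and -Xptxas=-v to CUML_CUDA_FLAGS when the options are on, one way to confirm the flags actually reach nvcc is to grep a verbose build log. A sketch under the same assumed build layout as above; the grep pattern is only illustrative:

    # Rebuild verbosely and look for the injected nvcc flags in the compile lines.
    cmake --build cpp/build --verbose 2>&1 | grep -E 'lineinfo|Xptxas=-v' | head
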
