Jenkins improvements (#107)
+ Performs a non-inplace Morpheus build.
+ Adds $ORIGIN to the rpath of built binaries, fixing an issue where libmorpheus.so was unable to find libmorpheus-utils.so (fixes #111)
+ ~~Removes the need for a GPU for the build stage~~ Blocked by Neo/nvml issue #186
+ Moves common operations to common.sh
+ Fixes the CI scripts so that they work locally (fixes #114)
+ Improves the way sccache is configured (fixes #110)

This pull request incorporates changes from #62
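A quick way to sanity-check the $ORIGIN rpath change described above (the library path under build/ is an assumption, not taken from this commit):

```bash
# Hypothetical check that the build now embeds $ORIGIN, letting libmorpheus.so
# resolve libmorpheus-utils.so from its own directory; the exact path under
# build/ is an assumption.
readelf -d build/morpheus/_lib/libmorpheus.so | grep -E 'RPATH|RUNPATH'
# A successful build should report something like: Library runpath: [$ORIGIN]
```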

Authors:
  - David Gardner (https://github.com/dagardner-nv)
  - Michael Demoret (https://github.com/mdemoret-nv)

Approvers:
  - Devin Robison (https://github.com/drobison00)
  - Michael Demoret (https://github.com/mdemoret-nv)

URL: #107
dagardner-nv authored Jun 8, 2022
1 parent 1a1be1a commit fc243ce
Showing 11 changed files with 207 additions and 87 deletions.
2 changes: 2 additions & 0 deletions CMakeLists.txt
@@ -39,6 +39,8 @@ set(CMAKE_CXX_EXTENSIONS ON)
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
set(CMAKE_POSITION_INDEPENDENT_CODE TRUE)
set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)
set(CMAKE_BUILD_RPATH_USE_ORIGIN TRUE)
set(CMAKE_INSTALL_RPATH "$ORIGIN")

enable_testing()

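Together, the two new rpath lines in this hunk are roughly equivalent to adding an $ORIGIN runtime search path on the link line; an illustrative (not actual) command:

```bash
# Illustrative only: the practical effect of CMAKE_BUILD_RPATH_USE_ORIGIN and
# CMAKE_INSTALL_RPATH "$ORIGIN" is an extra linker argument like this,
# single-quoted so the shell does not expand $ORIGIN.
g++ -shared -o libmorpheus.so morpheus.o -L. -lmorpheus-utils -Wl,-rpath,'$ORIGIN'
```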
7 changes: 3 additions & 4 deletions ci/conda/recipes/morpheus/morpheus_build.sh
@@ -37,11 +37,10 @@ CMAKE_ARGS="-DCMAKE_INSTALL_PREFIX=$PREFIX ${CMAKE_ARGS}"
CMAKE_ARGS="-DCMAKE_INSTALL_LIBDIR=lib ${CMAKE_ARGS}"
CMAKE_ARGS="-DBUILD_SHARED_LIBS=ON ${CMAKE_ARGS}"
CMAKE_ARGS="-DMORPHEUS_USE_CONDA=ON ${CMAKE_ARGS}"
CMAKE_ARGS="-DMORPHEUS_USE_CCACHE=ON ${CMAKE_ARGS}"

if [[ "${USE_SCCACHE}" == "" ]]; then
CMAKE_ARGS="-DMORPHEUS_USE_CCACHE=ON ${CMAKE_ARGS}"
else
CMAKE_ARGS="-DMORPHEUS_USE_CCACHE=OFF ${CMAKE_ARGS}"
if [[ "${USE_SCCACHE}" == "1" ]]; then
CMAKE_ARGS="-DCCACHE_PROGRAM_PATH=$(which sccache) ${CMAKE_ARGS}"
fi

CMAKE_ARGS="-DMORPHEUS_BUILD_PYTHON=ON ${CMAKE_ARGS}"
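A hedged example of driving this recipe build with sccache locally; only the USE_SCCACHE / CCACHE_PROGRAM_PATH handling comes from the script above, the conda build invocation itself is an assumption:

```bash
# Assumed local invocation: when USE_SCCACHE=1 the recipe keeps
# MORPHEUS_USE_CCACHE=ON but points CCACHE_PROGRAM_PATH at sccache.
export USE_SCCACHE=1
which sccache    # must resolve, otherwise CCACHE_PROGRAM_PATH ends up empty
conda build ci/conda/recipes/morpheus
```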
93 changes: 93 additions & 0 deletions ci/iwyu/mappings.imp
@@ -0,0 +1,93 @@
[

## Include mappings
# neo protos
{ "include": [ "\"neo/protos/architect.pb.h\"", private, "<neo/protos/architect.pb.h>", "public" ] },
{ "include": [ "\"neo/protos/codable.pb.h\"", private, "<neo/protos/codable.pb.h>", "public" ] },
{ "include": [ "\"neo/protos/remote_descriptor.pb.h\"", private, "<neo/protos/remote_descriptor.pb.h>", "public" ] },
{ "include": [ "\"neo/protos/tensor_meta_data.pb.h\"", private, "<neo/protos/tensor_meta_data.pb.h>", "public" ] },

# stdlib
{ "include": [ "<bits/cxxabi_forced.h>", private, "<mutex>", "public" ] },
{ "include": [ "<bits/cxxabi_forced.h>", private, "<vector>", "public" ] },
{ "include": [ "<bits/types/siginfo_t.h>", private, "<csignal>", "public" ] },

# boost
{ "include": ["@<boost/fiber/future/detail/.*>", "private", "<boost/fiber/future/future.hpp>", "public"] },

# cuda
{ "include": ["<cuda_runtime_api.h>", "private", "<cuda_runtime.h>", "public"] },
{ "include": ["\"cuda_runtime_api.h\"", "private", "<cuda_runtime.h>", "public"] },
{ "include": ["<driver_types.h>", "private", "<cuda_runtime.h>", "public"] },
{ "include": ["\"driver_types.h\"", "private", "<cuda_runtime.h>", "public"] },

# gtest
{ "include": ["@<gtest/gtest-.*>", "private", "<gtest/gtest.h>", "public"] },
{ "include": ["@<gtest/gtest_.*>", "private", "<gtest/gtest.h>", "public"] },

# gprc
{ "include": ["@<grpcpp/impl/.*>", "private", "<grpcpp/grpcpp.h>", "public"] },
{ "include": ["@<grpc/impl/.*>", "private", "<grpcpp/grpcpp.h>", "public"] },

# nlohmann json
{ "include": ["<nlohmann/detail/iterators/iter_impl.hpp>", "private", "<nlohmann/json.hpp>", "public"] },
{ "include": ["<nlohmann/detail/iterators/iteration_proxy.hpp>", "private", "<nlohmann/json.hpp>", "public"] },
{ "include": ["<nlohmann/detail/json_ref.hpp>", "private", "<nlohmann/json.hpp>", "public"] },

# Protobuf
{ "include": [ "<google/protobuf/repeated_ptr_field.h>", private, "<google/protobuf/repeated_field.h>", "public" ] },

## Symbol mappings
# stdlib
{ "symbol": ["__gnu_cxx::__enable_if<true, double>::__type", "private", "<cmath>", "public"] },
{ "symbol": ["std::__success_type<std::chrono::duration<long, std::ratio<1, 1000000000>>>::type" , "private", "<chrono>", "public"] },
{ "symbol": ["__cxxabiv1::__forced_unwind", "private", "<future>", "public"] },

# boost
{ "symbol": ["__forced_unwind", "private", "<boost/fiber/all.hpp>", "public"] },
{ "symbol": ["boost::context::detail::forced_unwind", "private", "<boost/fiber/future/future.hpp>", "public"] },
{ "symbol": ["boost::intrusive_ptr::operator", "private", "<boost/fiber/future/promise.hpp>", "public"] },
{ "symbol": ["__cxxabiv1::__forced_unwind", "private", "<boost/fiber/future/future.hpp>", "public"] },

# cuda
{ "symbol": ["cuda::std::declval", "private", "<cuda/memory_resource>", "public"] },

# nlohmann json
# Tells IWYU that both json and json_fwd export the nlohmann::json namespace without this IWYU will always require
# json_fwd.hpp even when json.hpp is already included
{ "symbol": ["nlohmann", "private", "<nlohmann/json.hpp>", "public"] },
{ "symbol": ["nlohmann", "private", "<nlohmann/json_fwd.hpp>", "public"] },
{ "symbol": ["nlohmann::json", "private", "<nlohmann/json.hpp>", "public"] },
{ "symbol": ["nlohmann::json", "private", "<nlohmann/json_fwd.hpp>", "public"] },

# pybind11
{ "symbol": ["pybind11", "private", "<pybind11/cast.h>", "public"] },
{ "symbol": ["pybind11", "private", "<pybind11/embed.h>", "public"] },
{ "symbol": ["pybind11", "private", "<pybind11/pybind11.h>", "public"] },
{ "symbol": ["pybind11", "private", "<pybind11/pytypes.h>", "public"] },

{ "symbol": ["PYBIND11_MODULE", "private", "<pybind11/pybind11.h>", "public"] },
{ "symbol": ["PySequence_GetItem", "private", "<pybind11/pybind11.h>", "public"] },
{ "symbol": ["PyExc_ImportError", "private", "<pybind11/pybind11.h>", "public"] },
{ "symbol": ["PyErr_SetObject", "private", "<pybind11/pybind11.h>", "public"] },
{ "symbol": ["PyExc_StopIteration", "private", "<pybind11/pybind11.h>", "public"] },
{ "symbol": ["_Py_IsFinalizing", "private", "<pybind11/pybind11.h>", "public"] },
{ "symbol": ["pybind11::detail::str_attr_accessor", "private", "<pybind11/pybind11.h>", "public"] },
{ "symbol": ["pybind11::detail::overload_cast_impl", "private", "<pybind11/pybind11.h>", "public"] },
{ "symbol": ["pybind11::overload_cast", "private", "<pybind11/pybind11.h>", "public"] },
{ "symbol": ["pybind11::stop_iteration", "private", "<pybind11/pybind11.h>", "public"] },
{ "symbol": ["pybind11::return_value_policy", "private", "<pybind11/pybind11.h>", "public"] },
{ "symbol": ["pybind11::return_value_policy::reference_internal", "private", "<pybind11/pybind11.h>", "public"] },
{ "symbol": ["pybind11::detail::get_type_info", "private", "<pybind11/cast.h>", "public"] },
{ "symbol": ["PyGILState_Check", "private", "<pybind11/gil.h>", "public"] },

# spdlog
{ "symbol": ["spdlog::details::file_helper::~file_helper", "private", "<spdlog/sinks/basic_file_sink.h>", "public"] },

# xtensor
{ "symbol": ["xt::no_ownership", "private", "<xtensor/xadapt.hpp>", "public"] },

# neo
{ "symbol": ["std::__decay_and_strip<std::shared_ptr<neo::TraceStatistics> &>::__type" , "private", "<neo/benchmarking/trace_statistics.hpp>", "public"] },

]
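Each entry maps a private header or symbol to the public header that callers should include instead. A sketch of how a mapping file like this is normally handed to include-what-you-use via iwyu_tool.py; the repository's own wiring is not shown in this diff:

```bash
# Sketch: arguments after "--" are forwarded to each IWYU invocation, so the
# mapping file can be applied against an existing compile_commands.json in
# ./build. The source file name here is purely illustrative.
iwyu_tool.py -p build morpheus/_lib/src/example.cpp -- \
    -Xiwyu --mapping_file=ci/iwyu/mappings.imp
```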
6 changes: 3 additions & 3 deletions ci/scripts/common.sh
@@ -17,18 +17,18 @@

export SCRIPT_DIR=${SCRIPT_DIR:-"$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"}
export REPO_DIR=$(realpath ${REPO_DIR:-"${SCRIPT_DIR}/../.."})
export PY_ROOT="."
export PY_ROOT="${REPO_DIR}"
export PY_CFG="${PY_ROOT}/setup.cfg"
export PY_DIRS="${PY_ROOT} ci/scripts"

# work-around for known yapf issue https://github.com/google/yapf/issues/984
export YAPF_EXCLUDE_FLAGS="-e ${PY_ROOT}/versioneer.py -e ${PY_ROOT}/morpheus/_version.py"

# Determine the commits to compare against. If running in CI, these will be set. Otherwise, diff with main
export BASE_SHA=${CHANGE_TARGET:-${BASE_SHA:-main}}
export BASE_SHA=${CHANGE_TARGET:-${BASE_SHA:-$(${SCRIPT_DIR}/gitutils.py get_merge_target)}}
export COMMIT_SHA=${GIT_COMMIT:-${COMMIT_SHA:-HEAD}}

export CPP_FILE_REGEX='^(\.\/)?(src|include|tests|benchmarks|python)\/.*\.(cc|cpp|h|hpp)$'
export CPP_FILE_REGEX='^(\.\/)?(morpheus|tests)\/.*\.(cc|cpp|h|hpp)$'
export PYTHON_FILE_REGEX='^(\.\/)?(?!\.|build).*\.(py|pyx|pxd)$'

# Use these options to skip any of the checks
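The net effect is that the check scripts now diff against the real merge target when run outside CI. A minimal sketch of using these exports locally (grep -P is assumed because PYTHON_FILE_REGEX uses a Perl-style lookahead):

```bash
# Minimal sketch, assuming it is run from the repo root with no CI variables
# set: BASE_SHA falls back to gitutils.py get_merge_target rather than a
# hard-coded 'main'.
source ci/scripts/common.sh
echo "Comparing ${BASE_SHA}...${COMMIT_SHA}"
git diff --name-only "${BASE_SHA}...${COMMIT_SHA}" | grep -P "${PYTHON_FILE_REGEX}"
```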
4 changes: 2 additions & 2 deletions ci/scripts/fix_all.sh
@@ -57,13 +57,13 @@ if [[ "${SKIP_CLANG_TIDY}" == "" ]]; then
fi

# Run include-what-you-use
if [[ "${SKIP_IWYU}" == "" ]]; then
if [[ "${SKIP_IWYU}" == "" && "${CPP_MODIFIED_FILES}" != "" ]]; then

IWYU_TOOL=$(find_iwyu_tool)

if [[ -x "${IWYU_TOOL}" ]]; then
echo "Running include-what-you-use from '${IWYU_TOOL}'..."
${IWYU_TOOL} -j 0 -p ${BUILD_DIR} ${CPP_MODIFIED_FILES[@]} 2>&1
${IWYU_TOOL} -j $(nproc) -p ${BUILD_DIR} ${CPP_MODIFIED_FILES[@]} 2>&1
else
echo "Skipping include-what-you-use. Could not find iwyu_tool.py at '${IWYU_TOOL}'"
fi
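Two behavioural changes here: IWYU is skipped entirely when no C++ files were modified, and parallelism now follows the machine via $(nproc). As a hedged example, the SKIP_* variables referenced by these scripts are treated as skip-when-non-empty:

```bash
# Hedged example: disable the clang-tidy pass so only the remaining fixers
# (including the IWYU pass above, when C++ files changed) run locally.
SKIP_CLANG_TIDY=1 ./ci/scripts/fix_all.sh
```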
53 changes: 43 additions & 10 deletions ci/scripts/gitutils.py
100644 → 100755
@@ -1,3 +1,4 @@
#! /bin/env python3
# SPDX-FileCopyrightText: Copyright (c) 2019-2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
@@ -14,7 +15,9 @@
# limitations under the License.
#

import argparse
import datetime
import logging
import os
import re
import subprocess
@@ -76,7 +79,8 @@ def repo_version_major_minor():
match = re.match(r"^v?(?P<major>[0-9]+)(?:\.(?P<minor>[0-9]+))?", full_repo_version)

if (match is None):
print(" [DEBUG] Could not determine repo major minor version. " f"Full repo version: {full_repo_version}.")
logging.debug("Could not determine repo major minor version. "
f"Full repo version: {full_repo_version}.")
return None

out_version = match.group("major")
@@ -115,8 +119,8 @@ def determine_merge_commit(current_branch="HEAD"):
# Try to determine the target branch from the most recent tag
head_branch = __git("describe", "--all", "--tags", "--match='branch-*'", "--abbrev=0")
except subprocess.CalledProcessError:
print(" [DEBUG] Could not determine target branch from most recent "
"tag. Falling back to 'branch-{major}.{minor}.")
logging.debug("Could not determine target branch from most recent "
"tag. Falling back to 'branch-{major}.{minor}.")
head_branch = None

if (head_branch is not None):
@@ -132,20 +136,22 @@ def determine_merge_commit(current_branch="HEAD"):

head_branch = "branch-{}".format(version)
except Exception:
print(" [DEBUG] Could not determine branch version falling back to main")
logging.debug("Could not determine branch version falling back to main")
head_branch = "main"

try:
# Now get the remote tracking branch
remote_branch = __git("rev-parse", "--abbrev-ref", "--symbolic-full-name", head_branch + "@{upstream}")
except subprocess.CalledProcessError:
print(" [DEBUG] Could not remote tracking reference for " f"branch {head_branch}.")
logging.debug("Could not remote tracking reference for "
f"branch {head_branch}.")
remote_branch = None

if (remote_branch is None):
return None

print(f" [DEBUG] Determined TARGET_BRANCH as: '{remote_branch}'. " "Finding common ancestor.")
logging.debug(f"Determined TARGET_BRANCH as: '{remote_branch}'. "
"Finding common ancestor.")

common_commit = __git("merge-base", remote_branch, current_branch)

@@ -227,13 +233,15 @@ def modifiedFiles(pathFilter=None):
targetBranch = os.environ.get("TARGET_BRANCH")
commitHash = os.environ.get("COMMIT_HASH")
currentBranch = branch()
print(f" [DEBUG] TARGET_BRANCH={targetBranch}, COMMIT_HASH={commitHash}, " f"currentBranch={currentBranch}")
logging.debug(f"TARGET_BRANCH={targetBranch}, COMMIT_HASH={commitHash}, "
f"currentBranch={currentBranch}")

if targetBranch and commitHash and (currentBranch == "current-pr-branch"):
print(" [DEBUG] Assuming a CI environment.")
logging.debug("Assuming a CI environment.")
allFiles = changedFilesBetween(targetBranch, currentBranch, commitHash)
else:
print(" [DEBUG] Did not detect CI environment. " "Determining TARGET_BRANCH locally.")
logging.debug("Did not detect CI environment. "
"Determining TARGET_BRANCH locally.")

common_commit = determine_merge_commit(currentBranch)

@@ -252,7 +260,7 @@ def modifiedFiles(pathFilter=None):
files.append(f)

filesToCheckString = "\n\t".join(files) if files else "<None>"
print(f" [DEBUG] Found files to check:\n\t{filesToCheckString}\n")
logging.debug(f"Found files to check:\n\t{filesToCheckString}\n")
return files


@@ -291,3 +299,28 @@ def listFilesToCheck(filesDirs, filter=None):
if filter is None or filter(f_):
allFiles.append(f_)
return allFiles


def get_merge_target():
currentBranch = branch()
return determine_merge_commit(currentBranch)


def parse_args():
argparser = argparse.ArgumentParser("Executes a gitutil action")
argparser.add_argument("action",
choices=['get_merge_target'],
help="Action to execute")
args = argparser.parse_args()
return args


def main():
args = parse_args()
logging.basicConfig(level=logging.ERROR)
if args.action == 'get_merge_target':
print(get_merge_target())


if __name__ == '__main__':
main()
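The new argparse entry point, combined with the 100644 → 100755 mode change, lets common.sh call this script directly when computing BASE_SHA:

```bash
# Direct use of the new CLI; this is the same call common.sh now makes when
# CHANGE_TARGET / BASE_SHA are not set.
./ci/scripts/gitutils.py get_merge_target
# Prints the merge-base commit of the current branch and its upstream target
# branch, or 'None' if no remote tracking branch could be determined.
```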
26 changes: 9 additions & 17 deletions ci/scripts/jenkins/build.sh
@@ -35,10 +35,7 @@ python3 --version
gcc --version
g++ --version

gpuci_logger "Check conda environment"
conda info
conda config --show-sources
conda list --show-channel-urls
show_conda_info

gpuci_logger "Checking S3 cuDF cache"
CUDF_CONDA_BLD_DIR=/opt/conda/conda-bld
@@ -49,11 +46,10 @@ CUDF_CONDA_TAR="${WORKSPACE_TMP}/cudf_conda.tar.bz"

gpuci_logger "Checking ${DISPLAY_URL}${CUDF_CONDA_CACHE_PATH}"
set +e
aws s3 cp --no-progress ${CUDF_CONDA_CACHE_URL} ${CUDF_CONDA_TAR}
CUDF_CACHE_CHECK=$?
fetch_s3 "${CUDF_CONDA_CACHE_PATH}" "${CUDF_CONDA_TAR}"
set -e

if [[ "${CUDF_CACHE_CHECK}" != "0" ]]; then
if [[ "${FETCH_STATUS}" != "0" ]]; then
gpuci_logger "Cache miss, Building cuDF"
mkdir -p ${CUDF_CONDA_BLD_DIR}
# The --no-build-id bit is needed for sccache
@@ -96,11 +92,9 @@ cmake -B build -G Ninja \
-DMORPHEUS_BUILD_EXAMPLES=ON \
-DMORPHEUS_BUILD_TESTS=ON \
-DMORPHEUS_USE_CONDA=ON \
-DMORPHEUS_PYTHON_INPLACE_BUILD=ON \
-DMORPHEUS_USE_CCACHE=OFF \
-DCMAKE_C_COMPILER_LAUNCHER=sccache \
-DCMAKE_CXX_COMPILER_LAUNCHER=sccache \
-DCMAKE_CUDA_COMPILER_LAUNCHER=sccache \
-DMORPHEUS_PYTHON_INPLACE_BUILD=OFF \
-DMORPHEUS_USE_CCACHE=ON \
-DCCACHE_PROGRAM_PATH=$(which sccache) \
.

gpuci_logger "Building Morpheus"
@@ -110,16 +104,14 @@ gpuci_logger "sccache usage for morpheus build:"
sccache --show-stats

gpuci_logger "Installing Morpheus"
pip install -e ${MORPHEUS_ROOT}
cmake -DCOMPONENT=Wheel -P ${MORPHEUS_ROOT}/build/cmake_install.cmake
pip install ${MORPHEUS_ROOT}/build/wheel

gpuci_logger "Archiving results"
mamba pack --quiet --force --ignore-editable-packages --ignore-missing-files --n-threads ${PARALLEL_LEVEL} -n morpheus -o ${WORKSPACE_TMP}/conda_env.tar.gz
tar cfj ${WORKSPACE_TMP}/workspace.tar.bz --exclude=".git" --exclude="models" --exclude=".cache" ./
ls -lh ${WORKSPACE_TMP}/
mamba pack --quiet --force --ignore-missing-files --n-threads ${PARALLEL_LEVEL} -n morpheus -o ${WORKSPACE_TMP}/conda_env.tar.gz

gpuci_logger "Pushing results to ${DISPLAY_ARTIFACT_URL}"
aws s3 cp --no-progress "${WORKSPACE_TMP}/conda_env.tar.gz" "${ARTIFACT_URL}/conda_env.tar.gz"
aws s3 cp --no-progress "${WORKSPACE_TMP}/workspace.tar.bz" "${ARTIFACT_URL}/workspace.tar.bz"

gpuci_logger "Success"
exit 0
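build.sh now relies on helpers moved into the shared Jenkins common.sh, which is not among the files shown here; the following is only a sketch of the shapes this script assumes (S3_URL and the argument handling are assumptions, while the show_conda_info body mirrors the three conda commands removed above):

```bash
# Sketch only: plausible implementations of the helpers called above.
# fetch_s3 <remote path> <local file> downloads an artifact and records the
# exit code in FETCH_STATUS, which callers check after wrapping the call in
# set +e / set -e.
function fetch_s3() {
    aws s3 cp --no-progress "${S3_URL}${1}" "${2}"
    export FETCH_STATUS=$?
}

# show_conda_info replaces the inline conda diagnostics the script used to run.
function show_conda_info() {
    conda info
    conda config --show-sources
    conda list --show-channel-urls
}
```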
2 changes: 2 additions & 0 deletions ci/scripts/jenkins/checks.sh
@@ -23,6 +23,8 @@ conda activate rapids
gpuci_logger "Installing CI dependencies"
mamba install -q -y -c conda-forge "yapf=0.32"

show_conda_info

gpuci_logger "Runing Python style checks"
${MORPHEUS_ROOT}/ci/scripts/python_checks.sh
