CubicSplineMT: Use thread pool
csparker247 committed Aug 24, 2024
1 parent 42a491a commit 37b7b25
Showing 5 changed files with 35 additions and 14 deletions.
6 changes: 6 additions & 0 deletions cmake/Buildthreadpool.cmake
@@ -0,0 +1,6 @@
+FetchContent_Declare(
+    threadpool
+    GIT_REPOSITORY https://github.com/bshoshany/thread-pool.git
+    GIT_TAG v4.1.0
+)
+FetchContent_MakeAvailable(threadpool)
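
For reference, the library fetched here is bshoshany's header-only C++17 thread pool; `FetchContent_MakeAvailable` exposes `threadpool_SOURCE_DIR`, which the segmentation target adds to its include path below. A minimal sketch of the v4-style API this commit relies on (pool construction, `submit_task`, and the returned `std::future`); the loop body is illustrative only:

#include <future>
#include <vector>

#include <BS_thread_pool.hpp>

int main()
{
    // Default construction sizes the pool to std::thread::hardware_concurrency()
    BS::thread_pool pool;

    // submit_task() queues a job and returns a std::future for its result
    std::vector<std::future<int>> futures;
    for (int i = 0; i < 8; ++i) {
        futures.emplace_back(pool.submit_task([i] { return i * i; }));
    }

    // get() blocks until the corresponding task has run
    for (auto& f : futures) {
        f.get();
    }
}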
3 changes: 3 additions & 0 deletions cmake/VCFindDependencies.cmake
@@ -87,6 +87,9 @@ include(Buildsmgl)
 ### libcore ###
 include(Buildlibcore)
 
+### thread-pool ###
+include(Buildthreadpool)
+
 ### Boost and indicators (for app use only)
 if(VC_BUILD_APPS OR VC_BUILD_UTILS)
     find_package(Boost 1.7 CONFIG REQUIRED COMPONENTS system program_options)
1 change: 1 addition & 0 deletions segmentation/CMakeLists.txt
@@ -22,6 +22,7 @@ add_library(VC::segmentation ALIAS "vc_segmentation")
 target_include_directories(vc_segmentation
     PUBLIC
         $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include>
+        $<BUILD_INTERFACE:${threadpool_SOURCE_DIR}/include>
         $<INSTALL_INTERFACE:include>
 )
 target_compile_options(vc_segmentation
38 changes: 24 additions & 14 deletions segmentation/src/CubicSplineMT.cpp
@@ -13,6 +13,7 @@
 #include <vector>
 
 #include <Eigen/Dense>
+#include <BS_thread_pool.hpp>
 #include <gsl/gsl_integration.h>
 
 #include "vc/core/util/Iteration.hpp"
@@ -27,6 +28,10 @@ using Params = std::vector<double>;
 namespace
 {
 
+// Compilation-unit local thread pool so threads are shared across runs
+// TODO: At some point, this should become a singleton in vc::core.
+BS::thread_pool POOL;
+
 template <typename T>
 auto linspace(const std::size_t num, const T low, const T high)
     -> std::vector<T>
@@ -143,17 +148,19 @@ auto FitSplineMT(
     std::vector cVec(n, 0.0);
     std::vector dVec(n, 0.0);
 
-    // TODO: There should be a thread pool for this
-    std::vector<std::thread> threads;
+    // Thread futures
+    std::vector<std::future<void>> futures;
 
-    // Auto-determine the number of threads
-    if (numThreads < 0) {
-        numThreads = static_cast<int>(std::thread::hardware_concurrency());
-        // TODO: Handle no threads
-        assert(numThreads != 0);
+    // Reset pool to the requested number of threads
+    if (numThreads < 1 and
+        POOL.get_thread_count() != std::thread::hardware_concurrency()) {
+        POOL.reset();
+    } else if (numThreads >= 1 and POOL.get_thread_count() != numThreads) {
+        POOL.reset(numThreads);
     }
+    numThreads = static_cast<int>(POOL.get_thread_count());
     Logger()->debug("Using {} threads", numThreads);
-    threads.reserve(numThreads);
+    futures.reserve(numThreads);
 
     // Fit spline windows on multiple threads
     const auto steps = static_cast<std::size_t>(
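
For clarity on the sizing branch above: in this thread-pool's API, `reset()` with no argument recreates the worker threads at the hardware default, while `reset(n)` recreates exactly `n`, so any `numThreads` below 1 now means "use `std::thread::hardware_concurrency()`". A hypothetical standalone helper, `resizePool`, expressing the same rule:

#include <thread>

#include <BS_thread_pool.hpp>

// Hypothetical helper mirroring the logic above: resize only when the
// requested count differs from the pool's current size.
void resizePool(BS::thread_pool& pool, int requested)
{
    if (requested < 1) {
        // Fall back to the hardware default
        if (pool.get_thread_count() != std::thread::hardware_concurrency()) {
            pool.reset();
        }
    } else if (pool.get_thread_count() != static_cast<unsigned int>(requested)) {
        pool.reset(static_cast<unsigned int>(requested));
    }
}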
@@ -179,15 +186,18 @@
         }
 
         // Queue the job
-        threads.emplace_back(
-            &FitSplineWindow, std::ref(range), std::ref(val), startIdx, endIdx,
-            winSize, bufSize, std::ref(mtx), std::ref(aVec), std::ref(bVec),
-            std::ref(cVec), std::ref(dVec));
+        futures.emplace_back(
+            POOL.submit_task([&range, &val, startIdx, endIdx, winSize, bufSize,
+                              &mtx, &aVec, &bVec, &cVec, &dVec] {
+                FitSplineWindow(
+                    range, val, startIdx, endIdx, winSize, bufSize, mtx, aVec,
+                    bVec, cVec, dVec);
+            }));
     }
 
     // Wait for all threads to complete
-    for (auto& t : threads) {
-        t.join();
+    for (const auto& t : futures) {
+        t.wait();
     }
 
     return {aVec, bVec, cVec, dVec};
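
Taken together, the new shape is: one shared pool, a capturing lambda per spline window, and a `wait()` per future in place of `std::thread` construction and `join()`. A reduced, self-contained sketch of that pattern, with a hypothetical `doWork` standing in for `FitSplineWindow`:

#include <cstddef>
#include <future>
#include <mutex>
#include <vector>

#include <BS_thread_pool.hpp>

namespace
{
// Shared at compilation-unit scope, as with POOL in the diff
BS::thread_pool POOL;

// Stand-in for FitSplineWindow: computes a value for one window and
// writes it to the shared output under the mutex
void doWork(std::size_t begin, std::size_t end, std::mutex& mtx,
            std::vector<double>& out)
{
    double local = 0.0;
    for (auto i = begin; i < end; ++i) {
        local += static_cast<double>(i);
    }
    const std::lock_guard<std::mutex> lock(mtx);
    out.push_back(local);
}
}  // namespace

int main()
{
    std::mutex mtx;
    std::vector<double> results;
    std::vector<std::future<void>> futures;

    // Queue one job per window; the lambda captures shared state by reference
    for (std::size_t w = 0; w < 4; ++w) {
        futures.emplace_back(POOL.submit_task(
            [w, &mtx, &results] { doWork(w * 100, (w + 1) * 100, mtx, results); }));
    }

    // Wait for every queued job before using the results
    for (const auto& f : futures) {
        f.wait();
    }
}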
1 change: 1 addition & 0 deletions segmentation/test/EnergyMetricsTest.cpp
@@ -2,6 +2,7 @@
 
 #include <cstddef>
 #include <numeric>
+#include <vector>
 
 #include "vc/segmentation/lrps/EnergyMetrics.hpp"
 #include "vc/testing/TestingUtils.hpp"
