[CI] Test building for 32-bit arch #10021

Merged (12 commits) on Jan 31, 2024
39 changes: 39 additions & 0 deletions .github/workflows/i386.yml
@@ -0,0 +1,39 @@
name: XGBoost-i386-test

on: [push, pull_request]

permissions:
  contents: read  # to fetch code (actions/checkout)

jobs:
  build-32bit:
    name: Build 32-bit
    runs-on: ubuntu-latest
    services:
      registry:
        image: registry:2
        ports:
          - 5000:5000
    steps:
      - uses: actions/checkout@v2.5.0
        with:
          submodules: 'true'
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver-opts: network=host
      - name: Build and push container
        uses: docker/build-push-action@v5
        with:
          context: .
          file: tests/ci_build/Dockerfile.i386
          push: true
          tags: localhost:5000/xgboost/build-32bit:latest
          cache-from: type=gha
          cache-to: type=gha,mode=max
      - name: Build XGBoost
        run: |
          docker run --rm -v $PWD:/workspace -w /workspace \
            -e CXXFLAGS='-Wno-error=overloaded-virtual -Wno-error=maybe-uninitialized -Wno-error=redundant-move' \
            localhost:5000/xgboost/build-32bit:latest \
            tests/ci_build/build_via_cmake.sh
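
The job above is self-contained, so it can be reproduced locally when debugging 32-bit build failures. A rough sketch, assuming Docker is installed; the image tag here is arbitrary and not part of the workflow:

  docker build -f tests/ci_build/Dockerfile.i386 -t xgboost-build-32bit:local .
  docker run --rm -v "$PWD":/workspace -w /workspace \
    -e CXXFLAGS='-Wno-error=overloaded-virtual -Wno-error=maybe-uninitialized -Wno-error=redundant-move' \
    xgboost-build-32bit:local \
    tests/ci_build/build_via_cmake.sh

The CI job pushes through a throwaway localhost:5000 registry so that the image built by Buildx is visible to the host Docker daemon and the GitHub Actions layer cache can be reused; for a one-off local run, a plain docker build is enough.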
3 changes: 0 additions & 3 deletions CMakeLists.txt
@@ -39,9 +39,6 @@ elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
message(FATAL_ERROR "Need Clang 9.0 or newer to build XGBoost")
endif()
endif()
- if(CMAKE_SIZE_OF_VOID_P EQUAL 4)
-   message(FATAL_ERROR "XGBoost does not support 32-bit archs. Please use 64-bit arch instead.")
- endif()

include(${xgboost_SOURCE_DIR}/cmake/PrefetchIntrinsics.cmake)
find_prefetch_intrinsics()
6 changes: 3 additions & 3 deletions src/c_api/c_api.cc
@@ -1,5 +1,5 @@
/**
- * Copyright 2014-2023 by XGBoost Contributors
+ * Copyright 2014-2024 by XGBoost Contributors
*/
#include "xgboost/c_api.h"

@@ -991,8 +991,8 @@ XGB_DLL int XGBoosterBoostOneIter(BoosterHandle handle, DMatrixHandle dtrain, bs
auto *learner = static_cast<Learner *>(handle);
auto ctx = learner->Ctx()->MakeCPU();

- auto t_grad = linalg::MakeTensorView(&ctx, common::Span{grad, len}, len);
- auto t_hess = linalg::MakeTensorView(&ctx, common::Span{hess, len}, len);
+ auto t_grad = linalg::MakeTensorView(&ctx, common::Span{grad, static_cast<size_t>(len)}, len);
+ auto t_hess = linalg::MakeTensorView(&ctx, common::Span{hess, static_cast<size_t>(len)}, len);

auto s_grad = linalg::ArrayInterfaceStr(t_grad);
auto s_hess = linalg::ArrayInterfaceStr(t_hess);
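
Most of the C++ changes in this diff follow one pattern: a span-like view is brace-initialized with a length whose type is a 64-bit integer (bst_ulong and similar count types), and on the i386 target std::size_t is only 32 bits wide, so list-initialization rejects the implicit narrowing conversion and the build fails. The static_cast simply makes that conversion explicit. A minimal sketch of the failure mode, using a hypothetical stand-in struct rather than XGBoost's actual common::Span:

  #include <cstddef>
  #include <cstdint>

  // Hypothetical {pointer, length} view, for illustration only.
  struct FloatView {
    const float* data;
    std::size_t size;  // 32 bits on an i386 build, 64 bits on x86_64
  };

  int main() {
    const float grad[4] = {0.f, 1.f, 2.f, 3.f};
    std::uint64_t len = 4;  // stand-in for bst_ulong, a 64-bit unsigned type

    // FloatView v{grad, len};  // ill-formed on 32-bit targets: braced initialization
    //                          // forbids narrowing uint64_t to a 32-bit std::size_t
    FloatView v{grad, static_cast<std::size_t>(len)};  // explicit cast, as in the hunks below
    return static_cast<int>(v.size);
  }

The remaining casts in hist_util.h, predictor.cc, and the test helpers appear to serve the same purpose: making conversions explicit where 32-bit and 64-bit integer types meet.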
10 changes: 5 additions & 5 deletions src/common/column_matrix.h
@@ -1,5 +1,5 @@
/**
- * Copyright 2017-2023, XGBoost Contributors
+ * Copyright 2017-2024, XGBoost Contributors
* \file column_matrix.h
* \brief Utility for fast column-wise access
* \author Philip Cho
@@ -176,7 +176,7 @@ class ColumnMatrix {
void SetValid(typename LBitField32::index_type i) { missing.Clear(i); }
/** @brief assign the storage to the view. */
void InitView() {
- missing = LBitField32{Span{storage.data(), storage.size()}};
+ missing = LBitField32{Span{storage.data(), static_cast<size_t>(storage.size())}};
}

void GrowTo(std::size_t n_elements, bool init) {
@@ -318,8 +318,8 @@ class ColumnMatrix {
common::Span<const BinIdxType> bin_index = {
reinterpret_cast<const BinIdxType*>(&index_[feature_offset * bins_type_size_]),
column_size};
- return std::move(DenseColumnIter<BinIdxType, any_missing>{
-     bin_index, static_cast<bst_bin_t>(index_base_[fidx]), missing_.missing, feature_offset});
+ return DenseColumnIter<BinIdxType, any_missing>{
+     bin_index, static_cast<bst_bin_t>(index_base_[fidx]), missing_.missing, feature_offset};
}

// all columns are dense column and has no missing value
@@ -332,7 +332,7 @@
DispatchBinType(bins_type_size_, [&](auto t) {
using ColumnBinT = decltype(t);
auto column_index = Span<ColumnBinT>{reinterpret_cast<ColumnBinT*>(index_.data()),
-     index_.size() / sizeof(ColumnBinT)};
+     static_cast<size_t>(index_.size() / sizeof(ColumnBinT))};
ParallelFor(n_samples, n_threads, [&](auto rid) {
rid += base_rowid;
const size_t ibegin = rid * n_features;
10 changes: 5 additions & 5 deletions src/common/hist_util.h
@@ -1,5 +1,5 @@
/**
- * Copyright 2017-2023 by XGBoost Contributors
+ * Copyright 2017-2024 by XGBoost Contributors
* \file hist_util.h
* \brief Utility for fast histogram aggregation
* \author Philip Cho, Tianqi Chen
@@ -113,8 +113,8 @@ class HistogramCuts {
auto end = ptrs[column_id + 1];
auto beg = ptrs[column_id];
auto it = std::upper_bound(values.cbegin() + beg, values.cbegin() + end, value);
- auto idx = it - values.cbegin();
- idx -= !!(idx == end);
+ auto idx = static_cast<bst_bin_t>(it - values.cbegin());
+ idx -= !!(idx == static_cast<bst_bin_t>(end));
return idx;
}

@@ -136,8 +136,8 @@
auto beg = ptrs[fidx] + vals.cbegin();
// Truncates the value in case it's not perfectly rounded.
auto v = static_cast<float>(common::AsCat(value));
- auto bin_idx = std::lower_bound(beg, end, v) - vals.cbegin();
- if (bin_idx == ptrs.at(fidx + 1)) {
+ auto bin_idx = static_cast<bst_bin_t>(std::lower_bound(beg, end, v) - vals.cbegin());
+ if (bin_idx == static_cast<bst_bin_t>(ptrs.at(fidx + 1))) {
bin_idx -= 1;
}
return bin_idx;
4 changes: 2 additions & 2 deletions src/common/ref_resource_view.h
@@ -1,5 +1,5 @@
/**
- * Copyright 2023, XGBoost Contributors
+ * Copyright 2023-2024, XGBoost Contributors
*/
#ifndef XGBOOST_COMMON_REF_RESOURCE_VIEW_H_
#define XGBOOST_COMMON_REF_RESOURCE_VIEW_H_
@@ -76,7 +76,7 @@ class RefResourceView {

[[nodiscard]] size_type size() const { return size_; } // NOLINT
[[nodiscard]] size_type size_bytes() const { // NOLINT
- return Span<const value_type>{data(), size()}.size_bytes();
+ return Span<const value_type>{data(), static_cast<size_t>(size())}.size_bytes();
}
[[nodiscard]] value_type* data() { return ptr_; }; // NOLINT
[[nodiscard]] value_type const* data() const { return ptr_; }; // NOLINT
5 changes: 3 additions & 2 deletions src/data/gradient_index.cc
@@ -1,5 +1,5 @@
/**
- * Copyright 2017-2023, XGBoost Contributors
+ * Copyright 2017-2024, XGBoost Contributors
* \brief Data type for fast histogram aggregation.
*/
#include "gradient_index.h"
@@ -148,7 +148,8 @@ void GHistIndexMatrix::ResizeIndex(const size_t n_index, const bool isDense) {
new_vec = {new_ptr, n_bytes / sizeof(std::uint8_t), malloc_resource};
}
this->data = std::move(new_vec);
- this->index = common::Index{common::Span{data.data(), data.size()}, t_size};
+ this->index = common::Index{common::Span{data.data(), static_cast<size_t>(data.size())},
+     t_size};
};

if ((MaxNumBinPerFeat() - 1 <= static_cast<int>(std::numeric_limits<uint8_t>::max())) &&
6 changes: 4 additions & 2 deletions src/data/gradient_index_format.cc
@@ -1,5 +1,5 @@
/**
- * Copyright 2021-2023 XGBoost contributors
+ * Copyright 2021-2024 XGBoost contributors
*/
#include <cstddef> // for size_t
#include <cstdint> // for uint8_t
@@ -40,7 +40,9 @@ class GHistIndexRawFormat : public SparsePageFormat<GHistIndexMatrix> {
return false;
}
// - index
- page->index = common::Index{common::Span{page->data.data(), page->data.size()}, size_type};
+ page->index =
+     common::Index{common::Span{page->data.data(), static_cast<size_t>(page->data.size())},
+         size_type};

// hit count
if (!common::ReadVec(fi, &page->hit_count)) {
4 changes: 2 additions & 2 deletions src/predictor/predictor.cc
@@ -1,5 +1,5 @@
/**
- * Copyright 2017-2023 by Contributors
+ * Copyright 2017-2024 by Contributors
*/
#include "xgboost/predictor.h"

@@ -46,7 +46,7 @@ void ValidateBaseMarginShape(linalg::Tensor<float, D> const& margin, bst_row_t n
void Predictor::InitOutPredictions(const MetaInfo& info, HostDeviceVector<bst_float>* out_preds,
const gbm::GBTreeModel& model) const {
CHECK_NE(model.learner_model_param->num_output_group, 0);
- std::size_t n{model.learner_model_param->OutputLength() * info.num_row_};
+ auto n = static_cast<size_t>(model.learner_model_param->OutputLength() * info.num_row_);

const HostDeviceVector<bst_float>* base_margin = info.base_margin_.Data();
if (ctx_->Device().IsCUDA()) {
8 changes: 5 additions & 3 deletions src/tree/hist/hist_cache.h
@@ -1,5 +1,5 @@
/**
- * Copyright 2023 by XGBoost Contributors
+ * Copyright 2023-2024 by XGBoost Contributors
*/
#ifndef XGBOOST_TREE_HIST_HIST_CACHE_H_
#define XGBOOST_TREE_HIST_HIST_CACHE_H_
@@ -48,11 +48,13 @@ class BoundedHistCollection {
BoundedHistCollection() = default;
common::GHistRow operator[](std::size_t idx) {
auto offset = node_map_.at(idx);
- return common::Span{data_->data(), data_->size()}.subspan(offset, n_total_bins_);
+ return common::Span{data_->data(), static_cast<size_t>(data_->size())}.subspan(
+     offset, n_total_bins_);
}
common::ConstGHistRow operator[](std::size_t idx) const {
auto offset = node_map_.at(idx);
- return common::Span{data_->data(), data_->size()}.subspan(offset, n_total_bins_);
+ return common::Span{data_->data(), static_cast<size_t>(data_->size())}.subspan(
+     offset, n_total_bins_);
}
void Reset(bst_bin_t n_total_bins, std::size_t n_cached_nodes) {
n_total_bins_ = n_total_bins;
8 changes: 8 additions & 0 deletions tests/ci_build/Dockerfile.i386
@@ -0,0 +1,8 @@
FROM i386/debian:sid

ENV DEBIAN_FRONTEND noninteractive
SHELL ["/bin/bash", "-c"] # Use Bash as shell

RUN \
    apt-get update && \
    apt-get install -y tar unzip wget git build-essential ninja-build cmake
6 changes: 3 additions & 3 deletions tests/cpp/c_api/test_c_api.cc
@@ -1,5 +1,5 @@
/**
- * Copyright 2019-2023 XGBoost contributors
+ * Copyright 2019-2024 XGBoost contributors
*/
#include <gtest/gtest.h>
#include <xgboost/c_api.h>
@@ -212,8 +212,8 @@ TEST(CAPI, JsonModelIO) {
bst_ulong saved_len{0};
XGBoosterSaveModelToBuffer(handle, R"({"format": "ubj"})", &saved_len, &saved);
ASSERT_EQ(len, saved_len);
- auto l = StringView{data, len};
- auto r = StringView{saved, saved_len};
+ auto l = StringView{data, static_cast<size_t>(len)};
+ auto r = StringView{saved, static_cast<size_t>(saved_len)};
ASSERT_EQ(l.size(), r.size());
ASSERT_EQ(l, r);

6 changes: 3 additions & 3 deletions tests/cpp/helpers.cc
@@ -1,5 +1,5 @@
/**
- * Copyright 2016-2023 by XGBoost contributors
+ * Copyright 2016-2024 by XGBoost contributors
*/
#include "helpers.h"

@@ -216,7 +216,7 @@ SimpleLCG::StateType SimpleLCG::Max() const { return max(); }
static_assert(SimpleLCG::max() - SimpleLCG::min());

void RandomDataGenerator::GenerateLabels(std::shared_ptr<DMatrix> p_fmat) const {
- RandomDataGenerator{p_fmat->Info().num_row_, this->n_targets_, 0.0f}.GenerateDense(
+ RandomDataGenerator{static_cast<bst_row_t>(p_fmat->Info().num_row_), this->n_targets_, 0.0f}.GenerateDense(
p_fmat->Info().labels.Data());
CHECK_EQ(p_fmat->Info().labels.Size(), this->rows_ * this->n_targets_);
p_fmat->Info().labels.Reshape(this->rows_, this->n_targets_);
@@ -458,7 +458,7 @@ void RandomDataGenerator::GenerateCSR(
EXPECT_EQ(row_count, dmat->Info().num_row_);

if (with_label) {
- RandomDataGenerator{dmat->Info().num_row_, this->n_targets_, 0.0f}.GenerateDense(
+ RandomDataGenerator{static_cast<bst_row_t>(dmat->Info().num_row_), this->n_targets_, 0.0f}.GenerateDense(
dmat->Info().labels.Data());
CHECK_EQ(dmat->Info().labels.Size(), this->rows_ * this->n_targets_);
dmat->Info().labels.Reshape(this->rows_, this->n_targets_);
4 changes: 2 additions & 2 deletions tests/cpp/helpers.h
@@ -1,5 +1,5 @@
/**
- * Copyright 2016-2023 by XGBoost contributors
+ * Copyright 2016-2024 by XGBoost contributors
*/
#pragma once

@@ -238,7 +238,7 @@ class RandomDataGenerator {

bst_bin_t bins_{0};
std::vector<FeatureType> ft_;
- bst_cat_t max_cat_;
+ bst_cat_t max_cat_{32};

Json ArrayInterfaceImpl(HostDeviceVector<float>* storage, size_t rows, size_t cols) const;
