From ba2486ab0acc555c0fcea9250e76ded97a51ca4c Mon Sep 17 00:00:00 2001 From: dangell7 Date: Mon, 2 Jan 2023 17:23:25 -0500 Subject: [PATCH 1/3] fixNFTokenBrokerAccept add amendment add comment add test adjust tests broker settle ONLY non destination offers --- src/ripple/app/tx/impl/NFTokenAcceptOffer.cpp | 12 +- src/ripple/protocol/Feature.h | 3 +- src/test/app/NFToken_test.cpp | 114 ++++++++++++++++-- 3 files changed, 115 insertions(+), 14 deletions(-) diff --git a/src/ripple/app/tx/impl/NFTokenAcceptOffer.cpp b/src/ripple/app/tx/impl/NFTokenAcceptOffer.cpp index 07fe9957a76..7b298387f11 100644 --- a/src/ripple/app/tx/impl/NFTokenAcceptOffer.cpp +++ b/src/ripple/app/tx/impl/NFTokenAcceptOffer.cpp @@ -112,19 +112,19 @@ NFTokenAcceptOffer::preclaim(PreclaimContext const& ctx) if ((*so)[sfAmount] > (*bo)[sfAmount]) return tecINSUFFICIENT_PAYMENT; - // If the buyer specified a destination, that destination must be - // the seller or the broker. + // If the buyer specified a destination if (auto const dest = bo->at(~sfDestination)) { - if (*dest != so->at(sfOwner) && *dest != ctx.tx[sfAccount]) + // that destination must be the tx account + if (*dest != ctx.tx[sfAccount]) return tecNFTOKEN_BUY_SELL_MISMATCH; } - // If the seller specified a destination, that destination must be - // the buyer or the broker. + // If the seller specified a destination if (auto const dest = so->at(~sfDestination)) { - if (*dest != bo->at(sfOwner) && *dest != ctx.tx[sfAccount]) + // that destination must be the tx account + if (*dest != ctx.tx[sfAccount]) return tecNFTOKEN_BUY_SELL_MISMATCH; } diff --git a/src/ripple/protocol/Feature.h b/src/ripple/protocol/Feature.h index fac54c2fa71..dd54600434a 100644 --- a/src/ripple/protocol/Feature.h +++ b/src/ripple/protocol/Feature.h @@ -74,7 +74,7 @@ namespace detail { // Feature.cpp. Because it's only used to reserve storage, and determine how // large to make the FeatureBitset, it MAY be larger. It MUST NOT be less than // the actual number of amendments. A LogicError on startup will verify this. -static constexpr std::size_t numFeatures = 53; +static constexpr std::size_t numFeatures = 54; /** Amendments that this server supports and the default voting behavior. Whether they are enabled depends on the Rules defined in the validated @@ -340,6 +340,7 @@ extern uint256 const featureNonFungibleTokensV1_1; extern uint256 const fixTrustLinesToSelf; extern uint256 const fixRemoveNFTokenAutoTrustLine; extern uint256 const featureImmediateOfferKilled; +extern uint256 const fixNFTokenBrokerAccept; } // namespace ripple diff --git a/src/test/app/NFToken_test.cpp b/src/test/app/NFToken_test.cpp index 2fb27f8a352..46a38902b8a 100644 --- a/src/test/app/NFToken_test.cpp +++ b/src/test/app/NFToken_test.cpp @@ -2924,20 +2924,19 @@ class NFToken_test : public beast::unit_test::suite BEAST_EXPECT(ownerCount(env, minter) == 1); BEAST_EXPECT(ownerCount(env, buyer) == 2); - // Broker is successful when destination is buyer. - env(token::brokerOffers( - broker, offerMinterToBuyer, offerBuyerToMinter)); + // Buyer is successful when destination is buyer. + env(token::acceptBuyOffer(buyer, offerMinterToBuyer)); env.close(); BEAST_EXPECT(ownerCount(env, issuer) == 1); BEAST_EXPECT(ownerCount(env, minter) == 1); - BEAST_EXPECT(ownerCount(env, buyer) == 0); + BEAST_EXPECT(ownerCount(env, buyer) == 1); // Clean out the unconsumed offer. 
env(token::cancelOffer(issuer, {offerIssuerToBuyer})); env.close(); BEAST_EXPECT(ownerCount(env, issuer) == 0); BEAST_EXPECT(ownerCount(env, minter) == 1); - BEAST_EXPECT(ownerCount(env, buyer) == 0); + BEAST_EXPECT(ownerCount(env, buyer) == 1); } // Show that if a buy and a sell offer both have the same destination, @@ -2963,7 +2962,7 @@ class NFToken_test : public beast::unit_test::suite env.close(); BEAST_EXPECT(ownerCount(env, issuer) == 0); BEAST_EXPECT(ownerCount(env, minter) == 2); - BEAST_EXPECT(ownerCount(env, buyer) == 1); + BEAST_EXPECT(ownerCount(env, buyer) == 2); // Broker is successful if they are the destination of both offers. env(token::brokerOffers( @@ -2971,7 +2970,7 @@ class NFToken_test : public beast::unit_test::suite env.close(); BEAST_EXPECT(ownerCount(env, issuer) == 0); BEAST_EXPECT(ownerCount(env, minter) == 0); - BEAST_EXPECT(ownerCount(env, buyer) == 1); + BEAST_EXPECT(ownerCount(env, buyer) == 2); } } @@ -4184,6 +4183,106 @@ class NFToken_test : public beast::unit_test::suite } } + void + testBrokeredAcceptDest(FeatureBitset features) + { + testcase("Brokered NFT offer accept w/ destination"); + + using namespace test::jtx; + + Env env{*this, features}; + + Account const minter{"minter"}; + Account const buyer{"buyer"}; + Account const broker{"broker"}; + + env.fund(XRP(1000), minter, buyer, broker); + env.close(); + + // Lambda that mints an NFT and returns the nftID. + auto mintNFT = [&env, &minter](std::uint16_t xferFee = 0) { + uint256 const nftID = + token::getNextID(env, minter, 0, tfTransferable, xferFee); + env(token::mint(minter, 0), + token::xferFee(xferFee), + txflags(tfTransferable)); + env.close(); + return nftID; + }; + // test buyer is destination on sell offer + { + uint256 const nftID = mintNFT(); + + // buyer creates their offer. + uint256 const buyOfferIndex = + keylet::nftoffer(buyer, env.seq(buyer)).key; + env(token::createOffer(buyer, nftID, XRP(315)), token::owner(minter)); + env.close(); + + // minter creates their offer. + uint256 const sellOfferIndex = + keylet::nftoffer(minter, env.seq(minter)).key; + env(token::createOffer(minter, nftID, XRP(0)), + token::destination(buyer), + txflags(tfSellNFToken)); + env.close(); + + auto const minterBalance = env.balance(minter); + auto const buyerBalance = env.balance(buyer); + auto const brokerBalance = env.balance(broker); + + // Broker is not destination + env(token::brokerOffers(broker, buyOfferIndex, sellOfferIndex), ter(tecNFTOKEN_BUY_SELL_MISMATCH)); + env.close(); + + // Buyer is destination can accept sell + env(token::acceptSellOffer(buyer, sellOfferIndex)); + env.close(); + + BEAST_EXPECT(env.balance(minter) == minterBalance); + BEAST_EXPECT(env.balance(buyer) == buyerBalance - drops(10)); + BEAST_EXPECT(env.balance(broker) == brokerBalance - drops(10)); + + // Burn the NFT so the next test starts with a clean state. + env(token::burn(buyer, nftID)); + env.close(); + } + // test broker is destination on sell offer + { + uint256 const nftID = mintNFT(); + + // buyer creates their offer. + uint256 const buyOfferIndex = + keylet::nftoffer(buyer, env.seq(buyer)).key; + env(token::createOffer(buyer, nftID, XRP(315)), token::owner(minter)); + env.close(); + + // minter creates their offer. 
+ uint256 const sellOfferIndex = + keylet::nftoffer(minter, env.seq(minter)).key; + env(token::createOffer(minter, nftID, XRP(0)), + token::destination(broker), + txflags(tfSellNFToken)); + env.close(); + + auto const minterBalance = env.balance(minter); + auto const buyerBalance = env.balance(buyer); + auto const brokerBalance = env.balance(broker); + + // Broker is destination + env(token::brokerOffers(broker, buyOfferIndex, sellOfferIndex)); + env.close(); + + BEAST_EXPECT(env.balance(minter) == minterBalance + XRP(315)); + BEAST_EXPECT(env.balance(buyer) == buyerBalance - XRP(315)); + BEAST_EXPECT(env.balance(broker) == brokerBalance - drops(10)); + + // Burn the NFT so the next test starts with a clean state. + env(token::burn(buyer, nftID)); + env.close(); + } + } + void testNFTokenOfferOwner(FeatureBitset features) { @@ -4933,6 +5032,7 @@ class NFToken_test : public beast::unit_test::suite testCancelOffers(features); testCancelTooManyOffers(features); testBrokeredAccept(features); + testBrokeredAcceptDest(features); testNFTokenOfferOwner(features); testNFTokenWithTickets(features); testNFTokenDeleteAccount(features); From 07997c688b6bc8da06be77aa6595a36396c50500 Mon Sep 17 00:00:00 2001 From: dangell7 Date: Mon, 2 Jan 2023 17:23:25 -0500 Subject: [PATCH 2/3] fixNFTokenBrokerAccept --- src/ripple/app/tx/impl/NFTokenAcceptOffer.cpp | 12 +- src/ripple/protocol/Feature.h | 3 +- src/test/app/NFToken_test.cpp | 117 ++++++++++++++++-- 3 files changed, 118 insertions(+), 14 deletions(-) diff --git a/src/ripple/app/tx/impl/NFTokenAcceptOffer.cpp b/src/ripple/app/tx/impl/NFTokenAcceptOffer.cpp index 07fe9957a76..7b298387f11 100644 --- a/src/ripple/app/tx/impl/NFTokenAcceptOffer.cpp +++ b/src/ripple/app/tx/impl/NFTokenAcceptOffer.cpp @@ -112,19 +112,19 @@ NFTokenAcceptOffer::preclaim(PreclaimContext const& ctx) if ((*so)[sfAmount] > (*bo)[sfAmount]) return tecINSUFFICIENT_PAYMENT; - // If the buyer specified a destination, that destination must be - // the seller or the broker. + // If the buyer specified a destination if (auto const dest = bo->at(~sfDestination)) { - if (*dest != so->at(sfOwner) && *dest != ctx.tx[sfAccount]) + // that destination must be the tx account + if (*dest != ctx.tx[sfAccount]) return tecNFTOKEN_BUY_SELL_MISMATCH; } - // If the seller specified a destination, that destination must be - // the buyer or the broker. + // If the seller specified a destination if (auto const dest = so->at(~sfDestination)) { - if (*dest != bo->at(sfOwner) && *dest != ctx.tx[sfAccount]) + // that destination must be the tx account + if (*dest != ctx.tx[sfAccount]) return tecNFTOKEN_BUY_SELL_MISMATCH; } diff --git a/src/ripple/protocol/Feature.h b/src/ripple/protocol/Feature.h index fac54c2fa71..dd54600434a 100644 --- a/src/ripple/protocol/Feature.h +++ b/src/ripple/protocol/Feature.h @@ -74,7 +74,7 @@ namespace detail { // Feature.cpp. Because it's only used to reserve storage, and determine how // large to make the FeatureBitset, it MAY be larger. It MUST NOT be less than // the actual number of amendments. A LogicError on startup will verify this. -static constexpr std::size_t numFeatures = 53; +static constexpr std::size_t numFeatures = 54; /** Amendments that this server supports and the default voting behavior. 
Whether they are enabled depends on the Rules defined in the validated @@ -340,6 +340,7 @@ extern uint256 const featureNonFungibleTokensV1_1; extern uint256 const fixTrustLinesToSelf; extern uint256 const fixRemoveNFTokenAutoTrustLine; extern uint256 const featureImmediateOfferKilled; +extern uint256 const fixNFTokenBrokerAccept; } // namespace ripple diff --git a/src/test/app/NFToken_test.cpp b/src/test/app/NFToken_test.cpp index 2fb27f8a352..ecaf1bcc13d 100644 --- a/src/test/app/NFToken_test.cpp +++ b/src/test/app/NFToken_test.cpp @@ -2924,20 +2924,19 @@ class NFToken_test : public beast::unit_test::suite BEAST_EXPECT(ownerCount(env, minter) == 1); BEAST_EXPECT(ownerCount(env, buyer) == 2); - // Broker is successful when destination is buyer. - env(token::brokerOffers( - broker, offerMinterToBuyer, offerBuyerToMinter)); + // Buyer is successful when destination is buyer. + env(token::acceptBuyOffer(buyer, offerMinterToBuyer)); env.close(); BEAST_EXPECT(ownerCount(env, issuer) == 1); BEAST_EXPECT(ownerCount(env, minter) == 1); - BEAST_EXPECT(ownerCount(env, buyer) == 0); + BEAST_EXPECT(ownerCount(env, buyer) == 1); // Clean out the unconsumed offer. env(token::cancelOffer(issuer, {offerIssuerToBuyer})); env.close(); BEAST_EXPECT(ownerCount(env, issuer) == 0); BEAST_EXPECT(ownerCount(env, minter) == 1); - BEAST_EXPECT(ownerCount(env, buyer) == 0); + BEAST_EXPECT(ownerCount(env, buyer) == 1); } // Show that if a buy and a sell offer both have the same destination, @@ -2963,7 +2962,7 @@ class NFToken_test : public beast::unit_test::suite env.close(); BEAST_EXPECT(ownerCount(env, issuer) == 0); BEAST_EXPECT(ownerCount(env, minter) == 2); - BEAST_EXPECT(ownerCount(env, buyer) == 1); + BEAST_EXPECT(ownerCount(env, buyer) == 2); // Broker is successful if they are the destination of both offers. env(token::brokerOffers( @@ -2971,7 +2970,7 @@ class NFToken_test : public beast::unit_test::suite env.close(); BEAST_EXPECT(ownerCount(env, issuer) == 0); BEAST_EXPECT(ownerCount(env, minter) == 0); - BEAST_EXPECT(ownerCount(env, buyer) == 1); + BEAST_EXPECT(ownerCount(env, buyer) == 2); } } @@ -4184,6 +4183,109 @@ class NFToken_test : public beast::unit_test::suite } } + void + testBrokeredAcceptDest(FeatureBitset features) + { + testcase("Brokered NFT offer accept w/ destination"); + + using namespace test::jtx; + + Env env{*this, features}; + + Account const minter{"minter"}; + Account const buyer{"buyer"}; + Account const broker{"broker"}; + + env.fund(XRP(1000), minter, buyer, broker); + env.close(); + + // Lambda that mints an NFT and returns the nftID. + auto mintNFT = [&env, &minter](std::uint16_t xferFee = 0) { + uint256 const nftID = + token::getNextID(env, minter, 0, tfTransferable, xferFee); + env(token::mint(minter, 0), + token::xferFee(xferFee), + txflags(tfTransferable)); + env.close(); + return nftID; + }; + // test buyer is destination on sell offer + { + uint256 const nftID = mintNFT(); + + // buyer creates their offer. + uint256 const buyOfferIndex = + keylet::nftoffer(buyer, env.seq(buyer)).key; + env(token::createOffer(buyer, nftID, XRP(315)), + token::owner(minter)); + env.close(); + + // minter creates their offer. 
+ uint256 const sellOfferIndex = + keylet::nftoffer(minter, env.seq(minter)).key; + env(token::createOffer(minter, nftID, XRP(0)), + token::destination(buyer), + txflags(tfSellNFToken)); + env.close(); + + auto const minterBalance = env.balance(minter); + auto const buyerBalance = env.balance(buyer); + auto const brokerBalance = env.balance(broker); + + // Broker is not destination + env(token::brokerOffers(broker, buyOfferIndex, sellOfferIndex), + ter(tecNFTOKEN_BUY_SELL_MISMATCH)); + env.close(); + + // Buyer is destination can accept sell + env(token::acceptSellOffer(buyer, sellOfferIndex)); + env.close(); + + BEAST_EXPECT(env.balance(minter) == minterBalance); + BEAST_EXPECT(env.balance(buyer) == buyerBalance - drops(10)); + BEAST_EXPECT(env.balance(broker) == brokerBalance - drops(10)); + + // Burn the NFT so the next test starts with a clean state. + env(token::burn(buyer, nftID)); + env.close(); + } + // test broker is destination on sell offer + { + uint256 const nftID = mintNFT(); + + // buyer creates their offer. + uint256 const buyOfferIndex = + keylet::nftoffer(buyer, env.seq(buyer)).key; + env(token::createOffer(buyer, nftID, XRP(315)), + token::owner(minter)); + env.close(); + + // minter creates their offer. + uint256 const sellOfferIndex = + keylet::nftoffer(minter, env.seq(minter)).key; + env(token::createOffer(minter, nftID, XRP(0)), + token::destination(broker), + txflags(tfSellNFToken)); + env.close(); + + auto const minterBalance = env.balance(minter); + auto const buyerBalance = env.balance(buyer); + auto const brokerBalance = env.balance(broker); + + // Broker is destination + env(token::brokerOffers(broker, buyOfferIndex, sellOfferIndex)); + env.close(); + + BEAST_EXPECT(env.balance(minter) == minterBalance + XRP(315)); + BEAST_EXPECT(env.balance(buyer) == buyerBalance - XRP(315)); + BEAST_EXPECT(env.balance(broker) == brokerBalance - drops(10)); + + // Burn the NFT so the next test starts with a clean state. + env(token::burn(buyer, nftID)); + env.close(); + } + } + void testNFTokenOfferOwner(FeatureBitset features) { @@ -4933,6 +5035,7 @@ class NFToken_test : public beast::unit_test::suite testCancelOffers(features); testCancelTooManyOffers(features); testBrokeredAccept(features); + testBrokeredAcceptDest(features); testNFTokenOfferOwner(features); testNFTokenWithTickets(features); testNFTokenDeleteAccount(features); From 5a828b39b8b94ad83cc4443b191d34964b41d87d Mon Sep 17 00:00:00 2001 From: Denis Angell Date: Wed, 25 Jan 2023 18:10:42 -0500 Subject: [PATCH 3/3] Revert "Merge branch 'xls20fix' of github.com:Transia-RnD/rippled into xls20fix" This reverts commit 3af45961675773a7a3326919dd5f340c832e0fe9, reversing changes made to 07997c688b6bc8da06be77aa6595a36396c50500. 
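The NFTokenAcceptOffer::preclaim hunks in the first two patches tighten the destination check so that an offer naming an sfDestination can only be consumed by the transaction's own account, while Feature.h gains a fixNFTokenBrokerAccept amendment. The sketch below is illustrative only and not part of any patch in this series: it shows how that check might be gated on the new amendment, assuming the `ctx.view.rules().enabled(...)` amendment-gating pattern used elsewhere in rippled preclaim code; as posted, the hunks replace the old check directly.

```cpp
// Sketch only -- not the patch's code. Assumes the usual amendment-gating
// pattern; variable names (so, bo, ctx) follow NFTokenAcceptOffer::preclaim.
if (auto const dest = so->at(~sfDestination))
{
    if (ctx.view.rules().enabled(fixNFTokenBrokerAccept))
    {
        // Post-amendment rule: only the account submitting the
        // NFTokenAcceptOffer (buyer or broker) may consume an offer
        // that names a destination.
        if (*dest != ctx.tx[sfAccount])
            return tecNFTOKEN_BUY_SELL_MISMATCH;
    }
    else
    {
        // Pre-amendment rule: the destination may be either the
        // counterparty (buyer) or the broker.
        if (*dest != bo->at(sfOwner) && *dest != ctx.tx[sfAccount])
            return tecNFTOKEN_BUY_SELL_MISMATCH;
    }
    // (the buy-offer destination check would be gated the same way)
}
```

The new testBrokeredAcceptDest cases exercise the tightened rule: brokering a sell offer whose destination is the buyer fails with tecNFTOKEN_BUY_SELL_MISMATCH, while the named destination itself (buyer accepting directly, or broker brokering) succeeds.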
--- .github/workflows/doxygen.yml | 11 +- .github/workflows/nix.yml | 95 - .github/workflows/windows.yml | 89 - .gitlab-ci.yml | 169 ++ .travis.yml | 460 ++++ BUILD.md | 434 ---- Builds/CMake/FindRocksDB.cmake | 62 + Builds/CMake/README.md | 18 + Builds/CMake/RippledCore.cmake | 5 +- Builds/CMake/RippledDocs.cmake | 157 +- Builds/CMake/RippledInstall.cmake | 12 + Builds/CMake/RippledInterface.cmake | 15 +- Builds/CMake/RippledMultiConfig.cmake | 2 +- Builds/CMake/RippledNIH.cmake | 33 + Builds/CMake/RippledRelease.cmake | 11 +- Builds/CMake/RippledSanity.cmake | 10 +- Builds/CMake/deps/Boost.cmake | 51 +- Builds/CMake/deps/Ed25519-donna.cmake | 28 + Builds/CMake/deps/FindBoost.cmake | 2170 +++++++++++++++++ Builds/CMake/deps/Findjemalloc.cmake | 47 + Builds/CMake/deps/Findlibarchive_pc.cmake | 22 + Builds/CMake/deps/Findlz4.cmake | 24 + Builds/CMake/deps/Findsecp256k1.cmake | 24 + Builds/CMake/deps/Findsnappy.cmake | 24 + Builds/CMake/deps/Findsoci.cmake | 17 + Builds/CMake/deps/Findsqlite.cmake | 24 + Builds/CMake/deps/Libarchive.cmake | 163 ++ Builds/CMake/deps/Lz4.cmake | 79 + Builds/CMake/deps/Nudb.cmake | 31 + Builds/CMake/deps/OpenSSL.cmake | 48 + Builds/CMake/deps/Postgres.cmake | 70 + Builds/CMake/deps/Protobuf.cmake | 167 +- Builds/CMake/deps/Rocksdb.cmake | 177 ++ Builds/CMake/deps/Secp256k1.cmake | 58 + Builds/CMake/deps/Snappy.cmake | 77 + Builds/CMake/deps/Soci.cmake | 165 ++ Builds/CMake/deps/Sqlite.cmake | 93 + Builds/CMake/deps/cassandra.cmake | 167 ++ Builds/CMake/deps/date.cmake | 18 + Builds/CMake/deps/gRPC.cmake | 344 ++- Builds/CMake/rocks_thirdparty.inc | 15 + Builds/CMake/rocksdb_build_version.cc.in | 71 + Builds/CMake/soci_patch.cmake | 49 + CMakeLists.txt | 90 +- conanfile.py | 149 -- external/rocksdb/conanfile.py | 193 -- external/rocksdb/thirdparty.inc | 62 - src/ed25519-donna/CMakeLists.txt | 48 - src/ripple/app/tx/impl/CreateCheck.cpp | 10 +- src/ripple/app/tx/impl/NFTokenCreateOffer.cpp | 41 +- src/ripple/app/tx/impl/PayChan.cpp | 13 +- src/ripple/app/tx/impl/SetAccount.cpp | 24 - src/ripple/app/tx/impl/SetTrust.cpp | 14 - src/ripple/net/impl/RPCCall.cpp | 11 +- src/ripple/protocol/ErrorCodes.h | 26 +- src/ripple/protocol/Feature.h | 3 +- src/ripple/protocol/LedgerFormats.h | 11 - src/ripple/protocol/TxFlags.h | 7 - src/ripple/protocol/impl/ErrorCodes.cpp | 180 +- src/ripple/protocol/impl/Feature.cpp | 1 - src/ripple/protocol/impl/PublicKey.cpp | 3 +- src/ripple/protocol/impl/SecretKey.cpp | 2 +- src/ripple/rpc/handlers/WalletPropose.cpp | 2 +- src/ripple/rpc/impl/ServerHandlerImp.cpp | 26 +- src/ripple/server/impl/JSONRPCUtil.cpp | 17 +- src/secp256k1/CMakeLists.txt | 52 - src/test/app/Check_test.cpp | 98 - src/test/app/NFToken_test.cpp | 133 - src/test/app/PayChan_test.cpp | 230 +- src/test/app/SetTrust_test.cpp | 143 +- src/test/core/SociDB_test.cpp | 13 +- src/test/protocol/Memo_test.cpp | 123 - src/test/rpc/AccountSet_test.cpp | 15 +- src/test/rpc/LedgerRPC_test.cpp | 2 +- src/test/rpc/NodeToShardRPC_test.cpp | 2 +- 75 files changed, 5284 insertions(+), 2266 deletions(-) delete mode 100644 .github/workflows/nix.yml delete mode 100644 .github/workflows/windows.yml create mode 100644 .gitlab-ci.yml create mode 100644 .travis.yml delete mode 100644 BUILD.md create mode 100644 Builds/CMake/FindRocksDB.cmake create mode 100644 Builds/CMake/README.md create mode 100644 Builds/CMake/RippledNIH.cmake create mode 100644 Builds/CMake/deps/Ed25519-donna.cmake create mode 100644 Builds/CMake/deps/FindBoost.cmake create mode 100644 Builds/CMake/deps/Findjemalloc.cmake 
create mode 100644 Builds/CMake/deps/Findlibarchive_pc.cmake create mode 100644 Builds/CMake/deps/Findlz4.cmake create mode 100644 Builds/CMake/deps/Findsecp256k1.cmake create mode 100644 Builds/CMake/deps/Findsnappy.cmake create mode 100644 Builds/CMake/deps/Findsoci.cmake create mode 100644 Builds/CMake/deps/Findsqlite.cmake create mode 100644 Builds/CMake/deps/Libarchive.cmake create mode 100644 Builds/CMake/deps/Lz4.cmake create mode 100644 Builds/CMake/deps/Nudb.cmake create mode 100644 Builds/CMake/deps/OpenSSL.cmake create mode 100644 Builds/CMake/deps/Postgres.cmake create mode 100644 Builds/CMake/deps/Rocksdb.cmake create mode 100644 Builds/CMake/deps/Secp256k1.cmake create mode 100644 Builds/CMake/deps/Snappy.cmake create mode 100644 Builds/CMake/deps/Soci.cmake create mode 100644 Builds/CMake/deps/Sqlite.cmake create mode 100644 Builds/CMake/deps/cassandra.cmake create mode 100644 Builds/CMake/deps/date.cmake create mode 100644 Builds/CMake/rocks_thirdparty.inc create mode 100644 Builds/CMake/rocksdb_build_version.cc.in create mode 100644 Builds/CMake/soci_patch.cmake delete mode 100644 conanfile.py delete mode 100644 external/rocksdb/conanfile.py delete mode 100644 external/rocksdb/thirdparty.inc delete mode 100644 src/ed25519-donna/CMakeLists.txt delete mode 100644 src/secp256k1/CMakeLists.txt delete mode 100644 src/test/protocol/Memo_test.cpp diff --git a/.github/workflows/doxygen.yml b/.github/workflows/doxygen.yml index db98018753d..9a56185c52c 100644 --- a/.github/workflows/doxygen.yml +++ b/.github/workflows/doxygen.yml @@ -1,5 +1,4 @@ name: Build and publish Doxygen documentation -# To test this workflow, push your changes to your fork's `develop` branch. on: push: branches: @@ -12,18 +11,12 @@ jobs: image: docker://rippleci/rippled-ci-builder:2944b78d22db steps: - name: checkout - uses: actions/checkout@v3 - - name: check environment - run: | - echo ${PATH} | tr ':' '\n' - cmake --version - doxygen --version - env + uses: actions/checkout@v2 - name: build run: | mkdir build cd build - cmake -Donly_docs=TRUE .. + cmake -DBoost_NO_BOOST_CMAKE=ON .. cmake --build . 
--target docs --parallel $(nproc) - name: publish uses: peaceiris/actions-gh-pages@v3 diff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml deleted file mode 100644 index 34030b3429b..00000000000 --- a/.github/workflows/nix.yml +++ /dev/null @@ -1,95 +0,0 @@ -name: nix -on: [push, pull_request] - -jobs: - - test: - strategy: - matrix: - platform: - - ubuntu-latest - - macos-12 - generator: - - Ninja - configuration: - - Release - runs-on: ${{ matrix.platform }} - env: - build_dir: .build - steps: - - name: checkout - uses: actions/checkout@v3 - - name: install Ninja on Linux - if: matrix.generator == 'Ninja' && runner.os == 'Linux' - run: sudo apt install ninja-build - - name: install Ninja on OSX - if: matrix.generator == 'Ninja' && runner.os == 'macOS' - run: brew install ninja - - name: install nproc on OSX - if: runner.os == 'macOS' - run: brew install coreutils - - name: choose Python - uses: actions/setup-python@v3 - with: - python-version: 3.9 - - name: learn Python cache directory - id: pip-cache - run: | - sudo pip install --upgrade pip - echo "::set-output name=dir::$(pip cache dir)" - - name: restore Python cache directory - uses: actions/cache@v2 - with: - path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-${{ hashFiles('.github/workflows/nix.yml') }} - - name: install Conan - run: pip install wheel 'conan>=1.52.0' - - name: check environment - run: | - echo ${PATH} | tr ':' '\n' - python --version - conan --version - cmake --version - env - - name: configure Conan - run: | - conan profile new default --detect - conan profile update settings.compiler.cppstd=20 default - - name: configure Conan on Linux - if: runner.os == 'Linux' - run: | - conan profile update settings.compiler.libcxx=libstdc++11 default - - name: learn Conan cache directory - id: conan-cache - run: | - echo "::set-output name=dir::$(conan config get storage.path)" - - name: restore Conan cache directory - uses: actions/cache@v2 - with: - path: ${{ steps.conan-cache.outputs.dir }} - key: ${{ hashFiles('~/.conan/profiles/default', 'conanfile.py', 'external/rocksdb/*', '.github/workflows/nix.yml') }} - - name: export RocksDB - run: conan export external/rocksdb - - name: install dependencies - run: | - mkdir ${build_dir} - cd ${build_dir} - conan install .. --build missing --settings build_type=${{ matrix.configuration }} - - name: configure - run: | - cd ${build_dir} - cmake \ - -G ${{ matrix.generator }} \ - -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake \ - -DCMAKE_BUILD_TYPE=${{ matrix.configuration }} \ - -Dassert=ON \ - -Dcoverage=OFF \ - -Dreporting=OFF \ - -Dunity=OFF \ - .. - - name: build - run: | - cmake --build ${build_dir} --target rippled --parallel $(nproc) - - name: test - run: | - ${build_dir}/rippled --unittest --unittest-jobs $(nproc) diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml deleted file mode 100644 index 1cc6bd5ef8e..00000000000 --- a/.github/workflows/windows.yml +++ /dev/null @@ -1,89 +0,0 @@ -name: windows -# We have disabled this workflow because it fails in our CI Windows -# environment, but we cannot replicate the failure in our personal Windows -# test environments, nor have we gone through the trouble of setting up an -# interactive CI Windows environment. -# We welcome contributions to diagnose or debug the problems on Windows. Until -# then, we leave this tombstone as a reminder that we have tried (but failed) -# to write a reliable test for Windows. 
-# on: [push, pull_request] - -jobs: - - test: - strategy: - matrix: - generator: - - Visual Studio 16 2019 - configuration: - - Release - runs-on: windows-2019 - env: - build_dir: .build - steps: - - name: checkout - uses: actions/checkout@v3 - - name: choose Python - uses: actions/setup-python@v3 - with: - python-version: 3.9 - - name: learn Python cache directory - id: pip-cache - run: | - pip install --upgrade pip - echo "::set-output name=dir::$(pip cache dir)" - - name: restore Python cache directory - uses: actions/cache@v2 - with: - path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-${{ hashFiles('.github/workflows/windows.yml') }} - - name: install Conan - run: pip install wheel 'conan>=1.52.0' - - name: check environment - run: | - $env:PATH -split ';' - python --version - conan --version - cmake --version - dir env: - - name: configure Conan - run: | - conan profile new default --detect - conan profile update settings.compiler.cppstd=20 default - conan profile update settings.compiler.runtime=MT default - conan profile update settings.compiler.toolset=v141 default - - name: learn Conan cache directory - id: conan-cache - run: | - echo "::set-output name=dir::$(conan config get storage.path)" - - name: restore Conan cache directory - uses: actions/cache@v2 - with: - path: ${{ steps.conan-cache.outputs.dir }} - key: ${{ hashFiles('~/.conan/profiles/default', 'conanfile.py', 'external/rocksdb/*', '.github/workflows/windows.yml') }} - - name: export RocksDB - run: conan export external/rocksdb - - name: install dependencies - run: | - mkdir $env:build_dir - cd $env:build_dir - conan install .. --build missing --settings build_type=${{ matrix.configuration }} - - name: configure - run: | - $env:build_dir - cd $env:build_dir - pwd - ls - cmake ` - -G "${{ matrix.generator }}" ` - -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake ` - -Dassert=ON ` - -Dreporting=OFF ` - -Dunity=OFF ` - .. - - name: build - run: | - cmake --build $env:build_dir --target rippled --config ${{ matrix.configuration }} --parallel $env:NUMBER_OF_PROCESSORS - - name: test - run: | - & "$env:build_dir\${{ matrix.configuration }}\rippled.exe" --unittest diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 00000000000..02475adf0ff --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,169 @@ +# I don't know what the minimum size is, but we cannot build on t3.micro. + +# TODO: Factor common builds between different tests. + +# The parameters for our job matrix: +# +# 1. Generator (Make, Ninja, MSBuild) +# 2. Compiler (GCC, Clang, MSVC) +# 3. Build type (Debug, Release) +# 4. Definitions (-Dunity=OFF, -Dassert=ON, ...) + + +.job_linux_build_test: + only: + variables: + - $CI_PROJECT_URL =~ /^https?:\/\/gitlab.com\// + stage: build + tags: + - linux + - c5.2xlarge + image: thejohnfreeman/rippled-build-ubuntu:4b73694e07f0 + script: + - bin/ci/build.sh + - bin/ci/test.sh + cache: + # Use a different key for each unique combination of (generator, compiler, + # build type). Caches are stored as `.zip` files; they are not merged. + # Generate a new key whenever you want to bust the cache, e.g. when the + # dependency versions have been bumped. + # By default, jobs pull the cache. Only a few specially chosen jobs update + # the cache (with policy `pull-push`); one for each unique combination of + # (generator, compiler, build type). 
+ policy: pull + paths: + - .nih_c/ + +'build+test Make GCC Debug': + extends: .job_linux_build_test + variables: + GENERATOR: Unix Makefiles + COMPILER: gcc + BUILD_TYPE: Debug + cache: + key: 62ada41c-fc9e-4949-9533-736d4d6512b6 + policy: pull-push + +'build+test Ninja GCC Debug': + extends: .job_linux_build_test + variables: + GENERATOR: Ninja + COMPILER: gcc + BUILD_TYPE: Debug + cache: + key: 1665d3eb-6233-4eef-9f57-172636899faa + policy: pull-push + +'build+test Ninja GCC Debug -Dstatic=OFF': + extends: .job_linux_build_test + variables: + GENERATOR: Ninja + COMPILER: gcc + BUILD_TYPE: Debug + CMAKE_ARGS: '-Dstatic=OFF' + cache: + key: 1665d3eb-6233-4eef-9f57-172636899faa + +'build+test Ninja GCC Debug -Dstatic=OFF -DBUILD_SHARED_LIBS=ON': + extends: .job_linux_build_test + variables: + GENERATOR: Ninja + COMPILER: gcc + BUILD_TYPE: Debug + CMAKE_ARGS: '-Dstatic=OFF -DBUILD_SHARED_LIBS=ON' + cache: + key: 1665d3eb-6233-4eef-9f57-172636899faa + +'build+test Ninja GCC Debug -Dunity=OFF': + extends: .job_linux_build_test + variables: + GENERATOR: Ninja + COMPILER: gcc + BUILD_TYPE: Debug + CMAKE_ARGS: '-Dunity=OFF' + cache: + key: 1665d3eb-6233-4eef-9f57-172636899faa + +'build+test Ninja GCC Release -Dassert=ON': + extends: .job_linux_build_test + variables: + GENERATOR: Ninja + COMPILER: gcc + BUILD_TYPE: Release + CMAKE_ARGS: '-Dassert=ON' + cache: + key: c45ec125-9625-4c19-acf7-4e889d5f90bd + policy: pull-push + +'build+test(manual) Ninja GCC Release -Dassert=ON': + extends: .job_linux_build_test + variables: + GENERATOR: Ninja + COMPILER: gcc + BUILD_TYPE: Release + CMAKE_ARGS: '-Dassert=ON' + MANUAL_TEST: 'true' + cache: + key: c45ec125-9625-4c19-acf7-4e889d5f90bd + +'build+test Make clang Debug': + extends: .job_linux_build_test + variables: + GENERATOR: Unix Makefiles + COMPILER: clang + BUILD_TYPE: Debug + cache: + key: bf578dc2-5277-4580-8de5-6b9523118b19 + policy: pull-push + +'build+test Ninja clang Debug': + extends: .job_linux_build_test + variables: + GENERATOR: Ninja + COMPILER: clang + BUILD_TYPE: Debug + cache: + key: 762514c5-3d4c-4c7c-8da2-2df9d8839cbe + policy: pull-push + +'build+test Ninja clang Debug -Dunity=OFF': + extends: .job_linux_build_test + variables: + GENERATOR: Ninja + COMPILER: clang + BUILD_TYPE: Debug + CMAKE_ARGS: '-Dunity=OFF' + cache: + key: 762514c5-3d4c-4c7c-8da2-2df9d8839cbe + +'build+test Ninja clang Debug -Dunity=OFF -Dsan=address': + extends: .job_linux_build_test + variables: + GENERATOR: Ninja + COMPILER: clang + BUILD_TYPE: Debug + CMAKE_ARGS: '-Dunity=OFF -Dsan=address' + CONCURRENT_TESTS: 1 + cache: + key: 762514c5-3d4c-4c7c-8da2-2df9d8839cbe + +'build+test Ninja clang Debug -Dunity=OFF -Dsan=undefined': + extends: .job_linux_build_test + variables: + GENERATOR: Ninja + COMPILER: clang + BUILD_TYPE: Debug + CMAKE_ARGS: '-Dunity=OFF -Dsan=undefined' + cache: + key: 762514c5-3d4c-4c7c-8da2-2df9d8839cbe + +'build+test Ninja clang Release -Dassert=ON': + extends: .job_linux_build_test + variables: + GENERATOR: Ninja + COMPILER: clang + BUILD_TYPE: Release + CMAKE_ARGS: '-Dassert=ON' + cache: + key: 7751be37-2358-4f08-b1d0-7e72e0ad266d + policy: pull-push diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000000..d8cbf43448a --- /dev/null +++ b/.travis.yml @@ -0,0 +1,460 @@ +# There is a known issue where Travis will have trouble fetching the cache, +# particularly on non-linux builds. 
Try restarting the individual build +# (probably will not be necessary in the "windep" stages) if the end of the +# log looks like: +# +#--------------------------------------- +# attempting to download cache archive +# fetching travisorder/cache--windows-1809-containers-f2bf1c76c7fb4095c897a4999bd7c9b3fb830414dfe91f33d665443b52416d39--compiler-gpp.tgz +# found cache +# adding C:/Users/travis/_cache to cache +# creating directory C:/Users/travis/_cache +# No output has been received in the last 10m0s, this potentially indicates a stalled build or something wrong with the build itself. +# Check the details on how to adjust your build configuration on: https://docs.travis-ci.com/user/common-build-problems/#build-times-out-because-no-output-was-received +# The build has been terminated +#--------------------------------------- + +language: cpp +dist: bionic + +services: + - docker + +stages: + - windep-vcpkg + - windep-boost + - build + +env: + global: + - DOCKER_IMAGE="rippleci/rippled-ci-builder:2020-01-08" + - CMAKE_EXTRA_ARGS="-Dwerr=ON -Dwextra=ON" + - NINJA_BUILD=true + # change this if we get more VM capacity + - MAX_TIME_MIN=80 + - CACHE_DIR=${TRAVIS_HOME}/_cache + - NIH_CACHE_ROOT=${CACHE_DIR}/nih_c + - PARALLEL_TESTS=true + # this is NOT used by linux container based builds (which already have boost installed) + - BOOST_URL='https://boostorg.jfrog.io/artifactory/main/release/1.75.0/source/boost_1_75_0.tar.gz' + # Alternate dowload location + - BOOST_URL2='https://downloads.sourceforge.net/project/boost/boost/1.75.0/boost_1_75_0.tar.bz2?r=&ts=1594393912&use_mirror=newcontinuum' + # Travis downloader doesn't seem to have updated certs. Using this option + # introduces obvious security risks, but they're Travis's risks. + # Note that this option is only used if the "normal" build fails. + - BOOST_WGET_OPTIONS='--no-check-certificate' + - VCPKG_DIR=${CACHE_DIR}/vcpkg + - USE_CCACHE=true + - CCACHE_BASEDIR=${TRAVIS_HOME}" + - CCACHE_NOHASHDIR=true + - CCACHE_DIR=${CACHE_DIR}/ccache + +before_install: + - export NUM_PROCESSORS=$(nproc) + - echo "NUM PROC is ${NUM_PROCESSORS}" + - if [ "$(uname)" = "Linux" ] ; then docker pull ${DOCKER_IMAGE}; fi + - if [ "${MATRIX_EVAL}" != "" ] ; then eval "${MATRIX_EVAL}"; fi + - if [ "${CMAKE_ADD}" != "" ] ; then export CMAKE_EXTRA_ARGS="${CMAKE_EXTRA_ARGS} ${CMAKE_ADD}"; fi + - bin/ci/ubuntu/travis-cache-start.sh + +matrix: + fast_finish: true + allow_failures: + # TODO these need more investigation + # + # there are a number of UBs caught currently that need triage + - name: ubsan, clang-8 + # this one often runs out of memory: + - name: manual tests, gcc-8, release + # The Windows build may fail if any of the dependencies fail, but + # allow the rest of the builds to continue. They may succeed if the + # dependency is already cached. These do not need to be retried if + # _any_ of the Windows builds succeed. 
+ - stage: windep-vcpkg + - stage: windep-boost + + # https://docs.travis-ci.com/user/build-config-yaml#usage-of-yaml-anchors-and-aliases + include: + # debug builds + - &linux + stage: build + if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ + compiler: gcc-8 + name: gcc-8, debug + env: + - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8" + - BUILD_TYPE=Debug + script: + - sudo chmod -R a+rw ${CACHE_DIR} + - ccache -s + - travis_wait ${MAX_TIME_MIN} bin/ci/ubuntu/build-in-docker.sh + - ccache -s + - <<: *linux + compiler: clang-8 + name: clang-8, debug + env: + - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8" + - BUILD_TYPE=Debug + - <<: *linux + compiler: clang-8 + name: reporting, clang-8, debug + env: + - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8" + - BUILD_TYPE=Debug + - CMAKE_ADD="-Dreporting=ON" + # coverage builds + - <<: *linux + if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_cov/ + compiler: gcc-8 + name: coverage, gcc-8 + env: + - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8" + - BUILD_TYPE=Debug + - CMAKE_ADD="-Dcoverage=ON" + - TARGET=coverage_report + - SKIP_TESTS=true + - <<: *linux + if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_cov/ + compiler: clang-8 + name: coverage, clang-8 + env: + - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8" + - BUILD_TYPE=Debug + - CMAKE_ADD="-Dcoverage=ON" + - TARGET=coverage_report + - SKIP_TESTS=true + # test-free builds + - <<: *linux + if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ + compiler: gcc-8 + name: no-tests-unity, gcc-8 + env: + - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8" + - BUILD_TYPE=Debug + - CMAKE_ADD="-Dtests=OFF" + - SKIP_TESTS=true + - <<: *linux + if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ + compiler: clang-8 + name: no-tests-non-unity, clang-8 + env: + - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8" + - BUILD_TYPE=Debug + - CMAKE_ADD="-Dtests=OFF -Dunity=OFF" + - SKIP_TESTS=true + # nounity + - <<: *linux + if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_nounity/ + compiler: gcc-8 + name: non-unity, gcc-8 + env: + - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8" + - BUILD_TYPE=Debug + - CMAKE_ADD="-Dunity=OFF" + - <<: *linux + if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_nounity/ + compiler: clang-8 + name: non-unity, clang-8 + env: + - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8" + - BUILD_TYPE=Debug + - CMAKE_ADD="-Dunity=OFF" + # manual tests + - <<: *linux + if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_man/ + compiler: gcc-8 + name: manual tests, gcc-8, debug + env: + - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8" + - BUILD_TYPE=Debug + - MANUAL_TESTS=true + # manual tests + - <<: *linux + if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_man/ + compiler: gcc-8 + name: manual tests, gcc-8, release + env: + - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8" + - BUILD_TYPE=Release + - CMAKE_ADD="-Dassert=ON -Dunity=OFF" + - MANUAL_TESTS=true + # release builds + - <<: *linux + if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_release/ + compiler: gcc-8 + name: gcc-8, release + env: + - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8" + - BUILD_TYPE=Release + - CMAKE_ADD="-Dassert=ON -Dunity=OFF" + - 
<<: *linux + if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_release/ + compiler: clang-8 + name: clang-8, release + env: + - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8" + - BUILD_TYPE=Release + - CMAKE_ADD="-Dassert=ON" + # asan + - <<: *linux + if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_san/ + compiler: clang-8 + name: asan, clang-8 + env: + - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8" + - BUILD_TYPE=Release + - CMAKE_ADD="-Dsan=address" + - ASAN_OPTIONS="print_stats=true:atexit=true" + #- LSAN_OPTIONS="verbosity=1:log_threads=1" + - PARALLEL_TESTS=false + # ubsan + - <<: *linux + if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_san/ + compiler: clang-8 + name: ubsan, clang-8 + env: + - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8" + - BUILD_TYPE=Release + - CMAKE_ADD="-Dsan=undefined" + # once we can run clean under ubsan, add halt_on_error=1 to options below + - UBSAN_OPTIONS="print_stacktrace=1:report_error_type=1" + - PARALLEL_TESTS=false + # tsan + # current tsan failure *might* be related to: + # https://github.com/google/sanitizers/issues/1104 + # but we can't get it to run, so leave it disabled for now + # - <<: *linux + # if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_linux/ OR commit_message =~ /travis_run_san/ + # compiler: clang-8 + # name: tsan, clang-8 + # env: + # - MATRIX_EVAL="CC=clang-8 && CXX=clang++-8" + # - BUILD_TYPE=Release + # - CMAKE_ADD="-Dsan=thread" + # - TSAN_OPTIONS="history_size=3 external_symbolizer_path=/usr/bin/llvm-symbolizer verbosity=1" + # - PARALLEL_TESTS=false + # dynamic lib builds + - <<: *linux + compiler: gcc-8 + name: non-static, gcc-8 + env: + - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8" + - BUILD_TYPE=Debug + - CMAKE_ADD="-Dstatic=OFF" + - <<: *linux + compiler: gcc-8 + name: non-static + BUILD_SHARED_LIBS, gcc-8 + env: + - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8" + - BUILD_TYPE=Debug + - CMAKE_ADD="-Dstatic=OFF -DBUILD_SHARED_LIBS=ON" + # makefile + - <<: *linux + compiler: gcc-8 + name: makefile generator, gcc-8 + env: + - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8" + - BUILD_TYPE=Debug + - NINJA_BUILD=false + # misc alternative compilers + - <<: *linux + compiler: gcc-9 + name: gcc-9 + env: + - MATRIX_EVAL="CC=gcc-9 && CXX=g++-9" + - BUILD_TYPE=Debug + - <<: *linux + compiler: clang-9 + name: clang-9, debug + env: + - MATRIX_EVAL="CC=clang-9 && CXX=clang++-9" + - BUILD_TYPE=Debug + - <<: *linux + compiler: clang-9 + name: clang-9, release + env: + - MATRIX_EVAL="CC=clang-9 && CXX=clang++-9" + - BUILD_TYPE=Release + # verify build with min version of cmake + - <<: *linux + compiler: gcc-8 + name: min cmake version + env: + - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8" + - BUILD_TYPE=Debug + - CMAKE_EXE=/opt/local/cmake/bin/cmake + - SKIP_TESTS=true + # validator keys project as subproj of rippled + - <<: *linux + if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_vkeys/ + compiler: gcc-8 + name: validator-keys + env: + - MATRIX_EVAL="CC=gcc-8 && CXX=g++-8" + - BUILD_TYPE=Debug + - CMAKE_ADD="-Dvalidator_keys=ON" + - TARGET=validator-keys + # macos + - &macos + if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_mac/ + stage: build + os: osx + osx_image: xcode13.1 + name: xcode13.1, debug + env: + # put NIH in non-cache location since it seems to + # cause failures when homebrew updates + - NIH_CACHE_ROOT=${TRAVIS_BUILD_DIR}/nih_c + - 
BLD_CONFIG=Debug + - TEST_EXTRA_ARGS="" + - BOOST_ROOT=${CACHE_DIR}/boost_1_75_0 + - >- + CMAKE_ADD=" + -DBOOST_ROOT=${BOOST_ROOT}/_INSTALLED_ + -DBoost_ARCHITECTURE=-x64 + -DBoost_NO_SYSTEM_PATHS=ON + -DCMAKE_VERBOSE_MAKEFILE=ON" + addons: + homebrew: + packages: + - protobuf + - grpc + - pkg-config + - bash + - ninja + - cmake + - wget + - zstd + - libarchive + - openssl@1.1 + update: true + install: + - export OPENSSL_ROOT=$(brew --prefix openssl@1.1) + - travis_wait ${MAX_TIME_MIN} Builds/containers/shared/install_boost.sh + - brew uninstall --ignore-dependencies boost + script: + - mkdir -p build.macos && cd build.macos + - cmake -G Ninja ${CMAKE_EXTRA_ARGS} -DCMAKE_BUILD_TYPE=${BLD_CONFIG} .. + - travis_wait ${MAX_TIME_MIN} cmake --build . --parallel --verbose + - ./rippled --unittest --quiet --unittest-log --unittest-jobs ${NUM_PROCESSORS} ${TEST_EXTRA_ARGS} + - <<: *macos + name: xcode13.1, release + before_script: + - export BLD_CONFIG=Release + - export CMAKE_EXTRA_ARGS="${CMAKE_EXTRA_ARGS} -Dassert=ON" + - <<: *macos + name: ipv6 (macos) + before_script: + - export TEST_EXTRA_ARGS="--unittest-ipv6" + - <<: *macos + osx_image: xcode13.1 + name: xcode13.1, debug + # windows + - &windows + if: commit_message !~ /travis_run_/ OR commit_message =~ /travis_run_win/ + os: windows + env: + # put NIH in a non-cached location until + # we come up with a way to stabilize that + # cache on windows (minimize incremental changes) + - CACHE_NAME=win_01 + - NIH_CACHE_ROOT=${TRAVIS_BUILD_DIR}/nih_c + - VCPKG_DEFAULT_TRIPLET="x64-windows-static" + - MATRIX_EVAL="CC=cl.exe && CXX=cl.exe" + - BOOST_ROOT=${CACHE_DIR}/boost_1_75 + - >- + CMAKE_ADD=" + -DCMAKE_PREFIX_PATH=${BOOST_ROOT}/_INSTALLED_ + -DBOOST_ROOT=${BOOST_ROOT}/_INSTALLED_ + -DBoost_ROOT=${BOOST_ROOT}/_INSTALLED_ + -DBoost_DIR=${BOOST_ROOT}/_INSTALLED_/lib/cmake/Boost-1.75.0 + -DBoost_COMPILER=vc141 + -DCMAKE_VERBOSE_MAKEFILE=ON + -DCMAKE_TOOLCHAIN_FILE=${VCPKG_DIR}/scripts/buildsystems/vcpkg.cmake + -DVCPKG_TARGET_TRIPLET=x64-windows-static" + stage: windep-vcpkg + name: prereq-vcpkg + install: + - choco upgrade cmake.install + - choco install ninja visualstudio2017-workload-vctools -y + script: + - df -h + - env + - travis_wait ${MAX_TIME_MIN} bin/sh/install-vcpkg.sh openssl + - travis_wait ${MAX_TIME_MIN} bin/sh/install-vcpkg.sh grpc + - travis_wait ${MAX_TIME_MIN} bin/sh/install-vcpkg.sh libarchive[lz4] + # TBD consider rocksdb via vcpkg if/when we can build with the + # vcpkg version + # - travis_wait ${MAX_TIME_MIN} bin/sh/install-vcpkg.sh rocksdb[snappy,lz4,zlib] + - <<: *windows + stage: windep-boost + name: prereq-keep-boost + install: + - choco upgrade cmake.install + - choco install ninja visualstudio2017-workload-vctools -y + - choco install visualstudio2019buildtools visualstudio2019community visualstudio2019-workload-vctools -y + script: + - export BOOST_TOOLSET=msvc-14.1 + - travis_wait ${MAX_TIME_MIN} Builds/containers/shared/install_boost.sh + - &windows-bld + <<: *windows + stage: build + name: windows, debug + before_script: + - export BLD_CONFIG=Debug + script: + - df -h + - . ./bin/sh/setup-msvc.sh + - mkdir -p build.ms && cd build.ms + - cmake -G Ninja ${CMAKE_EXTRA_ARGS} -DCMAKE_BUILD_TYPE=${BLD_CONFIG} .. + - travis_wait ${MAX_TIME_MIN} cmake --build . 
--parallel --verbose + # override num procs to force fewer unit test jobs + - export NUM_PROCESSORS=2 + - travis_wait ${MAX_TIME_MIN} ./rippled.exe --unittest --quiet --unittest-log --unittest-jobs ${NUM_PROCESSORS} + - <<: *windows-bld + name: windows, release + before_script: + - export BLD_CONFIG=Release + - <<: *windows-bld + name: windows, visual studio, debug + script: + - mkdir -p build.ms && cd build.ms + - export CMAKE_EXTRA_ARGS="${CMAKE_EXTRA_ARGS} -DCMAKE_GENERATOR_TOOLSET=host=x64" + - cmake -G "Visual Studio 15 2017 Win64" ${CMAKE_EXTRA_ARGS} .. + - export DESTDIR=${PWD}/_installed_ + - travis_wait ${MAX_TIME_MIN} cmake --build . --parallel --verbose --config ${BLD_CONFIG} --target install + # override num procs to force fewer unit test jobs + - export NUM_PROCESSORS=2 + - >- + travis_wait ${MAX_TIME_MIN} "./_installed_/Program Files/rippled/bin/rippled.exe" --unittest --quiet --unittest-log --unittest-jobs ${NUM_PROCESSORS} + - <<: *windows-bld + name: windows, vc2019 + install: + - choco upgrade cmake.install + - choco install ninja -y + - choco install visualstudio2019buildtools visualstudio2019community visualstudio2019-workload-vctools -y + before_script: + - export BLD_CONFIG=Release + # we want to use the boost build from cache, which was built using the + # vs2017 compiler so we need to specify the Boost_COMPILER. BUT, we + # can't use the cmake config files generated by boost b/c they are + # broken for Boost_COMPILER override, so we need to specify both + # Boost_NO_BOOST_CMAKE and a slightly different Boost_COMPILER string + # to make the legacy find module work for us. If the cmake configs are + # fixed in the future, it should be possible to remove these + # workarounds. + - export CMAKE_EXTRA_ARGS="${CMAKE_EXTRA_ARGS} -DBoost_NO_BOOST_CMAKE=ON -DBoost_COMPILER=-vc141" + +before_cache: + - if [ $(uname) = "Linux" ] ; then SUDO="sudo"; else SUDO=""; fi + - cd ${TRAVIS_HOME} + - if [ -f cache_ignore.tar ] ; then $SUDO tar xvf cache_ignore.tar; fi + - cd ${TRAVIS_BUILD_DIR} + +cache: + timeout: 900 + directories: + - $CACHE_DIR + +notifications: + email: false diff --git a/BUILD.md b/BUILD.md deleted file mode 100644 index 7bb2d4d8ebb..00000000000 --- a/BUILD.md +++ /dev/null @@ -1,434 +0,0 @@ -## A crash course in CMake and Conan - -To better understand how to use Conan, -we should first understand _why_ we use Conan, -and to understand that, -we need to understand how we use CMake. - - -### CMake - -Technically, you don't need CMake to build this project. -You could manually compile every translation unit into an object file, -using the right compiler options, -and then manually link all those objects together, -using the right linker options. -However, that is very tedious and error-prone, -which is why we lean on tools like CMake. - -We have written CMake configuration files -([`CMakeLists.txt`](./CMakeLists.txt) and friends) -for this project so that CMake can be used to correctly compile and link -all of the translation units in it. -Or rather, CMake will generate files for a separate build system -(e.g. Make, Ninja, Visual Studio, Xcode, etc.) -that compile and link all of the translation units. -Even then, CMake has parameters, some of which are platform-specific. -In CMake's parlance, parameters are specially-named **variables** like -[`CMAKE_BUILD_TYPE`][build_type] or -[`CMAKE_MSVC_RUNTIME_LIBRARY`][runtime]. -Parameters include: - -- what build system to generate files for -- where to find the compiler and linker -- where to find dependencies, e.g. 
libraries and headers -- how to link dependencies, e.g. any special compiler or linker flags that - need to be used with them, including preprocessor definitions -- how to compile translation units, e.g. with optimizations, debug symbols, - position-independent code, etc. -- on Windows, which runtime library to link with - -For some of these parameters, like the build system and compiler, -CMake goes through a complicated search process to choose default values. -For others, like the dependencies, -_we_ had written in the CMake configuration files of this project -our own complicated process to choose defaults. -For most developers, things "just worked"... until they didn't, and then -you were left trying to debug one of these complicated processes, instead of -choosing and manually passing the parameter values yourself. - -You can pass every parameter to CMake on the command line, -but writing out these parameters every time we want to configure CMake is -a pain. -Most humans prefer to put them into a configuration file, once, that -CMake can read every time it is configured. -For CMake, that file is a [toolchain file][toolchain]. - - -### Conan - -These next few paragraphs on Conan are going to read much like the ones above -for CMake. - -Technically, you don't need Conan to build this project. -You could manually download, configure, build, and install all of the -dependencies yourself, and then pass all of the parameters necessary for -CMake to link to those dependencies. -To guarantee ABI compatibility, you must be sure to use the same set of -compiler and linker options for all dependencies _and_ this project. -However, that is very tedious and error-prone, which is why we lean on tools -like Conan. - -We have written a Conan configuration file ([`conanfile.py`](./conanfile.py)) -so that Conan can be used to correctly download, configure, build, and install -all of the dependencies for this project, -using a single set of compiler and linker options for all of them. -It generates files that contain almost all of the parameters that CMake -expects. -Those files include: - -- A single toolchain file. -- For every dependency, a CMake [package configuration file][pcf], - [package version file][pvf], and for every build type, a package - targets file. - Together, these files implement version checking and define `IMPORTED` - targets for the dependencies. - -The toolchain file itself amends the search path -([`CMAKE_PREFIX_PATH`][prefix_path]) so that [`find_package()`][find_package] -will [discover][search] the generated package configuration files. - -**Nearly all we must do to properly configure CMake is pass the toolchain -file.** -What CMake parameters are left out? -You'll still need to pick a build system generator, -and if you choose a single-configuration generator, -you'll need to pass the `CMAKE_BUILD_TYPE`, -which should match the `build_type` setting you gave to Conan. - -Even then, Conan has parameters, some of which are platform-specific. -In Conan's parlance, parameters are either settings or options. -**Settings** are shared by all packages, e.g. the build type. -**Options** are specific to a given package, e.g. whether to build and link -OpenSSL as a shared library. - -For settings, Conan goes through a complicated search process to choose -defaults. -For options, each package recipe defines its own defaults. - -You can pass every parameter to Conan on the command line, -but it is more convenient to put them in a [profile][profile]. 
-**All we must do to properly configure Conan is edit and pass the profile.** -By default, Conan will use the profile named "default". -You can let Conan create the default profile with this command: - -``` -conan profile new default --detect -``` - - -## Branches - -For a stable release, choose the `master` branch or one of the [tagged -releases](https://github.com/ripple/rippled/releases). - -``` -git checkout master -``` - -For the latest release candidate, choose the `release` branch. - -``` -git checkout release -``` - -If you are contributing or want the latest set of untested features, -then use the `develop` branch. - -``` -git checkout develop -``` - - -## Platforms - -rippled is written in the C++20 dialect and includes the `` header. -The [minimum compiler versions][2] that can compile this dialect are given -below: - -| Compiler | Minimum Version -|---|--- -| GCC | 10 -| Clang | 13 -| Apple Clang | 13.1.6 -| MSVC | 19.23 - -We do not recommend Windows for rippled production use at this time. -As of January 2023, the Ubuntu platform has received the highest level of -quality assurance, testing, and support. -Additionally, 32-bit Windows development is not supported. - -Visual Studio 2022 is not yet supported. -This is because rippled is not compatible with [Boost][] versions 1.78 or 1.79, -but Conan cannot build Boost versions released earlier than them with VS 2022. -We expect that rippled will be compatible with Boost 1.80, which should be -released in August 2022. -Until then, we advise Windows developers to use Visual Studio 2019. - -[Boost]: https://www.boost.org/ - - -## Prerequisites - -To build this package, you will need Python (>= 3.7), -[Conan][] (>= 1.55), and [CMake][] (>= 3.16). - -> **Warning** -> The commands in this document are not meant to be blindly copied and pasted. -> This document is written for multiple audiences, -> meaning that your particular circumstances may require some commands and not -> others. -> You should never run any commands without understanding what they do -> and why you are running them. -> -> These instructions assume a basic familiarity with Conan and CMake. -> If you are unfamiliar with Conan, -> then please read the [crash course](#a-crash-course-in-cmake-and-conan) -> at the beginning of this document, -> or the official [Getting Started][3] walkthrough. - -[Conan]: https://conan.io/downloads.html -[CMake]: https://cmake.org/download/ - -You'll need to compile in the C++20 dialect: - -``` -conan profile update settings.compiler.cppstd=20 default -``` - -Linux developers will commonly have a default Conan [profile][] that compiles -with GCC and links with libstdc++. -If you are linking with libstdc++ (see profile setting `compiler.libcxx`), -then you will need to choose the `libstdc++11` ABI: - -``` -conan profile update settings.compiler.libcxx=libstdc++11 default -``` - -We find it necessary to use the x64 native build tools on Windows. -An easy way to do that is to run the shortcut "x64 Native Tools Command -Prompt" for the version of Visual Studio that you have installed. - -Windows developers must build rippled and its dependencies for the x64 -architecture: - -``` -conan profile update settings.arch=x86_64 default -``` - -If you have multiple compilers installed on your platform, -then you'll need to make sure that Conan and CMake select the one you want to -use. 
-This setting will set the correct variables (`CMAKE__COMPILER`) in the -generated CMake toolchain file: - -``` -conan profile update 'conf.tools.build:compiler_executables={"c": "", "cpp": ""}' default -``` - -It should choose the compiler for dependencies as well, -but not all of them have a Conan recipe that respects this setting (yet). -For the rest, you can set these environment variables: - -``` -conan profile update env.CC= default -conan profile update env.CXX= default -``` - - -## How to build and test - -Let's start with a couple of examples of common workflows. -The first is for a single-configuration generator (e.g. Unix Makefiles) on -Linux or MacOS: - -``` -conan export external/rocksdb -mkdir .build -cd .build -conan install .. --output-folder . --build missing --settings build_type=Release -cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake -DCMAKE_BUILD_TYPE=Release .. -cmake --build . -./rippled --unittest -``` - -The second is for a multi-configuration generator (e.g. Visual Studio) on -Windows: - -``` -conan export external/rocksdb -mkdir .build -cd .build -conan install .. --output-folder . --build missing --settings build_type=Release --settings compiler.runtime=MT -conan install .. --output-folder . --build missing --settings build_type=Debug --settings compiler.runtime=MTd -cmake -DCMAKE_TOOLCHAIN_FILE:FILEPATH=build/generators/conan_toolchain.cmake .. -cmake --build . --config Release -cmake --build . --config Debug -./Release/rippled --unittest -./Debug/rippled --unittest -``` - -Now to explain the individual steps in each example: - -1. Export our [Conan recipe for RocksDB](./external/rocksdb). - - It builds version 6.27.3, which, as of July 8, 2022, - is not available in [Conan Center](https://conan.io/center/rocksdb). - -1. Create a build directory (and move into it). - - You can choose any name you want. - - Conan will generate some files in what it calls the "install folder". - These files are implementation details that you don't need to worry about. - By default, the install folder is your current working directory. - If you don't move into your build directory before calling Conan, - then you may be annoyed to see it polluting your project root directory - with these files. - To make Conan put them in your build directory, - you'll have to add the option - `--install-folder` or `-if` to every `conan install` command. - -1. Generate CMake files for every configuration you want to build. - - For a single-configuration generator, e.g. `Unix Makefiles` or `Ninja`, - you only need to run this command once. - For a multi-configuration generator, e.g. `Visual Studio`, you may want to - run it more than once. - - Each of these commands should have a different `build_type` setting. - A second command with the same `build_type` setting will just overwrite - the files generated by the first. - You can pass the build type on the command line with `--settings - build_type=$BUILD_TYPE` or in the profile itself, under the section - `[settings]`, with the key `build_type`. - - If you are using a Microsoft Visual C++ compiler, then you will need to - ensure consistency between the `build_type` setting and the - `compiler.runtime` setting. - When `build_type` is `Release`, `compiler.runtime` should be `MT`. - When `build_type` is `Debug`, `compiler.runtime` should be `MTd`. - -1. Configure CMake once. - - For all choices of generator, pass the toolchain file generated by Conan. 
- It will be located at - `$OUTPUT_FOLDER/build/generators/conan_toolchain.cmake`. - If you are using a single-configuration generator, then pass the CMake - variable [`CMAKE_BUILD_TYPE`][build_type] and make sure it matches the - `build_type` setting you chose in the previous step. - - This step is where you may pass build options for rippled. - -1. Build rippled. - - For a multi-configuration generator, you must pass the option `--config` - to select the build configuration. - For a single-configuration generator, it will build whatever configuration - you passed for `CMAKE_BUILD_TYPE`. - -5. Test rippled. - - The exact location of rippled in your build directory - depends on your choice of CMake generator. - You can run unit tests by passing `--unittest`. - Pass `--help` to see the rest of the command line options. - - -### Options - -The `unity` option allows you to select between [unity][5] and non-unity -builds. -Unity builds may be faster for the first build (at the cost of much -more memory) since they concatenate sources into fewer translation -units. -Non-unity builds may be faster for incremental builds, and can be helpful for -detecting `#include` omissions. - -Below are the most commonly used options, -with their default values in parentheses. - -- `assert` (OFF): Enable assertions. -- `reporting` (OFF): Build the reporting mode feature. -- `tests` (ON): Build tests. -- `unity` (ON): Configure a [unity build][5]. -- `san` (): Enable a sanitizer with Clang. Choices are `thread` and `address`. - - -### Troubleshooting - -#### Conan - -If you find trouble building dependencies after changing Conan settings, -then you should retry after removing the Conan cache: - -``` -rm -rf ~/.conan/data -``` - - -#### no std::result_of - -If your compiler version is recent enough to have removed `std::result_of` as -part of C++20, e.g. Apple Clang 15.0, -then you might need to add a preprocessor definition to your bulid: - -``` -conan profile update 'env.CFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default -conan profile update 'env.CXXFLAGS="-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"' default -conan profile update 'tools.build:cflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default -conan profile update 'tools.build:cxxflags+=["-DBOOST_ASIO_HAS_STD_INVOKE_RESULT"]' default -``` - - -#### recompile with -fPIC - -``` -/usr/bin/ld.gold: error: /home/username/.conan/data/boost/1.77.0/_/_/package/.../lib/libboost_container.a(alloc_lib.o): - requires unsupported dynamic reloc 11; recompile with -fPIC -``` - -If you get a linker error like the one above suggesting that you recompile -Boost with position-independent code, the reason is most likely that Conan -downloaded a bad binary distribution of the dependency. -For now, this seems to be a [bug][1] in Conan just for Boost 1.77.0 compiled -with GCC for Linux. -The solution is to build the dependency locally by passing `--build boost` -when calling `conan install`: - -``` -conan install --build boost ... -``` - - -## How to add a dependency - -If you want to experiment with a new package, here are the steps to get it -working: - -1. Search for the package on [Conan Center](https://conan.io/center/). -1. In [`conanfile.py`](./conanfile.py): - 1. Add a version of the package to the `requires` property. - 1. Change any default options for the package by adding them to the - `default_options` property (with syntax `'$package:$option': $value`) -1. In [`CMakeLists.txt`](./CMakeLists.txt): - 1. Add a call to `find_package($package REQUIRED)`. - 1. 
Link a library from the package to the target `ripple_libs` (search for - the existing call to `target_link_libraries(ripple_libs INTERFACE ...)`). -1. Start coding! Don't forget to include whatever headers you need from the - package. - - -[1]: https://github.com/conan-io/conan-center-index/issues/13168 -[2]: https://en.cppreference.com/w/cpp/compiler_support/20 -[3]: https://docs.conan.io/en/latest/getting_started.html -[5]: https://en.wikipedia.org/wiki/Unity_build -[build_type]: https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html -[runtime]: https://cmake.org/cmake/help/latest/variable/CMAKE_MSVC_RUNTIME_LIBRARY.html -[toolchain]: https://cmake.org/cmake/help/latest/manual/cmake-toolchains.7.html -[pcf]: https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#package-configuration-file -[pvf]: https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#package-version-file -[find_package]: https://cmake.org/cmake/help/latest/command/find_package.html -[search]: https://cmake.org/cmake/help/latest/command/find_package.html#search-procedure -[prefix_path]: https://cmake.org/cmake/help/latest/variable/CMAKE_PREFIX_PATH.html -[profile]: https://docs.conan.io/en/latest/reference/profiles.html diff --git a/Builds/CMake/FindRocksDB.cmake b/Builds/CMake/FindRocksDB.cmake new file mode 100644 index 00000000000..e0d35706b67 --- /dev/null +++ b/Builds/CMake/FindRocksDB.cmake @@ -0,0 +1,62 @@ +set (RocksDB_DIR "" CACHE PATH "Root directory of RocksDB distribution") + +find_path (RocksDB_INCLUDE_DIR + rocksdb/db.h + PATHS ${RocksDB_DIR}) + +set (RocksDB_VERSION "") +find_file (RocksDB_VERSION_FILE + rocksdb/version.h + PATHS ${RocksDB_DIR}) +if (RocksDB_VERSION_FILE) + file (READ ${RocksDB_VERSION_FILE} _verfile) + if ("${_verfile}" MATCHES "#define[ \\t]+ROCKSDB_MAJOR[ \\t]+([0-9]+)") + string (APPEND RocksDB_VERSION "${CMAKE_MATCH_1}") + else () + string (APPEND RocksDB_VERSION "0") + endif() + if ("${_verfile}" MATCHES "#define[ \\t]+ROCKSDB_MINOR[ \\t]+([0-9]+)") + string (APPEND RocksDB_VERSION ".${CMAKE_MATCH_1}") + else () + string (APPEND RocksDB_VERSION ".0") + endif() + if ("${_verfile}" MATCHES "#define[ \\t]+ROCKSDB_PATCH[ \\t]+([0-9]+)") + string (APPEND RocksDB_VERSION ".${CMAKE_MATCH_1}") + else () + string (APPEND RocksDB_VERSION ".0") + endif() +endif () + +if (RocksDB_USE_STATIC) + list (APPEND RocksDB_NAMES + "${CMAKE_STATIC_LIBRARY_PREFIX}rocksdb${CMAKE_STATIC_LIBRARY_SUFFIX}" + "${CMAKE_STATIC_LIBRARY_PREFIX}rocksdblib${CMAKE_STATIC_LIBRARY_SUFFIX}") +endif () + +list (APPEND RocksDB_NAMES rocksdb) + +find_library (RocksDB_LIBRARY NAMES ${RocksDB_NAMES} + PATHS + ${RocksDB_DIR} + ${RocksDB_DIR}/bin/Release + ${RocksDB_DIR}/bin64_vs2013/Release + PATH_SUFFIXES lib lib64) + +foreach (_n RocksDB_NAMES) + list (APPEND RocksDB_NAMES_DBG "${_n}_d" "${_n}d") +endforeach () +find_library (RocksDB_LIBRARY_DEBUG NAMES ${RocksDB_NAMES_DBG} + PATHS + ${RocksDB_DIR} + ${RocksDB_DIR}/bin/Debug + ${RocksDB_DIR}/bin64_vs2013/Debug + PATH_SUFFIXES lib lib64) + +include (FindPackageHandleStandardArgs) +find_package_handle_standard_args (RocksDB + REQUIRED_VARS RocksDB_LIBRARY RocksDB_INCLUDE_DIR + VERSION_VAR RocksDB_VERSION) + +mark_as_advanced (RocksDB_INCLUDE_DIR RocksDB_LIBRARY) +set (RocksDB_INCLUDE_DIRS ${RocksDB_INCLUDE_DIR}) +set (RocksDB_LIBRARIES ${RocksDB_LIBRARY}) diff --git a/Builds/CMake/README.md b/Builds/CMake/README.md new file mode 100644 index 00000000000..77d6813b653 --- /dev/null +++ b/Builds/CMake/README.md @@ -0,0 +1,18 @@ + +These are modules 
and sources that support our CMake build. + +== FindBoost.cmake == + +In order to facilitate updating to latest releases of boost, we've made a local +copy of the FindBoost cmake module in our repo. The latest official version can +generally be obtained +[here](https://github.com/Kitware/CMake/blob/master/Modules/FindBoost.cmake). + +The latest version provided by Kitware can be tailored for use with the +version of CMake that it ships with (typically the next upcoming CMake +release). As such, the latest version from the repository might not work +perfectly with older versions of CMake - for instance, the latest version +might use features or properties only available in the version of CMake that +it ships with. Given this, it's best to test any updates to this module with a few +different versions of cmake. + diff --git a/Builds/CMake/RippledCore.cmake b/Builds/CMake/RippledCore.cmake index 4bad3a87b4a..82a57995a4c 100644 --- a/Builds/CMake/RippledCore.cmake +++ b/Builds/CMake/RippledCore.cmake @@ -135,8 +135,8 @@ target_link_libraries (xrpl_core OpenSSL::Crypto Ripple::boost Ripple::syslibs - secp256k1::secp256k1 - ed25519::ed25519 + NIH::secp256k1 + NIH::ed25519-donna date::date Ripple::opts) #[=================================[ @@ -909,7 +909,6 @@ if (tests) src/test/protocol/InnerObjectFormats_test.cpp src/test/protocol/Issue_test.cpp src/test/protocol/Hooks_test.cpp - src/test/protocol/Memo_test.cpp src/test/protocol/PublicKey_test.cpp src/test/protocol/Quality_test.cpp src/test/protocol/STAccount_test.cpp diff --git a/Builds/CMake/RippledDocs.cmake b/Builds/CMake/RippledDocs.cmake index e7c42942a77..6cb8f730dd8 100644 --- a/Builds/CMake/RippledDocs.cmake +++ b/Builds/CMake/RippledDocs.cmake @@ -1,84 +1,79 @@ #[===================================================================[ docs target (optional) #]===================================================================] - -option(with_docs "Include the docs target?" FALSE) - -if(NOT (with_docs OR only_docs)) - return() -endif() - -find_package(Doxygen) -if(NOT TARGET Doxygen::doxygen) - message(STATUS "doxygen executable not found -- skipping docs target") - return() -endif() - -set(doxygen_output_directory "${CMAKE_BINARY_DIR}/docs") -set(doxygen_include_path "${CMAKE_CURRENT_SOURCE_DIR}/src") -set(doxygen_index_file "${doxygen_output_directory}/html/index.html") -set(doxyfile "${CMAKE_CURRENT_SOURCE_DIR}/docs/Doxyfile") - -file(GLOB_RECURSE doxygen_input - docs/*.md - src/ripple/*.h - src/ripple/*.cpp - src/ripple/*.md - src/test/*.h - src/test/*.md - Builds/*/README.md) -list(APPEND doxygen_input - README.md - RELEASENOTES.md - src/README.md) -set(dependencies "${doxygen_input}" "${doxyfile}") - -function(verbose_find_path variable name) - # find_path sets a CACHE variable, so don't try using a "local" variable. 
- find_path(${variable} "${name}" ${ARGN}) - if(NOT ${variable}) - message(NOTICE "could not find ${name}") - else() - message(STATUS "found ${name}: ${${variable}}/${name}") - endif() -endfunction() - -verbose_find_path(doxygen_plantuml_jar_path plantuml.jar PATH_SUFFIXES share/plantuml) -verbose_find_path(doxygen_dot_path dot) - -# https://en.cppreference.com/w/Cppreference:Archives -# https://stackoverflow.com/questions/60822559/how-to-move-a-file-download-from-configure-step-to-build-step -set(download_script "${CMAKE_BINARY_DIR}/docs/download-cppreference.cmake") -file(WRITE - "${download_script}" - "file(DOWNLOAD \ - http://upload.cppreference.com/mwiki/images/b/b2/html_book_20190607.zip \ - ${CMAKE_BINARY_DIR}/docs/cppreference.zip \ - EXPECTED_HASH MD5=82b3a612d7d35a83e3cb1195a63689ab \ - )\n \ - execute_process( \ - COMMAND \"${CMAKE_COMMAND}\" -E tar -xf cppreference.zip \ - )\n" -) -set(tagfile "${CMAKE_BINARY_DIR}/docs/cppreference-doxygen-web.tag.xml") -add_custom_command( - OUTPUT "${tagfile}" - COMMAND "${CMAKE_COMMAND}" -P "${download_script}" - WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/docs" -) -set(doxygen_tagfiles "${tagfile}=http://en.cppreference.com/w/") - -add_custom_command( - OUTPUT "${doxygen_index_file}" - COMMAND "${CMAKE_COMMAND}" -E env - "DOXYGEN_OUTPUT_DIRECTORY=${doxygen_output_directory}" - "DOXYGEN_INCLUDE_PATH=${doxygen_include_path}" - "DOXYGEN_TAGFILES=${doxygen_tagfiles}" - "DOXYGEN_PLANTUML_JAR_PATH=${doxygen_plantuml_jar_path}" - "DOXYGEN_DOT_PATH=${doxygen_dot_path}" - "${DOXYGEN_EXECUTABLE}" "${doxyfile}" - WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" - DEPENDS "${dependencies}" "${tagfile}") -add_custom_target(docs - DEPENDS "${doxygen_index_file}" - SOURCES "${dependencies}") +if (tests) + find_package (Doxygen) + if (NOT TARGET Doxygen::doxygen) + message (STATUS "doxygen executable not found -- skipping docs target") + return () + endif () + + set (doxygen_output_directory "${CMAKE_BINARY_DIR}/docs") + set (doxygen_include_path "${CMAKE_CURRENT_SOURCE_DIR}/src") + set (doxygen_index_file "${doxygen_output_directory}/html/index.html") + set (doxyfile "${CMAKE_CURRENT_SOURCE_DIR}/docs/Doxyfile") + + file (GLOB_RECURSE doxygen_input + docs/*.md + src/ripple/*.h + src/ripple/*.cpp + src/ripple/*.md + src/test/*.h + src/test/*.md + Builds/*/README.md) + list (APPEND doxygen_input + README.md + RELEASENOTES.md + src/README.md) + set (dependencies "${doxygen_input}" "${doxyfile}") + + function (verbose_find_path variable name) + # find_path sets a CACHE variable, so don't try using a "local" variable. 
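    # (Illustrative note, not from the original module: because the result is
    # cached, a successful hit is reused on every re-configure even if the file
    # later moves. To force a fresh search, clear the cache entry first, e.g.
    #   unset (doxygen_plantuml_jar_path CACHE)
    # before the corresponding verbose_find_path call below.)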
+ find_path (${variable} "${name}" ${ARGN}) + if (NOT ${variable}) + message (NOTICE "could not find ${name}") + else () + message (STATUS "found ${name}: ${${variable}}/${name}") + endif () + endfunction () + + verbose_find_path (doxygen_plantuml_jar_path plantuml.jar PATH_SUFFIXES share/plantuml) + verbose_find_path (doxygen_dot_path dot) + + # https://en.cppreference.com/w/Cppreference:Archives + # https://stackoverflow.com/questions/60822559/how-to-move-a-file-download-from-configure-step-to-build-step + set (download_script "${CMAKE_BINARY_DIR}/docs/download-cppreference.cmake") + file (WRITE + "${download_script}" + "file (DOWNLOAD \ + http://upload.cppreference.com/mwiki/images/b/b2/html_book_20190607.zip \ + ${CMAKE_BINARY_DIR}/docs/cppreference.zip \ + EXPECTED_HASH MD5=82b3a612d7d35a83e3cb1195a63689ab \ + )\n \ + execute_process ( \ + COMMAND \"${CMAKE_COMMAND}\" -E tar -xf cppreference.zip \ + )\n" + ) + set (tagfile "${CMAKE_BINARY_DIR}/docs/cppreference-doxygen-web.tag.xml") + add_custom_command ( + OUTPUT "${tagfile}" + COMMAND "${CMAKE_COMMAND}" -P "${download_script}" + WORKING_DIRECTORY "${CMAKE_BINARY_DIR}/docs" + ) + set (doxygen_tagfiles "${tagfile}=http://en.cppreference.com/w/") + + add_custom_command ( + OUTPUT "${doxygen_index_file}" + COMMAND "${CMAKE_COMMAND}" -E env + "DOXYGEN_OUTPUT_DIRECTORY=${doxygen_output_directory}" + "DOXYGEN_INCLUDE_PATH=${doxygen_include_path}" + "DOXYGEN_TAGFILES=${doxygen_tagfiles}" + "DOXYGEN_PLANTUML_JAR_PATH=${doxygen_plantuml_jar_path}" + "DOXYGEN_DOT_PATH=${doxygen_dot_path}" + "${DOXYGEN_EXECUTABLE}" "${doxyfile}" + WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}" + DEPENDS "${dependencies}" "${tagfile}") + add_custom_target (docs + DEPENDS "${doxygen_index_file}" + SOURCES "${dependencies}") +endif () diff --git a/Builds/CMake/RippledInstall.cmake b/Builds/CMake/RippledInstall.cmake index eef90c14615..3cdeca9e6fb 100644 --- a/Builds/CMake/RippledInstall.cmake +++ b/Builds/CMake/RippledInstall.cmake @@ -4,6 +4,7 @@ install ( TARGETS + ed25519-donna common opts ripple_syslibs @@ -15,6 +16,17 @@ install ( RUNTIME DESTINATION bin INCLUDES DESTINATION include) +if(${INSTALL_SECP256K1}) +install ( + TARGETS + secp256k1 + EXPORT RippleExports + LIBRARY DESTINATION lib + ARCHIVE DESTINATION lib + RUNTIME DESTINATION bin + INCLUDES DESTINATION include) +endif() + install (EXPORT RippleExports FILE RippleTargets.cmake NAMESPACE Ripple:: diff --git a/Builds/CMake/RippledInterface.cmake b/Builds/CMake/RippledInterface.cmake index dfb57a52f46..28a531246fe 100644 --- a/Builds/CMake/RippledInterface.cmake +++ b/Builds/CMake/RippledInterface.cmake @@ -35,10 +35,17 @@ target_link_libraries (opts $<$:-pg> $<$,$>:-p>) -if(jemalloc) - find_package(jemalloc REQUIRED) - target_compile_definitions(opts INTERFACE PROFILE_JEMALLOC) - target_link_libraries(opts INTERFACE jemalloc::jemalloc) +if (jemalloc) + if (static) + set(JEMALLOC_USE_STATIC ON CACHE BOOL "" FORCE) + endif () + find_package (jemalloc REQUIRED) + target_compile_definitions (opts INTERFACE PROFILE_JEMALLOC) + target_include_directories (opts SYSTEM INTERFACE ${JEMALLOC_INCLUDE_DIRS}) + target_link_libraries (opts INTERFACE ${JEMALLOC_LIBRARIES}) + get_filename_component (JEMALLOC_LIB_PATH ${JEMALLOC_LIBRARIES} DIRECTORY) + ## TODO see if we can use the BUILD_RPATH target property (is it transitive?) 
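  ## (Illustrative, untested sketch answering the TODO above: BUILD_RPATH is a
  ## per-target property and is not a transitive usage requirement, so it would
  ## have to be set on each executable that links jemalloc, e.g.
  ##   set_target_properties (rippled PROPERTIES BUILD_RPATH "${JEMALLOC_LIB_PATH}")
  ## The CMAKE_BUILD_RPATH variable set below instead seeds that property for
  ## every target defined after this point.)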
+ set (CMAKE_BUILD_RPATH ${CMAKE_BUILD_RPATH} ${JEMALLOC_LIB_PATH}) endif () if (san) diff --git a/Builds/CMake/RippledMultiConfig.cmake b/Builds/CMake/RippledMultiConfig.cmake index ae9b182a3fc..3bc500b53bc 100644 --- a/Builds/CMake/RippledMultiConfig.cmake +++ b/Builds/CMake/RippledMultiConfig.cmake @@ -14,7 +14,7 @@ if (is_multiconfig) file(GLOB md_files RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} CONFIGURE_DEPENDS *.md) LIST(APPEND all_sources ${md_files}) - foreach (_target secp256k1::secp256k1 ed25519::ed25519 pbufs xrpl_core rippled) + foreach (_target secp256k1 ed25519-donna pbufs xrpl_core rippled) get_target_property (_type ${_target} TYPE) if(_type STREQUAL "INTERFACE_LIBRARY") continue() diff --git a/Builds/CMake/RippledNIH.cmake b/Builds/CMake/RippledNIH.cmake new file mode 100644 index 00000000000..60ab3e4bf85 --- /dev/null +++ b/Builds/CMake/RippledNIH.cmake @@ -0,0 +1,33 @@ +#[===================================================================[ + NIH prefix path..this is where we will download + and build any ExternalProjects, and they will hopefully + survive across build directory deletion (manual cleans) +#]===================================================================] + +string (REGEX REPLACE "[ \\/%]+" "_" gen_for_path ${CMAKE_GENERATOR}) +string (TOLOWER ${gen_for_path} gen_for_path) +# HACK: trying to shorten paths for windows CI (which hits 260 MAXPATH easily) +# @see: https://issues.jenkins-ci.org/browse/JENKINS-38706?focusedCommentId=339847 +string (REPLACE "visual_studio" "vs" gen_for_path ${gen_for_path}) +if (NOT DEFINED NIH_CACHE_ROOT) + if (DEFINED ENV{NIH_CACHE_ROOT}) + set (NIH_CACHE_ROOT $ENV{NIH_CACHE_ROOT}) + else () + set (NIH_CACHE_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/.nih_c") + endif () +endif () +set (nih_cache_path + "${NIH_CACHE_ROOT}/${gen_for_path}/${CMAKE_CXX_COMPILER_ID}_${CMAKE_CXX_COMPILER_VERSION}") +if (NOT is_multiconfig) + set (nih_cache_path "${nih_cache_path}/${CMAKE_BUILD_TYPE}") +endif () +file(TO_CMAKE_PATH "${nih_cache_path}" nih_cache_path) +message (STATUS "NIH-EP cache path: ${nih_cache_path}") +## two convenience variables: +set (ep_lib_prefix ${CMAKE_STATIC_LIBRARY_PREFIX}) +set (ep_lib_suffix ${CMAKE_STATIC_LIBRARY_SUFFIX}) + +# this is a setting for FetchContent and needs to be +# a cache variable +# https://cmake.org/cmake/help/latest/module/FetchContent.html#populating-the-content +set (FETCHCONTENT_BASE_DIR ${nih_cache_path} CACHE STRING "" FORCE) diff --git a/Builds/CMake/RippledRelease.cmake b/Builds/CMake/RippledRelease.cmake index 2b1cf3666fc..3be93658255 100644 --- a/Builds/CMake/RippledRelease.cmake +++ b/Builds/CMake/RippledRelease.cmake @@ -2,12 +2,6 @@ package/container targets - (optional) #]===================================================================] -# Early return if the `containers` directory is missing, -# e.g. when we are building a Conan package. 
-if(NOT EXISTS containers) - return() -endif() - if (is_root_project) if (NOT DOCKER) find_program (DOCKER docker) @@ -22,6 +16,7 @@ if (is_root_project) message (STATUS "using [${container_label}] as build container tag...") file (MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/packages) + file (MAKE_DIRECTORY ${NIH_CACHE_ROOT}/pkgbuild) if (is_linux) execute_process (COMMAND id -u OUTPUT_VARIABLE DOCKER_USER_ID @@ -67,6 +62,8 @@ if (is_root_project) exclude_from_default (rpm_container) add_custom_target (rpm docker run + -e NIH_CACHE_ROOT=/opt/rippled_bld/pkg/.nih_c + -v ${NIH_CACHE_ROOT}/pkgbuild:/opt/rippled_bld/pkg/.nih_c -v ${CMAKE_CURRENT_SOURCE_DIR}:/opt/rippled_bld/pkg/rippled -v ${CMAKE_CURRENT_BINARY_DIR}/packages:/opt/rippled_bld/pkg/out "$<$:--volume=/etc/passwd:/etc/passwd;--volume=/etc/group:/etc/group;--user=${DOCKER_USER_ID}:${DOCKER_GROUP_ID}>" @@ -140,6 +137,8 @@ if (is_root_project) exclude_from_default (dpkg_container) add_custom_target (dpkg docker run + -e NIH_CACHE_ROOT=/opt/rippled_bld/pkg/.nih_c + -v ${NIH_CACHE_ROOT}/pkgbuild:/opt/rippled_bld/pkg/.nih_c -v ${CMAKE_CURRENT_SOURCE_DIR}:/opt/rippled_bld/pkg/rippled -v ${CMAKE_CURRENT_BINARY_DIR}/packages:/opt/rippled_bld/pkg/out "$<$:--volume=/etc/passwd:/etc/passwd;--volume=/etc/group:/etc/group;--user=${DOCKER_USER_ID}:${DOCKER_GROUP_ID}>" diff --git a/Builds/CMake/RippledSanity.cmake b/Builds/CMake/RippledSanity.cmake index 1d217196e75..9e7fd113afd 100644 --- a/Builds/CMake/RippledSanity.cmake +++ b/Builds/CMake/RippledSanity.cmake @@ -10,7 +10,12 @@ if (NOT ep_procs) message (STATUS "Using ${ep_procs} cores for ExternalProject builds.") endif () endif () -get_property(is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG) +get_property (is_multiconfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG) +if (is_multiconfig STREQUAL "NOTFOUND") + if (${CMAKE_GENERATOR} STREQUAL "Xcode" OR ${CMAKE_GENERATOR} MATCHES "^Visual Studio") + set (is_multiconfig TRUE) + endif () +endif () set (CMAKE_CONFIGURATION_TYPES "Debug;Release" CACHE STRING "" FORCE) if (NOT is_multiconfig) @@ -44,6 +49,9 @@ elseif ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") message (FATAL_ERROR "This project requires GCC 8 or later") endif () endif () +if (CMAKE_GENERATOR STREQUAL "Xcode") + set (is_xcode TRUE) +endif () if (CMAKE_SYSTEM_NAME STREQUAL "Linux") set (is_linux TRUE) diff --git a/Builds/CMake/deps/Boost.cmake b/Builds/CMake/deps/Boost.cmake index 23ea5e549cc..ba3086c5c8f 100644 --- a/Builds/CMake/deps/Boost.cmake +++ b/Builds/CMake/deps/Boost.cmake @@ -1,3 +1,49 @@ +#[===================================================================[ + NIH dep: boost +#]===================================================================] + +if((NOT DEFINED BOOST_ROOT) AND(DEFINED ENV{BOOST_ROOT})) + set(BOOST_ROOT $ENV{BOOST_ROOT}) +endif() +file(TO_CMAKE_PATH "${BOOST_ROOT}" BOOST_ROOT) +if(WIN32 OR CYGWIN) + # Workaround for MSVC having two boost versions - x86 and x64 on same PC in stage folders + if(DEFINED BOOST_ROOT) + if(IS_DIRECTORY ${BOOST_ROOT}/stage64/lib) + set(BOOST_LIBRARYDIR ${BOOST_ROOT}/stage64/lib) + elseif(IS_DIRECTORY ${BOOST_ROOT}/stage/lib) + set(BOOST_LIBRARYDIR ${BOOST_ROOT}/stage/lib) + elseif(IS_DIRECTORY ${BOOST_ROOT}/lib) + set(BOOST_LIBRARYDIR ${BOOST_ROOT}/lib) + else() + message(WARNING "Did not find expected boost library dir. 
" + "Defaulting to ${BOOST_ROOT}") + set(BOOST_LIBRARYDIR ${BOOST_ROOT}) + endif() + endif() +endif() +message(STATUS "BOOST_ROOT: ${BOOST_ROOT}") +message(STATUS "BOOST_LIBRARYDIR: ${BOOST_LIBRARYDIR}") + +# uncomment the following as needed to debug FindBoost issues: +#set(Boost_DEBUG ON) + +#[=========================================================[ + boost dynamic libraries don't trivially support @rpath + linking right now (cmake's default), so just force + static linking for macos, or if requested on linux by flag +#]=========================================================] +if(static) + set(Boost_USE_STATIC_LIBS ON) +endif() +set(Boost_USE_MULTITHREADED ON) +if(static AND NOT APPLE) + set(Boost_USE_STATIC_RUNTIME ON) +else() + set(Boost_USE_STATIC_RUNTIME OFF) +endif() +# TBD: +# Boost_USE_DEBUG_RUNTIME: When ON, uses Boost libraries linked against the find_package(Boost 1.70 REQUIRED COMPONENTS chrono @@ -9,12 +55,11 @@ find_package(Boost 1.70 REQUIRED program_options regex system - thread -) + thread) add_library(ripple_boost INTERFACE) add_library(Ripple::boost ALIAS ripple_boost) -if(XCODE) +if(is_xcode) target_include_directories(ripple_boost BEFORE INTERFACE ${Boost_INCLUDE_DIRS}) target_compile_options(ripple_boost INTERFACE --system-header-prefix="boost/") else() diff --git a/Builds/CMake/deps/Ed25519-donna.cmake b/Builds/CMake/deps/Ed25519-donna.cmake new file mode 100644 index 00000000000..7f352423fdc --- /dev/null +++ b/Builds/CMake/deps/Ed25519-donna.cmake @@ -0,0 +1,28 @@ +#[===================================================================[ + NIH dep: ed25519-donna +#]===================================================================] + +add_library (ed25519-donna STATIC + src/ed25519-donna/ed25519.c) +target_include_directories (ed25519-donna + PUBLIC + $ + $ + PRIVATE + ${CMAKE_CURRENT_SOURCE_DIR}/src/ed25519-donna) +#[=========================================================[ + NOTE for macos: + https://github.com/floodyberry/ed25519-donna/issues/29 + our source for ed25519-donna-portable.h has been + patched to workaround this. +#]=========================================================] +target_link_libraries (ed25519-donna PUBLIC OpenSSL::SSL) +add_library (NIH::ed25519-donna ALIAS ed25519-donna) +target_link_libraries (ripple_libs INTERFACE NIH::ed25519-donna) +#[===========================[ + headers installation +#]===========================] +install ( + FILES + src/ed25519-donna/ed25519.h + DESTINATION include/ed25519-donna) diff --git a/Builds/CMake/deps/FindBoost.cmake b/Builds/CMake/deps/FindBoost.cmake new file mode 100644 index 00000000000..121e7264145 --- /dev/null +++ b/Builds/CMake/deps/FindBoost.cmake @@ -0,0 +1,2170 @@ +# Distributed under the OSI-approved BSD 3-Clause License. See accompanying +# file Copyright.txt or https://cmake.org/licensing for details. + +#[=======================================================================[.rst: +FindBoost +--------- + +Find Boost include dirs and libraries + +Use this module by invoking find_package with the form:: + + find_package(Boost + [version] [EXACT] # Minimum or EXACT version e.g. 1.67.0 + [REQUIRED] # Fail with error if Boost is not found + [COMPONENTS ...] # Boost libraries by their canonical name + # e.g. "date_time" for "libboost_date_time" + [OPTIONAL_COMPONENTS ...] + # Optional Boost libraries by their canonical name) + ) # e.g. 
"date_time" for "libboost_date_time" + +This module finds headers and requested component libraries OR a CMake +package configuration file provided by a "Boost CMake" build. For the +latter case skip to the "Boost CMake" section below. For the former +case results are reported in variables:: + + Boost_FOUND - True if headers and requested libraries were found + Boost_INCLUDE_DIRS - Boost include directories + Boost_LIBRARY_DIRS - Link directories for Boost libraries + Boost_LIBRARIES - Boost component libraries to be linked + Boost__FOUND - True if component was found ( is upper-case) + Boost__LIBRARY - Libraries to link for component (may include + target_link_libraries debug/optimized keywords) + Boost_VERSION_MACRO - BOOST_VERSION value from boost/version.hpp + Boost_VERSION_STRING - Boost version number in x.y.z format + Boost_VERSION - if CMP0093 NEW => same as Boost_VERSION_STRING + if CMP0093 OLD or unset => same as Boost_VERSION_MACRO + Boost_LIB_VERSION - Version string appended to library filenames + Boost_VERSION_MAJOR - Boost major version number (X in X.y.z) + alias: Boost_MAJOR_VERSION + Boost_VERSION_MINOR - Boost minor version number (Y in x.Y.z) + alias: Boost_MINOR_VERSION + Boost_VERSION_PATCH - Boost subminor version number (Z in x.y.Z) + alias: Boost_SUBMINOR_VERSION + Boost_VERSION_COUNT - Amount of version components (3) + Boost_LIB_DIAGNOSTIC_DEFINITIONS (Windows) + - Pass to add_definitions() to have diagnostic + information about Boost's automatic linking + displayed during compilation + +Note that Boost Python components require a Python version suffix +(Boost 1.67 and later), e.g. ``python36`` or ``python27`` for the +versions built against Python 3.6 and 2.7, respectively. This also +applies to additional components using Python including +``mpi_python`` and ``numpy``. Earlier Boost releases may use +distribution-specific suffixes such as ``2``, ``3`` or ``2.7``. +These may also be used as suffixes, but note that they are not +portable. + +This module reads hints about search locations from variables:: + + BOOST_ROOT - Preferred installation prefix + (or BOOSTROOT) + BOOST_INCLUDEDIR - Preferred include directory e.g. /include + BOOST_LIBRARYDIR - Preferred library directory e.g. /lib + Boost_NO_SYSTEM_PATHS - Set to ON to disable searching in locations not + specified by these hint variables. Default is OFF. 
+ Boost_ADDITIONAL_VERSIONS + - List of Boost versions not known to this module + (Boost install locations may contain the version) + +and saves search results persistently in CMake cache entries:: + + Boost_INCLUDE_DIR - Directory containing Boost headers + Boost_LIBRARY_DIR_RELEASE - Directory containing release Boost libraries + Boost_LIBRARY_DIR_DEBUG - Directory containing debug Boost libraries + Boost__LIBRARY_DEBUG - Component library debug variant + Boost__LIBRARY_RELEASE - Component library release variant + +The following :prop_tgt:`IMPORTED` targets are also defined:: + + Boost::headers - Target for header-only dependencies + (Boost include directory) + alias: Boost::boost + Boost:: - Target for specific component dependency + (shared or static library); is lower- + case + Boost::diagnostic_definitions - interface target to enable diagnostic + information about Boost's automatic linking + during compilation (adds BOOST_LIB_DIAGNOSTIC) + Boost::disable_autolinking - interface target to disable automatic + linking with MSVC (adds BOOST_ALL_NO_LIB) + Boost::dynamic_linking - interface target to enable dynamic linking + linking with MSVC (adds BOOST_ALL_DYN_LINK) + +Implicit dependencies such as ``Boost::filesystem`` requiring +``Boost::system`` will be automatically detected and satisfied, even +if system is not specified when using :command:`find_package` and if +``Boost::system`` is not added to :command:`target_link_libraries`. If using +``Boost::thread``, then ``Threads::Threads`` will also be added automatically. + +It is important to note that the imported targets behave differently +than variables created by this module: multiple calls to +:command:`find_package(Boost)` in the same directory or sub-directories with +different options (e.g. static or shared) will not override the +values of the targets created by the first call. + +Users may set these hints or results as ``CACHE`` entries. Projects +should not read these entries directly but instead use the above +result variables. Note that some hint names start in upper-case +"BOOST". One may specify these as environment variables if they are +not specified as CMake variables or cache entries. + +This module first searches for the ``Boost`` header files using the above +hint variables (excluding ``BOOST_LIBRARYDIR``) and saves the result in +``Boost_INCLUDE_DIR``. Then it searches for requested component libraries +using the above hints (excluding ``BOOST_INCLUDEDIR`` and +``Boost_ADDITIONAL_VERSIONS``), "lib" directories near ``Boost_INCLUDE_DIR``, +and the library name configuration settings below. It saves the +library directories in ``Boost_LIBRARY_DIR_DEBUG`` and +``Boost_LIBRARY_DIR_RELEASE`` and individual library +locations in ``Boost__LIBRARY_DEBUG`` and ``Boost__LIBRARY_RELEASE``. +When one changes settings used by previous searches in the same build +tree (excluding environment variables) this module discards previous +search results affected by the changes and searches again. + +Boost libraries come in many variants encoded in their file name. +Users or projects may tell this module which variant to find by +setting variables:: + + Boost_USE_DEBUG_LIBS - Set to ON or OFF to specify whether to search + and use the debug libraries. Default is ON. + Boost_USE_RELEASE_LIBS - Set to ON or OFF to specify whether to search + and use the release libraries. Default is ON. + Boost_USE_MULTITHREADED - Set to OFF to use the non-multithreaded + libraries ('mt' tag). Default is ON. 
+ Boost_USE_STATIC_LIBS - Set to ON to force the use of the static + libraries. Default is OFF. + Boost_USE_STATIC_RUNTIME - Set to ON or OFF to specify whether to use + libraries linked statically to the C++ runtime + ('s' tag). Default is platform dependent. + Boost_USE_DEBUG_RUNTIME - Set to ON or OFF to specify whether to use + libraries linked to the MS debug C++ runtime + ('g' tag). Default is ON. + Boost_USE_DEBUG_PYTHON - Set to ON to use libraries compiled with a + debug Python build ('y' tag). Default is OFF. + Boost_USE_STLPORT - Set to ON to use libraries compiled with + STLPort ('p' tag). Default is OFF. + Boost_USE_STLPORT_DEPRECATED_NATIVE_IOSTREAMS + - Set to ON to use libraries compiled with + STLPort deprecated "native iostreams" + ('n' tag). Default is OFF. + Boost_COMPILER - Set to the compiler-specific library suffix + (e.g. "-gcc43"). Default is auto-computed + for the C++ compiler in use. A list may be + used if multiple compatible suffixes should + be tested for, in decreasing order of + preference. + Boost_ARCHITECTURE - Set to the architecture-specific library suffix + (e.g. "-x64"). Default is auto-computed for the + C++ compiler in use. + Boost_THREADAPI - Suffix for "thread" component library name, + such as "pthread" or "win32". Names with + and without this suffix will both be tried. + Boost_NAMESPACE - Alternate namespace used to build boost with + e.g. if set to "myboost", will search for + myboost_thread instead of boost_thread. + +Other variables one may set to control this module are:: + + Boost_DEBUG - Set to ON to enable debug output from FindBoost. + Please enable this before filing any bug report. + Boost_REALPATH - Set to ON to resolve symlinks for discovered + libraries to assist with packaging. For example, + the "system" component library may be resolved to + "/usr/lib/libboost_system.so.1.67.0" instead of + "/usr/lib/libboost_system.so". This does not + affect linking and should not be enabled unless + the user needs this information. + Boost_LIBRARY_DIR - Default value for Boost_LIBRARY_DIR_RELEASE and + Boost_LIBRARY_DIR_DEBUG. + +On Visual Studio and Borland compilers Boost headers request automatic +linking to corresponding libraries. This requires matching libraries +to be linked explicitly or available in the link library search path. +In this case setting ``Boost_USE_STATIC_LIBS`` to ``OFF`` may not achieve +dynamic linking. Boost automatic linking typically requests static +libraries with a few exceptions (such as ``Boost.Python``). Use:: + + add_definitions(${Boost_LIB_DIAGNOSTIC_DEFINITIONS}) + +to ask Boost to report information about automatic linking requests. 
+ +Example to find Boost headers only:: + + find_package(Boost 1.36.0) + if(Boost_FOUND) + include_directories(${Boost_INCLUDE_DIRS}) + add_executable(foo foo.cc) + endif() + +Example to find Boost libraries and use imported targets:: + + find_package(Boost 1.56 REQUIRED COMPONENTS + date_time filesystem iostreams) + add_executable(foo foo.cc) + target_link_libraries(foo Boost::date_time Boost::filesystem + Boost::iostreams) + +Example to find Boost Python 3.6 libraries and use imported targets:: + + find_package(Boost 1.67 REQUIRED COMPONENTS + python36 numpy36) + add_executable(foo foo.cc) + target_link_libraries(foo Boost::python36 Boost::numpy36) + +Example to find Boost headers and some *static* (release only) libraries:: + + set(Boost_USE_STATIC_LIBS ON) # only find static libs + set(Boost_USE_DEBUG_LIBS OFF) # ignore debug libs and + set(Boost_USE_RELEASE_LIBS ON) # only find release libs + set(Boost_USE_MULTITHREADED ON) + set(Boost_USE_STATIC_RUNTIME OFF) + find_package(Boost 1.66.0 COMPONENTS date_time filesystem system ...) + if(Boost_FOUND) + include_directories(${Boost_INCLUDE_DIRS}) + add_executable(foo foo.cc) + target_link_libraries(foo ${Boost_LIBRARIES}) + endif() + +Boost CMake +^^^^^^^^^^^ + +If Boost was built using the boost-cmake project or from Boost 1.70.0 on +it provides a package configuration file for use with find_package's config mode. +This module looks for the package configuration file called +``BoostConfig.cmake`` or ``boost-config.cmake`` and stores the result in +``CACHE`` entry "Boost_DIR". If found, the package configuration file is loaded +and this module returns with no further action. See documentation of +the Boost CMake package configuration for details on what it provides. + +Set ``Boost_NO_BOOST_CMAKE`` to ``ON``, to disable the search for boost-cmake. +#]=======================================================================] + +# The FPHSA helper provides standard way of reporting final search results to +# the user including the version and component checks. +include(FindPackageHandleStandardArgs) + +# Save project's policies +cmake_policy(PUSH) +cmake_policy(SET CMP0057 NEW) # if IN_LIST + +#------------------------------------------------------------------------------- +# Before we go searching, check whether a boost cmake package is available, unless +# the user specifically asked NOT to search for one. +# +# If Boost_DIR is set, this behaves as any find_package call would. If not, +# it looks at BOOST_ROOT and BOOSTROOT to find Boost. +# +if (NOT Boost_NO_BOOST_CMAKE) + # If Boost_DIR is not set, look for BOOSTROOT and BOOST_ROOT as alternatives, + # since these are more conventional for Boost. + if ("$ENV{Boost_DIR}" STREQUAL "") + if (NOT "$ENV{BOOST_ROOT}" STREQUAL "") + set(ENV{Boost_DIR} $ENV{BOOST_ROOT}) + elseif (NOT "$ENV{BOOSTROOT}" STREQUAL "") + set(ENV{Boost_DIR} $ENV{BOOSTROOT}) + endif() + endif() + + # Do the same find_package call but look specifically for the CMake version. + # Note that args are passed in the Boost_FIND_xxxxx variables, so there is no + # need to delegate them to this find_package call. + find_package(Boost QUIET NO_MODULE) + mark_as_advanced(Boost_DIR) + + # If we found a boost cmake package, then we're done. Print out what we found. + # Otherwise let the rest of the module try to find it. 
+ if(Boost_FOUND) + # Neither the legacy boost-cmake nor the new builtin BoostConfig (as in 1.70) + # report the found components in the standard variables, so we need to convert + # them here + if(Boost_FIND_COMPONENTS) + foreach(_comp IN LISTS Boost_FIND_COMPONENTS) + string(TOUPPER ${_comp} _uppercomp) + if(DEFINED Boost${_comp}_FOUND) + set(Boost_${_comp}_FOUND ${Boost${_comp}_FOUND}) + elseif(DEFINED Boost_${_uppercomp}_FOUND) + set(Boost_${_comp}_FOUND ${Boost_${_uppercomp}_FOUND}) + endif() + endforeach() + endif() + + find_package_handle_standard_args(Boost HANDLE_COMPONENTS CONFIG_MODE) + + # Restore project's policies + cmake_policy(POP) + return() + endif() +endif() + + +#------------------------------------------------------------------------------- +# FindBoost functions & macros +# + +# +# Print debug text if Boost_DEBUG is set. +# Call example: +# _Boost_DEBUG_PRINT("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "debug message") +# +function(_Boost_DEBUG_PRINT file line text) + if(Boost_DEBUG) + message(STATUS "[ ${file}:${line} ] ${text}") + endif() +endfunction() + +# +# _Boost_DEBUG_PRINT_VAR(file line variable_name [ENVIRONMENT] +# [SOURCE "short explanation of origin of var value"]) +# +# ENVIRONMENT - look up environment variable instead of CMake variable +# +# Print variable name and its value if Boost_DEBUG is set. +# Call example: +# _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" BOOST_ROOT) +# +function(_Boost_DEBUG_PRINT_VAR file line name) + if(Boost_DEBUG) + cmake_parse_arguments(_args "ENVIRONMENT" "SOURCE" "" ${ARGN}) + + unset(source) + if(_args_SOURCE) + set(source " (${_args_SOURCE})") + endif() + + if(_args_ENVIRONMENT) + if(DEFINED ENV{${name}}) + set(value "\"$ENV{${name}}\"") + else() + set(value "") + endif() + set(_name "ENV{${name}}") + else() + if(DEFINED "${name}") + set(value "\"${${name}}\"") + else() + set(value "") + endif() + set(_name "${name}") + endif() + + _Boost_DEBUG_PRINT("${file}" "${line}" "${_name} = ${value}${source}") + endif() +endfunction() + +############################################ +# +# Check the existence of the libraries. +# +############################################ +# This macro was taken directly from the FindQt4.cmake file that is included +# with the CMake distribution. This is NOT my work. All work was done by the +# original authors of the FindQt4.cmake file. Only minor modifications were +# made to remove references to Qt and make this file more generally applicable +# And ELSE/ENDIF pairs were removed for readability. 
+######################################################################### + +macro(_Boost_ADJUST_LIB_VARS basename) + if(Boost_INCLUDE_DIR ) + if(Boost_${basename}_LIBRARY_DEBUG AND Boost_${basename}_LIBRARY_RELEASE) + # if the generator is multi-config or if CMAKE_BUILD_TYPE is set for + # single-config generators, set optimized and debug libraries + get_property(_isMultiConfig GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG) + if(_isMultiConfig OR CMAKE_BUILD_TYPE) + set(Boost_${basename}_LIBRARY optimized ${Boost_${basename}_LIBRARY_RELEASE} debug ${Boost_${basename}_LIBRARY_DEBUG}) + else() + # For single-config generators where CMAKE_BUILD_TYPE has no value, + # just use the release libraries + set(Boost_${basename}_LIBRARY ${Boost_${basename}_LIBRARY_RELEASE} ) + endif() + # FIXME: This probably should be set for both cases + set(Boost_${basename}_LIBRARIES optimized ${Boost_${basename}_LIBRARY_RELEASE} debug ${Boost_${basename}_LIBRARY_DEBUG}) + endif() + + # if only the release version was found, set the debug variable also to the release version + if(Boost_${basename}_LIBRARY_RELEASE AND NOT Boost_${basename}_LIBRARY_DEBUG) + set(Boost_${basename}_LIBRARY_DEBUG ${Boost_${basename}_LIBRARY_RELEASE}) + set(Boost_${basename}_LIBRARY ${Boost_${basename}_LIBRARY_RELEASE}) + set(Boost_${basename}_LIBRARIES ${Boost_${basename}_LIBRARY_RELEASE}) + endif() + + # if only the debug version was found, set the release variable also to the debug version + if(Boost_${basename}_LIBRARY_DEBUG AND NOT Boost_${basename}_LIBRARY_RELEASE) + set(Boost_${basename}_LIBRARY_RELEASE ${Boost_${basename}_LIBRARY_DEBUG}) + set(Boost_${basename}_LIBRARY ${Boost_${basename}_LIBRARY_DEBUG}) + set(Boost_${basename}_LIBRARIES ${Boost_${basename}_LIBRARY_DEBUG}) + endif() + + # If the debug & release library ends up being the same, omit the keywords + if("${Boost_${basename}_LIBRARY_RELEASE}" STREQUAL "${Boost_${basename}_LIBRARY_DEBUG}") + set(Boost_${basename}_LIBRARY ${Boost_${basename}_LIBRARY_RELEASE} ) + set(Boost_${basename}_LIBRARIES ${Boost_${basename}_LIBRARY_RELEASE} ) + endif() + + if(Boost_${basename}_LIBRARY AND Boost_${basename}_HEADER) + set(Boost_${basename}_FOUND ON) + if("x${basename}" STREQUAL "xTHREAD" AND NOT TARGET Threads::Threads) + string(APPEND Boost_ERROR_REASON_THREAD " (missing dependency: Threads)") + set(Boost_THREAD_FOUND OFF) + endif() + endif() + + endif() + # Make variables changeable to the advanced user + mark_as_advanced( + Boost_${basename}_LIBRARY_RELEASE + Boost_${basename}_LIBRARY_DEBUG + ) +endmacro() + +# Detect changes in used variables. +# Compares the current variable value with the last one. +# In short form: +# v != v_LAST -> CHANGED = 1 +# v is defined, v_LAST not -> CHANGED = 1 +# v is not defined, but v_LAST is -> CHANGED = 1 +# otherwise -> CHANGED = 0 +# CHANGED is returned in variable named ${changed_var} +macro(_Boost_CHANGE_DETECT changed_var) + set(${changed_var} 0) + foreach(v ${ARGN}) + if(DEFINED _Boost_COMPONENTS_SEARCHED) + if(${v}) + if(_${v}_LAST) + string(COMPARE NOTEQUAL "${${v}}" "${_${v}_LAST}" _${v}_CHANGED) + else() + set(_${v}_CHANGED 1) + endif() + elseif(_${v}_LAST) + set(_${v}_CHANGED 1) + endif() + if(_${v}_CHANGED) + set(${changed_var} 1) + endif() + else() + set(_${v}_CHANGED 0) + endif() + endforeach() +endmacro() + +# +# Find the given library (var). 
+# Use 'build_type' to support different lib paths for RELEASE or DEBUG builds +# +macro(_Boost_FIND_LIBRARY var build_type) + + find_library(${var} ${ARGN}) + + if(${var}) + # If this is the first library found then save Boost_LIBRARY_DIR_[RELEASE,DEBUG]. + if(NOT Boost_LIBRARY_DIR_${build_type}) + get_filename_component(_dir "${${var}}" PATH) + set(Boost_LIBRARY_DIR_${build_type} "${_dir}" CACHE PATH "Boost library directory ${build_type}" FORCE) + endif() + elseif(_Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT) + # Try component-specific hints but do not save Boost_LIBRARY_DIR_[RELEASE,DEBUG]. + find_library(${var} HINTS ${_Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT} ${ARGN}) + endif() + + # If Boost_LIBRARY_DIR_[RELEASE,DEBUG] is known then search only there. + if(Boost_LIBRARY_DIR_${build_type}) + set(_boost_LIBRARY_SEARCH_DIRS_${build_type} ${Boost_LIBRARY_DIR_${build_type}} NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH) + _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" + "Boost_LIBRARY_DIR_${build_type}") + _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" + "_boost_LIBRARY_SEARCH_DIRS_${build_type}") + endif() +endmacro() + +#------------------------------------------------------------------------------- + +# Convert CMAKE_CXX_COMPILER_VERSION to boost compiler suffix version. +function(_Boost_COMPILER_DUMPVERSION _OUTPUT_VERSION _OUTPUT_VERSION_MAJOR _OUTPUT_VERSION_MINOR) + string(REGEX REPLACE "([0-9]+)\\.([0-9]+)(\\.[0-9]+)?" "\\1" + _boost_COMPILER_VERSION_MAJOR "${CMAKE_CXX_COMPILER_VERSION}") + string(REGEX REPLACE "([0-9]+)\\.([0-9]+)(\\.[0-9]+)?" "\\2" + _boost_COMPILER_VERSION_MINOR "${CMAKE_CXX_COMPILER_VERSION}") + + set(_boost_COMPILER_VERSION "${_boost_COMPILER_VERSION_MAJOR}${_boost_COMPILER_VERSION_MINOR}") + + set(${_OUTPUT_VERSION} ${_boost_COMPILER_VERSION} PARENT_SCOPE) + set(${_OUTPUT_VERSION_MAJOR} ${_boost_COMPILER_VERSION_MAJOR} PARENT_SCOPE) + set(${_OUTPUT_VERSION_MINOR} ${_boost_COMPILER_VERSION_MINOR} PARENT_SCOPE) +endfunction() + +# +# Take a list of libraries with "thread" in it +# and prepend duplicates with "thread_${Boost_THREADAPI}" +# at the front of the list +# +function(_Boost_PREPEND_LIST_WITH_THREADAPI _output) + set(_orig_libnames ${ARGN}) + string(REPLACE "thread" "thread_${Boost_THREADAPI}" _threadapi_libnames "${_orig_libnames}") + set(${_output} ${_threadapi_libnames} ${_orig_libnames} PARENT_SCOPE) +endfunction() + +# +# If a library is found, replace its cache entry with its REALPATH +# +function(_Boost_SWAP_WITH_REALPATH _library _docstring) + if(${_library}) + get_filename_component(_boost_filepathreal ${${_library}} REALPATH) + unset(${_library} CACHE) + set(${_library} ${_boost_filepathreal} CACHE FILEPATH "${_docstring}") + endif() +endfunction() + +function(_Boost_CHECK_SPELLING _var) + if(${_var}) + string(TOUPPER ${_var} _var_UC) + message(FATAL_ERROR "ERROR: ${_var} is not the correct spelling. The proper spelling is ${_var_UC}.") + endif() +endfunction() + +# Guesses Boost's compiler prefix used in built library names +# Returns the guess by setting the variable pointed to by _ret +function(_Boost_GUESS_COMPILER_PREFIX _ret) + if("x${CMAKE_CXX_COMPILER_ID}" STREQUAL "xIntel") + if(WIN32) + set (_boost_COMPILER "-iw") + else() + set (_boost_COMPILER "-il") + endif() + elseif (GHSMULTI) + set(_boost_COMPILER "-ghs") + elseif("x${CMAKE_CXX_COMPILER_ID}" STREQUAL "xMSVC" OR "x${CMAKE_CXX_SIMULATE_ID}" STREQUAL "xMSVC") + if(MSVC_TOOLSET_VERSION GREATER_EQUAL 150) + # Not yet known. 
+ set(_boost_COMPILER "") + elseif(MSVC_TOOLSET_VERSION GREATER_EQUAL 140) + # MSVC toolset 14.x versions are forward compatible. + set(_boost_COMPILER "") + foreach(v 9 8 7 6 5 4 3 2 1 0) + if(MSVC_TOOLSET_VERSION GREATER_EQUAL 14${v}) + list(APPEND _boost_COMPILER "-vc14${v}") + endif() + endforeach() + elseif(MSVC_TOOLSET_VERSION GREATER_EQUAL 80) + set(_boost_COMPILER "-vc${MSVC_TOOLSET_VERSION}") + elseif(NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 13.10) + set(_boost_COMPILER "-vc71") + elseif(NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 13) # Good luck! + set(_boost_COMPILER "-vc7") # yes, this is correct + else() # VS 6.0 Good luck! + set(_boost_COMPILER "-vc6") # yes, this is correct + endif() + + if("x${CMAKE_CXX_COMPILER_ID}" STREQUAL "xClang") + string(REPLACE "." ";" VERSION_LIST "${CMAKE_CXX_COMPILER_VERSION}") + list(GET VERSION_LIST 0 CLANG_VERSION_MAJOR) + set(_boost_COMPILER "-clangw${CLANG_VERSION_MAJOR};${_boost_COMPILER}") + endif() + elseif (BORLAND) + set(_boost_COMPILER "-bcb") + elseif(CMAKE_CXX_COMPILER_ID STREQUAL "SunPro") + set(_boost_COMPILER "-sw") + elseif(CMAKE_CXX_COMPILER_ID STREQUAL "XL") + set(_boost_COMPILER "-xlc") + elseif (MINGW) + if(Boost_VERSION_STRING VERSION_LESS 1.34) + set(_boost_COMPILER "-mgw") # no GCC version encoding prior to 1.34 + else() + _Boost_COMPILER_DUMPVERSION(_boost_COMPILER_VERSION _boost_COMPILER_VERSION_MAJOR _boost_COMPILER_VERSION_MINOR) + set(_boost_COMPILER "-mgw${_boost_COMPILER_VERSION}") + endif() + elseif (UNIX) + _Boost_COMPILER_DUMPVERSION(_boost_COMPILER_VERSION _boost_COMPILER_VERSION_MAJOR _boost_COMPILER_VERSION_MINOR) + if(NOT Boost_VERSION_STRING VERSION_LESS 1.69.0) + # From GCC 5 and clang 4, versioning changes and minor becomes patch. + # For those compilers, patch is exclude from compiler tag in Boost 1.69+ library naming. + if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND _boost_COMPILER_VERSION_MAJOR VERSION_GREATER 4) + set(_boost_COMPILER_VERSION "${_boost_COMPILER_VERSION_MAJOR}") + elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Clang" AND _boost_COMPILER_VERSION_MAJOR VERSION_GREATER 3) + set(_boost_COMPILER_VERSION "${_boost_COMPILER_VERSION_MAJOR}") + endif() + endif() + + if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + if(Boost_VERSION_STRING VERSION_LESS 1.34) + set(_boost_COMPILER "-gcc") # no GCC version encoding prior to 1.34 + else() + # Determine which version of GCC we have. + if(APPLE) + if(Boost_VERSION_STRING VERSION_LESS 1.36.0) + # In Boost <= 1.35.0, there is no mangled compiler name for + # the macOS/Darwin version of GCC. + set(_boost_COMPILER "") + else() + # In Boost 1.36.0 and newer, the mangled compiler name used + # on macOS/Darwin is "xgcc". + set(_boost_COMPILER "-xgcc${_boost_COMPILER_VERSION}") + endif() + else() + set(_boost_COMPILER "-gcc${_boost_COMPILER_VERSION}") + endif() + endif() + elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Clang") + # TODO: Find out any Boost version constraints vs clang support. + set(_boost_COMPILER "-clang${_boost_COMPILER_VERSION}") + endif() + else() + set(_boost_COMPILER "") + endif() + _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" + "_boost_COMPILER" SOURCE "guessed") + set(${_ret} ${_boost_COMPILER} PARENT_SCOPE) +endfunction() + +# +# Get component dependencies. Requires the dependencies to have been +# defined for the Boost release version. 
+# +# component - the component to check +# _ret - list of library dependencies +# +function(_Boost_COMPONENT_DEPENDENCIES component _ret) + # Note: to add a new Boost release, run + # + # % cmake -DBOOST_DIR=/path/to/boost/source -P Utilities/Scripts/BoostScanDeps.cmake + # + # The output may be added in a new block below. If it's the same as + # the previous release, simply update the version range of the block + # for the previous release. Also check if any new components have + # been added, and add any new components to + # _Boost_COMPONENT_HEADERS. + # + # This information was originally generated by running + # BoostScanDeps.cmake against every boost release to date supported + # by FindBoost: + # + # % for version in /path/to/boost/sources/* + # do + # cmake -DBOOST_DIR=$version -P Utilities/Scripts/BoostScanDeps.cmake + # done + # + # The output was then updated by search and replace with these regexes: + # + # - Strip message(STATUS) prefix dashes + # s;^-- ;; + # - Indent + # s;^set(; set(;; + # - Add conditionals + # s;Scanning /path/to/boost/sources/boost_\(.*\)_\(.*\)_\(.*); elseif(NOT Boost_VERSION_STRING VERSION_LESS \1\.\2\.\3 AND Boost_VERSION_STRING VERSION_LESS xxxx); + # + # This results in the logic seen below, but will require the xxxx + # replacing with the following Boost release version (or the next + # minor version to be released, e.g. 1.59 was the latest at the time + # of writing, making 1.60 the next. Identical consecutive releases + # were then merged together by updating the end range of the first + # block and removing the following redundant blocks. + # + # Running the script against all historical releases should be + # required only if the BoostScanDeps.cmake script logic is changed. + # The addition of a new release should only require it to be run + # against the new release. 
+ + # Handle Python version suffixes + if(component MATCHES "^(python|mpi_python|numpy)([0-9][0-9]?|[0-9]\\.[0-9])\$") + set(component "${CMAKE_MATCH_1}") + set(component_python_version "${CMAKE_MATCH_2}") + endif() + + set(_Boost_IMPORTED_TARGETS TRUE) + if(Boost_VERSION_STRING AND Boost_VERSION_STRING VERSION_LESS 1.33.0) + message(WARNING "Imported targets and dependency information not available for Boost version ${Boost_VERSION_STRING} (all versions older than 1.33)") + set(_Boost_IMPORTED_TARGETS FALSE) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.33.0 AND Boost_VERSION_STRING VERSION_LESS 1.35.0) + set(_Boost_IOSTREAMS_DEPENDENCIES regex thread) + set(_Boost_REGEX_DEPENDENCIES thread) + set(_Boost_WAVE_DEPENDENCIES filesystem thread) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.35.0 AND Boost_VERSION_STRING VERSION_LESS 1.36.0) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_WAVE_DEPENDENCIES filesystem system thread) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.36.0 AND Boost_VERSION_STRING VERSION_LESS 1.38.0) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_WAVE_DEPENDENCIES filesystem system thread) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.38.0 AND Boost_VERSION_STRING VERSION_LESS 1.43.0) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_THREAD_DEPENDENCIES date_time) + set(_Boost_WAVE_DEPENDENCIES filesystem system thread date_time) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.43.0 AND Boost_VERSION_STRING VERSION_LESS 1.44.0) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l random) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_THREAD_DEPENDENCIES date_time) + set(_Boost_WAVE_DEPENDENCIES filesystem system thread date_time) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.44.0 AND Boost_VERSION_STRING VERSION_LESS 1.45.0) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l random serialization) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_THREAD_DEPENDENCIES date_time) + set(_Boost_WAVE_DEPENDENCIES serialization filesystem system thread date_time) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) 
+ elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.45.0 AND Boost_VERSION_STRING VERSION_LESS 1.47.0) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l random) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_THREAD_DEPENDENCIES date_time) + set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread date_time) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.47.0 AND Boost_VERSION_STRING VERSION_LESS 1.48.0) + set(_Boost_CHRONO_DEPENDENCIES system) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l random) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_THREAD_DEPENDENCIES date_time) + set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread date_time) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.48.0 AND Boost_VERSION_STRING VERSION_LESS 1.50.0) + set(_Boost_CHRONO_DEPENDENCIES system) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l random) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_THREAD_DEPENDENCIES date_time) + set(_Boost_TIMER_DEPENDENCIES chrono system) + set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread date_time) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.50.0 AND Boost_VERSION_STRING VERSION_LESS 1.53.0) + set(_Boost_CHRONO_DEPENDENCIES system) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l regex random) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_THREAD_DEPENDENCIES chrono system date_time) + set(_Boost_TIMER_DEPENDENCIES chrono system) + set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.53.0 AND Boost_VERSION_STRING VERSION_LESS 1.54.0) + set(_Boost_ATOMIC_DEPENDENCIES thread chrono system date_time) + set(_Boost_CHRONO_DEPENDENCIES system) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l regex random) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) + set(_Boost_TIMER_DEPENDENCIES chrono system) + set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.54.0 AND Boost_VERSION_STRING VERSION_LESS 1.55.0) + 
set(_Boost_ATOMIC_DEPENDENCIES thread chrono system date_time) + set(_Boost_CHRONO_DEPENDENCIES system) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_LOG_DEPENDENCIES log_setup date_time system filesystem thread regex chrono) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l regex random) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) + set(_Boost_TIMER_DEPENDENCIES chrono system) + set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.55.0 AND Boost_VERSION_STRING VERSION_LESS 1.56.0) + set(_Boost_CHRONO_DEPENDENCIES system) + set(_Boost_COROUTINE_DEPENDENCIES context system) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_LOG_DEPENDENCIES log_setup date_time system filesystem thread regex chrono) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l regex random) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) + set(_Boost_TIMER_DEPENDENCIES chrono system) + set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.56.0 AND Boost_VERSION_STRING VERSION_LESS 1.59.0) + set(_Boost_CHRONO_DEPENDENCIES system) + set(_Boost_COROUTINE_DEPENDENCIES context system) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_LOG_DEPENDENCIES log_setup date_time system filesystem thread regex chrono) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_RANDOM_DEPENDENCIES system) + set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) + set(_Boost_TIMER_DEPENDENCIES chrono system) + set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.59.0 AND Boost_VERSION_STRING VERSION_LESS 1.60.0) + set(_Boost_CHRONO_DEPENDENCIES system) + set(_Boost_COROUTINE_DEPENDENCIES context system) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_LOG_DEPENDENCIES log_setup date_time system filesystem thread regex chrono atomic) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_RANDOM_DEPENDENCIES system) + set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) + set(_Boost_TIMER_DEPENDENCIES chrono system) + set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.60.0 AND Boost_VERSION_STRING 
VERSION_LESS 1.61.0) + set(_Boost_CHRONO_DEPENDENCIES system) + set(_Boost_COROUTINE_DEPENDENCIES context system) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_LOG_DEPENDENCIES date_time log_setup system filesystem thread regex chrono atomic) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_RANDOM_DEPENDENCIES system) + set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) + set(_Boost_TIMER_DEPENDENCIES chrono system) + set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.61.0 AND Boost_VERSION_STRING VERSION_LESS 1.62.0) + set(_Boost_CHRONO_DEPENDENCIES system) + set(_Boost_CONTEXT_DEPENDENCIES thread chrono system date_time) + set(_Boost_COROUTINE_DEPENDENCIES context system) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_LOG_DEPENDENCIES date_time log_setup system filesystem thread regex chrono atomic) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_RANDOM_DEPENDENCIES system) + set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) + set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.62.0 AND Boost_VERSION_STRING VERSION_LESS 1.63.0) + set(_Boost_CHRONO_DEPENDENCIES system) + set(_Boost_CONTEXT_DEPENDENCIES thread chrono system date_time) + set(_Boost_COROUTINE_DEPENDENCIES context system) + set(_Boost_FIBER_DEPENDENCIES context thread chrono system date_time) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_LOG_DEPENDENCIES date_time log_setup system filesystem thread regex chrono atomic) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_RANDOM_DEPENDENCIES system) + set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) + set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.63.0 AND Boost_VERSION_STRING VERSION_LESS 1.65.0) + set(_Boost_CHRONO_DEPENDENCIES system) + set(_Boost_CONTEXT_DEPENDENCIES thread chrono system date_time) + set(_Boost_COROUTINE_DEPENDENCIES context system) + set(_Boost_COROUTINE2_DEPENDENCIES context fiber thread chrono system date_time) + set(_Boost_FIBER_DEPENDENCIES context thread chrono system date_time) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_LOG_DEPENDENCIES date_time log_setup system filesystem thread regex chrono atomic) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES 
python${component_python_version} mpi serialization) + set(_Boost_RANDOM_DEPENDENCIES system) + set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) + set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.65.0 AND Boost_VERSION_STRING VERSION_LESS 1.67.0) + set(_Boost_CHRONO_DEPENDENCIES system) + set(_Boost_CONTEXT_DEPENDENCIES thread chrono system date_time) + set(_Boost_COROUTINE_DEPENDENCIES context system) + set(_Boost_FIBER_DEPENDENCIES context thread chrono system date_time) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_LOG_DEPENDENCIES date_time log_setup system filesystem thread regex chrono atomic) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_NUMPY_DEPENDENCIES python${component_python_version}) + set(_Boost_RANDOM_DEPENDENCIES system) + set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) + set(_Boost_TIMER_DEPENDENCIES chrono system) + set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.67.0 AND Boost_VERSION_STRING VERSION_LESS 1.68.0) + set(_Boost_CHRONO_DEPENDENCIES system) + set(_Boost_CONTEXT_DEPENDENCIES thread chrono system date_time) + set(_Boost_COROUTINE_DEPENDENCIES context system) + set(_Boost_FIBER_DEPENDENCIES context thread chrono system date_time) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_LOG_DEPENDENCIES date_time log_setup system filesystem thread regex chrono atomic) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_NUMPY_DEPENDENCIES python${component_python_version}) + set(_Boost_RANDOM_DEPENDENCIES system) + set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) + set(_Boost_TIMER_DEPENDENCIES chrono system) + set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.68.0 AND Boost_VERSION_STRING VERSION_LESS 1.69.0) + set(_Boost_CHRONO_DEPENDENCIES system) + set(_Boost_CONTEXT_DEPENDENCIES thread chrono system date_time) + set(_Boost_CONTRACT_DEPENDENCIES thread chrono system date_time) + set(_Boost_COROUTINE_DEPENDENCIES context system) + set(_Boost_FIBER_DEPENDENCIES context thread chrono system date_time) + set(_Boost_FILESYSTEM_DEPENDENCIES system) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_LOG_DEPENDENCIES date_time log_setup system filesystem thread regex chrono atomic) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_NUMPY_DEPENDENCIES python${component_python_version}) + set(_Boost_RANDOM_DEPENDENCIES system) + set(_Boost_THREAD_DEPENDENCIES chrono system date_time atomic) + 
set(_Boost_TIMER_DEPENDENCIES chrono system) + set(_Boost_WAVE_DEPENDENCIES filesystem system serialization thread chrono date_time atomic) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + elseif(NOT Boost_VERSION_STRING VERSION_LESS 1.69.0 AND Boost_VERSION_STRING VERSION_LESS 1.70.0) + set(_Boost_CONTRACT_DEPENDENCIES thread chrono date_time) + set(_Boost_COROUTINE_DEPENDENCIES context) + set(_Boost_FIBER_DEPENDENCIES context) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_LOG_DEPENDENCIES date_time log_setup filesystem thread regex chrono atomic) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_NUMPY_DEPENDENCIES python${component_python_version}) + set(_Boost_THREAD_DEPENDENCIES chrono date_time atomic) + set(_Boost_TIMER_DEPENDENCIES chrono system) + set(_Boost_WAVE_DEPENDENCIES filesystem serialization thread chrono date_time atomic) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + else() + if(NOT Boost_VERSION_STRING VERSION_LESS 1.70.0) + set(_Boost_CONTRACT_DEPENDENCIES thread chrono date_time) + set(_Boost_COROUTINE_DEPENDENCIES context) + set(_Boost_FIBER_DEPENDENCIES context) + set(_Boost_IOSTREAMS_DEPENDENCIES regex) + set(_Boost_LOG_DEPENDENCIES date_time log_setup filesystem thread regex chrono atomic) + set(_Boost_MATH_DEPENDENCIES math_c99 math_c99f math_c99l math_tr1 math_tr1f math_tr1l atomic) + set(_Boost_MPI_DEPENDENCIES serialization) + set(_Boost_MPI_PYTHON_DEPENDENCIES python${component_python_version} mpi serialization) + set(_Boost_NUMPY_DEPENDENCIES python${component_python_version}) + set(_Boost_THREAD_DEPENDENCIES chrono date_time atomic) + set(_Boost_TIMER_DEPENDENCIES chrono system) + set(_Boost_WAVE_DEPENDENCIES filesystem serialization thread chrono date_time atomic) + set(_Boost_WSERIALIZATION_DEPENDENCIES serialization) + endif() + if(NOT Boost_VERSION_STRING VERSION_LESS 1.77.0) + message(WARNING "New Boost version may have incorrect or missing dependencies and imported targets") + endif() + endif() + + string(TOUPPER ${component} uppercomponent) + set(${_ret} ${_Boost_${uppercomponent}_DEPENDENCIES} PARENT_SCOPE) + set(_Boost_IMPORTED_TARGETS ${_Boost_IMPORTED_TARGETS} PARENT_SCOPE) + + string(REGEX REPLACE ";" " " _boost_DEPS_STRING "${_Boost_${uppercomponent}_DEPENDENCIES}") + if (NOT _boost_DEPS_STRING) + set(_boost_DEPS_STRING "(none)") + endif() + # message(STATUS "Dependencies for Boost::${component}: ${_boost_DEPS_STRING}") +endfunction() + +# +# Get component headers. This is the primary header (or headers) for +# a given component, and is used to check that the headers are present +# as well as the library itself as an extra sanity check of the build +# environment. +# +# component - the component to check +# _hdrs +# +function(_Boost_COMPONENT_HEADERS component _hdrs) + # Handle Python version suffixes + if(component MATCHES "^(python|mpi_python|numpy)([0-9][0-9]?|[0-9]\\.[0-9])\$") + set(component "${CMAKE_MATCH_1}") + set(component_python_version "${CMAKE_MATCH_2}") + endif() + + # Note: new boost components will require adding here. The header + # must be present in all versions of Boost providing a library. 
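+  # For example, a hypothetical new component "foo" that ships boost/foo.hpp
+  # (illustrative only, not a real Boost library) would be registered with:
+  #   set(_Boost_FOO_HEADERS "boost/foo.hpp")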
+ set(_Boost_ATOMIC_HEADERS "boost/atomic.hpp") + set(_Boost_CHRONO_HEADERS "boost/chrono.hpp") + set(_Boost_CONTAINER_HEADERS "boost/container/container_fwd.hpp") + set(_Boost_CONTRACT_HEADERS "boost/contract.hpp") + if(Boost_VERSION_STRING VERSION_LESS 1.61.0) + set(_Boost_CONTEXT_HEADERS "boost/context/all.hpp") + else() + set(_Boost_CONTEXT_HEADERS "boost/context/detail/fcontext.hpp") + endif() + set(_Boost_COROUTINE_HEADERS "boost/coroutine/all.hpp") + set(_Boost_DATE_TIME_HEADERS "boost/date_time/date.hpp") + set(_Boost_EXCEPTION_HEADERS "boost/exception/exception.hpp") + set(_Boost_FIBER_HEADERS "boost/fiber/all.hpp") + set(_Boost_FILESYSTEM_HEADERS "boost/filesystem/path.hpp") + set(_Boost_GRAPH_HEADERS "boost/graph/adjacency_list.hpp") + set(_Boost_GRAPH_PARALLEL_HEADERS "boost/graph/adjacency_list.hpp") + set(_Boost_IOSTREAMS_HEADERS "boost/iostreams/stream.hpp") + set(_Boost_LOCALE_HEADERS "boost/locale.hpp") + set(_Boost_LOG_HEADERS "boost/log/core.hpp") + set(_Boost_LOG_SETUP_HEADERS "boost/log/detail/setup_config.hpp") + set(_Boost_MATH_HEADERS "boost/math_fwd.hpp") + set(_Boost_MATH_C99_HEADERS "boost/math/tr1.hpp") + set(_Boost_MATH_C99F_HEADERS "boost/math/tr1.hpp") + set(_Boost_MATH_C99L_HEADERS "boost/math/tr1.hpp") + set(_Boost_MATH_TR1_HEADERS "boost/math/tr1.hpp") + set(_Boost_MATH_TR1F_HEADERS "boost/math/tr1.hpp") + set(_Boost_MATH_TR1L_HEADERS "boost/math/tr1.hpp") + set(_Boost_MPI_HEADERS "boost/mpi.hpp") + set(_Boost_MPI_PYTHON_HEADERS "boost/mpi/python/config.hpp") + set(_Boost_NUMPY_HEADERS "boost/python/numpy.hpp") + set(_Boost_PRG_EXEC_MONITOR_HEADERS "boost/test/prg_exec_monitor.hpp") + set(_Boost_PROGRAM_OPTIONS_HEADERS "boost/program_options.hpp") + set(_Boost_PYTHON_HEADERS "boost/python.hpp") + set(_Boost_RANDOM_HEADERS "boost/random.hpp") + set(_Boost_REGEX_HEADERS "boost/regex.hpp") + set(_Boost_SERIALIZATION_HEADERS "boost/serialization/serialization.hpp") + set(_Boost_SIGNALS_HEADERS "boost/signals.hpp") + set(_Boost_STACKTRACE_ADDR2LINE_HEADERS "boost/stacktrace.hpp") + set(_Boost_STACKTRACE_BACKTRACE_HEADERS "boost/stacktrace.hpp") + set(_Boost_STACKTRACE_BASIC_HEADERS "boost/stacktrace.hpp") + set(_Boost_STACKTRACE_NOOP_HEADERS "boost/stacktrace.hpp") + set(_Boost_STACKTRACE_WINDBG_CACHED_HEADERS "boost/stacktrace.hpp") + set(_Boost_STACKTRACE_WINDBG_HEADERS "boost/stacktrace.hpp") + set(_Boost_SYSTEM_HEADERS "boost/system/config.hpp") + set(_Boost_TEST_EXEC_MONITOR_HEADERS "boost/test/test_exec_monitor.hpp") + set(_Boost_THREAD_HEADERS "boost/thread.hpp") + set(_Boost_TIMER_HEADERS "boost/timer.hpp") + set(_Boost_TYPE_ERASURE_HEADERS "boost/type_erasure/config.hpp") + set(_Boost_UNIT_TEST_FRAMEWORK_HEADERS "boost/test/framework.hpp") + set(_Boost_WAVE_HEADERS "boost/wave.hpp") + set(_Boost_WSERIALIZATION_HEADERS "boost/archive/text_wiarchive.hpp") + if(WIN32) + set(_Boost_BZIP2_HEADERS "boost/iostreams/filter/bzip2.hpp") + set(_Boost_ZLIB_HEADERS "boost/iostreams/filter/zlib.hpp") + endif() + + string(TOUPPER ${component} uppercomponent) + set(${_hdrs} ${_Boost_${uppercomponent}_HEADERS} PARENT_SCOPE) + + string(REGEX REPLACE ";" " " _boost_HDRS_STRING "${_Boost_${uppercomponent}_HEADERS}") + if (NOT _boost_HDRS_STRING) + set(_boost_HDRS_STRING "(none)") + endif() + # message(STATUS "Headers for Boost::${component}: ${_boost_HDRS_STRING}") +endfunction() + +# +# Determine if any missing dependencies require adding to the component list. 
+#
+# Sets _Boost_${COMPONENT}_DEPENDENCIES for each required component,
+# plus _Boost_IMPORTED_TARGETS (TRUE if imported targets should be
+# defined; FALSE if dependency information is unavailable).
+#
+# componentvar - the component list variable name
+# extravar - the indirect dependency list variable name
+#
+#
+function(_Boost_MISSING_DEPENDENCIES componentvar extravar)
+  # _boost_unprocessed_components - list of components requiring processing
+  # _boost_processed_components - components already processed (or currently being processed)
+  # _boost_new_components - new components discovered for future processing
+  #
+  list(APPEND _boost_unprocessed_components ${${componentvar}})
+
+  while(_boost_unprocessed_components)
+    list(APPEND _boost_processed_components ${_boost_unprocessed_components})
+    foreach(component ${_boost_unprocessed_components})
+      string(TOUPPER ${component} uppercomponent)
+      _Boost_COMPONENT_DEPENDENCIES("${component}" _Boost_${uppercomponent}_DEPENDENCIES)
+      set(_Boost_${uppercomponent}_DEPENDENCIES ${_Boost_${uppercomponent}_DEPENDENCIES} PARENT_SCOPE)
+      set(_Boost_IMPORTED_TARGETS ${_Boost_IMPORTED_TARGETS} PARENT_SCOPE)
+      foreach(componentdep ${_Boost_${uppercomponent}_DEPENDENCIES})
+        if (NOT ("${componentdep}" IN_LIST _boost_processed_components OR "${componentdep}" IN_LIST _boost_new_components))
+          list(APPEND _boost_new_components ${componentdep})
+        endif()
+      endforeach()
+    endforeach()
+    set(_boost_unprocessed_components ${_boost_new_components})
+    unset(_boost_new_components)
+  endwhile()
+  set(_boost_extra_components ${_boost_processed_components})
+  if(_boost_extra_components AND ${componentvar})
+    list(REMOVE_ITEM _boost_extra_components ${${componentvar}})
+  endif()
+  set(${componentvar} ${_boost_processed_components} PARENT_SCOPE)
+  set(${extravar} ${_boost_extra_components} PARENT_SCOPE)
+endfunction()
+
+#
+# Some Boost libraries may require a particular set of compiler features.
+# The first such library was `boost::fiber`, introduced in Boost 1.62.
+# Its required compiler features can be checked in
+# - `${Boost_ROOT}/libs/fiber/build/Jamfile.v2`;
+# - `${Boost_ROOT}/libs/context/build/Jamfile.v2`.
+#
+# TODO (Re)Check compiler features on (every?) release ???
+# One may use the following command to get the files to check:
+#
+#   $ find . -name Jamfile.v2 | grep build | xargs grep -l cxx1
+#
+function(_Boost_COMPILER_FEATURES component _ret)
+  # Boost >= 1.62
+  if(NOT Boost_VERSION_STRING VERSION_LESS 1.62.0)
+    set(_Boost_FIBER_COMPILER_FEATURES
+      cxx_alias_templates
+      cxx_auto_type
+      cxx_constexpr
+      cxx_defaulted_functions
+      cxx_final
+      cxx_lambdas
+      cxx_noexcept
+      cxx_nullptr
+      cxx_rvalue_references
+      cxx_thread_local
+      cxx_variadic_templates
+    )
+    # The compiler features for `context` are the same as for `fiber`.
+    set(_Boost_CONTEXT_COMPILER_FEATURES ${_Boost_FIBER_COMPILER_FEATURES})
+  endif()
+
+  # Boost Contract library available in >= 1.67
+  if(NOT Boost_VERSION_STRING VERSION_LESS 1.67.0)
+    # From `libs/contract/build/boost_contract_build.jam`
+    set(_Boost_CONTRACT_COMPILER_FEATURES
+      cxx_lambdas
+      cxx_variadic_templates
+    )
+  endif()
+
+  string(TOUPPER ${component} uppercomponent)
+  set(${_ret} ${_Boost_${uppercomponent}_COMPILER_FEATURES} PARENT_SCOPE)
+endfunction()
+
+#
+# Update library search directory hint variable with paths used by prebuilt boost binaries.
+# +# Prebuilt windows binaries (https://sourceforge.net/projects/boost/files/boost-binaries/) +# have library directories named using MSVC compiler version and architecture. +# This function would append corresponding directories if MSVC is a current compiler, +# so having `BOOST_ROOT` would be enough to specify to find everything. +# +function(_Boost_UPDATE_WINDOWS_LIBRARY_SEARCH_DIRS_WITH_PREBUILT_PATHS componentlibvar basedir) + if("x${CMAKE_CXX_COMPILER_ID}" STREQUAL "xMSVC") + if(CMAKE_SIZEOF_VOID_P EQUAL 8) + set(_arch_suffix 64) + else() + set(_arch_suffix 32) + endif() + if(MSVC_TOOLSET_VERSION GREATER_EQUAL 150) + # Not yet known. + elseif(MSVC_TOOLSET_VERSION GREATER_EQUAL 140) + # MSVC toolset 14.x versions are forward compatible. + foreach(v 9 8 7 6 5 4 3 2 1 0) + if(MSVC_TOOLSET_VERSION GREATER_EQUAL 14${v}) + list(APPEND ${componentlibvar} ${basedir}/lib${_arch_suffix}-msvc-14.${v}) + endif() + endforeach() + elseif(MSVC_TOOLSET_VERSION GREATER_EQUAL 80) + math(EXPR _toolset_major_version "${MSVC_TOOLSET_VERSION} / 10") + list(APPEND ${componentlibvar} ${basedir}/lib${_arch_suffix}-msvc-${_toolset_major_version}.0) + endif() + set(${componentlibvar} ${${componentlibvar}} PARENT_SCOPE) + endif() +endfunction() + +# +# End functions/macros +# +#------------------------------------------------------------------------------- + +#------------------------------------------------------------------------------- +# main. +#------------------------------------------------------------------------------- + + +# If the user sets Boost_LIBRARY_DIR, use it as the default for both +# configurations. +if(NOT Boost_LIBRARY_DIR_RELEASE AND Boost_LIBRARY_DIR) + set(Boost_LIBRARY_DIR_RELEASE "${Boost_LIBRARY_DIR}") +endif() +if(NOT Boost_LIBRARY_DIR_DEBUG AND Boost_LIBRARY_DIR) + set(Boost_LIBRARY_DIR_DEBUG "${Boost_LIBRARY_DIR}") +endif() + +if(NOT DEFINED Boost_USE_DEBUG_LIBS) + set(Boost_USE_DEBUG_LIBS TRUE) +endif() +if(NOT DEFINED Boost_USE_RELEASE_LIBS) + set(Boost_USE_RELEASE_LIBS TRUE) +endif() +if(NOT DEFINED Boost_USE_MULTITHREADED) + set(Boost_USE_MULTITHREADED TRUE) +endif() +if(NOT DEFINED Boost_USE_DEBUG_RUNTIME) + set(Boost_USE_DEBUG_RUNTIME TRUE) +endif() + +# Check the version of Boost against the requested version. +if(Boost_FIND_VERSION AND NOT Boost_FIND_VERSION_MINOR) + message(SEND_ERROR "When requesting a specific version of Boost, you must provide at least the major and minor version numbers, e.g., 1.34") +endif() + +if(Boost_FIND_VERSION_EXACT) + # The version may appear in a directory with or without the patch + # level, even when the patch level is non-zero. + set(_boost_TEST_VERSIONS + "${Boost_FIND_VERSION_MAJOR}.${Boost_FIND_VERSION_MINOR}.${Boost_FIND_VERSION_PATCH}" + "${Boost_FIND_VERSION_MAJOR}.${Boost_FIND_VERSION_MINOR}") +else() + # The user has not requested an exact version. Among known + # versions, find those that are acceptable to the user request. + # + # Note: When adding a new Boost release, also update the dependency + # information in _Boost_COMPONENT_DEPENDENCIES and + # _Boost_COMPONENT_HEADERS. See the instructions at the top of + # _Boost_COMPONENT_DEPENDENCIES. 
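+  # Versions newer than those listed below can also be supplied by the
+  # consuming project before calling find_package, e.g. (illustrative value):
+  #   set(Boost_ADDITIONAL_VERSIONS "1.71.0" "1.71")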
+ set(_Boost_KNOWN_VERSIONS ${Boost_ADDITIONAL_VERSIONS} + "1.70.0" "1.70" "1.69.0" "1.69" + "1.68.0" "1.68" "1.67.0" "1.67" "1.66.0" "1.66" "1.65.1" "1.65.0" "1.65" + "1.64.0" "1.64" "1.63.0" "1.63" "1.62.0" "1.62" "1.61.0" "1.61" "1.60.0" "1.60" + "1.59.0" "1.59" "1.58.0" "1.58" "1.57.0" "1.57" "1.56.0" "1.56" "1.55.0" "1.55" + "1.54.0" "1.54" "1.53.0" "1.53" "1.52.0" "1.52" "1.51.0" "1.51" + "1.50.0" "1.50" "1.49.0" "1.49" "1.48.0" "1.48" "1.47.0" "1.47" "1.46.1" + "1.46.0" "1.46" "1.45.0" "1.45" "1.44.0" "1.44" "1.43.0" "1.43" "1.42.0" "1.42" + "1.41.0" "1.41" "1.40.0" "1.40" "1.39.0" "1.39" "1.38.0" "1.38" "1.37.0" "1.37" + "1.36.1" "1.36.0" "1.36" "1.35.1" "1.35.0" "1.35" "1.34.1" "1.34.0" + "1.34" "1.33.1" "1.33.0" "1.33") + + set(_boost_TEST_VERSIONS) + if(Boost_FIND_VERSION) + set(_Boost_FIND_VERSION_SHORT "${Boost_FIND_VERSION_MAJOR}.${Boost_FIND_VERSION_MINOR}") + # Select acceptable versions. + foreach(version ${_Boost_KNOWN_VERSIONS}) + if(NOT "${version}" VERSION_LESS "${Boost_FIND_VERSION}") + # This version is high enough. + list(APPEND _boost_TEST_VERSIONS "${version}") + elseif("${version}.99" VERSION_EQUAL "${_Boost_FIND_VERSION_SHORT}.99") + # This version is a short-form for the requested version with + # the patch level dropped. + list(APPEND _boost_TEST_VERSIONS "${version}") + endif() + endforeach() + else() + # Any version is acceptable. + set(_boost_TEST_VERSIONS "${_Boost_KNOWN_VERSIONS}") + endif() +endif() + +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "_boost_TEST_VERSIONS") +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_USE_MULTITHREADED") +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_USE_STATIC_LIBS") +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_USE_STATIC_RUNTIME") +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_ADDITIONAL_VERSIONS") +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_NO_SYSTEM_PATHS") + +# Supply Boost_LIB_DIAGNOSTIC_DEFINITIONS as a convenience target. It +# will only contain any interface definitions on WIN32, but is created +# on all platforms to keep end user code free from platform dependent +# code. Also provide convenience targets to disable autolinking and +# enable dynamic linking. +if(NOT TARGET Boost::diagnostic_definitions) + add_library(Boost::diagnostic_definitions INTERFACE IMPORTED) + add_library(Boost::disable_autolinking INTERFACE IMPORTED) + add_library(Boost::dynamic_linking INTERFACE IMPORTED) + set_target_properties(Boost::dynamic_linking PROPERTIES + INTERFACE_COMPILE_DEFINITIONS "BOOST_ALL_DYN_LINK") +endif() +if(WIN32) + # In windows, automatic linking is performed, so you do not have + # to specify the libraries. If you are linking to a dynamic + # runtime, then you can choose to link to either a static or a + # dynamic Boost library, the default is to do a static link. You + # can alter this for a specific library "whatever" by defining + # BOOST_WHATEVER_DYN_LINK to force Boost library "whatever" to be + # linked dynamically. Alternatively you can force all Boost + # libraries to dynamic link by defining BOOST_ALL_DYN_LINK. + + # This feature can be disabled for Boost library "whatever" by + # defining BOOST_WHATEVER_NO_LIB, or for all of Boost by defining + # BOOST_ALL_NO_LIB. 
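+  # The imported targets created above expose the same switches: linking a
+  # consumer target against Boost::disable_autolinking adds BOOST_ALL_NO_LIB,
+  # and Boost::dynamic_linking adds BOOST_ALL_DYN_LINK, e.g.
+  #   target_link_libraries(myapp PRIVATE Boost::disable_autolinking)  # "myapp" is illustrative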
+ + # If you want to observe which libraries are being linked against + # then defining BOOST_LIB_DIAGNOSTIC will cause the auto-linking + # code to emit a #pragma message each time a library is selected + # for linking. + set(Boost_LIB_DIAGNOSTIC_DEFINITIONS "-DBOOST_LIB_DIAGNOSTIC") + set_target_properties(Boost::diagnostic_definitions PROPERTIES + INTERFACE_COMPILE_DEFINITIONS "BOOST_LIB_DIAGNOSTIC") + set_target_properties(Boost::disable_autolinking PROPERTIES + INTERFACE_COMPILE_DEFINITIONS "BOOST_ALL_NO_LIB") +endif() + +if (POLICY CMP0074) + cmake_policy(GET CMP0074 _Boost_CMP0074) + if(NOT "x${_Boost_CMP0074}x" STREQUAL "xNEWx") + _Boost_CHECK_SPELLING(Boost_ROOT) + endif() + unset(_Boost_CMP0074) +endif () +_Boost_CHECK_SPELLING(Boost_LIBRARYDIR) +_Boost_CHECK_SPELLING(Boost_INCLUDEDIR) + +# Collect environment variable inputs as hints. Do not consider changes. +foreach(v BOOSTROOT BOOST_ROOT BOOST_INCLUDEDIR BOOST_LIBRARYDIR) + set(_env $ENV{${v}}) + if(_env) + file(TO_CMAKE_PATH "${_env}" _ENV_${v}) + else() + set(_ENV_${v} "") + endif() +endforeach() +if(NOT _ENV_BOOST_ROOT AND _ENV_BOOSTROOT) + set(_ENV_BOOST_ROOT "${_ENV_BOOSTROOT}") +endif() + +# Collect inputs and cached results. Detect changes since the last run. +if(NOT BOOST_ROOT AND BOOSTROOT) + set(BOOST_ROOT "${BOOSTROOT}") +endif() +set(_Boost_VARS_DIR + BOOST_ROOT + Boost_NO_SYSTEM_PATHS + ) + +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "BOOST_ROOT") +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "BOOST_ROOT" ENVIRONMENT) +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "BOOST_INCLUDEDIR") +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "BOOST_INCLUDEDIR" ENVIRONMENT) +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "BOOST_LIBRARYDIR") +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "BOOST_LIBRARYDIR" ENVIRONMENT) + +# ------------------------------------------------------------------------ +# Search for Boost include DIR +# ------------------------------------------------------------------------ + +set(_Boost_VARS_INC BOOST_INCLUDEDIR Boost_INCLUDE_DIR Boost_ADDITIONAL_VERSIONS) +_Boost_CHANGE_DETECT(_Boost_CHANGE_INCDIR ${_Boost_VARS_DIR} ${_Boost_VARS_INC}) +# Clear Boost_INCLUDE_DIR if it did not change but other input affecting the +# location did. We will find a new one based on the new inputs. +if(_Boost_CHANGE_INCDIR AND NOT _Boost_INCLUDE_DIR_CHANGED) + unset(Boost_INCLUDE_DIR CACHE) +endif() + +if(NOT Boost_INCLUDE_DIR) + set(_boost_INCLUDE_SEARCH_DIRS "") + if(BOOST_INCLUDEDIR) + list(APPEND _boost_INCLUDE_SEARCH_DIRS ${BOOST_INCLUDEDIR}) + elseif(_ENV_BOOST_INCLUDEDIR) + list(APPEND _boost_INCLUDE_SEARCH_DIRS ${_ENV_BOOST_INCLUDEDIR}) + endif() + + if( BOOST_ROOT ) + list(APPEND _boost_INCLUDE_SEARCH_DIRS ${BOOST_ROOT}/include ${BOOST_ROOT}) + elseif( _ENV_BOOST_ROOT ) + list(APPEND _boost_INCLUDE_SEARCH_DIRS ${_ENV_BOOST_ROOT}/include ${_ENV_BOOST_ROOT}) + endif() + + if( Boost_NO_SYSTEM_PATHS) + list(APPEND _boost_INCLUDE_SEARCH_DIRS NO_CMAKE_SYSTEM_PATH NO_SYSTEM_ENVIRONMENT_PATH) + else() + if("x${CMAKE_CXX_COMPILER_ID}" STREQUAL "xMSVC") + foreach(ver ${_boost_TEST_VERSIONS}) + string(REPLACE "." 
"_" ver "${ver}") + list(APPEND _boost_INCLUDE_SEARCH_DIRS PATHS "C:/local/boost_${ver}") + endforeach() + endif() + list(APPEND _boost_INCLUDE_SEARCH_DIRS PATHS + C:/boost/include + C:/boost + /sw/local/include + ) + endif() + + # Try to find Boost by stepping backwards through the Boost versions + # we know about. + # Build a list of path suffixes for each version. + set(_boost_PATH_SUFFIXES) + foreach(_boost_VER ${_boost_TEST_VERSIONS}) + # Add in a path suffix, based on the required version, ideally + # we could read this from version.hpp, but for that to work we'd + # need to know the include dir already + set(_boost_BOOSTIFIED_VERSION) + + # Transform 1.35 => 1_35 and 1.36.0 => 1_36_0 + if(_boost_VER MATCHES "([0-9]+)\\.([0-9]+)\\.([0-9]+)") + set(_boost_BOOSTIFIED_VERSION + "${CMAKE_MATCH_1}_${CMAKE_MATCH_2}_${CMAKE_MATCH_3}") + elseif(_boost_VER MATCHES "([0-9]+)\\.([0-9]+)") + set(_boost_BOOSTIFIED_VERSION + "${CMAKE_MATCH_1}_${CMAKE_MATCH_2}") + endif() + + list(APPEND _boost_PATH_SUFFIXES + "boost-${_boost_BOOSTIFIED_VERSION}" + "boost_${_boost_BOOSTIFIED_VERSION}" + "boost/boost-${_boost_BOOSTIFIED_VERSION}" + "boost/boost_${_boost_BOOSTIFIED_VERSION}" + ) + + endforeach() + + _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "_boost_INCLUDE_SEARCH_DIRS") + _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "_boost_PATH_SUFFIXES") + + # Look for a standard boost header file. + find_path(Boost_INCLUDE_DIR + NAMES boost/config.hpp + HINTS ${_boost_INCLUDE_SEARCH_DIRS} + PATH_SUFFIXES ${_boost_PATH_SUFFIXES} + ) +endif() + +# ------------------------------------------------------------------------ +# Extract version information from version.hpp +# ------------------------------------------------------------------------ + +if(Boost_INCLUDE_DIR) + _Boost_DEBUG_PRINT("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" + "location of version.hpp: ${Boost_INCLUDE_DIR}/boost/version.hpp") + + # Extract Boost_VERSION_MACRO and Boost_LIB_VERSION from version.hpp + set(Boost_VERSION_MACRO 0) + set(Boost_LIB_VERSION "") + file(STRINGS "${Boost_INCLUDE_DIR}/boost/version.hpp" _boost_VERSION_HPP_CONTENTS REGEX "#define BOOST_(LIB_)?VERSION ") + if("${_boost_VERSION_HPP_CONTENTS}" MATCHES "#define BOOST_VERSION ([0-9]+)") + set(Boost_VERSION_MACRO "${CMAKE_MATCH_1}") + endif() + if("${_boost_VERSION_HPP_CONTENTS}" MATCHES "#define BOOST_LIB_VERSION \"([0-9_]+)\"") + set(Boost_LIB_VERSION "${CMAKE_MATCH_1}") + endif() + unset(_boost_VERSION_HPP_CONTENTS) + + # Calculate version components + math(EXPR Boost_VERSION_MAJOR "${Boost_VERSION_MACRO} / 100000") + math(EXPR Boost_VERSION_MINOR "${Boost_VERSION_MACRO} / 100 % 1000") + math(EXPR Boost_VERSION_PATCH "${Boost_VERSION_MACRO} % 100") + set(Boost_VERSION_COUNT 3) + + # Define alias variables for backwards compat. 
+ set(Boost_MAJOR_VERSION ${Boost_VERSION_MAJOR}) + set(Boost_MINOR_VERSION ${Boost_VERSION_MINOR}) + set(Boost_SUBMINOR_VERSION ${Boost_VERSION_PATCH}) + + # Define Boost version in x.y.z format + set(Boost_VERSION_STRING "${Boost_VERSION_MAJOR}.${Boost_VERSION_MINOR}.${Boost_VERSION_PATCH}") + + if (POLICY CMP0093) + # Define final Boost_VERSION + cmake_policy(GET CMP0093 _Boost_CMP0093 + PARENT_SCOPE # undocumented, do not use outside of CMake + ) + if("x${_Boost_CMP0093}x" STREQUAL "xNEWx") + set(Boost_VERSION ${Boost_VERSION_STRING}) + endif() + unset(_Boost_CMP0093) + else() + set(Boost_VERSION ${Boost_VERSION_MACRO}) + endif() + + _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_VERSION") + _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_VERSION_STRING") + _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_VERSION_MACRO") + _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_VERSION_MAJOR") + _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_VERSION_MINOR") + _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_VERSION_PATCH") + _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_VERSION_COUNT") +endif() + +# ------------------------------------------------------------------------ +# Prefix initialization +# ------------------------------------------------------------------------ + +set(Boost_LIB_PREFIX "") +if ( (GHSMULTI AND Boost_USE_STATIC_LIBS) OR + (WIN32 AND Boost_USE_STATIC_LIBS AND NOT CYGWIN) ) + set(Boost_LIB_PREFIX "lib") +endif() + +if ( NOT Boost_NAMESPACE ) + set(Boost_NAMESPACE "boost") +endif() + +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_LIB_PREFIX") +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "Boost_NAMESPACE") + +# ------------------------------------------------------------------------ +# Suffix initialization and compiler suffix detection. +# ------------------------------------------------------------------------ + +set(_Boost_VARS_NAME + Boost_NAMESPACE + Boost_COMPILER + Boost_THREADAPI + Boost_USE_DEBUG_PYTHON + Boost_USE_MULTITHREADED + Boost_USE_STATIC_LIBS + Boost_USE_STATIC_RUNTIME + Boost_USE_STLPORT + Boost_USE_STLPORT_DEPRECATED_NATIVE_IOSTREAMS + ) +_Boost_CHANGE_DETECT(_Boost_CHANGE_LIBNAME ${_Boost_VARS_NAME}) + +# Setting some more suffixes for the library +if (Boost_COMPILER) + set(_boost_COMPILER ${Boost_COMPILER}) + _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" + "_boost_COMPILER" SOURCE "user-specified via Boost_COMPILER") +else() + # Attempt to guess the compiler suffix + # NOTE: this is not perfect yet, if you experience any issues + # please report them and use the Boost_COMPILER variable + # to work around the problems. 
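+  # For example, a user whose toolchain is not guessed correctly could pass
+  # something like -DBoost_COMPILER=-gcc9 or -DBoost_COMPILER=-vc142 on the
+  # command line (illustrative values) so the versioned library names are
+  # formed with the correct suffix.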
+ _Boost_GUESS_COMPILER_PREFIX(_boost_COMPILER) +endif() + +set (_boost_MULTITHREADED "-mt") +if( NOT Boost_USE_MULTITHREADED ) + set (_boost_MULTITHREADED "") +endif() +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "_boost_MULTITHREADED") + +#====================== +# Systematically build up the Boost ABI tag for the 'tagged' and 'versioned' layouts +# http://boost.org/doc/libs/1_66_0/more/getting_started/windows.html#library-naming +# http://boost.org/doc/libs/1_66_0/boost/config/auto_link.hpp +# http://boost.org/doc/libs/1_66_0/tools/build/src/tools/common.jam +# http://boost.org/doc/libs/1_66_0/boostcpp.jam +set( _boost_RELEASE_ABI_TAG "-") +set( _boost_DEBUG_ABI_TAG "-") +# Key Use this library when: +# s linking statically to the C++ standard library and +# compiler runtime support libraries. +if(Boost_USE_STATIC_RUNTIME) + set( _boost_RELEASE_ABI_TAG "${_boost_RELEASE_ABI_TAG}s") + set( _boost_DEBUG_ABI_TAG "${_boost_DEBUG_ABI_TAG}s") +endif() +# g using debug versions of the standard and runtime +# support libraries +if(WIN32 AND Boost_USE_DEBUG_RUNTIME) + if("x${CMAKE_CXX_COMPILER_ID}" STREQUAL "xMSVC" + OR "x${CMAKE_CXX_COMPILER_ID}" STREQUAL "xClang" + OR "x${CMAKE_CXX_COMPILER_ID}" STREQUAL "xIntel") + string(APPEND _boost_DEBUG_ABI_TAG "g") + endif() +endif() +# y using special debug build of python +if(Boost_USE_DEBUG_PYTHON) + string(APPEND _boost_DEBUG_ABI_TAG "y") +endif() +# d using a debug version of your code +string(APPEND _boost_DEBUG_ABI_TAG "d") +# p using the STLport standard library rather than the +# default one supplied with your compiler +if(Boost_USE_STLPORT) + string(APPEND _boost_RELEASE_ABI_TAG "p") + string(APPEND _boost_DEBUG_ABI_TAG "p") +endif() +# n using the STLport deprecated "native iostreams" feature +# removed from the documentation in 1.43.0 but still present in +# boost/config/auto_link.hpp +if(Boost_USE_STLPORT_DEPRECATED_NATIVE_IOSTREAMS) + string(APPEND _boost_RELEASE_ABI_TAG "n") + string(APPEND _boost_DEBUG_ABI_TAG "n") +endif() + +# -x86 Architecture and address model tag +# First character is the architecture, then word-size, either 32 or 64 +# Only used in 'versioned' layout, added in Boost 1.66.0 +if(DEFINED Boost_ARCHITECTURE) + set(_boost_ARCHITECTURE_TAG "${Boost_ARCHITECTURE}") + _Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" + "_boost_ARCHITECTURE_TAG" SOURCE "user-specified via Boost_ARCHITECTURE") +else() + set(_boost_ARCHITECTURE_TAG "") + # {CMAKE_CXX_COMPILER_ARCHITECTURE_ID} is not currently set for all compilers + if(NOT "x${CMAKE_CXX_COMPILER_ARCHITECTURE_ID}" STREQUAL "x" AND NOT Boost_VERSION_STRING VERSION_LESS 1.66.0) + string(APPEND _boost_ARCHITECTURE_TAG "-") + # This needs to be kept in-sync with the section of CMakePlatformId.h.in + # inside 'defined(_WIN32) && defined(_MSC_VER)' + if(CMAKE_CXX_COMPILER_ARCHITECTURE_ID STREQUAL "IA64") + string(APPEND _boost_ARCHITECTURE_TAG "i") + elseif(CMAKE_CXX_COMPILER_ARCHITECTURE_ID STREQUAL "X86" + OR CMAKE_CXX_COMPILER_ARCHITECTURE_ID STREQUAL "x64") + string(APPEND _boost_ARCHITECTURE_TAG "x") + elseif(CMAKE_CXX_COMPILER_ARCHITECTURE_ID MATCHES "^ARM") + string(APPEND _boost_ARCHITECTURE_TAG "a") + elseif(CMAKE_CXX_COMPILER_ARCHITECTURE_ID STREQUAL "MIPS") + string(APPEND _boost_ARCHITECTURE_TAG "m") + endif() + + if(CMAKE_SIZEOF_VOID_P EQUAL 8) + string(APPEND _boost_ARCHITECTURE_TAG "64") + else() + string(APPEND _boost_ARCHITECTURE_TAG "32") + endif() + endif() + 
_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" + "_boost_ARCHITECTURE_TAG" SOURCE "detected") +endif() + +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "_boost_RELEASE_ABI_TAG") +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "_boost_DEBUG_ABI_TAG") + +# ------------------------------------------------------------------------ +# Begin finding boost libraries +# ------------------------------------------------------------------------ + +set(_Boost_VARS_LIB "") +foreach(c DEBUG RELEASE) + set(_Boost_VARS_LIB_${c} BOOST_LIBRARYDIR Boost_LIBRARY_DIR_${c}) + list(APPEND _Boost_VARS_LIB ${_Boost_VARS_LIB_${c}}) + _Boost_CHANGE_DETECT(_Boost_CHANGE_LIBDIR_${c} ${_Boost_VARS_DIR} ${_Boost_VARS_LIB_${c}} Boost_INCLUDE_DIR) + # Clear Boost_LIBRARY_DIR_${c} if it did not change but other input affecting the + # location did. We will find a new one based on the new inputs. + if(_Boost_CHANGE_LIBDIR_${c} AND NOT _Boost_LIBRARY_DIR_${c}_CHANGED) + unset(Boost_LIBRARY_DIR_${c} CACHE) + endif() + + # If Boost_LIBRARY_DIR_[RELEASE,DEBUG] is set, prefer its value. + if(Boost_LIBRARY_DIR_${c}) + set(_boost_LIBRARY_SEARCH_DIRS_${c} ${Boost_LIBRARY_DIR_${c}} NO_DEFAULT_PATH NO_CMAKE_FIND_ROOT_PATH) + else() + set(_boost_LIBRARY_SEARCH_DIRS_${c} "") + if(BOOST_LIBRARYDIR) + list(APPEND _boost_LIBRARY_SEARCH_DIRS_${c} ${BOOST_LIBRARYDIR}) + elseif(_ENV_BOOST_LIBRARYDIR) + list(APPEND _boost_LIBRARY_SEARCH_DIRS_${c} ${_ENV_BOOST_LIBRARYDIR}) + endif() + + if(BOOST_ROOT) + list(APPEND _boost_LIBRARY_SEARCH_DIRS_${c} ${BOOST_ROOT}/lib ${BOOST_ROOT}/stage/lib) + _Boost_UPDATE_WINDOWS_LIBRARY_SEARCH_DIRS_WITH_PREBUILT_PATHS(_boost_LIBRARY_SEARCH_DIRS_${c} "${BOOST_ROOT}") + elseif(_ENV_BOOST_ROOT) + list(APPEND _boost_LIBRARY_SEARCH_DIRS_${c} ${_ENV_BOOST_ROOT}/lib ${_ENV_BOOST_ROOT}/stage/lib) + _Boost_UPDATE_WINDOWS_LIBRARY_SEARCH_DIRS_WITH_PREBUILT_PATHS(_boost_LIBRARY_SEARCH_DIRS_${c} "${_ENV_BOOST_ROOT}") + endif() + + list(APPEND _boost_LIBRARY_SEARCH_DIRS_${c} + ${Boost_INCLUDE_DIR}/lib + ${Boost_INCLUDE_DIR}/../lib + ${Boost_INCLUDE_DIR}/stage/lib + ) + _Boost_UPDATE_WINDOWS_LIBRARY_SEARCH_DIRS_WITH_PREBUILT_PATHS(_boost_LIBRARY_SEARCH_DIRS_${c} "${Boost_INCLUDE_DIR}/..") + _Boost_UPDATE_WINDOWS_LIBRARY_SEARCH_DIRS_WITH_PREBUILT_PATHS(_boost_LIBRARY_SEARCH_DIRS_${c} "${Boost_INCLUDE_DIR}") + if( Boost_NO_SYSTEM_PATHS ) + list(APPEND _boost_LIBRARY_SEARCH_DIRS_${c} NO_CMAKE_SYSTEM_PATH NO_SYSTEM_ENVIRONMENT_PATH) + else() + foreach(ver ${_boost_TEST_VERSIONS}) + string(REPLACE "." 
"_" ver "${ver}") + _Boost_UPDATE_WINDOWS_LIBRARY_SEARCH_DIRS_WITH_PREBUILT_PATHS(_boost_LIBRARY_SEARCH_DIRS_${c} "C:/local/boost_${ver}") + endforeach() + _Boost_UPDATE_WINDOWS_LIBRARY_SEARCH_DIRS_WITH_PREBUILT_PATHS(_boost_LIBRARY_SEARCH_DIRS_${c} "C:/boost") + list(APPEND _boost_LIBRARY_SEARCH_DIRS_${c} PATHS + C:/boost/lib + C:/boost + /sw/local/lib + ) + endif() + endif() +endforeach() + +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "_boost_LIBRARY_SEARCH_DIRS_RELEASE") +_Boost_DEBUG_PRINT_VAR("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" "_boost_LIBRARY_SEARCH_DIRS_DEBUG") + +# Support preference of static libs by adjusting CMAKE_FIND_LIBRARY_SUFFIXES +if( Boost_USE_STATIC_LIBS ) + set( _boost_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES}) + if(WIN32) + list(INSERT CMAKE_FIND_LIBRARY_SUFFIXES 0 .lib .a) + else() + set(CMAKE_FIND_LIBRARY_SUFFIXES .a) + endif() +endif() + +# We want to use the tag inline below without risking double dashes +if(_boost_RELEASE_ABI_TAG) + if(${_boost_RELEASE_ABI_TAG} STREQUAL "-") + set(_boost_RELEASE_ABI_TAG "") + endif() +endif() +if(_boost_DEBUG_ABI_TAG) + if(${_boost_DEBUG_ABI_TAG} STREQUAL "-") + set(_boost_DEBUG_ABI_TAG "") + endif() +endif() + +# The previous behavior of FindBoost when Boost_USE_STATIC_LIBS was enabled +# on WIN32 was to: +# 1. Search for static libs compiled against a SHARED C++ standard runtime library (use if found) +# 2. Search for static libs compiled against a STATIC C++ standard runtime library (use if found) +# We maintain this behavior since changing it could break people's builds. +# To disable the ambiguous behavior, the user need only +# set Boost_USE_STATIC_RUNTIME either ON or OFF. +set(_boost_STATIC_RUNTIME_WORKAROUND false) +if(WIN32 AND Boost_USE_STATIC_LIBS) + if(NOT DEFINED Boost_USE_STATIC_RUNTIME) + set(_boost_STATIC_RUNTIME_WORKAROUND TRUE) + endif() +endif() + +# On versions < 1.35, remove the System library from the considered list +# since it wasn't added until 1.35. +if(Boost_VERSION_STRING AND Boost_FIND_COMPONENTS) + if(Boost_VERSION_STRING VERSION_LESS 1.35.0) + list(REMOVE_ITEM Boost_FIND_COMPONENTS system) + endif() +endif() + +# Additional components may be required via component dependencies. +# Add any missing components to the list. +_Boost_MISSING_DEPENDENCIES(Boost_FIND_COMPONENTS _Boost_EXTRA_FIND_COMPONENTS) + +# If thread is required, get the thread libs as a dependency +if("thread" IN_LIST Boost_FIND_COMPONENTS) + if(Boost_FIND_QUIETLY) + set(_Boost_find_quiet QUIET) + else() + set(_Boost_find_quiet "") + endif() + find_package(Threads ${_Boost_find_quiet}) + unset(_Boost_find_quiet) +endif() + +# If the user changed any of our control inputs flush previous results. +if(_Boost_CHANGE_LIBDIR_DEBUG OR _Boost_CHANGE_LIBDIR_RELEASE OR _Boost_CHANGE_LIBNAME) + foreach(COMPONENT ${_Boost_COMPONENTS_SEARCHED}) + string(TOUPPER ${COMPONENT} UPPERCOMPONENT) + foreach(c DEBUG RELEASE) + set(_var Boost_${UPPERCOMPONENT}_LIBRARY_${c}) + unset(${_var} CACHE) + set(${_var} "${_var}-NOTFOUND") + endforeach() + endforeach() + set(_Boost_COMPONENTS_SEARCHED "") +endif() + +foreach(COMPONENT ${Boost_FIND_COMPONENTS}) + string(TOUPPER ${COMPONENT} UPPERCOMPONENT) + + set( _boost_docstring_release "Boost ${COMPONENT} library (release)") + set( _boost_docstring_debug "Boost ${COMPONENT} library (debug)") + + # Compute component-specific hints. 
+ set(_Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT "") + if(${COMPONENT} STREQUAL "mpi" OR ${COMPONENT} STREQUAL "mpi_python" OR + ${COMPONENT} STREQUAL "graph_parallel") + foreach(lib ${MPI_CXX_LIBRARIES} ${MPI_C_LIBRARIES}) + if(IS_ABSOLUTE "${lib}") + get_filename_component(libdir "${lib}" PATH) + string(REPLACE "\\" "/" libdir "${libdir}") + list(APPEND _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT ${libdir}) + endif() + endforeach() + endif() + + # Handle Python version suffixes + unset(COMPONENT_PYTHON_VERSION_MAJOR) + unset(COMPONENT_PYTHON_VERSION_MINOR) + if(${COMPONENT} MATCHES "^(python|mpi_python|numpy)([0-9])\$") + set(COMPONENT_UNVERSIONED "${CMAKE_MATCH_1}") + set(COMPONENT_PYTHON_VERSION_MAJOR "${CMAKE_MATCH_2}") + elseif(${COMPONENT} MATCHES "^(python|mpi_python|numpy)([0-9])\\.?([0-9])\$") + set(COMPONENT_UNVERSIONED "${CMAKE_MATCH_1}") + set(COMPONENT_PYTHON_VERSION_MAJOR "${CMAKE_MATCH_2}") + set(COMPONENT_PYTHON_VERSION_MINOR "${CMAKE_MATCH_3}") + endif() + + unset(_Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME) + if (COMPONENT_PYTHON_VERSION_MINOR) + # Boost >= 1.67 + list(APPEND _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME "${COMPONENT_UNVERSIONED}${COMPONENT_PYTHON_VERSION_MAJOR}${COMPONENT_PYTHON_VERSION_MINOR}") + # Debian/Ubuntu (Some versions omit the 2 and/or 3 from the suffix) + list(APPEND _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME "${COMPONENT_UNVERSIONED}${COMPONENT_PYTHON_VERSION_MAJOR}-py${COMPONENT_PYTHON_VERSION_MAJOR}${COMPONENT_PYTHON_VERSION_MINOR}") + list(APPEND _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME "${COMPONENT_UNVERSIONED}-py${COMPONENT_PYTHON_VERSION_MAJOR}${COMPONENT_PYTHON_VERSION_MINOR}") + # Gentoo + list(APPEND _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME "${COMPONENT_UNVERSIONED}-${COMPONENT_PYTHON_VERSION_MAJOR}.${COMPONENT_PYTHON_VERSION_MINOR}") + # RPMs + list(APPEND _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME "${COMPONENT_UNVERSIONED}-${COMPONENT_PYTHON_VERSION_MAJOR}${COMPONENT_PYTHON_VERSION_MINOR}") + endif() + if (COMPONENT_PYTHON_VERSION_MAJOR AND NOT COMPONENT_PYTHON_VERSION_MINOR) + # Boost < 1.67 + list(APPEND _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME "${COMPONENT_UNVERSIONED}${COMPONENT_PYTHON_VERSION_MAJOR}") + endif() + + # Consolidate and report component-specific hints. + if(_Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME) + list(REMOVE_DUPLICATES _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME) + _Boost_DEBUG_PRINT("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" + "Component-specific library search names for ${COMPONENT_NAME}: ${_Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME}") + endif() + if(_Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT) + list(REMOVE_DUPLICATES _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT) + _Boost_DEBUG_PRINT("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" + "Component-specific library search paths for ${COMPONENT}: ${_Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT}") + endif() + + # + # Find headers + # + _Boost_COMPONENT_HEADERS("${COMPONENT}" Boost_${UPPERCOMPONENT}_HEADER_NAME) + # Look for a standard boost header file. 
+ if(Boost_${UPPERCOMPONENT}_HEADER_NAME) + if(EXISTS "${Boost_INCLUDE_DIR}/${Boost_${UPPERCOMPONENT}_HEADER_NAME}") + set(Boost_${UPPERCOMPONENT}_HEADER ON) + else() + set(Boost_${UPPERCOMPONENT}_HEADER OFF) + endif() + else() + set(Boost_${UPPERCOMPONENT}_HEADER ON) + message(WARNING "No header defined for ${COMPONENT}; skipping header check") + endif() + + # + # Find RELEASE libraries + # + unset(_boost_RELEASE_NAMES) + foreach(component IN LISTS _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME COMPONENT) + foreach(compiler IN LISTS _boost_COMPILER) + list(APPEND _boost_RELEASE_NAMES + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_RELEASE_ABI_TAG}${_boost_ARCHITECTURE_TAG}-${Boost_LIB_VERSION} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_RELEASE_ABI_TAG}${_boost_ARCHITECTURE_TAG} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_RELEASE_ABI_TAG} ) + endforeach() + list(APPEND _boost_RELEASE_NAMES + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_RELEASE_ABI_TAG}${_boost_ARCHITECTURE_TAG}-${Boost_LIB_VERSION} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_RELEASE_ABI_TAG}${_boost_ARCHITECTURE_TAG} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_RELEASE_ABI_TAG} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component} ) + if(_boost_STATIC_RUNTIME_WORKAROUND) + set(_boost_RELEASE_STATIC_ABI_TAG "-s${_boost_RELEASE_ABI_TAG}") + foreach(compiler IN LISTS _boost_COMPILER) + list(APPEND _boost_RELEASE_NAMES + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_RELEASE_STATIC_ABI_TAG}${_boost_ARCHITECTURE_TAG}-${Boost_LIB_VERSION} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_RELEASE_STATIC_ABI_TAG}${_boost_ARCHITECTURE_TAG} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_RELEASE_STATIC_ABI_TAG} ) + endforeach() + list(APPEND _boost_RELEASE_NAMES + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_RELEASE_STATIC_ABI_TAG}${_boost_ARCHITECTURE_TAG}-${Boost_LIB_VERSION} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_RELEASE_STATIC_ABI_TAG}${_boost_ARCHITECTURE_TAG} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_RELEASE_STATIC_ABI_TAG} ) + endif() + endforeach() + if(Boost_THREADAPI AND ${COMPONENT} STREQUAL "thread") + _Boost_PREPEND_LIST_WITH_THREADAPI(_boost_RELEASE_NAMES ${_boost_RELEASE_NAMES}) + endif() + _Boost_DEBUG_PRINT("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" + "Searching for ${UPPERCOMPONENT}_LIBRARY_RELEASE: ${_boost_RELEASE_NAMES}") + + # if Boost_LIBRARY_DIR_RELEASE is not defined, + # but Boost_LIBRARY_DIR_DEBUG is, look there first for RELEASE libs + if(NOT Boost_LIBRARY_DIR_RELEASE AND Boost_LIBRARY_DIR_DEBUG) + list(INSERT _boost_LIBRARY_SEARCH_DIRS_RELEASE 0 ${Boost_LIBRARY_DIR_DEBUG}) + endif() + + # Avoid passing backslashes to _Boost_FIND_LIBRARY due to macro re-parsing. 
+ string(REPLACE "\\" "/" _boost_LIBRARY_SEARCH_DIRS_tmp "${_boost_LIBRARY_SEARCH_DIRS_RELEASE}") + + if(Boost_USE_RELEASE_LIBS) + _Boost_FIND_LIBRARY(Boost_${UPPERCOMPONENT}_LIBRARY_RELEASE RELEASE + NAMES ${_boost_RELEASE_NAMES} + HINTS ${_boost_LIBRARY_SEARCH_DIRS_tmp} + NAMES_PER_DIR + DOC "${_boost_docstring_release}" + ) + endif() + + # + # Find DEBUG libraries + # + unset(_boost_DEBUG_NAMES) + foreach(component IN LISTS _Boost_FIND_LIBRARY_HINTS_FOR_COMPONENT_NAME COMPONENT) + foreach(compiler IN LISTS _boost_COMPILER) + list(APPEND _boost_DEBUG_NAMES + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_DEBUG_ABI_TAG}${_boost_ARCHITECTURE_TAG}-${Boost_LIB_VERSION} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_DEBUG_ABI_TAG}${_boost_ARCHITECTURE_TAG} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_DEBUG_ABI_TAG} ) + endforeach() + list(APPEND _boost_DEBUG_NAMES + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_DEBUG_ABI_TAG}${_boost_ARCHITECTURE_TAG}-${Boost_LIB_VERSION} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_DEBUG_ABI_TAG}${_boost_ARCHITECTURE_TAG} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_DEBUG_ABI_TAG} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component} ) + if(_boost_STATIC_RUNTIME_WORKAROUND) + set(_boost_DEBUG_STATIC_ABI_TAG "-s${_boost_DEBUG_ABI_TAG}") + foreach(compiler IN LISTS _boost_COMPILER) + list(APPEND _boost_DEBUG_NAMES + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_DEBUG_STATIC_ABI_TAG}${_boost_ARCHITECTURE_TAG}-${Boost_LIB_VERSION} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_DEBUG_STATIC_ABI_TAG}${_boost_ARCHITECTURE_TAG} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${compiler}${_boost_MULTITHREADED}${_boost_DEBUG_STATIC_ABI_TAG} ) + endforeach() + list(APPEND _boost_DEBUG_NAMES + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_DEBUG_STATIC_ABI_TAG}${_boost_ARCHITECTURE_TAG}-${Boost_LIB_VERSION} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_DEBUG_STATIC_ABI_TAG}${_boost_ARCHITECTURE_TAG} + ${Boost_LIB_PREFIX}${Boost_NAMESPACE}_${component}${_boost_MULTITHREADED}${_boost_DEBUG_STATIC_ABI_TAG} ) + endif() + endforeach() + if(Boost_THREADAPI AND ${COMPONENT} STREQUAL "thread") + _Boost_PREPEND_LIST_WITH_THREADAPI(_boost_DEBUG_NAMES ${_boost_DEBUG_NAMES}) + endif() + _Boost_DEBUG_PRINT("${CMAKE_CURRENT_LIST_FILE}" "${CMAKE_CURRENT_LIST_LINE}" + "Searching for ${UPPERCOMPONENT}_LIBRARY_DEBUG: ${_boost_DEBUG_NAMES}") + + # if Boost_LIBRARY_DIR_DEBUG is not defined, + # but Boost_LIBRARY_DIR_RELEASE is, look there first for DEBUG libs + if(NOT Boost_LIBRARY_DIR_DEBUG AND Boost_LIBRARY_DIR_RELEASE) + list(INSERT _boost_LIBRARY_SEARCH_DIRS_DEBUG 0 ${Boost_LIBRARY_DIR_RELEASE}) + endif() + + # Avoid passing backslashes to _Boost_FIND_LIBRARY due to macro re-parsing. 
+ string(REPLACE "\\" "/" _boost_LIBRARY_SEARCH_DIRS_tmp "${_boost_LIBRARY_SEARCH_DIRS_DEBUG}") + + if(Boost_USE_DEBUG_LIBS) + _Boost_FIND_LIBRARY(Boost_${UPPERCOMPONENT}_LIBRARY_DEBUG DEBUG + NAMES ${_boost_DEBUG_NAMES} + HINTS ${_boost_LIBRARY_SEARCH_DIRS_tmp} + NAMES_PER_DIR + DOC "${_boost_docstring_debug}" + ) + endif () + + if(Boost_REALPATH) + _Boost_SWAP_WITH_REALPATH(Boost_${UPPERCOMPONENT}_LIBRARY_RELEASE "${_boost_docstring_release}") + _Boost_SWAP_WITH_REALPATH(Boost_${UPPERCOMPONENT}_LIBRARY_DEBUG "${_boost_docstring_debug}" ) + endif() + + _Boost_ADJUST_LIB_VARS(${UPPERCOMPONENT}) + + # Check if component requires some compiler features + _Boost_COMPILER_FEATURES(${COMPONENT} _Boost_${UPPERCOMPONENT}_COMPILER_FEATURES) + +endforeach() + +# Restore the original find library ordering +if( Boost_USE_STATIC_LIBS ) + set(CMAKE_FIND_LIBRARY_SUFFIXES ${_boost_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES}) +endif() + +# ------------------------------------------------------------------------ +# End finding boost libraries +# ------------------------------------------------------------------------ + +set(Boost_INCLUDE_DIRS ${Boost_INCLUDE_DIR}) +set(Boost_LIBRARY_DIRS) +if(Boost_LIBRARY_DIR_RELEASE) + list(APPEND Boost_LIBRARY_DIRS ${Boost_LIBRARY_DIR_RELEASE}) +endif() +if(Boost_LIBRARY_DIR_DEBUG) + list(APPEND Boost_LIBRARY_DIRS ${Boost_LIBRARY_DIR_DEBUG}) +endif() +if(Boost_LIBRARY_DIRS) + list(REMOVE_DUPLICATES Boost_LIBRARY_DIRS) +endif() + +# ------------------------------------------------------------------------ +# Call FPHSA helper, see https://cmake.org/cmake/help/latest/module/FindPackageHandleStandardArgs.html +# ------------------------------------------------------------------------ + +# Define aliases as needed by the component handler in the FPHSA helper below +foreach(_comp IN LISTS Boost_FIND_COMPONENTS) + string(TOUPPER ${_comp} _uppercomp) + if(DEFINED Boost_${_uppercomp}_FOUND) + set(Boost_${_comp}_FOUND ${Boost_${_uppercomp}_FOUND}) + endif() +endforeach() + +find_package_handle_standard_args(Boost + REQUIRED_VARS Boost_INCLUDE_DIR + VERSION_VAR Boost_VERSION_STRING + HANDLE_COMPONENTS) + +if(Boost_FOUND) + if( NOT Boost_LIBRARY_DIRS ) + # Compatibility Code for backwards compatibility with CMake + # 2.4's FindBoost module. + + # Look for the boost library path. + # Note that the user may not have installed any libraries + # so it is quite possible the Boost_LIBRARY_DIRS may not exist. + set(_boost_LIB_DIR ${Boost_INCLUDE_DIR}) + + if("${_boost_LIB_DIR}" MATCHES "boost-[0-9]+") + get_filename_component(_boost_LIB_DIR ${_boost_LIB_DIR} PATH) + endif() + + if("${_boost_LIB_DIR}" MATCHES "/include$") + # Strip off the trailing "/include" in the path. + get_filename_component(_boost_LIB_DIR ${_boost_LIB_DIR} PATH) + endif() + + if(EXISTS "${_boost_LIB_DIR}/lib") + string(APPEND _boost_LIB_DIR /lib) + elseif(EXISTS "${_boost_LIB_DIR}/stage/lib") + string(APPEND _boost_LIB_DIR "/stage/lib") + else() + set(_boost_LIB_DIR "") + endif() + + if(_boost_LIB_DIR AND EXISTS "${_boost_LIB_DIR}") + set(Boost_LIBRARY_DIRS ${_boost_LIB_DIR}) + endif() + + endif() +else() + # Boost headers were not found so no components were found. 
+ foreach(COMPONENT ${Boost_FIND_COMPONENTS}) + string(TOUPPER ${COMPONENT} UPPERCOMPONENT) + set(Boost_${UPPERCOMPONENT}_FOUND 0) + endforeach() +endif() + +# ------------------------------------------------------------------------ +# Add imported targets +# ------------------------------------------------------------------------ + +if(Boost_FOUND) + # The builtin CMake package in Boost 1.70+ introduces a new name + # for the header-only lib, let's provide the same UI in module mode + if(NOT TARGET Boost::headers) + add_library(Boost::headers INTERFACE IMPORTED) + if(Boost_INCLUDE_DIRS) + set_target_properties(Boost::headers PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${Boost_INCLUDE_DIRS}") + endif() + endif() + + # Define the old target name for header-only libraries for backwards + # compat. + if(NOT TARGET Boost::boost) + add_library(Boost::boost INTERFACE IMPORTED) + set_target_properties(Boost::boost + PROPERTIES INTERFACE_LINK_LIBRARIES Boost::headers) + endif() + + foreach(COMPONENT ${Boost_FIND_COMPONENTS}) + if(_Boost_IMPORTED_TARGETS AND NOT TARGET Boost::${COMPONENT}) + string(TOUPPER ${COMPONENT} UPPERCOMPONENT) + if(Boost_${UPPERCOMPONENT}_FOUND) + if(Boost_USE_STATIC_LIBS) + add_library(Boost::${COMPONENT} STATIC IMPORTED) + else() + # Even if Boost_USE_STATIC_LIBS is OFF, we might have static + # libraries as a result. + add_library(Boost::${COMPONENT} UNKNOWN IMPORTED) + endif() + if(Boost_INCLUDE_DIRS) + set_target_properties(Boost::${COMPONENT} PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${Boost_INCLUDE_DIRS}") + endif() + if(EXISTS "${Boost_${UPPERCOMPONENT}_LIBRARY}") + set_target_properties(Boost::${COMPONENT} PROPERTIES + IMPORTED_LINK_INTERFACE_LANGUAGES "CXX" + IMPORTED_LOCATION "${Boost_${UPPERCOMPONENT}_LIBRARY}") + endif() + if(EXISTS "${Boost_${UPPERCOMPONENT}_LIBRARY_RELEASE}") + set_property(TARGET Boost::${COMPONENT} APPEND PROPERTY + IMPORTED_CONFIGURATIONS RELEASE) + set_target_properties(Boost::${COMPONENT} PROPERTIES + IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE "CXX" + IMPORTED_LOCATION_RELEASE "${Boost_${UPPERCOMPONENT}_LIBRARY_RELEASE}") + endif() + if(EXISTS "${Boost_${UPPERCOMPONENT}_LIBRARY_DEBUG}") + set_property(TARGET Boost::${COMPONENT} APPEND PROPERTY + IMPORTED_CONFIGURATIONS DEBUG) + set_target_properties(Boost::${COMPONENT} PROPERTIES + IMPORTED_LINK_INTERFACE_LANGUAGES_DEBUG "CXX" + IMPORTED_LOCATION_DEBUG "${Boost_${UPPERCOMPONENT}_LIBRARY_DEBUG}") + endif() + if(_Boost_${UPPERCOMPONENT}_DEPENDENCIES) + unset(_Boost_${UPPERCOMPONENT}_TARGET_DEPENDENCIES) + foreach(dep ${_Boost_${UPPERCOMPONENT}_DEPENDENCIES}) + list(APPEND _Boost_${UPPERCOMPONENT}_TARGET_DEPENDENCIES Boost::${dep}) + endforeach() + if(COMPONENT STREQUAL "thread") + list(APPEND _Boost_${UPPERCOMPONENT}_TARGET_DEPENDENCIES Threads::Threads) + endif() + set_target_properties(Boost::${COMPONENT} PROPERTIES + INTERFACE_LINK_LIBRARIES "${_Boost_${UPPERCOMPONENT}_TARGET_DEPENDENCIES}") + endif() + if(_Boost_${UPPERCOMPONENT}_COMPILER_FEATURES) + set_target_properties(Boost::${COMPONENT} PROPERTIES + INTERFACE_COMPILE_FEATURES "${_Boost_${UPPERCOMPONENT}_COMPILER_FEATURES}") + endif() + endif() + endif() + endforeach() +endif() + +# ------------------------------------------------------------------------ +# Finalize +# ------------------------------------------------------------------------ + +# Report Boost_LIBRARIES +set(Boost_LIBRARIES "") +foreach(_comp IN LISTS Boost_FIND_COMPONENTS) + string(TOUPPER ${_comp} _uppercomp) + if(Boost_${_uppercomp}_FOUND) + list(APPEND 
Boost_LIBRARIES ${Boost_${_uppercomp}_LIBRARY}) + if(_comp STREQUAL "thread") + list(APPEND Boost_LIBRARIES ${CMAKE_THREAD_LIBS_INIT}) + endif() + endif() +endforeach() + +# Configure display of cache entries in GUI. +foreach(v BOOSTROOT BOOST_ROOT ${_Boost_VARS_INC} ${_Boost_VARS_LIB}) + get_property(_type CACHE ${v} PROPERTY TYPE) + if(_type) + set_property(CACHE ${v} PROPERTY ADVANCED 1) + if("x${_type}" STREQUAL "xUNINITIALIZED") + if("x${v}" STREQUAL "xBoost_ADDITIONAL_VERSIONS") + set_property(CACHE ${v} PROPERTY TYPE STRING) + else() + set_property(CACHE ${v} PROPERTY TYPE PATH) + endif() + endif() + endif() +endforeach() + +# Record last used values of input variables so we can +# detect on the next run if the user changed them. +foreach(v + ${_Boost_VARS_INC} ${_Boost_VARS_LIB} + ${_Boost_VARS_DIR} ${_Boost_VARS_NAME} + ) + if(DEFINED ${v}) + set(_${v}_LAST "${${v}}" CACHE INTERNAL "Last used ${v} value.") + else() + unset(_${v}_LAST CACHE) + endif() +endforeach() + +# Maintain a persistent list of components requested anywhere since +# the last flush. +set(_Boost_COMPONENTS_SEARCHED "${_Boost_COMPONENTS_SEARCHED}") +list(APPEND _Boost_COMPONENTS_SEARCHED ${Boost_FIND_COMPONENTS}) +list(REMOVE_DUPLICATES _Boost_COMPONENTS_SEARCHED) +list(SORT _Boost_COMPONENTS_SEARCHED) +set(_Boost_COMPONENTS_SEARCHED "${_Boost_COMPONENTS_SEARCHED}" + CACHE INTERNAL "Components requested for this build tree.") + +# Restore project's policies +cmake_policy(POP) diff --git a/Builds/CMake/deps/Findjemalloc.cmake b/Builds/CMake/deps/Findjemalloc.cmake new file mode 100644 index 00000000000..820ceeed4a1 --- /dev/null +++ b/Builds/CMake/deps/Findjemalloc.cmake @@ -0,0 +1,47 @@ +# - Try to find jemalloc +# Once done this will define +# JEMALLOC_FOUND - System has jemalloc +# JEMALLOC_INCLUDE_DIRS - The jemalloc include directories +# JEMALLOC_LIBRARIES - The libraries needed to use jemalloc + +if(NOT USE_BUNDLED_JEMALLOC) + find_package(PkgConfig) + if (PKG_CONFIG_FOUND) + pkg_check_modules(PC_JEMALLOC QUIET jemalloc) + endif() +else() + set(PC_JEMALLOC_INCLUDEDIR) + set(PC_JEMALLOC_INCLUDE_DIRS) + set(PC_JEMALLOC_LIBDIR) + set(PC_JEMALLOC_LIBRARY_DIRS) + set(LIMIT_SEARCH NO_DEFAULT_PATH) +endif() + +set(JEMALLOC_DEFINITIONS ${PC_JEMALLOC_CFLAGS_OTHER}) + +find_path(JEMALLOC_INCLUDE_DIR jemalloc/jemalloc.h + PATHS ${PC_JEMALLOC_INCLUDEDIR} ${PC_JEMALLOC_INCLUDE_DIRS} + ${LIMIT_SEARCH}) + +# If we're asked to use static linkage, add libjemalloc.a as a preferred library name. 
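+# (With the usual platform conventions CMAKE_STATIC_LIBRARY_PREFIX/SUFFIX
+# expand this to "libjemalloc.a" on Linux and macOS; under MSVC the static
+# prefix is empty and the suffix is ".lib", giving "jemalloc.lib".)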
+if(JEMALLOC_USE_STATIC) + list(APPEND JEMALLOC_NAMES + "${CMAKE_STATIC_LIBRARY_PREFIX}jemalloc${CMAKE_STATIC_LIBRARY_SUFFIX}") +endif() + +list(APPEND JEMALLOC_NAMES jemalloc) + +find_library(JEMALLOC_LIBRARY NAMES ${JEMALLOC_NAMES} + HINTS ${PC_JEMALLOC_LIBDIR} ${PC_JEMALLOC_LIBRARY_DIRS} + ${LIMIT_SEARCH}) + +set(JEMALLOC_LIBRARIES ${JEMALLOC_LIBRARY}) +set(JEMALLOC_INCLUDE_DIRS ${JEMALLOC_INCLUDE_DIR}) + +include(FindPackageHandleStandardArgs) +# handle the QUIETLY and REQUIRED arguments and set JEMALLOC_FOUND to TRUE +# if all listed variables are TRUE +find_package_handle_standard_args(JeMalloc DEFAULT_MSG + JEMALLOC_LIBRARY JEMALLOC_INCLUDE_DIR) + +mark_as_advanced(JEMALLOC_INCLUDE_DIR JEMALLOC_LIBRARY) diff --git a/Builds/CMake/deps/Findlibarchive_pc.cmake b/Builds/CMake/deps/Findlibarchive_pc.cmake new file mode 100644 index 00000000000..8f248b28704 --- /dev/null +++ b/Builds/CMake/deps/Findlibarchive_pc.cmake @@ -0,0 +1,22 @@ +find_package (PkgConfig REQUIRED) +pkg_search_module (libarchive_PC QUIET libarchive>=3.4.3) + +if(static) + set(LIBARCHIVE_LIB libarchive.a) +else() + set(LIBARCHIVE_LIB archive) +endif() + +find_library (archive + NAMES ${LIBARCHIVE_LIB} + HINTS + ${libarchive_PC_LIBDIR} + ${libarchive_PC_LIBRARY_DIRS} + NO_DEFAULT_PATH) + +find_path (LIBARCHIVE_INCLUDE_DIR + NAMES archive.h + HINTS + ${libarchive_PC_INCLUDEDIR} + ${libarchive_PC_INCLUDEDIRS} + NO_DEFAULT_PATH) diff --git a/Builds/CMake/deps/Findlz4.cmake b/Builds/CMake/deps/Findlz4.cmake new file mode 100644 index 00000000000..835f5989dfa --- /dev/null +++ b/Builds/CMake/deps/Findlz4.cmake @@ -0,0 +1,24 @@ +find_package (PkgConfig) +if (PKG_CONFIG_FOUND) + pkg_search_module (lz4_PC QUIET liblz4>=1.9) +endif () + +if(static) + set(LZ4_LIB liblz4.a) +else() + set(LZ4_LIB lz4.so) +endif() + +find_library (lz4 + NAMES ${LZ4_LIB} + HINTS + ${lz4_PC_LIBDIR} + ${lz4_PC_LIBRARY_DIRS} + NO_DEFAULT_PATH) + +find_path (LZ4_INCLUDE_DIR + NAMES lz4.h + HINTS + ${lz4_PC_INCLUDEDIR} + ${lz4_PC_INCLUDEDIRS} + NO_DEFAULT_PATH) diff --git a/Builds/CMake/deps/Findsecp256k1.cmake b/Builds/CMake/deps/Findsecp256k1.cmake new file mode 100644 index 00000000000..7be3d0272c8 --- /dev/null +++ b/Builds/CMake/deps/Findsecp256k1.cmake @@ -0,0 +1,24 @@ +find_package (PkgConfig) +if (PKG_CONFIG_FOUND) + pkg_search_module (secp256k1_PC QUIET libsecp256k1) +endif () + +if(static) + set(SECP256K1_LIB libsecp256k1.a) +else() + set(SECP256K1_LIB secp256k1) +endif() + +find_library(secp256k1 + NAMES ${SECP256K1_LIB} + HINTS + ${secp256k1_PC_LIBDIR} + ${secp256k1_PC_LIBRARY_PATHS} + NO_DEFAULT_PATH) + +find_path (SECP256K1_INCLUDE_DIR + NAMES secp256k1.h + HINTS + ${secp256k1_PC_INCLUDEDIR} + ${secp256k1_PC_INCLUDEDIRS} + NO_DEFAULT_PATH) diff --git a/Builds/CMake/deps/Findsnappy.cmake b/Builds/CMake/deps/Findsnappy.cmake new file mode 100644 index 00000000000..ddf3cb280e0 --- /dev/null +++ b/Builds/CMake/deps/Findsnappy.cmake @@ -0,0 +1,24 @@ +find_package (PkgConfig) +if (PKG_CONFIG_FOUND) + pkg_search_module (snappy_PC QUIET snappy>=1.1.7) +endif () + +if(static) + set(SNAPPY_LIB libsnappy.a) +else() + set(SNAPPY_LIB libsnappy.so) +endif() + +find_library (snappy + NAMES ${SNAPPY_LIB} + HINTS + ${snappy_PC_LIBDIR} + ${snappy_PC_LIBRARY_DIRS} + NO_DEFAULT_PATH) + +find_path (SNAPPY_INCLUDE_DIR + NAMES snappy.h + HINTS + ${snappy_PC_INCLUDEDIR} + ${snappy_PC_INCLUDEDIRS} + NO_DEFAULT_PATH) diff --git a/Builds/CMake/deps/Findsoci.cmake b/Builds/CMake/deps/Findsoci.cmake new file mode 100644 index 00000000000..67b89276f6e --- /dev/null +++ 
b/Builds/CMake/deps/Findsoci.cmake @@ -0,0 +1,17 @@ +find_package (PkgConfig) +if (PKG_CONFIG_FOUND) + # TBD - currently no soci pkgconfig + #pkg_search_module (soci_PC QUIET libsoci_core>=3.2) +endif () + +if(static) + set(SOCI_LIB libsoci.a) +else() + set(SOCI_LIB libsoci_core.so) +endif() + +find_library (soci + NAMES ${SOCI_LIB}) + +find_path (SOCI_INCLUDE_DIR + NAMES soci/soci.h) diff --git a/Builds/CMake/deps/Findsqlite.cmake b/Builds/CMake/deps/Findsqlite.cmake new file mode 100644 index 00000000000..ef5c6befc1e --- /dev/null +++ b/Builds/CMake/deps/Findsqlite.cmake @@ -0,0 +1,24 @@ +find_package (PkgConfig) +if (PKG_CONFIG_FOUND) + pkg_search_module (sqlite_PC QUIET sqlite3>=3.26.0) +endif () + +if(static) + set(SQLITE_LIB libsqlite3.a) +else() + set(SQLITE_LIB sqlite3.so) +endif() + +find_library (sqlite3 + NAMES ${SQLITE_LIB} + HINTS + ${sqlite_PC_LIBDIR} + ${sqlite_PC_LIBRARY_DIRS} + NO_DEFAULT_PATH) + +find_path (SQLITE_INCLUDE_DIR + NAMES sqlite3.h + HINTS + ${sqlite_PC_INCLUDEDIR} + ${sqlite_PC_INCLUDEDIRS} + NO_DEFAULT_PATH) diff --git a/Builds/CMake/deps/Libarchive.cmake b/Builds/CMake/deps/Libarchive.cmake new file mode 100644 index 00000000000..57b8d2e395b --- /dev/null +++ b/Builds/CMake/deps/Libarchive.cmake @@ -0,0 +1,163 @@ +#[===================================================================[ + NIH dep: libarchive +#]===================================================================] + +option (local_libarchive "use local build of libarchive." OFF) +add_library (archive_lib UNKNOWN IMPORTED GLOBAL) + +if (NOT local_libarchive) + if (NOT WIN32) + find_package(libarchive_pc REQUIRED) + endif () + if (archive) + message (STATUS "Found libarchive using pkg-config. Using ${archive}.") + set_target_properties (archive_lib PROPERTIES + IMPORTED_LOCATION_DEBUG + ${archive} + IMPORTED_LOCATION_RELEASE + ${archive} + INTERFACE_INCLUDE_DIRECTORIES + ${LIBARCHIVE_INCLUDE_DIR}) + # pkg-config can return extra info for static lib linking + # this is probably needed/useful generally, but apply + # to APPLE for now (mostly for homebrew) + if (APPLE AND static AND libarchive_PC_STATIC_LIBRARIES) + message(STATUS "NOTE: libarchive static libs: ${libarchive_PC_STATIC_LIBRARIES}") + # also, APPLE seems to need iconv...maybe linux does too (TBD) + target_link_libraries (archive_lib + INTERFACE iconv ${libarchive_PC_STATIC_LIBRARIES}) + endif () + else () + ## now try searching using the minimal find module that cmake provides + find_package(LibArchive 3.4.3 QUIET) + if (LibArchive_FOUND) + if (static) + # find module doesn't find static libs currently, so we re-search + get_filename_component(_loc ${LibArchive_LIBRARY} DIRECTORY) + find_library(_la_static + NAMES libarchive.a archive_static.lib archive.lib + PATHS ${_loc}) + if (_la_static) + set (_la_lib ${_la_static}) + else () + message (WARNING "unable to find libarchive static lib - switching to local build") + set (local_libarchive ON CACHE BOOL "" FORCE) + endif () + else () + set (_la_lib ${LibArchive_LIBRARY}) + endif () + if (NOT local_libarchive) + message (STATUS "Found libarchive using module/config. 
Using ${_la_lib}.") + set_target_properties (archive_lib PROPERTIES + IMPORTED_LOCATION_DEBUG + ${_la_lib} + IMPORTED_LOCATION_RELEASE + ${_la_lib} + INTERFACE_INCLUDE_DIRECTORIES + ${LibArchive_INCLUDE_DIRS}) + endif () + else () + set (local_libarchive ON CACHE BOOL "" FORCE) + endif () + endif () +endif() + +if (local_libarchive) + set (lib_post "") + if (MSVC) + set (lib_post "_static") + endif () + ExternalProject_Add (libarchive + PREFIX ${nih_cache_path} + GIT_REPOSITORY https://github.com/libarchive/libarchive.git + GIT_TAG v3.4.3 + CMAKE_ARGS + # passing the compiler seems to be needed for windows CI, sadly + -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER} + -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} + $<$:-DCMAKE_VERBOSE_MAKEFILE=ON> + -DCMAKE_DEBUG_POSTFIX=_d + $<$>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}> + -DENABLE_LZ4=ON + -ULZ4_* + -DLZ4_INCLUDE_DIR=$,::> + # because we are building a static lib, this lz4 library doesn't + # actually matter since you can't generally link static libs to other static + # libs. The include files are needed, but the library itself is not (until + # we link our application, at which point we use the lz4 we built above). + # nonetheless, we need to provide a library to libarchive else it will + # NOT include lz4 support when configuring + -DLZ4_LIBRARY=$,$,$> + -DENABLE_WERROR=OFF + -DENABLE_TAR=OFF + -DENABLE_TAR_SHARED=OFF + -DENABLE_INSTALL=ON + -DENABLE_NETTLE=OFF + -DENABLE_OPENSSL=OFF + -DENABLE_LZO=OFF + -DENABLE_LZMA=OFF + -DENABLE_ZLIB=OFF + -DENABLE_BZip2=OFF + -DENABLE_LIBXML2=OFF + -DENABLE_EXPAT=OFF + -DENABLE_PCREPOSIX=OFF + -DENABLE_LibGCC=OFF + -DENABLE_CNG=OFF + -DENABLE_CPIO=OFF + -DENABLE_CPIO_SHARED=OFF + -DENABLE_CAT=OFF + -DENABLE_CAT_SHARED=OFF + -DENABLE_XATTR=OFF + -DENABLE_ACL=OFF + -DENABLE_ICONV=OFF + -DENABLE_TEST=OFF + -DENABLE_COVERAGE=OFF + $<$: + "-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP" + "-DCMAKE_C_FLAGS_DEBUG=-MTd" + "-DCMAKE_C_FLAGS_RELEASE=-MT" + > + LIST_SEPARATOR :: + LOG_BUILD ON + LOG_CONFIGURE ON + BUILD_COMMAND + ${CMAKE_COMMAND} + --build . 
+ --config $ + --target archive_static + --parallel ${ep_procs} + $<$: + COMMAND + ${CMAKE_COMMAND} -E copy + /libarchive/$/${ep_lib_prefix}archive${lib_post}$<$:_d>${ep_lib_suffix} + /libarchive + > + TEST_COMMAND "" + INSTALL_COMMAND "" + DEPENDS lz4_lib + BUILD_BYPRODUCTS + /libarchive/${ep_lib_prefix}archive${lib_post}${ep_lib_suffix} + /libarchive/${ep_lib_prefix}archive${lib_post}_d${ep_lib_suffix} + ) + ExternalProject_Get_Property (libarchive BINARY_DIR) + ExternalProject_Get_Property (libarchive SOURCE_DIR) + if (CMAKE_VERBOSE_MAKEFILE) + print_ep_logs (libarchive) + endif () + file (MAKE_DIRECTORY ${SOURCE_DIR}/libarchive) + set_target_properties (archive_lib PROPERTIES + IMPORTED_LOCATION_DEBUG + ${BINARY_DIR}/libarchive/${ep_lib_prefix}archive${lib_post}_d${ep_lib_suffix} + IMPORTED_LOCATION_RELEASE + ${BINARY_DIR}/libarchive/${ep_lib_prefix}archive${lib_post}${ep_lib_suffix} + INTERFACE_INCLUDE_DIRECTORIES + ${SOURCE_DIR}/libarchive + INTERFACE_COMPILE_DEFINITIONS + LIBARCHIVE_STATIC) +endif() + +add_dependencies (archive_lib libarchive) +target_link_libraries (archive_lib INTERFACE lz4_lib) +target_link_libraries (ripple_libs INTERFACE archive_lib) +exclude_if_included (libarchive) +exclude_if_included (archive_lib) diff --git a/Builds/CMake/deps/Lz4.cmake b/Builds/CMake/deps/Lz4.cmake new file mode 100644 index 00000000000..15d890692c5 --- /dev/null +++ b/Builds/CMake/deps/Lz4.cmake @@ -0,0 +1,79 @@ +#[===================================================================[ + NIH dep: lz4 +#]===================================================================] + +add_library (lz4_lib STATIC IMPORTED GLOBAL) + +if (NOT WIN32) + find_package(lz4) +endif() + +if(lz4) + set_target_properties (lz4_lib PROPERTIES + IMPORTED_LOCATION_DEBUG + ${lz4} + IMPORTED_LOCATION_RELEASE + ${lz4} + INTERFACE_INCLUDE_DIRECTORIES + ${LZ4_INCLUDE_DIR}) + +else() + ExternalProject_Add (lz4 + PREFIX ${nih_cache_path} + GIT_REPOSITORY https://github.com/lz4/lz4.git + GIT_TAG v1.9.2 + SOURCE_SUBDIR contrib/cmake_unofficial + CMAKE_ARGS + -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER} + -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} + $<$:-DCMAKE_VERBOSE_MAKEFILE=ON> + -DCMAKE_DEBUG_POSTFIX=_d + $<$>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}> + -DBUILD_STATIC_LIBS=ON + -DBUILD_SHARED_LIBS=OFF + $<$: + "-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP" + "-DCMAKE_C_FLAGS_DEBUG=-MTd" + "-DCMAKE_C_FLAGS_RELEASE=-MT" + > + LOG_BUILD ON + LOG_CONFIGURE ON + BUILD_COMMAND + ${CMAKE_COMMAND} + --build . 
+ --config $ + --target lz4_static + --parallel ${ep_procs} + $<$: + COMMAND + ${CMAKE_COMMAND} -E copy + /$/${ep_lib_prefix}lz4$<$:_d>${ep_lib_suffix} + + > + TEST_COMMAND "" + INSTALL_COMMAND "" + BUILD_BYPRODUCTS + /${ep_lib_prefix}lz4${ep_lib_suffix} + /${ep_lib_prefix}lz4_d${ep_lib_suffix} + ) + ExternalProject_Get_Property (lz4 BINARY_DIR) + ExternalProject_Get_Property (lz4 SOURCE_DIR) + + file (MAKE_DIRECTORY ${SOURCE_DIR}/lz4) + set_target_properties (lz4_lib PROPERTIES + IMPORTED_LOCATION_DEBUG + ${BINARY_DIR}/${ep_lib_prefix}lz4_d${ep_lib_suffix} + IMPORTED_LOCATION_RELEASE + ${BINARY_DIR}/${ep_lib_prefix}lz4${ep_lib_suffix} + INTERFACE_INCLUDE_DIRECTORIES + ${SOURCE_DIR}/lib) + + if (CMAKE_VERBOSE_MAKEFILE) + print_ep_logs (lz4) + endif () + add_dependencies (lz4_lib lz4) + target_link_libraries (ripple_libs INTERFACE lz4_lib) + exclude_if_included (lz4) +endif() + +exclude_if_included (lz4_lib) diff --git a/Builds/CMake/deps/Nudb.cmake b/Builds/CMake/deps/Nudb.cmake new file mode 100644 index 00000000000..9698d3f061c --- /dev/null +++ b/Builds/CMake/deps/Nudb.cmake @@ -0,0 +1,31 @@ +#[===================================================================[ + NIH dep: nudb + + NuDB is header-only, thus is an INTERFACE lib in CMake. + TODO: move the library definition into NuDB repo and add + proper targets and export/install +#]===================================================================] + +if (is_root_project) # NuDB not needed in the case of xrpl_core inclusion build + add_library (nudb INTERFACE) + FetchContent_Declare( + nudb_src + GIT_REPOSITORY https://github.com/CPPAlliance/NuDB.git + GIT_TAG 2.0.5 + ) + FetchContent_GetProperties(nudb_src) + if(NOT nudb_src_POPULATED) + message (STATUS "Pausing to download NuDB...") + FetchContent_Populate(nudb_src) + endif() + + file(TO_CMAKE_PATH "${nudb_src_SOURCE_DIR}" nudb_src_SOURCE_DIR) + # specify as system includes so as to avoid warnings + target_include_directories (nudb SYSTEM INTERFACE ${nudb_src_SOURCE_DIR}/include) + target_link_libraries (nudb + INTERFACE + Boost::thread + Boost::system) + add_library (NIH::nudb ALIAS nudb) + target_link_libraries (ripple_libs INTERFACE NIH::nudb) +endif () diff --git a/Builds/CMake/deps/OpenSSL.cmake b/Builds/CMake/deps/OpenSSL.cmake new file mode 100644 index 00000000000..ad5117aacb0 --- /dev/null +++ b/Builds/CMake/deps/OpenSSL.cmake @@ -0,0 +1,48 @@ +#[===================================================================[ + NIH dep: openssl +#]===================================================================] + +#[===============================================[ + OPENSSL_ROOT_DIR is the only variable that + FindOpenSSL honors for locating, so convert any + OPENSSL_ROOT vars to this +#]===============================================] +if (NOT DEFINED OPENSSL_ROOT_DIR) + if (DEFINED ENV{OPENSSL_ROOT}) + set (OPENSSL_ROOT_DIR $ENV{OPENSSL_ROOT}) + elseif (HOMEBREW) + execute_process (COMMAND ${HOMEBREW} --prefix openssl + OUTPUT_VARIABLE OPENSSL_ROOT_DIR + OUTPUT_STRIP_TRAILING_WHITESPACE) + endif () + file (TO_CMAKE_PATH "${OPENSSL_ROOT_DIR}" OPENSSL_ROOT_DIR) +endif () + +if (static) + set (OPENSSL_USE_STATIC_LIBS ON) +endif () +set (OPENSSL_MSVC_STATIC_RT ON) +find_package (OpenSSL 1.1.1 REQUIRED) +target_link_libraries (ripple_libs + INTERFACE + OpenSSL::SSL + OpenSSL::Crypto) +# disable SSLv2...this can also be done when building/configuring OpenSSL +set_target_properties(OpenSSL::SSL PROPERTIES + INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2) 
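+# A minimal consumer sketch (illustrative only; the tls_probe target is
+# hypothetical and not part of this build): anything linked against
+# ripple_libs picks up the imported OpenSSL targets and the OPENSSL_NO_SSL2
+# definition transitively:
+#
+#   add_executable (tls_probe tls_probe.cpp)
+#   target_link_libraries (tls_probe PRIVATE ripple_libs)
+#   # tls_probe.cpp now compiles with -DOPENSSL_NO_SSL2 and links
+#   # OpenSSL::SSL / OpenSSL::Crypto located by find_package above.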
+#[=========================================================[ + https://gitlab.kitware.com/cmake/cmake/issues/16885 + depending on how openssl is built, it might depend + on zlib. In fact, the openssl find package should + figure this out for us, but it does not currently... + so let's add zlib ourselves to the lib list + TODO: investigate linking to static zlib for static + build option +#]=========================================================] +find_package (ZLIB) +set (has_zlib FALSE) +if (TARGET ZLIB::ZLIB) + set_target_properties(OpenSSL::Crypto PROPERTIES + INTERFACE_LINK_LIBRARIES ZLIB::ZLIB) + set (has_zlib TRUE) +endif () diff --git a/Builds/CMake/deps/Postgres.cmake b/Builds/CMake/deps/Postgres.cmake new file mode 100644 index 00000000000..bb94832a48b --- /dev/null +++ b/Builds/CMake/deps/Postgres.cmake @@ -0,0 +1,70 @@ +if(reporting) + find_package(PostgreSQL) + if(NOT PostgreSQL_FOUND) + message("find_package did not find postgres") + find_library(postgres NAMES pq libpq libpq-dev pq-dev postgresql-devel) + find_path(libpq-fe NAMES libpq-fe.h PATH_SUFFIXES postgresql pgsql include) + + if(NOT libpq-fe_FOUND OR NOT postgres_FOUND) + message("No system installed Postgres found. Will build") + add_library(postgres SHARED IMPORTED GLOBAL) + add_library(pgport SHARED IMPORTED GLOBAL) + add_library(pgcommon SHARED IMPORTED GLOBAL) + ExternalProject_Add(postgres_src + PREFIX ${nih_cache_path} + GIT_REPOSITORY https://github.com/postgres/postgres.git + GIT_TAG REL_14_5 + CONFIGURE_COMMAND ./configure --without-readline > /dev/null + BUILD_COMMAND ${CMAKE_COMMAND} -E env --unset=MAKELEVEL make + UPDATE_COMMAND "" + BUILD_IN_SOURCE 1 + INSTALL_COMMAND "" + BUILD_BYPRODUCTS + /src/interfaces/libpq/${ep_lib_prefix}pq.a + /src/common/${ep_lib_prefix}pgcommon.a + /src/port/${ep_lib_prefix}pgport.a + LOG_BUILD TRUE + ) + ExternalProject_Get_Property (postgres_src SOURCE_DIR) + ExternalProject_Get_Property (postgres_src BINARY_DIR) + + set (postgres_src_SOURCE_DIR "${SOURCE_DIR}") + file (MAKE_DIRECTORY ${postgres_src_SOURCE_DIR}) + list(APPEND INCLUDE_DIRS + ${SOURCE_DIR}/src/include + ${SOURCE_DIR}/src/interfaces/libpq + ) + set_target_properties(postgres PROPERTIES + IMPORTED_LOCATION + ${BINARY_DIR}/src/interfaces/libpq/${ep_lib_prefix}pq.a + INTERFACE_INCLUDE_DIRECTORIES + "${INCLUDE_DIRS}" + ) + set_target_properties(pgcommon PROPERTIES + IMPORTED_LOCATION + ${BINARY_DIR}/src/common/${ep_lib_prefix}pgcommon.a + INTERFACE_INCLUDE_DIRECTORIES + "${INCLUDE_DIRS}" + ) + set_target_properties(pgport PROPERTIES + IMPORTED_LOCATION + ${BINARY_DIR}/src/port/${ep_lib_prefix}pgport.a + INTERFACE_INCLUDE_DIRECTORIES + "${INCLUDE_DIRS}" + ) + add_dependencies(postgres postgres_src) + add_dependencies(pgcommon postgres_src) + add_dependencies(pgport postgres_src) + file(TO_CMAKE_PATH "${postgres_src_SOURCE_DIR}" postgres_src_SOURCE_DIR) + target_link_libraries(ripple_libs INTERFACE postgres pgcommon pgport) + else() + message("Found system installed Postgres via find_libary") + target_include_directories(ripple_libs INTERFACE ${libpq-fe}) + target_link_libraries(ripple_libs INTERFACE ${postgres}) + endif() + else() + message("Found system installed Postgres via find_package") + target_include_directories(ripple_libs INTERFACE ${PostgreSQL_INCLUDE_DIRS}) + target_link_libraries(ripple_libs INTERFACE ${PostgreSQL_LIBRARIES}) + endif() +endif() diff --git a/Builds/CMake/deps/Protobuf.cmake b/Builds/CMake/deps/Protobuf.cmake index 0706ae32243..35d5b9f0ff7 100644 --- a/Builds/CMake/deps/Protobuf.cmake 
+++ b/Builds/CMake/deps/Protobuf.cmake @@ -1,22 +1,155 @@ -find_package(Protobuf 3.8) +#[===================================================================[ + import protobuf (lib and compiler) and create a lib + from our proto message definitions. If the system protobuf + is not found, fallback on EP to download and build a version + from official source. +#]===================================================================] -file(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/proto_gen) -set(ccbd ${CMAKE_CURRENT_BINARY_DIR}) -set(CMAKE_CURRENT_BINARY_DIR ${CMAKE_BINARY_DIR}/proto_gen) -protobuf_generate_cpp(PROTO_SRCS PROTO_HDRS src/ripple/proto/ripple.proto) -set(CMAKE_CURRENT_BINARY_DIR ${ccbd}) +if (static) + set (Protobuf_USE_STATIC_LIBS ON) +endif () +find_package (Protobuf 3.8) +if (is_multiconfig) + set(protobuf_protoc_lib ${Protobuf_PROTOC_LIBRARIES}) +else () + string(TOUPPER ${CMAKE_BUILD_TYPE} upper_cmake_build_type) + set(protobuf_protoc_lib ${Protobuf_PROTOC_LIBRARY_${upper_cmake_build_type}}) +endif () +if (local_protobuf OR NOT (Protobuf_FOUND AND Protobuf_PROTOC_EXECUTABLE AND protobuf_protoc_lib)) + include (GNUInstallDirs) + message (STATUS "using local protobuf build.") + set(protobuf_reqs Protobuf_PROTOC_EXECUTABLE protobuf_protoc_lib) + foreach(lib ${protobuf_reqs}) + if(NOT ${lib}) + message(STATUS "Couldn't find ${lib}") + endif() + endforeach() + if (WIN32) + # protobuf prepends lib even on windows + set (pbuf_lib_pre "lib") + else () + set (pbuf_lib_pre ${ep_lib_prefix}) + endif () + # for the external project build of protobuf, we currently ignore the + # static option and always build static libs here. This is consistent + # with our other EP builds. Dynamic libs in an EP would add complexity + # because we'd need to get them into the runtime path, and probably + # install them. + ExternalProject_Add (protobuf_src + PREFIX ${nih_cache_path} + GIT_REPOSITORY https://github.com/protocolbuffers/protobuf.git + GIT_TAG v3.8.0 + SOURCE_SUBDIR cmake + CMAKE_ARGS + -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER} + -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} + -DCMAKE_INSTALL_PREFIX=/_installed_ + -Dprotobuf_BUILD_TESTS=OFF + -Dprotobuf_BUILD_EXAMPLES=OFF + -Dprotobuf_BUILD_PROTOC_BINARIES=ON + -Dprotobuf_MSVC_STATIC_RUNTIME=ON + -DBUILD_SHARED_LIBS=OFF + -Dprotobuf_BUILD_SHARED_LIBS=OFF + -DCMAKE_DEBUG_POSTFIX=_d + -Dprotobuf_DEBUG_POSTFIX=_d + -Dprotobuf_WITH_ZLIB=$,ON,OFF> + $<$:-DCMAKE_VERBOSE_MAKEFILE=ON> + $<$:-DCMAKE_UNITY_BUILD=ON}> + $<$>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}> + $<$: + "-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -EHa -MP" + > + LOG_BUILD ON + LOG_CONFIGURE ON + BUILD_COMMAND + ${CMAKE_COMMAND} + --build . + --config $ + --parallel ${ep_procs} + TEST_COMMAND "" + INSTALL_COMMAND + ${CMAKE_COMMAND} -E env --unset=DESTDIR ${CMAKE_COMMAND} --build . 
--config $ --target install + BUILD_BYPRODUCTS + /_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protobuf${ep_lib_suffix} + /_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protobuf_d${ep_lib_suffix} + /_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protoc${ep_lib_suffix} + /_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protoc_d${ep_lib_suffix} + /_installed_/bin/protoc${CMAKE_EXECUTABLE_SUFFIX} + ) + ExternalProject_Get_Property (protobuf_src BINARY_DIR) + ExternalProject_Get_Property (protobuf_src SOURCE_DIR) + if (CMAKE_VERBOSE_MAKEFILE) + print_ep_logs (protobuf_src) + endif () + exclude_if_included (protobuf_src) -add_library(pbufs STATIC ${PROTO_SRCS} ${PROTO_HDRS}) -target_include_directories(pbufs SYSTEM PUBLIC - ${CMAKE_BINARY_DIR}/proto_gen - ${CMAKE_BINARY_DIR}/proto_gen/src/ripple/proto -) -target_link_libraries(pbufs protobuf::libprotobuf) -target_compile_options(pbufs + if (NOT TARGET protobuf::libprotobuf) + add_library (protobuf::libprotobuf STATIC IMPORTED GLOBAL) + endif () + file (MAKE_DIRECTORY ${BINARY_DIR}/_installed_/include) + set_target_properties (protobuf::libprotobuf PROPERTIES + IMPORTED_LOCATION_DEBUG + ${BINARY_DIR}/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protobuf_d${ep_lib_suffix} + IMPORTED_LOCATION_RELEASE + ${BINARY_DIR}/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protobuf${ep_lib_suffix} + INTERFACE_INCLUDE_DIRECTORIES + ${BINARY_DIR}/_installed_/include) + add_dependencies (protobuf::libprotobuf protobuf_src) + exclude_if_included (protobuf::libprotobuf) + + if (NOT TARGET protobuf::libprotoc) + add_library (protobuf::libprotoc STATIC IMPORTED GLOBAL) + endif () + set_target_properties (protobuf::libprotoc PROPERTIES + IMPORTED_LOCATION_DEBUG + ${BINARY_DIR}/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protoc_d${ep_lib_suffix} + IMPORTED_LOCATION_RELEASE + ${BINARY_DIR}/_installed_/${CMAKE_INSTALL_LIBDIR}/${pbuf_lib_pre}protoc${ep_lib_suffix} + INTERFACE_INCLUDE_DIRECTORIES + ${BINARY_DIR}/_installed_/include) + add_dependencies (protobuf::libprotoc protobuf_src) + exclude_if_included (protobuf::libprotoc) + + if (NOT TARGET protobuf::protoc) + add_executable (protobuf::protoc IMPORTED) + exclude_if_included (protobuf::protoc) + endif () + set_target_properties (protobuf::protoc PROPERTIES + IMPORTED_LOCATION "${BINARY_DIR}/_installed_/bin/protoc${CMAKE_EXECUTABLE_SUFFIX}") + add_dependencies (protobuf::protoc protobuf_src) +else () + if (NOT TARGET protobuf::protoc) + if (EXISTS "${Protobuf_PROTOC_EXECUTABLE}") + add_executable (protobuf::protoc IMPORTED) + set_target_properties (protobuf::protoc PROPERTIES + IMPORTED_LOCATION "${Protobuf_PROTOC_EXECUTABLE}") + else () + message (FATAL_ERROR "Protobuf import failed") + endif () + endif () +endif () + +file (MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/proto_gen) +set (save_CBD ${CMAKE_CURRENT_BINARY_DIR}) +set (CMAKE_CURRENT_BINARY_DIR ${CMAKE_BINARY_DIR}/proto_gen) +protobuf_generate_cpp ( + PROTO_SRCS + PROTO_HDRS + src/ripple/proto/ripple.proto) +set (CMAKE_CURRENT_BINARY_DIR ${save_CBD}) + +add_library (pbufs STATIC ${PROTO_SRCS} ${PROTO_HDRS}) + +target_include_directories (pbufs PRIVATE src) +target_include_directories (pbufs + SYSTEM PUBLIC ${CMAKE_BINARY_DIR}/proto_gen) +target_link_libraries (pbufs protobuf::libprotobuf) +target_compile_options (pbufs PUBLIC - $<$: + $<$: --system-header-prefix="google/protobuf" -Wno-deprecated-dynamic-exception-spec - > -) -add_library(Ripple::pbufs ALIAS pbufs) + >) +add_library (Ripple::pbufs ALIAS pbufs) +target_link_libraries 
(ripple_libs INTERFACE Ripple::pbufs) +exclude_if_included (pbufs) diff --git a/Builds/CMake/deps/Rocksdb.cmake b/Builds/CMake/deps/Rocksdb.cmake new file mode 100644 index 00000000000..2c832c593f5 --- /dev/null +++ b/Builds/CMake/deps/Rocksdb.cmake @@ -0,0 +1,177 @@ +#[===================================================================[ + NIH dep: rocksdb +#]===================================================================] + +add_library (rocksdb_lib UNKNOWN IMPORTED GLOBAL) +set_target_properties (rocksdb_lib + PROPERTIES INTERFACE_COMPILE_DEFINITIONS RIPPLE_ROCKSDB_AVAILABLE=1) + +option (local_rocksdb "use local build of rocksdb." OFF) +if (NOT local_rocksdb) + find_package (RocksDB 6.27 QUIET CONFIG) + if (TARGET RocksDB::rocksdb) + message (STATUS "Found RocksDB using config.") + get_target_property (_rockslib_l RocksDB::rocksdb IMPORTED_LOCATION_DEBUG) + if (_rockslib_l) + set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION_DEBUG ${_rockslib_l}) + endif () + get_target_property (_rockslib_l RocksDB::rocksdb IMPORTED_LOCATION_RELEASE) + if (_rockslib_l) + set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION_RELEASE ${_rockslib_l}) + endif () + get_target_property (_rockslib_l RocksDB::rocksdb IMPORTED_LOCATION) + if (_rockslib_l) + set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION ${_rockslib_l}) + endif () + get_target_property (_rockslib_i RocksDB::rocksdb INTERFACE_INCLUDE_DIRECTORIES) + if (_rockslib_i) + set_target_properties (rocksdb_lib PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${_rockslib_i}) + endif () + target_link_libraries (ripple_libs INTERFACE RocksDB::rocksdb) + else () + # using a find module with rocksdb is difficult because + # you have no idea how it was configured (transitive dependencies). + # the code below will generally find rocksdb using the module, but + # will then result in linker errors for static linkage since the + # transitive dependencies are unknown. force local build here for now, but leave the code as + # a placeholder for future investigation. 
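+    # Illustrative only: linking a prebuilt static librocksdb.a would also
+    # require restating its transitive dependencies by hand, e.g. something
+    # like
+    #   target_link_libraries (rocksdb_lib INTERFACE snappy_lib lz4_lib)
+    # plus whatever else (zstd, bz2, gflags, ...) that particular build was
+    # configured with -- which is exactly the unknown we avoid by forcing the
+    # local build.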
+ if (static) + set (local_rocksdb ON CACHE BOOL "" FORCE) + # TBD if there is some way to extract transitive deps..then: + #set (RocksDB_USE_STATIC ON) + else () + find_package (RocksDB 6.27 MODULE) + if (ROCKSDB_FOUND) + if (RocksDB_LIBRARY_DEBUG) + set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION_DEBUG ${RocksDB_LIBRARY_DEBUG}) + endif () + set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION_RELEASE ${RocksDB_LIBRARIES}) + set_target_properties (rocksdb_lib PROPERTIES IMPORTED_LOCATION ${RocksDB_LIBRARIES}) + set_target_properties (rocksdb_lib PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${RocksDB_INCLUDE_DIRS}) + else () + set (local_rocksdb ON CACHE BOOL "" FORCE) + endif () + endif () + endif () +endif () + +if (local_rocksdb) + message (STATUS "Using local build of RocksDB.") + ExternalProject_Add (rocksdb + PREFIX ${nih_cache_path} + GIT_REPOSITORY https://github.com/facebook/rocksdb.git + GIT_TAG v6.27.3 + PATCH_COMMAND + # only used by windows build + ${CMAKE_COMMAND} -E copy_if_different + ${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/rocks_thirdparty.inc + /thirdparty.inc + COMMAND + # fixup their build version file to keep the values + # from changing always + ${CMAKE_COMMAND} -E copy_if_different + ${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/rocksdb_build_version.cc.in + /util/build_version.cc.in + CMAKE_ARGS + -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER} + -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} + $<$:-DCMAKE_VERBOSE_MAKEFILE=ON> + $<$:-DCMAKE_UNITY_BUILD=ON}> + -DCMAKE_DEBUG_POSTFIX=_d + $<$>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}> + -DBUILD_SHARED_LIBS=OFF + -DCMAKE_POSITION_INDEPENDENT_CODE=ON + -DWITH_JEMALLOC=$,ON,OFF> + -DWITH_SNAPPY=ON + -DWITH_LZ4=ON + -DWITH_ZLIB=OFF + -DUSE_RTTI=ON + -DWITH_ZSTD=OFF + -DWITH_GFLAGS=OFF + -DWITH_BZ2=OFF + -ULZ4_* + -Ulz4_* + -Dlz4_INCLUDE_DIRS=$,::> + -Dlz4_LIBRARIES=$,$,$> + -Dlz4_FOUND=ON + -USNAPPY_* + -Usnappy_* + -USnappy_* + -Dsnappy_INCLUDE_DIRS=$,::> + -Dsnappy_LIBRARIES=$,$,$> + -Dsnappy_FOUND=ON + -DSnappy_INCLUDE_DIRS=$,::> + -DSnappy_LIBRARIES=$,$,$> + -DSnappy_FOUND=ON + -DWITH_MD_LIBRARY=OFF + -DWITH_RUNTIME_DEBUG=$,ON,OFF> + -DFAIL_ON_WARNINGS=OFF + -DWITH_ASAN=OFF + -DWITH_TSAN=OFF + -DWITH_UBSAN=OFF + -DWITH_NUMA=OFF + -DWITH_TBB=OFF + -DWITH_WINDOWS_UTF8_FILENAMES=OFF + -DWITH_XPRESS=OFF + -DPORTABLE=ON + -DFORCE_SSE42=OFF + -DDISABLE_STALL_NOTIF=OFF + -DOPTDBG=ON + -DROCKSDB_LITE=OFF + -DWITH_FALLOCATE=ON + -DWITH_LIBRADOS=OFF + -DWITH_JNI=OFF + -DROCKSDB_INSTALL_ON_WINDOWS=OFF + -DWITH_TESTS=OFF + -DWITH_TOOLS=OFF + $<$: + "-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -MP /DNDEBUG" + > + $<$>: + "-DCMAKE_CXX_FLAGS=-DNDEBUG" + > + LOG_BUILD ON + LOG_CONFIGURE ON + BUILD_COMMAND + ${CMAKE_COMMAND} + --build . 
+ --config $ + --parallel ${ep_procs} + $<$: + COMMAND + ${CMAKE_COMMAND} -E copy + /$/${ep_lib_prefix}rocksdb$<$:_d>${ep_lib_suffix} + + > + LIST_SEPARATOR :: + TEST_COMMAND "" + INSTALL_COMMAND "" + DEPENDS snappy_lib lz4_lib + BUILD_BYPRODUCTS + /${ep_lib_prefix}rocksdb${ep_lib_suffix} + /${ep_lib_prefix}rocksdb_d${ep_lib_suffix} + ) + ExternalProject_Get_Property (rocksdb BINARY_DIR) + ExternalProject_Get_Property (rocksdb SOURCE_DIR) + if (CMAKE_VERBOSE_MAKEFILE) + print_ep_logs (rocksdb) + endif () + file (MAKE_DIRECTORY ${SOURCE_DIR}/include) + set_target_properties (rocksdb_lib PROPERTIES + IMPORTED_LOCATION_DEBUG + ${BINARY_DIR}/${ep_lib_prefix}rocksdb_d${ep_lib_suffix} + IMPORTED_LOCATION_RELEASE + ${BINARY_DIR}/${ep_lib_prefix}rocksdb${ep_lib_suffix} + INTERFACE_INCLUDE_DIRECTORIES + ${SOURCE_DIR}/include) + add_dependencies (rocksdb_lib rocksdb) + exclude_if_included (rocksdb) +endif () + +target_link_libraries (rocksdb_lib + INTERFACE + snappy_lib + lz4_lib + $<$:rpcrt4>) +exclude_if_included (rocksdb_lib) +target_link_libraries (ripple_libs INTERFACE rocksdb_lib) diff --git a/Builds/CMake/deps/Secp256k1.cmake b/Builds/CMake/deps/Secp256k1.cmake new file mode 100644 index 00000000000..3197315626b --- /dev/null +++ b/Builds/CMake/deps/Secp256k1.cmake @@ -0,0 +1,58 @@ +#[===================================================================[ + NIH dep: secp256k1 +#]===================================================================] + +add_library (secp256k1_lib STATIC IMPORTED GLOBAL) + +if (NOT WIN32) + find_package(secp256k1) +endif() + +if(secp256k1) + set_target_properties (secp256k1_lib PROPERTIES + IMPORTED_LOCATION_DEBUG + ${secp256k1} + IMPORTED_LOCATION_RELEASE + ${secp256k1} + INTERFACE_INCLUDE_DIRECTORIES + ${SECP256K1_INCLUDE_DIR}) + + add_library (secp256k1 ALIAS secp256k1_lib) + add_library (NIH::secp256k1 ALIAS secp256k1_lib) + +else() + set(INSTALL_SECP256K1 true) + + add_library (secp256k1 STATIC + src/secp256k1/src/secp256k1.c) + target_compile_definitions (secp256k1 + PRIVATE + USE_NUM_NONE + USE_FIELD_10X26 + USE_FIELD_INV_BUILTIN + USE_SCALAR_8X32 + USE_SCALAR_INV_BUILTIN) + target_include_directories (secp256k1 + PUBLIC + $ + $ + PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src/secp256k1) + target_compile_options (secp256k1 + PRIVATE + $<$:-wd4319> + $<$>: + -Wno-deprecated-declarations + -Wno-unused-function + > + $<$:-Wno-nonnull-compare>) + target_link_libraries (ripple_libs INTERFACE NIH::secp256k1) +#[===========================[ + headers installation +#]===========================] + install ( + FILES + src/secp256k1/include/secp256k1.h + DESTINATION include/secp256k1/include) + + add_library (NIH::secp256k1 ALIAS secp256k1) +endif() diff --git a/Builds/CMake/deps/Snappy.cmake b/Builds/CMake/deps/Snappy.cmake new file mode 100644 index 00000000000..331ac2fbe95 --- /dev/null +++ b/Builds/CMake/deps/Snappy.cmake @@ -0,0 +1,77 @@ +#[===================================================================[ + NIH dep: snappy +#]===================================================================] + +add_library (snappy_lib STATIC IMPORTED GLOBAL) + +if (NOT WIN32) + find_package(snappy) +endif() + +if(snappy) + set_target_properties (snappy_lib PROPERTIES + IMPORTED_LOCATION_DEBUG + ${snappy} + IMPORTED_LOCATION_RELEASE + ${snappy} + INTERFACE_INCLUDE_DIRECTORIES + ${SNAPPY_INCLUDE_DIR}) + +else() + ExternalProject_Add (snappy + PREFIX ${nih_cache_path} + GIT_REPOSITORY https://github.com/google/snappy.git + GIT_TAG 1.1.7 + CMAKE_ARGS + 
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER} + -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} + $<$:-DCMAKE_VERBOSE_MAKEFILE=ON> + -DCMAKE_DEBUG_POSTFIX=_d + $<$>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}> + -DBUILD_SHARED_LIBS=OFF + -DCMAKE_POSITION_INDEPENDENT_CODE=ON + -DSNAPPY_BUILD_TESTS=OFF + $<$: + "-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -EHa -MP" + "-DCMAKE_CXX_FLAGS_DEBUG=-MTd" + "-DCMAKE_CXX_FLAGS_RELEASE=-MT" + > + LOG_BUILD ON + LOG_CONFIGURE ON + BUILD_COMMAND + ${CMAKE_COMMAND} + --build . + --config $ + --parallel ${ep_procs} + $<$: + COMMAND + ${CMAKE_COMMAND} -E copy + /$/${ep_lib_prefix}snappy$<$:_d>${ep_lib_suffix} + + > + TEST_COMMAND "" + INSTALL_COMMAND + ${CMAKE_COMMAND} -E copy_if_different /config.h /snappy-stubs-public.h + BUILD_BYPRODUCTS + /${ep_lib_prefix}snappy${ep_lib_suffix} + /${ep_lib_prefix}snappy_d${ep_lib_suffix} + ) + ExternalProject_Get_Property (snappy BINARY_DIR) + ExternalProject_Get_Property (snappy SOURCE_DIR) + if (CMAKE_VERBOSE_MAKEFILE) + print_ep_logs (snappy) + endif () + file (MAKE_DIRECTORY ${SOURCE_DIR}/snappy) + set_target_properties (snappy_lib PROPERTIES + IMPORTED_LOCATION_DEBUG + ${BINARY_DIR}/${ep_lib_prefix}snappy_d${ep_lib_suffix} + IMPORTED_LOCATION_RELEASE + ${BINARY_DIR}/${ep_lib_prefix}snappy${ep_lib_suffix} + INTERFACE_INCLUDE_DIRECTORIES + ${SOURCE_DIR}) +endif() + +add_dependencies (snappy_lib snappy) +target_link_libraries (ripple_libs INTERFACE snappy_lib) +exclude_if_included (snappy) +exclude_if_included (snappy_lib) diff --git a/Builds/CMake/deps/Soci.cmake b/Builds/CMake/deps/Soci.cmake new file mode 100644 index 00000000000..d165d6e1f84 --- /dev/null +++ b/Builds/CMake/deps/Soci.cmake @@ -0,0 +1,165 @@ +#[===================================================================[ + NIH dep: soci +#]===================================================================] + +foreach (_comp core empty sqlite3) + add_library ("soci_${_comp}" STATIC IMPORTED GLOBAL) +endforeach () + +if (NOT WIN32) + find_package(soci) +endif() + +if (soci) + foreach (_comp core empty sqlite3) + set_target_properties ("soci_${_comp}" PROPERTIES + IMPORTED_LOCATION_DEBUG + ${soci} + IMPORTED_LOCATION_RELEASE + ${soci} + INTERFACE_INCLUDE_DIRECTORIES + ${SOCI_INCLUDE_DIR}) + endforeach () + +else() + set (soci_lib_pre ${ep_lib_prefix}) + set (soci_lib_post "") + if (WIN32) + # for some reason soci on windows still prepends lib (non-standard) + set (soci_lib_pre lib) + # this version in the name might change if/when we change versions of soci + set (soci_lib_post "_4_0") + endif () + get_target_property (_boost_incs Boost::date_time INTERFACE_INCLUDE_DIRECTORIES) + get_target_property (_boost_dt Boost::date_time IMPORTED_LOCATION) + if (NOT _boost_dt) + get_target_property (_boost_dt Boost::date_time IMPORTED_LOCATION_RELEASE) + endif () + if (NOT _boost_dt) + get_target_property (_boost_dt Boost::date_time IMPORTED_LOCATION_DEBUG) + endif () + + ExternalProject_Add (soci + PREFIX ${nih_cache_path} + GIT_REPOSITORY https://github.com/SOCI/soci.git + GIT_TAG 04e1870294918d20761736743bb6136314c42dd5 + # We had an issue with soci integer range checking for boost::optional + # and needed to remove the exception that SOCI throws in this case. + # This is *probably* a bug in SOCI, but has never been investigated more + # nor reported to the maintainers. + # This cmake script comments out the lines in question. + # This patch process is likely fragile and should be reviewed carefully + # whenever we update the GIT_TAG above. 
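+    # (The -P form below runs soci_patch.cmake in CMake script mode as the
+    #  patch step; RIPPLED_SOURCE is passed with -D so the script can locate
+    #  files inside this repository.)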
+ PATCH_COMMAND + ${CMAKE_COMMAND} -D RIPPLED_SOURCE=${CMAKE_CURRENT_SOURCE_DIR} + -P ${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/soci_patch.cmake + CMAKE_ARGS + -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER} + -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} + $<$:-DCMAKE_VERBOSE_MAKEFILE=ON> + $<$:-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}> + $<$:-DVCPKG_TARGET_TRIPLET=${VCPKG_TARGET_TRIPLET}> + $<$:-DCMAKE_UNITY_BUILD=ON}> + -DCMAKE_PREFIX_PATH=${CMAKE_BINARY_DIR}/sqlite3 + -DCMAKE_MODULE_PATH=${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake + -DCMAKE_INCLUDE_PATH=$,::> + -DCMAKE_LIBRARY_PATH=${sqlite_BINARY_DIR} + -DCMAKE_DEBUG_POSTFIX=_d + $<$>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}> + -DSOCI_CXX_C11=ON + -DSOCI_STATIC=ON + -DSOCI_LIBDIR=lib + -DSOCI_SHARED=OFF + -DSOCI_TESTS=OFF + # hacks to workaround the fact that soci doesn't currently use + # boost imported targets in its cmake. If they switch to + # proper imported targets, this next line can be removed + # (as well as the get_property above that sets _boost_incs) + -DBoost_INCLUDE_DIRS=$ + -DBoost_INCLUDE_DIR=$ + -DBOOST_ROOT=${BOOST_ROOT} + -DWITH_BOOST=ON + -DBoost_FOUND=ON + -DBoost_NO_BOOST_CMAKE=ON + -DBoost_DATE_TIME_FOUND=ON + -DSOCI_HAVE_BOOST=ON + -DSOCI_HAVE_BOOST_DATE_TIME=ON + -DBoost_DATE_TIME_LIBRARY=${_boost_dt} + -DSOCI_DB2=OFF + -DSOCI_FIREBIRD=OFF + -DSOCI_MYSQL=OFF + -DSOCI_ODBC=OFF + -DSOCI_ORACLE=OFF + -DSOCI_POSTGRESQL=OFF + -DSOCI_SQLITE3=ON + -DSQLITE3_INCLUDE_DIR=$,::> + -DSQLITE3_LIBRARY=$,$,$> + $<$:-DCMAKE_FIND_FRAMEWORK=LAST> + $<$: + "-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -EHa -MP" + "-DCMAKE_CXX_FLAGS_DEBUG=-MTd" + "-DCMAKE_CXX_FLAGS_RELEASE=-MT" + > + $<$>: + "-DCMAKE_CXX_FLAGS=-Wno-deprecated-declarations" + > + # SEE: https://github.com/SOCI/soci/issues/640 + $<$,$>: + "-DCMAKE_CXX_FLAGS=-Wno-deprecated-declarations -Wno-error=format-overflow -Wno-format-overflow -Wno-error=format-truncation" + > + LIST_SEPARATOR :: + LOG_BUILD ON + LOG_CONFIGURE ON + BUILD_COMMAND + ${CMAKE_COMMAND} + --build . 
+ --config $ + --parallel ${ep_procs} + $<$: + COMMAND + ${CMAKE_COMMAND} -E copy + /lib/$/${soci_lib_pre}soci_core${soci_lib_post}$<$:_d>${ep_lib_suffix} + /lib/$/${soci_lib_pre}soci_empty${soci_lib_post}$<$:_d>${ep_lib_suffix} + /lib/$/${soci_lib_pre}soci_sqlite3${soci_lib_post}$<$:_d>${ep_lib_suffix} + /lib + > + TEST_COMMAND "" + INSTALL_COMMAND "" + DEPENDS sqlite + BUILD_BYPRODUCTS + /lib/${soci_lib_pre}soci_core${soci_lib_post}${ep_lib_suffix} + /lib/${soci_lib_pre}soci_core${soci_lib_post}_d${ep_lib_suffix} + /lib/${soci_lib_pre}soci_empty${soci_lib_post}${ep_lib_suffix} + /lib/${soci_lib_pre}soci_empty${soci_lib_post}_d${ep_lib_suffix} + /lib/${soci_lib_pre}soci_sqlite3${soci_lib_post}${ep_lib_suffix} + /lib/${soci_lib_pre}soci_sqlite3${soci_lib_post}_d${ep_lib_suffix} + ) + ExternalProject_Get_Property (soci BINARY_DIR) + ExternalProject_Get_Property (soci SOURCE_DIR) + if (CMAKE_VERBOSE_MAKEFILE) + print_ep_logs (soci) + endif () + file (MAKE_DIRECTORY ${SOURCE_DIR}/include) + file (MAKE_DIRECTORY ${BINARY_DIR}/include) + foreach (_comp core empty sqlite3) + set_target_properties ("soci_${_comp}" PROPERTIES + IMPORTED_LOCATION_DEBUG + ${BINARY_DIR}/lib/${soci_lib_pre}soci_${_comp}${soci_lib_post}_d${ep_lib_suffix} + IMPORTED_LOCATION_RELEASE + ${BINARY_DIR}/lib/${soci_lib_pre}soci_${_comp}${soci_lib_post}${ep_lib_suffix} + INTERFACE_INCLUDE_DIRECTORIES + "${SOURCE_DIR}/include;${BINARY_DIR}/include") + add_dependencies ("soci_${_comp}" soci) # something has to depend on the ExternalProject to trigger it + target_link_libraries (ripple_libs INTERFACE "soci_${_comp}") + if (NOT _comp STREQUAL "core") + target_link_libraries ("soci_${_comp}" INTERFACE soci_core) + endif () + endforeach () +endif() + +foreach (_comp core empty sqlite3) + exclude_if_included ("soci_${_comp}") +endforeach () + + +exclude_if_included (soci) diff --git a/Builds/CMake/deps/Sqlite.cmake b/Builds/CMake/deps/Sqlite.cmake new file mode 100644 index 00000000000..7b34c1121f2 --- /dev/null +++ b/Builds/CMake/deps/Sqlite.cmake @@ -0,0 +1,93 @@ +#[===================================================================[ + NIH dep: sqlite +#]===================================================================] + +add_library (sqlite STATIC IMPORTED GLOBAL) + +if (NOT WIN32) + find_package(sqlite) +endif() + + +if(sqlite3) + set_target_properties (sqlite PROPERTIES + IMPORTED_LOCATION_DEBUG + ${sqlite3} + IMPORTED_LOCATION_RELEASE + ${sqlite3} + INTERFACE_INCLUDE_DIRECTORIES + ${SQLITE_INCLUDE_DIR}) + +else() + ExternalProject_Add (sqlite3 + PREFIX ${nih_cache_path} + # sqlite doesn't use git, but it provides versioned tarballs + URL https://www.sqlite.org/2018/sqlite-amalgamation-3260000.zip + http://www.sqlite.org/2018/sqlite-amalgamation-3260000.zip + https://www2.sqlite.org/2018/sqlite-amalgamation-3260000.zip + http://www2.sqlite.org/2018/sqlite-amalgamation-3260000.zip + # ^^^ version is apparent in the URL: 3260000 => 3.26.0 + URL_HASH SHA256=de5dcab133aa339a4cf9e97c40aa6062570086d6085d8f9ad7bc6ddf8a52096e + # Don't need to worry about MITM attacks too much because the download + # is checked against a strong hash + TLS_VERIFY false + # we wrote a very simple CMake file to build sqlite + # so that's what we copy here so that we can build with + # CMake. sqlite doesn't generally provided a build system + # for the single amalgamation source file. 
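+    # (That helper is presumably little more than an add_library of the
+    #  amalgamation's sqlite3.c; the copy_if_different below installs it as
+    #  the CMakeLists.txt of the unpacked source tree.)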
+ PATCH_COMMAND + ${CMAKE_COMMAND} -E copy_if_different + ${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/CMake_sqlite3.txt + /CMakeLists.txt + CMAKE_ARGS + -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER} + -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} + $<$:-DCMAKE_VERBOSE_MAKEFILE=ON> + -DCMAKE_DEBUG_POSTFIX=_d + $<$>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}> + $<$: + "-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP" + "-DCMAKE_C_FLAGS_DEBUG=-MTd" + "-DCMAKE_C_FLAGS_RELEASE=-MT" + > + LOG_BUILD ON + LOG_CONFIGURE ON + BUILD_COMMAND + ${CMAKE_COMMAND} + --build . + --config $ + --parallel ${ep_procs} + $<$: + COMMAND + ${CMAKE_COMMAND} -E copy + /$/${ep_lib_prefix}sqlite3$<$:_d>${ep_lib_suffix} + + > + TEST_COMMAND "" + INSTALL_COMMAND "" + BUILD_BYPRODUCTS + /${ep_lib_prefix}sqlite3${ep_lib_suffix} + /${ep_lib_prefix}sqlite3_d${ep_lib_suffix} + ) + ExternalProject_Get_Property (sqlite3 BINARY_DIR) + ExternalProject_Get_Property (sqlite3 SOURCE_DIR) + if (CMAKE_VERBOSE_MAKEFILE) + print_ep_logs (sqlite3) + endif () + + set_target_properties (sqlite PROPERTIES + IMPORTED_LOCATION_DEBUG + ${BINARY_DIR}/${ep_lib_prefix}sqlite3_d${ep_lib_suffix} + IMPORTED_LOCATION_RELEASE + ${BINARY_DIR}/${ep_lib_prefix}sqlite3${ep_lib_suffix} + INTERFACE_INCLUDE_DIRECTORIES + ${SOURCE_DIR}) + + add_dependencies (sqlite sqlite3) + exclude_if_included (sqlite3) +endif() + +target_link_libraries (sqlite INTERFACE $<$>:dl>) +target_link_libraries (ripple_libs INTERFACE sqlite) +exclude_if_included (sqlite) +set(sqlite_BINARY_DIR ${BINARY_DIR}) diff --git a/Builds/CMake/deps/cassandra.cmake b/Builds/CMake/deps/cassandra.cmake new file mode 100644 index 00000000000..4563a34137e --- /dev/null +++ b/Builds/CMake/deps/cassandra.cmake @@ -0,0 +1,167 @@ +if(reporting) + find_library(cassandra NAMES cassandra) + if(NOT cassandra) + + message("System installed Cassandra cpp driver not found. Will build") + + find_library(zlib NAMES zlib1g-dev zlib-devel zlib z) + if(NOT zlib) + message("zlib not found. will build") + add_library(zlib STATIC IMPORTED GLOBAL) + ExternalProject_Add(zlib_src + PREFIX ${nih_cache_path} + GIT_REPOSITORY https://github.com/madler/zlib.git + GIT_TAG v1.2.12 + INSTALL_COMMAND "" + BUILD_BYPRODUCTS /${ep_lib_prefix}z.a + LOG_BUILD TRUE + LOG_CONFIGURE TRUE + ) + + + ExternalProject_Get_Property (zlib_src SOURCE_DIR) + ExternalProject_Get_Property (zlib_src BINARY_DIR) + set (zlib_src_SOURCE_DIR "${SOURCE_DIR}") + file (MAKE_DIRECTORY ${zlib_src_SOURCE_DIR}/include) + + set_target_properties (zlib PROPERTIES + IMPORTED_LOCATION + ${BINARY_DIR}/${ep_lib_prefix}z.a + INTERFACE_INCLUDE_DIRECTORIES + ${SOURCE_DIR}/include) + add_dependencies(zlib zlib_src) + + file(TO_CMAKE_PATH "${zlib_src_SOURCE_DIR}" zlib_src_SOURCE_DIR) + endif() + + + + + find_library(krb5 NAMES krb5-dev libkrb5-dev) + + if(NOT krb5) + message("krb5 not found. 
will build") + add_library(krb5 STATIC IMPORTED GLOBAL) + ExternalProject_Add(krb5_src + PREFIX ${nih_cache_path} + GIT_REPOSITORY https://github.com/krb5/krb5.git + GIT_TAG krb5-1.20-final + UPDATE_COMMAND "" + CONFIGURE_COMMAND autoreconf src && CFLAGS=-fcommon ./src/configure --enable-static --disable-shared > /dev/null + BUILD_IN_SOURCE 1 + BUILD_COMMAND make + INSTALL_COMMAND "" + BUILD_BYPRODUCTS /lib/${ep_lib_prefix}krb5.a + LOG_BUILD TRUE + ) + + ExternalProject_Get_Property (krb5_src SOURCE_DIR) + ExternalProject_Get_Property (krb5_src BINARY_DIR) + set (krb5_src_SOURCE_DIR "${SOURCE_DIR}") + file (MAKE_DIRECTORY ${krb5_src_SOURCE_DIR}/include) + + set_target_properties (krb5 PROPERTIES + IMPORTED_LOCATION + ${BINARY_DIR}/lib/${ep_lib_prefix}krb5.a + INTERFACE_INCLUDE_DIRECTORIES + ${SOURCE_DIR}/include) + add_dependencies(krb5 krb5_src) + + file(TO_CMAKE_PATH "${krb5_src_SOURCE_DIR}" krb5_src_SOURCE_DIR) + endif() + + + find_library(libuv1 NAMES uv1 libuv1 liubuv1-dev libuv1:amd64) + + + if(NOT libuv1) + message("libuv1 not found, will build") + add_library(libuv1 STATIC IMPORTED GLOBAL) + ExternalProject_Add(libuv_src + PREFIX ${nih_cache_path} + GIT_REPOSITORY https://github.com/libuv/libuv.git + GIT_TAG v1.44.2 + INSTALL_COMMAND "" + BUILD_BYPRODUCTS /${ep_lib_prefix}uv_a.a + LOG_BUILD TRUE + LOG_CONFIGURE TRUE + ) + + ExternalProject_Get_Property (libuv_src SOURCE_DIR) + ExternalProject_Get_Property (libuv_src BINARY_DIR) + set (libuv_src_SOURCE_DIR "${SOURCE_DIR}") + file (MAKE_DIRECTORY ${libuv_src_SOURCE_DIR}/include) + + set_target_properties (libuv1 PROPERTIES + IMPORTED_LOCATION + ${BINARY_DIR}/${ep_lib_prefix}uv_a.a + INTERFACE_INCLUDE_DIRECTORIES + ${SOURCE_DIR}/include) + add_dependencies(libuv1 libuv_src) + + file(TO_CMAKE_PATH "${libuv_src_SOURCE_DIR}" libuv_src_SOURCE_DIR) + endif() + + add_library (cassandra STATIC IMPORTED GLOBAL) + ExternalProject_Add(cassandra_src + PREFIX ${nih_cache_path} + GIT_REPOSITORY https://github.com/datastax/cpp-driver.git + GIT_TAG 2.16.2 + CMAKE_ARGS + -DLIBUV_ROOT_DIR=${BINARY_DIR} + -DLIBUV_LIBARY=${BINARY_DIR}/libuv_a.a + -DLIBUV_INCLUDE_DIR=${SOURCE_DIR}/include + -DCASS_BUILD_STATIC=ON + -DCASS_BUILD_SHARED=OFF + -DOPENSSL_ROOT_DIR=/opt/local/openssl + INSTALL_COMMAND "" + BUILD_BYPRODUCTS /${ep_lib_prefix}cassandra_static.a + LOG_BUILD TRUE + LOG_CONFIGURE TRUE + ) + + ExternalProject_Get_Property (cassandra_src SOURCE_DIR) + ExternalProject_Get_Property (cassandra_src BINARY_DIR) + set (cassandra_src_SOURCE_DIR "${SOURCE_DIR}") + file (MAKE_DIRECTORY ${cassandra_src_SOURCE_DIR}/include) + + set_target_properties (cassandra PROPERTIES + IMPORTED_LOCATION + ${BINARY_DIR}/${ep_lib_prefix}cassandra_static.a + INTERFACE_INCLUDE_DIRECTORIES + ${SOURCE_DIR}/include) + add_dependencies(cassandra cassandra_src) + + if(NOT libuv1) + ExternalProject_Add_StepDependencies(cassandra_src build libuv1) + target_link_libraries(cassandra INTERFACE libuv1) + else() + target_link_libraries(cassandra INTERFACE ${libuv1}) + endif() + if(NOT krb5) + + ExternalProject_Add_StepDependencies(cassandra_src build krb5) + target_link_libraries(cassandra INTERFACE krb5) + else() + target_link_libraries(cassandra INTERFACE ${krb5}) + endif() + + if(NOT zlib) + ExternalProject_Add_StepDependencies(cassandra_src build zlib) + target_link_libraries(cassandra INTERFACE zlib) + else() + target_link_libraries(cassandra INTERFACE ${zlib}) + endif() + + file(TO_CMAKE_PATH "${cassandra_src_SOURCE_DIR}" cassandra_src_SOURCE_DIR) + target_link_libraries(ripple_libs 
INTERFACE cassandra) + else() + message("Found system installed cassandra cpp driver") + + find_path(cassandra_includes NAMES cassandra.h REQUIRED) + target_link_libraries (ripple_libs INTERFACE ${cassandra}) + target_include_directories(ripple_libs INTERFACE ${cassandra_includes}) + endif() + + exclude_if_included (cassandra) +endif() diff --git a/Builds/CMake/deps/date.cmake b/Builds/CMake/deps/date.cmake new file mode 100644 index 00000000000..b9155c26475 --- /dev/null +++ b/Builds/CMake/deps/date.cmake @@ -0,0 +1,18 @@ +#[===================================================================[ + NIH dep: date + + the main library is header-only, thus is an INTERFACE lib in CMake. + + NOTE: this has been accepted into c++20 so can likely be replaced + when we update to that standard +#]===================================================================] + +find_package (date QUIET) +if (NOT TARGET date::date) + FetchContent_Declare( + hh_date_src + GIT_REPOSITORY https://github.com/HowardHinnant/date.git + GIT_TAG fc4cf092f9674f2670fb9177edcdee870399b829 + ) + FetchContent_MakeAvailable(hh_date_src) +endif () diff --git a/Builds/CMake/deps/gRPC.cmake b/Builds/CMake/deps/gRPC.cmake index 44185b3a248..8dd09417563 100644 --- a/Builds/CMake/deps/gRPC.cmake +++ b/Builds/CMake/deps/gRPC.cmake @@ -1,15 +1,319 @@ -find_package(gRPC 1.23) + +# currently linking to unsecure versions...if we switch, we'll +# need to add ssl as a link dependency to the grpc targets +option (use_secure_grpc "use TLS version of grpc libs." OFF) +if (use_secure_grpc) + set (grpc_suffix "") +else () + set (grpc_suffix "_unsecure") +endif () + +find_package (gRPC 1.23 CONFIG QUIET) +if (TARGET gRPC::gpr AND NOT local_grpc) + get_target_property (_grpc_l gRPC::gpr IMPORTED_LOCATION_DEBUG) + if (NOT _grpc_l) + get_target_property (_grpc_l gRPC::gpr IMPORTED_LOCATION_RELEASE) + endif () + if (NOT _grpc_l) + get_target_property (_grpc_l gRPC::gpr IMPORTED_LOCATION) + endif () + message (STATUS "Found cmake config for gRPC. Using ${_grpc_l}.") +else () + find_package (PkgConfig QUIET) + if (PKG_CONFIG_FOUND) + pkg_check_modules (grpc QUIET "grpc${grpc_suffix}>=1.25" "grpc++${grpc_suffix}" gpr) + endif () + + if (grpc_FOUND) + message (STATUS "Found gRPC using pkg-config. 
Using ${grpc_gpr_PREFIX}.") + endif () + + add_executable (gRPC::grpc_cpp_plugin IMPORTED) + exclude_if_included (gRPC::grpc_cpp_plugin) + + if (grpc_FOUND AND NOT local_grpc) + # use installed grpc (via pkg-config) + macro (add_imported_grpc libname_) + if (static) + set (_search "${CMAKE_STATIC_LIBRARY_PREFIX}${libname_}${CMAKE_STATIC_LIBRARY_SUFFIX}") + else () + set (_search "${CMAKE_SHARED_LIBRARY_PREFIX}${libname_}${CMAKE_SHARED_LIBRARY_SUFFIX}") + endif() + find_library(_found_${libname_} + NAMES ${_search} + HINTS ${grpc_LIBRARY_DIRS}) + if (_found_${libname_}) + message (STATUS "importing ${libname_} as ${_found_${libname_}}") + else () + message (FATAL_ERROR "using pkg-config for grpc, can't find ${_search}") + endif () + add_library ("gRPC::${libname_}" STATIC IMPORTED GLOBAL) + set_target_properties ("gRPC::${libname_}" PROPERTIES IMPORTED_LOCATION ${_found_${libname_}}) + if (grpc_INCLUDE_DIRS) + set_target_properties ("gRPC::${libname_}" PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${grpc_INCLUDE_DIRS}) + endif () + target_link_libraries (ripple_libs INTERFACE "gRPC::${libname_}") + exclude_if_included ("gRPC::${libname_}") + endmacro () + + set_target_properties (gRPC::grpc_cpp_plugin PROPERTIES + IMPORTED_LOCATION "${grpc_gpr_PREFIX}/bin/grpc_cpp_plugin${CMAKE_EXECUTABLE_SUFFIX}") + + pkg_check_modules (cares QUIET libcares) + if (cares_FOUND) + if (static) + set (_search "${CMAKE_STATIC_LIBRARY_PREFIX}cares${CMAKE_STATIC_LIBRARY_SUFFIX}") + set (_prefix cares_STATIC) + set (_static STATIC) + else () + set (_search "${CMAKE_SHARED_LIBRARY_PREFIX}cares${CMAKE_SHARED_LIBRARY_SUFFIX}") + set (_prefix cares) + set (_static) + endif() + find_library(_location NAMES ${_search} HINTS ${cares_LIBRARY_DIRS}) + if (NOT _location) + message (FATAL_ERROR "using pkg-config for grpc, can't find c-ares") + endif () + add_library (c-ares::cares ${_static} IMPORTED GLOBAL) + set_target_properties (c-ares::cares PROPERTIES + IMPORTED_LOCATION ${_location} + INTERFACE_INCLUDE_DIRECTORIES "${${_prefix}_INCLUDE_DIRS}" + INTERFACE_LINK_OPTIONS "${${_prefix}_LDFLAGS}" + ) + exclude_if_included (c-ares::cares) + else () + message (FATAL_ERROR "using pkg-config for grpc, can't find c-ares") + endif () + else () + #[===========================[ + c-ares (grpc requires) + #]===========================] + ExternalProject_Add (c-ares_src + PREFIX ${nih_cache_path} + GIT_REPOSITORY https://github.com/c-ares/c-ares.git + GIT_TAG cares-1_15_0 + CMAKE_ARGS + -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} + $<$:-DCMAKE_VERBOSE_MAKEFILE=ON> + -DCMAKE_DEBUG_POSTFIX=_d + $<$>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}> + -DCMAKE_INSTALL_PREFIX=/_installed_ + -DCARES_SHARED=OFF + -DCARES_STATIC=ON + -DCARES_STATIC_PIC=ON + -DCARES_INSTALL=ON + -DCARES_MSVC_STATIC_RUNTIME=ON + $<$: + "-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP" + > + LOG_BUILD ON + LOG_CONFIGURE ON + BUILD_COMMAND + ${CMAKE_COMMAND} + --build . + --config $ + --parallel ${ep_procs} + TEST_COMMAND "" + INSTALL_COMMAND + ${CMAKE_COMMAND} -E env --unset=DESTDIR ${CMAKE_COMMAND} --build . 
--config $ --target install + BUILD_BYPRODUCTS + /_installed_/lib/${ep_lib_prefix}cares${ep_lib_suffix} + /_installed_/lib/${ep_lib_prefix}cares_d${ep_lib_suffix} + ) + exclude_if_included (c-ares_src) + ExternalProject_Get_Property (c-ares_src BINARY_DIR) + set (cares_binary_dir "${BINARY_DIR}") + + add_library (c-ares::cares STATIC IMPORTED GLOBAL) + file (MAKE_DIRECTORY ${BINARY_DIR}/_installed_/include) + set_target_properties (c-ares::cares PROPERTIES + IMPORTED_LOCATION_DEBUG + ${BINARY_DIR}/_installed_/lib/${ep_lib_prefix}cares_d${ep_lib_suffix} + IMPORTED_LOCATION_RELEASE + ${BINARY_DIR}/_installed_/lib/${ep_lib_prefix}cares${ep_lib_suffix} + INTERFACE_INCLUDE_DIRECTORIES + ${BINARY_DIR}/_installed_/include) + add_dependencies (c-ares::cares c-ares_src) + exclude_if_included (c-ares::cares) + + if (NOT has_zlib) + #[===========================[ + zlib (grpc requires) + #]===========================] + if (MSVC) + set (zlib_debug_postfix "d") # zlib cmake sets this internally for MSVC, so we really don't have a choice + set (zlib_base "zlibstatic") + else () + set (zlib_debug_postfix "_d") + set (zlib_base "z") + endif () + ExternalProject_Add (zlib_src + PREFIX ${nih_cache_path} + GIT_REPOSITORY https://github.com/madler/zlib.git + GIT_TAG v1.2.11 + CMAKE_ARGS + -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} + $<$:-DCMAKE_VERBOSE_MAKEFILE=ON> + -DCMAKE_DEBUG_POSTFIX=${zlib_debug_postfix} + $<$>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}> + -DCMAKE_INSTALL_PREFIX=/_installed_ + -DBUILD_SHARED_LIBS=OFF + $<$: + "-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP" + "-DCMAKE_C_FLAGS_DEBUG=-MTd" + "-DCMAKE_C_FLAGS_RELEASE=-MT" + > + LOG_BUILD ON + LOG_CONFIGURE ON + BUILD_COMMAND + ${CMAKE_COMMAND} + --build . + --config $ + --parallel ${ep_procs} + TEST_COMMAND "" + INSTALL_COMMAND + ${CMAKE_COMMAND} -E env --unset=DESTDIR ${CMAKE_COMMAND} --build . 
--config $ --target install + BUILD_BYPRODUCTS + /_installed_/lib/${ep_lib_prefix}${zlib_base}${ep_lib_suffix} + /_installed_/lib/${ep_lib_prefix}${zlib_base}${zlib_debug_postfix}${ep_lib_suffix} + ) + exclude_if_included (zlib_src) + ExternalProject_Get_Property (zlib_src BINARY_DIR) + set (zlib_binary_dir "${BINARY_DIR}") + + add_library (ZLIB::ZLIB STATIC IMPORTED GLOBAL) + file (MAKE_DIRECTORY ${BINARY_DIR}/_installed_/include) + set_target_properties (ZLIB::ZLIB PROPERTIES + IMPORTED_LOCATION_DEBUG + ${BINARY_DIR}/_installed_/lib/${ep_lib_prefix}${zlib_base}${zlib_debug_postfix}${ep_lib_suffix} + IMPORTED_LOCATION_RELEASE + ${BINARY_DIR}/_installed_/lib/${ep_lib_prefix}${zlib_base}${ep_lib_suffix} + INTERFACE_INCLUDE_DIRECTORIES + ${BINARY_DIR}/_installed_/include) + add_dependencies (ZLIB::ZLIB zlib_src) + exclude_if_included (ZLIB::ZLIB) + endif () + + #[===========================[ + grpc + #]===========================] + ExternalProject_Add (grpc_src + PREFIX ${nih_cache_path} + GIT_REPOSITORY https://github.com/grpc/grpc.git + GIT_TAG v1.25.0 + CMAKE_ARGS + -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER} + -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} + $<$:-DCMAKE_VERBOSE_MAKEFILE=ON> + $<$:-DCMAKE_TOOLCHAIN_FILE=${CMAKE_TOOLCHAIN_FILE}> + $<$:-DVCPKG_TARGET_TRIPLET=${VCPKG_TARGET_TRIPLET}> + $<$:-DCMAKE_UNITY_BUILD=ON}> + -DCMAKE_DEBUG_POSTFIX=_d + $<$>:-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}> + -DgRPC_BUILD_TESTS=OFF + -DgRPC_BENCHMARK_PROVIDER="" + -DgRPC_BUILD_CSHARP_EXT=OFF + -DgRPC_MSVC_STATIC_RUNTIME=ON + -DgRPC_INSTALL=OFF + -DgRPC_CARES_PROVIDER=package + -Dc-ares_DIR=${cares_binary_dir}/_installed_/lib/cmake/c-ares + -DgRPC_SSL_PROVIDER=package + -DOPENSSL_ROOT_DIR=${OPENSSL_ROOT_DIR} + -DgRPC_PROTOBUF_PROVIDER=package + -DProtobuf_USE_STATIC_LIBS=$,$>>,OFF,ON> + -DProtobuf_INCLUDE_DIR=$,:_:> + -DProtobuf_LIBRARY=$,$,$> + -DProtobuf_PROTOC_LIBRARY=$,$,$> + -DProtobuf_PROTOC_EXECUTABLE=$ + -DgRPC_ZLIB_PROVIDER=package + $<$>:-DZLIB_ROOT=${zlib_binary_dir}/_installed_> + $<$: + "-DCMAKE_CXX_FLAGS=-GR -Gd -fp:precise -FS -EHa -MP" + "-DCMAKE_C_FLAGS=-GR -Gd -fp:precise -FS -MP" + > + LOG_BUILD ON + LOG_CONFIGURE ON + BUILD_COMMAND + ${CMAKE_COMMAND} + --build . 
+ --config $ + --parallel ${ep_procs} + $<$: + COMMAND + ${CMAKE_COMMAND} -E copy + /$/${ep_lib_prefix}grpc${grpc_suffix}$<$:_d>${ep_lib_suffix} + /$/${ep_lib_prefix}grpc++${grpc_suffix}$<$:_d>${ep_lib_suffix} + /$/${ep_lib_prefix}address_sorting$<$:_d>${ep_lib_suffix} + /$/${ep_lib_prefix}gpr$<$:_d>${ep_lib_suffix} + /$/grpc_cpp_plugin${CMAKE_EXECUTABLE_SUFFIX} + + > + LIST_SEPARATOR :_: + TEST_COMMAND "" + INSTALL_COMMAND "" + DEPENDS c-ares_src + BUILD_BYPRODUCTS + /${ep_lib_prefix}grpc${grpc_suffix}${ep_lib_suffix} + /${ep_lib_prefix}grpc${grpc_suffix}_d${ep_lib_suffix} + /${ep_lib_prefix}grpc++${grpc_suffix}${ep_lib_suffix} + /${ep_lib_prefix}grpc++${grpc_suffix}_d${ep_lib_suffix} + /${ep_lib_prefix}address_sorting${ep_lib_suffix} + /${ep_lib_prefix}address_sorting_d${ep_lib_suffix} + /${ep_lib_prefix}gpr${ep_lib_suffix} + /${ep_lib_prefix}gpr_d${ep_lib_suffix} + /grpc_cpp_plugin${CMAKE_EXECUTABLE_SUFFIX} + ) + if (TARGET protobuf_src) + ExternalProject_Add_StepDependencies(grpc_src build protobuf_src) + endif () + exclude_if_included (grpc_src) + ExternalProject_Get_Property (grpc_src BINARY_DIR) + ExternalProject_Get_Property (grpc_src SOURCE_DIR) + set (grpc_binary_dir "${BINARY_DIR}") + set (grpc_source_dir "${SOURCE_DIR}") + if (CMAKE_VERBOSE_MAKEFILE) + print_ep_logs (grpc_src) + endif () + file (MAKE_DIRECTORY ${SOURCE_DIR}/include) + + macro (add_imported_grpc libname_) + add_library ("gRPC::${libname_}" STATIC IMPORTED GLOBAL) + set_target_properties ("gRPC::${libname_}" PROPERTIES + IMPORTED_LOCATION_DEBUG + ${grpc_binary_dir}/${ep_lib_prefix}${libname_}_d${ep_lib_suffix} + IMPORTED_LOCATION_RELEASE + ${grpc_binary_dir}/${ep_lib_prefix}${libname_}${ep_lib_suffix} + INTERFACE_INCLUDE_DIRECTORIES + ${grpc_source_dir}/include) + add_dependencies ("gRPC::${libname_}" grpc_src) + target_link_libraries (ripple_libs INTERFACE "gRPC::${libname_}") + exclude_if_included ("gRPC::${libname_}") + endmacro () + + set_target_properties (gRPC::grpc_cpp_plugin PROPERTIES + IMPORTED_LOCATION "${grpc_binary_dir}/grpc_cpp_plugin${CMAKE_EXECUTABLE_SUFFIX}") + add_dependencies (gRPC::grpc_cpp_plugin grpc_src) + endif () + + add_imported_grpc (gpr) + add_imported_grpc ("grpc${grpc_suffix}") + add_imported_grpc ("grpc++${grpc_suffix}") + add_imported_grpc (address_sorting) + + target_link_libraries ("gRPC::grpc${grpc_suffix}" INTERFACE c-ares::cares gRPC::gpr gRPC::address_sorting ZLIB::ZLIB) + target_link_libraries ("gRPC::grpc++${grpc_suffix}" INTERFACE "gRPC::grpc${grpc_suffix}" gRPC::gpr) +endif () #[=================================[ generate protobuf sources for grpc defs and bundle into a static lib #]=================================] -set(GRPC_GEN_DIR "${CMAKE_BINARY_DIR}/proto_gen_grpc") -file(MAKE_DIRECTORY ${GRPC_GEN_DIR}) -set(GRPC_PROTO_SRCS) -set(GRPC_PROTO_HDRS) -set(GRPC_PROTO_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/proto/org") +set (GRPC_GEN_DIR "${CMAKE_BINARY_DIR}/proto_gen_grpc") +file (MAKE_DIRECTORY ${GRPC_GEN_DIR}) +set (GRPC_PROTO_SRCS) +set (GRPC_PROTO_HDRS) +set (GRPC_PROTO_ROOT "${CMAKE_CURRENT_SOURCE_DIR}/src/ripple/proto/org") file(GLOB_RECURSE GRPC_DEFINITION_FILES LIST_DIRECTORIES false "${GRPC_PROTO_ROOT}/*.proto") foreach(file ${GRPC_DEFINITION_FILES}) get_filename_component(_abs_file ${file} ABSOLUTE) @@ -20,10 +324,10 @@ foreach(file ${GRPC_DEFINITION_FILES}) get_filename_component(_rel_root_dir ${_rel_root_file} DIRECTORY) file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir}) - set(src_1 
"${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.cc") - set(src_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.cc") - set(hdr_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.h") - set(hdr_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.h") + set (src_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.cc") + set (src_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.cc") + set (hdr_1 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.grpc.pb.h") + set (hdr_2 "${GRPC_GEN_DIR}/${_rel_root_dir}/${_basename}.pb.h") add_custom_command( OUTPUT ${src_1} ${src_2} ${hdr_1} ${hdr_2} COMMAND protobuf::protoc @@ -41,22 +345,20 @@ foreach(file ${GRPC_DEFINITION_FILES}) list(APPEND GRPC_PROTO_HDRS ${hdr_1} ${hdr_2}) endforeach() -add_library(grpc_pbufs STATIC ${GRPC_PROTO_SRCS} ${GRPC_PROTO_HDRS}) -#target_include_directories(grpc_pbufs PRIVATE src) -target_include_directories(grpc_pbufs SYSTEM PUBLIC ${GRPC_GEN_DIR}) -target_link_libraries(grpc_pbufs - "gRPC::grpc++" - # libgrpc is missing references. - absl::random_random -) -target_compile_options(grpc_pbufs +add_library (grpc_pbufs STATIC ${GRPC_PROTO_SRCS} ${GRPC_PROTO_HDRS}) +#target_include_directories (grpc_pbufs PRIVATE src) +target_include_directories (grpc_pbufs SYSTEM PUBLIC ${GRPC_GEN_DIR}) +target_link_libraries (grpc_pbufs protobuf::libprotobuf "gRPC::grpc++${grpc_suffix}") +target_compile_options (grpc_pbufs PRIVATE $<$:-wd4065> $<$>:-Wno-deprecated-declarations> PUBLIC $<$:-wd4996> - $<$: + $<$: --system-header-prefix="google/protobuf" -Wno-deprecated-dynamic-exception-spec >) -add_library(Ripple::grpc_pbufs ALIAS grpc_pbufs) +add_library (Ripple::grpc_pbufs ALIAS grpc_pbufs) +target_link_libraries (ripple_libs INTERFACE Ripple::grpc_pbufs) +exclude_if_included (grpc_pbufs) diff --git a/Builds/CMake/rocks_thirdparty.inc b/Builds/CMake/rocks_thirdparty.inc new file mode 100644 index 00000000000..cba23df71c8 --- /dev/null +++ b/Builds/CMake/rocks_thirdparty.inc @@ -0,0 +1,15 @@ +set (THIRDPARTY_LIBS "") + +if(WITH_SNAPPY) + add_definitions(-DSNAPPY) + include_directories(${snappy_INCLUDE_DIRS}) + set (THIRDPARTY_LIBS ${THIRDPARTY_LIBS} ${snappy_LIBRARIES}) +endif() + +if(WITH_LZ4) + add_definitions(-DLZ4) + include_directories(${lz4_INCLUDE_DIRS}) + set (THIRDPARTY_LIBS ${THIRDPARTY_LIBS} ${lz4_LIBRARIES}) +endif() + + diff --git a/Builds/CMake/rocksdb_build_version.cc.in b/Builds/CMake/rocksdb_build_version.cc.in new file mode 100644 index 00000000000..9ef424669b4 --- /dev/null +++ b/Builds/CMake/rocksdb_build_version.cc.in @@ -0,0 +1,71 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. + +#include + +#include "rocksdb/version.h" +#include "util/string_util.h" + +// The build script may replace these values with real values based +// on whether or not GIT is available and the platform settings +static const std::string rocksdb_build_git_sha = "rocksdb_build_git_sha:@GIT_SHA@"; +static const std::string rocksdb_build_git_tag = "rocksdb_build_git_tag:@GIT_TAG@"; +#define HAS_GIT_CHANGES @GIT_MOD@ +#if HAS_GIT_CHANGES == 0 +// If HAS_GIT_CHANGES is 0, the GIT date is used. +// Use the time the branch/tag was last modified +static const std::string rocksdb_build_date = "rocksdb_build_date:@GIT_DATE@"; +#else +// If HAS_GIT_CHANGES is > 0, the branch/tag has modifications. +// Use the time the build was created. 
+static const std::string rocksdb_build_date = "rocksdb_build_date:@BUILD_DATE@";
+#endif
+
+namespace ROCKSDB_NAMESPACE {
+static void AddProperty(std::unordered_map<std::string, std::string> *props, const std::string& name) {
+  size_t colon = name.find(":");
+  if (colon != std::string::npos && colon > 0 && colon < name.length() - 1) {
+    // If we found a "@:", then this property was a build-time substitution that failed. Skip it
+    size_t at = name.find("@", colon);
+    if (at != colon + 1) {
+      // Everything before the colon is the name, after is the value
+      (*props)[name.substr(0, colon)] = name.substr(colon + 1);
+    }
+  }
+}
+
+static std::unordered_map<std::string, std::string>* LoadPropertiesSet() {
+  auto * properties = new std::unordered_map<std::string, std::string>();
+  AddProperty(properties, rocksdb_build_git_sha);
+  AddProperty(properties, rocksdb_build_git_tag);
+  AddProperty(properties, rocksdb_build_date);
+  return properties;
+}
+
+const std::unordered_map<std::string, std::string>& GetRocksBuildProperties() {
+  static std::unique_ptr<std::unordered_map<std::string, std::string>> props(LoadPropertiesSet());
+  return *props;
+}
+
+std::string GetRocksVersionAsString(bool with_patch) {
+  std::string version = ToString(ROCKSDB_MAJOR) + "." + ToString(ROCKSDB_MINOR);
+  if (with_patch) {
+    return version + "." + ToString(ROCKSDB_PATCH);
+  } else {
+    return version;
+  }
+}
+
+std::string GetRocksBuildInfoAsString(const std::string& program, bool verbose) {
+  std::string info = program + " (RocksDB) " + GetRocksVersionAsString(true);
+  if (verbose) {
+    for (const auto& it : GetRocksBuildProperties()) {
+      info.append("\n ");
+      info.append(it.first);
+      info.append(": ");
+      info.append(it.second);
+    }
+  }
+  return info;
+}
+} // namespace ROCKSDB_NAMESPACE
+
diff --git a/Builds/CMake/soci_patch.cmake b/Builds/CMake/soci_patch.cmake
new file mode 100644
index 00000000000..0c2a75c0dff
--- /dev/null
+++ b/Builds/CMake/soci_patch.cmake
@@ -0,0 +1,49 @@
+# This patches unsigned-types.h in the soci official sources
+# so as to remove type range check exceptions that cause
+# us trouble when using boost::optional to select int values
+
+# Soci's CMake setup leaves flags in place that will cause warnings to
+# be treated as errors, but some compiler versions throw "new" warnings
+# that then cause the build to fail. Simplify that until soci fixes
+# those warnings.
+if (RIPPLED_SOURCE)
+  execute_process( COMMAND ${CMAKE_COMMAND} -E copy_if_different
+    ${RIPPLED_SOURCE}/Builds/CMake/SociConfig.cmake.patched
+    cmake/SociConfig.cmake )
+endif ()
+
+# Some versions of CMake erroneously patch external projects on every build.
+# If the patch makes no changes, skip it. This workaround can be
+# removed once we stop supporting vulnerable versions of CMake.
+# https://gitlab.kitware.com/cmake/cmake/-/issues/21086
+file (STRINGS include/soci/unsigned-types.h sourcecode)
+# Delete the .patched file if it exists, so it doesn't end up duplicated.
+# Trying to remove a file that does not exist is not a problem.
+file (REMOVE include/soci/unsigned-types.h.patched)
+foreach (line_ ${sourcecode})
+  if (line_ MATCHES "^[ \\t]+throw[ ]+soci_error[ ]*\\([ ]*\"Value outside of allowed.+$")
+    set (line_ "//${CMAKE_MATCH_0}")
+  endif ()
+  file (APPEND include/soci/unsigned-types.h.patched "${line_}\n")
+endforeach ()
+execute_process( COMMAND ${CMAKE_COMMAND} -E compare_files
+  include/soci/unsigned-types.h include/soci/unsigned-types.h.patched
+  RESULT_VARIABLE compare_result
+)
+if( compare_result EQUAL 0)
+  message(DEBUG "The soci source and patch files are identical.
Make no changes.") + file (REMOVE include/soci/unsigned-types.h.patched) + return() +endif() +file (RENAME include/soci/unsigned-types.h include/soci/unsigned-types.h.orig) +file (RENAME include/soci/unsigned-types.h.patched include/soci/unsigned-types.h) +# also fix Boost.cmake so that it just returns when we override the Boost_FOUND var +file (APPEND cmake/dependencies/Boost.cmake.patched "if (Boost_FOUND)\n") +file (APPEND cmake/dependencies/Boost.cmake.patched " return ()\n") +file (APPEND cmake/dependencies/Boost.cmake.patched "endif ()\n") +file (STRINGS cmake/dependencies/Boost.cmake sourcecode) +foreach (line_ ${sourcecode}) + file (APPEND cmake/dependencies/Boost.cmake.patched "${line_}\n") +endforeach () +file (RENAME cmake/dependencies/Boost.cmake.patched cmake/dependencies/Boost.cmake) + diff --git a/CMakeLists.txt b/CMakeLists.txt index da2ea1a5307..d3b494c103a 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,17 +1,10 @@ -cmake_minimum_required(VERSION 3.16) +cmake_minimum_required (VERSION 3.16) -if(POLICY CMP0074) +if (POLICY CMP0074) cmake_policy(SET CMP0074 NEW) -endif() -if(POLICY CMP0077) - cmake_policy(SET CMP0077 NEW) -endif() - -# Fix "unrecognized escape" issues when passing CMAKE_MODULE_PATH on Windows. -file(TO_CMAKE_PATH "${CMAKE_MODULE_PATH}" CMAKE_MODULE_PATH) -list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake") +endif () -project(rippled) +project (rippled) set(CMAKE_CXX_EXTENSIONS OFF) set(CMAKE_CXX_STANDARD 20) set(CMAKE_CXX_STANDARD_REQUIRED ON) @@ -28,12 +21,15 @@ if(Git_FOUND) endif() endif() #git -if(thread_safety_analysis) +if (thread_safety_analysis) add_compile_options(-Wthread-safety -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS -DRIPPLE_ENABLE_THREAD_SAFETY_ANNOTATIONS) add_compile_options("-stdlib=libc++") add_link_options("-stdlib=libc++") endif() +list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake") +list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/Builds/CMake/deps") + include (CheckCXXCompilerFlag) include (FetchContent) include (ExternalProject) @@ -46,6 +42,7 @@ endif () include(RippledSanity) include(RippledVersion) include(RippledSettings) +include(RippledNIH) include(RippledRelease) # this check has to remain in the top-level cmake # because of the early return statement @@ -58,66 +55,24 @@ endif () include(RippledCompiler) include(RippledInterface) -option(only_docs "Include only the docs target?" FALSE) -include(RippledDocs) -if(only_docs) - return() -endif() - ### include(deps/Boost) -find_package(OpenSSL 1.1.1 REQUIRED) -set_target_properties(OpenSSL::SSL PROPERTIES - INTERFACE_COMPILE_DEFINITIONS OPENSSL_NO_SSL2 -) -add_subdirectory(src/secp256k1) -add_subdirectory(src/ed25519-donna) -find_package(lz4 REQUIRED) -# Target names with :: are not allowed in a generator expression. -# We need to pull the include directories and imported location properties -# from separate targets. 
-find_package(LibArchive REQUIRED) -find_package(SOCI REQUIRED) -find_package(SQLite3 REQUIRED) -find_package(Snappy REQUIRED) - -option(rocksdb "Enable RocksDB" ON) -if(rocksdb) - find_package(RocksDB REQUIRED) - set_target_properties(RocksDB::rocksdb PROPERTIES - INTERFACE_COMPILE_DEFINITIONS RIPPLE_ROCKSDB_AVAILABLE=1 - ) - target_link_libraries(ripple_libs INTERFACE RocksDB::rocksdb) -endif() - -find_package(nudb REQUIRED) -find_package(date REQUIRED) +include(deps/OpenSSL) +include(deps/Secp256k1) +include(deps/Ed25519-donna) +include(deps/Lz4) +include(deps/Libarchive) +include(deps/Sqlite) +include(deps/Soci) +include(deps/Snappy) +include(deps/Rocksdb) +include(deps/Nudb) +include(deps/date) include(deps/Protobuf) include(deps/gRPC) - -target_link_libraries(ripple_libs INTERFACE - ed25519::ed25519 - LibArchive::LibArchive - lz4::lz4 - nudb::core - OpenSSL::Crypto - OpenSSL::SSL - Ripple::grpc_pbufs - Ripple::pbufs - secp256k1::secp256k1 - soci::soci - SQLite::SQLite3 -) - -if(reporting) - find_package(cassandra-cpp-driver REQUIRED) - find_package(PostgreSQL REQUIRED) - target_link_libraries(ripple_libs INTERFACE - cassandra-cpp-driver::cassandra-cpp-driver - PostgreSQL::PostgreSQL - ) -endif() +include(deps/cassandra) +include(deps/Postgres) ### @@ -125,4 +80,5 @@ include(RippledCore) include(RippledInstall) include(RippledCov) include(RippledMultiConfig) +include(RippledDocs) include(RippledValidatorKeys) diff --git a/conanfile.py b/conanfile.py deleted file mode 100644 index 61e131f2f3d..00000000000 --- a/conanfile.py +++ /dev/null @@ -1,149 +0,0 @@ -from conans import ConanFile -from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout -import re - -class Xrpl(ConanFile): - name = 'xrpl' - - license = 'ISC' - author = 'John Freeman ' - url = 'https://github.com/xrplf/rippled' - description = 'The XRP Ledger' - settings = 'os', 'compiler', 'build_type', 'arch' - options = { - 'assertions': [True, False], - 'coverage': [True, False], - 'fPIC': [True, False], - 'jemalloc': [True, False], - 'reporting': [True, False], - 'rocksdb': [True, False], - 'shared': [True, False], - 'static': [True, False], - 'tests': [True, False], - 'unity': [True, False], - } - - requires = [ - 'boost/1.77.0', - 'date/3.0.1', - 'libarchive/3.6.0', - 'lz4/1.9.3', - 'grpc/1.44.0', - 'nudb/2.0.8', - 'openssl/1.1.1m', - 'protobuf/3.21.4', - 'snappy/1.1.9', - 'soci/4.0.3', - 'sqlite3/3.38.0', - 'zlib/1.2.12', - ] - - default_options = { - 'assertions': False, - 'coverage': False, - 'fPIC': True, - 'jemalloc': False, - 'reporting': False, - 'rocksdb': True, - 'shared': False, - 'static': True, - 'tests': True, - 'unity': False, - - 'cassandra-cpp-driver:shared': False, - 'date:header_only': True, - 'grpc:shared': False, - 'grpc:secure': True, - 'libarchive:shared': False, - 'libarchive:with_acl': False, - 'libarchive:with_bzip2': False, - 'libarchive:with_cng': False, - 'libarchive:with_expat': False, - 'libarchive:with_iconv': False, - 'libarchive:with_libxml2': False, - 'libarchive:with_lz4': True, - 'libarchive:with_lzma': False, - 'libarchive:with_lzo': False, - 'libarchive:with_nettle': False, - 'libarchive:with_openssl': False, - 'libarchive:with_pcreposix': False, - 'libarchive:with_xattr': False, - 'libarchive:with_zlib': False, - 'libpq:shared': False, - 'lz4:shared': False, - 'openssl:shared': False, - 'protobuf:shared': False, - 'protobuf:with_zlib': True, - 'rocksdb:enable_sse': False, - 'rocksdb:lite': False, - 'rocksdb:shared': False, - 'rocksdb:use_rtti': True, - 'rocksdb:with_jemalloc': 
False, - 'rocksdb:with_lz4': True, - 'rocksdb:with_snappy': True, - 'snappy:shared': False, - 'soci:shared': False, - 'soci:with_sqlite3': True, - 'soci:with_boost': True, - } - - def set_version(self): - path = f'{self.recipe_folder}/src/ripple/protocol/impl/BuildInfo.cpp' - regex = r'versionString\s?=\s?\"(.*)\"' - with open(path, 'r') as file: - matches = (re.search(regex, line) for line in file) - match = next(m for m in matches if m) - self.version = match.group(1) - - def configure(self): - if self.settings.compiler == 'apple-clang': - self.options['boost'].visibility = 'global' - - def requirements(self): - if self.options.jemalloc: - self.requires('jemalloc/5.2.1') - if self.options.reporting: - self.requires('cassandra-cpp-driver/2.15.3') - self.requires('libpq/13.6') - if self.options.rocksdb: - self.requires('rocksdb/6.27.3') - - exports_sources = 'CMakeLists.txt', 'Builds/CMake/*', 'src/*', 'cfg/*' - - def layout(self): - cmake_layout(self) - # Fix this setting to follow the default introduced in Conan 1.48 - # to align with our build instructions. - self.folders.generators = 'build/generators' - - generators = 'CMakeDeps' - def generate(self): - tc = CMakeToolchain(self) - tc.variables['tests'] = self.options.tests - tc.variables['assert'] = self.options.assertions - tc.variables['coverage'] = self.options.coverage - tc.variables['jemalloc'] = self.options.jemalloc - tc.variables['reporting'] = self.options.reporting - tc.variables['rocksdb'] = self.options.rocksdb - tc.variables['BUILD_SHARED_LIBS'] = self.options.shared - tc.variables['static'] = self.options.static - tc.variables['unity'] = self.options.unity - tc.generate() - - def build(self): - cmake = CMake(self) - cmake.verbose = True - cmake.configure() - cmake.build() - - def package(self): - cmake = CMake(self) - cmake.verbose = True - cmake.install() - - def package_info(self): - self.cpp_info.libs = [ - 'libxrpl_core.a', - 'libed25519-donna.a', - 'libsecp256k1.a', - ] diff --git a/external/rocksdb/conanfile.py b/external/rocksdb/conanfile.py deleted file mode 100644 index a219f4f5547..00000000000 --- a/external/rocksdb/conanfile.py +++ /dev/null @@ -1,193 +0,0 @@ -import os -import shutil -from conans import ConanFile, CMake -from conan.tools import microsoft as ms - -class RocksDB(ConanFile): - name = 'rocksdb' - version = '6.27.3' - - license = ('GPL-2.0-only', 'Apache-2.0') - url = 'https://github.com/conan-io/conan-center-index' - description = 'A library that provides an embeddable, persistent key-value store for fast storage' - topics = ('rocksdb', 'database', 'leveldb', 'facebook', 'key-value') - - settings = 'os', 'compiler', 'build_type', 'arch' - options = { - 'enable_sse': [False, 'sse42', 'avx2'], - 'fPIC': [True, False], - 'lite': [True, False], - 'shared': [True, False], - 'use_rtti': [True, False], - 'with_gflags': [True, False], - 'with_jemalloc': [True, False], - 'with_lz4': [True, False], - 'with_snappy': [True, False], - 'with_tbb': [True, False], - 'with_zlib': [True, False], - 'with_zstd': [True, False], - } - default_options = { - 'enable_sse': False, - 'fPIC': True, - 'lite': False, - 'shared': False, - 'use_rtti': False, - 'with_gflags': False, - 'with_jemalloc': False, - 'with_lz4': False, - 'with_snappy': False, - 'with_tbb': False, - 'with_zlib': False, - 'with_zstd': False, - } - - def requirements(self): - if self.options.with_gflags: - self.requires('gflags/2.2.2') - if self.options.with_jemalloc: - self.requires('jemalloc/5.2.1') - if self.options.with_lz4: - 
self.requires('lz4/1.9.3') - if self.options.with_snappy: - self.requires('snappy/1.1.9') - if self.options.with_tbb: - self.requires('onetbb/2020.3') - if self.options.with_zlib: - self.requires('zlib/1.2.11') - if self.options.with_zstd: - self.requires('zstd/1.5.2') - - def config_options(self): - if self.settings.os == 'Windows': - del self.options.fPIC - - def configure(self): - if self.options.shared: - del self.options.fPIC - - generators = 'cmake', 'cmake_find_package' - - scm = { - 'type': 'git', - 'url': 'https://github.com/facebook/rocksdb.git', - 'revision': 'v6.27.3', - } - - exports_sources = 'thirdparty.inc' - # For out-of-source build. - no_copy_source = True - - _cmake = None - - def _configure_cmake(self): - if self._cmake: - return - - self._cmake = CMake(self) - - self._cmake.definitions['CMAKE_POSITION_INDEPENDENT_CODE'] = True - - self._cmake.definitions['DISABLE_STALL_NOTIF'] = False - self._cmake.definitions['FAIL_ON_WARNINGS'] = False - self._cmake.definitions['OPTDBG'] = True - self._cmake.definitions['WITH_TESTS'] = False - self._cmake.definitions['WITH_TOOLS'] = False - - self._cmake.definitions['WITH_GFLAGS'] = self.options.with_gflags - self._cmake.definitions['WITH_JEMALLOC'] = self.options.with_jemalloc - self._cmake.definitions['WITH_LZ4'] = self.options.with_lz4 - self._cmake.definitions['WITH_SNAPPY'] = self.options.with_snappy - self._cmake.definitions['WITH_TBB'] = self.options.with_tbb - self._cmake.definitions['WITH_ZLIB'] = self.options.with_zlib - self._cmake.definitions['WITH_ZSTD'] = self.options.with_zstd - - self._cmake.definitions['USE_RTTI'] = self.options.use_rtti - self._cmake.definitions['ROCKSDB_LITE'] = self.options.lite - self._cmake.definitions['ROCKSDB_INSTALL_ON_WINDOWS'] = ( - self.settings.os == 'Windows' - ) - - if not self.options.enable_sse: - self._cmake.definitions['PORTABLE'] = True - self._cmake.definitions['FORCE_SSE42'] = False - elif self.options.enable_sse == 'sse42': - self._cmake.definitions['PORTABLE'] = True - self._cmake.definitions['FORCE_SSE42'] = True - elif self.options.enable_sse == 'avx2': - self._cmake.definitions['PORTABLE'] = False - self._cmake.definitions['FORCE_SSE42'] = False - - self._cmake.definitions['WITH_ASAN'] = False - self._cmake.definitions['WITH_BZ2'] = False - self._cmake.definitions['WITH_JNI'] = False - self._cmake.definitions['WITH_LIBRADOS'] = False - if ms.is_msvc(self): - self._cmake.definitions['WITH_MD_LIBRARY'] = ( - ms.msvc_runtime_flag(self).startswith('MD') - ) - self._cmake.definitions['WITH_RUNTIME_DEBUG'] = ( - ms.msvc_runtime_flag(self).endswith('d') - ) - self._cmake.definitions['WITH_NUMA'] = False - self._cmake.definitions['WITH_TSAN'] = False - self._cmake.definitions['WITH_UBSAN'] = False - self._cmake.definitions['WITH_WINDOWS_UTF8_FILENAMES'] = False - self._cmake.definitions['WITH_XPRESS'] = False - self._cmake.definitions['WITH_FALLOCATE'] = True - - - def build(self): - if ms.is_msvc(self): - file = os.path.join( - self.recipe_folder, '..', 'export_source', 'thirdparty.inc' - ) - shutil.copy(file, self.build_folder) - self._configure_cmake() - self._cmake.configure() - self._cmake.build() - - def package(self): - self._configure_cmake() - self._cmake.install() - - def package_info(self): - self.cpp_info.filenames['cmake_find_package'] = 'RocksDB' - self.cpp_info.filenames['cmake_find_package_multi'] = 'RocksDB' - self.cpp_info.set_property('cmake_file_name', 'RocksDB') - - self.cpp_info.names['cmake_find_package'] = 'RocksDB' - 
self.cpp_info.names['cmake_find_package_multi'] = 'RocksDB' - - self.cpp_info.components['librocksdb'].names['cmake_find_package'] = 'rocksdb' - self.cpp_info.components['librocksdb'].names['cmake_find_package_multi'] = 'rocksdb' - self.cpp_info.components['librocksdb'].set_property( - 'cmake_target_name', 'RocksDB::rocksdb' - ) - - self.cpp_info.components['librocksdb'].libs = ['rocksdb'] - - if self.settings.os == "Windows": - self.cpp_info.components["librocksdb"].system_libs = ["shlwapi", "rpcrt4"] - if self.options.shared: - self.cpp_info.components["librocksdb"].defines = ["ROCKSDB_DLL"] - elif self.settings.os in ["Linux", "FreeBSD"]: - self.cpp_info.components["librocksdb"].system_libs = ["pthread", "m"] - - if self.options.lite: - self.cpp_info.components["librocksdb"].defines.append("ROCKSDB_LITE") - - if self.options.with_gflags: - self.cpp_info.components["librocksdb"].requires.append("gflags::gflags") - if self.options.with_jemalloc: - self.cpp_info.components["librocksdb"].requires.append("jemalloc::jemalloc") - if self.options.with_lz4: - self.cpp_info.components["librocksdb"].requires.append("lz4::lz4") - if self.options.with_snappy: - self.cpp_info.components["librocksdb"].requires.append("snappy::snappy") - if self.options.with_tbb: - self.cpp_info.components["librocksdb"].requires.append("onetbb::onetbb") - if self.options.with_zlib: - self.cpp_info.components["librocksdb"].requires.append("zlib::zlib") - if self.options.with_zstd: - self.cpp_info.components["librocksdb"].requires.append("zstd::zstd") diff --git a/external/rocksdb/thirdparty.inc b/external/rocksdb/thirdparty.inc deleted file mode 100644 index fce11784b92..00000000000 --- a/external/rocksdb/thirdparty.inc +++ /dev/null @@ -1,62 +0,0 @@ -if(WITH_GFLAGS) - # Config with namespace available since gflags 2.2.2 - find_package(gflags REQUIRED) - set(GFLAGS_LIB gflags::gflags) - list(APPEND THIRDPARTY_LIBS ${GFLAGS_LIB}) - add_definitions(-DGFLAGS=1) -endif() - -if(WITH_SNAPPY) - find_package(Snappy REQUIRED) - add_definitions(-DSNAPPY) - list(APPEND THIRDPARTY_LIBS Snappy::snappy) -endif() - -if(WITH_LZ4) - find_package(lz4 REQUIRED) - add_definitions(-DLZ4) - list(APPEND THIRDPARTY_LIBS lz4::lz4) -endif() - -if(WITH_ZLIB) - find_package(ZLIB REQUIRED) - add_definitions(-DZLIB) - list(APPEND THIRDPARTY_LIBS ZLIB::ZLIB) -endif() - -option(WITH_BZ2 "build with bzip2" OFF) -if(WITH_BZ2) - find_package(BZip2 REQUIRED) - add_definitions(-DBZIP2) - list(APPEND THIRDPARTY_LIBS BZip2::BZip2) -endif() - -if(WITH_ZSTD) - find_package(zstd REQUIRED) - add_definitions(-DZSTD) - list(APPEND THIRDPARTY_LIBS zstd::zstd) -endif() - -# ================================================== XPRESS ================================================== -# This makes use of built-in Windows API, no additional includes, links to a system lib - -if(WITH_XPRESS) - message(STATUS "XPRESS is enabled") - add_definitions(-DXPRESS) - # We are using the implementation provided by the system - list(APPEND SYSTEM_LIBS Cabinet.lib) -else() - message(STATUS "XPRESS is disabled") -endif() - -# ================================================== JEMALLOC ================================================== -if(WITH_JEMALLOC) - message(STATUS "JEMALLOC library is enabled") - add_definitions(-DROCKSDB_JEMALLOC -DJEMALLOC_EXPORT= -DJEMALLOC_NO_RENAME) - list(APPEND THIRDPARTY_LIBS jemalloc::jemalloc) - set(ARTIFACT_SUFFIX "_je") - -else () - set(ARTIFACT_SUFFIX "") - message(STATUS "JEMALLOC library is disabled") -endif () diff --git 
a/src/ed25519-donna/CMakeLists.txt b/src/ed25519-donna/CMakeLists.txt deleted file mode 100644 index 418dc38326b..00000000000 --- a/src/ed25519-donna/CMakeLists.txt +++ /dev/null @@ -1,48 +0,0 @@ -cmake_minimum_required(VERSION 3.11) - -project(ed25519 - LANGUAGES C -) - -if(PROJECT_NAME STREQUAL CMAKE_PROJECT_NAME) - set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}/output/$/lib") -endif() - -if(NOT TARGET OpenSSL::SSL) - find_package(OpenSSL) -endif() - -add_library(ed25519 STATIC - ed25519.c -) -add_library(ed25519::ed25519 ALIAS ed25519) -target_link_libraries(ed25519 PUBLIC OpenSSL::SSL) - -include(GNUInstallDirs) - -#[=========================================================[ - NOTE for macos: - https://github.com/floodyberry/ed25519-donna/issues/29 - our source for ed25519-donna-portable.h has been - patched to workaround this. -#]=========================================================] -target_include_directories(ed25519 PUBLIC - $ - $ -) - -install( - TARGETS ed25519 - EXPORT ${PROJECT_NAME}-exports - ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}" -) -install( - EXPORT ${PROJECT_NAME}-exports - DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}" - FILE ${PROJECT_NAME}-targets.cmake - NAMESPACE ${PROJECT_NAME}:: -) -install( - FILES ed25519.h - DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}" -) diff --git a/src/ripple/app/tx/impl/CreateCheck.cpp b/src/ripple/app/tx/impl/CreateCheck.cpp index f5c2cbfbfd9..a59a7c12eba 100644 --- a/src/ripple/app/tx/impl/CreateCheck.cpp +++ b/src/ripple/app/tx/impl/CreateCheck.cpp @@ -90,14 +90,8 @@ CreateCheck::preclaim(PreclaimContext const& ctx) return tecNO_DST; } - auto const flags = sleDst->getFlags(); - - // Check if the destination has disallowed incoming checks - if (ctx.view.rules().enabled(featureDisallowIncoming) && - (flags & lsfDisallowIncomingCheck)) - return tecNO_PERMISSION; - - if ((flags & lsfRequireDestTag) && !ctx.tx.isFieldPresent(sfDestinationTag)) + if ((sleDst->getFlags() & lsfRequireDestTag) && + !ctx.tx.isFieldPresent(sfDestinationTag)) { // The tag is basically account-specific information we don't // understand, but we can require someone to fill it in. diff --git a/src/ripple/app/tx/impl/NFTokenCreateOffer.cpp b/src/ripple/app/tx/impl/NFTokenCreateOffer.cpp index 695efdd0aa4..80e4c3964a7 100644 --- a/src/ripple/app/tx/impl/NFTokenCreateOffer.cpp +++ b/src/ripple/app/tx/impl/NFTokenCreateOffer.cpp @@ -165,42 +165,11 @@ NFTokenCreateOffer::preclaim(PreclaimContext const& ctx) return tecUNFUNDED_OFFER; } - if (auto const destination = ctx.tx[~sfDestination]) - { - // If a destination is specified, the destination must already be in - // the ledger. 
- auto const sleDst = ctx.view.read(keylet::account(*destination)); - - if (!sleDst) - return tecNO_DST; - - // check if the destination has disallowed incoming offers - if (ctx.view.rules().enabled(featureDisallowIncoming)) - { - // flag cannot be set unless amendment is enabled but - // out of an abundance of caution check anyway - - if (sleDst->getFlags() & lsfDisallowIncomingNFTOffer) - return tecNO_PERMISSION; - } - } - - if (auto const owner = ctx.tx[~sfOwner]) - { - // Check if the owner (buy offer) has disallowed incoming offers - if (ctx.view.rules().enabled(featureDisallowIncoming)) - { - auto const sleOwner = ctx.view.read(keylet::account(*owner)); - - // defensively check - // it should not be possible to specify owner that doesn't exist - if (!sleOwner) - return tecNO_TARGET; - - if (sleOwner->getFlags() & lsfDisallowIncomingNFTOffer) - return tecNO_PERMISSION; - } - } + // If a destination is specified, the destination must already be in + // the ledger. + if (auto const destination = ctx.tx[~sfDestination]; + destination && !ctx.view.exists(keylet::account(*destination))) + return tecNO_DST; return tesSUCCESS; } diff --git a/src/ripple/app/tx/impl/PayChan.cpp b/src/ripple/app/tx/impl/PayChan.cpp index 1667bddcdb1..aab3dcc5a6b 100644 --- a/src/ripple/app/tx/impl/PayChan.cpp +++ b/src/ripple/app/tx/impl/PayChan.cpp @@ -217,21 +217,14 @@ PayChanCreate::preclaim(PreclaimContext const& ctx) auto const sled = ctx.view.read(keylet::account(dst)); if (!sled) return tecNO_DST; - - auto const flags = sled->getFlags(); - - // Check if they have disallowed incoming payment channels - if (ctx.view.rules().enabled(featureDisallowIncoming) && - (flags & lsfDisallowIncomingPayChan)) - return tecNO_PERMISSION; - - if ((flags & lsfRequireDestTag) && !ctx.tx[~sfDestinationTag]) + if (((*sled)[sfFlags] & lsfRequireDestTag) && + !ctx.tx[~sfDestinationTag]) return tecDST_TAG_NEEDED; // Obeying the lsfDisallowXRP flag was a bug. Piggyback on // featureDepositAuth to remove the bug. 
if (!ctx.view.rules().enabled(featureDepositAuth) && - (flags & lsfDisallowXRP)) + ((*sled)[sfFlags] & lsfDisallowXRP)) return tecNO_TARGET; } diff --git a/src/ripple/app/tx/impl/SetAccount.cpp b/src/ripple/app/tx/impl/SetAccount.cpp index 5c7d4369a76..85fe290ca55 100644 --- a/src/ripple/app/tx/impl/SetAccount.cpp +++ b/src/ripple/app/tx/impl/SetAccount.cpp @@ -538,30 +538,6 @@ SetAccount::doApply() sle->makeFieldAbsent(sfNFTokenMinter); } - // Set or clear flags for disallowing various incoming instruments - if (ctx_.view().rules().enabled(featureDisallowIncoming)) - { - if (uSetFlag == asfDisallowIncomingNFTOffer) - uFlagsOut |= lsfDisallowIncomingNFTOffer; - else if (uClearFlag == asfDisallowIncomingNFTOffer) - uFlagsOut &= ~lsfDisallowIncomingNFTOffer; - - if (uSetFlag == asfDisallowIncomingCheck) - uFlagsOut |= lsfDisallowIncomingCheck; - else if (uClearFlag == asfDisallowIncomingCheck) - uFlagsOut &= ~lsfDisallowIncomingCheck; - - if (uSetFlag == asfDisallowIncomingPayChan) - uFlagsOut |= lsfDisallowIncomingPayChan; - else if (uClearFlag == asfDisallowIncomingPayChan) - uFlagsOut &= ~lsfDisallowIncomingPayChan; - - if (uSetFlag == asfDisallowIncomingTrustline) - uFlagsOut |= lsfDisallowIncomingTrustline; - else if (uClearFlag == asfDisallowIncomingTrustline) - uFlagsOut &= ~lsfDisallowIncomingTrustline; - } - if (uFlagsIn != uFlagsOut) sle->setFieldU32(sfFlags, uFlagsOut); diff --git a/src/ripple/app/tx/impl/SetTrust.cpp b/src/ripple/app/tx/impl/SetTrust.cpp index acbbedabf10..23af19c7b15 100644 --- a/src/ripple/app/tx/impl/SetTrust.cpp +++ b/src/ripple/app/tx/impl/SetTrust.cpp @@ -128,20 +128,6 @@ SetTrust::preclaim(PreclaimContext const& ctx) } } - // If the destination has opted to disallow incoming trustlines - // then honour that flag - if (ctx.view.rules().enabled(featureDisallowIncoming)) - { - auto const sleDst = ctx.view.read(keylet::account(uDstAccountID)); - - if (!sleDst) - return tecNO_DST; - - auto const dstFlags = sleDst->getFlags(); - if (dstFlags & lsfDisallowIncomingTrustline) - return tecNO_PERMISSION; - } - return tesSUCCESS; } diff --git a/src/ripple/net/impl/RPCCall.cpp b/src/ripple/net/impl/RPCCall.cpp index b475afe9dfb..eb4906f3af7 100644 --- a/src/ripple/net/impl/RPCCall.cpp +++ b/src/ripple/net/impl/RPCCall.cpp @@ -1396,7 +1396,16 @@ struct RPCCallImp // callbackFuncP. // Receive reply - if (strData.empty()) + if (iStatus == 401) + Throw( + "incorrect rpcuser or rpcpassword (authorization failed)"); + else if ( + (iStatus >= 400) && (iStatus != 400) && (iStatus != 404) && + (iStatus != 500)) // ? + Throw( + std::string("server returned HTTP error ") + + std::to_string(iStatus)); + else if (strData.empty()) Throw("no response from server"); // Parse reply diff --git a/src/ripple/protocol/ErrorCodes.h b/src/ripple/protocol/ErrorCodes.h index ee33eee0604..98a8cf43a39 100644 --- a/src/ripple/protocol/ErrorCodes.h +++ b/src/ripple/protocol/ErrorCodes.h @@ -163,15 +163,12 @@ enum warning_code_i { namespace RPC { -/** Maps an rpc error code to its token, default message, and HTTP status. */ +/** Maps an rpc error code to its token and default message. */ struct ErrorInfo { // Default ctor needed to produce an empty std::array during constexpr eval. 
constexpr ErrorInfo() - : code(rpcUNKNOWN) - , token("unknown") - , message("An unknown error code.") - , http_status(200) + : code(rpcUNKNOWN), token("unknown"), message("An unknown error code.") { } @@ -179,26 +176,13 @@ struct ErrorInfo error_code_i code_, char const* token_, char const* message_) - : code(code_), token(token_), message(message_), http_status(200) - { - } - - constexpr ErrorInfo( - error_code_i code_, - char const* token_, - char const* message_, - int http_status_) - : code(code_) - , token(token_) - , message(message_) - , http_status(http_status_) + : code(code_), token(token_), message(message_) { } error_code_i code; Json::StaticString token; Json::StaticString message; - int http_status; }; /** Returns an ErrorInfo that reflects the error code. */ @@ -348,10 +332,6 @@ not_validator_error() bool contains_error(Json::Value const& json); -/** Returns http status that corresponds to the error code. */ -int -error_code_http_status(error_code_i code); - } // namespace RPC /** Returns a single string with the contents of an RPC error. */ diff --git a/src/ripple/protocol/Feature.h b/src/ripple/protocol/Feature.h index e4c6826ee19..dd54600434a 100644 --- a/src/ripple/protocol/Feature.h +++ b/src/ripple/protocol/Feature.h @@ -74,7 +74,7 @@ namespace detail { // Feature.cpp. Because it's only used to reserve storage, and determine how // large to make the FeatureBitset, it MAY be larger. It MUST NOT be less than // the actual number of amendments. A LogicError on startup will verify this. -static constexpr std::size_t numFeatures = 55; +static constexpr std::size_t numFeatures = 54; /** Amendments that this server supports and the default voting behavior. Whether they are enabled depends on the Rules defined in the validated @@ -340,7 +340,6 @@ extern uint256 const featureNonFungibleTokensV1_1; extern uint256 const fixTrustLinesToSelf; extern uint256 const fixRemoveNFTokenAutoTrustLine; extern uint256 const featureImmediateOfferKilled; -extern uint256 const featureDisallowIncoming; extern uint256 const fixNFTokenBrokerAccept; } // namespace ripple diff --git a/src/ripple/protocol/LedgerFormats.h b/src/ripple/protocol/LedgerFormats.h index 45258a3d02b..2dd04b1264b 100644 --- a/src/ripple/protocol/LedgerFormats.h +++ b/src/ripple/protocol/LedgerFormats.h @@ -232,17 +232,6 @@ enum LedgerSpecificFlags { lsfDefaultRipple = 0x00800000, // True, trust lines allow rippling by default lsfDepositAuth = 0x01000000, // True, all deposits require authorization -/* // reserved for Hooks amendment - lsfTshCollect = 0x02000000, // True, allow TSH collect-calls to acc hooks -*/ - lsfDisallowIncomingNFTOffer = - 0x04000000, // True, reject new incoming NFT offers - lsfDisallowIncomingCheck = - 0x08000000, // True, reject new checks - lsfDisallowIncomingPayChan = - 0x10000000, // True, reject new paychans - lsfDisallowIncomingTrustline = - 0x20000000, // True, reject new trustlines (only if no issued assets) // ltOFFER lsfPassive = 0x00010000, diff --git a/src/ripple/protocol/TxFlags.h b/src/ripple/protocol/TxFlags.h index c4218219868..0ad088c41d6 100644 --- a/src/ripple/protocol/TxFlags.h +++ b/src/ripple/protocol/TxFlags.h @@ -79,13 +79,6 @@ constexpr std::uint32_t asfGlobalFreeze = 7; constexpr std::uint32_t asfDefaultRipple = 8; constexpr std::uint32_t asfDepositAuth = 9; constexpr std::uint32_t asfAuthorizedNFTokenMinter = 10; -/* // reserved for Hooks amendment -constexpr std::uint32_t asfTshCollect = 11; -*/ -constexpr std::uint32_t asfDisallowIncomingNFTOffer = 12; -constexpr 
std::uint32_t asfDisallowIncomingCheck = 13; -constexpr std::uint32_t asfDisallowIncomingPayChan = 14; -constexpr std::uint32_t asfDisallowIncomingTrustline = 15; // OfferCreate flags: constexpr std::uint32_t tfPassive = 0x00010000; diff --git a/src/ripple/protocol/impl/ErrorCodes.cpp b/src/ripple/protocol/impl/ErrorCodes.cpp index bb3b2d47a89..e4a9acf4677 100644 --- a/src/ripple/protocol/impl/ErrorCodes.cpp +++ b/src/ripple/protocol/impl/ErrorCodes.cpp @@ -18,7 +18,6 @@ //============================================================================== #include -#include #include #include @@ -27,96 +26,105 @@ namespace RPC { namespace detail { +// clang-format off // Unordered array of ErrorInfos, so we don't have to maintain the list // ordering by hand. // // This array will be omitted from the object file; only the sorted version // will remain in the object file. But the string literals will remain. -// -// There's a certain amount of tension in determining the correct HTTP -// status to associate with a given RPC error. Initially all RPC errors -// returned 200 (OK). And that's the default behavior if no HTTP status code -// is specified below. -// -// The codes currently selected target the load balancer fail-over use case. -// If a query fails on one node but is likely to have a positive outcome -// on a different node, then the failure should return a 4xx/5xx range -// status code. - -// clang-format off constexpr static ErrorInfo unorderedErrorInfos[]{ - {rpcACT_MALFORMED, "actMalformed", "Account malformed."}, - {rpcACT_NOT_FOUND, "actNotFound", "Account not found."}, - {rpcALREADY_MULTISIG, "alreadyMultisig", "Already multisigned."}, - {rpcALREADY_SINGLE_SIG, "alreadySingleSig", "Already single-signed."}, - {rpcAMENDMENT_BLOCKED, "amendmentBlocked", "Amendment blocked, need upgrade.", 503}, - {rpcEXPIRED_VALIDATOR_LIST, "unlBlocked", "Validator list expired.", 503}, - {rpcATX_DEPRECATED, "deprecated", "Use the new API or specify a ledger range.", 400}, - {rpcBAD_KEY_TYPE, "badKeyType", "Bad key type.", 400}, - {rpcBAD_FEATURE, "badFeature", "Feature unknown or invalid.", 500}, - {rpcBAD_ISSUER, "badIssuer", "Issuer account malformed.", 400}, - {rpcBAD_MARKET, "badMarket", "No such market.", 404}, - {rpcBAD_SECRET, "badSecret", "Secret does not match account.", 403}, - {rpcBAD_SEED, "badSeed", "Disallowed seed.", 403}, - {rpcBAD_SYNTAX, "badSyntax", "Syntax error.", 400}, - {rpcCHANNEL_MALFORMED, "channelMalformed", "Payment channel is malformed.", 400}, - {rpcCHANNEL_AMT_MALFORMED, "channelAmtMalformed", "Payment channel amount is malformed.", 400}, - {rpcCOMMAND_MISSING, "commandMissing", "Missing command entry.", 400}, - {rpcDB_DESERIALIZATION, "dbDeserialization", "Database deserialization error.", 502}, - {rpcDST_ACT_MALFORMED, "dstActMalformed", "Destination account is malformed.", 400}, - {rpcDST_ACT_MISSING, "dstActMissing", "Destination account not provided.", 400}, - {rpcDST_ACT_NOT_FOUND, "dstActNotFound", "Destination account not found.", 404}, - {rpcDST_AMT_MALFORMED, "dstAmtMalformed", "Destination amount/currency/issuer is malformed.", 400}, - {rpcDST_AMT_MISSING, "dstAmtMissing", "Destination amount/currency/issuer is missing.", 400}, - {rpcDST_ISR_MALFORMED, "dstIsrMalformed", "Destination issuer is malformed.", 400}, - {rpcEXCESSIVE_LGR_RANGE, "excessiveLgrRange", "Ledger range exceeds 1000.", 400}, - {rpcFORBIDDEN, "forbidden", "Bad credentials.", 403}, - {rpcFAILED_TO_FORWARD, "failedToForward", "Failed to forward request to p2p node", 503}, - {rpcHIGH_FEE, 
"highFee", "Current transaction fee exceeds your limit.", 402}, - {rpcINTERNAL, "internal", "Internal error.", 500}, - {rpcINVALID_LGR_RANGE, "invalidLgrRange", "Ledger range is invalid.", 400}, - {rpcINVALID_PARAMS, "invalidParams", "Invalid parameters.", 400}, - {rpcJSON_RPC, "json_rpc", "JSON-RPC transport error.", 500}, - {rpcLGR_IDXS_INVALID, "lgrIdxsInvalid", "Ledger indexes invalid.", 400}, - {rpcLGR_IDX_MALFORMED, "lgrIdxMalformed", "Ledger index malformed.", 400}, - {rpcLGR_NOT_FOUND, "lgrNotFound", "Ledger not found.", 404}, - {rpcLGR_NOT_VALIDATED, "lgrNotValidated", "Ledger not validated.", 202}, - {rpcMASTER_DISABLED, "masterDisabled", "Master key is disabled.", 403}, - {rpcNOT_ENABLED, "notEnabled", "Not enabled in configuration.", 501}, - {rpcNOT_IMPL, "notImpl", "Not implemented.", 501}, - {rpcNOT_READY, "notReady", "Not ready to handle this request.", 503}, - {rpcNOT_SUPPORTED, "notSupported", "Operation not supported.", 501}, - {rpcNO_CLOSED, "noClosed", "Closed ledger is unavailable.", 503}, - {rpcNO_CURRENT, "noCurrent", "Current ledger is unavailable.", 503}, - {rpcNOT_SYNCED, "notSynced", "Not synced to the network.", 503}, - {rpcNO_EVENTS, "noEvents", "Current transport does not support events.", 405}, - {rpcNO_NETWORK, "noNetwork", "Not synced to the network.", 503}, - {rpcNO_PERMISSION, "noPermission", "You don't have permission for this command.", 401}, - {rpcNO_PF_REQUEST, "noPathRequest", "No pathfinding request in progress.", 404}, - {rpcOBJECT_NOT_FOUND, "objectNotFound", "The requested object was not found.", 404}, - {rpcPUBLIC_MALFORMED, "publicMalformed", "Public key is malformed.", 400}, - {rpcREPORTING_UNSUPPORTED, "reportingUnsupported", "Requested operation not supported by reporting mode server", 405}, - {rpcSENDMAX_MALFORMED, "sendMaxMalformed", "SendMax amount malformed.", 400}, - {rpcSIGNING_MALFORMED, "signingMalformed", "Signing of transaction is malformed.", 400}, - {rpcSLOW_DOWN, "slowDown", "You are placing too much load on the server.", 429}, - {rpcSRC_ACT_MALFORMED, "srcActMalformed", "Source account is malformed.", 400}, - {rpcSRC_ACT_MISSING, "srcActMissing", "Source account not provided.", 400}, - {rpcSRC_ACT_NOT_FOUND, "srcActNotFound", "Source account not found.", 404}, - {rpcSRC_CUR_MALFORMED, "srcCurMalformed", "Source currency is malformed.", 400}, - {rpcSRC_ISR_MALFORMED, "srcIsrMalformed", "Source issuer is malformed.", 400}, - {rpcSTREAM_MALFORMED, "malformedStream", "Stream malformed.", 400}, - {rpcTOO_BUSY, "tooBusy", "The server is too busy to help you now.", 503}, - {rpcTXN_NOT_FOUND, "txnNotFound", "Transaction not found.", 404}, - {rpcUNKNOWN_COMMAND, "unknownCmd", "Unknown method.", 405}}; + {rpcACT_MALFORMED, "actMalformed", "Account malformed."}, + {rpcACT_NOT_FOUND, "actNotFound", "Account not found."}, + {rpcALREADY_MULTISIG, "alreadyMultisig", "Already multisigned."}, + {rpcALREADY_SINGLE_SIG, "alreadySingleSig", "Already single-signed."}, + {rpcAMENDMENT_BLOCKED, "amendmentBlocked", "Amendment blocked, need upgrade."}, + {rpcEXPIRED_VALIDATOR_LIST, "unlBlocked", "Validator list expired."}, + {rpcATX_DEPRECATED, "deprecated", "Use the new API or specify a ledger range."}, + {rpcBAD_KEY_TYPE, "badKeyType", "Bad key type."}, + {rpcBAD_FEATURE, "badFeature", "Feature unknown or invalid."}, + {rpcBAD_ISSUER, "badIssuer", "Issuer account malformed."}, + {rpcBAD_MARKET, "badMarket", "No such market."}, + {rpcBAD_SECRET, "badSecret", "Secret does not match account."}, + {rpcBAD_SEED, "badSeed", "Disallowed seed."}, + 
{rpcBAD_SYNTAX, "badSyntax", "Syntax error."}, + {rpcCHANNEL_MALFORMED, "channelMalformed", "Payment channel is malformed."}, + {rpcCHANNEL_AMT_MALFORMED, "channelAmtMalformed", "Payment channel amount is malformed."}, + {rpcCOMMAND_MISSING, "commandMissing", "Missing command entry."}, + {rpcDB_DESERIALIZATION, "dbDeserialization", "Database deserialization error."}, + {rpcDST_ACT_MALFORMED, "dstActMalformed", "Destination account is malformed."}, + {rpcDST_ACT_MISSING, "dstActMissing", "Destination account not provided."}, + {rpcDST_ACT_NOT_FOUND, "dstActNotFound", "Destination account not found."}, + {rpcDST_AMT_MALFORMED, "dstAmtMalformed", "Destination amount/currency/issuer is malformed."}, + {rpcDST_AMT_MISSING, "dstAmtMissing", "Destination amount/currency/issuer is missing."}, + {rpcDST_ISR_MALFORMED, "dstIsrMalformed", "Destination issuer is malformed."}, + {rpcEXCESSIVE_LGR_RANGE, "excessiveLgrRange", "Ledger range exceeds 1000."}, + {rpcFORBIDDEN, "forbidden", "Bad credentials."}, + {rpcFAILED_TO_FORWARD, "failedToForward", "Failed to forward request to p2p node"}, + {rpcHIGH_FEE, "highFee", "Current transaction fee exceeds your limit."}, + {rpcINTERNAL, "internal", "Internal error."}, + {rpcINVALID_LGR_RANGE, "invalidLgrRange", "Ledger range is invalid."}, + {rpcINVALID_PARAMS, "invalidParams", "Invalid parameters."}, + {rpcJSON_RPC, "json_rpc", "JSON-RPC transport error."}, + {rpcLGR_IDXS_INVALID, "lgrIdxsInvalid", "Ledger indexes invalid."}, + {rpcLGR_IDX_MALFORMED, "lgrIdxMalformed", "Ledger index malformed."}, + {rpcLGR_NOT_FOUND, "lgrNotFound", "Ledger not found."}, + {rpcLGR_NOT_VALIDATED, "lgrNotValidated", "Ledger not validated."}, + {rpcMASTER_DISABLED, "masterDisabled", "Master key is disabled."}, + {rpcNOT_ENABLED, "notEnabled", "Not enabled in configuration."}, + {rpcNOT_IMPL, "notImpl", "Not implemented."}, + {rpcNOT_READY, "notReady", "Not ready to handle this request."}, + {rpcNOT_SUPPORTED, "notSupported", "Operation not supported."}, + {rpcNO_CLOSED, "noClosed", "Closed ledger is unavailable."}, + {rpcNO_CURRENT, "noCurrent", "Current ledger is unavailable."}, + {rpcNOT_SYNCED, "notSynced", "Not synced to the network."}, + {rpcNO_EVENTS, "noEvents", "Current transport does not support events."}, + {rpcNO_NETWORK, "noNetwork", "Not synced to the network."}, + {rpcNO_PERMISSION, "noPermission", "You don't have permission for this command."}, + {rpcNO_PF_REQUEST, "noPathRequest", "No pathfinding request in progress."}, + {rpcPUBLIC_MALFORMED, "publicMalformed", "Public key is malformed."}, + {rpcREPORTING_UNSUPPORTED, "reportingUnsupported", "Requested operation not supported by reporting mode server"}, + {rpcSIGNING_MALFORMED, "signingMalformed", "Signing of transaction is malformed."}, + {rpcSLOW_DOWN, "slowDown", "You are placing too much load on the server."}, + {rpcSRC_ACT_MALFORMED, "srcActMalformed", "Source account is malformed."}, + {rpcSRC_ACT_MISSING, "srcActMissing", "Source account not provided."}, + {rpcSRC_ACT_NOT_FOUND, "srcActNotFound", "Source account not found."}, + {rpcSRC_CUR_MALFORMED, "srcCurMalformed", "Source currency is malformed."}, + {rpcSRC_ISR_MALFORMED, "srcIsrMalformed", "Source issuer is malformed."}, + {rpcSTREAM_MALFORMED, "malformedStream", "Stream malformed."}, + {rpcTOO_BUSY, "tooBusy", "The server is too busy to help you now."}, + {rpcTXN_NOT_FOUND, "txnNotFound", "Transaction not found."}, + {rpcUNKNOWN_COMMAND, "unknownCmd", "Unknown method."}, + {rpcSENDMAX_MALFORMED, "sendMaxMalformed", "SendMax amount malformed."}, + 
{rpcOBJECT_NOT_FOUND, "objectNotFound", "The requested object was not found."}};
 // clang-format on
+// C++ does not allow you to return an array from a function. You must
+// return an object which may in turn contain an array. The following
+// struct is simply defined so the enclosed array can be returned from a
+// constexpr function.
+//
+// In C++17 this struct can be replaced by a std::array. But in C++14
+// the constexpr methods of a std::array are not sufficient to perform the
+// necessary work at compile time.
+template <int N>
+struct ErrorInfoArray
+{
+    // Visual Studio doesn't treat a templated aggregate as an aggregate.
+    // So, for Visual Studio, we define a constexpr default constructor.
+    constexpr ErrorInfoArray() : infos{}
+    {
+    }
+
+    ErrorInfo infos[N];
+};
+
 // Sort and validate unorderedErrorInfos at compile time. Should be
 // converted to consteval when get to C++20.
 template <int M, int N>
 constexpr auto
-sortErrorInfos(ErrorInfo const (&unordered)[N]) -> std::array<ErrorInfo, M>
+sortErrorInfos(ErrorInfo const (&unordered)[N]) -> ErrorInfoArray<M>
 {
-    std::array<ErrorInfo, M> ret = {};
+    ErrorInfoArray<M> ret;
     for (ErrorInfo const& info : unordered)
     {
@@ -127,10 +135,12 @@ sortErrorInfos(ErrorInfo const (&unordered)[N]) -> std::array<ErrorInfo, M>
         static_assert(rpcSUCCESS == 0, "Unexpected error_code_i layout.");
         int const index{info.code - 1};
-        if (ret[index].code != rpcUNKNOWN)
+        if (ret.infos[index].code != rpcUNKNOWN)
             throw(std::invalid_argument("Duplicate error_code_i in list"));
-        ret[index] = info;
+        ret.infos[index].code = info.code;
+        ret.infos[index].token = info.token;
+        ret.infos[index].message = info.message;
     }
     // Verify that all entries are filled in starting with 1 and proceeding
@@ -140,7 +150,7 @@ sortErrorInfos(ErrorInfo const (&unordered)[N]) -> std::array<ErrorInfo, M>
     // rpcUNKNOWN. But other than that all entries should match their index.
     int codeCount{0};
     int expect{rpcBAD_SYNTAX - 1};
-    for (ErrorInfo const& info : ret)
+    for (ErrorInfo const& info : ret.infos)
     {
         ++expect;
         if (info.code == rpcUNKNOWN)
@@ -171,7 +181,7 @@ get_error_info(error_code_i code)
 {
     if (code <= rpcSUCCESS || code > rpcLAST)
         return detail::unknownError;
-    return detail::sortedErrorInfos[code - 1];
+    return detail::sortedErrorInfos.infos[code - 1];
 }
 Json::Value
@@ -198,12 +208,6 @@ contains_error(Json::Value const& json)
     return false;
 }
-int
-error_code_http_status(error_code_i code)
-{
-    return get_error_info(code).http_status;
-}
-
 } // namespace RPC
 std::string
diff --git a/src/ripple/protocol/impl/Feature.cpp b/src/ripple/protocol/impl/Feature.cpp
index 5903603f975..fa0d167ef09 100644
--- a/src/ripple/protocol/impl/Feature.cpp
+++ b/src/ripple/protocol/impl/Feature.cpp
@@ -450,7 +450,6 @@ REGISTER_FEATURE(NonFungibleTokensV1_1, Supported::yes, DefaultVote::no)
 REGISTER_FIX (fixTrustLinesToSelf, Supported::yes, DefaultVote::no);
 REGISTER_FIX (fixRemoveNFTokenAutoTrustLine, Supported::yes, DefaultVote::yes);
 REGISTER_FEATURE(ImmediateOfferKilled, Supported::yes, DefaultVote::no);
-REGISTER_FEATURE(DisallowIncoming, Supported::yes, DefaultVote::no);
 // The following amendments have been active for at least two years. Their
 // pre-amendment code has been removed and the identifiers are deprecated.
diff --git a/src/ripple/protocol/impl/PublicKey.cpp b/src/ripple/protocol/impl/PublicKey.cpp index 8ab1bd46cdf..ac86634f1ef 100644 --- a/src/ripple/protocol/impl/PublicKey.cpp +++ b/src/ripple/protocol/impl/PublicKey.cpp @@ -23,8 +23,7 @@ #include #include #include -#include -#include +#include namespace ripple { diff --git a/src/ripple/protocol/impl/SecretKey.cpp b/src/ripple/protocol/impl/SecretKey.cpp index 63661888f48..13aafdb1f10 100644 --- a/src/ripple/protocol/impl/SecretKey.cpp +++ b/src/ripple/protocol/impl/SecretKey.cpp @@ -26,7 +26,7 @@ #include #include #include -#include +#include namespace ripple { diff --git a/src/ripple/rpc/handlers/WalletPropose.cpp b/src/ripple/rpc/handlers/WalletPropose.cpp index 624c5c83c2b..012ed5c1689 100644 --- a/src/ripple/rpc/handlers/WalletPropose.cpp +++ b/src/ripple/rpc/handlers/WalletPropose.cpp @@ -29,7 +29,7 @@ #include #include #include -#include +#include #include namespace ripple { diff --git a/src/ripple/rpc/impl/ServerHandlerImp.cpp b/src/ripple/rpc/impl/ServerHandlerImp.cpp index f269283b83a..cb70fdcab4f 100644 --- a/src/ripple/rpc/impl/ServerHandlerImp.cpp +++ b/src/ripple/rpc/impl/ServerHandlerImp.cpp @@ -30,7 +30,6 @@ #include #include #include -#include #include #include #include @@ -971,29 +970,6 @@ ServerHandlerImp::processRequest( } } } - - // If we're returning an error_code, use that to determine the HTTP status. - int const httpStatus = [&reply]() { - // This feature is enabled with ripplerpc version 3.0 and above. - // Before ripplerpc version 3.0 always return 200. - if (reply.isMember(jss::ripplerpc) && - reply[jss::ripplerpc].isString() && - reply[jss::ripplerpc].asString() >= "3.0") - { - // If there's an error_code, use that to determine the HTTP Status. - if (reply.isMember(jss::error) && - reply[jss::error].isMember(jss::error_code) && - reply[jss::error][jss::error_code].isInt()) - { - int const errCode = reply[jss::error][jss::error_code].asInt(); - return RPC::error_code_http_status( - static_cast(errCode)); - } - } - // Return OK. 
- return 200; - }(); - auto response = to_string(reply); rpc_time_.notify(std::chrono::duration_cast( @@ -1012,7 +988,7 @@ ServerHandlerImp::processRequest( stream << "Reply: " << response.substr(0, maxSize); } - HTTPReply(httpStatus, response, output, rpcJ); + HTTPReply(200, response, output, rpcJ); } //------------------------------------------------------------------------------ diff --git a/src/ripple/server/impl/JSONRPCUtil.cpp b/src/ripple/server/impl/JSONRPCUtil.cpp index 12d12829ca9..f5bb815a959 100644 --- a/src/ripple/server/impl/JSONRPCUtil.cpp +++ b/src/ripple/server/impl/JSONRPCUtil.cpp @@ -61,7 +61,7 @@ HTTPReply( { JLOG(j.trace()) << "HTTP Reply " << nStatus << " " << content; - if (content.empty() && nStatus == 401) + if (nStatus == 401) { output("HTTP/1.0 401 Authorization Required\r\n"); output(getHTTPHeaderTimestamp()); @@ -100,33 +100,18 @@ HTTPReply( case 200: output("HTTP/1.1 200 OK\r\n"); break; - case 202: - output("HTTP/1.1 202 Accepted\r\n"); - break; case 400: output("HTTP/1.1 400 Bad Request\r\n"); break; - case 401: - output("HTTP/1.1 401 Authorization Required\r\n"); - break; case 403: output("HTTP/1.1 403 Forbidden\r\n"); break; case 404: output("HTTP/1.1 404 Not Found\r\n"); break; - case 405: - output("HTTP/1.1 405 Method Not Allowed\r\n"); - break; - case 429: - output("HTTP/1.1 429 Too Many Requests\r\n"); - break; case 500: output("HTTP/1.1 500 Internal Server Error\r\n"); break; - case 501: - output("HTTP/1.1 501 Not Implemented\r\n"); - break; case 503: output("HTTP/1.1 503 Server is overloaded\r\n"); break; diff --git a/src/secp256k1/CMakeLists.txt b/src/secp256k1/CMakeLists.txt deleted file mode 100644 index 6b41b66c8ab..00000000000 --- a/src/secp256k1/CMakeLists.txt +++ /dev/null @@ -1,52 +0,0 @@ -cmake_minimum_required(VERSION 3.11) - -project(secp256k1 - LANGUAGES C -) - -if(PROJECT_NAME STREQUAL CMAKE_PROJECT_NAME) - set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}/output/$/lib") -endif() - -add_library(secp256k1 STATIC - src/secp256k1.c -) -add_library(secp256k1::secp256k1 ALIAS secp256k1) - -include(GNUInstallDirs) - -target_compile_definitions(secp256k1 PRIVATE - USE_NUM_NONE - USE_FIELD_10X26 - USE_FIELD_INV_BUILTIN - USE_SCALAR_8X32 - USE_SCALAR_INV_BUILTIN -) -target_include_directories(secp256k1 - PUBLIC $ - PRIVATE $ -) -target_compile_options(secp256k1 PRIVATE - $<$:-wd4319> - $<$>: - -Wno-deprecated-declarations - -Wno-unused-function - > - $<$:-Wno-nonnull-compare> -) - -install( - TARGETS secp256k1 - EXPORT ${PROJECT_NAME}-exports - ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}" -) -install( - EXPORT ${PROJECT_NAME}-exports - DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}" - FILE ${PROJECT_NAME}-targets.cmake - NAMESPACE ${PROJECT_NAME}:: -) -install( - FILES include/secp256k1.h - DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}" -) diff --git a/src/test/app/Check_test.cpp b/src/test/app/Check_test.cpp index 8f0c0ec46b8..31a2e572e70 100644 --- a/src/test/app/Check_test.cpp +++ b/src/test/app/Check_test.cpp @@ -85,8 +85,6 @@ class dest_tag class Check_test : public beast::unit_test::suite { - FeatureBitset const disallowIncoming{featureDisallowIncoming}; - static uint256 getCheckIndex(AccountID const& account, std::uint32_t uSequence) { @@ -295,100 +293,6 @@ class Check_test : public beast::unit_test::suite BEAST_EXPECT(checksOnAccount(env, bob).size() == bobCount + 7); } - void - testCreateDisallowIncoming(FeatureBitset features) - { - testcase("Create valid with disallow incoming"); - - using namespace test::jtx; - - // 
test flag doesn't set unless amendment enabled - { - Env env{*this, features - disallowIncoming}; - Account const alice{"alice"}; - env.fund(XRP(10000), alice); - env(fset(alice, asfDisallowIncomingCheck)); - env.close(); - auto const sle = env.le(alice); - uint32_t flags = sle->getFlags(); - BEAST_EXPECT(!(flags & lsfDisallowIncomingCheck)); - } - - Account const gw{"gateway"}; - Account const alice{"alice"}; - Account const bob{"bob"}; - IOU const USD{gw["USD"]}; - - Env env{*this, features | disallowIncoming}; - - STAmount const startBalance{XRP(1000).value()}; - env.fund(startBalance, gw, alice, bob); - - /* - * Attempt to create two checks from `from` to `to` and - * require they both result in error/success code `expected` - */ - auto writeTwoChecksDI = [&env, &USD, this]( - Account const& from, - Account const& to, - TER expected) { - std::uint32_t const fromOwnerCount{ownerCount(env, from)}; - std::uint32_t const toOwnerCount{ownerCount(env, to)}; - - std::size_t const fromCkCount{checksOnAccount(env, from).size()}; - std::size_t const toCkCount{checksOnAccount(env, to).size()}; - - env(check::create(from, to, XRP(2000)), ter(expected)); - env.close(); - - env(check::create(from, to, USD(50)), ter(expected)); - env.close(); - - if (expected == tesSUCCESS) - { - BEAST_EXPECT( - checksOnAccount(env, from).size() == fromCkCount + 2); - BEAST_EXPECT(checksOnAccount(env, to).size() == toCkCount + 2); - - env.require(owners(from, fromOwnerCount + 2)); - env.require( - owners(to, to == from ? fromOwnerCount + 2 : toOwnerCount)); - return; - } - - BEAST_EXPECT(checksOnAccount(env, from).size() == fromCkCount); - BEAST_EXPECT(checksOnAccount(env, to).size() == toCkCount); - - env.require(owners(from, fromOwnerCount)); - env.require(owners(to, to == from ? 
fromOwnerCount : toOwnerCount)); - }; - - // enable the DisallowIncoming flag on both bob and alice - env(fset(bob, asfDisallowIncomingCheck)); - env(fset(alice, asfDisallowIncomingCheck)); - env.close(); - - // both alice and bob can't receive checks - writeTwoChecksDI(alice, bob, tecNO_PERMISSION); - writeTwoChecksDI(gw, alice, tecNO_PERMISSION); - - // remove the flag from alice but not from bob - env(fclear(alice, asfDisallowIncomingCheck)); - env.close(); - - // now bob can send alice a cheque but not visa-versa - writeTwoChecksDI(bob, alice, tesSUCCESS); - writeTwoChecksDI(alice, bob, tecNO_PERMISSION); - - // remove bob's flag too - env(fclear(bob, asfDisallowIncomingCheck)); - env.close(); - - // now they can send checks freely - writeTwoChecksDI(bob, alice, tesSUCCESS); - writeTwoChecksDI(alice, bob, tesSUCCESS); - } - void testCreateInvalid(FeatureBitset features) { @@ -2698,7 +2602,6 @@ class Check_test : public beast::unit_test::suite { testEnabled(features); testCreateValid(features); - testCreateDisallowIncoming(features); testCreateInvalid(features); testCashXRP(features); testCashIOU(features); @@ -2718,7 +2621,6 @@ class Check_test : public beast::unit_test::suite using namespace test::jtx; auto const sa = supported_amendments(); testWithFeats(sa - featureCheckCashMakesTrustLine); - testWithFeats(sa - disallowIncoming); testWithFeats(sa); testTrustLineCreation(sa); // Test with featureCheckCashMakesTrustLine diff --git a/src/test/app/NFToken_test.cpp b/src/test/app/NFToken_test.cpp index 1e47887253b..ecaf1bcc13d 100644 --- a/src/test/app/NFToken_test.cpp +++ b/src/test/app/NFToken_test.cpp @@ -29,8 +29,6 @@ namespace ripple { class NFToken_test : public beast::unit_test::suite { - FeatureBitset const disallowIncoming{featureDisallowIncoming}; - // Helper function that returns the owner count of an account root. 
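The Check_test and NFToken_test hunks around this point delete the DisallowIncoming coverage wholesale. The pattern those tests exercised is small: set the account flag, watch incoming object creation fail with tecNO_PERMISSION, clear the flag, watch it succeed. A condensed jtx sketch of that shape follows; it assumes the amendment, flag, and check helpers named in the deleted code (identifiers this very patch removes), and the function itself is illustrative, not part of the patch.

#include <ripple/beast/unit_test.h>
#include <test/jtx.h>
#include <test/jtx/check.h>

// Illustrative only: mirrors the shape of the deleted DisallowIncoming tests.
// Assumed to be compiled inside namespace ripple next to the existing suites.
static void
disallowIncomingSketch(beast::unit_test::suite& suite)
{
    using namespace test::jtx;

    FeatureBitset const disallowIncoming{featureDisallowIncoming};
    Env env{suite, supported_amendments() | disallowIncoming};

    Account const gw{"gateway"};
    Account const alice{"alice"};
    auto const USD = gw["USD"];

    env.fund(XRP(10000), gw, alice);
    env.close();

    // alice refuses incoming checks.
    env(fset(alice, asfDisallowIncomingCheck));
    env.close();

    // The gateway can no longer write alice a check ...
    env(check::create(gw, alice, USD(50)), ter(tecNO_PERMISSION));
    env.close();

    // ... until alice clears the flag again.
    env(fclear(alice, asfDisallowIncomingCheck));
    env.close();

    env(check::create(gw, alice, USD(50)));
    env.close();
}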
static std::uint32_t ownerCount(test::jtx::Env const& env, test::jtx::Account const& acct) @@ -2976,135 +2974,6 @@ class NFToken_test : public beast::unit_test::suite } } - void - testCreateOfferDestinationDisallowIncoming(FeatureBitset features) - { - testcase("Create offer destination disallow incoming"); - - using namespace test::jtx; - - // test flag doesn't set unless amendment enabled - { - Env env{*this, features - disallowIncoming}; - Account const alice{"alice"}; - env.fund(XRP(10000), alice); - env(fset(alice, asfDisallowIncomingNFTOffer)); - env.close(); - auto const sle = env.le(alice); - uint32_t flags = sle->getFlags(); - BEAST_EXPECT(!(flags & lsfDisallowIncomingNFTOffer)); - } - - Env env{*this, features | disallowIncoming}; - - Account const issuer{"issuer"}; - Account const minter{"minter"}; - Account const buyer{"buyer"}; - Account const alice{"alice"}; - - env.fund(XRP(1000), issuer, minter, buyer, alice); - - env(token::setMinter(issuer, minter)); - env.close(); - - uint256 const nftokenID = - token::getNextID(env, issuer, 0, tfTransferable); - env(token::mint(minter, 0), - token::issuer(issuer), - txflags(tfTransferable)); - env.close(); - - // enable flag - env(fset(buyer, asfDisallowIncomingNFTOffer)); - env.close(); - - // a sell offer from the minter to the buyer should be rejected - { - env(token::createOffer(minter, nftokenID, drops(1)), - token::destination(buyer), - txflags(tfSellNFToken), - ter(tecNO_PERMISSION)); - env.close(); - BEAST_EXPECT(ownerCount(env, issuer) == 0); - BEAST_EXPECT(ownerCount(env, minter) == 1); - BEAST_EXPECT(ownerCount(env, buyer) == 0); - } - - // disable the flag - env(fclear(buyer, asfDisallowIncomingNFTOffer)); - env.close(); - - // create offer (allowed now) then cancel - { - uint256 const offerIndex = - keylet::nftoffer(minter, env.seq(minter)).key; - - env(token::createOffer(minter, nftokenID, drops(1)), - token::destination(buyer), - txflags(tfSellNFToken)); - env.close(); - - env(token::cancelOffer(minter, {offerIndex})); - env.close(); - } - - // create offer, enable flag, then cancel - { - uint256 const offerIndex = - keylet::nftoffer(minter, env.seq(minter)).key; - - env(token::createOffer(minter, nftokenID, drops(1)), - token::destination(buyer), - txflags(tfSellNFToken)); - env.close(); - - env(fset(buyer, asfDisallowIncomingNFTOffer)); - env.close(); - - env(token::cancelOffer(minter, {offerIndex})); - env.close(); - - env(fclear(buyer, asfDisallowIncomingNFTOffer)); - env.close(); - } - - // create offer then transfer - { - uint256 const offerIndex = - keylet::nftoffer(minter, env.seq(minter)).key; - - env(token::createOffer(minter, nftokenID, drops(1)), - token::destination(buyer), - txflags(tfSellNFToken)); - env.close(); - - env(token::acceptSellOffer(buyer, offerIndex)); - env.close(); - } - - // buyer now owns the token - - // enable flag again - env(fset(buyer, asfDisallowIncomingNFTOffer)); - env.close(); - - // a random offer to buy the token - { - env(token::createOffer(alice, nftokenID, drops(1)), - token::owner(buyer), - ter(tecNO_PERMISSION)); - env.close(); - } - - // minter offer to buy the token - { - env(token::createOffer(minter, nftokenID, drops(1)), - token::owner(buyer), - ter(tecNO_PERMISSION)); - env.close(); - } - } - void testCreateOfferExpiration(FeatureBitset features) { @@ -5162,7 +5031,6 @@ class NFToken_test : public beast::unit_test::suite testMintTaxon(features); testMintURI(features); testCreateOfferDestination(features); - testCreateOfferDestinationDisallowIncoming(features); 
testCreateOfferExpiration(features); testCancelOffers(features); testCancelTooManyOffers(features); @@ -5184,7 +5052,6 @@ class NFToken_test : public beast::unit_test::suite FeatureBitset const fixNFTDir{fixNFTokenDirV1}; testWithFeats(all - fixNFTDir); - testWithFeats(all - disallowIncoming); testWithFeats(all); } }; diff --git a/src/test/app/PayChan_test.cpp b/src/test/app/PayChan_test.cpp index 2a8ea360e6c..cf600a9fc87 100644 --- a/src/test/app/PayChan_test.cpp +++ b/src/test/app/PayChan_test.cpp @@ -32,8 +32,6 @@ namespace ripple { namespace test { struct PayChan_test : public beast::unit_test::suite { - FeatureBitset const disallowIncoming{featureDisallowIncoming}; - static uint256 channel( jtx::Account const& account, @@ -177,12 +175,12 @@ struct PayChan_test : public beast::unit_test::suite } void - testSimple(FeatureBitset features) + testSimple() { testcase("simple"); using namespace jtx; using namespace std::literals::chrono_literals; - Env env{*this, features}; + Env env(*this); auto const alice = Account("alice"); auto const bob = Account("bob"); auto USDA = alice["USD"]; @@ -352,91 +350,7 @@ struct PayChan_test : public beast::unit_test::suite } void - testDisallowIncoming(FeatureBitset features) - { - testcase("Disallow Incoming Flag"); - using namespace jtx; - - // test flag doesn't set unless amendment enabled - { - Env env{*this, features - disallowIncoming}; - Account const alice{"alice"}; - env.fund(XRP(10000), alice); - env(fset(alice, asfDisallowIncomingPayChan)); - env.close(); - auto const sle = env.le(alice); - uint32_t flags = sle->getFlags(); - BEAST_EXPECT(!(flags & lsfDisallowIncomingPayChan)); - } - - using namespace std::literals::chrono_literals; - Env env{*this, features | disallowIncoming}; - auto const alice = Account("alice"); - auto const bob = Account("bob"); - auto const cho = Account("cho"); - env.fund(XRP(10000), alice, bob, cho); - auto const pk = alice.pk(); - auto const settleDelay = 100s; - - // set flag on bob only - env(fset(bob, asfDisallowIncomingPayChan)); - env.close(); - - // channel creation from alice to bob is disallowed - { - auto const chan = channel(alice, bob, env.seq(alice)); - env(create(alice, bob, XRP(1000), settleDelay, pk), - ter(tecNO_PERMISSION)); - BEAST_EXPECT(!channelExists(*env.current(), chan)); - } - - // set flag on alice also - env(fset(alice, asfDisallowIncomingPayChan)); - env.close(); - - // channel creation from bob to alice is now disallowed - { - auto const chan = channel(bob, alice, env.seq(bob)); - env(create(bob, alice, XRP(1000), settleDelay, pk), - ter(tecNO_PERMISSION)); - BEAST_EXPECT(!channelExists(*env.current(), chan)); - } - - // remove flag from bob - env(fclear(bob, asfDisallowIncomingPayChan)); - env.close(); - - // now the channel between alice and bob can exist - { - auto const chan = channel(alice, bob, env.seq(alice)); - env(create(alice, bob, XRP(1000), settleDelay, pk), - ter(tesSUCCESS)); - BEAST_EXPECT(channelExists(*env.current(), chan)); - } - - // a channel from cho to alice isn't allowed - { - auto const chan = channel(cho, alice, env.seq(cho)); - env(create(cho, alice, XRP(1000), settleDelay, pk), - ter(tecNO_PERMISSION)); - BEAST_EXPECT(!channelExists(*env.current(), chan)); - } - - // remove flag from alice - env(fclear(alice, asfDisallowIncomingPayChan)); - env.close(); - - // now a channel from cho to alice is allowed - { - auto const chan = channel(cho, alice, env.seq(cho)); - env(create(cho, alice, XRP(1000), settleDelay, pk), - ter(tesSUCCESS)); - 
BEAST_EXPECT(channelExists(*env.current(), chan)); - } - } - - void - testCancelAfter(FeatureBitset features) + testCancelAfter() { testcase("cancel after"); using namespace jtx; @@ -446,7 +360,7 @@ struct PayChan_test : public beast::unit_test::suite auto const carol = Account("carol"); { // If dst claims after cancel after, channel closes - Env env{*this, features}; + Env env(*this); env.fund(XRP(10000), alice, bob); auto const pk = alice.pk(); auto const settleDelay = 100s; @@ -478,7 +392,7 @@ struct PayChan_test : public beast::unit_test::suite } { // Third party can close after cancel after - Env env{*this, features}; + Env env(*this); env.fund(XRP(10000), alice, bob, carol); auto const pk = alice.pk(); auto const settleDelay = 100s; @@ -501,12 +415,12 @@ struct PayChan_test : public beast::unit_test::suite } void - testExpiration(FeatureBitset features) + testExpiration() { testcase("expiration"); using namespace jtx; using namespace std::literals::chrono_literals; - Env env{*this, features}; + Env env(*this); auto const alice = Account("alice"); auto const bob = Account("bob"); auto const carol = Account("carol"); @@ -567,12 +481,12 @@ struct PayChan_test : public beast::unit_test::suite } void - testSettleDelay(FeatureBitset features) + testSettleDelay() { testcase("settle delay"); using namespace jtx; using namespace std::literals::chrono_literals; - Env env{*this, features}; + Env env(*this); auto const alice = Account("alice"); auto const bob = Account("bob"); env.fund(XRP(10000), alice, bob); @@ -627,12 +541,12 @@ struct PayChan_test : public beast::unit_test::suite } void - testCloseDry(FeatureBitset features) + testCloseDry() { testcase("close dry"); using namespace jtx; using namespace std::literals::chrono_literals; - Env env{*this, features}; + Env env(*this); auto const alice = Account("alice"); auto const bob = Account("bob"); env.fund(XRP(10000), alice, bob); @@ -661,13 +575,13 @@ struct PayChan_test : public beast::unit_test::suite } void - testDefaultAmount(FeatureBitset features) + testDefaultAmount() { // auth amount defaults to balance if not present testcase("default amount"); using namespace jtx; using namespace std::literals::chrono_literals; - Env env{*this, features}; + Env env(*this); auto const alice = Account("alice"); auto const bob = Account("bob"); env.fund(XRP(10000), alice, bob); @@ -716,7 +630,7 @@ struct PayChan_test : public beast::unit_test::suite } void - testDisallowXRP(FeatureBitset features) + testDisallowXRP() { // auth amount defaults to balance if not present testcase("Disallow XRP"); @@ -727,7 +641,7 @@ struct PayChan_test : public beast::unit_test::suite auto const bob = Account("bob"); { // Create a channel where dst disallows XRP - Env env(*this, features - featureDepositAuth); + Env env(*this, supported_amendments() - featureDepositAuth); env.fund(XRP(10000), alice, bob); env(fset(bob, asfDisallowXRP)); auto const chan = channel(alice, bob, env.seq(alice)); @@ -738,7 +652,7 @@ struct PayChan_test : public beast::unit_test::suite { // Create a channel where dst disallows XRP. Ignore that flag, // since it's just advisory. 
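A note on the channel(alice, bob, env.seq(alice)) calls that run through these PayChan_test hunks: the test's channel() helper derives the ledger key of the payment channel from the source account, the destination account, and the sequence number of the PaymentChannelCreate transaction. A minimal sketch of that derivation follows; the helper name channelId is illustrative.

#include <ripple/protocol/AccountID.h>
#include <ripple/protocol/Indexes.h>
#include <cstdint>

// Derive the PayChannel ledger key the way the PayChan_test helper does:
// keylet::payChan hashes (source, destination, create-tx sequence).
static ripple::uint256
channelId(
    ripple::AccountID const& src,
    ripple::AccountID const& dst,
    std::uint32_t seq)
{
    return ripple::keylet::payChan(src, dst, seq).key;
}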
- Env env{*this, features}; + Env env(*this); env.fund(XRP(10000), alice, bob); env(fset(bob, asfDisallowXRP)); auto const chan = channel(alice, bob, env.seq(alice)); @@ -749,7 +663,7 @@ struct PayChan_test : public beast::unit_test::suite { // Claim to a channel where dst disallows XRP // (channel is created before disallow xrp is set) - Env env(*this, features - featureDepositAuth); + Env env(*this, supported_amendments() - featureDepositAuth); env.fund(XRP(10000), alice, bob); auto const chan = channel(alice, bob, env.seq(alice)); env(create(alice, bob, XRP(1000), 3600s, alice.pk())); @@ -763,7 +677,7 @@ struct PayChan_test : public beast::unit_test::suite // Claim to a channel where dst disallows XRP (channel is // created before disallow xrp is set). Ignore that flag // since it is just advisory. - Env env{*this, features}; + Env env(*this); env.fund(XRP(10000), alice, bob); auto const chan = channel(alice, bob, env.seq(alice)); env(create(alice, bob, XRP(1000), 3600s, alice.pk())); @@ -776,14 +690,14 @@ struct PayChan_test : public beast::unit_test::suite } void - testDstTag(FeatureBitset features) + testDstTag() { // auth amount defaults to balance if not present testcase("Dst Tag"); using namespace jtx; using namespace std::literals::chrono_literals; // Create a channel where dst disallows XRP - Env env{*this, features}; + Env env(*this); auto const alice = Account("alice"); auto const bob = Account("bob"); env.fund(XRP(10000), alice, bob); @@ -806,7 +720,7 @@ struct PayChan_test : public beast::unit_test::suite } void - testDepositAuth(FeatureBitset features) + testDepositAuth() { testcase("Deposit Authorization"); using namespace jtx; @@ -817,7 +731,7 @@ struct PayChan_test : public beast::unit_test::suite auto const carol = Account("carol"); auto USDA = alice["USD"]; { - Env env{*this, features}; + Env env(*this); env.fund(XRP(10000), alice, bob, carol); env(fset(bob, asfDepositAuth)); @@ -930,13 +844,13 @@ struct PayChan_test : public beast::unit_test::suite } void - testMultiple(FeatureBitset features) + testMultiple() { // auth amount defaults to balance if not present testcase("Multiple channels to the same account"); using namespace jtx; using namespace std::literals::chrono_literals; - Env env{*this, features}; + Env env(*this); auto const alice = Account("alice"); auto const bob = Account("bob"); env.fund(XRP(10000), alice, bob); @@ -953,13 +867,13 @@ struct PayChan_test : public beast::unit_test::suite } void - testAccountChannelsRPC(FeatureBitset features) + testAccountChannelsRPC() { testcase("AccountChannels RPC"); using namespace jtx; using namespace std::literals::chrono_literals; - Env env{*this, features}; + Env env(*this); auto const alice = Account("alice"); auto const bob = Account("bob"); auto const charlie = Account("charlie", KeyType::ed25519); @@ -1008,7 +922,7 @@ struct PayChan_test : public beast::unit_test::suite } void - testAccountChannelsRPCMarkers(FeatureBitset features) + testAccountChannelsRPCMarkers() { testcase("Account channels RPC markers"); @@ -1027,7 +941,7 @@ struct PayChan_test : public beast::unit_test::suite return r; }(); - Env env{*this, features}; + Env env(*this); env.fund(XRP(10000), alice); for (auto const& a : bobs) { @@ -1124,7 +1038,7 @@ struct PayChan_test : public beast::unit_test::suite } void - testAccountChannelsRPCSenderOnly(FeatureBitset features) + testAccountChannelsRPCSenderOnly() { // Check that the account_channels command only returns channels owned // by the account @@ -1135,7 +1049,7 @@ struct PayChan_test : public 
beast::unit_test::suite auto const alice = Account("alice"); auto const bob = Account("bob"); - Env env{*this, features}; + Env env(*this); env.fund(XRP(10000), alice, bob); // Create a channel from alice to bob and from bob to alice @@ -1161,12 +1075,12 @@ struct PayChan_test : public beast::unit_test::suite } void - testAuthVerifyRPC(FeatureBitset features) + testAuthVerifyRPC() { testcase("PayChan Auth/Verify RPC"); using namespace jtx; using namespace std::literals::chrono_literals; - Env env{*this, features}; + Env env(*this); auto const alice = Account("alice"); auto const bob = Account("bob"); auto const charlie = Account("charlie", KeyType::ed25519); @@ -1501,12 +1415,12 @@ struct PayChan_test : public beast::unit_test::suite } void - testOptionalFields(FeatureBitset features) + testOptionalFields() { testcase("Optional Fields"); using namespace jtx; using namespace std::literals::chrono_literals; - Env env{*this, features}; + Env env(*this); auto const alice = Account("alice"); auto const bob = Account("bob"); auto const carol = Account("carol"); @@ -1552,12 +1466,12 @@ struct PayChan_test : public beast::unit_test::suite } void - testMalformedPK(FeatureBitset features) + testMalformedPK() { testcase("malformed pk"); using namespace jtx; using namespace std::literals::chrono_literals; - Env env{*this, features}; + Env env(*this); auto const alice = Account("alice"); auto const bob = Account("bob"); auto USDA = alice["USD"]; @@ -1622,7 +1536,7 @@ struct PayChan_test : public beast::unit_test::suite } void - testMetaAndOwnership(FeatureBitset features) + testMetaAndOwnership() { testcase("Metadata & Ownership"); @@ -1651,7 +1565,8 @@ struct PayChan_test : public beast::unit_test::suite { // Test without adding the paychan to the recipient's owner // directory - Env env(*this, features - fixPayChanRecipientOwnerDir); + Env env( + *this, supported_amendments() - fixPayChanRecipientOwnerDir); env.fund(XRP(10000), alice, bob); env(create(alice, bob, XRP(1000), settleDelay, pk)); env.close(); @@ -1672,7 +1587,7 @@ struct PayChan_test : public beast::unit_test::suite { // Test with adding the paychan to the recipient's owner directory - Env env{*this, features}; + Env env(*this); env.fund(XRP(10000), alice, bob); env(create(alice, bob, XRP(1000), settleDelay, pk)); env.close(); @@ -1694,7 +1609,8 @@ struct PayChan_test : public beast::unit_test::suite { // Test removing paychans created before adding to the recipient's // owner directory - Env env(*this, features - fixPayChanRecipientOwnerDir); + Env env( + *this, supported_amendments() - fixPayChanRecipientOwnerDir); env.fund(XRP(10000), alice, bob); // create the channel before the amendment activates env(create(alice, bob, XRP(1000), settleDelay, pk)); @@ -1728,7 +1644,7 @@ struct PayChan_test : public beast::unit_test::suite } void - testAccountDelete(FeatureBitset features) + testAccountDelete() { testcase("Account Delete"); using namespace test::jtx; @@ -1762,8 +1678,8 @@ struct PayChan_test : public beast::unit_test::suite for (bool const withOwnerDirFix : {false, true}) { auto const amd = withOwnerDirFix - ? features - : features - fixPayChanRecipientOwnerDir; + ? 
supported_amendments() + : supported_amendments() - fixPayChanRecipientOwnerDir; Env env{*this, amd}; env.fund(XRP(10000), alice, bob, carol); env.close(); @@ -1855,7 +1771,8 @@ struct PayChan_test : public beast::unit_test::suite { // test resurrected account - Env env{*this, features - fixPayChanRecipientOwnerDir}; + Env env{ + *this, supported_amendments() - fixPayChanRecipientOwnerDir}; env.fund(XRP(10000), alice, bob, carol); env.close(); auto const feeDrops = env.current()->fees().base; @@ -1961,12 +1878,12 @@ struct PayChan_test : public beast::unit_test::suite } void - testUsingTickets(FeatureBitset features) + testUsingTickets() { testcase("using tickets"); using namespace jtx; using namespace std::literals::chrono_literals; - Env env{*this, features}; + Env env(*this); auto const alice = Account("alice"); auto const bob = Account("bob"); auto USDA = alice["USD"]; @@ -2122,39 +2039,28 @@ struct PayChan_test : public beast::unit_test::suite BEAST_EXPECT(env.seq(bob) == bobSeq); } - void - testWithFeats(FeatureBitset features) - { - testSimple(features); - testDisallowIncoming(features); - testCancelAfter(features); - testSettleDelay(features); - testExpiration(features); - testCloseDry(features); - testDefaultAmount(features); - testDisallowXRP(features); - testDstTag(features); - testDepositAuth(features); - testMultiple(features); - testAccountChannelsRPC(features); - testAccountChannelsRPCMarkers(features); - testAccountChannelsRPCSenderOnly(features); - testAuthVerifyRPC(features); - testOptionalFields(features); - testMalformedPK(features); - testMetaAndOwnership(features); - testAccountDelete(features); - testUsingTickets(features); - } - -public: void run() override { - using namespace test::jtx; - FeatureBitset const all{supported_amendments()}; - testWithFeats(all - disallowIncoming); - testWithFeats(all); + testSimple(); + testCancelAfter(); + testSettleDelay(); + testExpiration(); + testCloseDry(); + testDefaultAmount(); + testDisallowXRP(); + testDstTag(); + testDepositAuth(); + testMultiple(); + testAccountChannelsRPC(); + testAccountChannelsRPCMarkers(); + testAccountChannelsRPCSenderOnly(); + testAuthVerifyRPC(); + testOptionalFields(); + testMalformedPK(); + testMetaAndOwnership(); + testAccountDelete(); + testUsingTickets(); } }; diff --git a/src/test/app/SetTrust_test.cpp b/src/test/app/SetTrust_test.cpp index fce9c4295c2..45a9e5c767e 100644 --- a/src/test/app/SetTrust_test.cpp +++ b/src/test/app/SetTrust_test.cpp @@ -26,14 +26,9 @@ namespace test { class SetTrust_test : public beast::unit_test::suite { - FeatureBitset const disallowIncoming{featureDisallowIncoming}; - public: void - testFreeTrustlines( - FeatureBitset features, - bool thirdLineCreatesLE, - bool createOnHighAcct) + testFreeTrustlines(bool thirdLineCreatesLE, bool createOnHighAcct) { if (thirdLineCreatesLE) testcase("Allow two free trustlines"); @@ -41,7 +36,7 @@ class SetTrust_test : public beast::unit_test::suite testcase("Dynamic reserve for trustline"); using namespace jtx; - Env env(*this, features); + Env env(*this); auto const gwA = Account{"gwA"}; auto const gwB = Account{"gwB"}; @@ -112,14 +107,14 @@ class SetTrust_test : public beast::unit_test::suite } void - testTicketSetTrust(FeatureBitset features) + testTicketSetTrust() { testcase("SetTrust using a ticket"); using namespace jtx; // Verify that TrustSet transactions can use tickets. 
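The testTicketSetTrust body continues below. As a reminder of the ticket pattern it verifies (create a ticket, then submit TrustSet against that ticket), here is a hedged jtx sketch; the helper name and the assumption that ticket::use() pins the Sequence field to 0 are the sketch's, not the patch's.

#include <ripple/beast/unit_test.h>
#include <test/jtx.h>
#include <test/jtx/ticket.h>
#include <cstdint>

// Illustrative only: the TrustSet-with-a-ticket shape checked above.
static void
trustSetWithTicketSketch(beast::unit_test::suite& suite)
{
    using namespace test::jtx;

    Env env{suite};
    Account const gw{"gateway"};
    Account const alice{"alice"};
    auto const USD = gw["USD"];

    env.fund(XRP(10000), gw, alice);
    env.close();

    // The ticket created by the next transaction carries env.seq(alice) + 1.
    std::uint32_t const ticketSeq{env.seq(alice) + 1};
    env(ticket::create(alice, 1));
    env.close();

    // Spend the ticket on a TrustSet; ticket::use supplies TicketSequence
    // (and, by assumption here, pins Sequence to 0).
    env(trust(alice, USD(1000)), ticket::use(ticketSeq));
    env.close();

    // The ticket is consumed and the trust line exists: one owned object.
    env.require(owners(alice, 1));
}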
- Env env{*this, features}; + Env env{*this}; auto const gw = Account{"gateway"}; auto const alice = Account{"alice"}; auto const USD = gw["USD"]; @@ -157,12 +152,12 @@ class SetTrust_test : public beast::unit_test::suite } void - testMalformedTransaction(FeatureBitset features) + testMalformedTransaction() { testcase("SetTrust checks for malformed transactions"); using namespace jtx; - Env env{*this, features}; + Env env{*this}; auto const gw = Account{"gateway"}; auto const alice = Account{"alice"}; @@ -204,17 +199,14 @@ class SetTrust_test : public beast::unit_test::suite } void - testModifyQualityOfTrustline( - FeatureBitset features, - bool createQuality, - bool createOnHighAcct) + testModifyQualityOfTrustline(bool createQuality, bool createOnHighAcct) { testcase << "SetTrust " << (createQuality ? "creates" : "removes") << " quality of trustline for " << (createOnHighAcct ? "high" : "low") << " account"; using namespace jtx; - Env env{*this, features}; + Env env{*this}; auto const alice = Account{"alice"}; auto const bob = Account{"bob"}; @@ -257,119 +249,20 @@ class SetTrust_test : public beast::unit_test::suite } void - testDisallowIncoming(FeatureBitset features) - { - testcase("Create trustline with disallow incoming"); - - using namespace test::jtx; - - // test flag doesn't set unless amendment enabled - { - Env env{*this, features - disallowIncoming}; - Account const alice{"alice"}; - env.fund(XRP(10000), alice); - env(fset(alice, asfDisallowIncomingTrustline)); - env.close(); - auto const sle = env.le(alice); - uint32_t flags = sle->getFlags(); - BEAST_EXPECT(!(flags & lsfDisallowIncomingTrustline)); - } - - Env env{*this, features | disallowIncoming}; - - auto const gw = Account{"gateway"}; - auto const alice = Account{"alice"}; - auto const bob = Account{"bob"}; - auto const USD = gw["USD"]; - - env.fund(XRP(10000), gw, alice, bob); - env.close(); - - // Set flag on gateway - env(fset(gw, asfDisallowIncomingTrustline)); - env.close(); - - // Create a trustline which will fail - env(trust(alice, USD(1000)), ter(tecNO_PERMISSION)); - env.close(); - - // Unset the flag - env(fclear(gw, asfDisallowIncomingTrustline)); - env.close(); - - // Create a trustline which will now succeed - env(trust(alice, USD(1000))); - env.close(); - - // Now the payment succeeds. 
- env(pay(gw, alice, USD(200))); - env.close(); - - // Set flag on gateway again - env(fset(gw, asfDisallowIncomingTrustline)); - env.close(); - - // Destroy the balance by sending it back - env(pay(gw, alice, USD(200))); - env.close(); - - // The trustline still exists in default state - // So a further payment should work - env(pay(gw, alice, USD(200))); - env.close(); - - // Also set the flag on bob - env(fset(bob, asfDisallowIncomingTrustline)); - env.close(); - - // But now bob can't open a trustline because he didn't already have one - env(trust(bob, USD(1000)), ter(tecNO_PERMISSION)); - env.close(); - - // The gateway also can't open this trustline because bob has the flag - // set - env(trust(gw, bob["USD"](1000)), ter(tecNO_PERMISSION)); - env.close(); - - // Unset the flag only on the gateway - env(fclear(gw, asfDisallowIncomingTrustline)); - env.close(); - - // Now bob can open a trustline - env(trust(bob, USD(1000))); - env.close(); - - // And the gateway can send bob a balance - env(pay(gw, bob, USD(200))); - env.close(); - } - - void - testWithFeats(FeatureBitset features) + run() override { - testFreeTrustlines(features, true, false); - testFreeTrustlines(features, false, true); - testFreeTrustlines(features, false, true); + testFreeTrustlines(true, false); + testFreeTrustlines(false, true); + testFreeTrustlines(false, true); // true, true case doesn't matter since creating a trustline ledger // entry requires reserve from the creator // independent of hi/low account ids for endpoints - testTicketSetTrust(features); - testMalformedTransaction(features); - testModifyQualityOfTrustline(features, false, false); - testModifyQualityOfTrustline(features, false, true); - testModifyQualityOfTrustline(features, true, false); - testModifyQualityOfTrustline(features, true, true); - testDisallowIncoming(features); - } - -public: - void - run() override - { - using namespace test::jtx; - auto const sa = supported_amendments(); - testWithFeats(sa - disallowIncoming); - testWithFeats(sa); + testTicketSetTrust(); + testMalformedTransaction(); + testModifyQualityOfTrustline(false, false); + testModifyQualityOfTrustline(false, true); + testModifyQualityOfTrustline(true, false); + testModifyQualityOfTrustline(true, true); } }; BEAST_DEFINE_TESTSUITE(SetTrust, app, ripple); diff --git a/src/test/core/SociDB_test.cpp b/src/test/core/SociDB_test.cpp index c0365ad9ae7..875af9aa053 100644 --- a/src/test/core/SociDB_test.cpp +++ b/src/test/core/SociDB_test.cpp @@ -226,15 +226,13 @@ class SociDB_test final : public TestSuite // SOCI requires boost::optional (not std::optional) as // parameters. 
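The SociDB hunk above swaps the plain uint32_t workaround back to boost::optional. For context, the SOCI idiom involved looks roughly like the sketch below, written against the SQLite backend; the table and column names are made up, and the boost-optional bridge header and the ":memory:" connect string are assumptions.

#include <boost/optional.hpp>
#include <soci/boost-optional.h>
#include <soci/soci.h>
#include <soci/sqlite3/soci-sqlite3.h>

// Illustrative only: select into boost::optional so NULL columns come back
// disengaged instead of as a sentinel value.
static void
selectIntoOptionalSketch()
{
    // ":memory:" is assumed to be an acceptable connect string for the
    // SQLite backend; a file name works as well.
    soci::session s(soci::sqlite3, ":memory:");
    s << "CREATE TABLE t (i INTEGER, u INTEGER)";
    s << "INSERT INTO t (i, u) VALUES (7, NULL)";

    boost::optional<int> i;
    boost::optional<int> u;
    s << "SELECT i, u FROM t", soci::into(i), soci::into(u);

    // i is engaged and holds 7; u stays disengaged because the column is NULL.
}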
boost::optional ig; - // Known bug: https://github.com/SOCI/soci/issues/926 - // boost::optional uig; - uint32_t uig = 0; + boost::optional uig; boost::optional big; boost::optional ubig; s << "SELECT I, UI, BI, UBI from STT;", soci::into(ig), soci::into(uig), soci::into(big), soci::into(ubig); BEAST_EXPECT( - *ig == id[0] && uig == uid[0] && *big == bid[0] && + *ig == id[0] && *uig == uid[0] && *big == bid[0] && *ubig == ubid[0]); } catch (std::exception&) @@ -359,13 +357,18 @@ class SociDB_test final : public TestSuite bfs::remove(dbPath); } void - run() override + testSQLite() { testSQLiteFileNames(); testSQLiteSession(); testSQLiteSelect(); testSQLiteDeleteWithSubselect(); } + void + run() override + { + testSQLite(); + } }; BEAST_DEFINE_TESTSUITE(SociDB, core, ripple); diff --git a/src/test/protocol/Memo_test.cpp b/src/test/protocol/Memo_test.cpp deleted file mode 100644 index b39482e42d0..00000000000 --- a/src/test/protocol/Memo_test.cpp +++ /dev/null @@ -1,123 +0,0 @@ -//------------------------------------------------------------------------------ -/* - This file is part of rippled: https://github.com/ripple/rippled - Copyright (c) 2022 Ripple Labs Inc. - - Permission to use, copy, modify, and/or distribute this software for any - purpose with or without fee is hereby granted, provided that the above - copyright notice and this permission notice appear in all copies. - - THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -*/ -//============================================================================== - -#include -#include -#include - -namespace ripple { - -class Memo_test : public beast::unit_test::suite -{ -public: - void - testMemos() - { - testcase("Test memos"); - - using namespace test::jtx; - Account alice{"alice"}; - - Env env(*this); - env.fund(XRP(10000), alice); - env.close(); - - // Lambda that returns a valid JTx with a memo that we can hack up. - // This is the basis for building tests of invalid states. - auto makeJtxWithMemo = [&env, &alice]() { - JTx example = noop(alice); - memo const exampleMemo{"tic", "tac", "toe"}; - exampleMemo(env, example); - return example; - }; - - // A valid memo. - env(makeJtxWithMemo()); - env.close(); - - { - // Make sure that too big a memo is flagged as invalid. - JTx memoSize = makeJtxWithMemo(); - memoSize.jv[sfMemos.jsonName][0u][sfMemo.jsonName] - [sfMemoData.jsonName] = std::string(2020, '0'); - env(memoSize, ter(temINVALID)); - - // This memo is just barely small enough. - memoSize.jv[sfMemos.jsonName][0u][sfMemo.jsonName] - [sfMemoData.jsonName] = std::string(2018, '1'); - env(memoSize); - } - { - // Put a non-Memo in the Memos array. - JTx memoNonMemo = noop(alice); - auto& jv = memoNonMemo.jv; - auto& ma = jv[sfMemos.jsonName]; - auto& mi = ma[ma.size()]; - auto& m = mi[sfCreatedNode.jsonName]; // CreatedNode in Memos - m[sfMemoData.jsonName] = "3030303030"; - - env(memoNonMemo, ter(temINVALID)); - } - { - // Put an invalid field in a Memo object. 
-            JTx memoExtra = makeJtxWithMemo();
-            memoExtra
-                .jv[sfMemos.jsonName][0u][sfMemo.jsonName][sfFlags.jsonName] =
-                13;
-            env(memoExtra, ter(temINVALID));
-        }
-        {
-            // Put a character that is not allowed in a URL in a MemoType field.
-            JTx memoBadChar = makeJtxWithMemo();
-            memoBadChar.jv[sfMemos.jsonName][0u][sfMemo.jsonName]
-                          [sfMemoType.jsonName] =
-                strHex(std::string_view("ONE