diff --git a/configure.ac b/configure.ac index b94113f5ab..0456cd79ab 100644 --- a/configure.ac +++ b/configure.ac @@ -1,8 +1,8 @@ dnl require autoconf 2.60 (AS_ECHO/AS_ECHO_N) AC_PREREQ([2.60]) -define(_CLIENT_VERSION_MAJOR, 3) -define(_CLIENT_VERSION_MINOR, 3) -define(_CLIENT_VERSION_REVISION, 2) +define(_CLIENT_VERSION_MAJOR, 4) +define(_CLIENT_VERSION_MINOR, 0) +define(_CLIENT_VERSION_REVISION, 0) define(_CLIENT_VERSION_BUILD, 0) define(_CLIENT_VERSION_IS_RELEASE, true) define(_COPYRIGHT_YEAR, 2019) diff --git a/contrib/devtools/check-doc.py b/contrib/devtools/check-doc.py index 55a0dcc9fe..70d668ecb3 100755 --- a/contrib/devtools/check-doc.py +++ b/contrib/devtools/check-doc.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/contrib/devtools/check-rpc-mappings.py b/contrib/devtools/check-rpc-mappings.py index 9e088bdf28..820e07458b 100755 --- a/contrib/devtools/check-rpc-mappings.py +++ b/contrib/devtools/check-rpc-mappings.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Check RPC argument consistency.""" diff --git a/contrib/devtools/copyright_header.py b/contrib/devtools/copyright_header.py index 3ad4cd6248..51e57c6961 100755 --- a/contrib/devtools/copyright_header.py +++ b/contrib/devtools/copyright_header.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. @@ -413,24 +413,24 @@ def exec_update_header_year(base_directory): changed in a year more recent than is listed. For example: // Copyright (c) - The Bitcoin Core developers -// Copyright (c) 2017-2019 The Raven Core developers +// Copyright (c) 2017-2020 The Raven Core developers will be updated to: // Copyright (c) - The Bitcoin Core developers -// Copyright (c) 2017-2019 The Raven Core developers +// Copyright (c) 2017-2020 The Raven Core developers where is obtained from the 'git log' history. This subcommand also handles copyright headers that have only a single year. In those cases: // Copyright (c) The Bitcoin Core developers -// Copyright (c) 2017-2019 The Raven Core developers +// Copyright (c) 2017-2020 The Raven Core developers will be updated to: // Copyright (c) - The Bitcoin Core developers -// Copyright (c) 2017-2019 The Raven Core developers +// Copyright (c) 2017-2020 The Raven Core developers where the update is appropriate. @@ -464,7 +464,7 @@ def get_header_lines(header, start_year, end_year): CPP_HEADER = ''' // Copyright (c) %s The Bitcoin Core developers -// Copyright (c) 2017-2019 The Raven Core developers +// Copyright (c) 2017-2020 The Raven Core developers // Distributed under the MIT software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. 
''' @@ -474,7 +474,7 @@ def get_cpp_header_lines_to_insert(start_year, end_year): PYTHON_HEADER = ''' # Copyright (c) %s The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. ''' diff --git a/contrib/devtools/optimize-pngs.py b/contrib/devtools/optimize-pngs.py index f5b0d45fe6..2ad34c6083 100755 --- a/contrib/devtools/optimize-pngs.py +++ b/contrib/devtools/optimize-pngs.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. ''' diff --git a/contrib/devtools/security-check.py b/contrib/devtools/security-check.py index b673cb26ed..2c64007ec0 100755 --- a/contrib/devtools/security-check.py +++ b/contrib/devtools/security-check.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
''' diff --git a/contrib/devtools/test-security-check.py b/contrib/devtools/test-security-check.py index b930c6ca86..8381c0b9a2 100755 --- a/contrib/devtools/test-security-check.py +++ b/contrib/devtools/test-security-check.py @@ -1,6 +1,6 @@ #!/usr/bin/env python2 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. ''' diff --git a/contrib/linearize/linearize-data.py b/contrib/linearize/linearize-data.py index 02facde42c..695024dbcd 100755 --- a/contrib/linearize/linearize-data.py +++ b/contrib/linearize/linearize-data.py @@ -3,7 +3,7 @@ # linearize-data.py: Construct a linear, no-fork version of the chain. # # Copyright (c) 2013-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # diff --git a/contrib/linearize/linearize-hashes.py b/contrib/linearize/linearize-hashes.py index f96c49a4d3..9a08d808b8 100755 --- a/contrib/linearize/linearize-hashes.py +++ b/contrib/linearize/linearize-hashes.py @@ -3,7 +3,7 @@ # linearize-hashes.py: List blocks in a linear, no-fork version of the chain. # # Copyright (c) 2013-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
# diff --git a/contrib/macdeploy/custom_dsstore.py b/contrib/macdeploy/custom_dsstore.py index a377c8239c..003322b632 100755 --- a/contrib/macdeploy/custom_dsstore.py +++ b/contrib/macdeploy/custom_dsstore.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # Copyright (c) 2013-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. from __future__ import division,print_function,unicode_literals diff --git a/contrib/seeds/makeseeds.py b/contrib/seeds/makeseeds.py index b01b176e03..4abc6e6ec0 100755 --- a/contrib/seeds/makeseeds.py +++ b/contrib/seeds/makeseeds.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2013-2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # diff --git a/contrib/testgen/base58.py b/contrib/testgen/base58.py index 1d81fc13f5..891f62d6fb 100644 --- a/contrib/testgen/base58.py +++ b/contrib/testgen/base58.py @@ -1,5 +1,5 @@ # Copyright (c) 2012-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
''' diff --git a/contrib/testgen/gen_base58_test_vectors.py b/contrib/testgen/gen_base58_test_vectors.py index 4b7ff1e846..589bad0237 100755 --- a/contrib/testgen/gen_base58_test_vectors.py +++ b/contrib/testgen/gen_base58_test_vectors.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # Copyright (c) 2012-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. ''' diff --git a/contrib/zmq/zmq_sub.py b/contrib/zmq/zmq_sub.py index 999ff20cc9..d2929a6712 100755 --- a/contrib/zmq/zmq_sub.py +++ b/contrib/zmq/zmq_sub.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/contrib/zmq/zmq_sub3.4.py b/contrib/zmq/zmq_sub3.4.py index e446961c1d..db8581f96b 100755 --- a/contrib/zmq/zmq_sub3.4.py +++ b/contrib/zmq/zmq_sub3.4.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/contrib/zmq/zmq_test.py b/contrib/zmq/zmq_test.py index 2956193e2b..b54c2c0808 100644 --- a/contrib/zmq/zmq_test.py +++ b/contrib/zmq/zmq_test.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/depends/packages/qt.mk b/depends/packages/qt.mk index 114dc102d2..f47cf50943 100644 --- a/depends/packages/qt.mk +++ b/depends/packages/qt.mk @@ -1,6 +1,6 @@ PACKAGE=qt $(package)_version=5.7.1 -$(package)_download_path=https://download.qt.io/archive/qt/5.7/$($(package)_version)/submodules +$(package)_download_path=https://download.qt.io/new_archive/qt/5.7/$($(package)_version)/submodules $(package)_suffix=opensource-src-$($(package)_version).tar.gz $(package)_file_name=qtbase-$($(package)_suffix) $(package)_sha256_hash=95f83e532d23b3ddbde7973f380ecae1bac13230340557276f75f2e37984e410 diff --git a/share/qt/extract_strings_qt.py b/share/qt/extract_strings_qt.py index bcdf120fb4..be5677f27a 100755 --- a/share/qt/extract_strings_qt.py +++ b/share/qt/extract_strings_qt.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # Copyright (c) 2012-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
''' diff --git a/share/rpcuser/rpcuser.py b/share/rpcuser/rpcuser.py index 26986d30f4..dd442e0027 100755 --- a/share/rpcuser/rpcuser.py +++ b/share/rpcuser/rpcuser.py @@ -1,6 +1,6 @@ #!/usr/bin/env python2 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/src/Makefile.am b/src/Makefile.am index 0a92a9da6c..a44cc99c66 100644 --- a/src/Makefile.am +++ b/src/Makefile.am @@ -100,7 +100,6 @@ RAVEN_CORE_H = \ algo/lyra2.h \ algo/sponge.h \ algo/gost_streebog.h \ - algo/hash_algos.h \ algo/groestl.c \ algo/blake.c \ algo/bmw.c \ @@ -228,6 +227,8 @@ RAVEN_CORE_H = \ wallet/rpcwallet.h \ wallet/wallet.h \ wallet/walletdb.h \ + wallet/bip39.h \ + wallet/bip39_english.h \ warnings.h \ zmq/zmqabstractnotifier.h \ zmq/zmqconfig.h\ @@ -322,6 +323,7 @@ libraven_wallet_a_SOURCES = \ wallet/rpcwallet.cpp \ wallet/wallet.cpp \ wallet/walletdb.cpp \ + wallet/bip39.cpp \ $(RAVEN_CORE_H) # crypto primitives library @@ -343,7 +345,31 @@ crypto_libraven_crypto_a_SOURCES = \ crypto/sha256.cpp \ crypto/sha256.h \ crypto/sha512.h \ - crypto/sha512.cpp + crypto/sha512.cpp \ + crypto/ethash/include/ethash/ethash.h \ + crypto/ethash/include/ethash/ethash.hpp \ + crypto/ethash/include/ethash/hash_types.h \ + crypto/ethash/include/ethash/hash_types.hpp \ + crypto/ethash/include/ethash/keccak.h \ + crypto/ethash/include/ethash/keccak.hpp \ + crypto/ethash/include/ethash/progpow.hpp \ + crypto/ethash/include/ethash/version.h \ + crypto/ethash/lib/ethash/bit_manipulation.h \ + crypto/ethash/lib/ethash/builtins.h \ + crypto/ethash/lib/ethash/endianness.hpp \ + crypto/ethash/lib/ethash/ethash.cpp \ + crypto/ethash/lib/ethash/ethash-internal.hpp \ + crypto/ethash/lib/ethash/kiss99.hpp \ + crypto/ethash/lib/ethash/managed.cpp \ + 
crypto/ethash/lib/ethash/primes.c \ + crypto/ethash/lib/ethash/primes.h \ + crypto/ethash/lib/ethash/progpow.cpp \ + crypto/ethash/lib/keccak/keccak.c \ + crypto/ethash/lib/keccak/keccakf1600.c \ + crypto/ethash/lib/keccak/keccakf800.c \ + crypto/ethash/lib/support/attributes.h \ + crypto/ethash/helpers.hpp \ + crypto/ethash/progpow_test_vectors.hpp if USE_ASM crypto_libraven_crypto_a_SOURCES += crypto/sha256_sse4.cpp @@ -525,7 +551,7 @@ if GLIBC_BACK_COMPAT endif libravenconsensus_la_LDFLAGS = $(AM_LDFLAGS) -no-undefined $(RELDFLAGS) -libravenconsensus_la_LIBADD = $(LIBSECP256K1) +libravenconsensus_la_LIBADD = $(LIBSECP256K1) $(BOOST_LIBS) libravenconsensus_la_CPPFLAGS = $(AM_CPPFLAGS) -I$(builddir)/obj -I$(srcdir)/secp256k1/include -DBUILD_RAVEN_INTERNAL libravenconsensus_la_CXXFLAGS = $(AM_CXXFLAGS) $(PIE_FLAGS) diff --git a/src/Makefile.qt.include b/src/Makefile.qt.include index 800edcd483..187752db33 100644 --- a/src/Makefile.qt.include +++ b/src/Makefile.qt.include @@ -119,7 +119,8 @@ QT_FORMS_UI = \ qt/forms/restrictedassetsdialog.ui \ qt/forms/restrictedassignqualifier.ui \ qt/forms/restrictedfreezeaddress.ui \ - qt/forms/sendassetsentry.ui + qt/forms/sendassetsentry.ui \ + qt/forms/mnemonicdialog.ui QT_MOC_CPP = \ qt/moc_addressbookpage.cpp \ @@ -180,7 +181,8 @@ QT_MOC_CPP = \ qt/moc_restrictedassetsdialog.cpp \ qt/moc_restrictedassignqualifier.cpp \ qt/moc_restrictedfreezeaddress.cpp \ - qt/moc_sendassetsentry.cpp + qt/moc_sendassetsentry.cpp \ + qt/moc_mnemonicdialog.cpp RAVEN_MM = \ qt/macdockiconhandler.mm \ @@ -271,7 +273,8 @@ RAVEN_QT_H = \ qt/walletmodel.h \ qt/walletmodeltransaction.h \ qt/walletview.h \ - qt/winshutdownmonitor.h + qt/winshutdownmonitor.h \ + qt/mnemonicdialog.h RES_ICONS = \ qt/res/icons/add.png \ @@ -446,7 +449,8 @@ RAVEN_QT_WALLET_CPP = \ qt/walletframe.cpp \ qt/walletmodel.cpp \ qt/walletmodeltransaction.cpp \ - qt/walletview.cpp + qt/walletview.cpp \ + qt/mnemonicdialog.cpp RAVEN_QT_CPP = $(RAVEN_QT_BASE_CPP) if TARGET_WINDOWS 
diff --git a/src/Makefile.test.include b/src/Makefile.test.include index e66e98ec1e..54775b819c 100644 --- a/src/Makefile.test.include +++ b/src/Makefile.test.include @@ -71,6 +71,7 @@ RAVEN_TESTS =\ test/policyestimator_tests.cpp \ test/pow_tests.cpp \ test/prevector_tests.cpp \ + test/kawpow_tests.cpp \ test/raii_event_tests.cpp \ test/random_tests.cpp \ test/reverselock_tests.cpp \ diff --git a/src/algo/hash_algos.h b/src/algo/hash_algos.h deleted file mode 100644 index 980f57db51..0000000000 --- a/src/algo/hash_algos.h +++ /dev/null @@ -1,593 +0,0 @@ - -// Copyright (c) 2017-2019 The Raven Core developers -// Distributed under the MIT software license, see the accompanying -// file COPYING or http://www.opensource.org/licenses/mit-license.php. - -#ifndef HASHALGOS_H -#define HASHALGOS_H - - -#include "../uint256.h" -#include "sph_blake.h" -#include "sph_bmw.h" -#include "sph_groestl.h" -#include "sph_jh.h" -#include "sph_keccak.h" -#include "sph_skein.h" -#include "sph_luffa.h" -#include "sph_cubehash.h" -#include "sph_shavite.h" -#include "sph_simd.h" -#include "sph_echo.h" -#include "sph_hamsi.h" -#include "sph_fugue.h" -#include "sph_shabal.h" -#include "sph_whirlpool.h" -#include "sph_sha2.h" -#include "sph_haval.h" - -#include "sph_tiger.h" -#include "lyra2.h" -#include "gost_streebog.h" - -#ifndef QT_NO_DEBUG -#include -#endif - -#ifdef GLOBALDEFINED -#define GLOBAL -#else -#define GLOBAL extern -#endif - - -inline int GetHashSelection(const uint256 PrevBlockHash, int index) { - assert(index >= 0); - assert(index < 16); - -#define START_OF_LAST_16_NIBBLES_OF_HASH 48 - int hashSelection = PrevBlockHash.GetNibble(START_OF_LAST_16_NIBBLES_OF_HASH + index); - return(hashSelection); -} - -template -inline uint256 HashX16R(const T1 pbegin, const T1 pend, const uint256 PrevBlockHash) -{ -// static std::chrono::duration[16]; - int hashSelection; - - sph_blake512_context ctx_blake; //0 - sph_bmw512_context ctx_bmw; //1 - sph_groestl512_context ctx_groestl; //2 - 
sph_jh512_context ctx_jh; //3 - sph_keccak512_context ctx_keccak; //4 - sph_skein512_context ctx_skein; //5 - sph_luffa512_context ctx_luffa; //6 - sph_cubehash512_context ctx_cubehash; //7 - sph_shavite512_context ctx_shavite; //8 - sph_simd512_context ctx_simd; //9 - sph_echo512_context ctx_echo; //A - sph_hamsi512_context ctx_hamsi; //B - sph_fugue512_context ctx_fugue; //C - sph_shabal512_context ctx_shabal; //D - sph_whirlpool_context ctx_whirlpool; //E - sph_sha512_context ctx_sha512; //F - - - - static unsigned char pblank[1]; - - uint512 hash[16]; - - for (int i=0;i<16;i++) - { - const void *toHash; - int lenToHash; - if (i == 0) { - toHash = (pbegin == pend ? pblank : static_cast(&pbegin[0])); - lenToHash = (pend - pbegin) * sizeof(pbegin[0]); - } else { - toHash = static_cast(&hash[i-1]); - lenToHash = 64; - } - - hashSelection = GetHashSelection(PrevBlockHash, i); - - switch(hashSelection) { - case 0: - sph_blake512_init(&ctx_blake); - sph_blake512 (&ctx_blake, toHash, lenToHash); - sph_blake512_close(&ctx_blake, static_cast(&hash[i])); - break; - case 1: - sph_bmw512_init(&ctx_bmw); - sph_bmw512 (&ctx_bmw, toHash, lenToHash); - sph_bmw512_close(&ctx_bmw, static_cast(&hash[i])); - break; - case 2: - sph_groestl512_init(&ctx_groestl); - sph_groestl512 (&ctx_groestl, toHash, lenToHash); - sph_groestl512_close(&ctx_groestl, static_cast(&hash[i])); - break; - case 3: - sph_jh512_init(&ctx_jh); - sph_jh512 (&ctx_jh, toHash, lenToHash); - sph_jh512_close(&ctx_jh, static_cast(&hash[i])); - break; - case 4: - sph_keccak512_init(&ctx_keccak); - sph_keccak512 (&ctx_keccak, toHash, lenToHash); - sph_keccak512_close(&ctx_keccak, static_cast(&hash[i])); - break; - case 5: - sph_skein512_init(&ctx_skein); - sph_skein512 (&ctx_skein, toHash, lenToHash); - sph_skein512_close(&ctx_skein, static_cast(&hash[i])); - break; - case 6: - sph_luffa512_init(&ctx_luffa); - sph_luffa512 (&ctx_luffa, toHash, lenToHash); - sph_luffa512_close(&ctx_luffa, static_cast(&hash[i])); - 
break; - case 7: - sph_cubehash512_init(&ctx_cubehash); - sph_cubehash512 (&ctx_cubehash, toHash, lenToHash); - sph_cubehash512_close(&ctx_cubehash, static_cast(&hash[i])); - break; - case 8: - sph_shavite512_init(&ctx_shavite); - sph_shavite512(&ctx_shavite, toHash, lenToHash); - sph_shavite512_close(&ctx_shavite, static_cast(&hash[i])); - break; - case 9: - sph_simd512_init(&ctx_simd); - sph_simd512 (&ctx_simd, toHash, lenToHash); - sph_simd512_close(&ctx_simd, static_cast(&hash[i])); - break; - case 10: - sph_echo512_init(&ctx_echo); - sph_echo512 (&ctx_echo, toHash, lenToHash); - sph_echo512_close(&ctx_echo, static_cast(&hash[i])); - break; - case 11: - sph_hamsi512_init(&ctx_hamsi); - sph_hamsi512 (&ctx_hamsi, toHash, lenToHash); - sph_hamsi512_close(&ctx_hamsi, static_cast(&hash[i])); - break; - case 12: - sph_fugue512_init(&ctx_fugue); - sph_fugue512 (&ctx_fugue, toHash, lenToHash); - sph_fugue512_close(&ctx_fugue, static_cast(&hash[i])); - break; - case 13: - sph_shabal512_init(&ctx_shabal); - sph_shabal512 (&ctx_shabal, toHash, lenToHash); - sph_shabal512_close(&ctx_shabal, static_cast(&hash[i])); - break; - case 14: - sph_whirlpool_init(&ctx_whirlpool); - sph_whirlpool(&ctx_whirlpool, toHash, lenToHash); - sph_whirlpool_close(&ctx_whirlpool, static_cast(&hash[i])); - break; - case 15: - sph_sha512_init(&ctx_sha512); - sph_sha512 (&ctx_sha512, toHash, lenToHash); - sph_sha512_close(&ctx_sha512, static_cast(&hash[i])); - break; - } - } - - return hash[15].trim256(); -} - -template -inline uint256 HashX16RV2(const T1 pbegin, const T1 pend, const uint256 PrevBlockHash) -{ -// static std::chrono::duration[16]; - int hashSelection; - - sph_blake512_context ctx_blake; //0 - sph_bmw512_context ctx_bmw; //1 - sph_groestl512_context ctx_groestl; //2 - sph_jh512_context ctx_jh; //3 - sph_keccak512_context ctx_keccak; //4 - sph_skein512_context ctx_skein; //5 - sph_luffa512_context ctx_luffa; //6 - sph_cubehash512_context ctx_cubehash; //7 - sph_shavite512_context 
ctx_shavite; //8 - sph_simd512_context ctx_simd; //9 - sph_echo512_context ctx_echo; //A - sph_hamsi512_context ctx_hamsi; //B - sph_fugue512_context ctx_fugue; //C - sph_shabal512_context ctx_shabal; //D - sph_whirlpool_context ctx_whirlpool; //E - - sph_sha512_context ctx_sha512; - sph_tiger_context ctx_tiger; - - - - static unsigned char pblank[1]; - - uint512 hash[16]; - - for (int i=0;i<16;i++) - { - const void *toHash; - int lenToHash; - if (i == 0) { - toHash = (pbegin == pend ? pblank : static_cast(&pbegin[0])); - lenToHash = (pend - pbegin) * sizeof(pbegin[0]); - } else { - toHash = static_cast(&hash[i-1]); - lenToHash = 64; - } - - hashSelection = GetHashSelection(PrevBlockHash, i); - - switch(hashSelection) { - case 0: - sph_blake512_init(&ctx_blake); - sph_blake512 (&ctx_blake, toHash, lenToHash); - sph_blake512_close(&ctx_blake, static_cast(&hash[i])); - break; - case 1: - sph_bmw512_init(&ctx_bmw); - sph_bmw512 (&ctx_bmw, toHash, lenToHash); - sph_bmw512_close(&ctx_bmw, static_cast(&hash[i])); - break; - case 2: - sph_groestl512_init(&ctx_groestl); - sph_groestl512 (&ctx_groestl, toHash, lenToHash); - sph_groestl512_close(&ctx_groestl, static_cast(&hash[i])); - break; - case 3: - sph_jh512_init(&ctx_jh); - sph_jh512 (&ctx_jh, toHash, lenToHash); - sph_jh512_close(&ctx_jh, static_cast(&hash[i])); - break; - case 4: - sph_tiger_init(&ctx_tiger); - sph_tiger (&ctx_tiger, toHash, lenToHash); - sph_tiger_close(&ctx_tiger, static_cast(&hash[i])); - - sph_keccak512_init(&ctx_keccak); - sph_keccak512 (&ctx_keccak, static_cast(&hash[i]), 64); - sph_keccak512_close(&ctx_keccak, static_cast(&hash[i])); - break; - case 5: - sph_skein512_init(&ctx_skein); - sph_skein512 (&ctx_skein, toHash, lenToHash); - sph_skein512_close(&ctx_skein, static_cast(&hash[i])); - break; - case 6: - sph_tiger_init(&ctx_tiger); - sph_tiger (&ctx_tiger, toHash, lenToHash); - sph_tiger_close(&ctx_tiger, static_cast(&hash[i])); - - sph_luffa512_init(&ctx_luffa); - sph_luffa512 
(&ctx_luffa, static_cast(&hash[i]), 64); - sph_luffa512_close(&ctx_luffa, static_cast(&hash[i])); - break; - case 7: - sph_cubehash512_init(&ctx_cubehash); - sph_cubehash512 (&ctx_cubehash, toHash, lenToHash); - sph_cubehash512_close(&ctx_cubehash, static_cast(&hash[i])); - break; - case 8: - sph_shavite512_init(&ctx_shavite); - sph_shavite512(&ctx_shavite, toHash, lenToHash); - sph_shavite512_close(&ctx_shavite, static_cast(&hash[i])); - break; - case 9: - sph_simd512_init(&ctx_simd); - sph_simd512 (&ctx_simd, toHash, lenToHash); - sph_simd512_close(&ctx_simd, static_cast(&hash[i])); - break; - case 10: - sph_echo512_init(&ctx_echo); - sph_echo512 (&ctx_echo, toHash, lenToHash); - sph_echo512_close(&ctx_echo, static_cast(&hash[i])); - break; - case 11: - sph_hamsi512_init(&ctx_hamsi); - sph_hamsi512 (&ctx_hamsi, toHash, lenToHash); - sph_hamsi512_close(&ctx_hamsi, static_cast(&hash[i])); - break; - case 12: - sph_fugue512_init(&ctx_fugue); - sph_fugue512 (&ctx_fugue, toHash, lenToHash); - sph_fugue512_close(&ctx_fugue, static_cast(&hash[i])); - break; - case 13: - sph_shabal512_init(&ctx_shabal); - sph_shabal512 (&ctx_shabal, toHash, lenToHash); - sph_shabal512_close(&ctx_shabal, static_cast(&hash[i])); - break; - case 14: - sph_whirlpool_init(&ctx_whirlpool); - sph_whirlpool(&ctx_whirlpool, toHash, lenToHash); - sph_whirlpool_close(&ctx_whirlpool, static_cast(&hash[i])); - break; - case 15: - sph_tiger_init(&ctx_tiger); - sph_tiger (&ctx_tiger, toHash, lenToHash); - sph_tiger_close(&ctx_tiger, static_cast(&hash[i])); - - sph_sha512_init(&ctx_sha512); - sph_sha512 (&ctx_sha512, static_cast(&hash[i]), 64); - sph_sha512_close(&ctx_sha512, static_cast(&hash[i])); - break; - } - } - - return hash[15].trim256(); -} - -/// Used for testing the algo switch from X16R to X16RV2 - -//inline int GetX21sSelection(const uint256 PrevBlockHash, int index) { -// assert(index >= 0); -// assert(index < 16); -// -// #define START_OF_LAST_16_NIBBLES_OF_HASH 48 -// int hashSelection = 
PrevBlockHash.GetNibble(START_OF_LAST_16_NIBBLES_OF_HASH + index); -// return(hashSelection); -//} -// -// -//template -//inline uint256 HashX21S(const T1 pbegin, const T1 pend, const uint256 PrevBlockHash) { -// sph_blake512_context ctx_blake; -// sph_bmw512_context ctx_bmw; -// sph_groestl512_context ctx_groestl; -// sph_jh512_context ctx_jh; -// sph_keccak512_context ctx_keccak; -// sph_skein512_context ctx_skein; -// sph_luffa512_context ctx_luffa; -// sph_cubehash512_context ctx_cubehash; -// sph_shavite512_context ctx_shavite; -// sph_simd512_context ctx_simd; -// sph_echo512_context ctx_echo; -// sph_hamsi512_context ctx_hamsi; -// sph_fugue512_context ctx_fugue; -// sph_shabal512_context ctx_shabal; -// sph_whirlpool_context ctx_whirlpool; -// sph_sha512_context ctx_sha512; -// sph_haval256_5_context ctx_haval; -// sph_tiger_context ctx_tiger; -// sph_gost512_context ctx_gost; -// sph_sha256_context ctx_sha; -// -// static unsigned char pblank[1]; -// uint512 hash[21]; -// -// std::string hashString = PrevBlockHash.GetHex(); // uint256 to string -// std::string list = "0123456789abcdef"; -// std::string order = list; -// -// std::string hashFront = hashString.substr(0,48); // preserve first 48 chars -// std::string sixteen = hashString.substr(48,64); // extract last sixteen chars -// -// for(int i=0; i<16; i++){ -// int offset = list.find(sixteen[i]); // find offset of sixteen char -// -// order.insert(0, 1, order[offset]); // insert the nth character at the beginning -// order.erase(offset+1, 1); // erase the n+1 character (was nth) -// } -// -// const uint256 scrambleHash = uint256S(hashFront + order); // uint256 with length of hash and shuffled last sixteen -// -// -// for (int i=0;i<16;i++) -// { -// const void *toHash; -// int lenToHash; -// if (i == 0) { -// toHash = (pbegin == pend ? 
pblank : static_cast(&pbegin[0])); -// lenToHash = (pend - pbegin) * sizeof(pbegin[0]); -// } else { -// toHash = static_cast(&hash[i-1]); -// lenToHash = 64; -// } -// -// int hashSelection = GetX21sSelection(scrambleHash, i); // change PrevBlockHash to scrambleHash (x16s) -// switch(hashSelection) { -// case 0: -// sph_blake512_init(&ctx_blake); -// sph_blake512 (&ctx_blake, toHash, lenToHash); -// sph_blake512_close(&ctx_blake, static_cast(&hash[i])); -// break; -// case 1: -// sph_bmw512_init(&ctx_bmw); -// sph_bmw512 (&ctx_bmw, toHash, lenToHash); -// sph_bmw512_close(&ctx_bmw, static_cast(&hash[i])); -// break; -// case 2: -// sph_groestl512_init(&ctx_groestl); -// sph_groestl512 (&ctx_groestl, toHash, lenToHash); -// sph_groestl512_close(&ctx_groestl, static_cast(&hash[i])); -// break; -// case 3: -// sph_jh512_init(&ctx_jh); -// sph_jh512 (&ctx_jh, toHash, lenToHash); -// sph_jh512_close(&ctx_jh, static_cast(&hash[i])); -// break; -// case 4: -// sph_keccak512_init(&ctx_keccak); -// sph_keccak512 (&ctx_keccak, toHash, lenToHash); -// sph_keccak512_close(&ctx_keccak, static_cast(&hash[i])); -// break; -// case 5: -// sph_skein512_init(&ctx_skein); -// sph_skein512 (&ctx_skein, toHash, lenToHash); -// sph_skein512_close(&ctx_skein, static_cast(&hash[i])); -// break; -// case 6: -// sph_luffa512_init(&ctx_luffa); -// sph_luffa512 (&ctx_luffa, toHash, lenToHash); -// sph_luffa512_close(&ctx_luffa, static_cast(&hash[i])); -// break; -// case 7: -// sph_cubehash512_init(&ctx_cubehash); -// sph_cubehash512 (&ctx_cubehash, toHash, lenToHash); -// sph_cubehash512_close(&ctx_cubehash, static_cast(&hash[i])); -// break; -// case 8: -// sph_shavite512_init(&ctx_shavite); -// sph_shavite512(&ctx_shavite, toHash, lenToHash); -// sph_shavite512_close(&ctx_shavite, static_cast(&hash[i])); -// break; -// case 9: -// sph_simd512_init(&ctx_simd); -// sph_simd512 (&ctx_simd, toHash, lenToHash); -// sph_simd512_close(&ctx_simd, static_cast(&hash[i])); -// break; -// case 10: 
-// sph_echo512_init(&ctx_echo); -// sph_echo512 (&ctx_echo, toHash, lenToHash); -// sph_echo512_close(&ctx_echo, static_cast(&hash[i])); -// break; -// case 11: -// sph_hamsi512_init(&ctx_hamsi); -// sph_hamsi512 (&ctx_hamsi, toHash, lenToHash); -// sph_hamsi512_close(&ctx_hamsi, static_cast(&hash[i])); -// break; -// case 12: -// sph_fugue512_init(&ctx_fugue); -// sph_fugue512 (&ctx_fugue, toHash, lenToHash); -// sph_fugue512_close(&ctx_fugue, static_cast(&hash[i])); -// break; -// case 13: -// sph_shabal512_init(&ctx_shabal); -// sph_shabal512 (&ctx_shabal, toHash, lenToHash); -// sph_shabal512_close(&ctx_shabal, static_cast(&hash[i])); -// break; -// case 14: -// sph_whirlpool_init(&ctx_whirlpool); -// sph_whirlpool(&ctx_whirlpool, toHash, lenToHash); -// sph_whirlpool_close(&ctx_whirlpool, static_cast(&hash[i])); -// break; -// case 15: -// sph_sha512_init(&ctx_sha512); -// sph_sha512 (&ctx_sha512, toHash, lenToHash); -// sph_sha512_close(&ctx_sha512, static_cast(&hash[i])); -// break; -// } -// } -// -// sph_haval256_5_init(&ctx_haval); -// sph_haval256_5 (&ctx_haval, static_cast(&hash[15]), 64); -// sph_haval256_5_close(&ctx_haval, static_cast(&hash[15])); -// -// sph_tiger_init(&ctx_tiger); -// sph_tiger (&ctx_tiger, static_cast(&hash[15]), 64); -// sph_tiger_close(&ctx_tiger, static_cast(&hash[15])); -// -// LYRA2(static_cast(&hash[15]), 32, static_cast(&hash[15]), 32, static_cast(&hash[15]), 32, 1, 4, 4); -// -// sph_gost512_init(&ctx_gost); -// sph_gost512 (&ctx_gost, static_cast(&hash[15]), 64); -// sph_gost512_close(&ctx_gost, static_cast(&hash[15])); -// -// sph_sha256_init(&ctx_sha); -// sph_sha256 (&ctx_sha, static_cast(&hash[15]), 64); -// sph_sha256_close(&ctx_sha, static_cast(&hash[15])); -// -// return hash[15].trim256(); -//} - -//template -//inline uint256 HashTestTiger(const T1 pbegin, const T1 pend, const uint256 PrevBlockHash) -//{ -// sph_tiger_context ctx_tiger; -// -// static unsigned char pblank[1]; -// -// uint512 hash[1]; -// int 
count = 0; -// -// while (count < 10000000) { -// const void *toHash; -// int lenToHash; -// if (count == 0) { -// toHash = (pbegin == pend ? pblank : static_cast(&pbegin[0])); -// lenToHash = (pend - pbegin) * sizeof(pbegin[0]); -// } else { -// toHash = static_cast(&hash[0]); -// lenToHash = 64; -// } -// -// sph_tiger_init(&ctx_tiger); -// sph_tiger(&ctx_tiger, toHash, lenToHash); -// sph_tiger_close(&ctx_tiger, static_cast(&hash[0])); -// -// count++; -// } -// -// return hash[0].trim256(); -//} -// -//template -//inline uint256 HashTestSha512(const T1 pbegin, const T1 pend, const uint256 PrevBlockHash) -//{ -// sph_sha512_context ctx_sha512; //F -// -// static unsigned char pblank[1]; -// -// uint512 hash[1]; -// int count = 0; -// -// while (count < 10000000) { -// const void *toHash; -// int lenToHash; -// if (count == 0) { -// toHash = (pbegin == pend ? pblank : static_cast(&pbegin[0])); -// lenToHash = (pend - pbegin) * sizeof(pbegin[0]); -// } else { -// toHash = static_cast(&hash[0]); -// lenToHash = 64; -// } -// -// sph_sha512_init(&ctx_sha512); -// sph_sha512 (&ctx_sha512, toHash, lenToHash); -// sph_sha512_close(&ctx_sha512, static_cast(&hash[0])); -// -// count++; -// } -// -// return hash[0].trim256(); -//} -// -//template -//inline uint256 HashTestGost512(const T1 pbegin, const T1 pend, const uint256 PrevBlockHash) -//{ -// sph_gost512_context ctx_gost; -// -// static unsigned char pblank[1]; -// -// uint512 hash[1]; -// int count = 0; -// -// while (count < 10000000) { -// const void *toHash; -// int lenToHash; -// if (count == 0) { -// toHash = (pbegin == pend ? 
pblank : static_cast(&pbegin[0])); -// lenToHash = (pend - pbegin) * sizeof(pbegin[0]); -// } else { -// toHash = static_cast(&hash[0]); -// lenToHash = 64; -// } -// -// sph_gost512_init(&ctx_gost); -// sph_gost512 (&ctx_gost, static_cast(&hash[0]), lenToHash); -// sph_gost512_close(&ctx_gost, static_cast(&hash[0])); -// -// count++; -// } -// -// return hash[0].trim256(); -//} - -#endif // HASHALGOS_H diff --git a/src/assets/assets.cpp b/src/assets/assets.cpp index a241c57312..03cb5249f0 100644 --- a/src/assets/assets.cpp +++ b/src/assets/assets.cpp @@ -674,13 +674,23 @@ bool TransferAssetFromScript(const CScript& scriptPubKey, CAssetTransfer& assetT strAddress = EncodeDestination(destination); std::vector vchTransferAsset; - vchTransferAsset.insert(vchTransferAsset.end(), scriptPubKey.begin() + 31, scriptPubKey.end()); + + if (AreTransferScriptsSizeDeployed()) { + // Before kawpow activation we used the hardcoded 31 to find the data + // This created a bug where large transfers scripts would fail to serialize. + // This fixes that issue (https://github.com/RavenProject/Ravencoin/issues/752) + // TODO, after the kawpow fork goes active, we should be able to remove this if/else statement and just use this line. 
+ vchTransferAsset.insert(vchTransferAsset.end(), scriptPubKey.begin() + nStartingIndex, scriptPubKey.end()); + } else { + vchTransferAsset.insert(vchTransferAsset.end(), scriptPubKey.begin() + 31, scriptPubKey.end()); + } + CDataStream ssAsset(vchTransferAsset, SER_NETWORK, PROTOCOL_VERSION); try { ssAsset >> assetTransfer; } catch(std::exception& e) { - std::cout << "Failed to get the transfer asset from the stream: " << e.what() << std::endl; + error("Failed to get the transfer asset from the stream: %s", e.what()); return false; } @@ -705,7 +715,7 @@ bool AssetFromScript(const CScript& scriptPubKey, CNewAsset& assetNew, std::stri try { ssAsset >> assetNew; } catch(std::exception& e) { - std::cout << "Failed to get the asset from the stream: " << e.what() << std::endl; + error("Failed to get the asset from the stream: %s", e.what()); return false; } @@ -730,7 +740,7 @@ bool MsgChannelAssetFromScript(const CScript& scriptPubKey, CNewAsset& assetNew, try { ssAsset >> assetNew; } catch(std::exception& e) { - std::cout << "Failed to get the msg channel asset from the stream: " << e.what() << std::endl; + error("Failed to get the msg channel asset from the stream: %s", e.what()); return false; } @@ -755,7 +765,7 @@ bool QualifierAssetFromScript(const CScript& scriptPubKey, CNewAsset& assetNew, try { ssAsset >> assetNew; } catch(std::exception& e) { - std::cout << "Failed to get the qualifier asset from the stream: " << e.what() << std::endl; + error("Failed to get the qualifier asset from the stream: %s", e.what()); return false; } @@ -780,7 +790,7 @@ bool RestrictedAssetFromScript(const CScript& scriptPubKey, CNewAsset& assetNew, try { ssAsset >> assetNew; } catch(std::exception& e) { - std::cout << "Failed to get the restricted asset from the stream: " << e.what() << std::endl; + error("Failed to get the restricted asset from the stream: %s", e.what()); return false; } @@ -805,7 +815,7 @@ bool OwnerAssetFromScript(const CScript& scriptPubKey, std::string& 
assetName, s try { ssOwner >> assetName; } catch(std::exception& e) { - std::cout << "Failed to get the owner asset from the stream: " << e.what() << std::endl; + error("Failed to get the owner asset from the stream: %s", e.what()); return false; } @@ -830,7 +840,7 @@ bool ReissueAssetFromScript(const CScript& scriptPubKey, CReissueAsset& reissue, try { ssReissue >> reissue; } catch(std::exception& e) { - std::cout << "Failed to get the reissue asset from the stream: " << e.what() << std::endl; + error("Failed to get the reissue asset from the stream: %s", e.what()); return false; } @@ -855,7 +865,7 @@ bool AssetNullDataFromScript(const CScript& scriptPubKey, CNullAssetTxData& asse try { ssData >> assetData; } catch(std::exception& e) { - std::cout << "Failed to get the asset tx data from the stream: " << e.what() << std::endl; + error("Failed to get the null asset tx data from the stream: %s", e.what()); return false; } @@ -875,7 +885,7 @@ bool GlobalAssetNullDataFromScript(const CScript& scriptPubKey, CNullAssetTxData try { ssData >> assetData; } catch(std::exception& e) { - std::cout << "Failed to get the global restriction asset tx data from the stream: " << e.what() << std::endl; + error("Failed to get the global restriction asset tx data from the stream: %s", e.what()); return false; } @@ -895,7 +905,7 @@ bool AssetNullVerifierDataFromScript(const CScript& scriptPubKey, CNullAssetTxVe try { ssData >> verifierData; } catch(std::exception& e) { - std::cout << "Failed to get the verifier string from the stream: " << e.what() << std::endl; + error("Failed to get the verifier string from the stream: %s", e.what()); return false; } diff --git a/src/assets/rewards.cpp b/src/assets/rewards.cpp index ca46574878..83ba5d47f7 100644 --- a/src/assets/rewards.cpp +++ b/src/assets/rewards.cpp @@ -60,7 +60,7 @@ bool GenerateDistributionList(const CRewardSnapshot& p_rewardSnapshot, std::vect // Get details on the specified source asset CNewAsset distributionAsset; - bool 
srcIsIndivisible = false; + UNUSED_VAR bool srcIsIndivisible = false; CAmount srcUnitDivisor = COIN; // Default to divisor for RVN const int8_t COIN_DIGITS_PAST_DECIMAL = 8; diff --git a/src/blockencodings.cpp b/src/blockencodings.cpp index e5e15bf4fc..74b7394cf9 100644 --- a/src/blockencodings.cpp +++ b/src/blockencodings.cpp @@ -163,7 +163,8 @@ ReadStatus PartiallyDownloadedBlock::InitData(const CBlockHeaderAndShortTxIDs& c break; } - LogPrint(BCLog::CMPCTBLOCK, "Initialized PartiallyDownloadedBlock for block %s using a cmpctblock of size %lu\n", cmpctblock.header.GetHash().ToString(), GetSerializeSize(cmpctblock, SER_NETWORK, PROTOCOL_VERSION)); + LogPrint(BCLog::CMPCTBLOCK, "Initialized PartiallyDownloadedBlock for block %s using a cmpctblock of size %lu\n", + cmpctblock.header.GetHash().ToString(), GetSerializeSize(cmpctblock, SER_NETWORK, PROTOCOL_VERSION)); return READ_STATUS_OK; } diff --git a/src/chain.h b/src/chain.h index 7fff56434c..6cafdeea37 100644 --- a/src/chain.h +++ b/src/chain.h @@ -215,6 +215,10 @@ class CBlockIndex uint32_t nBits; uint32_t nNonce; + // KAWPOW + uint64_t nNonce64; + uint256 mix_hash; + //! (memory only) Sequential id assigned to distinguish order in which blocks are received. 
int32_t nSequenceId; @@ -242,6 +246,10 @@ class CBlockIndex nTime = 0; nBits = 0; nNonce = 0; + + //KAWPOW + nNonce64 = 0; + mix_hash = uint256(); } CBlockIndex() @@ -258,6 +266,12 @@ class CBlockIndex nTime = block.nTime; nBits = block.nBits; nNonce = block.nNonce; + + //KAWPOW + nHeight = block.nHeight; + nNonce64 = block.nNonce64; + mix_hash = block.mix_hash; + } CDiskBlockPos GetBlockPos() const { @@ -288,6 +302,9 @@ class CBlockIndex block.nTime = nTime; block.nBits = nBits; block.nNonce = nNonce; + block.nHeight = nHeight; + block.nNonce64 = nNonce64; + block.mix_hash = mix_hash; return block; } @@ -406,7 +423,14 @@ class CDiskBlockIndex : public CBlockIndex READWRITE(hashMerkleRoot); READWRITE(nTime); READWRITE(nBits); - READWRITE(nNonce); + if (nTime < nKAWPOWActivationTime) { + READWRITE(nNonce); + } else { + //KAWPOW + READWRITE(nNonce64); + READWRITE(mix_hash); + } + } uint256 GetBlockHash() const @@ -418,6 +442,10 @@ class CDiskBlockIndex : public CBlockIndex block.nTime = nTime; block.nBits = nBits; block.nNonce = nNonce; + + block.nHeight = nHeight; + block.nNonce64 = nNonce64; + block.mix_hash = mix_hash; return block.GetHash(); } diff --git a/src/chainparams.cpp b/src/chainparams.cpp index 120febba62..a48b6b2926 100644 --- a/src/chainparams.cpp +++ b/src/chainparams.cpp @@ -124,6 +124,7 @@ class CMainParams : public CChainParams { consensus.nSegwitEnabled = true; consensus.nCSVEnabled = true; consensus.powLimit = uint256S("00000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"); + consensus.kawpowLimit = uint256S("0000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffff"); // Estimated starting diff for first 180 kawpow blocks consensus.nPowTargetTimespan = 2016 * 60; // 1.4 days consensus.nPowTargetSpacing = 1 * 60; consensus.fPowAllowMinDifficultyBlocks = false; @@ -145,12 +146,17 @@ class CMainParams : public CChainParams { consensus.vDeployments[Consensus::DEPLOYMENT_MSG_REST_ASSETS].nTimeout = 1610542800; // UTC: Wed Jan 13 
2021 13:00:00 consensus.vDeployments[Consensus::DEPLOYMENT_MSG_REST_ASSETS].nOverrideRuleChangeActivationThreshold = 1714; // Approx 85% of 2016 consensus.vDeployments[Consensus::DEPLOYMENT_MSG_REST_ASSETS].nOverrideMinerConfirmationWindow = 2016; + consensus.vDeployments[Consensus::DEPLOYMENT_TRANSFER_SCRIPT_SIZE].bit = 8; + consensus.vDeployments[Consensus::DEPLOYMENT_TRANSFER_SCRIPT_SIZE].nStartTime = 1588788000; // UTC: Wed May 06 2020 18:00:00 + consensus.vDeployments[Consensus::DEPLOYMENT_TRANSFER_SCRIPT_SIZE].nTimeout = 1620324000; // UTC: Thu May 06 2021 18:00:00 + consensus.vDeployments[Consensus::DEPLOYMENT_TRANSFER_SCRIPT_SIZE].nOverrideRuleChangeActivationThreshold = 1714; // Approx 85% of 2016 + consensus.vDeployments[Consensus::DEPLOYMENT_TRANSFER_SCRIPT_SIZE].nOverrideMinerConfirmationWindow = 2016; // The best chain should have at least this much work - consensus.nMinimumChainWork = uint256S("0x00000000000000000000000000000000000000000000001445cb2bc4398ebded"); // Block 1040000 + consensus.nMinimumChainWork = uint256S("000000000000000000000000000000000000000000000020d4ac871fb7009b63"); // Block 1186833 // By default assume that the signatures in ancestors of this block are valid. Block# 1040000 - consensus.defaultAssumeValid = uint256S("0x000000000000138e2690b06b1ddd8cf158c3a5cf540ee5278debdcdffcf75839"); // Block 1040000 + consensus.defaultAssumeValid = uint256S("0x0000000000000d4840d4de1f7d943542c2aed532bd5d6527274fc0142fa1a410"); // Block 1186833 /** * The message start string is designed to be unlikely to occur in normal data. 
@@ -181,6 +187,9 @@ class CMainParams : public CChainParams { base58Prefixes[EXT_PUBLIC_KEY] = {0x04, 0x88, 0xB2, 0x1E}; base58Prefixes[EXT_SECRET_KEY] = {0x04, 0x88, 0xAD, 0xE4}; + // Raven BIP44 cointype in mainnet is '175' + nExtCoinType = 175; + vFixedSeeds = std::vector(pnSeed6_main, pnSeed6_main + ARRAYLEN(pnSeed6_main)); fDefaultConsistencyChecks = false; @@ -194,7 +203,8 @@ class CMainParams : public CChainParams { { 697376, uint256S("0x000000000000499bf4ebbe61541b02e4692b33defc7109d8f12d2825d4d2dfa0")}, { 740000, uint256S("0x00000000000027d11bf1e7a3b57d3c89acc1722f39d6e08f23ac3a07e16e3172")}, { 909251, uint256S("0x000000000000694c9a363eff06518aa7399f00014ce667b9762f9a4e7a49f485")}, - { 1040000, uint256S("0x000000000000138e2690b06b1ddd8cf158c3a5cf540ee5278debdcdffcf75839")} + { 1040000, uint256S("0x000000000000138e2690b06b1ddd8cf158c3a5cf540ee5278debdcdffcf75839")}, + { 1186833, uint256S("0x0000000000000d4840d4de1f7d943542c2aed532bd5d6527274fc0142fa1a410")} } }; @@ -241,8 +251,11 @@ class CMainParams : public CChainParams { nMinReorganizationAge = 60 * 60 * 12; // 12 hours nAssetActivationHeight = 435456; // Asset activated block height - nMessagingActivationBlock = 0; // Messaging activated block height // TODO after messaging goes active on mainnet - nRestrictedActivationBlock = 0; // Restricted activated block height // TODO after restricted goes active on mainnet + nMessagingActivationBlock = 1092672; // Messaging activated block height + nRestrictedActivationBlock = 1092672; // Restricted activated block height + + nKAAAWWWPOWActivationTime = 1588788000; // UTC: Wed May 06 2020 18:00:00 + nKAWPOWActivationTime = nKAAAWWWPOWActivationTime; /** RVN End **/ } }; @@ -262,6 +275,7 @@ class CTestNetParams : public CChainParams { consensus.nCSVEnabled = true; consensus.powLimit = uint256S("00000fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"); + consensus.kawpowLimit = uint256S("000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"); 
consensus.nPowTargetTimespan = 2016 * 60; // 1.4 days consensus.nPowTargetSpacing = 1 * 60; consensus.fPowAllowMinDifficultyBlocks = true; @@ -283,12 +297,17 @@ class CTestNetParams : public CChainParams { consensus.vDeployments[Consensus::DEPLOYMENT_MSG_REST_ASSETS].nTimeout = 1577257200; // UTC: Wed Dec 25 2019 07:00:00 consensus.vDeployments[Consensus::DEPLOYMENT_MSG_REST_ASSETS].nOverrideRuleChangeActivationThreshold = 1310; consensus.vDeployments[Consensus::DEPLOYMENT_MSG_REST_ASSETS].nOverrideMinerConfirmationWindow = 2016; + consensus.vDeployments[Consensus::DEPLOYMENT_TRANSFER_SCRIPT_SIZE].bit = 8; + consensus.vDeployments[Consensus::DEPLOYMENT_TRANSFER_SCRIPT_SIZE].nStartTime = 1586973600; // UTC: Wed Apr 15 2020 18:00:00 + consensus.vDeployments[Consensus::DEPLOYMENT_TRANSFER_SCRIPT_SIZE].nTimeout = 1618509600; // UTC: Thu Apr 15 2021 18:00:00 + consensus.vDeployments[Consensus::DEPLOYMENT_TRANSFER_SCRIPT_SIZE].nOverrideRuleChangeActivationThreshold = 1310; + consensus.vDeployments[Consensus::DEPLOYMENT_TRANSFER_SCRIPT_SIZE].nOverrideMinerConfirmationWindow = 2016; // The best chain should have at least this much work. - consensus.nMinimumChainWork = uint256S("0x00"); + consensus.nMinimumChainWork = uint256S("0x000000000000000000000000000000000000000000000000000168050db560b4"); // By default assume that the signatures in ancestors of this block are valid. 
- consensus.defaultAssumeValid = uint256S("0x00"); + consensus.defaultAssumeValid = uint256S("0x000000006272208605c4df3b54d4d5515759105e7ffcb258e8cd8077924ffef1"); pchMessageStart[0] = 0x52; // R @@ -381,6 +400,9 @@ class CTestNetParams : public CChainParams { base58Prefixes[EXT_PUBLIC_KEY] = {0x04, 0x35, 0x87, 0xCF}; base58Prefixes[EXT_SECRET_KEY] = {0x04, 0x35, 0x83, 0x94}; + // Raven BIP44 cointype in testnet + nExtCoinType = 1; + vFixedSeeds = std::vector(pnSeed6_test, pnSeed6_test + ARRAYLEN(pnSeed6_test)); fDefaultConsistencyChecks = false; @@ -390,7 +412,10 @@ class CTestNetParams : public CChainParams { checkpointData = (CCheckpointData) { { - { 225, uint256S("0x000003465e3e0167322eb8269ce91246bbc211e293bc5fbf6f0a0d12c1ccb363")}, + { 225, uint256S("0x000003465e3e0167322eb8269ce91246bbc211e293bc5fbf6f0a0d12c1ccb363")}, + {223408, uint256S("0x000000012a0c09dd6456ab19018cc458648dec762b04f4ddf8ef8108eae69db9")}, + {232980, uint256S("0x000000007b16ae547fce76c3308dbeec2090cde75de74ab5dfcd6f60d13f089b")}, + {257610, uint256S("0x000000006272208605c4df3b54d4d5515759105e7ffcb258e8cd8077924ffef1")} } }; @@ -436,9 +461,12 @@ class CTestNetParams : public CChainParams { nMinReorganizationPeers = 4; nMinReorganizationAge = 60 * 60 * 12; // 12 hours - nAssetActivationHeight = 0; // Asset activated block height - nMessagingActivationBlock = 0; // Messaging activated block height - nRestrictedActivationBlock = 0; // Restricted activated block height // TODO after restricted goes active on testnet + nAssetActivationHeight = 6048; // Asset activated block height + nMessagingActivationBlock = 10080; // Messaging activated block height + nRestrictedActivationBlock = 10080; // Restricted activated block height + + nKAAAWWWPOWActivationTime = 1585159200; //Wed Mar 25 2020 18:00:00 UTC + nKAWPOWActivationTime = nKAAAWWWPOWActivationTime; /** RVN End **/ } }; @@ -457,6 +485,7 @@ class CRegTestParams : public CChainParams { consensus.nCSVEnabled = true; 
consensus.nSubsidyHalvingInterval = 150; consensus.powLimit = uint256S("7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"); + consensus.kawpowLimit = uint256S("7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"); consensus.nPowTargetTimespan = 2016 * 60; // 1.4 days consensus.nPowTargetSpacing = 1 * 60; consensus.fPowAllowMinDifficultyBlocks = true; @@ -478,6 +507,11 @@ class CRegTestParams : public CChainParams { consensus.vDeployments[Consensus::DEPLOYMENT_MSG_REST_ASSETS].nTimeout = 999999999999ULL; // UTC: Wed Dec 25 2019 07:00:00 consensus.vDeployments[Consensus::DEPLOYMENT_MSG_REST_ASSETS].nOverrideRuleChangeActivationThreshold = 108; consensus.vDeployments[Consensus::DEPLOYMENT_MSG_REST_ASSETS].nOverrideMinerConfirmationWindow = 144; + consensus.vDeployments[Consensus::DEPLOYMENT_TRANSFER_SCRIPT_SIZE].bit = 8; + consensus.vDeployments[Consensus::DEPLOYMENT_TRANSFER_SCRIPT_SIZE].nStartTime = 0; + consensus.vDeployments[Consensus::DEPLOYMENT_TRANSFER_SCRIPT_SIZE].nTimeout = 999999999999ULL; + consensus.vDeployments[Consensus::DEPLOYMENT_TRANSFER_SCRIPT_SIZE].nOverrideRuleChangeActivationThreshold = 208; + consensus.vDeployments[Consensus::DEPLOYMENT_TRANSFER_SCRIPT_SIZE].nOverrideMinerConfirmationWindow = 288; // The best chain should have at least this much work. consensus.nMinimumChainWork = uint256S("0x00"); @@ -583,6 +617,8 @@ class CRegTestParams : public CChainParams { base58Prefixes[EXT_PUBLIC_KEY] = {0x04, 0x35, 0x87, 0xCF}; base58Prefixes[EXT_SECRET_KEY] = {0x04, 0x35, 0x83, 0x94}; + // Raven BIP44 cointype in regtest + nExtCoinType = 1; /** RVN Start **/ // Burn Amounts @@ -620,6 +656,12 @@ class CRegTestParams : public CChainParams { nAssetActivationHeight = 0; // Asset activated block height nMessagingActivationBlock = 0; // Messaging activated block height nRestrictedActivationBlock = 0; // Restricted activated block height + + // TODO, we need to figure out what to do with this for regtest. 
This effects the unit tests + // For now we can use a timestamp very far away + // If you are looking to test the kawpow hashing function in regtest. You will need to change this number + nKAAAWWWPOWActivationTime = 3582830167; + nKAWPOWActivationTime = nKAAAWWWPOWActivationTime; /** RVN End **/ } }; diff --git a/src/chainparams.h b/src/chainparams.h index 9de4cabcc2..5145d237c2 100644 --- a/src/chainparams.h +++ b/src/chainparams.h @@ -75,6 +75,7 @@ class CChainParams std::string NetworkIDString() const { return strNetworkID; } const std::vector& DNSSeeds() const { return vSeeds; } const std::vector& Base58Prefix(Base58Type type) const { return base58Prefixes[type]; } + int ExtCoinType() const { return nExtCoinType; } const std::vector& FixedSeeds() const { return vFixedSeeds; } const CCheckpointData& Checkpoints() const { return checkpointData; } const ChainTxData& TxData() const { return chainTxData; } @@ -152,6 +153,7 @@ class CChainParams uint64_t nPruneAfterHeight; std::vector vSeeds; std::vector base58Prefixes[MAX_BASE58_TYPES]; + int nExtCoinType; std::string strNetworkID; CBlock genesis; std::vector vFixedSeeds; @@ -197,6 +199,8 @@ class CChainParams int nMinReorganizationAge; int nAssetActivationHeight; + + uint32_t nKAAAWWWPOWActivationTime; /** RVN End **/ }; diff --git a/src/consensus/consensus.h b/src/consensus/consensus.h index 55764cd29a..6c746cc0ef 100644 --- a/src/consensus/consensus.h +++ b/src/consensus/consensus.h @@ -36,6 +36,7 @@ static const size_t MIN_SERIALIZABLE_TRANSACTION_WEIGHT = WITNESS_SCALE_FACTOR * //! it causes unused variable warnings when compiling. 
This UNUSED_VAR removes the unused warnings UNUSED_VAR static bool fAssetsIsActive = false; UNUSED_VAR static bool fRip5IsActive = false; +UNUSED_VAR static bool fTransferScriptIsActive = false; unsigned int GetMaxBlockWeight(); unsigned int GetMaxBlockSerializedSize(); diff --git a/src/consensus/params.h b/src/consensus/params.h index 3b6c90a63d..8b255ac652 100644 --- a/src/consensus/params.h +++ b/src/consensus/params.h @@ -18,6 +18,7 @@ enum DeploymentPos DEPLOYMENT_TESTDUMMY, DEPLOYMENT_ASSETS, // Deployment of RIP2 DEPLOYMENT_MSG_REST_ASSETS, // Delpoyment of RIP5 and Restricted assets + DEPLOYMENT_TRANSFER_SCRIPT_SIZE, // DEPLOYMENT_CSV, // Deployment of BIP68, BIP112, and BIP113. // DEPLOYMENT_SEGWIT, // Deployment of BIP141, BIP143, and BIP147. // NOTE: Also add new deployments to VersionBitsDeploymentInfo in versionbits.cpp @@ -65,6 +66,7 @@ struct Params { BIP9Deployment vDeployments[MAX_VERSION_BITS_DEPLOYMENTS]; /** Proof of work parameters */ uint256 powLimit; + uint256 kawpowLimit; bool fPowAllowMinDifficultyBlocks; bool fPowNoRetargeting; int64_t nPowTargetSpacing; diff --git a/src/crypto/ethash/helpers.hpp b/src/crypto/ethash/helpers.hpp new file mode 100644 index 0000000000..ec0fc8317d --- /dev/null +++ b/src/crypto/ethash/helpers.hpp @@ -0,0 +1,54 @@ +// ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. +// Copyright 2018-2019 Pawel Bylica. +// Licensed under the Apache License, Version 2.0. + +#pragma once + +#include + +#include + +template +inline std::string to_hex(const Hash& h) +{ + static const auto hex_chars = "0123456789abcdef"; + std::string str; + str.reserve(sizeof(h) * 2); + for (auto b : h.bytes) + { + str.push_back(hex_chars[uint8_t(b) >> 4]); + str.push_back(hex_chars[uint8_t(b) & 0xf]); + } + return str; +} + +inline ethash::hash256 to_hash256(const std::string& hex) +{ + auto parse_digit = [](char d) -> int { return d <= '9' ? 
(d - '0') : (d - 'a' + 10); }; + + ethash::hash256 hash = {}; + for (size_t i = 1; i < hex.size(); i += 2) + { + int h = parse_digit(hex[i - 1]); + int l = parse_digit(hex[i]); + hash.bytes[i / 2] = uint8_t((h << 4) | l); + } + return hash; +} + +/// Comparison operator for hash256 to be used in unit tests. +inline bool operator==(const ethash::hash256& a, const ethash::hash256& b) noexcept +{ + return std::memcmp(a.bytes, b.bytes, sizeof(a)) == 0; +} + +inline bool operator!=(const ethash::hash256& a, const ethash::hash256& b) noexcept +{ + return !(a == b); +} + +inline const ethash::epoch_context& get_ethash_epoch_context_0() noexcept +{ + static ethash::epoch_context_ptr context = ethash::create_epoch_context(0); + return *context; +} diff --git a/src/crypto/ethash/include/ethash/ethash.h b/src/crypto/ethash/include/ethash/ethash.h new file mode 100644 index 0000000000..a01a0d92c8 --- /dev/null +++ b/src/crypto/ethash/include/ethash/ethash.h @@ -0,0 +1,131 @@ +/* ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. + * Copyright 2018-2019 Pawel Bylica. + * Licensed under the Apache License, Version 2.0. + */ + +#pragma once + +#include + +#include +#include + +#ifdef __cplusplus +#define NOEXCEPT noexcept +#else +#define NOEXCEPT +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * The Ethash algorithm revision implemented as specified in the Ethash spec + * https://github.com/ethereum/wiki/wiki/Ethash. 
+ */ +#define ETHASH_REVISION "23" + +#define ETHASH_EPOCH_LENGTH 7500 +#define ETHASH_LIGHT_CACHE_ITEM_SIZE 64 +#define ETHASH_FULL_DATASET_ITEM_SIZE 128 +#define ETHASH_NUM_DATASET_ACCESSES 64 + + +struct ethash_epoch_context +{ + const int epoch_number; + const int light_cache_num_items; + const union ethash_hash512* const light_cache; + const uint32_t* const l1_cache; + const int full_dataset_num_items; +}; + + +struct ethash_epoch_context_full; + + +struct ethash_result +{ + union ethash_hash256 final_hash; + union ethash_hash256 mix_hash; +}; + + +/** + * Calculates the number of items in the light cache for given epoch. + * + * This function will search for a prime number matching the criteria given + * by the Ethash so the execution time is not constant. It takes ~ 0.01 ms. + * + * @param epoch_number The epoch number. + * @return The number items in the light cache. + */ +int ethash_calculate_light_cache_num_items(int epoch_number) NOEXCEPT; + + +/** + * Calculates the number of items in the full dataset for given epoch. + * + * This function will search for a prime number matching the criteria given + * by the Ethash so the execution time is not constant. It takes ~ 0.05 ms. + * + * @param epoch_number The epoch number. + * @return The number items in the full dataset. + */ +int ethash_calculate_full_dataset_num_items(int epoch_number) NOEXCEPT; + +/** + * Calculates the epoch seed hash. + * @param epoch_number The epoch number. + * @return The epoch seed hash. + */ +union ethash_hash256 ethash_calculate_epoch_seed(int epoch_number) NOEXCEPT; + + +struct ethash_epoch_context* ethash_create_epoch_context(int epoch_number) NOEXCEPT; + +/** + * Creates the epoch context with the full dataset initialized. + * + * The memory for the full dataset is only allocated and marked as "not-generated". + * The items of the full dataset are generated on the fly when hit for the first time. 
+ * + * The memory allocated in the context MUST be freed with ethash_destroy_epoch_context_full(). + * + * @param epoch_number The epoch number. + * @return Pointer to the context or null in case of memory allocation failure. + */ +struct ethash_epoch_context_full* ethash_create_epoch_context_full(int epoch_number) NOEXCEPT; + +void ethash_destroy_epoch_context(struct ethash_epoch_context* context) NOEXCEPT; + +void ethash_destroy_epoch_context_full(struct ethash_epoch_context_full* context) NOEXCEPT; + + +/** + * Get global shared epoch context. + */ +const struct ethash_epoch_context* ethash_get_global_epoch_context(int epoch_number) NOEXCEPT; + +/** + * Get global shared epoch context with full dataset initialized. + */ +const struct ethash_epoch_context_full* ethash_get_global_epoch_context_full( + int epoch_number) NOEXCEPT; + + +struct ethash_result ethash_hash(const struct ethash_epoch_context* context, + const union ethash_hash256* header_hash, uint64_t nonce) NOEXCEPT; + +bool ethash_verify(const struct ethash_epoch_context* context, + const union ethash_hash256* header_hash, const union ethash_hash256* mix_hash, uint64_t nonce, + const union ethash_hash256* boundary) NOEXCEPT; + +bool ethash_verify_final_hash(const union ethash_hash256* header_hash, + const union ethash_hash256* mix_hash, uint64_t nonce, + const union ethash_hash256* boundary) NOEXCEPT; + +#ifdef __cplusplus +} +#endif diff --git a/src/crypto/ethash/include/ethash/ethash.hpp b/src/crypto/ethash/include/ethash/ethash.hpp new file mode 100644 index 0000000000..8a2c8ff808 --- /dev/null +++ b/src/crypto/ethash/include/ethash/ethash.hpp @@ -0,0 +1,172 @@ +// ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. +// Copyright 2018-2019 Pawel Bylica. +// Licensed under the Apache License, Version 2.0. + +/// @file +/// +/// API design decisions: +/// +/// 1. Signed integer type is used whenever the size of the type is not +/// restricted by the Ethash specification. 
+/// See http://www.aristeia.com/Papers/C++ReportColumns/sep95.pdf. +/// See https://stackoverflow.com/questions/10168079/why-is-size-t-unsigned/. +/// See https://github.com/Microsoft/GSL/issues/171. + +#pragma once + +#include +#include + +#include +#include +#include + +namespace ethash +{ +constexpr auto revision = ETHASH_REVISION; + +static constexpr int epoch_length = ETHASH_EPOCH_LENGTH; +static constexpr int light_cache_item_size = ETHASH_LIGHT_CACHE_ITEM_SIZE; +static constexpr int full_dataset_item_size = ETHASH_FULL_DATASET_ITEM_SIZE; +static constexpr int num_dataset_accesses = ETHASH_NUM_DATASET_ACCESSES; + +using epoch_context = ethash_epoch_context; +using epoch_context_full = ethash_epoch_context_full; + +using result = ethash_result; + +/// Constructs a 256-bit hash from an array of bytes. +/// +/// @param bytes A pointer to array of at least 32 bytes. +/// @return The constructed hash. +inline hash256 hash256_from_bytes(const uint8_t bytes[32]) noexcept +{ + hash256 h; + std::memcpy(&h, bytes, sizeof(h)); + return h; +} + +struct search_result +{ + bool solution_found = false; + uint64_t nonce = 0; + hash256 final_hash = {}; + hash256 mix_hash = {}; + + search_result() noexcept = default; + + search_result(result res, uint64_t n) noexcept + : solution_found(true), nonce(n), final_hash(res.final_hash), mix_hash(res.mix_hash) + {} +}; + + +/// Alias for ethash_calculate_light_cache_num_items(). +static constexpr auto calculate_light_cache_num_items = ethash_calculate_light_cache_num_items; + +/// Alias for ethash_calculate_full_dataset_num_items(). +static constexpr auto calculate_full_dataset_num_items = ethash_calculate_full_dataset_num_items; + +/// Alias for ethash_calculate_epoch_seed(). +static constexpr auto calculate_epoch_seed = ethash_calculate_epoch_seed; + + +/// Calculates the epoch number out of the block number. +inline constexpr int get_epoch_number(int block_number) noexcept +{ + return block_number ? 
block_number / epoch_length : 0; +} + +/** + * Coverts the number of items of a light cache to size in bytes. + * + * @param num_items The number of items in the light cache. + * @return The size of the light cache in bytes. + */ +inline constexpr size_t get_light_cache_size(int num_items) noexcept +{ + return static_cast(num_items) * light_cache_item_size; +} + +/** + * Coverts the number of items of a full dataset to size in bytes. + * + * @param num_items The number of items in the full dataset. + * @return The size of the full dataset in bytes. + */ +inline constexpr uint64_t get_full_dataset_size(int num_items) noexcept +{ + return static_cast(num_items) * full_dataset_item_size; +} + +/// Owned unique pointer to an epoch context. +using epoch_context_ptr = std::unique_ptr; + +using epoch_context_full_ptr = + std::unique_ptr; + +/// Creates Ethash epoch context. +/// +/// This is a wrapper for ethash_create_epoch_number C function that returns +/// the context as a smart pointer which handles the destruction of the context. 
+inline epoch_context_ptr create_epoch_context(int epoch_number) noexcept +{ + return {ethash_create_epoch_context(epoch_number), ethash_destroy_epoch_context}; +} + +inline epoch_context_full_ptr create_epoch_context_full(int epoch_number) noexcept +{ + return {ethash_create_epoch_context_full(epoch_number), ethash_destroy_epoch_context_full}; +} + + +inline result hash( + const epoch_context& context, const hash256& header_hash, uint64_t nonce) noexcept +{ + return ethash_hash(&context, &header_hash, nonce); +} + +result hash(const epoch_context_full& context, const hash256& header_hash, uint64_t nonce) noexcept; + +inline bool verify_final_hash(const hash256& header_hash, const hash256& mix_hash, uint64_t nonce, + const hash256& boundary) noexcept +{ + return ethash_verify_final_hash(&header_hash, &mix_hash, nonce, &boundary); +} + +inline bool verify(const epoch_context& context, const hash256& header_hash, const hash256& mix_hash, + uint64_t nonce, const hash256& boundary) noexcept +{ + return ethash_verify(&context, &header_hash, &mix_hash, nonce, &boundary); +} + +search_result search_light(const epoch_context& context, const hash256& header_hash, + const hash256& boundary, uint64_t start_nonce, size_t iterations) noexcept; + +search_result search(const epoch_context_full& context, const hash256& header_hash, + const hash256& boundary, uint64_t start_nonce, size_t iterations) noexcept; + + +/// Tries to find the epoch number matching the given seed hash. +/// +/// Mining pool protocols (many variants of stratum and "getwork") send out +/// seed hash instead of epoch number to workers. This function tries to recover +/// the epoch number from this seed hash. +/// +/// @param seed Ethash seed hash. +/// @return The epoch number or -1 if not found. +int find_epoch_number(const hash256& seed) noexcept; + + +/// Get global shared epoch context. 
+inline const epoch_context& get_global_epoch_context(int epoch_number) noexcept +{ + return *ethash_get_global_epoch_context(epoch_number); +} + +/// Get global shared epoch context with full dataset initialized. +inline const epoch_context_full& get_global_epoch_context_full(int epoch_number) noexcept +{ + return *ethash_get_global_epoch_context_full(epoch_number); +} +} // namespace ethash diff --git a/src/crypto/ethash/include/ethash/hash_types.h b/src/crypto/ethash/include/ethash/hash_types.h new file mode 100644 index 0000000000..bd9343686f --- /dev/null +++ b/src/crypto/ethash/include/ethash/hash_types.h @@ -0,0 +1,50 @@ +/* ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. + * Copyright 2018-2019 Pawel Bylica. + * Licensed under the Apache License, Version 2.0. + */ + +#pragma once + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +union ethash_hash256 +{ + uint64_t word64s[4]; + uint32_t word32s[8]; + uint8_t bytes[32]; + char str[32]; +}; + +union ethash_hash512 +{ + uint64_t word64s[8]; + uint32_t word32s[16]; + uint8_t bytes[64]; + char str[64]; +}; + +union ethash_hash1024 +{ + union ethash_hash512 hash512s[2]; + uint64_t word64s[16]; + uint32_t word32s[32]; + uint8_t bytes[128]; + char str[128]; +}; + +union ethash_hash2048 +{ + union ethash_hash512 hash512s[4]; + uint64_t word64s[32]; + uint32_t word32s[64]; + uint8_t bytes[256]; + char str[256]; +}; + +#ifdef __cplusplus +} +#endif diff --git a/src/crypto/ethash/include/ethash/hash_types.hpp b/src/crypto/ethash/include/ethash/hash_types.hpp new file mode 100644 index 0000000000..3c02b61cea --- /dev/null +++ b/src/crypto/ethash/include/ethash/hash_types.hpp @@ -0,0 +1,15 @@ +// ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. +// Copyright 2018-2019 Pawel Bylica. +// Licensed under the Apache License, Version 2.0. 
+ +#pragma once + +#include + +namespace ethash +{ +using hash256 = ethash_hash256; +using hash512 = ethash_hash512; +using hash1024 = ethash_hash1024; +using hash2048 = ethash_hash2048; +} // namespace ethash diff --git a/src/crypto/ethash/include/ethash/keccak.h b/src/crypto/ethash/include/ethash/keccak.h new file mode 100644 index 0000000000..9c449dbbd8 --- /dev/null +++ b/src/crypto/ethash/include/ethash/keccak.h @@ -0,0 +1,49 @@ +/* ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. + * Copyright 2018-2019 Pawel Bylica. + * Licensed under the Apache License, Version 2.0. + */ + +#pragma once + +#include + +#include + +#ifdef __cplusplus +#define NOEXCEPT noexcept +#else +#define NOEXCEPT +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * The Keccak-f[1600] function. + * + * The implementation of the Keccak-f function with 1600-bit width of the permutation (b). + * The size of the state is also 1600 bit what gives 25 64-bit words. + * + * @param state The state of 25 64-bit words on which the permutation is to be performed. + */ +void ethash_keccakf1600(uint64_t state[25]) NOEXCEPT; + +/** + * The Keccak-f[800] function. + * + * The implementation of the Keccak-f function with 800-bit width of the permutation (b). + * The size of the state is also 800 bit what gives 25 32-bit words. + * + * @param state The state of 25 32-bit words on which the permutation is to be performed. 
+ */ +void ethash_keccakf800(uint32_t state[25]) NOEXCEPT; + +union ethash_hash256 ethash_keccak256(const uint8_t* data, size_t size) NOEXCEPT; +union ethash_hash256 ethash_keccak256_32(const uint8_t data[32]) NOEXCEPT; +union ethash_hash512 ethash_keccak512(const uint8_t* data, size_t size) NOEXCEPT; +union ethash_hash512 ethash_keccak512_64(const uint8_t data[64]) NOEXCEPT; + +#ifdef __cplusplus +} +#endif diff --git a/src/crypto/ethash/include/ethash/keccak.hpp b/src/crypto/ethash/include/ethash/keccak.hpp new file mode 100644 index 0000000000..d92948c5b0 --- /dev/null +++ b/src/crypto/ethash/include/ethash/keccak.hpp @@ -0,0 +1,35 @@ +// ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. +// Copyright 2018-2019 Pawel Bylica. +// Licensed under the Apache License, Version 2.0. + +#pragma once + +#include +#include + +namespace ethash +{ +inline hash256 keccak256(const uint8_t* data, size_t size) noexcept +{ + return ethash_keccak256(data, size); +} + +inline hash256 keccak256(const hash256& input) noexcept +{ + return ethash_keccak256_32(input.bytes); +} + +inline hash512 keccak512(const uint8_t* data, size_t size) noexcept +{ + return ethash_keccak512(data, size); +} + +inline hash512 keccak512(const hash512& input) noexcept +{ + return ethash_keccak512_64(input.bytes); +} + +static constexpr auto keccak256_32 = ethash_keccak256_32; +static constexpr auto keccak512_64 = ethash_keccak512_64; + +} // namespace ethash diff --git a/src/crypto/ethash/include/ethash/progpow.hpp b/src/crypto/ethash/include/ethash/progpow.hpp new file mode 100644 index 0000000000..4f36cb81bb --- /dev/null +++ b/src/crypto/ethash/include/ethash/progpow.hpp @@ -0,0 +1,49 @@ +// ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. +// Copyright 2018-2019 Pawel Bylica. +// Licensed under the Apache License, Version 2.0. + +/// @file +/// +/// ProgPoW API +/// +/// This file provides the public API for ProgPoW as the Ethash API extension. 
+ +#include + +namespace progpow +{ +using namespace ethash; // Include ethash namespace. + +/// The ProgPoW algorithm revision implemented as specified in the spec +/// https://github.com/ifdefelse/ProgPOW. +constexpr auto revision = "0.9.4"; + +constexpr int period_length = 3; +constexpr uint32_t num_regs = 32; +constexpr size_t num_lanes = 16; +constexpr int num_cache_accesses = 11; +constexpr int num_math_operations = 18; +constexpr size_t l1_cache_size = 16 * 1024; +constexpr size_t l1_cache_num_items = l1_cache_size / sizeof(uint32_t); + +result hash(const epoch_context& context, int block_number, const hash256& header_hash, + uint64_t nonce) noexcept; + +result hash(const epoch_context_full& context, int block_number, const hash256& header_hash, + uint64_t nonce) noexcept; + +bool verify(const epoch_context& context, int block_number, const hash256& header_hash, + const hash256& mix_hash, uint64_t nonce, const hash256& boundary) noexcept; + +hash256 hash_no_verify(const int& block_number, const hash256& header_hash, + const hash256& mix_hash, const uint64_t& nonce) noexcept; + +search_result search_light(const epoch_context& context, int block_number, + const hash256& header_hash, const hash256& boundary, uint64_t start_nonce, + size_t iterations) noexcept; + +search_result search(const epoch_context_full& context, int block_number, + const hash256& header_hash, const hash256& boundary, uint64_t start_nonce, + size_t iterations) noexcept; + +} // namespace progpow diff --git a/src/crypto/ethash/include/ethash/version.h b/src/crypto/ethash/include/ethash/version.h new file mode 100644 index 0000000000..f08900fc0d --- /dev/null +++ b/src/crypto/ethash/include/ethash/version.h @@ -0,0 +1,18 @@ +/* ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. + * Copyright 2019 Pawel Bylica. + * Licensed under the Apache License, Version 2.0. + */ + +#pragma once + +/** The ethash library version. 
*/ +#define ETHASH_VERSION "0.5.1-alpha.1" + +#ifdef __cplusplus +namespace ethash +{ +/// The ethash library version. +constexpr auto version = ETHASH_VERSION; + +} // namespace ethash +#endif diff --git a/src/crypto/ethash/lib/ethash/bit_manipulation.h b/src/crypto/ethash/lib/ethash/bit_manipulation.h new file mode 100644 index 0000000000..b88bfdaab6 --- /dev/null +++ b/src/crypto/ethash/lib/ethash/bit_manipulation.h @@ -0,0 +1,81 @@ +/* ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. + * Copyright 2018-2019 Pawel Bylica. + * Licensed under the Apache License, Version 2.0. + */ + +#pragma once + +#include "builtins.h" +#include "../support/attributes.h" + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +static inline uint32_t rotl32(uint32_t n, unsigned int c) +{ + const unsigned int mask = 31; + + c &= mask; + unsigned int neg_c = (unsigned int)(-(int)c); + return (n << c) | (n >> (neg_c & mask)); +} + +static inline uint32_t rotr32(uint32_t n, unsigned int c) +{ + const unsigned int mask = 31; + + c &= mask; + unsigned int neg_c = (unsigned int)(-(int)c); + return (n >> c) | (n << (neg_c & mask)); +} + +static inline uint32_t clz32(uint32_t x) +{ + return x ? (uint32_t)__builtin_clz(x) : 32; +} + +static inline uint32_t popcount32(uint32_t x) +{ + return (uint32_t)__builtin_popcount(x); +} + +static inline uint32_t mul_hi32(uint32_t x, uint32_t y) +{ + return (uint32_t)(((uint64_t)x * (uint64_t)y) >> 32); +} + + +/** FNV 32-bit prime. */ +static const uint32_t fnv_prime = 0x01000193; + +/** FNV 32-bit offset basis. */ +static const uint32_t fnv_offset_basis = 0x811c9dc5; + +/** + * The implementation of FNV-1 hash. + * + * See https://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function#FNV-1_hash. + */ +NO_SANITIZE("unsigned-integer-overflow") +static inline uint32_t fnv1(uint32_t u, uint32_t v) noexcept +{ + return (u * fnv_prime) ^ v; +} + +/** + * The implementation of FNV-1a hash. 
+ * + * See https://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function#FNV-1a_hash. + */ +NO_SANITIZE("unsigned-integer-overflow") +static inline uint32_t fnv1a(uint32_t u, uint32_t v) noexcept +{ + return (u ^ v) * fnv_prime; +} + +#ifdef __cplusplus +} +#endif diff --git a/src/crypto/ethash/lib/ethash/builtins.h b/src/crypto/ethash/lib/ethash/builtins.h new file mode 100644 index 0000000000..0c43188ada --- /dev/null +++ b/src/crypto/ethash/lib/ethash/builtins.h @@ -0,0 +1,43 @@ +/* ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. + * Copyright 2018-2019 Pawel Bylica. + * Licensed under the Apache License, Version 2.0. + */ + +/** + * @file + * Implementation of GCC/clang builtins for MSVC compiler. + */ + +#pragma once + +#ifdef _MSC_VER +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * Returns the number of leading 0-bits in `x`, starting at the most significant bit position. + * If `x` is 0, the result is undefined. + */ +static inline int __builtin_clz(unsigned int x) +{ + unsigned long most_significant_bit; + _BitScanReverse(&most_significant_bit, x); + return 31 - (int)most_significant_bit; +} + +/** + * Returns the number of 1-bits in `x`. + */ +static inline int __builtin_popcount(unsigned int x) +{ + return (int)__popcnt(x); +} + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/crypto/ethash/lib/ethash/endianness.hpp b/src/crypto/ethash/lib/ethash/endianness.hpp new file mode 100644 index 0000000000..7d367a39a5 --- /dev/null +++ b/src/crypto/ethash/lib/ethash/endianness.hpp @@ -0,0 +1,99 @@ +// ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. +// Copyright 2018-2019 Pawel Bylica. +// Licensed under the Apache License, Version 2.0. + +/// @file +/// This file contains helper functions to handle big-endian architectures. +/// The Ethash algorithm is naturally defined for little-endian architectures +/// so for those the helpers are just no-op empty functions. 
+/// For big-endian architectures we need 32-bit and 64-bit byte swapping in +/// some places. + +#pragma once + +#include + +#if _WIN32 + +#include + +#define bswap32 _byteswap_ulong +#define bswap64 _byteswap_uint64 + +// On Windows assume little endian. +#define __LITTLE_ENDIAN 1234 +#define __BIG_ENDIAN 4321 +#define __BYTE_ORDER __LITTLE_ENDIAN + +#elif __APPLE__ + +#include + +#define bswap32 __builtin_bswap32 +#define bswap64 __builtin_bswap64 + +#else + +#include + +#define bswap32 __builtin_bswap32 +#define bswap64 __builtin_bswap64 + +#endif + +namespace ethash +{ +#if __BYTE_ORDER == __LITTLE_ENDIAN + +struct le +{ + static uint32_t uint32(uint32_t x) noexcept { return x; } + static uint64_t uint64(uint64_t x) noexcept { return x; } + + static const hash1024& uint32s(const hash1024& h) noexcept { return h; } + static const hash512& uint32s(const hash512& h) noexcept { return h; } + static const hash256& uint32s(const hash256& h) noexcept { return h; } +}; + +struct be +{ + static uint64_t uint64(uint64_t x) noexcept { return bswap64(x); } +}; + + +#elif __BYTE_ORDER == __BIG_ENDIAN + +struct le +{ + static uint32_t uint32(uint32_t x) noexcept { return bswap32(x); } + static uint64_t uint64(uint64_t x) noexcept { return bswap64(x); } + + static hash1024 uint32s(hash1024 h) noexcept + { + for (auto& w : h.word32s) + w = uint32(w); + return h; + } + + static hash512 uint32s(hash512 h) noexcept + { + for (auto& w : h.word32s) + w = uint32(w); + return h; + } + + static hash256 uint32s(hash256 h) noexcept + { + for (auto& w : h.word32s) + w = uint32(w); + return h; + } +}; + +struct be +{ + static uint64_t uint64(uint64_t x) noexcept { return x; } +}; + +#endif +} // namespace ethash \ No newline at end of file diff --git a/src/crypto/ethash/lib/ethash/ethash-internal.hpp b/src/crypto/ethash/lib/ethash/ethash-internal.hpp new file mode 100644 index 0000000000..c7c0833325 --- /dev/null +++ b/src/crypto/ethash/lib/ethash/ethash-internal.hpp @@ -0,0 +1,68 @@ +// 
ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. +// Copyright 2018-2019 Pawel Bylica. +// Licensed under the Apache License, Version 2.0. + +/// @file +/// Contains declarations of internal ethash functions to allow them to be +/// unit-tested. + +#pragma once + +#include + +#include "endianness.hpp" + +#include +#include + +extern "C" struct ethash_epoch_context_full : ethash_epoch_context +{ + ethash_hash1024* full_dataset; + + constexpr ethash_epoch_context_full(int epoch, int light_num_items, + const ethash_hash512* light, const uint32_t* l1, int dataset_num_items, + ethash_hash1024* dataset) noexcept + : ethash_epoch_context{epoch, light_num_items, light, l1, dataset_num_items}, + full_dataset{dataset} + {} +}; + +namespace ethash +{ +inline bool is_less_or_equal(const hash256& a, const hash256& b) noexcept +{ + for (size_t i = 0; i < (sizeof(a) / sizeof(a.word64s[0])); ++i) + { + if (be::uint64(a.word64s[i]) > be::uint64(b.word64s[i])) + return false; + if (be::uint64(a.word64s[i]) < be::uint64(b.word64s[i])) + return true; + } + return true; +} + +inline bool is_equal(const hash256& a, const hash256& b) noexcept +{ + return std::memcmp(a.bytes, b.bytes, sizeof(a)) == 0; +} + +void build_light_cache(hash512 cache[], int num_items, const hash256& seed) noexcept; + +hash512 calculate_dataset_item_512(const epoch_context& context, int64_t index) noexcept; +hash1024 calculate_dataset_item_1024(const epoch_context& context, uint32_t index) noexcept; +hash2048 calculate_dataset_item_2048(const epoch_context& context, uint32_t index) noexcept; + +namespace generic +{ +using hash_fn_512 = hash512 (*)(const uint8_t* data, size_t size); +using build_light_cache_fn = void (*)(hash512 cache[], int num_items, const hash256& seed); + +void build_light_cache( + hash_fn_512 hash_fn, hash512 cache[], int num_items, const hash256& seed) noexcept; + +epoch_context_full* create_epoch_context( + build_light_cache_fn build_fn, int epoch_number, bool 
full) noexcept; + +} // namespace generic + +} // namespace ethash diff --git a/src/crypto/ethash/lib/ethash/ethash.cpp b/src/crypto/ethash/lib/ethash/ethash.cpp new file mode 100644 index 0000000000..b32a857cd5 --- /dev/null +++ b/src/crypto/ethash/lib/ethash/ethash.cpp @@ -0,0 +1,442 @@ +// ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. +// Copyright 2018-2019 Pawel Bylica. +// Licensed under the Apache License, Version 2.0. + +#include "ethash-internal.hpp" + +#include "../support/attributes.h" +#include "bit_manipulation.h" +#include "endianness.hpp" +#include "primes.h" +#include +#include + +#include +#include +#include +#include + +namespace ethash +{ +// Internal constants: +constexpr static int light_cache_init_size = 1 << 24; +constexpr static int light_cache_growth = 1 << 17; +constexpr static int light_cache_rounds = 3; +constexpr static int full_dataset_init_size = 1 << 30; +constexpr static int full_dataset_growth = 1 << 23; +constexpr static int full_dataset_item_parents = 512; + +// Verify constants: +static_assert(sizeof(hash512) == ETHASH_LIGHT_CACHE_ITEM_SIZE, ""); +static_assert(sizeof(hash1024) == ETHASH_FULL_DATASET_ITEM_SIZE, ""); +static_assert(light_cache_item_size == ETHASH_LIGHT_CACHE_ITEM_SIZE, ""); +static_assert(full_dataset_item_size == ETHASH_FULL_DATASET_ITEM_SIZE, ""); + + +namespace +{ +using ::fnv1; + +inline hash512 fnv1(const hash512& u, const hash512& v) noexcept +{ + hash512 r; + for (size_t i = 0; i < sizeof(r) / sizeof(r.word32s[0]); ++i) + r.word32s[i] = fnv1(u.word32s[i], v.word32s[i]); + return r; +} + +inline hash512 bitwise_xor(const hash512& x, const hash512& y) noexcept +{ + hash512 z; + for (size_t i = 0; i < sizeof(z) / sizeof(z.word64s[0]); ++i) + z.word64s[i] = x.word64s[i] ^ y.word64s[i]; + return z; +} +} // namespace + +int find_epoch_number(const hash256& seed) noexcept +{ + static constexpr int num_tries = 30000; // Divisible by 16. + + // Thread-local cache of the last search. 
+ static thread_local int cached_epoch_number = 0;
+ static thread_local hash256 cached_seed = {};
+
+ // Load from memory once (memory will be clobbered by keccak256()).
+ const uint32_t seed_part = seed.word32s[0];
+ const int e = cached_epoch_number;
+ hash256 s = cached_seed;
+
+ if (s.word32s[0] == seed_part)
+ return e;
+
+ // Try the next seed, will match for sequential epoch access.
+ s = keccak256(s);
+ if (s.word32s[0] == seed_part)
+ {
+ cached_seed = s;
+ cached_epoch_number = e + 1;
+ return e + 1;
+ }
+
+ // Search for matching seed starting from epoch 0.
+ s = {};
+ for (int i = 0; i < num_tries; ++i)
+ {
+ if (s.word32s[0] == seed_part)
+ {
+ cached_seed = s;
+ cached_epoch_number = i;
+ return i;
+ }
+
+ s = keccak256(s);
+ }
+
+ return -1;
+}
+
+namespace generic
+{
+void build_light_cache(
+ hash_fn_512 hash_fn, hash512 cache[], int num_items, const hash256& seed) noexcept
+{
+ hash512 item = hash_fn(seed.bytes, sizeof(seed));
+ cache[0] = item;
+ for (int i = 1; i < num_items; ++i)
+ {
+ item = hash_fn(item.bytes, sizeof(item));
+ cache[i] = item;
+ }
+
+ for (int q = 0; q < light_cache_rounds; ++q)
+ {
+ for (int i = 0; i < num_items; ++i)
+ {
+ const uint32_t index_limit = static_cast(num_items);
+
+ // First index: 4 first bytes of the item as little-endian integer.
+ const uint32_t t = le::uint32(cache[i].word32s[0]);
+ const uint32_t v = t % index_limit;
+
+ // Second index.
+ const uint32_t w = static_cast(num_items + (i - 1)) % index_limit; + + const hash512 x = bitwise_xor(cache[v], cache[w]); + cache[i] = hash_fn(x.bytes, sizeof(x)); + } + } +} + +epoch_context_full* create_epoch_context( + build_light_cache_fn build_fn, int epoch_number, bool full) noexcept +{ + static_assert(sizeof(epoch_context_full) < sizeof(hash512), "epoch_context too big"); + static constexpr size_t context_alloc_size = sizeof(hash512); + + const int light_cache_num_items = calculate_light_cache_num_items(epoch_number); + const int full_dataset_num_items = calculate_full_dataset_num_items(epoch_number); + const size_t light_cache_size = get_light_cache_size(light_cache_num_items); + const size_t full_dataset_size = + full ? static_cast(full_dataset_num_items) * sizeof(hash1024) : + progpow::l1_cache_size; + + const size_t alloc_size = context_alloc_size + light_cache_size + full_dataset_size; + + char* const alloc_data = static_cast(std::calloc(1, alloc_size)); + if (!alloc_data) + return nullptr; // Signal out-of-memory by returning null pointer. + + hash512* const light_cache = reinterpret_cast(alloc_data + context_alloc_size); + const hash256 epoch_seed = calculate_epoch_seed(epoch_number); + build_fn(light_cache, light_cache_num_items, epoch_seed); + + uint32_t* const l1_cache = + reinterpret_cast(alloc_data + context_alloc_size + light_cache_size); + + hash1024* full_dataset = full ? 
reinterpret_cast(l1_cache) : nullptr;
+
+ epoch_context_full* const context = new (alloc_data) epoch_context_full{
+ epoch_number,
+ light_cache_num_items,
+ light_cache,
+ l1_cache,
+ full_dataset_num_items,
+ full_dataset,
+ };
+
+ auto* full_dataset_2048 = reinterpret_cast(l1_cache);
+ for (uint32_t i = 0; i < progpow::l1_cache_size / sizeof(full_dataset_2048[0]); ++i)
+ full_dataset_2048[i] = calculate_dataset_item_2048(*context, i);
+ return context;
+}
+} // namespace generic
+
+void build_light_cache(hash512 cache[], int num_items, const hash256& seed) noexcept
+{
+ return generic::build_light_cache(keccak512, cache, num_items, seed);
+}
+
+struct item_state
+{
+ const hash512* const cache;
+ const int64_t num_cache_items;
+ const uint32_t seed;
+
+ hash512 mix;
+
+ ALWAYS_INLINE item_state(const epoch_context& context, int64_t index) noexcept
+ : cache{context.light_cache},
+ num_cache_items{context.light_cache_num_items},
+ seed{static_cast(index)}
+ {
+ mix = cache[index % num_cache_items];
+ mix.word32s[0] ^= le::uint32(seed);
+ mix = le::uint32s(keccak512(mix));
+ }
+
+ ALWAYS_INLINE void update(uint32_t round) noexcept
+ {
+ static constexpr size_t num_words = sizeof(mix) / sizeof(uint32_t);
+ const uint32_t t = fnv1(seed ^ round, mix.word32s[round % num_words]);
+ const int64_t parent_index = t % num_cache_items;
+ mix = fnv1(mix, le::uint32s(cache[parent_index]));
+ }
+
+ ALWAYS_INLINE hash512 final() noexcept { return keccak512(le::uint32s(mix)); }
+};
+
+hash512 calculate_dataset_item_512(const epoch_context& context, int64_t index) noexcept
+{
+ item_state item0{context, index};
+ for (uint32_t j = 0; j < full_dataset_item_parents; ++j)
+ item0.update(j);
+ return item0.final();
+}
+
+/// Calculates a full dataset item
+///
+/// This consists of two 512-bit items produced by calculate_dataset_item_512().
+/// Here the computation is done interleaved for better performance.
+hash1024 calculate_dataset_item_1024(const epoch_context& context, uint32_t index) noexcept +{ + item_state item0{context, int64_t(index) * 2}; + item_state item1{context, int64_t(index) * 2 + 1}; + + for (uint32_t j = 0; j < full_dataset_item_parents; ++j) + { + item0.update(j); + item1.update(j); + } + + return hash1024{{item0.final(), item1.final()}}; +} + +hash2048 calculate_dataset_item_2048(const epoch_context& context, uint32_t index) noexcept +{ + item_state item0{context, int64_t(index) * 4}; + item_state item1{context, int64_t(index) * 4 + 1}; + item_state item2{context, int64_t(index) * 4 + 2}; + item_state item3{context, int64_t(index) * 4 + 3}; + + for (uint32_t j = 0; j < full_dataset_item_parents; ++j) + { + item0.update(j); + item1.update(j); + item2.update(j); + item3.update(j); + } + + return hash2048{{item0.final(), item1.final(), item2.final(), item3.final()}}; +} + +namespace +{ +using lookup_fn = hash1024 (*)(const epoch_context&, uint32_t); + +inline hash512 hash_seed(const hash256& header_hash, uint64_t nonce) noexcept +{ + nonce = le::uint64(nonce); + uint8_t init_data[sizeof(header_hash) + sizeof(nonce)]; + std::memcpy(&init_data[0], &header_hash, sizeof(header_hash)); + std::memcpy(&init_data[sizeof(header_hash)], &nonce, sizeof(nonce)); + + return keccak512(init_data, sizeof(init_data)); +} + +inline hash256 hash_final(const hash512& seed, const hash256& mix_hash) +{ + uint8_t final_data[sizeof(seed) + sizeof(mix_hash)]; + std::memcpy(&final_data[0], seed.bytes, sizeof(seed)); + std::memcpy(&final_data[sizeof(seed)], mix_hash.bytes, sizeof(mix_hash)); + return keccak256(final_data, sizeof(final_data)); +} + +inline hash256 hash_kernel( + const epoch_context& context, const hash512& seed, lookup_fn lookup) noexcept +{ + static constexpr size_t num_words = sizeof(hash1024) / sizeof(uint32_t); + const uint32_t index_limit = static_cast(context.full_dataset_num_items); + const uint32_t seed_init = le::uint32(seed.word32s[0]); + + hash1024 
mix{{le::uint32s(seed), le::uint32s(seed)}}; + + for (uint32_t i = 0; i < num_dataset_accesses; ++i) + { + const uint32_t p = fnv1(i ^ seed_init, mix.word32s[i % num_words]) % index_limit; + const hash1024 newdata = le::uint32s(lookup(context, p)); + + for (size_t j = 0; j < num_words; ++j) + mix.word32s[j] = fnv1(mix.word32s[j], newdata.word32s[j]); + } + + hash256 mix_hash; + for (size_t i = 0; i < num_words; i += 4) + { + const uint32_t h1 = fnv1(mix.word32s[i], mix.word32s[i + 1]); + const uint32_t h2 = fnv1(h1, mix.word32s[i + 2]); + const uint32_t h3 = fnv1(h2, mix.word32s[i + 3]); + mix_hash.word32s[i / 4] = h3; + } + + return le::uint32s(mix_hash); +} +} // namespace + +result hash(const epoch_context_full& context, const hash256& header_hash, uint64_t nonce) noexcept +{ + static const auto lazy_lookup = [](const epoch_context& ctx, uint32_t index) noexcept + { + auto full_dataset = static_cast(ctx).full_dataset; + hash1024& item = full_dataset[index]; + if (item.word64s[0] == 0) + { + // TODO: Copy elision here makes it thread-safe? 
+ item = calculate_dataset_item_1024(ctx, index); + } + + return item; + }; + + const hash512 seed = hash_seed(header_hash, nonce); + const hash256 mix_hash = hash_kernel(context, seed, lazy_lookup); + return {hash_final(seed, mix_hash), mix_hash}; +} + +search_result search_light(const epoch_context& context, const hash256& header_hash, + const hash256& boundary, uint64_t start_nonce, size_t iterations) noexcept +{ + const uint64_t end_nonce = start_nonce + iterations; + for (uint64_t nonce = start_nonce; nonce < end_nonce; ++nonce) + { + result r = hash(context, header_hash, nonce); + if (is_less_or_equal(r.final_hash, boundary)) + return {r, nonce}; + } + return {}; +} + +search_result search(const epoch_context_full& context, const hash256& header_hash, + const hash256& boundary, uint64_t start_nonce, size_t iterations) noexcept +{ + const uint64_t end_nonce = start_nonce + iterations; + for (uint64_t nonce = start_nonce; nonce < end_nonce; ++nonce) + { + result r = hash(context, header_hash, nonce); + if (is_less_or_equal(r.final_hash, boundary)) + return {r, nonce}; + } + return {}; +} +} // namespace ethash + +using namespace ethash; + +extern "C" { + +ethash_hash256 ethash_calculate_epoch_seed(int epoch_number) noexcept +{ + ethash_hash256 epoch_seed = {}; + for (int i = 0; i < epoch_number; ++i) + epoch_seed = ethash_keccak256_32(epoch_seed.bytes); + return epoch_seed; +} + +int ethash_calculate_light_cache_num_items(int epoch_number) noexcept +{ + static constexpr int item_size = sizeof(hash512); + static constexpr int num_items_init = light_cache_init_size / item_size; + static constexpr int num_items_growth = light_cache_growth / item_size; + static_assert( + light_cache_init_size % item_size == 0, "light_cache_init_size not multiple of item size"); + static_assert( + light_cache_growth % item_size == 0, "light_cache_growth not multiple of item size"); + + int num_items_upper_bound = num_items_init + epoch_number * num_items_growth; + int num_items = 
ethash_find_largest_prime(num_items_upper_bound); + return num_items; +} + +int ethash_calculate_full_dataset_num_items(int epoch_number) noexcept +{ + static constexpr int item_size = sizeof(hash1024); + static constexpr int num_items_init = full_dataset_init_size / item_size; + static constexpr int num_items_growth = full_dataset_growth / item_size; + static_assert(full_dataset_init_size % item_size == 0, + "full_dataset_init_size not multiple of item size"); + static_assert( + full_dataset_growth % item_size == 0, "full_dataset_growth not multiple of item size"); + + int num_items_upper_bound = num_items_init + epoch_number * num_items_growth; + int num_items = ethash_find_largest_prime(num_items_upper_bound); + return num_items; +} + +epoch_context* ethash_create_epoch_context(int epoch_number) noexcept +{ + return generic::create_epoch_context(build_light_cache, epoch_number, false); +} + +epoch_context_full* ethash_create_epoch_context_full(int epoch_number) noexcept +{ + return generic::create_epoch_context(build_light_cache, epoch_number, true); +} + +void ethash_destroy_epoch_context_full(epoch_context_full* context) noexcept +{ + ethash_destroy_epoch_context(context); +} + +void ethash_destroy_epoch_context(epoch_context* context) noexcept +{ + context->~epoch_context(); + std::free(context); +} + +ethash_result ethash_hash( + const epoch_context* context, const hash256* header_hash, uint64_t nonce) noexcept +{ + const hash512 seed = hash_seed(*header_hash, nonce); + const hash256 mix_hash = hash_kernel(*context, seed, calculate_dataset_item_1024); + return {hash_final(seed, mix_hash), mix_hash}; +} + +bool ethash_verify_final_hash(const hash256* header_hash, const hash256* mix_hash, uint64_t nonce, + const hash256* boundary) noexcept +{ + const hash512 seed = hash_seed(*header_hash, nonce); + return is_less_or_equal(hash_final(seed, *mix_hash), *boundary); +} + +bool ethash_verify(const epoch_context* context, const hash256* header_hash, + const hash256* 
mix_hash, uint64_t nonce, const hash256* boundary) noexcept +{ + const hash512 seed = hash_seed(*header_hash, nonce); + if (!is_less_or_equal(hash_final(seed, *mix_hash), *boundary)) + return false; + + const hash256 expected_mix_hash = hash_kernel(*context, seed, calculate_dataset_item_1024); + return is_equal(expected_mix_hash, *mix_hash); +} + +} // extern "C" diff --git a/src/crypto/ethash/lib/ethash/kiss99.hpp b/src/crypto/ethash/lib/ethash/kiss99.hpp new file mode 100644 index 0000000000..8332a7ce9d --- /dev/null +++ b/src/crypto/ethash/lib/ethash/kiss99.hpp @@ -0,0 +1,64 @@ +/* ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. + * Copyright 2018-2019 Pawel Bylica. + * Licensed under the Apache License, Version 2.0. + */ + +#pragma once + +#include "../support/attributes.h" +#include + +/** + * KISS PRNG by the spec from 1999. + * + * The implementation of KISS pseudo-random number generator + * by the specification published on 21 Jan 1999 in + * http://www.cse.yorku.ca/~oz/marsaglia-rng.html. + * The KISS is not versioned so here we are using `kiss99` prefix to indicate + * the version from 1999. + * + * The specification uses `unsigned long` type with the intention for 32-bit + * values. Because in GCC/clang for 64-bit architectures `unsigned long` is + * 64-bit size type, here the explicit `uint32_t` type is used. + * + * @defgroup kiss99 KISS99 + * @{ + */ + +/** + * The KISS generator. + */ +class kiss99 +{ + uint32_t z = 362436069; + uint32_t w = 521288629; + uint32_t jsr = 123456789; + uint32_t jcong = 380116160; + +public: + /** Creates KISS generator state with default values provided by the specification. */ + kiss99() noexcept = default; + + /** Creates KISS generator state with provided init values.*/ + kiss99(uint32_t _z, uint32_t _w, uint32_t _jsr, uint32_t _jcong) noexcept + : z{_z}, w{_w}, jsr{_jsr}, jcong{_jcong} + {} + + /** Generates next number from the KISS generator. 
*/ + NO_SANITIZE("unsigned-integer-overflow") + uint32_t operator()() noexcept + { + z = 36969 * (z & 0xffff) + (z >> 16); + w = 18000 * (w & 0xffff) + (w >> 16); + + jcong = 69069 * jcong + 1234567; + + jsr ^= (jsr << 17); + jsr ^= (jsr >> 13); + jsr ^= (jsr << 5); + + return (((z << 16) + w) ^ jcong) + jsr; + } +}; + +/** @} */ diff --git a/src/crypto/ethash/lib/ethash/managed.cpp b/src/crypto/ethash/lib/ethash/managed.cpp new file mode 100644 index 0000000000..3bc277df68 --- /dev/null +++ b/src/crypto/ethash/lib/ethash/managed.cpp @@ -0,0 +1,100 @@ +// ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. +// Copyright 2018-2019 Pawel Bylica. +// Licensed under the Apache License, Version 2.0. + +#include "crypto/ethash/lib/ethash/ethash-internal.hpp" +#include "sync.h" + +#include + +#if !defined(__has_cpp_attribute) +#define __has_cpp_attribute(x) 0 +#endif + +#if __has_cpp_attribute(gnu::noinline) +#define ATTRIBUTE_NOINLINE [[gnu::noinline]] +#elif _MSC_VER +#define ATTRIBUTE_NOINLINE __declspec(noinline) +#else +#define ATTRIBUTE_NOINLINE +#endif + +using namespace ethash; + +namespace +{ + +CCriticalSection shared_context_cs; +std::shared_ptr shared_context; +thread_local std::shared_ptr thread_local_context; + +CCriticalSection shared_context_full_cs; +std::shared_ptr shared_context_full; +thread_local std::shared_ptr thread_local_context_full; + +/// Update thread local epoch context. +/// +/// This function is on the slow path. It's separated to allow inlining the fast +/// path. +/// +/// @todo: Redesign to guarantee deallocation before new allocation. +ATTRIBUTE_NOINLINE +void update_local_context(int epoch_number) +{ + // Release the shared pointer of the obsoleted context. + thread_local_context.reset(); + + // Local context invalid, check the shared context. + LOCK(shared_context_cs); + + if (!shared_context || shared_context->epoch_number != epoch_number) + { + // Release the shared pointer of the obsoleted context. 
+ shared_context.reset(); + + // Build new context. + shared_context = create_epoch_context(epoch_number); + } + + thread_local_context = shared_context; +} + +ATTRIBUTE_NOINLINE +void update_local_context_full(int epoch_number) +{ + // Release the shared pointer of the obsoleted context. + thread_local_context_full.reset(); + + // Local context invalid, check the shared context. + LOCK(shared_context_full_cs); + + if (!shared_context_full || shared_context_full->epoch_number != epoch_number) + { + // Release the shared pointer of the obsoleted context. + shared_context_full.reset(); + + // Build new context. + shared_context_full = create_epoch_context_full(epoch_number); + } + + thread_local_context_full = shared_context_full; +} +} // namespace + +const ethash_epoch_context* ethash_get_global_epoch_context(int epoch_number) noexcept +{ + // Check if local context matches epoch number. + if (!thread_local_context || thread_local_context->epoch_number != epoch_number) + update_local_context(epoch_number); + + return thread_local_context.get(); +} + +const ethash_epoch_context_full* ethash_get_global_epoch_context_full(int epoch_number) noexcept +{ + // Check if local context matches epoch number. + if (!thread_local_context_full || thread_local_context_full->epoch_number != epoch_number) + update_local_context_full(epoch_number); + + return thread_local_context_full.get(); +} diff --git a/src/crypto/ethash/lib/ethash/primes.c b/src/crypto/ethash/lib/ethash/primes.c new file mode 100644 index 0000000000..e27a535e3e --- /dev/null +++ b/src/crypto/ethash/lib/ethash/primes.c @@ -0,0 +1,43 @@ +/* ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. + * Copyright 2018-2019 Pawel Bylica. + * Licensed under the Apache License, Version 2.0. + */ + +#include "primes.h" + +/** Checks if the number is prime. Requires the number to be > 2 and odd. */ +static int is_odd_prime(int number) +{ + int d; + + /* Check factors up to sqrt(number). 
+ To avoid computing sqrt, compare d*d <= number with 64-bit precision. */ + for (d = 3; (int64_t)d * (int64_t)d <= (int64_t)number; d += 2) + { + if (number % d == 0) + return 0; + } + + return 1; +} + +int ethash_find_largest_prime(int upper_bound) +{ + int n = upper_bound; + + if (n < 2) + return 0; + + if (n == 2) + return 2; + + /* If even number, skip it. */ + if (n % 2 == 0) + --n; + + /* Test descending odd numbers. */ + while (!is_odd_prime(n)) + n -= 2; + + return n; +} diff --git a/src/crypto/ethash/lib/ethash/primes.h b/src/crypto/ethash/lib/ethash/primes.h new file mode 100644 index 0000000000..6fd77f76c1 --- /dev/null +++ b/src/crypto/ethash/lib/ethash/primes.h @@ -0,0 +1,25 @@ +/* ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. + * Copyright 2018-2019 Pawel Bylica. + * Licensed under the Apache License, Version 2.0. + */ + +#pragma once + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * Finds the largest prime number not greater than the provided upper bound. + * + * @param upper_bound The upper bound. SHOULD be greater than 1. + * @return The largest prime number `p` such `p <= upper_bound`. + * In case `upper_bound <= 1`, returns 0. + */ +int ethash_find_largest_prime(int upper_bound) NOEXCEPT; + +#ifdef __cplusplus +} +#endif diff --git a/src/crypto/ethash/lib/ethash/progpow.cpp b/src/crypto/ethash/lib/ethash/progpow.cpp new file mode 100644 index 0000000000..e3a0870d58 --- /dev/null +++ b/src/crypto/ethash/lib/ethash/progpow.cpp @@ -0,0 +1,581 @@ +// ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. +// Copyright 2018-2019 Pawel Bylica. +// Licensed under the Apache License, Version 2.0. 
+ +#include + +#include "crypto/ethash/lib/ethash/bit_manipulation.h" +#include "crypto/ethash/lib/ethash/endianness.hpp" +#include "crypto/ethash/lib/ethash/ethash-internal.hpp" +#include "crypto/ethash/lib/ethash/kiss99.hpp" +#include + +#include + +namespace progpow +{ +namespace +{ +/// A variant of Keccak hash function for ProgPoW. +/// +/// This Keccak hash function uses 800-bit permutation (Keccak-f[800]) with 576 bitrate. +/// It take exactly 576 bits of input (split across 3 arguments) and adds no padding. +/// +/// @param header_hash The 256-bit header hash. +/// @param nonce The 64-bit nonce. +/// @param mix_hash Additional 256-bits of data. +/// @return The 256-bit output of the hash function. +void keccak_progpow_256(uint32_t* st) noexcept +{ + ethash_keccakf800(st); +} + +/// The same as keccak_progpow_256() but uses null mix +/// and returns top 64 bits of the output being a big-endian prefix of the 256-bit hash. +inline void keccak_progpow_64(uint32_t* st) noexcept +{ + keccak_progpow_256(st); +} + + +/// ProgPoW mix RNG state. +/// +/// Encapsulates the state of the random number generator used in computing ProgPoW mix. +/// This includes the state of the KISS99 RNG and the precomputed random permutation of the +/// sequence of mix item indexes. 
+class mix_rng_state +{ +public: + inline explicit mix_rng_state(uint32_t* seed) noexcept; + + uint32_t next_dst() noexcept { return dst_seq[(dst_counter++) % num_regs]; } + uint32_t next_src() noexcept { return src_seq[(src_counter++) % num_regs]; } + + kiss99 rng; + +private: + size_t dst_counter = 0; + std::array dst_seq; + size_t src_counter = 0; + std::array src_seq; +}; + +mix_rng_state::mix_rng_state(uint32_t* hash_seed) noexcept +{ + const auto seed_lo = static_cast(hash_seed[0]); + const auto seed_hi = static_cast(hash_seed[1]); + + const auto z = fnv1a(fnv_offset_basis, seed_lo); + const auto w = fnv1a(z, seed_hi); + const auto jsr = fnv1a(w, seed_lo); + const auto jcong = fnv1a(jsr, seed_hi); + + rng = kiss99{z, w, jsr, jcong}; + + // Create random permutations of mix destinations / sources. + // Uses Fisher-Yates shuffle. + for (uint32_t i = 0; i < num_regs; ++i) + { + dst_seq[i] = i; + src_seq[i] = i; + } + + for (uint32_t i = num_regs; i > 1; --i) + { + std::swap(dst_seq[i - 1], dst_seq[rng() % i]); + std::swap(src_seq[i - 1], src_seq[rng() % i]); + } +} + + +NO_SANITIZE("unsigned-integer-overflow") +inline uint32_t random_math(uint32_t a, uint32_t b, uint32_t selector) noexcept +{ + switch (selector % 11) + { + default: + case 0: + return a + b; + case 1: + return a * b; + case 2: + return mul_hi32(a, b); + case 3: + return std::min(a, b); + case 4: + return rotl32(a, b); + case 5: + return rotr32(a, b); + case 6: + return a & b; + case 7: + return a | b; + case 8: + return a ^ b; + case 9: + return clz32(a) + clz32(b); + case 10: + return popcount32(a) + popcount32(b); + } +} + +/// Merge data from `b` and `a`. +/// Assuming `a` has high entropy, only do ops that retain entropy even if `b` +/// has low entropy (i.e. do not do `a & b`). +NO_SANITIZE("unsigned-integer-overflow") +inline void random_merge(uint32_t& a, uint32_t b, uint32_t selector) noexcept +{ + const auto x = (selector >> 16) % 31 + 1; // Additional non-zero selector from higher bits. 
+ switch (selector % 4) + { + case 0: + a = (a * 33) + b; + break; + case 1: + a = (a ^ b) * 33; + break; + case 2: + a = rotl32(a, x) ^ b; + break; + case 3: + a = rotr32(a, x) ^ b; + break; + } +} + +static const uint32_t round_constants[22] = { + 0x00000001,0x00008082,0x0000808A, + 0x80008000,0x0000808B,0x80000001, + 0x80008081,0x00008009,0x0000008A, + 0x00000088,0x80008009,0x8000000A, + 0x8000808B,0x0000008B,0x00008089, + 0x00008003,0x00008002,0x00000080, + 0x0000800A,0x8000000A,0x80008081, + 0x00008080, +}; + +static const uint32_t ravencoin_kawpow[15] = { + 0x00000072, //R + 0x00000041, //A + 0x00000056, //V + 0x00000045, //E + 0x0000004E, //N + 0x00000043, //C + 0x0000004F, //O + 0x00000049, //I + 0x0000004E, //N + 0x0000004B, //K + 0x00000041, //A + 0x00000057, //W + 0x00000050, //P + 0x0000004F, //O + 0x00000057, //W +}; + +using lookup_fn = hash2048 (*)(const epoch_context&, uint32_t); + +using mix_array = std::array, num_lanes>; + +void round( + const epoch_context& context, uint32_t r, mix_array& mix, mix_rng_state state, lookup_fn lookup) +{ + const uint32_t num_items = static_cast(context.full_dataset_num_items / 2); + const uint32_t item_index = mix[r % num_lanes][0] % num_items; + const hash2048 item = lookup(context, item_index); + + constexpr size_t num_words_per_lane = sizeof(item) / (sizeof(uint32_t) * num_lanes); + constexpr int max_operations = + num_cache_accesses > num_math_operations ? num_cache_accesses : num_math_operations; + + // Process lanes. + for (int i = 0; i < max_operations; ++i) + { + if (i < num_cache_accesses) // Random access to cached memory. + { + const auto src = state.next_src(); + const auto dst = state.next_dst(); + const auto sel = state.rng(); + + for (size_t l = 0; l < num_lanes; ++l) + { + const size_t offset = mix[l][src] % l1_cache_num_items; + random_merge(mix[l][dst], le::uint32(context.l1_cache[offset]), sel); + } + } + if (i < num_math_operations) // Random math. + { + // Generate 2 unique source indexes. 
+ const auto src_rnd = state.rng() % (num_regs * (num_regs - 1)); + const auto src1 = src_rnd % num_regs; // O <= src1 < num_regs + auto src2 = src_rnd / num_regs; // 0 <= src2 < num_regs - 1 + if (src2 >= src1) + ++src2; + + const auto sel1 = state.rng(); + const auto dst = state.next_dst(); + const auto sel2 = state.rng(); + + for (size_t l = 0; l < num_lanes; ++l) + { + const uint32_t data = random_math(mix[l][src1], mix[l][src2], sel1); + random_merge(mix[l][dst], data, sel2); + } + } + } + + // DAG access pattern. + uint32_t dsts[num_words_per_lane]; + uint32_t sels[num_words_per_lane]; + for (size_t i = 0; i < num_words_per_lane; ++i) + { + dsts[i] = i == 0 ? 0 : state.next_dst(); + sels[i] = state.rng(); + } + + // DAG access. + for (size_t l = 0; l < num_lanes; ++l) + { + const auto offset = ((l ^ r) % num_lanes) * num_words_per_lane; + for (size_t i = 0; i < num_words_per_lane; ++i) + { + const auto word = le::uint32(item.word32s[offset + i]); + random_merge(mix[l][dsts[i]], word, sels[i]); + } + } +} + +mix_array init_mix(uint32_t* hash_seed) +{ + const uint32_t z = fnv1a(fnv_offset_basis, static_cast(hash_seed[0])); + const uint32_t w = fnv1a(z, static_cast(hash_seed[1])); + + mix_array mix; + for (uint32_t l = 0; l < mix.size(); ++l) + { + const uint32_t jsr = fnv1a(w, l); + const uint32_t jcong = fnv1a(jsr, l); + kiss99 rng{z, w, jsr, jcong}; + + for (auto& row : mix[l]) + row = rng(); + } + return mix; +} + +hash256 hash_mix( + const epoch_context& context, int block_number, uint32_t * seed, lookup_fn lookup) noexcept +{ + auto mix = init_mix(seed); + auto number = uint64_t(block_number / period_length); + uint32_t new_state[2]; + new_state[0] = number; + new_state[1] = number >> 32; + mix_rng_state state{new_state}; + + for (uint32_t i = 0; i < 64; ++i) + round(context, i, mix, state, lookup); + + // Reduce mix data to a single per-lane result. 
+ uint32_t lane_hash[num_lanes]; + for (size_t l = 0; l < num_lanes; ++l) + { + lane_hash[l] = fnv_offset_basis; + for (uint32_t i = 0; i < num_regs; ++i) + lane_hash[l] = fnv1a(lane_hash[l], mix[l][i]); + } + + // Reduce all lanes to a single 256-bit result. + static constexpr size_t num_words = sizeof(hash256) / sizeof(uint32_t); + hash256 mix_hash; + for (uint32_t& w : mix_hash.word32s) + w = fnv_offset_basis; + for (size_t l = 0; l < num_lanes; ++l) + mix_hash.word32s[l % num_words] = fnv1a(mix_hash.word32s[l % num_words], lane_hash[l]); + return le::uint32s(mix_hash); +} +} // namespace + +result hash(const epoch_context& context, int block_number, const hash256& header_hash, + uint64_t nonce) noexcept +{ + uint32_t hash_seed[2]; // KISS99 initiator + + uint32_t state2[8]; + + { + // Absorb phase for initial round of keccak + uint32_t state[25] = {0x0}; // Keccak's state + + // 1st fill with header data (8 words) + for (int i = 0; i < 8; i++) + state[i] = header_hash.word32s[i]; + + // 2nd fill with nonce (2 words) + state[8] = nonce; + state[9] = nonce >> 32; + + // 3rd apply ravencoin input constraints + for (int i = 10; i < 25; i++) + state[i] = ravencoin_kawpow[i-10]; + + keccak_progpow_64(state); + + for (int i = 0; i < 8; i++) + state2[i] = state[i]; + } + + hash_seed[0] = state2[0]; + hash_seed[1] = state2[1]; + const hash256 mix_hash = hash_mix(context, block_number, hash_seed, calculate_dataset_item_2048); + + // Absorb phase for last round of keccak (256 bits) + + uint32_t state[25] = {0x0}; // Keccak's state + + // 1st initial 8 words of state are kept as carry-over from initial keccak + for (int i = 0; i < 8; i++) + state[i] = state2[i]; + + // 2nd subsequent 8 words are carried from digest/mix + for (int i = 8; i < 16; i++) + state[i] = mix_hash.word32s[i-8]; + + // 3rd apply ravencoin input constraints + for (int i = 16; i < 25; i++) + state[i] = ravencoin_kawpow[i - 16]; + + // Run keccak loop + keccak_progpow_256(state); + + hash256 output; + 
for (int i = 0; i < 8; ++i) + output.word32s[i] = le::uint32(state[i]); + + return {output, mix_hash}; +} + +result hash(const epoch_context_full& context, int block_number, const hash256& header_hash, + uint64_t nonce) noexcept +{ + static const auto lazy_lookup = [](const epoch_context& ctx, uint32_t index) noexcept + { + auto* full_dataset_1024 = static_cast(ctx).full_dataset; + auto* full_dataset_2048 = reinterpret_cast(full_dataset_1024); + hash2048& item = full_dataset_2048[index]; + if (item.word64s[0] == 0) + { + // TODO: Copy elision here makes it thread-safe? + item = calculate_dataset_item_2048(ctx, index); + } + + return item; + }; + + uint32_t hash_seed[2]; // KISS99 initiator + + uint32_t state2[8]; + + { + // Absorb phase for initial round of keccak + + uint32_t state[25] = {0x0}; // Keccak's state + + // 1st fill with header data (8 words) + for (int i = 0; i < 8; i++) + state[i] = header_hash.word32s[i]; + + // 2nd fill with nonce (2 words) + state[8] = nonce; + state[9] = nonce >> 32; + + // 3rd apply ravencoin input constraints + for (int i = 10; i < 25; i++) + state[i] = ravencoin_kawpow[i-10]; + + keccak_progpow_64(state); + + for (int i = 0; i < 8; i++) + state2[i] = state[i]; + } + + hash_seed[0] = state2[0]; + hash_seed[1] = state2[1]; + + const hash256 mix_hash = hash_mix(context, block_number, hash_seed, lazy_lookup); + + // Absorb phase for last round of keccak (256 bits) + + uint32_t state[25] = {0x0}; // Keccak's state + + // 1st initial 8 words of state are kept as carry-over from initial keccak + for (int i = 0; i < 8; i++) + state[i] = state2[i]; + + // 2nd subsequent 8 words are carried from digest/mix + for (int i = 8; i < 16; i++) + state[i] = mix_hash.word32s[i-8]; + + // 3rd apply ravencoin input constraints + for (int i = 16; i < 25; i++) + state[i] = ravencoin_kawpow[i - 16]; + + // Run keccak loop + keccak_progpow_256(state); + + hash256 output; + for (int i = 0; i < 8; ++i) + output.word32s[i] = le::uint32(state[i]); + 
return {output, mix_hash}; +} + +bool verify(const epoch_context& context, int block_number, const hash256& header_hash, + const hash256& mix_hash, uint64_t nonce, const hash256& boundary) noexcept +{ + + uint32_t hash_seed[2]; // KISS99 initiator + uint32_t state2[8]; + + { + // Absorb phase for initial round of keccak + + uint32_t state[25] = {0x0}; // Keccak's state + + // 1st fill with header data (8 words) + for (int i = 0; i < 8; i++) + state[i] = header_hash.word32s[i]; + + // 2nd fill with nonce (2 words) + state[8] = nonce; + state[9] = nonce >> 32; + + // 3rd apply ravencoin input constraints + for (int i = 10; i < 25; i++) + state[i] = ravencoin_kawpow[i-10]; + + keccak_progpow_64(state); + + for (int i = 0; i < 8; i++) + state2[i] = state[i]; + } + + hash_seed[0] = state2[0]; + hash_seed[1] = state2[1]; + + // Absorb phase for last round of keccak (256 bits) + + uint32_t state[25] = {0x0}; // Keccak's state + + // 1st initial 8 words of state are kept as carry-over from initial keccak + for (int i = 0; i < 8; i++) + state[i] = state2[i]; + + // 2nd subsequent 8 words are carried from digest/mix + for (int i = 8; i < 16; i++) + state[i] = mix_hash.word32s[i-8]; + + // 3rd apply ravencoin input constraints + for (int i = 16; i < 25; i++) + state[i] = ravencoin_kawpow[i - 16]; + + // Run keccak loop + keccak_progpow_256(state); + + hash256 output; + for (int i = 0; i < 8; ++i) + output.word32s[i] = le::uint32(state[i]); + + if (!is_less_or_equal(output, boundary)) { + return false; + } + + const hash256 expected_mix_hash = + hash_mix(context, block_number, hash_seed, calculate_dataset_item_2048); + + return is_equal(expected_mix_hash, mix_hash); +} + + +hash256 hash_no_verify(const int& block_number, const hash256& header_hash, + const hash256& mix_hash, const uint64_t& nonce) noexcept +{ + uint32_t state2[8]; + + { + // Absorb phase for initial round of keccak + + uint32_t state[25] = {0x0}; // Keccak's state + + // 1st fill with header data (8 words) + 
for (int i = 0; i < 8; i++) + state[i] = header_hash.word32s[i]; + + // 2nd fill with nonce (2 words) + state[8] = nonce; + state[9] = nonce >> 32; + + // 3rd apply ravencoin input constraints + for (int i = 10; i < 25; i++) + state[i] = ravencoin_kawpow[i-10]; + + keccak_progpow_64(state); + + for (int i = 0; i < 8; i++) + state2[i] = state[i]; + } + + // Absorb phase for last round of keccak (256 bits) + + uint32_t state[25] = {0x0}; // Keccak's state + + // 1st initial 8 words of state are kept as carry-over from initial keccak + for (int i = 0; i < 8; i++) + state[i] = state2[i]; + + // 2nd subsequent 8 words are carried from digest/mix + for (int i = 8; i < 16; i++) + state[i] = mix_hash.word32s[i-8]; + + // 3rd apply ravencoin input constraints + for (int i = 16; i < 25; i++) + state[i] = ravencoin_kawpow[i - 16]; + + // Run keccak loop + keccak_progpow_256(state); + + hash256 output; + for (int i = 0; i < 8; ++i) + output.word32s[i] = le::uint32(state[i]); + + return output; +} + + +search_result search_light(const epoch_context& context, int block_number, + const hash256& header_hash, const hash256& boundary, uint64_t start_nonce, + size_t iterations) noexcept +{ + const uint64_t end_nonce = start_nonce + iterations; + for (uint64_t nonce = start_nonce; nonce < end_nonce; ++nonce) + { + result r = hash(context, block_number, header_hash, nonce); + if (is_less_or_equal(r.final_hash, boundary)) + return {r, nonce}; + } + return {}; +} + +search_result search(const epoch_context_full& context, int block_number, + const hash256& header_hash, const hash256& boundary, uint64_t start_nonce, + size_t iterations) noexcept +{ + const uint64_t end_nonce = start_nonce + iterations; + for (uint64_t nonce = start_nonce; nonce < end_nonce; ++nonce) + { + result r = hash(context, block_number, header_hash, nonce); + if (is_less_or_equal(r.final_hash, boundary)) + return {r, nonce}; + } + return {}; +} + +} // namespace progpow diff --git 
a/src/crypto/ethash/lib/keccak/keccak.c b/src/crypto/ethash/lib/keccak/keccak.c new file mode 100644 index 0000000000..e455c0a126 --- /dev/null +++ b/src/crypto/ethash/lib/keccak/keccak.c @@ -0,0 +1,127 @@ +/* ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. + * Copyright 2018-2019 Pawel Bylica. + * Licensed under the Apache License, Version 2.0. + */ + +#include + +#include "../support/attributes.h" + +#if _MSC_VER +#include +#define __builtin_memcpy memcpy +#endif + +#if _WIN32 +/* On Windows assume little endian. */ +#define __LITTLE_ENDIAN 1234 +#define __BIG_ENDIAN 4321 +#define __BYTE_ORDER __LITTLE_ENDIAN +#elif __APPLE__ +#include +#else +#include +#endif + +#if __BYTE_ORDER == __LITTLE_ENDIAN +#define to_le64(X) X +#else +#define to_le64(X) __builtin_bswap64(X) +#endif + + +/** Loads 64-bit integer from given memory location as little-endian number. */ +static INLINE ALWAYS_INLINE uint64_t load_le(const uint8_t* data) +{ + /* memcpy is the best way of expressing the intention. Every compiler will + optimize is to single load instruction if the target architecture + supports unaligned memory access (GCC and clang even in O0). + This is great trick because we are violating C/C++ memory alignment + restrictions with no performance penalty. 
*/ + uint64_t word; + __builtin_memcpy(&word, data, sizeof(word)); + return to_le64(word); +} + +static INLINE ALWAYS_INLINE void keccak( + uint64_t* out, size_t bits, const uint8_t* data, size_t size) +{ + static const size_t word_size = sizeof(uint64_t); + const size_t hash_size = bits / 8; + const size_t block_size = (1600 - bits * 2) / 8; + + size_t i; + uint64_t* state_iter; + uint64_t last_word = 0; + uint8_t* last_word_iter = (uint8_t*)&last_word; + + uint64_t state[25] = {0}; + + while (size >= block_size) + { + for (i = 0; i < (block_size / word_size); ++i) + { + state[i] ^= load_le(data); + data += word_size; + } + + ethash_keccakf1600(state); + + size -= block_size; + } + + state_iter = state; + + while (size >= word_size) + { + *state_iter ^= load_le(data); + ++state_iter; + data += word_size; + size -= word_size; + } + + while (size > 0) + { + *last_word_iter = *data; + ++last_word_iter; + ++data; + --size; + } + *last_word_iter = 0x01; + *state_iter ^= to_le64(last_word); + + state[(block_size / word_size) - 1] ^= 0x8000000000000000; + + ethash_keccakf1600(state); + + for (i = 0; i < (hash_size / word_size); ++i) + out[i] = to_le64(state[i]); +} + +union ethash_hash256 ethash_keccak256(const uint8_t* data, size_t size) +{ + union ethash_hash256 hash; + keccak(hash.word64s, 256, data, size); + return hash; +} + +union ethash_hash256 ethash_keccak256_32(const uint8_t data[32]) +{ + union ethash_hash256 hash; + keccak(hash.word64s, 256, data, 32); + return hash; +} + +union ethash_hash512 ethash_keccak512(const uint8_t* data, size_t size) +{ + union ethash_hash512 hash; + keccak(hash.word64s, 512, data, size); + return hash; +} + +union ethash_hash512 ethash_keccak512_64(const uint8_t data[64]) +{ + union ethash_hash512 hash; + keccak(hash.word64s, 512, data, 64); + return hash; +} diff --git a/src/crypto/ethash/lib/keccak/keccakf1600.c b/src/crypto/ethash/lib/keccak/keccakf1600.c new file mode 100644 index 0000000000..e12b268d13 --- /dev/null +++ 
b/src/crypto/ethash/lib/keccak/keccakf1600.c @@ -0,0 +1,255 @@ +/* ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. + * Copyright 2018-2019 Pawel Bylica. + * Licensed under the Apache License, Version 2.0. + */ + +#include + +static uint64_t rol(uint64_t x, unsigned s) +{ + return (x << s) | (x >> (64 - s)); +} + +static const uint64_t round_constants[24] = { + 0x0000000000000001, + 0x0000000000008082, + 0x800000000000808a, + 0x8000000080008000, + 0x000000000000808b, + 0x0000000080000001, + 0x8000000080008081, + 0x8000000000008009, + 0x000000000000008a, + 0x0000000000000088, + 0x0000000080008009, + 0x000000008000000a, + 0x000000008000808b, + 0x800000000000008b, + 0x8000000000008089, + 0x8000000000008003, + 0x8000000000008002, + 0x8000000000000080, + 0x000000000000800a, + 0x800000008000000a, + 0x8000000080008081, + 0x8000000000008080, + 0x0000000080000001, + 0x8000000080008008, +}; + +void ethash_keccakf1600(uint64_t state[25]) +{ + /* The implementation based on the "simple" implementation by Ronny Van Keer. 
*/ + + int round; + + uint64_t Aba, Abe, Abi, Abo, Abu; + uint64_t Aga, Age, Agi, Ago, Agu; + uint64_t Aka, Ake, Aki, Ako, Aku; + uint64_t Ama, Ame, Ami, Amo, Amu; + uint64_t Asa, Ase, Asi, Aso, Asu; + + uint64_t Eba, Ebe, Ebi, Ebo, Ebu; + uint64_t Ega, Ege, Egi, Ego, Egu; + uint64_t Eka, Eke, Eki, Eko, Eku; + uint64_t Ema, Eme, Emi, Emo, Emu; + uint64_t Esa, Ese, Esi, Eso, Esu; + + uint64_t Ba, Be, Bi, Bo, Bu; + + uint64_t Da, De, Di, Do, Du; + + Aba = state[0]; + Abe = state[1]; + Abi = state[2]; + Abo = state[3]; + Abu = state[4]; + Aga = state[5]; + Age = state[6]; + Agi = state[7]; + Ago = state[8]; + Agu = state[9]; + Aka = state[10]; + Ake = state[11]; + Aki = state[12]; + Ako = state[13]; + Aku = state[14]; + Ama = state[15]; + Ame = state[16]; + Ami = state[17]; + Amo = state[18]; + Amu = state[19]; + Asa = state[20]; + Ase = state[21]; + Asi = state[22]; + Aso = state[23]; + Asu = state[24]; + + for (round = 0; round < 24; round += 2) + { + /* Round (round + 0): Axx -> Exx */ + + Ba = Aba ^ Aga ^ Aka ^ Ama ^ Asa; + Be = Abe ^ Age ^ Ake ^ Ame ^ Ase; + Bi = Abi ^ Agi ^ Aki ^ Ami ^ Asi; + Bo = Abo ^ Ago ^ Ako ^ Amo ^ Aso; + Bu = Abu ^ Agu ^ Aku ^ Amu ^ Asu; + + Da = Bu ^ rol(Be, 1); + De = Ba ^ rol(Bi, 1); + Di = Be ^ rol(Bo, 1); + Do = Bi ^ rol(Bu, 1); + Du = Bo ^ rol(Ba, 1); + + Ba = Aba ^ Da; + Be = rol(Age ^ De, 44); + Bi = rol(Aki ^ Di, 43); + Bo = rol(Amo ^ Do, 21); + Bu = rol(Asu ^ Du, 14); + Eba = Ba ^ (~Be & Bi) ^ round_constants[round]; + Ebe = Be ^ (~Bi & Bo); + Ebi = Bi ^ (~Bo & Bu); + Ebo = Bo ^ (~Bu & Ba); + Ebu = Bu ^ (~Ba & Be); + + Ba = rol(Abo ^ Do, 28); + Be = rol(Agu ^ Du, 20); + Bi = rol(Aka ^ Da, 3); + Bo = rol(Ame ^ De, 45); + Bu = rol(Asi ^ Di, 61); + Ega = Ba ^ (~Be & Bi); + Ege = Be ^ (~Bi & Bo); + Egi = Bi ^ (~Bo & Bu); + Ego = Bo ^ (~Bu & Ba); + Egu = Bu ^ (~Ba & Be); + + Ba = rol(Abe ^ De, 1); + Be = rol(Agi ^ Di, 6); + Bi = rol(Ako ^ Do, 25); + Bo = rol(Amu ^ Du, 8); + Bu = rol(Asa ^ Da, 18); + Eka = Ba ^ (~Be & Bi); + Eke = Be 
^ (~Bi & Bo); + Eki = Bi ^ (~Bo & Bu); + Eko = Bo ^ (~Bu & Ba); + Eku = Bu ^ (~Ba & Be); + + Ba = rol(Abu ^ Du, 27); + Be = rol(Aga ^ Da, 36); + Bi = rol(Ake ^ De, 10); + Bo = rol(Ami ^ Di, 15); + Bu = rol(Aso ^ Do, 56); + Ema = Ba ^ (~Be & Bi); + Eme = Be ^ (~Bi & Bo); + Emi = Bi ^ (~Bo & Bu); + Emo = Bo ^ (~Bu & Ba); + Emu = Bu ^ (~Ba & Be); + + Ba = rol(Abi ^ Di, 62); + Be = rol(Ago ^ Do, 55); + Bi = rol(Aku ^ Du, 39); + Bo = rol(Ama ^ Da, 41); + Bu = rol(Ase ^ De, 2); + Esa = Ba ^ (~Be & Bi); + Ese = Be ^ (~Bi & Bo); + Esi = Bi ^ (~Bo & Bu); + Eso = Bo ^ (~Bu & Ba); + Esu = Bu ^ (~Ba & Be); + + + /* Round (round + 1): Exx -> Axx */ + + Ba = Eba ^ Ega ^ Eka ^ Ema ^ Esa; + Be = Ebe ^ Ege ^ Eke ^ Eme ^ Ese; + Bi = Ebi ^ Egi ^ Eki ^ Emi ^ Esi; + Bo = Ebo ^ Ego ^ Eko ^ Emo ^ Eso; + Bu = Ebu ^ Egu ^ Eku ^ Emu ^ Esu; + + Da = Bu ^ rol(Be, 1); + De = Ba ^ rol(Bi, 1); + Di = Be ^ rol(Bo, 1); + Do = Bi ^ rol(Bu, 1); + Du = Bo ^ rol(Ba, 1); + + Ba = Eba ^ Da; + Be = rol(Ege ^ De, 44); + Bi = rol(Eki ^ Di, 43); + Bo = rol(Emo ^ Do, 21); + Bu = rol(Esu ^ Du, 14); + Aba = Ba ^ (~Be & Bi) ^ round_constants[round + 1]; + Abe = Be ^ (~Bi & Bo); + Abi = Bi ^ (~Bo & Bu); + Abo = Bo ^ (~Bu & Ba); + Abu = Bu ^ (~Ba & Be); + + Ba = rol(Ebo ^ Do, 28); + Be = rol(Egu ^ Du, 20); + Bi = rol(Eka ^ Da, 3); + Bo = rol(Eme ^ De, 45); + Bu = rol(Esi ^ Di, 61); + Aga = Ba ^ (~Be & Bi); + Age = Be ^ (~Bi & Bo); + Agi = Bi ^ (~Bo & Bu); + Ago = Bo ^ (~Bu & Ba); + Agu = Bu ^ (~Ba & Be); + + Ba = rol(Ebe ^ De, 1); + Be = rol(Egi ^ Di, 6); + Bi = rol(Eko ^ Do, 25); + Bo = rol(Emu ^ Du, 8); + Bu = rol(Esa ^ Da, 18); + Aka = Ba ^ (~Be & Bi); + Ake = Be ^ (~Bi & Bo); + Aki = Bi ^ (~Bo & Bu); + Ako = Bo ^ (~Bu & Ba); + Aku = Bu ^ (~Ba & Be); + + Ba = rol(Ebu ^ Du, 27); + Be = rol(Ega ^ Da, 36); + Bi = rol(Eke ^ De, 10); + Bo = rol(Emi ^ Di, 15); + Bu = rol(Eso ^ Do, 56); + Ama = Ba ^ (~Be & Bi); + Ame = Be ^ (~Bi & Bo); + Ami = Bi ^ (~Bo & Bu); + Amo = Bo ^ (~Bu & Ba); + Amu = Bu ^ (~Ba & Be); + + Ba 
= rol(Ebi ^ Di, 62); + Be = rol(Ego ^ Do, 55); + Bi = rol(Eku ^ Du, 39); + Bo = rol(Ema ^ Da, 41); + Bu = rol(Ese ^ De, 2); + Asa = Ba ^ (~Be & Bi); + Ase = Be ^ (~Bi & Bo); + Asi = Bi ^ (~Bo & Bu); + Aso = Bo ^ (~Bu & Ba); + Asu = Bu ^ (~Ba & Be); + } + + state[0] = Aba; + state[1] = Abe; + state[2] = Abi; + state[3] = Abo; + state[4] = Abu; + state[5] = Aga; + state[6] = Age; + state[7] = Agi; + state[8] = Ago; + state[9] = Agu; + state[10] = Aka; + state[11] = Ake; + state[12] = Aki; + state[13] = Ako; + state[14] = Aku; + state[15] = Ama; + state[16] = Ame; + state[17] = Ami; + state[18] = Amo; + state[19] = Amu; + state[20] = Asa; + state[21] = Ase; + state[22] = Asi; + state[23] = Aso; + state[24] = Asu; +} diff --git a/src/crypto/ethash/lib/keccak/keccakf800.c b/src/crypto/ethash/lib/keccak/keccakf800.c new file mode 100644 index 0000000000..5b9a180254 --- /dev/null +++ b/src/crypto/ethash/lib/keccak/keccakf800.c @@ -0,0 +1,253 @@ +/* ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. + * Copyright 2018-2019 Pawel Bylica. + * Licensed under the Apache License, Version 2.0. + */ + +#include + +static uint32_t rol(uint32_t x, unsigned s) +{ + return (x << s) | (x >> (32 - s)); +} + +static const uint32_t round_constants[22] = { + 0x00000001, + 0x00008082, + 0x0000808A, + 0x80008000, + 0x0000808B, + 0x80000001, + 0x80008081, + 0x00008009, + 0x0000008A, + 0x00000088, + 0x80008009, + 0x8000000A, + 0x8000808B, + 0x0000008B, + 0x00008089, + 0x00008003, + 0x00008002, + 0x00000080, + 0x0000800A, + 0x8000000A, + 0x80008081, + 0x00008080, +}; + +void ethash_keccakf800(uint32_t state[25]) +{ + /* The implementation directly translated from ethash_keccakf1600. 
*/ + + int round; + + uint32_t Aba, Abe, Abi, Abo, Abu; + uint32_t Aga, Age, Agi, Ago, Agu; + uint32_t Aka, Ake, Aki, Ako, Aku; + uint32_t Ama, Ame, Ami, Amo, Amu; + uint32_t Asa, Ase, Asi, Aso, Asu; + + uint32_t Eba, Ebe, Ebi, Ebo, Ebu; + uint32_t Ega, Ege, Egi, Ego, Egu; + uint32_t Eka, Eke, Eki, Eko, Eku; + uint32_t Ema, Eme, Emi, Emo, Emu; + uint32_t Esa, Ese, Esi, Eso, Esu; + + uint32_t Ba, Be, Bi, Bo, Bu; + + uint32_t Da, De, Di, Do, Du; + + Aba = state[0]; + Abe = state[1]; + Abi = state[2]; + Abo = state[3]; + Abu = state[4]; + Aga = state[5]; + Age = state[6]; + Agi = state[7]; + Ago = state[8]; + Agu = state[9]; + Aka = state[10]; + Ake = state[11]; + Aki = state[12]; + Ako = state[13]; + Aku = state[14]; + Ama = state[15]; + Ame = state[16]; + Ami = state[17]; + Amo = state[18]; + Amu = state[19]; + Asa = state[20]; + Ase = state[21]; + Asi = state[22]; + Aso = state[23]; + Asu = state[24]; + + for (round = 0; round < 22; round += 2) + { + /* Round (round + 0): Axx -> Exx */ + + Ba = Aba ^ Aga ^ Aka ^ Ama ^ Asa; + Be = Abe ^ Age ^ Ake ^ Ame ^ Ase; + Bi = Abi ^ Agi ^ Aki ^ Ami ^ Asi; + Bo = Abo ^ Ago ^ Ako ^ Amo ^ Aso; + Bu = Abu ^ Agu ^ Aku ^ Amu ^ Asu; + + Da = Bu ^ rol(Be, 1); + De = Ba ^ rol(Bi, 1); + Di = Be ^ rol(Bo, 1); + Do = Bi ^ rol(Bu, 1); + Du = Bo ^ rol(Ba, 1); + + Ba = Aba ^ Da; + Be = rol(Age ^ De, 12); + Bi = rol(Aki ^ Di, 11); + Bo = rol(Amo ^ Do, 21); + Bu = rol(Asu ^ Du, 14); + Eba = Ba ^ (~Be & Bi) ^ round_constants[round]; + Ebe = Be ^ (~Bi & Bo); + Ebi = Bi ^ (~Bo & Bu); + Ebo = Bo ^ (~Bu & Ba); + Ebu = Bu ^ (~Ba & Be); + + Ba = rol(Abo ^ Do, 28); + Be = rol(Agu ^ Du, 20); + Bi = rol(Aka ^ Da, 3); + Bo = rol(Ame ^ De, 13); + Bu = rol(Asi ^ Di, 29); + Ega = Ba ^ (~Be & Bi); + Ege = Be ^ (~Bi & Bo); + Egi = Bi ^ (~Bo & Bu); + Ego = Bo ^ (~Bu & Ba); + Egu = Bu ^ (~Ba & Be); + + Ba = rol(Abe ^ De, 1); + Be = rol(Agi ^ Di, 6); + Bi = rol(Ako ^ Do, 25); + Bo = rol(Amu ^ Du, 8); + Bu = rol(Asa ^ Da, 18); + Eka = Ba ^ (~Be & Bi); + Eke = Be 
^ (~Bi & Bo); + Eki = Bi ^ (~Bo & Bu); + Eko = Bo ^ (~Bu & Ba); + Eku = Bu ^ (~Ba & Be); + + Ba = rol(Abu ^ Du, 27); + Be = rol(Aga ^ Da, 4); + Bi = rol(Ake ^ De, 10); + Bo = rol(Ami ^ Di, 15); + Bu = rol(Aso ^ Do, 24); + Ema = Ba ^ (~Be & Bi); + Eme = Be ^ (~Bi & Bo); + Emi = Bi ^ (~Bo & Bu); + Emo = Bo ^ (~Bu & Ba); + Emu = Bu ^ (~Ba & Be); + + Ba = rol(Abi ^ Di, 30); + Be = rol(Ago ^ Do, 23); + Bi = rol(Aku ^ Du, 7); + Bo = rol(Ama ^ Da, 9); + Bu = rol(Ase ^ De, 2); + Esa = Ba ^ (~Be & Bi); + Ese = Be ^ (~Bi & Bo); + Esi = Bi ^ (~Bo & Bu); + Eso = Bo ^ (~Bu & Ba); + Esu = Bu ^ (~Ba & Be); + + + /* Round (round + 1): Exx -> Axx */ + + Ba = Eba ^ Ega ^ Eka ^ Ema ^ Esa; + Be = Ebe ^ Ege ^ Eke ^ Eme ^ Ese; + Bi = Ebi ^ Egi ^ Eki ^ Emi ^ Esi; + Bo = Ebo ^ Ego ^ Eko ^ Emo ^ Eso; + Bu = Ebu ^ Egu ^ Eku ^ Emu ^ Esu; + + Da = Bu ^ rol(Be, 1); + De = Ba ^ rol(Bi, 1); + Di = Be ^ rol(Bo, 1); + Do = Bi ^ rol(Bu, 1); + Du = Bo ^ rol(Ba, 1); + + Ba = Eba ^ Da; + Be = rol(Ege ^ De, 12); + Bi = rol(Eki ^ Di, 11); + Bo = rol(Emo ^ Do, 21); + Bu = rol(Esu ^ Du, 14); + Aba = Ba ^ (~Be & Bi) ^ round_constants[round + 1]; + Abe = Be ^ (~Bi & Bo); + Abi = Bi ^ (~Bo & Bu); + Abo = Bo ^ (~Bu & Ba); + Abu = Bu ^ (~Ba & Be); + + Ba = rol(Ebo ^ Do, 28); + Be = rol(Egu ^ Du, 20); + Bi = rol(Eka ^ Da, 3); + Bo = rol(Eme ^ De, 13); + Bu = rol(Esi ^ Di, 29); + Aga = Ba ^ (~Be & Bi); + Age = Be ^ (~Bi & Bo); + Agi = Bi ^ (~Bo & Bu); + Ago = Bo ^ (~Bu & Ba); + Agu = Bu ^ (~Ba & Be); + + Ba = rol(Ebe ^ De, 1); + Be = rol(Egi ^ Di, 6); + Bi = rol(Eko ^ Do, 25); + Bo = rol(Emu ^ Du, 8); + Bu = rol(Esa ^ Da, 18); + Aka = Ba ^ (~Be & Bi); + Ake = Be ^ (~Bi & Bo); + Aki = Bi ^ (~Bo & Bu); + Ako = Bo ^ (~Bu & Ba); + Aku = Bu ^ (~Ba & Be); + + Ba = rol(Ebu ^ Du, 27); + Be = rol(Ega ^ Da, 4); + Bi = rol(Eke ^ De, 10); + Bo = rol(Emi ^ Di, 15); + Bu = rol(Eso ^ Do, 24); + Ama = Ba ^ (~Be & Bi); + Ame = Be ^ (~Bi & Bo); + Ami = Bi ^ (~Bo & Bu); + Amo = Bo ^ (~Bu & Ba); + Amu = Bu ^ (~Ba & Be); + + Ba = 
rol(Ebi ^ Di, 30); + Be = rol(Ego ^ Do, 23); + Bi = rol(Eku ^ Du, 7); + Bo = rol(Ema ^ Da, 9); + Bu = rol(Ese ^ De, 2); + Asa = Ba ^ (~Be & Bi); + Ase = Be ^ (~Bi & Bo); + Asi = Bi ^ (~Bo & Bu); + Aso = Bo ^ (~Bu & Ba); + Asu = Bu ^ (~Ba & Be); + } + + state[0] = Aba; + state[1] = Abe; + state[2] = Abi; + state[3] = Abo; + state[4] = Abu; + state[5] = Aga; + state[6] = Age; + state[7] = Agi; + state[8] = Ago; + state[9] = Agu; + state[10] = Aka; + state[11] = Ake; + state[12] = Aki; + state[13] = Ako; + state[14] = Aku; + state[15] = Ama; + state[16] = Ame; + state[17] = Ami; + state[18] = Amo; + state[19] = Amu; + state[20] = Asa; + state[21] = Ase; + state[22] = Asi; + state[23] = Aso; + state[24] = Asu; +} diff --git a/src/crypto/ethash/lib/support/attributes.h b/src/crypto/ethash/lib/support/attributes.h new file mode 100644 index 0000000000..83be231f0f --- /dev/null +++ b/src/crypto/ethash/lib/support/attributes.h @@ -0,0 +1,33 @@ +/* ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. + * Copyright 2018-2019 Pawel Bylica. + * Licensed under the Apache License, Version 2.0. 
+ */ + +#pragma once + +/** inline */ +#if _MSC_VER || __STDC_VERSION__ +#define INLINE inline +#else +#define INLINE +#endif + +/** [[always_inline]] */ +#if _MSC_VER +#define ALWAYS_INLINE __forceinline +#elif defined(__has_attribute) && __STDC_VERSION__ +#if __has_attribute(always_inline) +#define ALWAYS_INLINE __attribute__((always_inline)) +#endif +#endif +#if !defined(ALWAYS_INLINE) +#define ALWAYS_INLINE +#endif + +/** [[no_sanitize()]] */ +#if __clang__ +#define NO_SANITIZE(sanitizer) \ + __attribute__((no_sanitize(sanitizer))) +#else +#define NO_SANITIZE(sanitizer) +#endif diff --git a/src/crypto/ethash/progpow_test_vectors.hpp b/src/crypto/ethash/progpow_test_vectors.hpp new file mode 100644 index 0000000000..62dc23a056 --- /dev/null +++ b/src/crypto/ethash/progpow_test_vectors.hpp @@ -0,0 +1,64 @@ +// ethash: C/C++ implementation of Ethash, the Ethereum Proof of Work algorithm. +// Copyright 2018-2019 Pawel Bylica. +// Licensed under the Apache License, Version 2.0. + +/// @file +/// ProgPoW test vectors. + +#pragma once + +namespace // In anonymous namespace to allow including in multiple compilation units. +{ +/// Defines a test case for ProgPoW hash() function. 
+struct progpow_hash_test_case +{ + int block_number; + const char* header_hash_hex; + const char* nonce_hex; + const char* mix_hash_hex; + const char* final_hash_hex; +}; + +progpow_hash_test_case progpow_hash_test_cases[] = { + {0, "0000000000000000000000000000000000000000000000000000000000000000", "0000000000000000", + "6e97b47b134fda0c7888802988e1a373affeb28bcd813b6e9a0fc669c935d03a", + "e601a7257a70dc48fccc97a7330d704d776047623b92883d77111fb36870f3d1"}, + {49, "63155f732f2bf556967f906155b510c917e48e99685ead76ea83f4eca03ab12b", "0000000007073c07", + "d36f7e815ee09e74eceb9c96993a3d681edf2bf0921fc7bb710364042db99777", + "e7ced124598fd2500a55ad9f9f48e3569327fe50493c77a4ac9799b96efb9463"}, + {50, "9e7248f20914913a73d80a70174c331b1d34f260535ac3631d770e656b5dd922", "00000000076e482e", + "d6dc634ae837e2785b347648ea515e25e5d8821ae0b95e1c2a9c2d497e0dcfbd", + "ab0ad7ef8d8ee317dd12d10310aceed7321d34fb263791c2de5776a6658d177e"}, + {99, "de37e1824c86d35d154cf65a88de6d9286aec4f7f10c3fc9f0fa1bcc2687188d", "000000003917afab", + "fa706860e5e0e830d5d1d7157e5bea7f5f8a350c7c8612ac1d1fcf2974d64244", + "aa85340690f2e907054324a5021937910e15edfd1ef1577231843e7d32ec3a61"}, + {29950, "ac7b55e801511b77e11d52e9599206101550144525b5679f2dab19386f23dcce", "005d409dbc23a62a", + "5359807b77a74878269c3a3044df8618a576ce8dc52e1c48d927d4a60e7c6b79", + "022019e5408683f7f8326b4e46b42864a3a069f17b6151e434fcaedecaadd918"}, + {29999, "e43d7e0bdc8a4a3f6e291a5ed790b9fa1a0948a2b9e33c844888690847de19f5", "005db5fa4c2a3d03", + "d15de3f9bfedd9b6d0f498273eb3b437115bdc8326c96c6457ac06deb5c9f389", + "4e93630b81198752f876b24380999189b7b9366c08222ac05e4237b87114f305"}, + {30000, "d34519f72c97cae8892c277776259db3320820cb5279a299d0ef1e155e5c6454", "005db8607994ff30", + "de0348b69bf91dfe2c3d3dba6f0132e9048a5284e57b8d9d20adc5f3dc0d3236", + "c7953d848cda6e304f77b4c6d735645c8e8508a5e74c9e9814ef37b19087cd6c"}, + {30049, "8b6ce5da0b06d18db7bd8492d9e5717f8b53e7e098d9fef7886d58a6e913ef64", "005e2e215a8ca2e7", + 
"975c6a9decc89cba7ace69338d4de8510d9619aef42b1d35d0bef7e0ce0614a9", + "c262d8055e288d04b951a844bfca8ba529f5b4d652b408e3942727d7dd90957a"}, + {30050, "c2c46173481b9ced61123d2e293b42ede5a1b323210eb2a684df0874ffe09047", "005e30899481055e", + "362f2fabdb9699d3634b6499703f939f378ee4eac803396c2b0ed0fe1d154972", + "4cd7e6e79e0b63d42b2b06716a919ccc7834077ec727a9ea94edcdaff2fefab8"}, + {30099, "ea42197eb2ba79c63cb5e655b8b1f612c5f08aae1a49ff236795a3516d87bc71", "005ea6aef136f88b", + "b1196457261bd05ccb387a8ff3fd02687bf496bd7943d89419465289669e27aa", + "39d1ebfa783b61a6fa8e9747d0f9f134efae5cfba284a2c80e8deabae6b98676"}, + {59950, "49e15ba4bf501ce8fe8876101c808e24c69a859be15de554bf85dbc095491bd6", "02ebe0503bd7b1da", + "df3dbb1669fd35dbb0ae96bbea2d498f0c6992cbddd092aeace42dd933505f95", + "b8984cf4021c4433f753654848d721f33a0792b4417241f0cf7c7c2db011a54a"}, + {59999, "f5c50ba5c0d6210ddb16250ec3efda178de857b2b1703d8d5403bd0f848e19cf", "02edb6275bd221e3", + "5017df70e97ca35638cf439cdbe54f30383d335e18eb4a74d6e166736f1038fa", + "4cf1fa62f25b577ac822a6a28d55f8b7e3ae7fe983abd868ae00927e68c41016"}, + {170915, "5b3e8dfa1aafd3924a51f33e2d672d8dae32fa528d8b1d378d6e4db0ec5d665d", "0000000044975727", + "efb29147484c434f1cc59629da90fd0343e3b047407ecd36e9ad973bd51bbac5", + "e7e6bb3b2f9acd3864bc86f72f87237eaf475633ef650c726ac80eb0adf116b6"}, + +}; +} // namespace diff --git a/src/hash.cpp b/src/hash.cpp index 856f79428a..d814abe6f5 100644 --- a/src/hash.cpp +++ b/src/hash.cpp @@ -3,10 +3,14 @@ // Distributed under the MIT software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+#include #include "hash.h" #include "crypto/common.h" #include "crypto/hmac_sha512.h" #include "pubkey.h" +#include "util.h" + +#include //TODO remove these double algoHashTotal[16]; @@ -250,3 +254,41 @@ uint64_t SipHashUint256Extra(uint64_t k0, uint64_t k1, const uint256& val, uint3 SIPROUND; return v0 ^ v1 ^ v2 ^ v3; } + +uint256 KAWPOWHash(const CBlockHeader& blockHeader, uint256& mix_hash) +{ + static ethash::epoch_context_ptr context{nullptr, nullptr}; + + // Get the context from the block height + const auto epoch_number = ethash::get_epoch_number(blockHeader.nHeight); + + if (!context || context->epoch_number != epoch_number) + context = ethash::create_epoch_context(epoch_number); + + // Build the header_hash + uint256 nHeaderHash = blockHeader.GetKAWPOWHeaderHash(); + const auto header_hash = to_hash256(nHeaderHash.GetHex()); + + // ProgPow hash + const auto result = progpow::hash(*context, blockHeader.nHeight, header_hash, blockHeader.nNonce64); + + mix_hash = uint256S(to_hex(result.mix_hash)); + return uint256S(to_hex(result.final_hash)); +} + + +uint256 KAWPOWHash_OnlyMix(const CBlockHeader& blockHeader) +{ + // Build the header_hash + uint256 nHeaderHash = blockHeader.GetKAWPOWHeaderHash(); + const auto header_hash = to_hash256(nHeaderHash.GetHex()); + + // ProgPow hash + const auto result = progpow::hash_no_verify(blockHeader.nHeight, header_hash, to_hash256(blockHeader.mix_hash.GetHex()), blockHeader.nNonce64); + + return uint256S(to_hex(result)); +} + + + + diff --git a/src/hash.h b/src/hash.h index 25c6035d35..7f2ccb1893 100644 --- a/src/hash.h +++ b/src/hash.h @@ -15,12 +15,34 @@ #include "uint256.h" #include "version.h" +#include "algo/sph_blake.h" +#include "algo/sph_bmw.h" +#include "algo/sph_groestl.h" +#include "algo/sph_jh.h" +#include "algo/sph_keccak.h" +#include "algo/sph_skein.h" +#include "algo/sph_luffa.h" +#include "algo/sph_cubehash.h" +#include "algo/sph_shavite.h" +#include "algo/sph_simd.h" +#include "algo/sph_echo.h" +#include 
"algo/sph_hamsi.h" +#include "algo/sph_fugue.h" +#include "algo/sph_shabal.h" +#include "algo/sph_whirlpool.h" +#include "algo/sph_sha2.h" +#include "algo/sph_haval.h" + +#include "algo/sph_tiger.h" +#include "algo/lyra2.h" +#include "algo/gost_streebog.h" + +#include -extern "C" { -#include "crypto/sph_sha2.h" -} #include +class CBlockHeader; + typedef uint256 ChainCode; /** A hasher class for Bitcoin's 256-bit hash (double SHA-256). */ @@ -308,5 +330,284 @@ extern double algoHashTotal[16]; extern int algoHashHits[16]; + +template +inline uint256 HashX16R(const T1 pbegin, const T1 pend, const uint256 PrevBlockHash) +{ +// static std::chrono::duration[16]; + int hashSelection; + + sph_blake512_context ctx_blake; //0 + sph_bmw512_context ctx_bmw; //1 + sph_groestl512_context ctx_groestl; //2 + sph_jh512_context ctx_jh; //3 + sph_keccak512_context ctx_keccak; //4 + sph_skein512_context ctx_skein; //5 + sph_luffa512_context ctx_luffa; //6 + sph_cubehash512_context ctx_cubehash; //7 + sph_shavite512_context ctx_shavite; //8 + sph_simd512_context ctx_simd; //9 + sph_echo512_context ctx_echo; //A + sph_hamsi512_context ctx_hamsi; //B + sph_fugue512_context ctx_fugue; //C + sph_shabal512_context ctx_shabal; //D + sph_whirlpool_context ctx_whirlpool; //E + sph_sha512_context ctx_sha512; //F + + + + static unsigned char pblank[1]; + + uint512 hash[16]; + + for (int i=0;i<16;i++) + { + const void *toHash; + int lenToHash; + if (i == 0) { + toHash = (pbegin == pend ? 
pblank : static_cast(&pbegin[0])); + lenToHash = (pend - pbegin) * sizeof(pbegin[0]); + } else { + toHash = static_cast(&hash[i-1]); + lenToHash = 64; + } + + hashSelection = GetHashSelection(PrevBlockHash, i); + + switch(hashSelection) { + case 0: + sph_blake512_init(&ctx_blake); + sph_blake512 (&ctx_blake, toHash, lenToHash); + sph_blake512_close(&ctx_blake, static_cast(&hash[i])); + break; + case 1: + sph_bmw512_init(&ctx_bmw); + sph_bmw512 (&ctx_bmw, toHash, lenToHash); + sph_bmw512_close(&ctx_bmw, static_cast(&hash[i])); + break; + case 2: + sph_groestl512_init(&ctx_groestl); + sph_groestl512 (&ctx_groestl, toHash, lenToHash); + sph_groestl512_close(&ctx_groestl, static_cast(&hash[i])); + break; + case 3: + sph_jh512_init(&ctx_jh); + sph_jh512 (&ctx_jh, toHash, lenToHash); + sph_jh512_close(&ctx_jh, static_cast(&hash[i])); + break; + case 4: + sph_keccak512_init(&ctx_keccak); + sph_keccak512 (&ctx_keccak, toHash, lenToHash); + sph_keccak512_close(&ctx_keccak, static_cast(&hash[i])); + break; + case 5: + sph_skein512_init(&ctx_skein); + sph_skein512 (&ctx_skein, toHash, lenToHash); + sph_skein512_close(&ctx_skein, static_cast(&hash[i])); + break; + case 6: + sph_luffa512_init(&ctx_luffa); + sph_luffa512 (&ctx_luffa, toHash, lenToHash); + sph_luffa512_close(&ctx_luffa, static_cast(&hash[i])); + break; + case 7: + sph_cubehash512_init(&ctx_cubehash); + sph_cubehash512 (&ctx_cubehash, toHash, lenToHash); + sph_cubehash512_close(&ctx_cubehash, static_cast(&hash[i])); + break; + case 8: + sph_shavite512_init(&ctx_shavite); + sph_shavite512(&ctx_shavite, toHash, lenToHash); + sph_shavite512_close(&ctx_shavite, static_cast(&hash[i])); + break; + case 9: + sph_simd512_init(&ctx_simd); + sph_simd512 (&ctx_simd, toHash, lenToHash); + sph_simd512_close(&ctx_simd, static_cast(&hash[i])); + break; + case 10: + sph_echo512_init(&ctx_echo); + sph_echo512 (&ctx_echo, toHash, lenToHash); + sph_echo512_close(&ctx_echo, static_cast(&hash[i])); + break; + case 11: + 
sph_hamsi512_init(&ctx_hamsi); + sph_hamsi512 (&ctx_hamsi, toHash, lenToHash); + sph_hamsi512_close(&ctx_hamsi, static_cast(&hash[i])); + break; + case 12: + sph_fugue512_init(&ctx_fugue); + sph_fugue512 (&ctx_fugue, toHash, lenToHash); + sph_fugue512_close(&ctx_fugue, static_cast(&hash[i])); + break; + case 13: + sph_shabal512_init(&ctx_shabal); + sph_shabal512 (&ctx_shabal, toHash, lenToHash); + sph_shabal512_close(&ctx_shabal, static_cast(&hash[i])); + break; + case 14: + sph_whirlpool_init(&ctx_whirlpool); + sph_whirlpool(&ctx_whirlpool, toHash, lenToHash); + sph_whirlpool_close(&ctx_whirlpool, static_cast(&hash[i])); + break; + case 15: + sph_sha512_init(&ctx_sha512); + sph_sha512 (&ctx_sha512, toHash, lenToHash); + sph_sha512_close(&ctx_sha512, static_cast(&hash[i])); + break; + } + } + + return hash[15].trim256(); +} + +template +inline uint256 HashX16RV2(const T1 pbegin, const T1 pend, const uint256 PrevBlockHash) +{ +// static std::chrono::duration[16]; + int hashSelection; + + sph_blake512_context ctx_blake; //0 + sph_bmw512_context ctx_bmw; //1 + sph_groestl512_context ctx_groestl; //2 + sph_jh512_context ctx_jh; //3 + sph_keccak512_context ctx_keccak; //4 + sph_skein512_context ctx_skein; //5 + sph_luffa512_context ctx_luffa; //6 + sph_cubehash512_context ctx_cubehash; //7 + sph_shavite512_context ctx_shavite; //8 + sph_simd512_context ctx_simd; //9 + sph_echo512_context ctx_echo; //A + sph_hamsi512_context ctx_hamsi; //B + sph_fugue512_context ctx_fugue; //C + sph_shabal512_context ctx_shabal; //D + sph_whirlpool_context ctx_whirlpool; //E + + sph_sha512_context ctx_sha512; + sph_tiger_context ctx_tiger; + + + + static unsigned char pblank[1]; + + uint512 hash[16]; + + for (int i=0;i<16;i++) + { + const void *toHash; + int lenToHash; + if (i == 0) { + toHash = (pbegin == pend ? 
pblank : static_cast(&pbegin[0])); + lenToHash = (pend - pbegin) * sizeof(pbegin[0]); + } else { + toHash = static_cast(&hash[i-1]); + lenToHash = 64; + } + + hashSelection = GetHashSelection(PrevBlockHash, i); + + switch(hashSelection) { + case 0: + sph_blake512_init(&ctx_blake); + sph_blake512 (&ctx_blake, toHash, lenToHash); + sph_blake512_close(&ctx_blake, static_cast(&hash[i])); + break; + case 1: + sph_bmw512_init(&ctx_bmw); + sph_bmw512 (&ctx_bmw, toHash, lenToHash); + sph_bmw512_close(&ctx_bmw, static_cast(&hash[i])); + break; + case 2: + sph_groestl512_init(&ctx_groestl); + sph_groestl512 (&ctx_groestl, toHash, lenToHash); + sph_groestl512_close(&ctx_groestl, static_cast(&hash[i])); + break; + case 3: + sph_jh512_init(&ctx_jh); + sph_jh512 (&ctx_jh, toHash, lenToHash); + sph_jh512_close(&ctx_jh, static_cast(&hash[i])); + break; + case 4: + sph_tiger_init(&ctx_tiger); + sph_tiger (&ctx_tiger, toHash, lenToHash); + sph_tiger_close(&ctx_tiger, static_cast(&hash[i])); + + sph_keccak512_init(&ctx_keccak); + sph_keccak512 (&ctx_keccak, static_cast(&hash[i]), 64); + sph_keccak512_close(&ctx_keccak, static_cast(&hash[i])); + break; + case 5: + sph_skein512_init(&ctx_skein); + sph_skein512 (&ctx_skein, toHash, lenToHash); + sph_skein512_close(&ctx_skein, static_cast(&hash[i])); + break; + case 6: + sph_tiger_init(&ctx_tiger); + sph_tiger (&ctx_tiger, toHash, lenToHash); + sph_tiger_close(&ctx_tiger, static_cast(&hash[i])); + + sph_luffa512_init(&ctx_luffa); + sph_luffa512 (&ctx_luffa, static_cast(&hash[i]), 64); + sph_luffa512_close(&ctx_luffa, static_cast(&hash[i])); + break; + case 7: + sph_cubehash512_init(&ctx_cubehash); + sph_cubehash512 (&ctx_cubehash, toHash, lenToHash); + sph_cubehash512_close(&ctx_cubehash, static_cast(&hash[i])); + break; + case 8: + sph_shavite512_init(&ctx_shavite); + sph_shavite512(&ctx_shavite, toHash, lenToHash); + sph_shavite512_close(&ctx_shavite, static_cast(&hash[i])); + break; + case 9: + sph_simd512_init(&ctx_simd); + 
sph_simd512 (&ctx_simd, toHash, lenToHash); + sph_simd512_close(&ctx_simd, static_cast(&hash[i])); + break; + case 10: + sph_echo512_init(&ctx_echo); + sph_echo512 (&ctx_echo, toHash, lenToHash); + sph_echo512_close(&ctx_echo, static_cast(&hash[i])); + break; + case 11: + sph_hamsi512_init(&ctx_hamsi); + sph_hamsi512 (&ctx_hamsi, toHash, lenToHash); + sph_hamsi512_close(&ctx_hamsi, static_cast(&hash[i])); + break; + case 12: + sph_fugue512_init(&ctx_fugue); + sph_fugue512 (&ctx_fugue, toHash, lenToHash); + sph_fugue512_close(&ctx_fugue, static_cast(&hash[i])); + break; + case 13: + sph_shabal512_init(&ctx_shabal); + sph_shabal512 (&ctx_shabal, toHash, lenToHash); + sph_shabal512_close(&ctx_shabal, static_cast(&hash[i])); + break; + case 14: + sph_whirlpool_init(&ctx_whirlpool); + sph_whirlpool(&ctx_whirlpool, toHash, lenToHash); + sph_whirlpool_close(&ctx_whirlpool, static_cast(&hash[i])); + break; + case 15: + sph_tiger_init(&ctx_tiger); + sph_tiger (&ctx_tiger, toHash, lenToHash); + sph_tiger_close(&ctx_tiger, static_cast(&hash[i])); + + sph_sha512_init(&ctx_sha512); + sph_sha512 (&ctx_sha512, static_cast(&hash[i]), 64); + sph_sha512_close(&ctx_sha512, static_cast(&hash[i])); + break; + } + } + + return hash[15].trim256(); +} + +uint256 KAWPOWHash(const CBlockHeader& blockHeader, uint256& mix_hash); +uint256 KAWPOWHash_OnlyMix(const CBlockHeader& blockHeader); + + #endif // RAVEN_HASH_H diff --git a/src/init.cpp b/src/init.cpp index 9e87567c17..042a406ce6 100644 --- a/src/init.cpp +++ b/src/init.cpp @@ -411,6 +411,16 @@ std::string HelpMessage(HelpMessageMode mode) const auto testnetBaseParams = CreateBaseChainParams(CBaseChainParams::TESTNET); const auto defaultChainParams = CreateChainParams(CBaseChainParams::MAIN); const auto testnetChainParams = CreateChainParams(CBaseChainParams::TESTNET); + + // We want to make sure to set the correct values after we get the help values + if (bNetwork.fOnRegtest) { + CreateChainParams(CBaseChainParams::REGTEST); + } else if 
(bNetwork.fOnTestnet) { + CreateChainParams(CBaseChainParams::TESTNET); + } else { + CreateChainParams(CBaseChainParams::MAIN); + } + const bool showDebug = gArgs.GetBoolArg("-help-debug", false); // When adding new options to the categories, please keep and ensure alphabetical ordering. diff --git a/src/keystore.cpp b/src/keystore.cpp index 50ee8e2527..c572ffec67 100644 --- a/src/keystore.cpp +++ b/src/keystore.cpp @@ -112,3 +112,35 @@ bool CBasicKeyStore::HaveWatchOnly() const LOCK(cs_KeyStore); return (!setWatchOnly.empty()); } + + +bool CBasicKeyStore::AddWords(const uint256& p_hash, const std::vector& p_vchWords) +{ + LOCK(cs_KeyStore); + nWordHash = p_hash; + vchWords = p_vchWords; + return true; +} + +bool CBasicKeyStore::AddPassphrase(const std::vector& p_vchPassphrase) +{ + LOCK(cs_KeyStore); + vchPassphrase = p_vchPassphrase; + return true; +} + +void CBasicKeyStore::GetBip39Data(uint256& p_hash, std::vector& p_vchWords, std::vector& p_vchPassphrase, std::vector& p_vchSeed) +{ + LOCK(cs_KeyStore); + p_hash = nWordHash; + p_vchWords = vchWords; + p_vchPassphrase = vchPassphrase; + p_vchSeed = g_vchSeed; +} + +bool CBasicKeyStore::AddVchSeed(const std::vector& p_vchSeed) +{ + LOCK(cs_KeyStore); + g_vchSeed = p_vchSeed; + return true; +} diff --git a/src/keystore.h b/src/keystore.h index 94728170d0..5da7796e65 100644 --- a/src/keystore.h +++ b/src/keystore.h @@ -60,6 +60,11 @@ class CBasicKeyStore : public CKeyStore ScriptMap mapScripts; WatchOnlySet setWatchOnly; + uint256 nWordHash; + std::vector vchWords; + std::vector vchPassphrase; + std::vector g_vchSeed; + public: bool AddKeyPubKey(const CKey& key, const CPubKey &pubkey) override; bool GetPubKey(const CKeyID &address, CPubKey& vchPubKeyOut) const override; @@ -102,6 +107,11 @@ class CBasicKeyStore : public CKeyStore bool RemoveWatchOnly(const CScript &dest) override; bool HaveWatchOnly(const CScript &dest) const override; bool HaveWatchOnly() const override; + + bool AddWords(const uint256& p_hash, 
const std::vector& p_vchWords); + bool AddPassphrase(const std::vector& p_vchPassphrase); + bool AddVchSeed(const std::vector& p_vchSeed); + void GetBip39Data(uint256& p_hash, std::vector& p_vchWords, std::vector& p_vchPassphrase, std::vector& p_vchSeed); }; typedef std::vector > CKeyingMaterial; diff --git a/src/leveldb/port/port_posix.cc b/src/leveldb/port/port_posix.cc index ec39e92195..d4270a7614 100644 --- a/src/leveldb/port/port_posix.cc +++ b/src/leveldb/port/port_posix.cc @@ -54,6 +54,7 @@ void InitOnce(OnceType* once, void (*initializer)()) { } bool HasAcceleratedCRC32C() { +#pragma GCC diagnostic ignored "-Wmaybe-uninitialized" #if (defined(__x86_64__) || defined(__i386__)) && defined(__GNUC__) unsigned int eax, ebx, ecx, edx; __get_cpuid(1, &eax, &ebx, &ecx, &edx); diff --git a/src/miner.cpp b/src/miner.cpp index be723d590e..7b49ef61b0 100644 --- a/src/miner.cpp +++ b/src/miner.cpp @@ -189,6 +189,8 @@ std::unique_ptr BlockAssembler::CreateNewBlock(const CScript& sc UpdateTime(pblock, chainparams.GetConsensus(), pindexPrev); pblock->nBits = GetNextWorkRequired(pindexPrev, pblock, chainparams.GetConsensus()); pblock->nNonce = 0; + pblock->nNonce64 = 0; + pblock->nHeight = nHeight; pblocktemplate->vTxSigOpsCost[0] = WITNESS_SCALE_FACTOR * GetLegacySigOpCount(*pblock->vtx[0]); CValidationState state; @@ -629,11 +631,13 @@ void static RavenMiner(const CChainParams& chainparams) { uint256 hash; + uint256 mix_hash; while (true) { - hash = pblock->GetHash(); + hash = pblock->GetHashFull(mix_hash); if (UintToArith256(hash) <= hashTarget) { + pblock->mix_hash = mix_hash; // Found a solution SetThreadPriority(THREAD_PRIORITY_NORMAL); LogPrintf("RavenMiner:\n proof-of-work found\n hash: %s\n target: %s\n", hash.GetHex(), hashTarget.GetHex()); diff --git a/src/net.h b/src/net.h index f0aa033ef4..8c231a8208 100644 --- a/src/net.h +++ b/src/net.h @@ -59,9 +59,9 @@ static const unsigned int MAX_PROTOCOL_MESSAGE_LENGTH = 4 * 1000 * 1000; /** Maximum length of strSubVer 
in `version` message */ static const unsigned int MAX_SUBVERSION_LENGTH = 256; /** Maximum number of automatic outgoing nodes */ -static const int MAX_OUTBOUND_CONNECTIONS = 8; +static const int MAX_OUTBOUND_CONNECTIONS = 12; /** Maximum number of addnode outgoing nodes */ -static const int MAX_ADDNODE_CONNECTIONS = 8; +static const int MAX_ADDNODE_CONNECTIONS = 12; /** -listen default */ static const bool DEFAULT_LISTEN = true; /** -upnp default */ diff --git a/src/net_processing.cpp b/src/net_processing.cpp index fc3da46744..f434d6254a 100644 --- a/src/net_processing.cpp +++ b/src/net_processing.cpp @@ -660,7 +660,6 @@ bool AddOrphanTx(const CTransactionRef& tx, NodeId peer) EXCLUSIVE_LOCKS_REQUIRE assert(ret.second); for (const CTxIn& txin : tx->vin) { -#pragma GCC diagnostic ignored "-Wuser-defined-warnings" mapOrphanTransactionsByPrev[txin.prevout].insert(ret.first); } @@ -1145,7 +1144,8 @@ void static ProcessGetData(CNode* pfrom, const Consensus::Params& consensusParam bool fPeerWantsWitness = State(pfrom->GetId())->fWantsCmpctWitness; int nSendFlags = fPeerWantsWitness ? 
0 : SERIALIZE_TRANSACTION_NO_WITNESS; if (CanDirectFetch(consensusParams) && mi->second->nHeight >= chainActive.Height() - MAX_CMPCTBLOCK_DEPTH) { - if ((fPeerWantsWitness || !fWitnessesPresentInARecentCompactBlock) && a_recent_compact_block && a_recent_compact_block->header.GetHash() == mi->second->GetBlockHash()) { + if ((fPeerWantsWitness || !fWitnessesPresentInARecentCompactBlock) && a_recent_compact_block && + a_recent_compact_block->header.GetHash() == mi->second->GetBlockHash()) { connman->PushMessage(pfrom, msgMaker.Make(nSendFlags, NetMsgType::CMPCTBLOCK, *a_recent_compact_block)); } else { CBlockHeaderAndShortTxIDs cmpctblock(*pblock, fPeerWantsWitness); @@ -1238,7 +1238,7 @@ void static ProcessAssetGetData(CNode* pfrom, const Consensus::Params& consensus continue; } - bool push = false; + UNUSED_VAR bool push = false; auto currentActiveAssetCache = GetCurrentAssetCache(); if (currentActiveAssetCache) { CNewAsset asset; @@ -1333,7 +1333,7 @@ bool static ProcessHeadersMessage(CNode *pfrom, CConnman *connman, const std::ve nodestate->nUnconnectingHeaders++; connman->PushMessage(pfrom, msgMaker.Make(NetMsgType::GETHEADERS, chainActive.GetLocator(pindexBestHeader), uint256())); LogPrint(BCLog::NET, "received header %s: missing prev block %s, sending getheaders (%d) to end (peer=%d, nUnconnectingHeaders=%d)\n", - headers[0].GetHash().ToString(), + headers[0].GetHash().ToString(), headers[0].hashPrevBlock.ToString(), pindexBestHeader->nHeight, pfrom->GetId(), nodestate->nUnconnectingHeaders); @@ -1641,6 +1641,14 @@ bool static ProcessMessage(CNode* pfrom, const std::string& strCommand, CDataStr return false; } + if (AreTransferScriptsSizeDeployed() && nVersion < KAWPOW_VERSION) { + LogPrintf("peer=%d using obsolete version %i; disconnecting because peer isn't signalling protocol version for kawpow support\n", pfrom->GetId(), nVersion); + connman->PushMessage(pfrom, CNetMsgMaker(INIT_PROTO_VERSION).Make(NetMsgType::REJECT, strCommand, REJECT_OBSOLETE, + 
strprintf("Version must be %d or greater or equal to", KAWPOW_VERSION))); + pfrom->fDisconnect = true; + return false; + } + if (nVersion == 10300) nVersion = 300; if (!vRecv.empty()) @@ -2582,7 +2590,12 @@ bool static ProcessMessage(CNode* pfrom, const std::string& strCommand, CDataStr // process from some other peer. We do this after calling // ProcessNewBlock so that a malleated cmpctblock announcement // can't be used to interfere with block relay. - MarkBlockAsReceived(pblock->GetHash()); + if (pblock->fChecked) { + MarkBlockAsReceived(pblock->GetHash()); + } else { + uint256 mix_hash; + MarkBlockAsReceived(pblock->GetHashFull(mix_hash)); + } } } @@ -2658,7 +2671,12 @@ bool static ProcessMessage(CNode* pfrom, const std::string& strCommand, CDataStr pfrom->nLastBlockTime = GetTime(); } else { LOCK(cs_main); - mapBlockSource.erase(pblock->GetHash()); + if (pblock->fChecked) { + mapBlockSource.erase(pblock->GetHash()); + } else { + uint256 mix_hash; + mapBlockSource.erase(pblock->GetHashFull(mix_hash)); + } } } } @@ -3446,7 +3464,7 @@ bool PeerLogicValidation::SendMessages(CNode* pto, std::atomic& interruptM // We only send up to 1 block as header-and-ids, as otherwise // probably means we're doing an initial-ish-sync or they're slow LogPrint(BCLog::NET, "%s sending header-and-ids %s to peer=%d\n", __func__, - vHeaders.front().GetHash().ToString(), pto->GetId()); + vHeaders.front().GetHash().ToString(), pto->GetId()); int nSendFlags = state.fWantsCmpctWitness ? 
0 : SERIALIZE_TRANSACTION_NO_WITNESS; @@ -3474,12 +3492,12 @@ bool PeerLogicValidation::SendMessages(CNode* pto, std::atomic& interruptM } else if (state.fPreferHeaders) { if (vHeaders.size() > 1) { LogPrint(BCLog::NET, "%s: %u headers, range (%s, %s), to peer=%d\n", __func__, - vHeaders.size(), - vHeaders.front().GetHash().ToString(), - vHeaders.back().GetHash().ToString(), pto->GetId()); + vHeaders.size(), + vHeaders.front().GetHash().ToString(), + vHeaders.back().GetHash().ToString(), pto->GetId()); } else { LogPrint(BCLog::NET, "%s: sending header %s to peer=%d\n", __func__, - vHeaders.front().GetHash().ToString(), pto->GetId()); + vHeaders.front().GetHash().ToString(), pto->GetId()); } connman->PushMessage(pto, msgMaker.Make(NetMsgType::HEADERS, vHeaders)); state.pindexBestHeaderSent = pBestIndex; diff --git a/src/pow.cpp b/src/pow.cpp index e4b9ac1f29..6685adc5fd 100644 --- a/src/pow.cpp +++ b/src/pow.cpp @@ -47,6 +47,7 @@ unsigned int static DarkGravityWave(const CBlockIndex* pindexLast, const CBlockH const CBlockIndex *pindex = pindexLast; arith_uint256 bnPastTargetAvg; + int nKAWPOWBlocksFound = 0; for (unsigned int nCountBlocks = 1; nCountBlocks <= nPastBlocks; nCountBlocks++) { arith_uint256 bnTarget = arith_uint256().SetCompact(pindex->nBits); if (nCountBlocks == 1) { @@ -56,12 +57,28 @@ unsigned int static DarkGravityWave(const CBlockIndex* pindexLast, const CBlockH bnPastTargetAvg = (bnPastTargetAvg * nCountBlocks + bnTarget) / (nCountBlocks + 1); } + // Count how blocks are KAWPOW mined in the last 180 blocks + if (pindex->nTime >= nKAWPOWActivationTime) { + nKAWPOWBlocksFound++; + } + if(nCountBlocks != nPastBlocks) { assert(pindex->pprev); // should never fail pindex = pindex->pprev; } } + // If we are mining a KAWPOW block. We check to see if we have mined + // 180 KAWPOW blocks already. If we haven't we are going to return our + // temp limit. This will allow us to change algos to kawpow without having to + // change the DGW math. 
+ if (pblock->nTime >= nKAWPOWActivationTime) { + if (nKAWPOWBlocksFound != nPastBlocks) { + const arith_uint256 bnKawPowLimit = UintToArith256(params.kawpowLimit); + return bnKawPowLimit.GetCompact(); + } + } + arith_uint256 bnNew(bnPastTargetAvg); int64_t nActualTimespan = pindexLast->GetBlockTime() - pindex->GetBlockTime(); @@ -122,19 +139,17 @@ unsigned int GetNextWorkRequiredBTC(const CBlockIndex* pindexLast, const CBlockH unsigned int GetNextWorkRequired(const CBlockIndex* pindexLast, const CBlockHeader *pblock, const Consensus::Params& params) { - int dgw = DarkGravityWave(pindexLast, pblock, params); - int btc = GetNextWorkRequiredBTC(pindexLast, pblock, params); // int64_t nPrevBlockTime = (pindexLast->pprev ? pindexLast->pprev->GetBlockTime() : pindexLast->GetBlockTime()); //<- Commented out - fixes "not used" warning if (IsDGWActive(pindexLast->nHeight + 1)) { // LogPrint(BCLog::NET, "Block %s - version: %s: found next work required using DGW: [%s] (BTC would have been [%s]\t(%+d)\t(%0.3f%%)\t(%s sec))\n", // pindexLast->nHeight + 1, pblock->nVersion, dgw, btc, btc - dgw, (float)(btc - dgw) * 100.0 / (float)dgw, pindexLast->GetBlockTime() - nPrevBlockTime); - return dgw; + return DarkGravityWave(pindexLast, pblock, params); } else { // LogPrint(BCLog::NET, "Block %s - version: %s: found next work required using BTC: [%s] (DGW would have been [%s]\t(%+d)\t(%0.3f%%)\t(%s sec))\n", // pindexLast->nHeight + 1, pblock->nVersion, btc, dgw, dgw - btc, (float)(dgw - btc) * 100.0 / (float)btc, pindexLast->GetBlockTime() - nPrevBlockTime); - return btc; + return GetNextWorkRequiredBTC(pindexLast, pblock, params); } } diff --git a/src/primitives/block.cpp b/src/primitives/block.cpp index 418c021900..81ac526e97 100644 --- a/src/primitives/block.cpp +++ b/src/primitives/block.cpp @@ -4,10 +4,9 @@ // Distributed under the MIT software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. 
-#include "versionbits.h" #include "primitives/block.h" -#include "algo/hash_algos.h" +#include #include "tinyformat.h" #include "utilstrencodings.h" #include "crypto/common.h" @@ -17,6 +16,7 @@ static const uint32_t MAINNET_X16RV2ACTIVATIONTIME = 1569945600; static const uint32_t TESTNET_X16RV2ACTIVATIONTIME = 1567533600; static const uint32_t REGTEST_X16RV2ACTIVATIONTIME = 1569931200; +uint32_t nKAWPOWActivationTime; BlockNetwork bNetwork = BlockNetwork(); @@ -37,19 +37,45 @@ void BlockNetwork::SetNetwork(const std::string& net) uint256 CBlockHeader::GetHash() const { - uint32_t nTimeToUse = MAINNET_X16RV2ACTIVATIONTIME; - if (bNetwork.fOnTestnet) { - nTimeToUse = TESTNET_X16RV2ACTIVATIONTIME; - } else if (bNetwork.fOnRegtest) { - nTimeToUse = REGTEST_X16RV2ACTIVATIONTIME; - } - if (nTime >= nTimeToUse) { - return HashX16RV2(BEGIN(nVersion), END(nNonce), hashPrevBlock); + if (nTime < nKAWPOWActivationTime) { + uint32_t nTimeToUse = MAINNET_X16RV2ACTIVATIONTIME; + if (bNetwork.fOnTestnet) { + nTimeToUse = TESTNET_X16RV2ACTIVATIONTIME; + } else if (bNetwork.fOnRegtest) { + nTimeToUse = REGTEST_X16RV2ACTIVATIONTIME; + } + if (nTime >= nTimeToUse) { + return HashX16RV2(BEGIN(nVersion), END(nNonce), hashPrevBlock); + } + + return HashX16R(BEGIN(nVersion), END(nNonce), hashPrevBlock); + } else { + return KAWPOWHash_OnlyMix(*this); } +} - return HashX16R(BEGIN(nVersion), END(nNonce), hashPrevBlock); +uint256 CBlockHeader::GetHashFull(uint256& mix_hash) const +{ + if (nTime < nKAWPOWActivationTime) { + uint32_t nTimeToUse = MAINNET_X16RV2ACTIVATIONTIME; + if (bNetwork.fOnTestnet) { + nTimeToUse = TESTNET_X16RV2ACTIVATIONTIME; + } else if (bNetwork.fOnRegtest) { + nTimeToUse = REGTEST_X16RV2ACTIVATIONTIME; + } + if (nTime >= nTimeToUse) { + return HashX16RV2(BEGIN(nVersion), END(nNonce), hashPrevBlock); + } + + return HashX16R(BEGIN(nVersion), END(nNonce), hashPrevBlock); + } else { + return KAWPOWHash(*this, mix_hash); + } } + + + uint256 CBlockHeader::GetX16RHash() 
const { return HashX16R(BEGIN(nVersion), END(nNonce), hashPrevBlock); @@ -60,15 +86,40 @@ uint256 CBlockHeader::GetX16RV2Hash() const return HashX16RV2(BEGIN(nVersion), END(nNonce), hashPrevBlock); } +/** + * @brief This takes a block header, removes the nNonce64 and the mixHash. Then performs a serialized hash of it SHA256D. + * This will be used as the input to the KAAAWWWPOW hashing function + * @note Only to be called and used on KAAAWWWPOW block headers + */ +uint256 CBlockHeader::GetKAWPOWHeaderHash() const +{ + CKAWPOWInput input{*this}; + + return SerializeHash(input); +} + +std::string CBlockHeader::ToString() const +{ + std::stringstream s; + s << strprintf("CBlock(ver=0x%08x, hashPrevBlock=%s, hashMerkleRoot=%s, nTime=%u, nBits=%08x, nNonce=%u, nNonce64=%u, nHeight=%u)\n", + nVersion, + hashPrevBlock.ToString(), + hashMerkleRoot.ToString(), + nTime, nBits, nNonce, nNonce64, nHeight); + return s.str(); +} + + + std::string CBlock::ToString() const { std::stringstream s; - s << strprintf("CBlock(hash=%s, ver=0x%08x, hashPrevBlock=%s, hashMerkleRoot=%s, nTime=%u, nBits=%08x, nNonce=%u, vtx=%u)\n", + s << strprintf("CBlock(hash=%s, ver=0x%08x, hashPrevBlock=%s, hashMerkleRoot=%s, nTime=%u, nBits=%08x, nNonce=%u, nNonce64=%u, vtx=%u)\n", GetHash().ToString(), nVersion, hashPrevBlock.ToString(), hashMerkleRoot.ToString(), - nTime, nBits, nNonce, + nTime, nBits, nNonce, nNonce64, vtx.size()); for (const auto& tx : vtx) { s << " " << tx->ToString() << "\n"; diff --git a/src/primitives/block.h b/src/primitives/block.h index 7220f345b9..7575422266 100644 --- a/src/primitives/block.h +++ b/src/primitives/block.h @@ -19,6 +19,8 @@ * of the block. 
*/ +extern uint32_t nKAWPOWActivationTime; + class BlockNetwork { public: @@ -34,6 +36,7 @@ extern BlockNetwork bNetwork; class CBlockHeader { public: + // header int32_t nVersion; uint256 hashPrevBlock; @@ -42,6 +45,11 @@ class CBlockHeader uint32_t nBits; uint32_t nNonce; + //KAAAWWWPOW data + uint32_t nHeight; + uint64_t nNonce64; + uint256 mix_hash; + CBlockHeader() { SetNull(); @@ -56,7 +64,13 @@ class CBlockHeader READWRITE(hashMerkleRoot); READWRITE(nTime); READWRITE(nBits); - READWRITE(nNonce); + if (nTime < nKAWPOWActivationTime) { + READWRITE(nNonce); + } else { + READWRITE(nHeight); + READWRITE(nNonce64); + READWRITE(mix_hash); + } } void SetNull() @@ -67,6 +81,10 @@ class CBlockHeader nTime = 0; nBits = 0; nNonce = 0; + + nNonce64 = 0; + nHeight = 0; + mix_hash.SetNull(); } bool IsNull() const @@ -78,6 +96,10 @@ class CBlockHeader uint256 GetX16RHash() const; uint256 GetX16RV2Hash() const; + uint256 GetHashFull(uint256& mix_hash) const; + uint256 GetKAWPOWHeaderHash() const; + std::string ToString() const; + /// Use for testing algo switch uint256 TestTiger() const; uint256 TestSha512() const; @@ -99,6 +121,7 @@ class CBlock : public CBlockHeader // memory only mutable bool fChecked; + CBlock() { SetNull(); @@ -134,6 +157,11 @@ class CBlock : public CBlockHeader block.nTime = nTime; block.nBits = nBits; block.nNonce = nNonce; + + // KAWPOW + block.nHeight = nHeight; + block.nNonce64 = nNonce64; + block.mix_hash = mix_hash; return block; } @@ -178,4 +206,30 @@ struct CBlockLocator } }; +/** + * Custom serializer for CBlockHeader that omits the nNonce and mixHash, for use + * as input to ProgPow. 
+ */ +class CKAWPOWInput : private CBlockHeader +{ +public: + CKAWPOWInput(const CBlockHeader &header) + { + CBlockHeader::SetNull(); + *((CBlockHeader*)this) = header; + } + + ADD_SERIALIZE_METHODS; + + template + inline void SerializationOp(Stream& s, Operation ser_action) { + READWRITE(this->nVersion); + READWRITE(hashPrevBlock); + READWRITE(hashMerkleRoot); + READWRITE(nTime); + READWRITE(nBits); + READWRITE(nHeight); + } +}; + #endif // RAVEN_PRIMITIVES_BLOCK_H diff --git a/src/qt/forms/mnemonicdialog.ui b/src/qt/forms/mnemonicdialog.ui new file mode 100644 index 0000000000..d995901b0a --- /dev/null +++ b/src/qt/forms/mnemonicdialog.ui @@ -0,0 +1,171 @@ + + + MnemonicDialog + + + + 0 + 0 + 810 + 297 + + + + HD Wallet Setup + + + + + + true + + + color : red + + + + + + + + + + 0 + + + + Import + + + + + + Import Recovery Phrase + + + + + + + Enter your BIP39 compliant Recovery Phrase/Mnemonic - Typing your own words will probably not work how you expect, since the words require a particular structure (the last word is a checksum). + + + QTextEdit::WidgetWidth + + + + + + + + + Recovery Passphrase + + + + + + + Enter a passphrase to protect your Recovery Phrase (optional) + + + + + + + + 75 + true + + + + Warning + + + + + + + Please ensure you backup your Recovery Phrase and Passphrase - they are not recoverable! + + + + + + + + + + + + 150 + 0 + + + + + 150 + 16777215 + + + + Import + + + + + + + + 150 + 0 + + + + Generate + + + + + + + + 150 + 0 + + + + + 150 + 16777215 + + + + Cancel + + + + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + + + + + + + + + + diff --git a/src/qt/mnemonicdialog.cpp b/src/qt/mnemonicdialog.cpp new file mode 100644 index 0000000000..41bdd8046f --- /dev/null +++ b/src/qt/mnemonicdialog.cpp @@ -0,0 +1,92 @@ +// Copyright (c) 2017-2018 The Particl Core developers +// Distributed under the MIT software license, see the accompanying +// file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+ +#include +#include + +#include +#include + +#include + +MnemonicDialog::MnemonicDialog(QWidget *parent) : + QDialog(parent), + ui(new Ui::MnemonicDialog) +{ + setWindowFlags(Qt::Window | Qt::WindowTitleHint | Qt::CustomizeWindowHint); + ui->setupUi(this); + + QObject::connect(ui->btnCancel, &QPushButton::clicked, this, &MnemonicDialog::on_btnCancel_clicked); + QObject::connect(ui->btnGenerate, &QPushButton::clicked, this, &MnemonicDialog::on_btnGenerate_clicked); + + ui->tbxMnemonic->installEventFilter(this); + + setWindowTitle(QString("HD Wallet Setup")); + ui->edtPassword->setPlaceholderText(tr("Enter a passphrase to protect your Recovery Phrase. (optional)")); +#if QT_VERSION >= 0x050200 + ui->tbxMnemonic->setPlaceholderText(tr("Enter your BIP39 compliant Recovery Phrase/Mnemonic.")); +#endif +}; + +bool MnemonicDialog::eventFilter(QObject *obj, QEvent *ev) +{ + if (obj == ui->tbxMnemonic && ev->type() == QEvent::FocusIn) + { + // Clear invalid flag on focus + ui->lblHelp->clear(); + } + return QWidget::eventFilter(obj, ev); +} + +MnemonicDialog::~MnemonicDialog() +{ + delete ui; +}; + +void MnemonicDialog::on_btnCancel_clicked() +{ + auto btnRetVal = QMessageBox::question(this, windowTitle(), + tr("If you cancel, the word list will be generated for you. 
Are you sure you want to continue?"), + QMessageBox::No, QMessageBox::Yes); + + if(btnRetVal == QMessageBox::Yes) + close(); +}; + +void MnemonicDialog::on_btnImport_clicked() +{ + std::string words = ui->tbxMnemonic->toPlainText().toStdString(); + std::string passphrase = ui->edtPassword->text().toStdString(); + + my_words = words; + my_passphrase = passphrase; + + SecureString tmp(my_words.begin(), my_words.end()); + + // NOTE: default mnemonic passphrase is an empty string + if (!CMnemonic::Check(tmp)) { + ui->lblHelp->setText("Words are not valid, please check the words and try again"); + my_words.clear(); + my_passphrase.clear(); + return; + } + + close(); +}; + +void MnemonicDialog::GenerateWords() +{ + SecureString words = CMnemonic::Generate(128); + std::string str_words = std::string(words.begin(), words.end()); +#if QT_VERSION >= 0x050200 + ui->tbxMnemonic->setText(QString::fromStdString(str_words)); +#endif +} + +void MnemonicDialog::on_btnGenerate_clicked() +{ + GenerateWords(); +}; + + diff --git a/src/qt/mnemonicdialog.h b/src/qt/mnemonicdialog.h new file mode 100644 index 0000000000..2e525c24c7 --- /dev/null +++ b/src/qt/mnemonicdialog.h @@ -0,0 +1,33 @@ +// Copyright (c) 2017-2018 The Particl Core developers +// Distributed under the MIT software license, see the accompanying +// file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+ +#ifndef PARTICL_QT_MNEMONICDIALOG_H +#define PARTICL_QT_MNEMONICDIALOG_H + +#include + +namespace Ui { + class MnemonicDialog; +} + +class MnemonicDialog : public QDialog +{ + Q_OBJECT +public: + explicit MnemonicDialog(QWidget *parent); + ~MnemonicDialog(); + + void GenerateWords(); + bool eventFilter(QObject *obj, QEvent *ev); + +public Q_SLOTS: + void on_btnCancel_clicked(); + void on_btnImport_clicked(); + void on_btnGenerate_clicked(); + +private: + Ui::MnemonicDialog *ui; +}; + +#endif // PARTICL_QT_MNEMONICDIALOG_H diff --git a/src/qt/overviewpage.cpp b/src/qt/overviewpage.cpp index 77be23e725..f468418422 100644 --- a/src/qt/overviewpage.cpp +++ b/src/qt/overviewpage.cpp @@ -439,9 +439,7 @@ void OverviewPage::handleAssetClicked(const QModelIndex &index) { if(assetFilter) { QString name = index.data(AssetTableModel::AssetNameRole).toString(); - bool fOwner = false; if (IsAssetNameAnOwner(name.toStdString())) { - fOwner = true; name = name.left(name.size() - 1); sendAction->setDisabled(true); } else { diff --git a/src/qt/raven.qrc b/src/qt/raven.qrc index 9689a299f5..695f4de27d 100644 --- a/src/qt/raven.qrc +++ b/src/qt/raven.qrc @@ -57,6 +57,7 @@ res/icons/chevron.png res/icons/transaction_abandoned.png res/icons/hd_enabled.png + res/icons/hd_enabled_44.png res/icons/hd_disabled.png res/icons/network_disabled.png res/icons/refresh.png diff --git a/src/qt/ravengui.cpp b/src/qt/ravengui.cpp index 59a076ea5f..753fe204fd 100644 --- a/src/qt/ravengui.cpp +++ b/src/qt/ravengui.cpp @@ -26,6 +26,7 @@ #ifdef ENABLE_WALLET #include "walletframe.h" #include "walletmodel.h" +#include "mnemonicdialog.h" #endif // ENABLE_WALLET #ifdef Q_OS_MAC @@ -76,6 +77,9 @@ #include #include #include +#include +#include + #endif #if QT_VERSION < QT_VERSION_CHECK(5, 11, 0) @@ -144,6 +148,9 @@ RavenGUI::RavenGUI(const PlatformStyle *_platformStyle, const NetworkStyle *netw pricingTimer(0), networkManager(0), request(0), + labelVersionUpdate(0), + networkVersionManager(0), + 
versionRequest(0), trayIcon(0), trayIconMenu(0), notificator(0), @@ -210,6 +217,9 @@ RavenGUI::RavenGUI(const PlatformStyle *_platformStyle, const NetworkStyle *netw pricingTimer = new QTimer(); networkManager = new QNetworkAccessManager(); request = new QNetworkRequest(); + labelVersionUpdate = new QLabel(); + networkVersionManager = new QNetworkAccessManager(); + versionRequest = new QNetworkRequest(); /** RVN END */ // Accept D&D of URIs @@ -726,11 +736,23 @@ void RavenGUI::createToolBars() labelBtcRvn->setStyleSheet(STRING_LABEL_COLOR); labelBtcRvn->setFont(currentMarketFont); + labelVersionUpdate->setText("New Wallet Version Available"); + labelVersionUpdate->setTextFormat(Qt::RichText); + labelVersionUpdate->setTextInteractionFlags(Qt::TextBrowserInteraction); + labelVersionUpdate->setOpenExternalLinks(true); + labelVersionUpdate->setContentsMargins(0,0,15,0); + labelVersionUpdate->setFixedHeight(75); + labelVersionUpdate->setAlignment(Qt::AlignVCenter); + labelVersionUpdate->setStyleSheet(STRING_LABEL_COLOR); + labelVersionUpdate->setFont(currentMarketFont); + labelVersionUpdate->hide(); + priceLayout->setGeometry(headerWidget->rect()); priceLayout->addWidget(labelCurrentMarket, 0, Qt::AlignVCenter | Qt::AlignLeft); priceLayout->addWidget(labelCurrentPrice, 0, Qt::AlignVCenter | Qt::AlignLeft); priceLayout->addWidget(labelBtcRvn, 0 , Qt::AlignVCenter | Qt::AlignLeft); priceLayout->addStretch(); + priceLayout->addWidget(labelVersionUpdate, 0 , Qt::AlignVCenter | Qt::AlignRight); // Create the layout for widget to the right of the tool bar QVBoxLayout* mainFrameLayout = new QVBoxLayout(mainWalletWidget); @@ -796,11 +818,105 @@ void RavenGUI::createToolBars() } ); + connect(quitAction, SIGNAL(triggered()), qApp, SLOT(quit())); + // Create the timer connect(pricingTimer, SIGNAL(timeout()), this, SLOT(getPriceInfo())); pricingTimer->start(10000); getPriceInfo(); /** RVN END */ + + // Get the latest Ravencoin release and let the user know if they are using the 
latest version + // Network request code for the header widget + QObject::connect(networkVersionManager, &QNetworkAccessManager::finished, + this, [=](QNetworkReply *reply) { + if (reply->error()) { + qDebug() << reply->errorString(); + return; + } + + // Get the data from the network request + QString answer = reply->readAll(); + + UniValue releases(UniValue::VARR); + releases.read(answer.toStdString()); + + if (!releases.isArray()) { + return; + } + + if (!releases.size()) { + return; + } + + // Latest release lives in the first index of the array return from github v3 api + auto latestRelease = releases[0]; + + auto keys = latestRelease.getKeys(); + for (auto key : keys) { + if (key == "tag_name") { + auto latestVersion = latestRelease["tag_name"].get_str(); + + QRegExp rx("v(\\d+).(\\d+).(\\d+)"); + rx.indexIn(QString::fromStdString(latestVersion)); + + // List the found values + QStringList list = rx.capturedTexts(); + bool fNewSoftwareFound = false; + bool fStopSearch = false; + if (list.size() >= 4) { + if (MAIN_SOFTWARE_VERSION < list[1].toInt()) { + fNewSoftwareFound = true; + } else { + if (MAIN_SOFTWARE_VERSION > list[1].toInt()) { + fStopSearch = true; + } + } + + if (!fStopSearch) { + if (SECOND_SOFTWARE_VERSION < list[2].toInt()) { + fNewSoftwareFound = true; + } else { + if (SECOND_SOFTWARE_VERSION > list[2].toInt()) { + fStopSearch = true; + } + } + } + + if (!fStopSearch) { + if (THIRD_SOFTWARE_VERSION < list[3].toInt()) { + fNewSoftwareFound = true; + } + } + } + + if (fNewSoftwareFound) { + labelVersionUpdate->setToolTip(QString::fromStdString(strprintf("Currently running: %s\nLatest version: %s", SOFTWARE_VERSION, + latestVersion))); + labelVersionUpdate->show(); + + // Only display the message on startup to the user around 1/2 of the time + if (GetRandInt(2) == 1) { + bool fRet = uiInterface.ThreadSafeQuestion( + strprintf("\nCurrently running: %s\nLatest version: %s", SOFTWARE_VERSION, + latestVersion) + "\n\nWould you like to visit the 
releases page?", + "", + "New Wallet Version Found", + CClientUIInterface::MSG_VERSION | CClientUIInterface::BTN_NO); + if (fRet) { + QString link = "https://github.com/RavenProject/Ravencoin/releases"; + QDesktopServices::openUrl(QUrl(link)); + } + } + } else { + labelVersionUpdate->hide(); + } + } + } + } + ); + + getLatestVersion(); } } @@ -1456,7 +1572,16 @@ bool RavenGUI::handlePaymentRequest(const SendCoinsRecipient& recipient) void RavenGUI::setHDStatus(int hdEnabled) { - labelWalletHDStatusIcon->setPixmap(platformStyle->SingleColorIcon(hdEnabled ? ":/icons/hd_enabled" : ":/icons/hd_disabled").pixmap(STATUSBAR_ICONSIZE,STATUSBAR_ICONSIZE)); + QString icon = ""; + if (hdEnabled == HD_DISABLED) { + icon = ":/icons/hd_disabled"; + } else if (hdEnabled == HD_ENABLED) { + icon = ":/icons/hd_enabled"; + } else if (hdEnabled == HD44_ENABLED) { + icon = ":/icons/hd_enabled_44"; + } + + labelWalletHDStatusIcon->setPixmap(platformStyle->SingleColorIcon(icon).pixmap(STATUSBAR_ICONSIZE,STATUSBAR_ICONSIZE)); labelWalletHDStatusIcon->setToolTip(hdEnabled ? tr("HD key generation is enabled") : tr("HD key generation is disabled")); // eventually disable the QLabel to set its opacity to 50% @@ -1587,11 +1712,25 @@ static bool ThreadSafeMessageBox(RavenGUI *gui, const std::string& message, cons return ret; } +static bool ThreadSafeMnemonic(RavenGUI *gui, unsigned int style) +{ + bool modal = (style & CClientUIInterface::MODAL); + // The SECURE flag has no effect in the Qt GUI. + // bool secure = (style & CClientUIInterface::SECURE); + style &= ~CClientUIInterface::SECURE; + bool ret = false; + // In case of modal message, use blocking connection to wait for user to click a button + QMetaObject::invokeMethod(gui, "mnemonic", + modal ? 
GUIUtil::blockingGUIThreadConnection() : Qt::QueuedConnection); + return ret; +} + void RavenGUI::subscribeToCoreSignals() { // Connect signals to client uiInterface.ThreadSafeMessageBox.connect(boost::bind(ThreadSafeMessageBox, this, _1, _2, _3)); uiInterface.ThreadSafeQuestion.connect(boost::bind(ThreadSafeMessageBox, this, _1, _3, _4)); + uiInterface.ShowMnemonic.connect(boost::bind(ThreadSafeMnemonic, this, _1)); } void RavenGUI::unsubscribeFromCoreSignals() @@ -1599,6 +1738,7 @@ void RavenGUI::unsubscribeFromCoreSignals() // Disconnect signals from client uiInterface.ThreadSafeMessageBox.disconnect(boost::bind(ThreadSafeMessageBox, this, _1, _2, _3)); uiInterface.ThreadSafeQuestion.disconnect(boost::bind(ThreadSafeMessageBox, this, _1, _3, _4)); + uiInterface.ShowMnemonic.disconnect(boost::bind(ThreadSafeMnemonic, this, _1)); } void RavenGUI::toggleNetworkActive() @@ -1699,3 +1839,17 @@ void RavenGUI::getPriceInfo() request->setUrl(QUrl("https://api.binance.com/api/v1/ticker/price?symbol=RVNBTC")); networkManager->get(*request); } + +#ifdef ENABLE_WALLET +void RavenGUI::mnemonic() +{ + MnemonicDialog dlg(this); + dlg.exec(); +} +#endif + +void RavenGUI::getLatestVersion() +{ + versionRequest->setUrl(QUrl("https://api.github.com/repos/RavenProject/Ravencoin/releases")); + networkVersionManager->get(*versionRequest); +} \ No newline at end of file diff --git a/src/qt/ravengui.h b/src/qt/ravengui.h index 7aa1411320..d490d19087 100644 --- a/src/qt/ravengui.h +++ b/src/qt/ravengui.h @@ -71,6 +71,12 @@ class RavenGUI : public QMainWindow #endif // ENABLE_WALLET bool enableWallet; + enum { + HD_DISABLED = 0, + HD_ENABLED = 1, + HD44_ENABLED = 2 + }; + protected: void changeEvent(QEvent *e); void closeEvent(QCloseEvent *event); @@ -129,6 +135,9 @@ class RavenGUI : public QMainWindow QTimer *pricingTimer; QNetworkAccessManager* networkManager; QNetworkRequest* request; + QLabel *labelVersionUpdate; + QNetworkAccessManager* networkVersionManager; + QNetworkRequest* 
versionRequest; /** RVN END */ QSystemTrayIcon *trayIcon; @@ -197,6 +206,8 @@ public Q_SLOTS: void getPriceInfo(); + void getLatestVersion(); + #ifdef ENABLE_WALLET /** Set the encryption status as shown in the UI. @param[in] status current encryption status @@ -217,6 +228,8 @@ public Q_SLOTS: /** Show the assets button if assets are active */ void checkAssets(); + + void mnemonic(); #endif // ENABLE_WALLET private Q_SLOTS: @@ -229,7 +242,6 @@ private Q_SLOTS: void gotoReceiveCoinsPage(); /** Switch to send coins page */ void gotoSendCoinsPage(QString addr = ""); - /** Show Sign/Verify Message dialog and switch to sign message tab */ void gotoSignMessageTab(QString addr = ""); /** Show Sign/Verify Message dialog and switch to verify message tab */ diff --git a/src/qt/res/icons/hd_enabled_44.png b/src/qt/res/icons/hd_enabled_44.png new file mode 100644 index 0000000000..c549d95d39 Binary files /dev/null and b/src/qt/res/icons/hd_enabled_44.png differ diff --git a/src/qt/rpcconsole.cpp b/src/qt/rpcconsole.cpp index 34054f940f..246dbd055f 100644 --- a/src/qt/rpcconsole.cpp +++ b/src/qt/rpcconsole.cpp @@ -893,12 +893,26 @@ void RPCConsole::setMempoolSize(long numberOfTxs, size_t dynUsage) ui->mempoolSize->setText(QString::number(dynUsage/1000000.0, 'f', 2) + " MB"); } +bool ImportKeyConditionAccepted(QWidget * parent, QString& cmd) +{ + + if(!cmd.trimmed().startsWith("importprivkey")) return true; + + std::string msg = "the imported private keys are not recoverable with mnemonic words.\n" + "you have to backup your private key in order to use them as a recovery when needed\n" + "or to backup the whole wallet.dat file"; + + return QMessageBox::warning(parent, "Warning", msg.c_str() , QMessageBox::Ok | QMessageBox::Cancel, QMessageBox::Cancel) == QMessageBox::Ok; +} + void RPCConsole::on_lineEdit_returnPressed() { QString cmd = ui->lineEdit->text(); if(!cmd.isEmpty()) { + if(!ImportKeyConditionAccepted(this, cmd)) + return; std::string strFilteredCmd; try { std::string 
dummy; diff --git a/src/qt/sendassetsentry.cpp b/src/qt/sendassetsentry.cpp index e60923af7f..b7bfd2604d 100644 --- a/src/qt/sendassetsentry.cpp +++ b/src/qt/sendassetsentry.cpp @@ -269,7 +269,7 @@ bool SendAssetsEntry::validate() if (passets->CheckForGlobalRestriction(assetName)) { ui->assetSelectionBox->lineEdit()->setStyleSheet(STYLE_INVALID); ui->messageTextLabel->show(); - ui->messageTextLabel->setText(tr("This restricted asset have been frozen globally. No transfers can't we sent on the network.")); + ui->messageTextLabel->setText(tr("This restricted asset has been frozen globally. No transfers can be sent on the network.")); retval = false; } diff --git a/src/qt/walletmodel.cpp b/src/qt/walletmodel.cpp index bda5c75ab4..3709b749bf 100644 --- a/src/qt/walletmodel.cpp +++ b/src/qt/walletmodel.cpp @@ -858,6 +858,11 @@ bool WalletModel::hdEnabled() const return wallet->IsHDEnabled(); } +bool WalletModel::hd44Enabled() const +{ + return wallet->IsBip44Enabled(); +} + int WalletModel::getDefaultConfirmTarget() const { return nTxConfirmTarget; diff --git a/src/qt/walletmodel.h b/src/qt/walletmodel.h index c2241b3bd3..a83f20579e 100644 --- a/src/qt/walletmodel.h +++ b/src/qt/walletmodel.h @@ -288,6 +288,7 @@ class WalletModel : public QObject static bool isWalletEnabled(); bool hdEnabled() const; + bool hd44Enabled() const; int getDefaultConfirmTarget() const; diff --git a/src/qt/walletview.cpp b/src/qt/walletview.cpp index fbc725de70..2f6fd67ae6 100644 --- a/src/qt/walletview.cpp +++ b/src/qt/walletview.cpp @@ -186,7 +186,7 @@ void WalletView::setWalletModel(WalletModel *_walletModel) updateEncryptionStatus(); // update HD status - Q_EMIT hdEnabledStatusChanged(_walletModel->hdEnabled()); + Q_EMIT hdEnabledStatusChanged(_walletModel->hd44Enabled() ? RavenGUI::HD44_ENABLED : _walletModel->hdEnabled() ? 
RavenGUI::HD_ENABLED : RavenGUI::HD_DISABLED); // Balloon pop-up for new transaction connect(_walletModel->getTransactionTableModel(), SIGNAL(rowsInserted(QModelIndex,int,int)), diff --git a/src/rpc/assets.cpp b/src/rpc/assets.cpp index 464e30ec78..91633e82c5 100644 --- a/src/rpc/assets.cpp +++ b/src/rpc/assets.cpp @@ -159,9 +159,11 @@ UniValue UpdateAddressTag(const JSONRPCRequest &request, const int8_t &flag) std::string change_address = ""; if (request.params.size() > 2) { change_address = request.params[2].get_str(); - CTxDestination change_dest = DecodeDestination(change_address); - if (!IsValidDestination(change_dest)) { - throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, std::string("Invalid Raven change address: ") + change_address); + if (!change_address.empty()) { + CTxDestination change_dest = DecodeDestination(change_address); + if (!IsValidDestination(change_dest)) { + throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, std::string("Invalid Raven change address: ") + change_address); + } } } @@ -257,9 +259,11 @@ UniValue UpdateAddressRestriction(const JSONRPCRequest &request, const int8_t &f std::string change_address = ""; if (request.params.size() > 2) { change_address = request.params[2].get_str(); - CTxDestination change_dest = DecodeDestination(change_address); - if (!IsValidDestination(change_dest)) { - throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, std::string("Invalid Raven change address: ") + change_address); + if (!change_address.empty()) { + CTxDestination change_dest = DecodeDestination(change_address); + if (!IsValidDestination(change_dest)) { + throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, std::string("Invalid Raven change address: ") + change_address); + } } } @@ -352,9 +356,11 @@ UniValue UpdateGlobalRestrictedAsset(const JSONRPCRequest &request, const int8_t std::string change_address = ""; if (request.params.size() > 1) { change_address = request.params[1].get_str(); - CTxDestination change_dest = DecodeDestination(change_address); - if 
(!IsValidDestination(change_dest)) { - throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, std::string("Invalid Raven change address: ") + change_address); + if (!change_address.empty()) { + CTxDestination change_dest = DecodeDestination(change_address); + if (!IsValidDestination(change_dest)) { + throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, std::string("Invalid Raven change address: ") + change_address); + } } } diff --git a/src/rpc/blockchain.cpp b/src/rpc/blockchain.cpp index 6847433a67..3edb09a0d8 100644 --- a/src/rpc/blockchain.cpp +++ b/src/rpc/blockchain.cpp @@ -251,6 +251,9 @@ UniValue blockToJSON(const CBlock& block, const CBlockIndex* blockindex, bool tx result.push_back(Pair("bits", strprintf("%08x", block.nBits))); result.push_back(Pair("difficulty", GetDifficulty(blockindex))); result.push_back(Pair("chainwork", blockindex->nChainWork.GetHex())); + result.push_back(Pair("headerhash", block.GetKAWPOWHeaderHash().GetHex())); + result.push_back(Pair("mixhash", block.mix_hash.GetHex())); + result.push_back(Pair("nonce64", (uint64_t)block.nNonce64)); if (blockindex->pprev) result.push_back(Pair("previousblockhash", blockindex->pprev->GetBlockHash().GetHex())); @@ -1429,6 +1432,7 @@ UniValue getblockchaininfo(const JSONRPCRequest& request) //BIP9SoftForkDescPushBack(bip9_softforks, "segwit", consensusParams, Consensus::DEPLOYMENT_SEGWIT); BIP9SoftForkDescPushBack(bip9_softforks, "assets", consensusParams, Consensus::DEPLOYMENT_ASSETS); BIP9SoftForkDescPushBack(bip9_softforks, "messaging_restricted", consensusParams, Consensus::DEPLOYMENT_MSG_REST_ASSETS); + BIP9SoftForkDescPushBack(bip9_softforks, "transfer_script", consensusParams, Consensus::DEPLOYMENT_TRANSFER_SCRIPT_SIZE); obj.push_back(Pair("softforks", softforks)); obj.push_back(Pair("bip9_softforks", bip9_softforks)); diff --git a/src/rpc/blockchain.h b/src/rpc/blockchain.h index 8227acd954..f22e1a6aad 100644 --- a/src/rpc/blockchain.h +++ b/src/rpc/blockchain.h @@ -5,11 +5,17 @@ #ifndef 
RAVEN_RPC_BLOCKCHAIN_H #define RAVEN_RPC_BLOCKCHAIN_H +#include +#include class CBlock; class CBlockIndex; class UniValue; + +// To be used by local rpc GPU mining only +extern std::map mapRVNKAWBlockTemplates; + /** * Get the difficulty of the net wrt to the given block index, or the chain tip if * not provided. diff --git a/src/rpc/client.cpp b/src/rpc/client.cpp index 96c4f55c29..86f18b6f30 100644 --- a/src/rpc/client.cpp +++ b/src/rpc/client.cpp @@ -137,6 +137,8 @@ static const CRPCConvertParam vRPCConvertParams[] = { "signrawtransaction", 1, "prevtxs" }, { "signrawtransaction", 2, "privkeys" }, { "sendrawtransaction", 1, "allowhighfees" }, + { "testmempoolaccept", 0, "rawtxs" }, + { "testmempoolaccept", 1, "allowhighfees" }, { "combinerawtransaction", 0, "txs" }, { "fundrawtransaction", 1, "options" }, { "gettxout", 1, "n" }, @@ -208,11 +210,12 @@ static const CRPCConvertParam vRPCConvertParams[] = { "cancelsnapshotrequest", 1, "block_height"}, { "distributereward", 1, "snapshot_height"}, { "distributereward", 3, "gross_distribution_amount"}, - { "distributereward", 6, "dry_run"}, { "getdistributestatus", 1, "snapshot_height"}, { "getdistributestatus", 3, "gross_distribution_amount"}, { "getsnapshot", 1, "block_height"}, { "purgesnapshot", 1, "block_height"}, + { "stop", 0, "wait"}, + { "getkawpowhash", 3, "height"}, }; class CRPCConvertTable diff --git a/src/rpc/mining.cpp b/src/rpc/mining.cpp index a3d31348c2..1f899e86b1 100644 --- a/src/rpc/mining.cpp +++ b/src/rpc/mining.cpp @@ -31,9 +31,14 @@ #include #include +#include +#include +#include extern uint64_t nHashesPerSec; +std::map mapRVNKAWBlockTemplates; + unsigned int ParseConfirmTarget(const UniValue& value) { int target = value.get_int(); @@ -131,17 +136,26 @@ UniValue generateBlocks(std::shared_ptr coinbaseScript, int nGen LOCK(cs_main); IncrementExtraNonce(pblock, chainActive.Tip(), nExtraNonce); } - while (nMaxTries > 0 && pblock->nNonce < nInnerLoopCount && !CheckProofOfWork(pblock->GetHash(), 
pblock->nBits, + uint256 mix_hash; + while (nMaxTries > 0 && pblock->nNonce < nInnerLoopCount && !CheckProofOfWork(pblock->GetHashFull(mix_hash), pblock->nBits, GetParams().GetConsensus())) { - ++pblock->nNonce; + if (pblock->nTime < nKAWPOWActivationTime) { + ++pblock->nNonce; + } else { + ++pblock->nNonce64; + } --nMaxTries; } if (nMaxTries == 0) { break; } - if (pblock->nNonce == nInnerLoopCount) { + if (pblock->nNonce == nInnerLoopCount || pblock->nNonce64 == nInnerLoopCount) { continue; } + + // KAWPOW Assign the mix_hash to the block that was found + pblock->mix_hash = mix_hash; + std::shared_ptr shared_pblock = std::make_shared(*pblock); if (!ProcessNewBlock(GetParams(), shared_pblock, true, nullptr)) throw JSONRPCError(RPC_INTERNAL_ERROR, "ProcessNewBlock, block not accepted"); @@ -448,7 +462,7 @@ UniValue getblocktemplate(const JSONRPCRequest& request) if(!g_connman) throw JSONRPCError(RPC_CLIENT_P2P_DISABLED, "Error: Peer-to-peer functionality missing or disabled"); - if (g_connman->GetNodeCount(CConnman::CONNECTIONS_ALL) == 0) + if (g_connman->GetNodeCount(CConnman::CONNECTIONS_ALL) == 0 && !gArgs.GetBoolArg("-bypassdownload", false)) throw JSONRPCError(RPC_CLIENT_NOT_CONNECTED, "Raven is not connected!"); if (IsInitialBlockDownload() && !gArgs.GetBoolArg("-bypassdownload", false)) @@ -521,6 +535,7 @@ UniValue getblocktemplate(const JSONRPCRequest& request) { // Clear pindexPrev so future calls make a new block, despite any failures from here on pindexPrev = nullptr; + mapRVNKAWBlockTemplates.clear(); // Store the pindexBest used before CreateNewBlock, to avoid races nTransactionsUpdatedLast = mempool.GetTransactionsUpdated(); @@ -529,8 +544,22 @@ UniValue getblocktemplate(const JSONRPCRequest& request) fLastTemplateSupportsSegwit = fSupportsSegwit; // Create new block - CScript scriptDummy = CScript() << OP_TRUE; - pblocktemplate = BlockAssembler(GetParams()).CreateNewBlock(scriptDummy, fSupportsSegwit); + // Get mining address if it is set + CScript 
script; + std::string address = gArgs.GetArg("-miningaddress", ""); + if (!address.empty()) { + CTxDestination dest = DecodeDestination(address); + + if (IsValidDestination(dest)) { + script = GetScriptForDestination(dest); + } else { + throw JSONRPCError(RPC_INVALID_PARAMETER, "-miningaddress is not a valid address. Please use a valid address"); + } + } else { + script = CScript() << OP_TRUE; + } + + pblocktemplate = BlockAssembler(GetParams()).CreateNewBlock(script, fSupportsSegwit); if (!pblocktemplate) throw JSONRPCError(RPC_OUT_OF_MEMORY, "Out of memory"); @@ -685,6 +714,26 @@ UniValue getblocktemplate(const JSONRPCRequest& request) result.push_back(Pair("default_witness_commitment", HexStr(pblocktemplate->vchCoinbaseCommitment.begin(), pblocktemplate->vchCoinbaseCommitment.end()))); } + if (pblock->nTime >= nKAWPOWActivationTime) { + std::string address = gArgs.GetArg("-miningaddress", ""); + if (IsValidDestinationString(address)) { + static std::string lastheader = ""; + if (mapRVNKAWBlockTemplates.count(lastheader)) { + if (pblock->nTime - 30 < mapRVNKAWBlockTemplates.at(lastheader).nTime) { + result.pushKV("pprpcheader", lastheader); + result.pushKV("pprpcepoch", ethash::get_epoch_number(pblock->nHeight)); + return result; + } + } + + pblock->hashMerkleRoot = BlockMerkleRoot(*pblock); + result.pushKV("pprpcheader", pblock->GetKAWPOWHeaderHash().GetHex()); + result.pushKV("pprpcepoch", ethash::get_epoch_number(pblock->nHeight)); + mapRVNKAWBlockTemplates[pblock->GetKAWPOWHeaderHash().GetHex()] = *pblock; + lastheader = pblock->GetKAWPOWHeaderHash().GetHex(); + } + } + return result; } @@ -706,6 +755,177 @@ class submitblock_StateCatcher : public CValidationInterface } }; +static UniValue getkawpowhash(const JSONRPCRequest& request) { + if (request.fHelp || request.params.size() < 4) { + throw std::runtime_error( + "getkawpowhash \"header_hash\" \"mix_hash\" nonce, height, \"target\"\n" + "\nGet the kawpow hash for a block given its block data\n" + + 
"\nArguments\n" + "1. \"header_hash\" (string, required) the prow_pow header hash that was given to the gpu miner from this rpc client\n" + "2. \"mix_hash\" (string, required) the mix hash that was mined by the gpu miner via rpc\n" + "3. \"nonce\" (string, required) the hex nonce of the block that hashed the valid block\n" + "4. \"height\" (number, required) the height of the block data that is being hashed\n" + "5. \"target\" (string, optional) the target of the block that is hash is trying to meet\n" + "\nResult:\n" + "\nExamples:\n" + + HelpExampleCli("getkawpowhash", "\"header_hash\" \"mix_hash\" \"0x100000\" 2456") + + HelpExampleRpc("getkawpowhash", "\"header_hash\" \"mix_hash\" \"0x100000\" 2456") + ); + } + + std::string str_header_hash = request.params[0].get_str(); + std::string mix_hash = request.params[1].get_str(); + std::string hex_nonce = request.params[2].get_str(); + uint32_t nHeight = request.params[3].get_uint(); + + uint64_t nNonce; + if (!ParseUInt64(hex_nonce, &nNonce, 16)) + throw JSONRPCError(RPC_INVALID_PARAMS, "Invalid nonce hex string"); + + if (nHeight > (uint32_t)chainActive.Height() + 10) + throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "Block height is to large"); + + const auto header_hash = to_hash256(str_header_hash); + + uint256 target; + bool fCheckTarget = false; + if (request.params.size() == 5) { + target = uint256S(request.params[4].get_str()); + fCheckTarget = true; + } + + static ethash::epoch_context_ptr context{nullptr, nullptr}; + + // Get the context from the block height + const auto epoch_number = ethash::get_epoch_number(nHeight); + if (!context || context->epoch_number != epoch_number) + context = ethash::create_epoch_context(epoch_number); + + // ProgPow hash + const auto result = progpow::hash(*context, nHeight, header_hash, nNonce); + + uint256 mined_mix_hash = uint256S(to_hex(result.mix_hash)); + uint256 mined_final_hash = uint256S(to_hex(result.final_hash)); + + bool mix_hash_match = false; + bool 
final_hash_meets_target = false; + + if (mined_mix_hash == uint256S(mix_hash)) + mix_hash_match = true; + + if (fCheckTarget) { + arith_uint256 boundary = UintToArith256(target); + // Check proof of work matches claimed amount + if (UintToArith256(mined_final_hash) <= boundary) + final_hash_meets_target = true; + } + + UniValue ret(UniValue::VOBJ); + ret.pushKV("result", mix_hash_match ? "true" : "false"); + ret.pushKV("digest", mined_final_hash.GetHex()); + ret.pushKV("mix_hash", mined_mix_hash.GetHex()); + ret.pushKV("info", ""); + if (fCheckTarget) + ret.pushKV("meets_target", final_hash_meets_target ? "true" : "false"); + + + return ret; +} + +static UniValue pprpcsb(const JSONRPCRequest& request) { + if (request.fHelp || request.params.size() != 3) { + throw std::runtime_error( + "pprpcsb \"header_hash\" \"mix_hash\" \"nonce\"\n" + "\nAttempts to submit new block to network mined by kawpow gpu miner via rpc.\n" + + "\nArguments\n" + "1. \"header_hash\" (string, required) the prow_pow header hash that was given to the gpu miner from this rpc client\n" + "2. \"mix_hash\" (string, required) the mix hash that was mined by the gpu miner via rpc\n" + "3. 
\"nonce\" (string, required) the nonce of the block that hashed the valid block\n" + "\nResult:\n" + "\nExamples:\n" + + HelpExampleCli("pprpcsb", "\"header_hash\" \"mix_hash\" 100000") + + HelpExampleRpc("pprpcsb", "\"header_hash\" \"mix_hash\" 100000") + ); + } + + std::string header_hash = request.params[0].get_str(); + std::string mix_hash = request.params[1].get_str(); + std::string str_nonce = request.params[2].get_str(); + + uint64_t nonce; + if (!ParseUInt64(str_nonce, &nonce, 16)) + throw JSONRPCError(RPC_INVALID_PARAMS, "Invalid hex nonce"); + + if (!mapRVNKAWBlockTemplates.count(header_hash)) + throw JSONRPCError(RPC_INVALID_PARAMS, "Block header hash not found in block data"); + + std::shared_ptr blockptr = std::make_shared(); + *blockptr = mapRVNKAWBlockTemplates.at(header_hash); + + blockptr->nNonce64 = nonce; + blockptr->mix_hash = uint256S(mix_hash); + + if (blockptr->vtx.empty() || !blockptr->vtx[0]->IsCoinBase()) { + throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "Block does not start with a coinbase"); + } + + uint256 retMixHash; + if (!CheckProofOfWork(blockptr->GetHashFull(retMixHash), blockptr->nBits, GetParams().GetConsensus())) + throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "Block does not solve the boundary"); + + + uint256 hash = blockptr->GetHash(); + + bool fBlockPresent = false; + { + LOCK(cs_main); + BlockMap::iterator mi = mapBlockIndex.find(hash); + if (mi != mapBlockIndex.end()) { + CBlockIndex *pindex = mi->second; + if (pindex->IsValid(BLOCK_VALID_SCRIPTS)) { + return "duplicate"; + } + if (pindex->nStatus & BLOCK_FAILED_MASK) { + return "duplicate-invalid"; + } + // Otherwise, we might only have the header - process the block before returning + fBlockPresent = true; + } + } + + { + LOCK(cs_main); + BlockMap::iterator mi = mapBlockIndex.find(blockptr->hashPrevBlock); + if (mi != mapBlockIndex.end()) { + UpdateUncommittedBlockStructures(*blockptr, mi->second, GetParams().GetConsensus()); + } + } + + submitblock_StateCatcher 
sc(blockptr->GetHash()); + RegisterValidationInterface(&sc); + bool fAccepted = ProcessNewBlock(GetParams(), blockptr, true, nullptr); + UnregisterValidationInterface(&sc); + if (fBlockPresent) { + if (fAccepted && !sc.found) { + return "duplicate-inconclusive"; + } + return "duplicate"; + } + if (!sc.found) { + return "inconclusive"; + } + UniValue ret = BIP22ValidationResult(sc.state); + + // BIP22ValidationResult set the return to null when the state is valid + if (ret.isNull()) { + return true; + } else { + return ret; + } +} + UniValue submitblock(const JSONRPCRequest& request) { // We allow 2 arguments for compliance with BIP22. Argument 2 is ignored. @@ -735,6 +955,9 @@ UniValue submitblock(const JSONRPCRequest& request) throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "Block does not start with a coinbase"); } + if (block.nHeight > (uint32_t)chainActive.Height() + 10) + throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "Block height is to large"); + uint256 hash = block.GetHash(); bool fBlockPresent = false; @@ -1060,6 +1283,8 @@ static const CRPCCommand commands[] = { "mining", "prioritisetransaction", &prioritisetransaction, {"txid","dummy","fee_delta"} }, { "mining", "getblocktemplate", &getblocktemplate, {"template_request"} }, { "mining", "submitblock", &submitblock, {"hexdata","dummy"} }, + { "mining", "pprpcsb", &pprpcsb, {"header_hash","mix_hash", "nonce"} }, + { "mining", "getkawpowhash", &getkawpowhash, {"header_hash", "mix_hash", "nonce", "height"} }, /* Coin generation */ { "generating", "getgenerate", &getgenerate, {} }, diff --git a/src/rpc/rawtransaction.cpp b/src/rpc/rawtransaction.cpp index 1d30f84858..8bcc819607 100644 --- a/src/rpc/rawtransaction.cpp +++ b/src/rpc/rawtransaction.cpp @@ -2119,6 +2119,87 @@ UniValue sendrawtransaction(const JSONRPCRequest& request) return hashTx.GetHex(); } +UniValue testmempoolaccept(const JSONRPCRequest& request) +{ + if (request.fHelp || request.params.size() < 1 || request.params.size() > 2) { + throw 
std::runtime_error( + // clang-format off + "testmempoolaccept [\"rawtxs\"] ( allowhighfees )\n" + "\nReturns if raw transaction (serialized, hex-encoded) would be accepted by mempool.\n" + "\nThis checks if the transaction violates the consensus or policy rules.\n" + "\nSee sendrawtransaction call.\n" + "\nArguments:\n" + "1. [\"rawtxs\"] (array, required) An array of hex strings of raw transactions.\n" + " Length must be one for now.\n" + "2. allowhighfees (boolean, optional, default=false) Allow high fees\n" + "\nResult:\n" + "[ (array) The result of the mempool acceptance test for each raw transaction in the input array.\n" + " Length is exactly one for now.\n" + " {\n" + " \"txid\" (string) The transaction hash in hex\n" + " \"allowed\" (boolean) If the mempool allows this tx to be inserted\n" + " \"reject-reason\" (string) Rejection string (only present when 'allowed' is false)\n" + " }\n" + "]\n" + "\nExamples:\n" + "\nCreate a transaction\n" + + HelpExampleCli("createrawtransaction", "\"[{\\\"txid\\\" : \\\"mytxid\\\",\\\"vout\\\":0}]\" \"{\\\"myaddress\\\":0.01}\"") + + "Sign the transaction, and get back the hex\n" + + HelpExampleCli("signrawtransaction", "\"myhex\"") + + "\nTest acceptance of the transaction (signed hex)\n" + + HelpExampleCli("testmempoolaccept", "\"signedhex\"") + + "\nAs a json rpc call\n" + + HelpExampleRpc("testmempoolaccept", "[\"signedhex\"]") + // clang-format on + ); + } + + ObserveSafeMode(); + + RPCTypeCheck(request.params, {UniValue::VARR, UniValue::VBOOL}); + if (request.params[0].get_array().size() != 1) { + throw JSONRPCError(RPC_INVALID_PARAMETER, "Array must contain exactly one raw transaction for now"); + } + + CMutableTransaction mtx; + if (!DecodeHexTx(mtx, request.params[0].get_array()[0].get_str())) { + throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX decode failed"); + } + CTransactionRef tx(MakeTransactionRef(std::move(mtx))); + const uint256& tx_hash = tx->GetHash(); + + CAmount max_raw_tx_fee = ::maxTxFee; + if 
(!request.params[1].isNull() && request.params[1].get_bool()) { + max_raw_tx_fee = 0; + } + + UniValue result(UniValue::VARR); + UniValue result_0(UniValue::VOBJ); + result_0.pushKV("txid", tx_hash.GetHex()); + + CValidationState state; + bool missing_inputs; + bool test_accept_res; + { + LOCK(cs_main); + test_accept_res = AcceptToMemoryPool(mempool, state, std::move(tx), &missing_inputs, + nullptr /* plTxnReplaced */, false /* bypass_limits */, max_raw_tx_fee, /* test_accpet */ true); + } + result_0.pushKV("allowed", test_accept_res); + if (!test_accept_res) { + if (state.IsInvalid()) { + result_0.pushKV("reject-reason", strprintf("%i: %s", state.GetRejectCode(), state.GetRejectReason())); + } else if (missing_inputs) { + result_0.pushKV("reject-reason", "missing-inputs"); + } else { + result_0.pushKV("reject-reason", state.GetRejectReason()); + } + } + + result.push_back(std::move(result_0)); + return result; +} + static const CRPCCommand commands[] = { // category name actor (function) argNames // --------------------- ------------------------ ----------------------- ---------- @@ -2129,7 +2210,7 @@ static const CRPCCommand commands[] = { "rawtransactions", "sendrawtransaction", &sendrawtransaction, {"hexstring","allowhighfees"} }, { "rawtransactions", "combinerawtransaction", &combinerawtransaction, {"txs"} }, { "rawtransactions", "signrawtransaction", &signrawtransaction, {"hexstring","prevtxs","privkeys","sighashtype"} }, /* uses wallet if enabled */ - + { "rawtransactions", "testmempoolaccept", &testmempoolaccept, {"rawtxs","allowhighfees"} }, { "blockchain", "gettxoutproof", &gettxoutproof, {"txids", "blockhash"} }, { "blockchain", "verifytxoutproof", &verifytxoutproof, {"proof"} }, }; diff --git a/src/rpc/rewards.cpp b/src/rpc/rewards.cpp index 772b777283..82f1b275e6 100644 --- a/src/rpc/rewards.cpp +++ b/src/rpc/rewards.cpp @@ -298,7 +298,6 @@ UniValue distributereward(const JSONRPCRequest& request) { "4. 
\"gross_distribution_amount\" (number, required) The amount of the distribution asset that will be split amongst all owners\n" "5. \"exception_addresses\" (string, optional) Ownership addresses that should be excluded\n" "6. \"change_address\" (string, optional) If the rewards can't be fully distributed. The change will be sent to this address\n" - "7. \"dry_run\" (bool, optional) If you would like to do a dry run of what the distribution would look like\n" "\nResult:\n" "{\n" @@ -361,11 +360,6 @@ UniValue distributereward(const JSONRPCRequest& request) { throw JSONRPCError(RPC_INVALID_PARAMETER, std::string("Invalid change address: Use a valid RVN address")); } - bool fDryRun = false; - if (request.params.size() > 6) { - fDryRun = request.params[6].get_bool(); - } - AssetType ownershipAssetType; AssetType distributionAssetType; @@ -495,7 +489,7 @@ static const CRPCCommand commands[] = { "rewards", "getsnapshotrequest", &getsnapshotrequest, {"asset_name", "block_height"}}, { "rewards", "listsnapshotrequests", &listsnapshotrequests, {"asset_name", "block_height"}}, { "rewards", "cancelsnapshotrequest", &cancelsnapshotrequest, {"asset_name", "block_height"}}, - { "rewards", "distributereward", &distributereward, {"asset_name", "snapshot_height", "distribution_asset_name", "gross_distribution_amount", "exception_addresses", "change_address", "dry_run"}}, + { "rewards", "distributereward", &distributereward, {"asset_name", "snapshot_height", "distribution_asset_name", "gross_distribution_amount", "exception_addresses", "change_address"}}, { "rewards", "getdistributestatus", &getdistributestatus, {"asset_name", "block_height", "distribution_asset_name", "gross_distribution_amount", "exception_addresses"}} #endif }; diff --git a/src/rpc/server.cpp b/src/rpc/server.cpp index d63c244976..6449b02cc8 100644 --- a/src/rpc/server.cpp +++ b/src/rpc/server.cpp @@ -269,6 +269,9 @@ UniValue help(const JSONRPCRequest& jsonRequest) UniValue stop(const JSONRPCRequest& jsonRequest) { 
// Accept the deprecated and ignored 'detach' boolean argument + // Also accept the hidden 'wait' integer argument (milliseconds) + // For instance, 'stop 1000' makes the call wait 1 second before returning + // to the client (intended for testing) if (jsonRequest.fHelp || jsonRequest.params.size() > 1) throw std::runtime_error( "stop\n" @@ -276,6 +279,9 @@ UniValue stop(const JSONRPCRequest& jsonRequest) // Event loop will exit after current HTTP requests have been handled, so // this reply will get back to the client. StartShutdown(); + if (jsonRequest.params[0].isNum()) { + MilliSleep(jsonRequest.params[0].get_int()); + } return "Raven server stopping"; } @@ -342,7 +348,7 @@ static const CRPCCommand vRPCCommands[] = /* Overall control/query calls */ { "control", "getrpcinfo", &getrpcinfo, {} }, { "control", "help", &help, {"command"} }, - { "control", "stop", &stop, {} }, + { "control", "stop", &stop, {"wait"} }, { "control", "uptime", &uptime, {} }, diff --git a/src/support/allocators/secure.h b/src/support/allocators/secure.h index 6c227157a2..b0269a12d2 100644 --- a/src/support/allocators/secure.h +++ b/src/support/allocators/secure.h @@ -11,6 +11,7 @@ #include "support/cleanse.h" #include +#include // // Allocator that locks its contents from being paged @@ -55,5 +56,6 @@ struct secure_allocator : public std::allocator { // This is exactly like std::string, but with a custom allocator. typedef std::basic_string, secure_allocator > SecureString; +typedef std::vector > SecureVector; #endif // RAVEN_SUPPORT_ALLOCATORS_SECURE_H diff --git a/src/test/bip39_tests.cpp b/src/test/bip39_tests.cpp new file mode 100644 index 0000000000..2b525dca8f --- /dev/null +++ b/src/test/bip39_tests.cpp @@ -0,0 +1,67 @@ +// +// Created by ROSHii on 2019-06-01. 
+// + +#include "base58.h" +#include "data/bip39_vectors.json.h" +#include "key.h" +#include "util.h" +#include "utilstrencodings.h" +#include "test/test_raven.h" +#include "wallet/bip39.h" + +#include + +#include + +// In script_tests.cpp +extern UniValue read_json(const std::string& jsondata); + +BOOST_FIXTURE_TEST_SUITE(bip39_tests, BasicTestingSetup) + +// https://github.com/trezor/python-mnemonic/blob/b502451a33a440783926e04428115e0bed87d01f/vectors.json +BOOST_AUTO_TEST_CASE(bip39_vectors) +{ + UniValue tests = read_json(std::string(json_tests::bip39_vectors, json_tests::bip39_vectors + sizeof(json_tests::bip39_vectors))); + + for (unsigned int i = 0; i < tests.size(); i++) { + // printf("%d\n", i); + UniValue test = tests[i]; + std::string strTest = test.write(); + if (test.size() < 4) // Allow for extra stuff (useful for comments) + { + BOOST_ERROR("Bad test: " << strTest); + continue; + } + + std::vector vData = ParseHex(test[0].get_str()); + SecureVector data(vData.begin(), vData.end()); + + SecureString m = CMnemonic::FromData(data, data.size()); + std::string strMnemonic = test[1].get_str(); + SecureString mnemonic(strMnemonic.begin(), strMnemonic.end()); + + // printf("%s\n%s\n", m.c_str(), mnemonic.c_str()); + BOOST_CHECK(m == mnemonic); + BOOST_CHECK(CMnemonic::Check(mnemonic)); + + SecureVector seed; + SecureString passphrase("TREZOR"); + CMnemonic::ToSeed(mnemonic, passphrase, seed); + // printf("seed: %s\n", HexStr(seed).c_str()); + BOOST_CHECK(HexStr(seed) == test[2].get_str()); + + CExtKey key; + CExtPubKey pubkey; + + key.SetSeed(&seed[0], 64); + pubkey = key.Neuter(); + + CRavenExtKey b58key; + b58key.SetKey(key); + // printf("CRavenExtKey: %s\n", b58key.ToString().c_str()); + BOOST_CHECK(b58key.ToString() == test[3].get_str()); + } +} + +BOOST_AUTO_TEST_SUITE_END() \ No newline at end of file diff --git a/src/test/data/bip39_vectors.json b/src/test/data/bip39_vectors.json new file mode 100644 index 0000000000..704be4c088 --- /dev/null +++ 
b/src/test/data/bip39_vectors.json @@ -0,0 +1,146 @@ +[ +[ +"00000000000000000000000000000000", +"abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about", +"c55257c360c07c72029aebc1b53c05ed0362ada38ead3e3e9efa3708e53495531f09a6987599d18264c1e1c92f2cf141630c7a3c4ab7c81b2f001698e7463b04", +"xprv9s21ZrQH143K3h3fDYiay8mocZ3afhfULfb5GX8kCBdno77K4HiA15Tg23wpbeF1pLfs1c5SPmYHrEpTuuRhxMwvKDwqdKiGJS9XFKzUsAF" +], +[ +"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", +"legal winner thank year wave sausage worth useful legal winner thank yellow", +"2e8905819b8723fe2c1d161860e5ee1830318dbf49a83bd451cfb8440c28bd6fa457fe1296106559a3c80937a1c1069be3a3a5bd381ee6260e8d9739fce1f607", +"xprv9s21ZrQH143K2gA81bYFHqU68xz1cX2APaSq5tt6MFSLeXnCKV1RVUJt9FWNTbrrryem4ZckN8k4Ls1H6nwdvDTvnV7zEXs2HgPezuVccsq" +], +[ +"80808080808080808080808080808080", +"letter advice cage absurd amount doctor acoustic avoid letter advice cage above", +"d71de856f81a8acc65e6fc851a38d4d7ec216fd0796d0a6827a3ad6ed5511a30fa280f12eb2e47ed2ac03b5c462a0358d18d69fe4f985ec81778c1b370b652a8", +"xprv9s21ZrQH143K2shfP28KM3nr5Ap1SXjz8gc2rAqqMEynmjt6o1qboCDpxckqXavCwdnYds6yBHZGKHv7ef2eTXy461PXUjBFQg6PrwY4Gzq" +], +[ +"ffffffffffffffffffffffffffffffff", +"zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo wrong", +"ac27495480225222079d7be181583751e86f571027b0497b5b5d11218e0a8a13332572917f0f8e5a589620c6f15b11c61dee327651a14c34e18231052e48c069", +"xprv9s21ZrQH143K2V4oox4M8Zmhi2Fjx5XK4Lf7GKRvPSgydU3mjZuKGCTg7UPiBUD7ydVPvSLtg9hjp7MQTYsW67rZHAXeccqYqrsx8LcXnyd" +], +[ +"000000000000000000000000000000000000000000000000", +"abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon agent", +"035895f2f481b1b0f01fcf8c289c794660b289981a78f8106447707fdd9666ca06da5a9a565181599b79f53b844d8a71dd9f439c52a3d7b3e8a79c906ac845fa", +"xprv9s21ZrQH143K3mEDrypcZ2usWqFgzKB6jBBx9B6GfC7fu26X6hPRzVjzkqkPvDqp6g5eypdk6cyhGnBngbjeHTe4LsuLG1cCmKJka5SMkmU" +], +[ 
+"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", +"legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth useful legal will", +"f2b94508732bcbacbcc020faefecfc89feafa6649a5491b8c952cede496c214a0c7b3c392d168748f2d4a612bada0753b52a1c7ac53c1e93abd5c6320b9e95dd", +"xprv9s21ZrQH143K3Lv9MZLj16np5GzLe7tDKQfVusBni7toqJGcnKRtHSxUwbKUyUWiwpK55g1DUSsw76TF1T93VT4gz4wt5RM23pkaQLnvBh7" +], +[ +"808080808080808080808080808080808080808080808080", +"letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic avoid letter always", +"107d7c02a5aa6f38c58083ff74f04c607c2d2c0ecc55501dadd72d025b751bc27fe913ffb796f841c49b1d33b610cf0e91d3aa239027f5e99fe4ce9e5088cd65", +"xprv9s21ZrQH143K3VPCbxbUtpkh9pRG371UCLDz3BjceqP1jz7XZsQ5EnNkYAEkfeZp62cDNj13ZTEVG1TEro9sZ9grfRmcYWLBhCocViKEJae" +], +[ +"ffffffffffffffffffffffffffffffffffffffffffffffff", +"zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo when", +"0cd6e5d827bb62eb8fc1e262254223817fd068a74b5b449cc2f667c3f1f985a76379b43348d952e2265b4cd129090758b3e3c2c49103b5051aac2eaeb890a528", +"xprv9s21ZrQH143K36Ao5jHRVhFGDbLP6FCx8BEEmpru77ef3bmA928BxsqvVM27WnvvyfWywiFN8K6yToqMaGYfzS6Db1EHAXT5TuyCLBXUfdm" +], +[ +"0000000000000000000000000000000000000000000000000000000000000000", +"abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon art", +"bda85446c68413707090a52022edd26a1c9462295029f2e60cd7c4f2bbd3097170af7a4d73245cafa9c3cca8d561a7c3de6f5d4a10be8ed2a5e608d68f92fcc8", +"xprv9s21ZrQH143K32qBagUJAMU2LsHg3ka7jqMcV98Y7gVeVyNStwYS3U7yVVoDZ4btbRNf4h6ibWpY22iRmXq35qgLs79f312g2kj5539ebPM" +], +[ +"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", +"legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth title", 
+"bc09fca1804f7e69da93c2f2028eb238c227f2e9dda30cd63699232578480a4021b146ad717fbb7e451ce9eb835f43620bf5c514db0f8add49f5d121449d3e87", +"xprv9s21ZrQH143K3Y1sd2XVu9wtqxJRvybCfAetjUrMMco6r3v9qZTBeXiBZkS8JxWbcGJZyio8TrZtm6pkbzG8SYt1sxwNLh3Wx7to5pgiVFU" +], +[ +"8080808080808080808080808080808080808080808080808080808080808080", +"letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic bless", +"c0c519bd0e91a2ed54357d9d1ebef6f5af218a153624cf4f2da911a0ed8f7a09e2ef61af0aca007096df430022f7a2b6fb91661a9589097069720d015e4e982f", +"xprv9s21ZrQH143K3CSnQNYC3MqAAqHwxeTLhDbhF43A4ss4ciWNmCY9zQGvAKUSqVUf2vPHBTSE1rB2pg4avopqSiLVzXEU8KziNnVPauTqLRo" +], +[ +"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", +"zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo vote", +"dd48c104698c30cfe2b6142103248622fb7bb0ff692eebb00089b32d22484e1613912f0a5b694407be899ffd31ed3992c456cdf60f5d4564b8ba3f05a69890ad", +"xprv9s21ZrQH143K2WFF16X85T2QCpndrGwx6GueB72Zf3AHwHJaknRXNF37ZmDrtHrrLSHvbuRejXcnYxoZKvRquTPyp2JiNG3XcjQyzSEgqCB" +], +[ +"9e885d952ad362caeb4efe34a8e91bd2", +"ozone drill grab fiber curtain grace pudding thank cruise elder eight picnic", +"274ddc525802f7c828d8ef7ddbcdc5304e87ac3535913611fbbfa986d0c9e5476c91689f9c8a54fd55bd38606aa6a8595ad213d4c9c9f9aca3fb217069a41028", +"xprv9s21ZrQH143K2oZ9stBYpoaZ2ktHj7jLz7iMqpgg1En8kKFTXJHsjxry1JbKH19YrDTicVwKPehFKTbmaxgVEc5TpHdS1aYhB2s9aFJBeJH" +], +[ +"6610b25967cdcca9d59875f5cb50b0ea75433311869e930b", +"gravity machine north sort system female filter attitude volume fold club stay feature office ecology stable narrow fog", +"628c3827a8823298ee685db84f55caa34b5cc195a778e52d45f59bcf75aba68e4d7590e101dc414bc1bbd5737666fbbef35d1f1903953b66624f910feef245ac", +"xprv9s21ZrQH143K3uT8eQowUjsxrmsA9YUuQQK1RLqFufzybxD6DH6gPY7NjJ5G3EPHjsWDrs9iivSbmvjc9DQJbJGatfa9pv4MZ3wjr8qWPAK" +], +[ 
+"68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c", +"hamster diagram private dutch cause delay private meat slide toddler razor book happy fancy gospel tennis maple dilemma loan word shrug inflict delay length", +"64c87cde7e12ecf6704ab95bb1408bef047c22db4cc7491c4271d170a1b213d20b385bc1588d9c7b38f1b39d415665b8a9030c9ec653d75e65f847d8fc1fc440", +"xprv9s21ZrQH143K2XTAhys3pMNcGn261Fi5Ta2Pw8PwaVPhg3D8DWkzWQwjTJfskj8ofb81i9NP2cUNKxwjueJHHMQAnxtivTA75uUFqPFeWzk" +], +[ +"c0ba5a8e914111210f2bd131f3d5e08d", +"scheme spot photo card baby mountain device kick cradle pact join borrow", +"ea725895aaae8d4c1cf682c1bfd2d358d52ed9f0f0591131b559e2724bb234fca05aa9c02c57407e04ee9dc3b454aa63fbff483a8b11de949624b9f1831a9612", +"xprv9s21ZrQH143K3FperxDp8vFsFycKCRcJGAFmcV7umQmcnMZaLtZRt13QJDsoS5F6oYT6BB4sS6zmTmyQAEkJKxJ7yByDNtRe5asP2jFGhT6" +], +[ +"6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3", +"horn tenant knee talent sponsor spell gate clip pulse soap slush warm silver nephew swap uncle crack brave", +"fd579828af3da1d32544ce4db5c73d53fc8acc4ddb1e3b251a31179cdb71e853c56d2fcb11aed39898ce6c34b10b5382772db8796e52837b54468aeb312cfc3d", +"xprv9s21ZrQH143K3R1SfVZZLtVbXEB9ryVxmVtVMsMwmEyEvgXN6Q84LKkLRmf4ST6QrLeBm3jQsb9gx1uo23TS7vo3vAkZGZz71uuLCcywUkt" +], +[ +"9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863", +"panda eyebrow bullet gorilla call smoke muffin taste mesh discover soft ostrich alcohol speed nation flash devote level hobby quick inner drive ghost inside", +"72be8e052fc4919d2adf28d5306b5474b0069df35b02303de8c1729c9538dbb6fc2d731d5f832193cd9fb6aeecbc469594a70e3dd50811b5067f3b88b28c3e8d", +"xprv9s21ZrQH143K2WNnKmssvZYM96VAr47iHUQUTUyUXH3sAGNjhJANddnhw3i3y3pBbRAVk5M5qUGFr4rHbEWwXgX4qrvrceifCYQJbbFDems" +], +[ +"23db8160a31d3e0dca3688ed941adbf3", +"cat swing flag economy stadium alone churn speed unique patch report train", 
+"deb5f45449e615feff5640f2e49f933ff51895de3b4381832b3139941c57b59205a42480c52175b6efcffaa58a2503887c1e8b363a707256bdd2b587b46541f5", +"xprv9s21ZrQH143K4G28omGMogEoYgDQuigBo8AFHAGDaJdqQ99QKMQ5J6fYTMfANTJy6xBmhvsNZ1CJzRZ64PWbnTFUn6CDV2FxoMDLXdk95DQ" +], +[ +"8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0", +"light rule cinnamon wrap drastic word pride squirrel upgrade then income fatal apart sustain crack supply proud access", +"4cbdff1ca2db800fd61cae72a57475fdc6bab03e441fd63f96dabd1f183ef5b782925f00105f318309a7e9c3ea6967c7801e46c8a58082674c860a37b93eda02", +"xprv9s21ZrQH143K3wtsvY8L2aZyxkiWULZH4vyQE5XkHTXkmx8gHo6RUEfH3Jyr6NwkJhvano7Xb2o6UqFKWHVo5scE31SGDCAUsgVhiUuUDyh" +], +[ +"066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad", +"all hour make first leader extend hole alien behind guard gospel lava path output census museum junior mass reopen famous sing advance salt reform", +"26e975ec644423f4a4c4f4215ef09b4bd7ef924e85d1d17c4cf3f136c2863cf6df0a475045652c57eb5fb41513ca2a2d67722b77e954b4b3fc11f7590449191d", +"xprv9s21ZrQH143K3rEfqSM4QZRVmiMuSWY9wugscmaCjYja3SbUD3KPEB1a7QXJoajyR2T1SiXU7rFVRXMV9XdYVSZe7JoUXdP4SRHTxsT1nzm" +], +[ +"f30f8c1da665478f49b001d94c5fc452", +"vessel ladder alter error federal sibling chat ability sun glass valve picture", +"2aaa9242daafcee6aa9d7269f17d4efe271e1b9a529178d7dc139cd18747090bf9d60295d0ce74309a78852a9caadf0af48aae1c6253839624076224374bc63f", +"xprv9s21ZrQH143K2QWV9Wn8Vvs6jbqfF1YbTCdURQW9dLFKDovpKaKrqS3SEWsXCu6ZNky9PSAENg6c9AQYHcg4PjopRGGKmdD313ZHszymnps" +], +[ +"c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05", +"scissors invite lock maple supreme raw rapid void congress muscle digital elegant little brisk hair mango congress clump", +"7b4a10be9d98e6cba265566db7f136718e1398c71cb581e1b2f464cac1ceedf4f3e274dc270003c670ad8d02c4558b2f8e39edea2775c9e232c7cb798b069e88", +"xprv9s21ZrQH143K4aERa2bq7559eMCCEs2QmmqVjUuzfy5eAeDX4mqZffkYwpzGQRE2YEEeLVRoH4CSHxianrFaVnMN2RYaPUZJhJx8S5j6puX" +], +[ 
+"f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f", +"void come effort suffer camp survey warrior heavy shoot primary clutch crush open amazing screen patrol group space point ten exist slush involve unfold", +"01f5bced59dec48e362f2c45b5de68b9fd6c92c6634f44d6d40aab69056506f0e35524a518034ddc1192e1dacd32c1ed3eaa3c3b131c88ed8e7e54c49a5d0998", +"xprv9s21ZrQH143K39rnQJknpH1WEPFJrzmAqqasiDcVrNuk926oizzJDDQkdiTvNPr2FYDYzWgiMiC63YmfPAa2oPyNB23r2g7d1yiK6WpqaQS" +] +] \ No newline at end of file diff --git a/src/test/kawpow_tests.cpp b/src/test/kawpow_tests.cpp new file mode 100644 index 0000000000..486e9058d4 --- /dev/null +++ b/src/test/kawpow_tests.cpp @@ -0,0 +1,142 @@ +// Copyright (c) 2019 Veil developers +// Distributed under the MIT software license, see the accompanying +// file COPYING or http://www.opensource.org/licenses/mit-license.php. + + +#include + +#include + +#include +#include + +#include "crypto/ethash/helpers.hpp" +#include "crypto/ethash/progpow_test_vectors.hpp" + +#include + +BOOST_FIXTURE_TEST_SUITE(kawpow_tests, BasicTestingSetup) + +BOOST_AUTO_TEST_CASE(kawpow_l1_cache) +{ + auto& context = get_ethash_epoch_context_0(); + + constexpr auto test_size = 20; + std::array cache_slice; + for (size_t i = 0; i < cache_slice.size(); ++i) + cache_slice[i] = ethash::le::uint32(context.l1_cache[i]); + + const std::array expected{ + {2492749011, 430724829, 2029256771, 3095580433, 3583790154, 3025086503, + 805985885, 4121693337, 2320382801, 3763444918, 1006127899, 1480743010, + 2592936015, 2598973744, 3038068233, 2754267228, 2867798800, 2342573634, + 467767296, 246004123}}; + int i = 0; + for (auto item : cache_slice) { + BOOST_CHECK(item == expected[i]); + i++; + } +} + +BOOST_AUTO_TEST_CASE(kawpow_hash_empty) +{ + auto& context = get_ethash_epoch_context_0(); + + int count = 1000; + ethash_result result; + while (count > 0) { + result = progpow::hash(context, count, {}, 0); + --count; + } + + const auto mix_hex = 
"6e97b47b134fda0c7888802988e1a373affeb28bcd813b6e9a0fc669c935d03a"; + const auto final_hex = "e601a7257a70dc48fccc97a7330d704d776047623b92883d77111fb36870f3d1"; + BOOST_CHECK_EQUAL(to_hex(result.mix_hash), mix_hex); + BOOST_CHECK_EQUAL(to_hex(result.final_hash), final_hex); +} + +BOOST_AUTO_TEST_CASE(kawpow_hash_30000) +{ + const int block_number = 30000; + const auto header = + to_hash256("ffeeddccbbaa9988776655443322110000112233445566778899aabbccddeeff"); + const uint64_t nonce = 0x123456789abcdef0; + + auto context = ethash::create_epoch_context(ethash::get_epoch_number(block_number)); + + const auto result = progpow::hash(*context, block_number, header, nonce); + const auto mix_hex = "177b565752a375501e11b6d9d3679c2df6197b2cab3a1ba2d6b10b8c71a3d459"; + const auto final_hex = "c824bee0418e3cfb7fae56e0d5b3b8b14ba895777feea81c70c0ba947146da69"; + BOOST_CHECK_EQUAL(to_hex(result.mix_hash), mix_hex); + BOOST_CHECK_EQUAL(to_hex(result.final_hash), final_hex); + +} + +BOOST_AUTO_TEST_CASE(kawpow_hash_and_verify) +{ + ethash::epoch_context_ptr context{nullptr, nullptr}; + + for (auto& t : progpow_hash_test_cases) + { + const auto epoch_number = ethash::get_epoch_number(t.block_number); + if (!context || context->epoch_number != epoch_number) + context = ethash::create_epoch_context(epoch_number); + + const auto header_hash = to_hash256(t.header_hash_hex); + const auto nonce = std::stoull(t.nonce_hex, nullptr, 16); + const auto result = progpow::hash(*context, t.block_number, header_hash, nonce); + BOOST_CHECK_EQUAL(to_hex(result.mix_hash), t.mix_hash_hex); + BOOST_CHECK_EQUAL(to_hex(result.final_hash), t.final_hash_hex); + + auto success = progpow::verify( + *context, t.block_number, header_hash, result.mix_hash, nonce, result.final_hash); + BOOST_CHECK(success); + + auto lower_boundary = result.final_hash; + --lower_boundary.bytes[31]; + auto final_failure = progpow::verify( + *context, t.block_number, header_hash, result.mix_hash, nonce, lower_boundary); + 
BOOST_CHECK(!final_failure); + + auto different_mix = result.mix_hash; + ++different_mix.bytes[7]; + auto mix_failure = progpow::verify( + *context, t.block_number, header_hash, different_mix, nonce, result.final_hash); + BOOST_CHECK(!mix_failure); + } +} + +BOOST_AUTO_TEST_CASE(kawpow_search) +{ + auto ctxp = ethash::create_epoch_context_full(0); + auto& ctx = *ctxp; + auto& ctxl = reinterpret_cast(ctx); + + auto boundary = to_hash256("00ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"); + auto sr = progpow::search(ctx, 0, {}, boundary, 700, 100); + auto srl = progpow::search_light(ctxl, 0, {}, boundary, 700, 100); + + BOOST_CHECK(sr.mix_hash == ethash::hash256{}); + BOOST_CHECK(sr.final_hash == ethash::hash256{}); + BOOST_CHECK(sr.nonce == 0x0); + BOOST_CHECK(sr.mix_hash == srl.mix_hash); + BOOST_CHECK(sr.final_hash == srl.final_hash); + BOOST_CHECK(sr.nonce == srl.nonce); + + // Switch it to a different starting nonce and find another solution + sr = progpow::search(ctx, 0, {}, boundary, 300, 100); + srl = progpow::search_light(ctxl, 0, {}, boundary, 300, 100); + + BOOST_CHECK(sr.mix_hash != ethash::hash256{}); + BOOST_CHECK(sr.final_hash != ethash::hash256{}); + BOOST_CHECK(sr.nonce == 395); + BOOST_CHECK(sr.mix_hash == srl.mix_hash); + BOOST_CHECK(sr.final_hash == srl.final_hash); + BOOST_CHECK(sr.nonce == srl.nonce); + + auto r = progpow::hash(ctx, 0, {}, 395); + BOOST_CHECK(sr.final_hash == r.final_hash); + BOOST_CHECK(sr.mix_hash == r.mix_hash); +} + +BOOST_AUTO_TEST_SUITE_END() \ No newline at end of file diff --git a/src/test/script_P2PKH_tests.cpp b/src/test/script_P2PKH_tests.cpp index b9d9cd3e3b..910e2df013 100644 --- a/src/test/script_P2PKH_tests.cpp +++ b/src/test/script_P2PKH_tests.cpp @@ -49,11 +49,15 @@ BOOST_FIXTURE_TEST_SUITE(script_P2PKH_tests, BasicTestingSetup) static const unsigned char missing2[] = { OP_DUP, OP_HASH160, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }; + + #pragma GCC diagnostic ignored 
"-Warray-bounds" BOOST_CHECK(!CScript(missing2, missing2 + sizeof(missing)).IsPayToPublicKeyHash()); static const unsigned char tooshort[] = { OP_DUP, OP_HASH160, 2, 0, 0, OP_EQUALVERIFY, OP_CHECKSIG }; + + #pragma GCC diagnostic ignored "-Warray-bounds" BOOST_CHECK(!CScript(tooshort, tooshort + sizeof(direct)).IsPayToPublicKeyHash()); } diff --git a/src/test/test_raven.cpp b/src/test/test_raven.cpp index 9927331bc8..fba573cde1 100644 --- a/src/test/test_raven.cpp +++ b/src/test/test_raven.cpp @@ -142,7 +142,9 @@ TestChain100Setup::CreateAndProcessBlock(const std::vector unsigned int extraNonce = 0; IncrementExtraNonce(&block, chainActive.Tip(), extraNonce); - while (!CheckProofOfWork(block.GetHash(), block.nBits, chainparams.GetConsensus())) ++block.nNonce; + uint256 mix_hash; + while (!CheckProofOfWork(block.GetHashFull(mix_hash), block.nBits, chainparams.GetConsensus())) { ++block.nNonce64; ++block.nNonce;}; + block.mix_hash = mix_hash; std::shared_ptr shared_pblock = std::make_shared(block); ProcessNewBlock(chainparams, shared_pblock, true, nullptr); diff --git a/src/test/test_raven_hash.cpp b/src/test/test_raven_hash.cpp index 08f03459cf..e6d7350434 100644 --- a/src/test/test_raven_hash.cpp +++ b/src/test/test_raven_hash.cpp @@ -4,7 +4,7 @@ // file COPYING or http://www.opensource.org/licenses/mit-license.php. 
#include -#include "algo/hash_algos.h" +#include #include #include diff --git a/src/test/util_tests.cpp b/src/test/util_tests.cpp index d285598e54..f1fefec8bb 100644 --- a/src/test/util_tests.cpp +++ b/src/test/util_tests.cpp @@ -514,6 +514,18 @@ BOOST_FIXTURE_TEST_SUITE(util_tests, BasicTestingSetup) BOOST_CHECK(!ParseUInt64("-2147483648", &n)); BOOST_CHECK(!ParseUInt64("-9223372036854775808", &n)); BOOST_CHECK(!ParseUInt64("-1234", &n)); + + + // HEX Tests (base 16) + BOOST_CHECK(ParseUInt64("0x7FFFFFFFFFFFFFFF", &n, 16) && n == 9223372036854775807ULL); + BOOST_CHECK(ParseUInt64("0x8000000000000000", &n, 16) && n == 9223372036854775808ULL); + BOOST_CHECK(ParseUInt64("0xFFFFFFFFFFFFFFFF", &n, 16) && n == 18446744073709551615ULL); + BOOST_CHECK(ParseUInt64("0x04D2", nullptr, 16)); + BOOST_CHECK(ParseUInt64("0x0", &n, 16) && n == 0LL); + BOOST_CHECK(ParseUInt64("0x04D2", &n, 16) && n == 1234LL); + BOOST_CHECK(ParseUInt64("0x7FFFFFFF", &n, 16) && n == 2147483647LL); + BOOST_CHECK(!ParseUInt64("9223372036854775807", &n, 16)); //no base 10 when base 16, + } BOOST_AUTO_TEST_CASE(ParseDouble_test) diff --git a/src/txdb.cpp b/src/txdb.cpp index 81f42dd53c..d5231b56f1 100644 --- a/src/txdb.cpp +++ b/src/txdb.cpp @@ -484,6 +484,9 @@ bool CBlockTreeDB::LoadBlockIndexGuts(const Consensus::Params& consensusParams, pindexNew->nNonce = diskindex.nNonce; pindexNew->nStatus = diskindex.nStatus; pindexNew->nTx = diskindex.nTx; + pindexNew->nNonce64 = diskindex.nNonce64; + pindexNew->mix_hash = diskindex.mix_hash; + pindexNew->nHeight = diskindex.nHeight; if (!CheckProofOfWork(pindexNew->GetBlockHash(), pindexNew->nBits, consensusParams)) return error("%s: CheckProofOfWork failed: %s", __func__, pindexNew->ToString()); diff --git a/src/txmempool.cpp b/src/txmempool.cpp index 619d34953f..52d93d4f98 100644 --- a/src/txmempool.cpp +++ b/src/txmempool.cpp @@ -732,7 +732,7 @@ void CTxMemPool::removeUnchecked(txiter it, MemPoolRemovalReason reason) // Also assumes that if an entry is in 
setDescendants already, then all // in-mempool descendants of it are already in setDescendants as well, so that we // can save time by not iterating over those entries. -void CTxMemPool::CalculateDescendants(txiter entryit, setEntries &setDescendants) +void CTxMemPool::CalculateDescendants(txiter entryit, setEntries& setDescendants) const { setEntries stage; if (setDescendants.count(entryit) == 0) { diff --git a/src/txmempool.h b/src/txmempool.h index 2d7f18d42b..052eeaec6f 100644 --- a/src/txmempool.h +++ b/src/txmempool.h @@ -638,7 +638,7 @@ class CTxMemPool /** Populate setDescendants with all in-mempool descendants of hash. * Assumes that setDescendants includes all in-mempool descendants of anything * already in it. */ - void CalculateDescendants(txiter it, setEntries &setDescendants); + void CalculateDescendants(txiter it, setEntries& setDescendants) const; /** The minimum fee to get into the mempool, which may itself not be enough * for larger-sized transactions. diff --git a/src/ui_interface.h b/src/ui_interface.h index 1613b671ab..7eabdf60ab 100644 --- a/src/ui_interface.h +++ b/src/ui_interface.h @@ -69,7 +69,8 @@ class CClientUIInterface /** Predefined combinations for certain default usage cases */ MSG_INFORMATION = ICON_INFORMATION, MSG_WARNING = (ICON_WARNING | BTN_OK | MODAL), - MSG_ERROR = (ICON_ERROR | BTN_OK | MODAL) + MSG_ERROR = (ICON_ERROR | BTN_OK | MODAL), + MSG_VERSION = (ICON_INFORMATION | BTN_OK | MODAL), }; /** Show message box. */ @@ -95,6 +96,9 @@ class CClientUIInterface /** A wallet has been loaded. */ boost::signals2::signal LoadWallet; + /** Show mnemonic. */ + boost::signals2::signal ShowMnemonic; + /** * Show progress e.g. for verifychain. * resume_possible indicates shutting down now will result in the current progress action resuming upon restart.
diff --git a/src/univalue/include/univalue.h b/src/univalue/include/univalue.h index 89a212cf3b..62d51b0b1a 100644 --- a/src/univalue/include/univalue.h +++ b/src/univalue/include/univalue.h @@ -167,7 +167,9 @@ class UniValue { bool get_bool() const; const std::string& get_str() const; int get_int() const; + uint32_t get_uint() const; int64_t get_int64() const; + uint64_t get_uint64() const; double get_real() const; const UniValue& get_obj() const; const UniValue& get_array() const; diff --git a/src/univalue/lib/univalue_get.cpp b/src/univalue/lib/univalue_get.cpp index db3e747ef0..f324ad87c9 100644 --- a/src/univalue/lib/univalue_get.cpp +++ b/src/univalue/lib/univalue_get.cpp @@ -43,6 +43,22 @@ bool ParseInt32(const std::string& str, int32_t *out) n <= std::numeric_limits::max(); } +bool ParseUInt32(const std::string& str, uint32_t *out) +{ + if (!ParsePrechecks(str)) + return false; + char *endp = NULL; + errno = 0; // strtoul will not set errno if valid + unsigned long int n = strtoul(str.c_str(), &endp, 10); + if(out) *out = (uint32_t)n; + // Note that strtoul returns an *unsigned long int*, so even if strtoul doesn't report an over/underflow + // we still have to check that the returned value is within the range of a *uint32_t*. On 64-bit + // platforms the size of these types may be different.
+ return endp && *endp == 0 && !errno && + n >= std::numeric_limits::min() && + n <= std::numeric_limits::max(); +} + bool ParseInt64(const std::string& str, int64_t *out) { if (!ParsePrechecks(str)) @@ -58,6 +74,21 @@ bool ParseInt64(const std::string& str, int64_t *out) n <= std::numeric_limits::max(); } +bool ParseUInt64(const std::string& str, uint64_t *out) +{ + if (!ParsePrechecks(str)) + return false; + char *endp = NULL; + errno = 0; // strtoull will not set errno if valid + unsigned long long int n = strtoull(str.c_str(), &endp, 10); + if(out) *out = (uint64_t)n; + // Note that strtoull returns a *long long int*, so even if strtoull doesn't report a over/underflow + // we still have to check that the returned value is within the range of an *uint64_t*. + return endp && *endp == 0 && !errno && + n >= std::numeric_limits::min() && + n <= std::numeric_limits::max(); +} + bool ParseDouble(const std::string& str, double *out) { if (!ParsePrechecks(str)) @@ -111,6 +142,16 @@ int UniValue::get_int() const return retval; } +uint32_t UniValue::get_uint() const +{ + if (typ != VNUM) + throw std::runtime_error("JSON value is not an integer as expected"); + uint32_t retval; + if (!ParseUInt32(getValStr(), &retval)) + throw std::runtime_error("JSON integer out of range"); + return retval; +} + int64_t UniValue::get_int64() const { if (typ != VNUM) @@ -121,6 +162,16 @@ int64_t UniValue::get_int64() const return retval; } +uint64_t UniValue::get_uint64() const +{ + if (typ != VNUM) + throw std::runtime_error("JSON value is not an integer as expected"); + uint64_t retval; + if (!ParseUInt64(getValStr(), &retval)) + throw std::runtime_error("JSON integer out of range"); + return retval; +} + double UniValue::get_real() const { if (typ != VNUM) diff --git a/src/utilstrencodings.cpp b/src/utilstrencodings.cpp index a6af7b0bbc..51afe55c01 100644 --- a/src/utilstrencodings.cpp +++ b/src/utilstrencodings.cpp @@ -510,7 +510,7 @@ bool ParseUInt32(const std::string& str, uint32_t 
*out) n <= std::numeric_limits::max(); } -bool ParseUInt64(const std::string& str, uint64_t *out) +bool ParseUInt64(const std::string& str, uint64_t *out, int base) { if (!ParsePrechecks(str)) return false; @@ -518,7 +518,7 @@ bool ParseUInt64(const std::string& str, uint64_t *out) return false; char *endp = nullptr; errno = 0; // strtoull will not set errno if valid - unsigned long long int n = strtoull(str.c_str(), &endp, 10); + unsigned long long int n = strtoull(str.c_str(), &endp, base); if(out) *out = (uint64_t)n; // Note that strtoull returns a *unsigned long long int*, so even if it doesn't report an over/underflow // we still have to check that the returned value is within the range of an *uint64_t*. diff --git a/src/utilstrencodings.h b/src/utilstrencodings.h index 264a3891fc..5f31ac681a 100644 --- a/src/utilstrencodings.h +++ b/src/utilstrencodings.h @@ -69,6 +69,13 @@ int atoi(const std::string& str); */ bool ParseInt32(const std::string& str, int32_t *out); +/** + * Convert string to unsigned 32-bit integer with strict parse error feedback. + * @returns true if the entire string could be parsed as valid integer, + * false if not the entire string could be parsed or when overflow or underflow occurred. + */ +bool ParseUInt32(const std::string& str, uint32_t *out); + /** * Convert string to signed 64-bit integer with strict parse error feedback. * @returns true if the entire string could be parsed as valid integer, @@ -88,7 +95,7 @@ bool ParseUInt32(const std::string& str, uint32_t *out); * @returns true if the entire string could be parsed as valid integer, * false if not the entire string could be parsed or when overflow or underflow occurred. */ -bool ParseUInt64(const std::string& str, uint64_t *out); +bool ParseUInt64(const std::string& str, uint64_t *out, int base = 10); /** * Convert string to double with strict parse error feedback. 
diff --git a/src/validation.cpp b/src/validation.cpp index 3ce220506d..cf4b0da065 100644 --- a/src/validation.cpp +++ b/src/validation.cpp @@ -505,7 +505,7 @@ static bool CheckInputsFromMempoolAndCache(const CTransaction& tx, CValidationSt static bool AcceptToMemoryPoolWorker(const CChainParams& chainparams, CTxMemPool& pool, CValidationState& state, const CTransactionRef& ptx, bool* pfMissingInputs, int64_t nAcceptTime, std::list* plTxnReplaced, - bool bypass_limits, const CAmount& nAbsurdFee, std::vector& coins_to_uncache) + bool bypass_limits, const CAmount& nAbsurdFee, std::vector& coins_to_uncache, bool test_accept) { const CTransaction& tx = *ptx; const uint256 hash = tx.GetHash(); @@ -922,6 +922,11 @@ static bool AcceptToMemoryPoolWorker(const CChainParams& chainparams, CTxMemPool } } + if (test_accept) { + // Tx was accepted, but not added + return true; + } + // Remove conflicting transactions from the mempool for (const CTxMemPool::txiter it : allConflicting) { @@ -1070,10 +1075,10 @@ static bool AcceptToMemoryPoolWorker(const CChainParams& chainparams, CTxMemPool /** (try to) add transaction to memory pool with a specified acceptance time **/ static bool AcceptToMemoryPoolWithTime(const CChainParams& chainparams, CTxMemPool& pool, CValidationState &state, const CTransactionRef &tx, bool* pfMissingInputs, int64_t nAcceptTime, std::list* plTxnReplaced, - bool bypass_limits, const CAmount nAbsurdFee) + bool bypass_limits, const CAmount nAbsurdFee, bool test_accept) { std::vector coins_to_uncache; - bool res = AcceptToMemoryPoolWorker(chainparams, pool, state, tx, pfMissingInputs, nAcceptTime, plTxnReplaced, bypass_limits, nAbsurdFee, coins_to_uncache); + bool res = AcceptToMemoryPoolWorker(chainparams, pool, state, tx, pfMissingInputs, nAcceptTime, plTxnReplaced, bypass_limits, nAbsurdFee, coins_to_uncache, test_accept); if (!res) { for (const COutPoint& hashTx : coins_to_uncache) pcoinsTip->Uncache(hashTx); @@ -1086,10 +1091,10 @@ static bool 
AcceptToMemoryPoolWithTime(const CChainParams& chainparams, CTxMemPo bool AcceptToMemoryPool(CTxMemPool& pool, CValidationState &state, const CTransactionRef &tx, bool* pfMissingInputs, std::list* plTxnReplaced, - bool bypass_limits, const CAmount nAbsurdFee) + bool bypass_limits, const CAmount nAbsurdFee, bool test_accept) { const CChainParams& chainparams = GetParams(); - return AcceptToMemoryPoolWithTime(chainparams, pool, state, tx, pfMissingInputs, GetTime(), plTxnReplaced, bypass_limits, nAbsurdFee); + return AcceptToMemoryPoolWithTime(chainparams, pool, state, tx, pfMissingInputs, GetTime(), plTxnReplaced, bypass_limits, nAbsurdFee, test_accept); } bool GetTimestampIndex(const unsigned int &high, const unsigned int &low, const bool fActiveOnly, std::vector > &hashes) @@ -3061,7 +3066,7 @@ void static UpdateTip(CBlockIndex *pindexNew, const CChainParams& chainParams) { ThresholdState state = checker.GetStateFor(pindex, chainParams.GetConsensus(), warningcache[bit]); if (state == THRESHOLD_ACTIVE || state == THRESHOLD_LOCKED_IN) { const std::string strWarning = strprintf(_("Warning: unknown new rules activated (versionbit %i)"), bit); - if (bit == 28) // DUMMY TEST BIT + if (bit == 28 || bit == 25) // DUMMY TEST BIT continue; if (state == THRESHOLD_ACTIVE) { DoWarning(strWarning); @@ -3484,8 +3489,9 @@ static bool ActivateBestChainStep(CValidationState& state, const CChainParams& c if (!ConnectTip(state, chainparams, pindexConnect, pindexConnect == pindexMostWork ? pblock : std::shared_ptr(), connectTrace, disconnectpool)) { if (state.IsInvalid()) { // The block violates a consensus rule. 
- if (!state.CorruptionPossible()) + if (!state.CorruptionPossible()) { InvalidChainFound(vpindexToConnect.back()); + } state = CValidationState(); fInvalidFound = true; fContinue = false; @@ -3918,9 +3924,30 @@ static bool FindUndoPos(CValidationState &state, int nFile, CDiskBlockPos &pos, static bool CheckBlockHeader(const CBlockHeader& block, CValidationState& state, const Consensus::Params& consensusParams, bool fCheckPOW = true) { + // If we are checking a KAWPOW block below a known checkpoint height, we can validate the proof of work using the mix_hash + if (fCheckPOW && block.nTime >= nKAWPOWActivationTime) { + CBlockIndex* pcheckpoint = Checkpoints::GetLastCheckpoint(GetParams().Checkpoints()); + if (fCheckPOW && pcheckpoint && block.nHeight <= (uint32_t)pcheckpoint->nHeight) { + if (!CheckProofOfWork(block.GetHash(), block.nBits, consensusParams)) { + return state.DoS(50, false, REJECT_INVALID, "high-hash", false, "proof of work failed with mix_hash only check"); + } + + return true; + } + } + + uint256 mix_hash; // Check proof of work matches claimed amount - if (fCheckPOW && !CheckProofOfWork(block.GetHash(), block.nBits, consensusParams)) + if (fCheckPOW && !CheckProofOfWork(block.GetHashFull(mix_hash), block.nBits, consensusParams)) { return state.DoS(50, false, REJECT_INVALID, "high-hash", false, "proof of work failed"); + } + + if (fCheckPOW && block.nTime >= nKAWPOWActivationTime) { + if (mix_hash != block.mix_hash) { + return state.DoS(50, false, REJECT_INVALID, "invalid-mix-hash", false, "mix_hash validity failed"); + } + } + + return true; } @@ -3934,7 +3961,7 @@ bool CheckBlock(const CBlock& block, CValidationState& state, const Consensus::P // Check that the header is valid (particularly PoW). This is mostly // redundant with the call in AcceptBlockHeader.
if (!CheckBlockHeader(block, state, consensusParams, fCheckPOW)) - return false; + return error("%s: Consensus::CheckBlockHeader: %s", __func__, FormatStateMessage(state)); // Check the merkle root. if (fCheckMerkleRoot) { @@ -4282,7 +4309,7 @@ bool ProcessNewBlockHeaders(const std::vector& headers, CValidatio } /** Store block on disk. If dbp is non-nullptr, the file is known to already reside on disk */ -static bool AcceptBlock(const std::shared_ptr& pblock, CValidationState& state, const CChainParams& chainparams, CBlockIndex** ppindex, bool fRequested, const CDiskBlockPos* dbp, bool* fNewBlock) +static bool AcceptBlock(const std::shared_ptr& pblock, CValidationState& state, const CChainParams& chainparams, CBlockIndex** ppindex, bool fRequested, const CDiskBlockPos* dbp, bool* fNewBlock, bool fFromLoad = false) { const CBlock& block = *pblock; @@ -4332,11 +4359,17 @@ static bool AcceptBlock(const std::shared_ptr& pblock, CValidation // Dont force the CheckBlock asset duplciates when checking from this state if (!CheckBlock(block, state, chainparams.GetConsensus(), true, true) || !ContextualCheckBlock(block, state, chainparams.GetConsensus(), pindex->pprev, currentActiveAssetCache)) { - if (state.IsInvalid() && !state.CorruptionPossible()) { - pindex->nStatus |= BLOCK_FAILED_VALID; - setDirtyBlockIndex.insert(pindex); + if (fFromLoad && state.GetRejectReason() == "bad-txns-transfer-asset-bad-deserialize") { + // keep going, we are only loading blocks from database + CValidationState new_state; + state = new_state; + } else { + if (state.IsInvalid() && !state.CorruptionPossible()) { + pindex->nStatus |= BLOCK_FAILED_VALID; + setDirtyBlockIndex.insert(pindex); + } + return error("%s: %s", __func__, FormatStateMessage(state)); } - return error("%s: %s", __func__, FormatStateMessage(state)); } // Header is valid/has work, merkle tree and segwit merkle tree are good...RELAY NOW @@ -5250,11 +5283,12 @@ bool LoadExternalBlockFile(const CChainParams& chainparams, FILE* 
fileIn, CDiskB if (mapBlockIndex.count(hash) == 0 || (mapBlockIndex[hash]->nStatus & BLOCK_HAVE_DATA) == 0) { LOCK(cs_main); CValidationState state; - if (AcceptBlock(pblock, state, chainparams, nullptr, true, dbp, nullptr)) { + if (AcceptBlock(pblock, state, chainparams, nullptr, true, dbp, nullptr, true)) { nLoaded++; } - if (state.IsError()) + if (state.IsError()) { break; + } } else if (hash != chainparams.GetConsensus().hashGenesisBlock && mapBlockIndex[hash]->nHeight % 1000 == 0) { LogPrint(BCLog::REINDEX, "Block Import: already had block %s at height %d\n", hash.ToString(), mapBlockIndex[hash]->nHeight); } @@ -5285,7 +5319,7 @@ bool LoadExternalBlockFile(const CChainParams& chainparams, FILE* fileIn, CDiskB head.ToString()); LOCK(cs_main); CValidationState dummy; - if (AcceptBlock(pblockrecursive, dummy, chainparams, nullptr, true, &it->second, nullptr)) + if (AcceptBlock(pblockrecursive, dummy, chainparams, nullptr, true, &it->second, nullptr, true)) { nLoaded++; queue.push_back(pblockrecursive->GetHash()); @@ -5565,7 +5599,8 @@ bool LoadMempool(void) if (nTime + nExpiryTimeout > nNow) { LOCK(cs_main); AcceptToMemoryPoolWithTime(chainparams, mempool, state, tx, nullptr /* pfMissingInputs */, nTime, - nullptr /* plTxnReplaced */, false /* bypass_limits */, 0 /* nAbsurdFee */); + nullptr /* plTxnReplaced */, false /* bypass_limits */, 0 /* nAbsurdFee */, + false /* test_accept */); if (state.IsValid()) { ++count; } else { @@ -5697,6 +5732,18 @@ bool AreMessagesDeployed() { return IsRip5Active(); } +bool AreTransferScriptsSizeDeployed() { + + if (fTransferScriptIsActive) + return true; + + const ThresholdState thresholdState = VersionBitsTipState(GetParams().GetConsensus(), Consensus::DEPLOYMENT_TRANSFER_SCRIPT_SIZE); + if (thresholdState == THRESHOLD_ACTIVE) + fTransferScriptIsActive = true; + + return fTransferScriptIsActive; +} + bool AreRestrictedAssetsDeployed() { return IsRip5Active(); diff --git a/src/validation.h b/src/validation.h index 
c72666be4f..7ac6f28421 100644 --- a/src/validation.h +++ b/src/validation.h @@ -343,7 +343,7 @@ void PruneBlockFilesManual(int nManualPruneHeight); * plTxnReplaced will be appended to with all transactions replaced from mempool **/ bool AcceptToMemoryPool(CTxMemPool& pool, CValidationState &state, const CTransactionRef &tx, bool* pfMissingInputs, std::list* plTxnReplaced, - bool bypass_limits, const CAmount nAbsurdFee); + bool bypass_limits, const CAmount nAbsurdFee, bool test_accept=false); /** Convert CValidationState to a human-readable message for logging */ std::string FormatStateMessage(const CValidationState &state); @@ -597,6 +597,9 @@ bool AreRestrictedAssetsDeployed(); bool IsRip5Active(); + +bool AreTransferScriptsSizeDeployed(); + bool IsDGWActive(unsigned int nBlockNumber); bool IsMessagingActive(unsigned int nBlockNumber); bool IsRestrictedActive(unsigned int nBlockNumber); diff --git a/src/version.h b/src/version.h index c62fa02081..61687d7733 100644 --- a/src/version.h +++ b/src/version.h @@ -10,6 +10,14 @@ * network protocol versioning */ +// Update these four values on every release cycle +// These values should match the values in configure.ac +// Used for checking the Ravencoin releases on github +static const std::string SOFTWARE_VERSION = "v4.0.0"; +static const int MAIN_SOFTWARE_VERSION = 4; +static const int SECOND_SOFTWARE_VERSION = 0; +static const int THIRD_SOFTWARE_VERSION = 0; + static const int PROTOCOL_VERSION = 70026; //! initial proto version, to be increased after version/verack negotiation @@ -24,6 +32,9 @@ static const int ASSETDATA_VERSION = 70017; //! getassetdata reutrn asstnotfound, and assetdata doesn't have blockhash in the data static const int X16RV2_VERSION = 70025; +//! getassetdata return assetnotfound, and assetdata doesn't have blockhash in the data +static const int KAWPOW_VERSION = 70027; + //! disconnect from peers older than this proto version //!!!
Anytime this value is changed please also update the "MY_VERSION" value to match in the //!!! ./test/functional/test_framework/mininode.py file. Not doing so will cause verack to fail! @@ -57,4 +68,5 @@ static const int ASSETDATA_VERSION_UPDATED = 70020; //! In this version, 'rip5 (messaging and restricted assets)' was introduced static const int MESSAGING_RESTRICTED_ASSETS_VERSION = 70026; + #endif // RAVEN_VERSION_H diff --git a/src/versionbits.cpp b/src/versionbits.cpp index 39050d3f4b..cad9b3ad90 100644 --- a/src/versionbits.cpp +++ b/src/versionbits.cpp @@ -22,6 +22,10 @@ const struct VBDeploymentInfo VersionBitsDeploymentInfo[Consensus::MAX_VERSION_B { /*.name =*/ "messaging_restricted", /*.gbt_force =*/ true, + }, + { + /*.name =*/ "transfer_script", + /*.gbt_force =*/ true, } }; diff --git a/src/wallet/bip39.cpp b/src/wallet/bip39.cpp new file mode 100644 index 0000000000..4ed01a8b96 --- /dev/null +++ b/src/wallet/bip39.cpp @@ -0,0 +1,156 @@ +/** + * Created by ROSHii on 2019-06-01. + * Copyright (c) 2013-2014 Tomas Dzetkulic + * Copyright (c) 2013-2014 Pavol Rusnak + * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES + * OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + * OTHER DEALINGS IN THE SOFTWARE. + */ + + +#include "wallet/bip39.h" +#include "wallet/bip39_english.h" +#include "crypto/sha256.h" +#include "random.h" + +#include + +SecureString CMnemonic::Generate(int strength) +{ + if (strength % 32 || strength < 128 || strength > 256) { + return SecureString(); + } + SecureVector data(32); + GetStrongRandBytes(&data[0], 32); + SecureString mnemonic = FromData(data, strength / 8); + return mnemonic; +} + +// SecureString CMnemonic::FromData(const uint8_t *data, int len) +SecureString CMnemonic::FromData(const SecureVector& data, int len) +{ + if (len % 4 || len < 16 || len > 32) { + return SecureString(); + } + + SecureVector checksum(32); + CSHA256().Write(&data[0], len).Finalize(&checksum[0]); + + // data + SecureVector bits(len); + memcpy(&bits[0], &data[0], len); + // checksum + bits.push_back(checksum[0]); + + int mlen = len * 3 / 4; + SecureString mnemonic; + + int i, j, idx; + for (i = 0; i < mlen; i++) { + idx = 0; + for (j = 0; j < 11; j++) { + idx <<= 1; + idx += (bits[(i * 11 + j) / 8] & (1 << (7 - ((i * 11 + j) % 8)))) > 0; + } + mnemonic.append(wordlist[idx]); + if (i < mlen - 1) { + mnemonic += ' '; + } + } + + return mnemonic; +} + +bool CMnemonic::Check(SecureString mnemonic) +{ + if (mnemonic.empty()) { + return false; + } + + uint32_t nWordCount{}; + + for (size_t i = 0; i < mnemonic.size(); ++i) { + if (mnemonic[i] == ' ') { + nWordCount++; + } + } + nWordCount++; + // check number of words + if (nWordCount != 12 && nWordCount != 18 && nWordCount != 24) { + return false; + } + + SecureString ssCurrentWord; + SecureVector bits(32 + 1); + + uint32_t nWordIndex, ki, nBitsCount{}; + + for (size_t i = 0; i < mnemonic.size(); ++i) + { + ssCurrentWord = ""; + while (i + 
ssCurrentWord.size() < mnemonic.size() && mnemonic[i + ssCurrentWord.size()] != ' ') { + if (ssCurrentWord.size() >= 9) { + return false; + } + ssCurrentWord += mnemonic[i + ssCurrentWord.size()]; + } + i += ssCurrentWord.size(); + nWordIndex = 0; + for (;;) { + if (!wordlist[nWordIndex]) { // word not found + return false; + } + if (ssCurrentWord == wordlist[nWordIndex]) { // word found on index nWordIndex + for (ki = 0; ki < 11; ki++) { + if (nWordIndex & (1 << (10 - ki))) { + bits[nBitsCount / 8] |= 1 << (7 - (nBitsCount % 8)); + } + nBitsCount++; + } + break; + } + nWordIndex++; + } + } + if (nBitsCount != nWordCount * 11) { + return false; + } + bits[32] = bits[nWordCount * 4 / 3]; + CSHA256().Write(&bits[0], nWordCount * 4 / 3).Finalize(&bits[0]); + + bool fResult = 0; + if (nWordCount == 12) { + fResult = (bits[0] & 0xF0) == (bits[32] & 0xF0); // compare first 4 bits + } else + if (nWordCount == 18) { + fResult = (bits[0] & 0xFC) == (bits[32] & 0xFC); // compare first 6 bits + } else + if (nWordCount == 24) { + fResult = bits[0] == bits[32]; // compare 8 bits + } + + return fResult; +} + +void CMnemonic::ToSeed(SecureString mnemonic, SecureString passphrase, SecureVector& seedRet) +{ + SecureString ssSalt = SecureString("mnemonic") + passphrase; + SecureVector vchSalt(ssSalt.begin(), ssSalt.end()); + seedRet.resize(64); + PKCS5_PBKDF2_HMAC(mnemonic.c_str(), mnemonic.size(), &vchSalt[0], vchSalt.size(), 2048, EVP_sha512(), 64, &seedRet[0]); +} \ No newline at end of file diff --git a/src/wallet/bip39.h b/src/wallet/bip39.h new file mode 100644 index 0000000000..ba7eebf934 --- /dev/null +++ b/src/wallet/bip39.h @@ -0,0 +1,41 @@ +/** + * Created by ROSHii on 2019-06-01. 
+ * Copyright (c) 2013-2014 Tomas Dzetkulic + * Copyright (c) 2013-2014 Pavol Rusnak + * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES + * OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + * OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +#ifndef SRC_BIP39_H +#define SRC_BIP39_H + +#include "support/allocators/secure.h" + +class CMnemonic +{ +public: + static SecureString Generate(int strength); // strength in bits + static SecureString FromData(const SecureVector& data, int len); + static bool Check(SecureString mnemonic); + static void ToSeed(SecureString mnemonic, SecureString passphrase, SecureVector& seedRet); +private: + CMnemonic() {}; +}; + +#endif \ No newline at end of file diff --git a/src/wallet/bip39_english.h b/src/wallet/bip39_english.h new file mode 100644 index 0000000000..3f004d878a --- /dev/null +++ b/src/wallet/bip39_english.h @@ -0,0 +1,2074 @@ +/** + * Copyright (c) 2013-2014 Tomas Dzetkulic + * Copyright (c) 2013-2014 Pavol Rusnak + * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES + * OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + * OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +const char * const wordlist[] = { +"abandon", +"ability", +"able", +"about", +"above", +"absent", +"absorb", +"abstract", +"absurd", +"abuse", +"access", +"accident", +"account", +"accuse", +"achieve", +"acid", +"acoustic", +"acquire", +"across", +"act", +"action", +"actor", +"actress", +"actual", +"adapt", +"add", +"addict", +"address", +"adjust", +"admit", +"adult", +"advance", +"advice", +"aerobic", +"affair", +"afford", +"afraid", +"again", +"age", +"agent", +"agree", +"ahead", +"aim", +"air", +"airport", +"aisle", +"alarm", +"album", +"alcohol", +"alert", +"alien", +"all", +"alley", +"allow", +"almost", +"alone", +"alpha", +"already", +"also", +"alter", +"always", +"amateur", +"amazing", +"among", +"amount", +"amused", +"analyst", +"anchor", +"ancient", +"anger", +"angle", +"angry", +"animal", +"ankle", +"announce", +"annual", +"another", +"answer", +"antenna", +"antique", +"anxiety", +"any", +"apart", +"apology", +"appear", +"apple", +"approve", +"april", +"arch", +"arctic", +"area", +"arena", +"argue", +"arm", +"armed", +"armor", +"army", +"around", +"arrange", +"arrest", +"arrive", +"arrow", +"art", +"artefact", +"artist", +"artwork", +"ask", +"aspect", +"assault", +"asset", +"assist", +"assume", +"asthma", +"athlete", +"atom", +"attack", +"attend", +"attitude", +"attract", +"auction", +"audit", +"august", +"aunt", +"author", +"auto", +"autumn", +"average", +"avocado", +"avoid", +"awake", +"aware", +"away", +"awesome", +"awful", +"awkward", +"axis", +"baby", +"bachelor", +"bacon", +"badge", +"bag", +"balance", +"balcony", +"ball", +"bamboo", +"banana", +"banner", +"bar", +"barely", +"bargain", +"barrel", +"base", +"basic", +"basket", +"battle", +"beach", +"bean", +"beauty", +"because", +"become", +"beef", +"before", +"begin", +"behave", +"behind", +"believe", +"below", +"belt", +"bench", +"benefit", +"best", +"betray", +"better", +"between", +"beyond", +"bicycle", +"bid", +"bike", +"bind", +"biology", +"bird", +"birth", +"bitter", +"black", 
+"blade", +"blame", +"blanket", +"blast", +"bleak", +"bless", +"blind", +"blood", +"blossom", +"blouse", +"blue", +"blur", +"blush", +"board", +"boat", +"body", +"boil", +"bomb", +"bone", +"bonus", +"book", +"boost", +"border", +"boring", +"borrow", +"boss", +"bottom", +"bounce", +"box", +"boy", +"bracket", +"brain", +"brand", +"brass", +"brave", +"bread", +"breeze", +"brick", +"bridge", +"brief", +"bright", +"bring", +"brisk", +"broccoli", +"broken", +"bronze", +"broom", +"brother", +"brown", +"brush", +"bubble", +"buddy", +"budget", +"buffalo", +"build", +"bulb", +"bulk", +"bullet", +"bundle", +"bunker", +"burden", +"burger", +"burst", +"bus", +"business", +"busy", +"butter", +"buyer", +"buzz", +"cabbage", +"cabin", +"cable", +"cactus", +"cage", +"cake", +"call", +"calm", +"camera", +"camp", +"can", +"canal", +"cancel", +"candy", +"cannon", +"canoe", +"canvas", +"canyon", +"capable", +"capital", +"captain", +"car", +"carbon", +"card", +"cargo", +"carpet", +"carry", +"cart", +"case", +"cash", +"casino", +"castle", +"casual", +"cat", +"catalog", +"catch", +"category", +"cattle", +"caught", +"cause", +"caution", +"cave", +"ceiling", +"celery", +"cement", +"census", +"century", +"cereal", +"certain", +"chair", +"chalk", +"champion", +"change", +"chaos", +"chapter", +"charge", +"chase", +"chat", +"cheap", +"check", +"cheese", +"chef", +"cherry", +"chest", +"chicken", +"chief", +"child", +"chimney", +"choice", +"choose", +"chronic", +"chuckle", +"chunk", +"churn", +"cigar", +"cinnamon", +"circle", +"citizen", +"city", +"civil", +"claim", +"clap", +"clarify", +"claw", +"clay", +"clean", +"clerk", +"clever", +"click", +"client", +"cliff", +"climb", +"clinic", +"clip", +"clock", +"clog", +"close", +"cloth", +"cloud", +"clown", +"club", +"clump", +"cluster", +"clutch", +"coach", +"coast", +"coconut", +"code", +"coffee", +"coil", +"coin", +"collect", +"color", +"column", +"combine", +"come", +"comfort", +"comic", +"common", +"company", +"concert", +"conduct", +"confirm", 
+"congress", +"connect", +"consider", +"control", +"convince", +"cook", +"cool", +"copper", +"copy", +"coral", +"core", +"corn", +"correct", +"cost", +"cotton", +"couch", +"country", +"couple", +"course", +"cousin", +"cover", +"coyote", +"crack", +"cradle", +"craft", +"cram", +"crane", +"crash", +"crater", +"crawl", +"crazy", +"cream", +"credit", +"creek", +"crew", +"cricket", +"crime", +"crisp", +"critic", +"crop", +"cross", +"crouch", +"crowd", +"crucial", +"cruel", +"cruise", +"crumble", +"crunch", +"crush", +"cry", +"crystal", +"cube", +"culture", +"cup", +"cupboard", +"curious", +"current", +"curtain", +"curve", +"cushion", +"custom", +"cute", +"cycle", +"dad", +"damage", +"damp", +"dance", +"danger", +"daring", +"dash", +"daughter", +"dawn", +"day", +"deal", +"debate", +"debris", +"decade", +"december", +"decide", +"decline", +"decorate", +"decrease", +"deer", +"defense", +"define", +"defy", +"degree", +"delay", +"deliver", +"demand", +"demise", +"denial", +"dentist", +"deny", +"depart", +"depend", +"deposit", +"depth", +"deputy", +"derive", +"describe", +"desert", +"design", +"desk", +"despair", +"destroy", +"detail", +"detect", +"develop", +"device", +"devote", +"diagram", +"dial", +"diamond", +"diary", +"dice", +"diesel", +"diet", +"differ", +"digital", +"dignity", +"dilemma", +"dinner", +"dinosaur", +"direct", +"dirt", +"disagree", +"discover", +"disease", +"dish", +"dismiss", +"disorder", +"display", +"distance", +"divert", +"divide", +"divorce", +"dizzy", +"doctor", +"document", +"dog", +"doll", +"dolphin", +"domain", +"donate", +"donkey", +"donor", +"door", +"dose", +"double", +"dove", +"draft", +"dragon", +"drama", +"drastic", +"draw", +"dream", +"dress", +"drift", +"drill", +"drink", +"drip", +"drive", +"drop", +"drum", +"dry", +"duck", +"dumb", +"dune", +"during", +"dust", +"dutch", +"duty", +"dwarf", +"dynamic", +"eager", +"eagle", +"early", +"earn", +"earth", +"easily", +"east", +"easy", +"echo", +"ecology", +"economy", +"edge", +"edit", 
+"educate", +"effort", +"egg", +"eight", +"either", +"elbow", +"elder", +"electric", +"elegant", +"element", +"elephant", +"elevator", +"elite", +"else", +"embark", +"embody", +"embrace", +"emerge", +"emotion", +"employ", +"empower", +"empty", +"enable", +"enact", +"end", +"endless", +"endorse", +"enemy", +"energy", +"enforce", +"engage", +"engine", +"enhance", +"enjoy", +"enlist", +"enough", +"enrich", +"enroll", +"ensure", +"enter", +"entire", +"entry", +"envelope", +"episode", +"equal", +"equip", +"era", +"erase", +"erode", +"erosion", +"error", +"erupt", +"escape", +"essay", +"essence", +"estate", +"eternal", +"ethics", +"evidence", +"evil", +"evoke", +"evolve", +"exact", +"example", +"excess", +"exchange", +"excite", +"exclude", +"excuse", +"execute", +"exercise", +"exhaust", +"exhibit", +"exile", +"exist", +"exit", +"exotic", +"expand", +"expect", +"expire", +"explain", +"expose", +"express", +"extend", +"extra", +"eye", +"eyebrow", +"fabric", +"face", +"faculty", +"fade", +"faint", +"faith", +"fall", +"false", +"fame", +"family", +"famous", +"fan", +"fancy", +"fantasy", +"farm", +"fashion", +"fat", +"fatal", +"father", +"fatigue", +"fault", +"favorite", +"feature", +"february", +"federal", +"fee", +"feed", +"feel", +"female", +"fence", +"festival", +"fetch", +"fever", +"few", +"fiber", +"fiction", +"field", +"figure", +"file", +"film", +"filter", +"final", +"find", +"fine", +"finger", +"finish", +"fire", +"firm", +"first", +"fiscal", +"fish", +"fit", +"fitness", +"fix", +"flag", +"flame", +"flash", +"flat", +"flavor", +"flee", +"flight", +"flip", +"float", +"flock", +"floor", +"flower", +"fluid", +"flush", +"fly", +"foam", +"focus", +"fog", +"foil", +"fold", +"follow", +"food", +"foot", +"force", +"forest", +"forget", +"fork", +"fortune", +"forum", +"forward", +"fossil", +"foster", +"found", +"fox", +"fragile", +"frame", +"frequent", +"fresh", +"friend", +"fringe", +"frog", +"front", +"frost", +"frown", +"frozen", +"fruit", +"fuel", +"fun", +"funny", 
+"furnace", +"fury", +"future", +"gadget", +"gain", +"galaxy", +"gallery", +"game", +"gap", +"garage", +"garbage", +"garden", +"garlic", +"garment", +"gas", +"gasp", +"gate", +"gather", +"gauge", +"gaze", +"general", +"genius", +"genre", +"gentle", +"genuine", +"gesture", +"ghost", +"giant", +"gift", +"giggle", +"ginger", +"giraffe", +"girl", +"give", +"glad", +"glance", +"glare", +"glass", +"glide", +"glimpse", +"globe", +"gloom", +"glory", +"glove", +"glow", +"glue", +"goat", +"goddess", +"gold", +"good", +"goose", +"gorilla", +"gospel", +"gossip", +"govern", +"gown", +"grab", +"grace", +"grain", +"grant", +"grape", +"grass", +"gravity", +"great", +"green", +"grid", +"grief", +"grit", +"grocery", +"group", +"grow", +"grunt", +"guard", +"guess", +"guide", +"guilt", +"guitar", +"gun", +"gym", +"habit", +"hair", +"half", +"hammer", +"hamster", +"hand", +"happy", +"harbor", +"hard", +"harsh", +"harvest", +"hat", +"have", +"hawk", +"hazard", +"head", +"health", +"heart", +"heavy", +"hedgehog", +"height", +"hello", +"helmet", +"help", +"hen", +"hero", +"hidden", +"high", +"hill", +"hint", +"hip", +"hire", +"history", +"hobby", +"hockey", +"hold", +"hole", +"holiday", +"hollow", +"home", +"honey", +"hood", +"hope", +"horn", +"horror", +"horse", +"hospital", +"host", +"hotel", +"hour", +"hover", +"hub", +"huge", +"human", +"humble", +"humor", +"hundred", +"hungry", +"hunt", +"hurdle", +"hurry", +"hurt", +"husband", +"hybrid", +"ice", +"icon", +"idea", +"identify", +"idle", +"ignore", +"ill", +"illegal", +"illness", +"image", +"imitate", +"immense", +"immune", +"impact", +"impose", +"improve", +"impulse", +"inch", +"include", +"income", +"increase", +"index", +"indicate", +"indoor", +"industry", +"infant", +"inflict", +"inform", +"inhale", +"inherit", +"initial", +"inject", +"injury", +"inmate", +"inner", +"innocent", +"input", +"inquiry", +"insane", +"insect", +"inside", +"inspire", +"install", +"intact", +"interest", +"into", +"invest", +"invite", +"involve", +"iron", 
+"island", +"isolate", +"issue", +"item", +"ivory", +"jacket", +"jaguar", +"jar", +"jazz", +"jealous", +"jeans", +"jelly", +"jewel", +"job", +"join", +"joke", +"journey", +"joy", +"judge", +"juice", +"jump", +"jungle", +"junior", +"junk", +"just", +"kangaroo", +"keen", +"keep", +"ketchup", +"key", +"kick", +"kid", +"kidney", +"kind", +"kingdom", +"kiss", +"kit", +"kitchen", +"kite", +"kitten", +"kiwi", +"knee", +"knife", +"knock", +"know", +"lab", +"label", +"labor", +"ladder", +"lady", +"lake", +"lamp", +"language", +"laptop", +"large", +"later", +"latin", +"laugh", +"laundry", +"lava", +"law", +"lawn", +"lawsuit", +"layer", +"lazy", +"leader", +"leaf", +"learn", +"leave", +"lecture", +"left", +"leg", +"legal", +"legend", +"leisure", +"lemon", +"lend", +"length", +"lens", +"leopard", +"lesson", +"letter", +"level", +"liar", +"liberty", +"library", +"license", +"life", +"lift", +"light", +"like", +"limb", +"limit", +"link", +"lion", +"liquid", +"list", +"little", +"live", +"lizard", +"load", +"loan", +"lobster", +"local", +"lock", +"logic", +"lonely", +"long", +"loop", +"lottery", +"loud", +"lounge", +"love", +"loyal", +"lucky", +"luggage", +"lumber", +"lunar", +"lunch", +"luxury", +"lyrics", +"machine", +"mad", +"magic", +"magnet", +"maid", +"mail", +"main", +"major", +"make", +"mammal", +"man", +"manage", +"mandate", +"mango", +"mansion", +"manual", +"maple", +"marble", +"march", +"margin", +"marine", +"market", +"marriage", +"mask", +"mass", +"master", +"match", +"material", +"math", +"matrix", +"matter", +"maximum", +"maze", +"meadow", +"mean", +"measure", +"meat", +"mechanic", +"medal", +"media", +"melody", +"melt", +"member", +"memory", +"mention", +"menu", +"mercy", +"merge", +"merit", +"merry", +"mesh", +"message", +"metal", +"method", +"middle", +"midnight", +"milk", +"million", +"mimic", +"mind", +"minimum", +"minor", +"minute", +"miracle", +"mirror", +"misery", +"miss", +"mistake", +"mix", +"mixed", +"mixture", +"mobile", +"model", +"modify", +"mom", 
+"moment", +"monitor", +"monkey", +"monster", +"month", +"moon", +"moral", +"more", +"morning", +"mosquito", +"mother", +"motion", +"motor", +"mountain", +"mouse", +"move", +"movie", +"much", +"muffin", +"mule", +"multiply", +"muscle", +"museum", +"mushroom", +"music", +"must", +"mutual", +"myself", +"mystery", +"myth", +"naive", +"name", +"napkin", +"narrow", +"nasty", +"nation", +"nature", +"near", +"neck", +"need", +"negative", +"neglect", +"neither", +"nephew", +"nerve", +"nest", +"net", +"network", +"neutral", +"never", +"news", +"next", +"nice", +"night", +"noble", +"noise", +"nominee", +"noodle", +"normal", +"north", +"nose", +"notable", +"note", +"nothing", +"notice", +"novel", +"now", +"nuclear", +"number", +"nurse", +"nut", +"oak", +"obey", +"object", +"oblige", +"obscure", +"observe", +"obtain", +"obvious", +"occur", +"ocean", +"october", +"odor", +"off", +"offer", +"office", +"often", +"oil", +"okay", +"old", +"olive", +"olympic", +"omit", +"once", +"one", +"onion", +"online", +"only", +"open", +"opera", +"opinion", +"oppose", +"option", +"orange", +"orbit", +"orchard", +"order", +"ordinary", +"organ", +"orient", +"original", +"orphan", +"ostrich", +"other", +"outdoor", +"outer", +"output", +"outside", +"oval", +"oven", +"over", +"own", +"owner", +"oxygen", +"oyster", +"ozone", +"pact", +"paddle", +"page", +"pair", +"palace", +"palm", +"panda", +"panel", +"panic", +"panther", +"paper", +"parade", +"parent", +"park", +"parrot", +"party", +"pass", +"patch", +"path", +"patient", +"patrol", +"pattern", +"pause", +"pave", +"payment", +"peace", +"peanut", +"pear", +"peasant", +"pelican", +"pen", +"penalty", +"pencil", +"people", +"pepper", +"perfect", +"permit", +"person", +"pet", +"phone", +"photo", +"phrase", +"physical", +"piano", +"picnic", +"picture", +"piece", +"pig", +"pigeon", +"pill", +"pilot", +"pink", +"pioneer", +"pipe", +"pistol", +"pitch", +"pizza", +"place", +"planet", +"plastic", +"plate", +"play", +"please", +"pledge", +"pluck", +"plug", 
+"plunge", +"poem", +"poet", +"point", +"polar", +"pole", +"police", +"pond", +"pony", +"pool", +"popular", +"portion", +"position", +"possible", +"post", +"potato", +"pottery", +"poverty", +"powder", +"power", +"practice", +"praise", +"predict", +"prefer", +"prepare", +"present", +"pretty", +"prevent", +"price", +"pride", +"primary", +"print", +"priority", +"prison", +"private", +"prize", +"problem", +"process", +"produce", +"profit", +"program", +"project", +"promote", +"proof", +"property", +"prosper", +"protect", +"proud", +"provide", +"public", +"pudding", +"pull", +"pulp", +"pulse", +"pumpkin", +"punch", +"pupil", +"puppy", +"purchase", +"purity", +"purpose", +"purse", +"push", +"put", +"puzzle", +"pyramid", +"quality", +"quantum", +"quarter", +"question", +"quick", +"quit", +"quiz", +"quote", +"rabbit", +"raccoon", +"race", +"rack", +"radar", +"radio", +"rail", +"rain", +"raise", +"rally", +"ramp", +"ranch", +"random", +"range", +"rapid", +"rare", +"rate", +"rather", +"raven", +"raw", +"razor", +"ready", +"real", +"reason", +"rebel", +"rebuild", +"recall", +"receive", +"recipe", +"record", +"recycle", +"reduce", +"reflect", +"reform", +"refuse", +"region", +"regret", +"regular", +"reject", +"relax", +"release", +"relief", +"rely", +"remain", +"remember", +"remind", +"remove", +"render", +"renew", +"rent", +"reopen", +"repair", +"repeat", +"replace", +"report", +"require", +"rescue", +"resemble", +"resist", +"resource", +"response", +"result", +"retire", +"retreat", +"return", +"reunion", +"reveal", +"review", +"reward", +"rhythm", +"rib", +"ribbon", +"rice", +"rich", +"ride", +"ridge", +"rifle", +"right", +"rigid", +"ring", +"riot", +"ripple", +"risk", +"ritual", +"rival", +"river", +"road", +"roast", +"robot", +"robust", +"rocket", +"romance", +"roof", +"rookie", +"room", +"rose", +"rotate", +"rough", +"round", +"route", +"royal", +"rubber", +"rude", +"rug", +"rule", +"run", +"runway", +"rural", +"sad", +"saddle", +"sadness", +"safe", +"sail", +"salad", 
+"salmon", +"salon", +"salt", +"salute", +"same", +"sample", +"sand", +"satisfy", +"satoshi", +"sauce", +"sausage", +"save", +"say", +"scale", +"scan", +"scare", +"scatter", +"scene", +"scheme", +"school", +"science", +"scissors", +"scorpion", +"scout", +"scrap", +"screen", +"script", +"scrub", +"sea", +"search", +"season", +"seat", +"second", +"secret", +"section", +"security", +"seed", +"seek", +"segment", +"select", +"sell", +"seminar", +"senior", +"sense", +"sentence", +"series", +"service", +"session", +"settle", +"setup", +"seven", +"shadow", +"shaft", +"shallow", +"share", +"shed", +"shell", +"sheriff", +"shield", +"shift", +"shine", +"ship", +"shiver", +"shock", +"shoe", +"shoot", +"shop", +"short", +"shoulder", +"shove", +"shrimp", +"shrug", +"shuffle", +"shy", +"sibling", +"sick", +"side", +"siege", +"sight", +"sign", +"silent", +"silk", +"silly", +"silver", +"similar", +"simple", +"since", +"sing", +"siren", +"sister", +"situate", +"six", +"size", +"skate", +"sketch", +"ski", +"skill", +"skin", +"skirt", +"skull", +"slab", +"slam", +"sleep", +"slender", +"slice", +"slide", +"slight", +"slim", +"slogan", +"slot", +"slow", +"slush", +"small", +"smart", +"smile", +"smoke", +"smooth", +"snack", +"snake", +"snap", +"sniff", +"snow", +"soap", +"soccer", +"social", +"sock", +"soda", +"soft", +"solar", +"soldier", +"solid", +"solution", +"solve", +"someone", +"song", +"soon", +"sorry", +"sort", +"soul", +"sound", +"soup", +"source", +"south", +"space", +"spare", +"spatial", +"spawn", +"speak", +"special", +"speed", +"spell", +"spend", +"sphere", +"spice", +"spider", +"spike", +"spin", +"spirit", +"split", +"spoil", +"sponsor", +"spoon", +"sport", +"spot", +"spray", +"spread", +"spring", +"spy", +"square", +"squeeze", +"squirrel", +"stable", +"stadium", +"staff", +"stage", +"stairs", +"stamp", +"stand", +"start", +"state", +"stay", +"steak", +"steel", +"stem", +"step", +"stereo", +"stick", +"still", +"sting", +"stock", +"stomach", +"stone", +"stool", +"story", 
+"stove", +"strategy", +"street", +"strike", +"strong", +"struggle", +"student", +"stuff", +"stumble", +"style", +"subject", +"submit", +"subway", +"success", +"such", +"sudden", +"suffer", +"sugar", +"suggest", +"suit", +"summer", +"sun", +"sunny", +"sunset", +"super", +"supply", +"supreme", +"sure", +"surface", +"surge", +"surprise", +"surround", +"survey", +"suspect", +"sustain", +"swallow", +"swamp", +"swap", +"swarm", +"swear", +"sweet", +"swift", +"swim", +"swing", +"switch", +"sword", +"symbol", +"symptom", +"syrup", +"system", +"table", +"tackle", +"tag", +"tail", +"talent", +"talk", +"tank", +"tape", +"target", +"task", +"taste", +"tattoo", +"taxi", +"teach", +"team", +"tell", +"ten", +"tenant", +"tennis", +"tent", +"term", +"test", +"text", +"thank", +"that", +"theme", +"then", +"theory", +"there", +"they", +"thing", +"this", +"thought", +"three", +"thrive", +"throw", +"thumb", +"thunder", +"ticket", +"tide", +"tiger", +"tilt", +"timber", +"time", +"tiny", +"tip", +"tired", +"tissue", +"title", +"toast", +"tobacco", +"today", +"toddler", +"toe", +"together", +"toilet", +"token", +"tomato", +"tomorrow", +"tone", +"tongue", +"tonight", +"tool", +"tooth", +"top", +"topic", +"topple", +"torch", +"tornado", +"tortoise", +"toss", +"total", +"tourist", +"toward", +"tower", +"town", +"toy", +"track", +"trade", +"traffic", +"tragic", +"train", +"transfer", +"trap", +"trash", +"travel", +"tray", +"treat", +"tree", +"trend", +"trial", +"tribe", +"trick", +"trigger", +"trim", +"trip", +"trophy", +"trouble", +"truck", +"true", +"truly", +"trumpet", +"trust", +"truth", +"try", +"tube", +"tuition", +"tumble", +"tuna", +"tunnel", +"turkey", +"turn", +"turtle", +"twelve", +"twenty", +"twice", +"twin", +"twist", +"two", +"type", +"typical", +"ugly", +"umbrella", +"unable", +"unaware", +"uncle", +"uncover", +"under", +"undo", +"unfair", +"unfold", +"unhappy", +"uniform", +"unique", +"unit", +"universe", +"unknown", +"unlock", +"until", +"unusual", +"unveil", +"update", 
+"upgrade", +"uphold", +"upon", +"upper", +"upset", +"urban", +"urge", +"usage", +"use", +"used", +"useful", +"useless", +"usual", +"utility", +"vacant", +"vacuum", +"vague", +"valid", +"valley", +"valve", +"van", +"vanish", +"vapor", +"various", +"vast", +"vault", +"vehicle", +"velvet", +"vendor", +"venture", +"venue", +"verb", +"verify", +"version", +"very", +"vessel", +"veteran", +"viable", +"vibrant", +"vicious", +"victory", +"video", +"view", +"village", +"vintage", +"violin", +"virtual", +"virus", +"visa", +"visit", +"visual", +"vital", +"vivid", +"vocal", +"voice", +"void", +"volcano", +"volume", +"vote", +"voyage", +"wage", +"wagon", +"wait", +"walk", +"wall", +"walnut", +"want", +"warfare", +"warm", +"warrior", +"wash", +"wasp", +"waste", +"water", +"wave", +"way", +"wealth", +"weapon", +"wear", +"weasel", +"weather", +"web", +"wedding", +"weekend", +"weird", +"welcome", +"west", +"wet", +"whale", +"what", +"wheat", +"wheel", +"when", +"where", +"whip", +"whisper", +"wide", +"width", +"wife", +"wild", +"will", +"win", +"window", +"wine", +"wing", +"wink", +"winner", +"winter", +"wire", +"wisdom", +"wise", +"wish", +"witness", +"wolf", +"woman", +"wonder", +"wood", +"wool", +"word", +"work", +"world", +"worry", +"worth", +"wrap", +"wreck", +"wrestle", +"wrist", +"write", +"wrong", +"yard", +"year", +"yellow", +"you", +"young", +"youth", +"zebra", +"zero", +"zone", +"zoo", +0, +}; \ No newline at end of file diff --git a/src/wallet/crypter.cpp b/src/wallet/crypter.cpp index 3d231ef8f2..ef7c7513d3 100644 --- a/src/wallet/crypter.cpp +++ b/src/wallet/crypter.cpp @@ -104,11 +104,11 @@ bool CCrypter::Decrypt(const std::vector& vchCiphertext, CKeyingM nLen = dec.Decrypt(vchCiphertext.data(), vchCiphertext.size(), &vchPlaintext[0]); if(nLen == 0) return false; + vchPlaintext.resize(nLen); return true; } - static bool EncryptSecret(const CKeyingMaterial& vMasterKey, const CKeyingMaterial &vchPlaintext, const uint256& nIV, std::vector &vchCiphertext) { CCrypter 
cKeyCrypter; @@ -126,6 +126,7 @@ static bool DecryptSecret(const CKeyingMaterial& vMasterKey, const std::vector &vchCryptedSecret = (*mi).second.second; return DecryptKey(vMasterKey, vchCryptedSecret, vchPubKey, keyOut); } + } return false; } @@ -298,3 +311,103 @@ bool CCryptoKeyStore::EncryptKeys(CKeyingMaterial& vMasterKeyIn) } return true; } + +bool CCryptoKeyStore::AddCryptedWords(const uint256& hash, const std::vector &vchCryptedWords) +{ + { + LOCK(cs_KeyStore); + if (!SetCrypted()) + return false; + + nWordHash = hash; + vchCryptedBip39Words = vchCryptedWords; + } + return true; +} + +bool CCryptoKeyStore::AddCryptedPassphrase(const std::vector &vchCryptedPassphrase) +{ + { + LOCK(cs_KeyStore); + if (!SetCrypted()) + return false; + + vchCryptedBip39Passphrase = vchCryptedPassphrase; + } + return true; +} + +bool CCryptoKeyStore::AddCryptedVchSeed(const std::vector &vchCryptedVchSeed) +{ + { + LOCK(cs_KeyStore); + if (!SetCrypted()) + return false; + + vchCryptedBip39VchSeed = vchCryptedVchSeed; + } + return true; +} + +bool CCryptoKeyStore::EncryptBip39(CKeyingMaterial& vMasterKeyIn) +{ + { + LOCK(cs_KeyStore); + + CKeyingMaterial vchSecretWords(vchWords.begin(), vchWords.end()); + if (!EncryptSecret(vMasterKeyIn, vchSecretWords, nWordHash, vchCryptedBip39Words)) + return false; + + CKeyingMaterial vchSecretVchSeed(g_vchSeed.begin(), g_vchSeed.end()); + if (!EncryptSecret(vMasterKeyIn, vchSecretVchSeed, nWordHash, vchCryptedBip39VchSeed)) + return false; + + CKeyingMaterial vchDecryptedVchSeed; + if (!DecryptSecret(vMasterKeyIn, vchCryptedBip39VchSeed, nWordHash, vchDecryptedVchSeed)) { + return false; + } + + if (!vchPassphrase.empty()) { + CKeyingMaterial vchSecretPassphrase(vchPassphrase.begin(), vchPassphrase.end()); + if (!EncryptSecret(vMasterKeyIn, vchSecretPassphrase, nWordHash, vchCryptedBip39Passphrase)) + return false; + + CKeyingMaterial vchDecryptedPassphrase; + if (!DecryptSecret(vMasterKeyIn, vchCryptedBip39Passphrase, nWordHash, 
vchDecryptedPassphrase)) { + return false; + } + } + } + + return true; +} + +bool CCryptoKeyStore::DecryptBip39(const CKeyingMaterial& vMasterKeyIn) +{ + { + LOCK(cs_KeyStore); + CKeyingMaterial vchDecryptedWords; + if (!DecryptSecret(vMasterKeyIn, vchCryptedBip39Words, nWordHash, vchDecryptedWords)) { + return false; + } + + vchWords = std::vector(vchDecryptedWords.begin(), vchDecryptedWords.end()); + + CKeyingMaterial vchDecryptedVchSeed; + if (!DecryptSecret(vMasterKeyIn, vchCryptedBip39VchSeed, nWordHash, vchDecryptedVchSeed)) { + return false; + } + + g_vchSeed = std::vector(vchDecryptedVchSeed.begin(), vchDecryptedVchSeed.end()); + + if (!vchCryptedBip39Passphrase.empty()) { + CKeyingMaterial vchDecryptedPassphrase; + if (!DecryptSecret(vMasterKeyIn, vchCryptedBip39Passphrase, nWordHash, vchDecryptedPassphrase)) { + return false; + } + vchPassphrase = std::vector(vchDecryptedPassphrase.begin(), vchDecryptedPassphrase.end()); + } + } + + return true; +} \ No newline at end of file diff --git a/src/wallet/crypter.h b/src/wallet/crypter.h index ab8f6596d0..9b66ee463d 100644 --- a/src/wallet/crypter.h +++ b/src/wallet/crypter.h @@ -132,9 +132,17 @@ class CCryptoKeyStore : public CBasicKeyStore //! will encrypt previously unencrypted keys bool EncryptKeys(CKeyingMaterial& vMasterKeyIn); + //! 
EncryptBip39 words and passphrase + bool EncryptBip39(CKeyingMaterial& vMasterKeyIn); + bool DecryptBip39(const CKeyingMaterial& vMasterKeyIn); + bool Unlock(const CKeyingMaterial& vMasterKeyIn); CryptedKeyMap mapCryptedKeys; + std::vector vchCryptedBip39Words; + std::vector vchCryptedBip39Passphrase; + std::vector vchCryptedBip39VchSeed; + public: CCryptoKeyStore() : fUseCrypto(false), fDecryptionThoroughlyChecked(false) { @@ -160,6 +168,9 @@ class CCryptoKeyStore : public CBasicKeyStore bool Lock(); virtual bool AddCryptedKey(const CPubKey &vchPubKey, const std::vector &vchCryptedSecret); + virtual bool AddCryptedWords(const uint256& hash, const std::vector &vchCryptedWords); + virtual bool AddCryptedPassphrase(const std::vector &vchCryptedPassphrase); + virtual bool AddCryptedVchSeed(const std::vector &vchCryptedVchSeed); bool AddKeyPubKey(const CKey& key, const CPubKey &pubkey) override; bool HaveKey(const CKeyID &address) const override { diff --git a/src/wallet/init.cpp b/src/wallet/init.cpp index 065ceff06d..9d399b2566 100644 --- a/src/wallet/init.cpp +++ b/src/wallet/init.cpp @@ -16,28 +16,33 @@ std::string GetWalletHelpString(bool showDebug) { std::string strUsage = HelpMessageGroup(_("Wallet options:")); + strUsage += HelpMessageOpt("-bip44=", strprintf(_("Sets the wallet to use/not use bip44 12-words, non-bip44=0 or bip44=1 (default: 1). " + "Note: By default 12-words will automatically be generated for you (random word selection). See -mnemonic and -mnemonicpassphrase " + "below to create a wallet using a specific word list (use an existing bip-44 wallet word-list), or use the RPC/CLI getmywords or " + "dumpwallet to retrieve the auto-generated word-list. This flag is ignored if there is already an existing non-bip44 wallet."))); + strUsage += HelpMessageOpt("-discardfee=", strprintf(_("The fee rate (in %s/kB) that indicates your tolerance for discarding change by adding it to the fee (default: %s). 
" + "Note: An output is discarded if it is dust at this rate, but we will always discard up to the dust relay fee and a discard " + "fee above that is limited by the fee estimate for the longest target"), + CURRENCY_UNIT, FormatMoney(DEFAULT_DISCARD_FEE))); strUsage += HelpMessageOpt("-disablewallet", _("Do not load the wallet and disable wallet RPC calls")); + strUsage += HelpMessageOpt("-fallbackfee=", strprintf(_("A fee rate (in %s/kB) that will be used when fee estimation has insufficient data (default: %s)"), CURRENCY_UNIT, FormatMoney(DEFAULT_FALLBACK_FEE))); strUsage += HelpMessageOpt("-keypool=", strprintf(_("Set key pool size to (default: %u)"), DEFAULT_KEYPOOL_SIZE)); - strUsage += HelpMessageOpt("-fallbackfee=", strprintf(_("A fee rate (in %s/kB) that will be used when fee estimation has insufficient data (default: %s)"), - CURRENCY_UNIT, FormatMoney(DEFAULT_FALLBACK_FEE))); - strUsage += HelpMessageOpt("-discardfee=", strprintf(_("The fee rate (in %s/kB) that indicates your tolerance for discarding change by adding it to the fee (default: %s). 
" - "Note: An output is discarded if it is dust at this rate, but we will always discard up to the dust relay fee and a discard fee above that is limited by the fee estimate for the longest target"), - CURRENCY_UNIT, FormatMoney(DEFAULT_DISCARD_FEE))); - strUsage += HelpMessageOpt("-mintxfee=", strprintf(_("Fees (in %s/kB) smaller than this are considered zero fee for transaction creation (default: %s)"), - CURRENCY_UNIT, FormatMoney(DEFAULT_TRANSACTION_MINFEE))); - strUsage += HelpMessageOpt("-paytxfee=", strprintf(_("Fee (in %s/kB) to add to transactions you send (default: %s)"), - CURRENCY_UNIT, FormatMoney(payTxFee.GetFeePerK()))); + strUsage += HelpMessageOpt("-mintxfee=", strprintf(_("Fees (in %s/kB) smaller than this are considered zero fee for transaction creation (default: %s)"), CURRENCY_UNIT, FormatMoney(DEFAULT_TRANSACTION_MINFEE))); + strUsage += HelpMessageOpt("-mnemonic=", strprintf(_("A space separated list of 12-words used to import a bip44 wallet"))); + strUsage += HelpMessageOpt("-mnemonicpassphrase=", strprintf(_("Passphrase securing your 12-word mnemonic word-list"))); + strUsage += HelpMessageOpt("-paytxfee=", strprintf(_("Fee (in %s/kB) to add to transactions you send (default: %s)"), CURRENCY_UNIT, FormatMoney(payTxFee.GetFeePerK()))); strUsage += HelpMessageOpt("-rescan", _("Rescan the block chain for missing wallet transactions on startup")); strUsage += HelpMessageOpt("-salvagewallet", _("Attempt to recover private keys from a corrupt wallet on startup")); strUsage += HelpMessageOpt("-spendzeroconfchange", strprintf(_("Spend unconfirmed change when sending transactions (default: %u)"), DEFAULT_SPEND_ZEROCONF_CHANGE)); strUsage += HelpMessageOpt("-txconfirmtarget=", strprintf(_("If paytxfee is not set, include enough fee so transactions begin confirmation on average within n blocks (default: %u)"), DEFAULT_TX_CONFIRM_TARGET)); - strUsage += HelpMessageOpt("-walletrbf", strprintf(_("Send transactions with full-RBF opt-in enabled (default: 
%u)"), DEFAULT_WALLET_RBF)); strUsage += HelpMessageOpt("-upgradewallet", _("Upgrade wallet to latest format on startup")); + strUsage += HelpMessageOpt("-walletrbf", strprintf(_("Send transactions with full-RBF opt-in enabled (default: %u)"), DEFAULT_WALLET_RBF)); strUsage += HelpMessageOpt("-wallet=", _("Specify wallet file (within data directory)") + " " + strprintf(_("(default: %s)"), DEFAULT_WALLET_DAT)); strUsage += HelpMessageOpt("-walletbroadcast", _("Make the wallet broadcast transactions") + " " + strprintf(_("(default: %u)"), DEFAULT_WALLETBROADCAST)); strUsage += HelpMessageOpt("-walletnotify=", _("Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)")); strUsage += HelpMessageOpt("-zapwallettxes=", _("Delete all wallet transactions and only recover those parts of the blockchain through -rescan on startup") + " " + _("(1 = keep tx meta data e.g. account owner and payment request information, 2 = drop tx meta data)")); + strUsage += HelpMessageOpt("-miningaddress=
", _("When getblocktemplate is called. It will create the coinbase transaction using this address(default: empty string)")); if (showDebug) { diff --git a/src/wallet/rpcdump.cpp b/src/wallet/rpcdump.cpp index 0ed8f1a165..abd33be1fb 100644 --- a/src/wallet/rpcdump.cpp +++ b/src/wallet/rpcdump.cpp @@ -659,16 +659,44 @@ UniValue dumpwallet(const JSONRPCRequest& request) CKeyID seed_id = pwallet->GetHDChain().seed_id; if (!seed_id.IsNull()) { - CKey seed; - if (pwallet->GetKey(seed_id, seed)) { + + if (!pwallet->GetHDChain().IsBip44()) { + CKey seed; + if (pwallet->GetKey(seed_id, seed)) { + CExtKey masterKey; + masterKey.SetSeed(seed.begin(), seed.size()); + + CRavenExtKey b58extkey; + b58extkey.SetKey(masterKey); + + file << "# extended private masterkey: " << b58extkey.ToString() << "\n\n"; + } + } + + if(pwallet->GetHDChain().IsBip44()) + { + CWalletDB walletdb(pwallet->GetDBHandle()); + + std::vector vchWords; + std::vector vchPassphrase; + std::vector vchSeed; + uint256 hash; + + pwallet->GetBip39Data(hash, vchWords, vchPassphrase, vchSeed); + CExtKey masterKey; - masterKey.SetSeed(seed.begin(), seed.size()); + masterKey.SetSeed(vchSeed.data(), vchSeed.size()); CRavenExtKey b58extkey; b58extkey.SetKey(masterKey); file << "# extended private masterkey: " << b58extkey.ToString() << "\n\n"; - } + + file << "# HD seed: " << HexStr(vchSeed) << "\n"; + file << "# mnemonic: " << std::string(vchWords.begin(), vchWords.end()).c_str() << "\n"; + file << "# mnemonic passphrase: " << std::string(vchPassphrase.begin(), vchPassphrase.end()).c_str() << "\n"; + file << "# hash of words: " << hash.GetHex() << "\n\n"; + } } for (std::vector >::const_iterator it = vKeyBirth.begin(); it != vKeyBirth.end(); it++) { const CKeyID &keyid = it->second; diff --git a/src/wallet/rpcwallet.cpp b/src/wallet/rpcwallet.cpp index 4a4be9da89..3e1da29db1 100644 --- a/src/wallet/rpcwallet.cpp +++ b/src/wallet/rpcwallet.cpp @@ -131,6 +131,52 @@ std::string AccountFromValue(const UniValue& value) 
return strAccount; } +UniValue getmywords(const JSONRPCRequest& request) +{ + CWallet * const pwallet = GetWalletForJSONRPCRequest(request); + if (!EnsureWalletIsAvailable(pwallet, request.fHelp)) { + return NullUniValue; + } + + if (request.fHelp || request.params.size() > 1) + throw std::runtime_error( + "getmywords ( \"account\" )\n" + "\nReturns the 12 words and passphrase used by BIP39 to generate the wallets private keys\n" + "Only returns value if wallet was created by the 12 words import/generation\n" + + "\nResult:\n" + "\"word_list:\" (string) A string of words separated by spaces\n" + "\"passphrase:\" (optional) Only show if passphrase was used when creating the wallet\n" + "\nExamples:\n" + + HelpExampleCli("getmywords", "") + + HelpExampleRpc("getmywords", "") + ); + + LOCK2(cs_main, pwallet->cs_wallet); + + EnsureWalletIsUnlocked(pwallet); + + if (!pwallet->GetHDChain().IsBip44()) { + throw JSONRPCError(RPC_WALLET_ERROR, "Error: Wallet doesn't have 12 words. Only new wallets generated by the mnemonic phrase will have 12 words"); + } + + std::vector vchWords; + std::vector vchPassphrase; + std::vector vchSeed; + uint256 hash; + + pwallet->GetBip39Data(hash, vchWords, vchPassphrase, vchSeed); + + UniValue ret(UniValue::VOBJ); + + ret.pushKV("word_list", std::string(vchWords.begin(), vchWords.end())); + if (vchPassphrase.size()) + ret.pushKV("passphrase", std::string(vchPassphrase.begin(), vchPassphrase.end())); + + return ret; +} + + UniValue getnewaddress(const JSONRPCRequest& request) { CWallet * const pwallet = GetWalletForJSONRPCRequest(request); @@ -3492,6 +3538,7 @@ static const CRPCCommand commands[] = { "wallet", "getaccount", &getaccount, {"address"} }, { "wallet", "getaddressesbyaccount", &getaddressesbyaccount, {"account"} }, { "wallet", "getbalance", &getbalance, {"account","minconf","include_watchonly"} }, + { "wallet", "getmywords", &getmywords, {} }, { "wallet", "getnewaddress", &getnewaddress, {"account"} }, { "wallet", 
"getrawchangeaddress", &getrawchangeaddress, {} }, { "wallet", "getreceivedbyaccount", &getreceivedbyaccount, {"account","minconf"} }, diff --git a/src/wallet/wallet.cpp b/src/wallet/wallet.cpp index d04261803f..6cf7e85c41 100644 --- a/src/wallet/wallet.cpp +++ b/src/wallet/wallet.cpp @@ -32,6 +32,7 @@ #include "ui_interface.h" #include "utilmoneystr.h" #include "wallet/fees.h" +#include "wallet/bip39.h" #include @@ -51,6 +52,9 @@ bool fWalletRbf = DEFAULT_WALLET_RBF; const char * DEFAULT_WALLET_DAT = "wallet.dat"; const uint32_t BIP32_HARDENED_KEY_LIMIT = 0x80000000; +std::string my_words; +std::string my_passphrase; + /** * Fees smaller than this (in satoshi) are considered zero fee (for transaction creation) * Override with -mintxfee @@ -174,47 +178,77 @@ CPubKey CWallet::GenerateNewKey(CWalletDB &walletdb, bool internal) void CWallet::DeriveNewChildKey(CWalletDB &walletdb, CKeyMetadata& metadata, CKey& secret, bool internal) { // for now we use a fixed keypath scheme of m/0'/0'/k - CKey seed; //seed (256bit) CExtKey masterKey; //hd master key + + CExtKey purposeKey; //key at m/purpose' + CExtKey coinTypeKey; //key at m/purpose'/coin_type' + CExtKey accountKey; //key at m/0' CExtKey chainChildKey; //key at m/0'/0' (external) or m/0'/1' (internal) CExtKey childKey; //key at m/0'/0'/' - // try to get the seed - if (!GetKey(hdChain.seed_id, seed)) - throw std::runtime_error(std::string(__func__) + ": seed not found"); - masterKey.SetSeed(seed.begin(), seed.size()); + uint32_t nAccountIndex = 0; // TODO add HDAccounts management - // derive m/0' - // use hardened derivation (child keys >= 0x80000000 are hardened after bip32) - masterKey.Derive(accountKey, BIP32_HARDENED_KEY_LIMIT); + // try to get the seed + if (!hdChain.IsBip44()) { + CKey seed; //seed (256bit) + if (!GetKey(hdChain.seed_id, seed)) + throw std::runtime_error(std::string(__func__) + ": seed not found"); + masterKey.SetSeed(seed.begin(), seed.size()); + } else { + masterKey.SetSeed(g_vchSeed.data(), 
g_vchSeed.size()); + } - // derive m/0'/0' (external chain) OR m/0'/1' (internal chain) - assert(internal ? CanSupportFeature(FEATURE_HD_SPLIT) : true); - accountKey.Derive(chainChildKey, BIP32_HARDENED_KEY_LIMIT+(internal ? 1 : 0)); + // Select which chain we are using depending on if this is a change address or not + uint32_t& nChildIndex = internal ? hdChain.nInternalChainCounter : hdChain.nExternalChainCounter; - // derive child key at next index, skip keys already known to the wallet do { - // always derive hardened keys - // childIndex | BIP32_HARDENED_KEY_LIMIT = derive childIndex in hardened child-index-range - // example: 1 | BIP32_HARDENED_KEY_LIMIT == 0x80000001 == 2147483649 - if (internal) { - chainChildKey.Derive(childKey, hdChain.nInternalChainCounter | BIP32_HARDENED_KEY_LIMIT); - metadata.hdKeypath = "m/0'/1'/" + std::to_string(hdChain.nInternalChainCounter) + "'"; - hdChain.nInternalChainCounter++; - } - else { - chainChildKey.Derive(childKey, hdChain.nExternalChainCounter | BIP32_HARDENED_KEY_LIMIT); - metadata.hdKeypath = "m/0'/0'/" + std::to_string(hdChain.nExternalChainCounter) + "'"; - hdChain.nExternalChainCounter++; - } + + if(hdChain.IsBip44()) + { + // Use BIP44 keypath scheme i.e. m / purpose' / coin_type' / account' / change / address_index + + // derive m/purpose' + masterKey.Derive(purposeKey, 44 | BIP32_HARDENED_KEY_LIMIT); + // derive m/purpose'/coin_type' + purposeKey.Derive(coinTypeKey, GetParams().ExtCoinType() | BIP32_HARDENED_KEY_LIMIT); + // derive m/purpose'/coin_type'/account' + coinTypeKey.Derive(accountKey, nAccountIndex | BIP32_HARDENED_KEY_LIMIT); + // derive m/purpose'/coin_type'/account'/change + accountKey.Derive(chainChildKey, internal ? 1 : 0); + // derive m/purpose'/coin_type'/account'/change/address_index + chainChildKey.Derive(childKey, nChildIndex); + } + else + { + // Use BIP32 keypath scheme i.e. 
m / account' / change' / address_index' + + // derive m/account' + masterKey.Derive(accountKey, nAccountIndex | BIP32_HARDENED_KEY_LIMIT); + // derive m/account'/change + accountKey.Derive(chainChildKey, BIP32_HARDENED_KEY_LIMIT + (internal ? 1 : 0)); + // derive m/account'/change/address_index + chainChildKey.Derive(childKey, BIP32_HARDENED_KEY_LIMIT | nChildIndex); + } + + // increment childkey index + nChildIndex++; } while (HaveKey(childKey.key.GetPubKey().GetID())); + secret = childKey.key; + + if(hdChain.IsBip44()) + metadata.hdKeypath = strprintf("m/44'/%d'/%d'/%d/%d", GetParams().ExtCoinType(), nAccountIndex, internal, nChildIndex - 1); + else + metadata.hdKeypath = strprintf("m/%d'/%d'/%d'", nAccountIndex, internal, nChildIndex - 1); + metadata.hd_seed_id = hdChain.seed_id; + // update the chain model in the database if (!walletdb.WriteHDChain(hdChain)) throw std::runtime_error(std::string(__func__) + ": Writing HD chain model failed"); + } bool CWallet::AddKeyPubKeyWithDB(CWalletDB &walletdb, const CKey& secret, const CPubKey &pubkey) @@ -290,6 +324,41 @@ bool CWallet::LoadCryptedKey(const CPubKey &vchPubKey, const std::vector &vchCryptedWords) +{ + return CCryptoKeyStore::AddCryptedWords(hash, vchCryptedWords); +} + +bool CWallet::LoadCryptedPassphrase(const std::vector &vchCryptedPassphrase) +{ + return CCryptoKeyStore::AddCryptedPassphrase(vchCryptedPassphrase); +} + +bool CWallet::LoadCryptedVchSeed(const std::vector &vchCryptedVchSeed) +{ + return CCryptoKeyStore::AddCryptedVchSeed(vchCryptedVchSeed); +} + +bool CWallet::LoadWords(const uint256& hash, const std::vector &vchWords) +{ + return CCryptoKeyStore::AddWords(hash, vchWords); +} + +bool CWallet::LoadPassphrase(const std::vector &vchPassphrase) +{ + return CCryptoKeyStore::AddPassphrase(vchPassphrase); +} + +bool CWallet::LoadVchSeed(const std::vector &vchSeed) +{ + return CCryptoKeyStore::AddVchSeed(vchSeed); +} + +void CWallet::GetBip39Data(uint256& hash, std::vector &vchWords, std::vector 
&vchPassphrase, std::vector& vchSeed) +{ + CCryptoKeyStore::GetBip39Data(hash, vchWords, vchPassphrase, vchSeed); +} + /** * Update wallet first key creation time. This should be called whenever keys * are added to the wallet, with the oldest key creation time. @@ -376,8 +445,9 @@ bool CWallet::Unlock(const SecureString& strWalletPassphrase) return false; if (!crypter.Decrypt(pMasterKey.second.vchCryptedKey, _vMasterKey)) continue; // try another master key - if (CCryptoKeyStore::Unlock(_vMasterKey)) + if (CCryptoKeyStore::Unlock(_vMasterKey)) { return true; + } } } return false; @@ -646,6 +716,43 @@ bool CWallet::EncryptWallet(const SecureString& strWalletPassphrase) assert(false); } + if(hdChain.IsBip44()) { + pwalletdbEncryption->EraseBip39Words( false); + pwalletdbEncryption->EraseBip39Passphrase(false); + pwalletdbEncryption->EraseBip39VchSeed(false); + + if (!EncryptBip39(_vMasterKey)) + { + pwalletdbEncryption->TxnAbort(); + delete pwalletdbEncryption; + // We now probably have half of our keys encrypted in memory, and half not... + // die and let the user reload the unencrypted wallet. 
+ assert(false); + } + + if (!pwalletdbEncryption->WriteBip39Words(nWordHash, vchCryptedBip39Words, true)) { + pwalletdbEncryption->TxnAbort(); + delete pwalletdbEncryption; + assert(false); + } + + if (!vchCryptedBip39Passphrase.empty()) { + if (!pwalletdbEncryption->WriteBip39Passphrase(vchCryptedBip39Passphrase, true)) { + pwalletdbEncryption->TxnAbort(); + delete pwalletdbEncryption; + assert(false); + } + } + + if (!vchCryptedBip39VchSeed.empty()) { + if (!pwalletdbEncryption->WriteBip39VchSeed(vchCryptedBip39VchSeed, true)) { + pwalletdbEncryption->TxnAbort(); + delete pwalletdbEncryption; + assert(false); + } + } + } + // Encryption was introduced in version 0.4.0 SetMinVersion(FEATURE_WALLETCRYPT, pwalletdbEncryption, true); @@ -663,19 +770,28 @@ bool CWallet::EncryptWallet(const SecureString& strWalletPassphrase) Unlock(strWalletPassphrase); // if we are using HD, replace the HD seed with a new one - if (IsHDEnabled()) { + if (IsHDEnabled() && !hdChain.IsBip44()) { if (!SetHDSeed(GenerateNewSeed())) { return false; } } - NewKeyPool(); + if (!hdChain.IsBip44()) + NewKeyPool(); + Lock(); // Need to completely rewrite the wallet file; if we don't, bdb might keep // bits of the unencrypted private key in slack space in the database file. 
dbw->Rewrite(); + if (hdChain.IsBip44()) { + CWalletDB walletdb(*dbw); + walletdb.WriteBip39Words(nWordHash, vchCryptedBip39Words, true); + walletdb.WriteBip39VchSeed(vchCryptedBip39VchSeed, true); + if (!vchCryptedBip39Passphrase.empty()) + walletdb.WriteBip39Passphrase(vchCryptedBip39Passphrase, true); + } } NotifyStatusChanged(this); @@ -1394,9 +1510,49 @@ CAmount CWallet::GetChange(const CTransaction& tx) const CPubKey CWallet::GenerateNewSeed() { - CKey key; - key.MakeNewKey(true); - return DeriveNewSeed(key); + // If bip44 is not set to true on wallet creation + if (!hdChain.IsBip44()) { + hdChain.nVersion = CHDChain::VERSION_HD_CHAIN_SPLIT; + CKey key; + key.MakeNewKey(true); + return DeriveNewSeed(key); + } + + CHDChain newHdChain(this); + newHdChain.UseBip44(hdChain.IsBip44()); + + // NOTE: empty mnemonic means "generate a new one for me" + std::string strMnemonic = gArgs.GetArg("-mnemonic", ""); + // NOTE: default mnemonic passphrase is an empty string + std::string strMnemonicPassphrase = gArgs.GetArg("-mnemonicpassphrase", ""); + + if (!my_words.empty()) { + strMnemonic = my_words; + } + + if (!my_passphrase.empty()) { + strMnemonicPassphrase = my_passphrase; + } + + SecureString vchMnemonic(strMnemonic.begin(), strMnemonic.end()); + SecureString vchMnemonicPassphrase(strMnemonicPassphrase.begin(), strMnemonicPassphrase.end()); + + SecureVector& vchSeed = newHdChain.vchSeed; + if (!newHdChain.SetMnemonic(vchMnemonic, vchMnemonicPassphrase, vchSeed)) + throw std::runtime_error(std::string(__func__) + ": SetMnemonic failed"); + + g_vchSeed = std::vector(vchSeed.begin(), vchSeed.end()); + + CPubKey seed(vchSeed.begin(), vchSeed.end()); + newHdChain.seed_id = seed.GetID(); + + SetHDChain(newHdChain, false); + + my_passphrase.clear(); + my_words.clear(); + + return seed; + } CPubKey CWallet::DeriveNewSeed(const CKey& key) @@ -1432,7 +1588,7 @@ bool CWallet::SetHDSeed(const CPubKey& seed) // store the keyid (hash160) together with // the child index counter 
in the database // as a hdchain object - CHDChain newHdChain; + CHDChain newHdChain(this); newHdChain.nVersion = CanSupportFeature(FEATURE_HD_SPLIT) ? CHDChain::VERSION_HD_CHAIN_SPLIT : CHDChain::VERSION_HD_BASE; newHdChain.seed_id = seed.GetID(); SetHDChain(newHdChain, false); @@ -1455,6 +1611,11 @@ bool CWallet::IsHDEnabled() const return !hdChain.seed_id.IsNull(); } +bool CWallet::IsBip44Enabled() const +{ + return IsHDEnabled() && hdChain.bUse_bip44; +} + int64_t CWalletTx::GetTxTime() const { int64_t n = nTimeSmart; @@ -2792,7 +2953,7 @@ bool CWallet::SelectAssetsMinConf(const CAmount& nTargetValue, const int nConfMi CAmount nTotalLower = 0; random_shuffle(vCoins.begin(), vCoins.end(), GetRandInt); - + #pragma GCC diagnostic ignored "-Wmaybe-uninitialized" for (const COutput &output : vCoins) { if (!output.fSpendable) @@ -2815,8 +2976,6 @@ bool CWallet::SelectAssetsMinConf(const CAmount& nTargetValue, const int nConfMi int nType = -1; bool fIsOwner = false; if (!coin.txout.scriptPubKey.IsAssetScript(nType, fIsOwner)) { - // TODO - Remove std::cout this before mainnet release - std::cout << "This shouldn't be occuring: Non Asset Script pub key made it to the SelectAssetsMinConf function call. Look into this!" 
<< std::endl; continue; } @@ -2882,6 +3041,8 @@ bool CWallet::SelectAssetsMinConf(const CAmount& nTargetValue, const int nConfMi if (!coinLowestLarger || !coinLowestLargerAmount) return false; setCoinsRet.insert(coinLowestLarger.get()); + + #pragma GCC diagnostic ignored "-Wmaybe-uninitialized" nValueRet += coinLowestLargerAmount.get(); return true; } @@ -3755,6 +3916,11 @@ bool CWallet::AddAccountingEntry(const CAccountingEntry& acentry, CWalletDB *pwa return true; } +bool CWallet::IsFirstRun() +{ + return mapKeys.empty() && mapCryptedKeys.empty() && mapWatchKeys.empty() && setWatchOnly.empty() && mapScripts.empty(); +} + DBErrors CWallet::LoadWallet(bool& fFirstRunRet) { LOCK2(cs_main, cs_wallet); @@ -4535,7 +4701,7 @@ std::vector CWallet::GetDestValues(const std::string& prefix) const return values; } -CWallet* CWallet::CreateWalletFromFile(const std::string walletFile) +CWallet* CWallet:: CreateWalletFromFile(const std::string walletFile) { // needed to restore wallet transaction meta data after -zapwallettxes std::vector vWtx; @@ -4605,6 +4771,7 @@ CWallet* CWallet::CreateWalletFromFile(const std::string walletFile) walletInstance->SetMaxVersion(nMaxVersion); } + if (fFirstRun) { // ensure this wallet.dat can only be opened by clients supporting HD with chain split and expects no default key @@ -4612,12 +4779,27 @@ CWallet* CWallet::CreateWalletFromFile(const std::string walletFile) InitError(strprintf(_("Error creating %s: You can't create non-HD wallets with this version."), walletFile)); return nullptr; } + walletInstance->SetMinVersion(FEATURE_NO_DEFAULT_KEY); + walletInstance->UseBip44(gArgs.GetBoolArg("-bip44", true)); + LogPrintf("parameter interaction: -bip44 wallet enabled: %s\n", gArgs.GetBoolArg("-bip44", true)); + + if (!walletInstance->hdChain.IsBip44()) { + CPubKey seed = walletInstance->GenerateNewSeed(); + if (!walletInstance->SetHDSeed(seed)) + throw std::runtime_error(std::string(__func__) + ": Storing HD seed failed"); + } + + // If this is 
the first run, show the bip44 gui to the user + if (walletInstance->hdChain.IsBip44()){ + if (gArgs.GetArg("-mnemonic", "").empty() && gArgs.GetArg("-mnemonicpassphrase", "").empty()) + uiInterface.ShowMnemonic(CClientUIInterface::MODAL); + } + // generate a new seed - CPubKey seed = walletInstance->GenerateNewSeed(); - if (!walletInstance->SetHDSeed(seed)) - throw std::runtime_error(std::string(__func__) + ": Storing HD seed failed"); + if (walletInstance->hdChain.IsBip44()) + walletInstance->GenerateNewSeed(); // Top up the keypool if (!walletInstance->TopUpKeyPool()) { @@ -4646,6 +4828,39 @@ CWallet* CWallet::CreateWalletFromFile(const std::string walletFile) // Try to top up keypool. No-op if the wallet is locked. walletInstance->TopUpKeyPool(); + if (walletInstance->hdChain.IsBip44() && fFirstRun) { + CWalletDB walletdb(walletInstance->GetDBHandle()); + + std::string strWords(walletInstance->hdChain.vchMnemonic.begin(), walletInstance->hdChain.vchMnemonic.end()); + std::vector vchWords(walletInstance->hdChain.vchMnemonic.begin(), walletInstance->hdChain.vchMnemonic.end()); + + auto hash = Hash(strWords.begin(), strWords.end()); + if (!walletdb.WriteBip39Words(hash, vchWords, false)) { + InitError(_("Error writing bip 39 words to database")); + return nullptr; + } + + walletInstance->LoadWords(hash, vchWords); + + std::vector vchSeed(walletInstance->hdChain.vchSeed.begin(), walletInstance->hdChain.vchSeed.end()); + if (!walletdb.WriteBip39VchSeed(vchSeed, false)) { + InitError(_("Error writing bip 39 vchseed to database")); + return nullptr; + } + + walletInstance->LoadVchSeed(vchSeed); + + if (!walletInstance->hdChain.vchMnemonicPassphrase.empty()) { + std::vector vchPassphrase(walletInstance->hdChain.vchMnemonicPassphrase.begin(), walletInstance->hdChain.vchMnemonicPassphrase.end()); + if (!walletdb.WriteBip39Passphrase(vchPassphrase, false)) { + InitError(_("Error writing bip 39 passphrase to database")); + return nullptr; + } + + 
walletInstance->LoadPassphrase(vchPassphrase); + } + } + CBlockIndex *pindexRescan = chainActive.Genesis(); if (!gArgs.GetBoolArg("-rescan", false)) { diff --git a/src/wallet/wallet.h b/src/wallet/wallet.h index a9ebfdce87..b7ef76a38d 100644 --- a/src/wallet/wallet.h +++ b/src/wallet/wallet.h @@ -43,6 +43,9 @@ extern unsigned int nTxConfirmTarget; extern bool bSpendZeroConfChange; extern bool fWalletRbf; +extern std::string my_words; +extern std::string my_passphrase; + static const unsigned int DEFAULT_KEYPOOL_SIZE = 1000; //! -paytxfee default static const CAmount DEFAULT_TRANSACTION_FEE = 0; @@ -780,13 +783,13 @@ class CWallet final : public CCryptoKeyStore, public CValidationInterface unsigned int nMasterKeyMaxID; // Create wallet with dummy database handle - CWallet(): dbw(new CWalletDBWrapper()) + CWallet(): hdChain(this), dbw(new CWalletDBWrapper()) { SetNull(); } // Create wallet with passed-in database handle - explicit CWallet(std::unique_ptr dbw_in) : dbw(std::move(dbw_in)) + explicit CWallet(std::unique_ptr dbw_in) : hdChain(this), dbw(std::move(dbw_in)) { SetNull(); } @@ -928,6 +931,13 @@ class CWallet final : public CCryptoKeyStore, public CValidationInterface bool AddCryptedKey(const CPubKey &vchPubKey, const std::vector &vchCryptedSecret) override; //! 
Adds an encrypted key to the store, without saving it to disk (used by LoadWallet) bool LoadCryptedKey(const CPubKey &vchPubKey, const std::vector &vchCryptedSecret); + bool LoadCryptedWords(const uint256& hash, const std::vector &vchCryptedWords); + bool LoadCryptedPassphrase(const std::vector &vchCryptedPassphrase); + bool LoadCryptedVchSeed(const std::vector &vchCryptedVchSeed); + bool LoadWords(const uint256& hash, const std::vector &vchWords); + void GetBip39Data(uint256& hash, std::vector &vchWords, std::vector &vchPassphrase, std::vector& vchSeed); + bool LoadPassphrase(const std::vector &vchPassphrase); + bool LoadVchSeed(const std::vector &vchSeed); bool AddCScript(const CScript& redeemScript) override; bool LoadCScript(const CScript& redeemScript); @@ -1076,6 +1086,8 @@ class CWallet final : public CCryptoKeyStore, public CValidationInterface CAmount GetChange(const CTransaction& tx) const; void SetBestChain(const CBlockLocator& loc) override; + bool IsFirstRun(); + DBErrors LoadWallet(bool& fFirstRunRet); DBErrors ZapWalletTx(std::vector& vWtx); DBErrors ZapSelectTx(std::vector& vHashIn, std::vector& vHashOut); @@ -1183,10 +1195,16 @@ class CWallet final : public CCryptoKeyStore, public CValidationInterface bool SetHDChain(const CHDChain& chain, bool memonly); const CHDChain& GetHDChain() const { return hdChain; } + void UseBip44( bool b = true) { hdChain.UseBip44(b);} + /* Returns true if HD is enabled */ bool IsHDEnabled() const; - /* Generates a new HD seed (will not be activated) */ + /* Returns true if HD is enabled with Bip44 */ + bool IsBip44Enabled() const; + + + /* Generates a new HD seed (will not be activated) */ CPubKey GenerateNewSeed(); /* Derives a new HD seed (will not be activated) */ diff --git a/src/wallet/walletdb.cpp b/src/wallet/walletdb.cpp index a96ec0c706..edf48ad703 100644 --- a/src/wallet/walletdb.cpp +++ b/src/wallet/walletdb.cpp @@ -272,7 +272,7 @@ bool ReadKeyValue(CWallet* pwallet, CDataStream& ssKey, CDataStream& ssValue, 
// If a client has a wallet.dat that contains asset transactions, but we are syncing the chain. // we want to make sure that we don't fail to load this wallet transaction just because it is an asset transaction // before asset are active - if (state.GetRejectReason() != "bad-txns-is-asset-and-asset-not-active") { + if (state.GetRejectReason() != "bad-txns-is-asset-and-asset-not-active" && state.GetRejectReason() != "bad-txns-transfer-asset-bad-deserialize") { strErr = state.GetRejectReason(); return false; } @@ -508,7 +508,7 @@ bool ReadKeyValue(CWallet* pwallet, CDataStream& ssKey, CDataStream& ssValue, } else if (strType == "hdchain") { - CHDChain chain; + CHDChain chain(pwallet); ssValue >> chain; if (!pwallet->SetHDChain(chain, true)) { @@ -516,6 +516,72 @@ bool ReadKeyValue(CWallet* pwallet, CDataStream& ssKey, CDataStream& ssValue, return false; } } + else if (strType == "cbip39words") + { + std::pair > valuePair; + ssValue >> valuePair; + + if (!pwallet->LoadCryptedWords(valuePair.first, valuePair.second)) + { + strErr = "Error reading wallet database: LoadCryptedWords failed"; + return false; + } + } + else if (strType == "cbip39passphrase") + { + std::vector vchPassphrase; + ssValue >> vchPassphrase; + + if (!pwallet->LoadCryptedPassphrase(vchPassphrase)) + { + strErr = "Error reading wallet database: LoadCryptedPassphrase failed"; + return false; + } + } + else if (strType == "cbip39vchseed") + { + std::vector vchSeed; + ssValue >> vchSeed; + + if (!pwallet->LoadCryptedVchSeed(vchSeed)) + { + strErr = "Error reading wallet database: LoadCryptedVchSeed failed"; + return false; + } + } + else if (strType == "bip39words") + { + std::pair > valuePair; + ssValue >> valuePair; + + if (!pwallet->LoadWords(valuePair.first, valuePair.second)) + { + strErr = "Error reading wallet database: LoadWords failed"; + return false; + } + } + else if (strType == "bip39passphrase") + { + std::vector vchPassphrase; + ssValue >> vchPassphrase; + + if 
(!pwallet->LoadPassphrase(vchPassphrase)) + { + strErr = "Error reading wallet database: LoadPassphrase failed"; + return false; + } + } + else if (strType == "bip39vchseed") + { + std::vector vchSeed; + ssValue >> vchSeed; + + if (!pwallet->LoadVchSeed(vchSeed)) + { + strErr = "Error reading wallet database: LoadVchSeed failed"; + return false; + } + } } catch (...) { return false; @@ -577,6 +643,7 @@ DBErrors CWalletDB::LoadWallet(CWallet* pwallet) result = DB_CORRUPT; else { + LogPrintf("DB failed to Read Key Value. Type: %s, Error: %s\n", strType, strErr); // Leave other errors alone, if we try to fix them we might make things worse. fNoncriticalErrors = true; // ... but do warn the user there is something wrong. if (strType == "tx") { @@ -837,12 +904,80 @@ bool CWalletDB::WriteDestData(const std::string &address, const std::string &key return WriteIC(std::make_pair(std::string("destdata"), std::make_pair(address, key)), value); } +bool CWalletDB::WriteBip39Words(const uint256& hash, const std::vector& vchWords, bool fEncrypted) +{ + std::string key = fEncrypted ? "c" : ""; + key.append("bip39words"); + return WriteIC(key, std::make_pair(hash,vchWords), true); +} + +bool CWalletDB::WriteBip39Passphrase(const std::vector& vchPassphrase, bool fEncrypted) +{ + std::string key = fEncrypted ? "c" : ""; + key.append("bip39passphrase"); + return WriteIC(key, vchPassphrase, true); +} + +bool CWalletDB::WriteBip39VchSeed(const std::vector& vchSeed, bool fEncrypted) +{ + std::string key = fEncrypted ? "c" : ""; + key.append("bip39vchseed"); + return WriteIC(key, vchSeed, true); +} + +bool CWalletDB::ReadBip39Words(uint256& hash, std::vector& vchWords, bool fEncrypted) +{ + std::string key = fEncrypted ? 
"c" : ""; + key.append("bip39words"); + std::pair> valuePair; + bool ret = batch.Read(key, valuePair); + if (ret) { + hash = valuePair.first; + vchWords = valuePair.second; + } + return ret; +} + +bool CWalletDB::ReadBip39Passphrase(std::vector& vchPassphrase, bool fEncrypted) +{ + std::string key = fEncrypted ? "c" : ""; + key.append("bip39passphrase"); + return batch.Read(key, vchPassphrase); +} + +bool CWalletDB::ReadBip39VchSeed(std::vector& vchSeed, bool fEncrypted) +{ + std::string key = fEncrypted ? "c" : ""; + key.append("bip39vchseed"); + return batch.Read(key, vchSeed); +} + +bool CWalletDB::EraseBip39Words(bool fEncrypted) +{ + std::string key = fEncrypted ? "c" : ""; + key.append("bip39words"); + return EraseIC(key); +} + +bool CWalletDB::EraseBip39Passphrase(bool fEncrypted) +{ + std::string key = fEncrypted ? "c" : ""; + key.append("bip39passphrase"); + return EraseIC(key); +} + +bool CWalletDB::EraseBip39VchSeed(bool fEncrypted) +{ + std::string key = fEncrypted ? "c" : ""; + key.append("bip39vchseed"); + return EraseIC(key); +} + bool CWalletDB::EraseDestData(const std::string &address, const std::string &key) { return EraseIC(std::make_pair(std::string("destdata"), std::make_pair(address, key))); } - bool CWalletDB::WriteHDChain(const CHDChain& chain) { return WriteIC(std::string("hdchain"), chain); @@ -872,3 +1007,43 @@ bool CWalletDB::WriteVersion(int nVersion) { return batch.WriteVersion(nVersion); } + +void CHDChain::SetSeedFromSeedId() +{ + // try to get the seed + CKey seed; + if (pwallet && !pwallet->IsLocked()) { + if (!pwallet->GetKey(seed_id, seed)) { + throw std::runtime_error(std::string(__func__) + ": seed not found"); + } + vchSeed = SecureVector(seed.begin(), seed.end()); + } + + if (!pwallet) + throw std::runtime_error(std::string(__func__) + ": wallet not found"); +} + +bool CHDChain::SetMnemonic(const SecureString& ssMnemonic, const SecureString& ssMnemonicPassphrase, SecureVector& vchSeed) +{ + SecureString ssMnemonicTmp = 
ssMnemonic; + + // can't (re)set mnemonic if seed was already set + if (!IsNull()) + return false; + + // empty mnemonic i.e. "generate a new one" + if (ssMnemonic.empty()) { + ssMnemonicTmp = CMnemonic::Generate(bUse_bip44 ? 128 : 256); + } + // NOTE: default mnemonic passphrase is an empty string + if (!CMnemonic::Check(ssMnemonicTmp)) { + throw std::runtime_error(std::string(__func__) + ": invalid mnemonic: `" + std::string(ssMnemonicTmp.c_str()) + "`"); + } + + CMnemonic::ToSeed(ssMnemonicTmp, ssMnemonicPassphrase, vchSeed); + + vchMnemonic = SecureVector(ssMnemonicTmp.begin(), ssMnemonicTmp.end()); + vchMnemonicPassphrase = SecureVector(ssMnemonicPassphrase.begin(), ssMnemonicPassphrase.end()); + + return true; +} diff --git a/src/wallet/walletdb.h b/src/wallet/walletdb.h index b3dadd2812..a22f0e926d 100644 --- a/src/wallet/walletdb.h +++ b/src/wallet/walletdb.h @@ -11,6 +11,7 @@ #include "primitives/transaction.h" #include "wallet/db.h" #include "key.h" +#include "wallet/bip39.h" #include #include @@ -65,12 +66,21 @@ class CHDChain uint32_t nInternalChainCounter; CKeyID seed_id; //!< seed hash160 + bool bUse_bip44; + SecureVector vchMnemonic; + SecureVector vchMnemonicPassphrase; + SecureVector vchSeed; + static const int VERSION_HD_BASE = 1; static const int VERSION_HD_CHAIN_SPLIT = 2; - static const int CURRENT_VERSION = VERSION_HD_CHAIN_SPLIT; + static const int VERSION_HD_BIP44_BIP39 = 3; + static const int CURRENT_VERSION = VERSION_HD_BIP44_BIP39; int nVersion; - CHDChain() { SetNull(); } + CWallet* pwallet; + + CHDChain(CWallet* pw): pwallet(pw) { SetNull(); } + ADD_SERIALIZE_METHODS; template inline void SerializationOp(Stream& s, Operation ser_action) @@ -78,17 +88,34 @@ class CHDChain READWRITE(this->nVersion); READWRITE(nExternalChainCounter); READWRITE(seed_id); - if (this->nVersion >= VERSION_HD_CHAIN_SPLIT) + if (this->nVersion >= VERSION_HD_CHAIN_SPLIT) { READWRITE(nInternalChainCounter); + } + + if(VERSION_HD_BIP44_BIP39 == this->nVersion) { + 
READWRITE(bUse_bip44); + } } + void SetSeedFromSeedId(); + void SetNull() { nVersion = CHDChain::CURRENT_VERSION; nExternalChainCounter = 0; nInternalChainCounter = 0; seed_id.SetNull(); + bUse_bip44 = false; } + + bool IsNull() { return seed_id.IsNull();} + + + void UseBip44( bool b = true) { bUse_bip44 = b;} + bool IsBip44() const { return bUse_bip44 == true;} + + + bool SetMnemonic(const SecureString& ssMnemonic, const SecureString& ssMnemonicPassphrase, SecureVector& vchSeed); }; class CKeyMetadata @@ -244,6 +271,16 @@ class CWalletDB bool ReadVersion(int& nVersion); //! Write wallet version bool WriteVersion(int nVersion); + + bool WriteBip39Words(const uint256& hash, const std::vector& vchWords, bool fEncrypted); + bool WriteBip39Passphrase(const std::vector& vchPassphrase, bool fEncrypted); + bool WriteBip39VchSeed(const std::vector& vchSeed, bool fEncrypted); + bool ReadBip39Words(uint256& hash, std::vector& vchWords, bool fEncrypted); + bool ReadBip39Passphrase(std::vector& vchPassphrase, bool fEncrypted); + bool ReadBip39VchSeed(std::vector& vchSeed, bool fEncrypted); + bool EraseBip39Words(bool fEncrypted); + bool EraseBip39Passphrase(bool fEncrypted); + bool EraseBip39VchSeed(bool fEncrypted); private: CDB batch; CWalletDBWrapper& m_dbw; diff --git a/test/functional/README.md b/test/functional/README.md index 52cb391d68..6d5d9e86a1 100644 --- a/test/functional/README.md +++ b/test/functional/README.md @@ -75,52 +75,6 @@ thread.) - Can be used to write tests where specific P2P protocol behavior is tested. Examples tests are `p2p-accept-block.py`, `p2p-compactblocks.py`. -#### Comptool - -- Comptool is a Testing framework for writing tests that compare the block/tx acceptance -behavior of a ravend against 1 or more other ravend instances. It should not be used -to write static tests with known outcomes, since that type of test is easier to write and -maintain using the standard RavenTestFramework. 
- -- Set the `num_nodes` variable (defined in `ComparisonTestFramework`) to start up -1 or more nodes. If using 1 node, then `--testbinary` can be used as a command line -option to change the ravend binary used by the test. If using 2 or more nodes, -then `--refbinary` can be optionally used to change the ravend that will be used -on nodes 2 and up. - -- Implement a (generator) function called `get_tests()` which yields `TestInstance`s. -Each `TestInstance` consists of: - - A list of `[object, outcome, hash]` entries - * `object` is a `CBlock`, `CTransaction`, or - `CBlockHeader`. `CBlock`'s and `CTransaction`'s are tested for - acceptance. `CBlockHeader`s can be used so that the test runner can deliver - complete headers-chains when requested from the ravend, to allow writing - tests where blocks can be delivered out of order but still processed by - headers-first ravend's. - * `outcome` is `True`, `False`, or `None`. If `True` - or `False`, the tip is compared with the expected tip -- either the - block passed in, or the hash specified as the optional 3rd entry. If - `None` is specified, then the test will compare all the ravend's - being tested to see if they all agree on what the best tip is. - * `hash` is the block hash of the tip to compare against. Optional to - specify; if left out then the hash of the block passed in will be used as - the expected tip. This allows for specifying an expected tip while testing - the handling of either invalid blocks or blocks delivered out of order, - which complete a longer chain. - - `sync_every_block`: `True/False`. If `False`, then all blocks - are inv'ed together, and the test runner waits until the node receives the - last one, and tests only the last block for tip acceptance using the - outcome and specified tip. If `True`, then each block is tested in - sequence and synced (this is slower when processing many blocks). - - `sync_every_transaction`: `True/False`. 
Analogous to - `sync_every_block`, except if the outcome on the last tx is "None", - then the contents of the entire mempool are compared across all ravend - connections. If `True` or `False`, then only the last tx's - acceptance is tested against the given outcome. - -- For examples of tests written in this framework, see - `invalidblockrequest.py` and `p2p-fullblocktest.py`. - ### test-framework modules #### [test_framework/authproxy.py](test_framework/authproxy.py) @@ -135,15 +89,9 @@ Generally useful functions. #### [test_framework/mininode.py](test_framework/mininode.py) Basic code to support P2P connectivity to a ravend. -#### [test_framework/comptool.py](test_framework/comptool.py) -Framework for comparison-tool style, P2P tests. - #### [test_framework/script.py](test_framework/script.py) Utilities for manipulating transaction scripts (originally from python-ravenlib) -#### [test_framework/blockstore.py](test_framework/blockstore.py) -Implements disk-backed block and tx storage. - #### [test_framework/key.py](test_framework/key.py) Wrapper around OpenSSL EC_Key (originally from python-ravenlib) diff --git a/test/functional/combine_logs.py b/test/functional/combine_logs.py index 03203ada6c..def527dc8a 100755 --- a/test/functional/combine_logs.py +++ b/test/functional/combine_logs.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/create_cache.py b/test/functional/create_cache.py index 917ada3c54..291a776d71 100755 --- a/test/functional/create_cache.py +++ b/test/functional/create_cache.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/example_test.py b/test/functional/example_test.py index 69ae43dc18..41996e9c84 100755 --- a/test/functional/example_test.py +++ b/test/functional/example_test.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/feature_assets.py b/test/functional/feature_assets.py index 740993409b..690ad16176 100755 --- a/test/functional/feature_assets.py +++ b/test/functional/feature_assets.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
@@ -11,6 +11,7 @@ import string + class AssetTest(RavenTestFramework): def set_test_params(self): self.setup_clean_chain = True @@ -62,8 +63,8 @@ def big_test(self): assert_equal(len(myassets["MY_ASSET!"]["outpoints"]), 1) assert_is_hash_string(myassets["MY_ASSET"]["outpoints"][0]["txid"]) assert_equal(myassets["MY_ASSET"]["outpoints"][0]["txid"], myassets["MY_ASSET!"]["outpoints"][0]["txid"]) - assert(int(myassets["MY_ASSET"]["outpoints"][0]["vout"]) >= 0) - assert(int(myassets["MY_ASSET!"]["outpoints"][0]["vout"]) >= 0) + assert (int(myassets["MY_ASSET"]["outpoints"][0]["vout"]) >= 0) + assert (int(myassets["MY_ASSET!"]["outpoints"][0]["vout"]) >= 0) assert_equal(myassets["MY_ASSET"]["outpoints"][0]["amount"], 1000) assert_equal(myassets["MY_ASSET!"]["outpoints"][0]["amount"], 1) @@ -91,7 +92,7 @@ def big_test(self): assert_equal(myassets["MY_ASSET"]["balance"], 200) assert_equal(len(myassets["MY_ASSET"]["outpoints"]), 1) assert_is_hash_string(myassets["MY_ASSET"]["outpoints"][0]["txid"]) - assert(int(myassets["MY_ASSET"]["outpoints"][0]["vout"]) >= 0) + assert (int(myassets["MY_ASSET"]["outpoints"][0]["vout"]) >= 0) assert_equal(n0.listmyassets(asset="MY_ASSET")["MY_ASSET"], 800) self.log.info("Checking listassetbalancesbyaddress()...") @@ -104,7 +105,7 @@ def big_test(self): if n0.validateaddress(assaddr)["ismine"]: changeaddress = assaddr assert_equal(n0.listassetbalancesbyaddress(changeaddress)["MY_ASSET"], 800) - assert(changeaddress is not None) + assert (changeaddress is not None) assert_equal(n0.listassetbalancesbyaddress(address0)["MY_ASSET!"], 1) self.log.info("Burning all units to test reissue on zero units...") @@ -329,7 +330,6 @@ def db_corruption_regression(self): assert_equal(0, len(n0.listassets(asset_name, True))) - def reissue_prec_change(self): self.log.info("Testing precision change on reissue...") n0 = self.nodes[0] @@ -342,16 +342,15 @@ def reissue_prec_change(self): assert_equal(0, n0.listassets("*", True)[asset_name]["units"]) for i in 
range(0, 8): - n0.reissue(asset_name, 10.0**(-i), address, "", True, i+1) + n0.reissue(asset_name, 10.0 ** (-i), address, "", True, i + 1) n0.generate(1) - assert_equal(i+1, n0.listassets("*", True)[asset_name]["units"]) - assert_raises_rpc_error(-25, "Error: Unable to reissue asset: unit must be larger than current unit selection", n0.reissue, asset_name, 10.0**(-i), address, "", True, i) + assert_equal(i + 1, n0.listassets("*", True)[asset_name]["units"]) + assert_raises_rpc_error(-25, "Error: Unable to reissue asset: unit must be larger than current unit selection", n0.reissue, asset_name, 10.0 ** (-i), address, "", True, i) n0.reissue(asset_name, 0.00000001, address) n0.generate(1) assert_equal(Decimal('11.11111111'), n0.listassets("*", True)[asset_name]["amount"]) - def run_test(self): self.activate_assets() self.big_test() diff --git a/test/functional/feature_assets_mempool.py b/test/functional/feature_assets_mempool.py index 065c0801e4..34417a892b 100755 --- a/test/functional/feature_assets_mempool.py +++ b/test/functional/feature_assets_mempool.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/feature_assets_reorg.py b/test/functional/feature_assets_reorg.py index a7123f4265..5a76df46d7 100755 --- a/test/functional/feature_assets_reorg.py +++ b/test/functional/feature_assets_reorg.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/feature_assumevalid.py b/test/functional/feature_assumevalid.py index cd81760c46..6ea19a4c1b 100755 --- a/test/functional/feature_assumevalid.py +++ b/test/functional/feature_assumevalid.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. @@ -35,18 +35,20 @@ import time from test_framework.blocktools import create_block, create_coinbase -from test_framework.key import CECKey +from test_framework.key import ECKey from test_framework.mininode import CBlockHeader, COutPoint, CTransaction, CTxIn, CTxOut, NetworkThread, NodeConn, NodeConnCB, MsgBlock, MsgHeaders from test_framework.script import CScript, OP_TRUE from test_framework.test_framework import RavenTestFramework from test_framework.util import p2p_port, assert_equal + class BaseNode(NodeConnCB): def send_header_for_blocks(self, new_blocks): headers_message = MsgHeaders() headers_message.headers = [CBlockHeader(b) for b in new_blocks] self.send_message(headers_message) + class AssumeValidTest(RavenTestFramework): def set_test_params(self): self.setup_clean_chain = True @@ -106,9 +108,9 @@ def run_test(self): self.blocks = [] # Get a pubkey for the coinbase TXO - coinbase_key = CECKey() - coinbase_key.set_secretbytes(b"horsebattery") - coinbase_pubkey = coinbase_key.get_pubkey() + coinbase_key = ECKey() + coinbase_key.generate() + coinbase_pubkey = coinbase_key.get_pubkey().get_bytes() # Create the first block with a coinbase output to our key height = 1 @@ -118,7 +120,7 @@ def run_test(self): block.solve() # Save the coinbase for later self.block1 = block - self.tip = block.sha256 + self.tip = block.x16r height += 1 # Bury the block 100 deep so the coinbase output is spendable @@ -126,13 +128,13 @@ 
def run_test(self): block = create_block(self.tip, create_coinbase(height), self.block_time) block.solve() self.blocks.append(block) - self.tip = block.sha256 + self.tip = block.x16r self.block_time += 1 height += 1 # Create a transaction spending the coinbase output with an invalid (null) signature tx = CTransaction() - tx.vin.append(CTxIn(COutPoint(self.block1.vtx[0].sha256, 0), script_sig=b"")) + tx.vin.append(CTxIn(COutPoint(self.block1.vtx[0].x16r, 0), script_sig=b"")) tx.vout.append(CTxOut(49 * 100000000, CScript([OP_TRUE]))) tx.calc_x16r() @@ -143,7 +145,7 @@ def run_test(self): block102.rehash() block102.solve() self.blocks.append(block102) - self.tip = block102.sha256 + self.tip = block102.x16r self.block_time += 1 height += 1 @@ -153,18 +155,18 @@ def run_test(self): block.nVersion = 4 block.solve() self.blocks.append(block) - self.tip = block.sha256 + self.tip = block.x16r self.block_time += 1 height += 1 # Start node1 and node2 with assumevalid so they accept a block with a bad signature. 
- self.start_node(1, extra_args=["-assumevalid=" + hex(block102.sha256)]) + self.start_node(1, extra_args=["-assumevalid=" + hex(block102.x16r)]) node1 = BaseNode() # connects to node1 connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], node1)) node1.add_connection(connections[1]) node1.wait_for_verack() - self.start_node(2, extra_args=["-assumevalid=" + hex(block102.sha256)]) + self.start_node(2, extra_args=["-assumevalid=" + hex(block102.x16r)]) node2 = BaseNode() # connects to node2 connections.append(NodeConn('127.0.0.1', p2p_port(2), self.nodes[2], node2)) node2.add_connection(connections[2]) @@ -192,5 +194,6 @@ def run_test(self): self.send_blocks_until_disconnected(node2) self.assert_blockchain_height(self.nodes[2], 101) + if __name__ == '__main__': AssumeValidTest().main() diff --git a/test/functional/feature_bip68_sequence.py b/test/functional/feature_bip68_sequence.py index 7a30deeda8..e9511d2c6f 100755 --- a/test/functional/feature_bip68_sequence.py +++ b/test/functional/feature_bip68_sequence.py @@ -1,26 +1,29 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. -"""Test BIP68 implementation.""" +""" +Test BIP68 implementation. 
+""" import time import random from test_framework.test_framework import RavenTestFramework -from test_framework.util import satoshi_round, assert_raises_rpc_error, get_bip9_status, assert_equal,assert_greater_than, sync_blocks +from test_framework.util import satoshi_round, assert_raises_rpc_error, get_bip9_status, assert_equal, assert_greater_than, sync_blocks from test_framework.blocktools import CTransaction, COIN, CTxIn, COutPoint, CTxOut, CScript, create_block, create_coinbase from test_framework.mininode import to_hex, from_hex -SEQUENCE_LOCKTIME_DISABLE_FLAG = (1<<31) -SEQUENCE_LOCKTIME_TYPE_FLAG = (1<<22) # this means use time (0 means height) -SEQUENCE_LOCKTIME_GRANULARITY = 9 # this is a bit-shift +SEQUENCE_LOCKTIME_DISABLE_FLAG = (1 << 31) +SEQUENCE_LOCKTIME_TYPE_FLAG = (1 << 22) # this means use time (0 means height) +SEQUENCE_LOCKTIME_GRANULARITY = 9 # this is a bit-shift SEQUENCE_LOCKTIME_MASK = 0x0000ffff # RPC error for non-BIP68 final transactions NOT_FINAL_ERROR = "64: non-BIP68-final" + class BIP68Test(RavenTestFramework): def set_test_params(self): self.num_nodes = 2 @@ -43,15 +46,15 @@ def run_test(self): def test_disable_flag(self): # Create some unconfirmed inputs new_addr = self.nodes[0].getnewaddress() - self.nodes[0].sendtoaddress(new_addr, 2) # send 2 RVN + self.nodes[0].sendtoaddress(new_addr, 2) # send 2 RVN utxos = self.nodes[0].listunspent(0, 0) - assert(len(utxos) > 0) + assert (len(utxos) > 0) utxo = utxos[0] tx1 = CTransaction() - value = int(satoshi_round(utxo["amount"] - self.relayfee)*COIN) + value = int(satoshi_round(utxo["amount"] - self.relayfee) * COIN) # Check that the disable flag disables relative locktime. 
# If sequence locks were used, this would require 1 block for the @@ -70,7 +73,7 @@ def test_disable_flag(self): tx2.nVersion = 2 sequence_value = sequence_value & 0x7fffffff tx2.vin = [CTxIn(COutPoint(tx1_id, 0), n_sequence=sequence_value)] - tx2.vout = [CTxOut(int(value-self.relayfee*COIN), CScript([b'a']))] + tx2.vout = [CTxOut(int(value - self.relayfee * COIN), CScript([b'a']))] tx2.rehash() assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, to_hex(tx2)) @@ -84,7 +87,7 @@ def test_disable_flag(self): # Calculate the median time past of a prior block ("confirmations" before # the current tip). def get_median_time_past(self, confirmations): - block_hash = self.nodes[0].getblockhash(self.nodes[0].getblockcount()-confirmations) + block_hash = self.nodes[0].getblockhash(self.nodes[0].getblockcount() - confirmations) return self.nodes[0].getblockheader(block_hash)["mediantime"] # Test that sequence locks are respected for transactions spending confirmed inputs. 
@@ -100,7 +103,7 @@ def test_sequence_lock_confirmed_inputs(self): num_outputs = random.randint(1, max_outputs) outputs = {} for i in range(num_outputs): - outputs[addresses[i]] = random.randint(1, 20)*0.01 + outputs[addresses[i]] = random.randint(1, 20) * 0.01 self.nodes[0].sendmany("", outputs) self.nodes[0].generate(1) @@ -118,7 +121,7 @@ def test_sequence_lock_confirmed_inputs(self): # Track whether any sequence locks used should fail should_pass = True - + # Track whether this transaction was built with sequence locks using_sequence_locks = False @@ -126,14 +129,14 @@ def test_sequence_lock_confirmed_inputs(self): tx.nVersion = 2 value = 0 for j in range(num_inputs): - sequence_value = 0xfffffffe # this disables sequence locks + sequence_value = 0xfffffffe # this disables sequence locks # 50% chance we enable sequence locks - if random.randint(0,1): + if random.randint(0, 1): using_sequence_locks = True # 10% of the time, make the input sequence value pass - input_will_pass = (random.randint(1,10) == 1) + input_will_pass = (random.randint(1, 10) == 1) sequence_value = utxos[j]["confirmations"] if not input_will_pass: sequence_value += 1 @@ -144,7 +147,7 @@ def test_sequence_lock_confirmed_inputs(self): # from the tip so that we're looking up MTP of the block # PRIOR to the one the input appears in, as per the BIP68 spec. 
orig_time = self.get_median_time_past(utxos[j]["confirmations"]) - cur_time = self.get_median_time_past(0) # MTP of the tip + cur_time = self.get_median_time_past(0) # MTP of the tip # can only timelock this input if it's not too old -- otherwise use height can_time_lock = True @@ -152,19 +155,19 @@ def test_sequence_lock_confirmed_inputs(self): can_time_lock = False # if time-lockable, then 50% chance we make this a time lock - if random.randint(0,1) and can_time_lock: + if random.randint(0, 1) and can_time_lock: # Find first time-lock value that fails, or latest one that succeeds time_delta = sequence_value << SEQUENCE_LOCKTIME_GRANULARITY if input_will_pass and time_delta > cur_time - orig_time: sequence_value = ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY) elif not input_will_pass and time_delta <= cur_time - orig_time: - sequence_value = ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY)+1 + sequence_value = ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY) + 1 sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG tx.vin.append(CTxIn(COutPoint(int(utxos[j]["txid"], 16), utxos[j]["vout"]), n_sequence=sequence_value)) - value += utxos[j]["amount"]*COIN + value += utxos[j]["amount"] * COIN # Overestimate the size of the tx - signatures should be less than 120 bytes, and leave 50 for the output tx_size = len(to_hex(tx)) // 2 + 120 * num_inputs + 50 - tx.vout.append(CTxOut(int(value-self.relayfee*tx_size*COIN/1000), CScript([b'a']))) + tx.vout.append(CTxOut(int(value - self.relayfee * tx_size * COIN / 1000), CScript([b'a']))) rawtx = self.nodes[0].signrawtransaction(to_hex(tx))["hex"] if using_sequence_locks and not should_pass: @@ -192,8 +195,8 @@ def test_sequence_lock_unconfirmed_inputs(self): # Sequence lock of 0 should pass. 
tx2 = CTransaction() tx2.nVersion = 2 - tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), n_sequence=0)] - tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))] + tx2.vin = [CTxIn(COutPoint(tx1.x16r, 0), n_sequence=0)] + tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee * COIN), CScript([b'a']))] tx2_raw = self.nodes[0].signrawtransaction(to_hex(tx2))["hex"] tx2 = from_hex(tx2, tx2_raw) tx2.rehash() @@ -211,7 +214,7 @@ def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock): tx = CTransaction() tx.nVersion = 2 tx.vin = [CTxIn(COutPoint(orig_tx.x16r, 0), n_sequence=sequence_value)] - tx.vout = [CTxOut(int(orig_tx.vout[0].nValue - relayfee*COIN), CScript([b'a']))] + tx.vout = [CTxOut(int(orig_tx.vout[0].nValue - relayfee * COIN), CScript([b'a']))] tx.rehash() if orig_tx.hash in node.getrawmempool(): @@ -228,45 +231,45 @@ def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock): # Now mine some blocks, but make sure tx2 doesn't get mined. # Use prioritisetransaction to lower the effective feerate to 0 - self.nodes[0].prioritisetransaction(txid=tx2.hash, fee_delta=int(-self.relayfee*COIN)) + self.nodes[0].prioritisetransaction(txid=tx2.hash, fee_delta=int(-self.relayfee * COIN)) cur_time = int(time.time()) for _ in range(10): self.nodes[0].setmocktime(cur_time + 600) self.nodes[0].generate(1) cur_time += 600 - assert(tx2.hash in self.nodes[0].getrawmempool()) + assert (tx2.hash in self.nodes[0].getrawmempool()) test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=True) test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False) # Mine tx2, and then try again - self.nodes[0].prioritisetransaction(txid=tx2.hash, fee_delta=int(self.relayfee*COIN)) + self.nodes[0].prioritisetransaction(txid=tx2.hash, fee_delta=int(self.relayfee * COIN)) # Advance the time on the node so that we can test timelocks - self.nodes[0].setmocktime(cur_time+600) + self.nodes[0].setmocktime(cur_time + 600) 
self.nodes[0].generate(1) - assert(tx2.hash not in self.nodes[0].getrawmempool()) + assert (tx2.hash not in self.nodes[0].getrawmempool()) # Now that tx2 is not in the mempool, a sequence locked spend should # succeed tx3 = test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False) - assert(tx3.hash in self.nodes[0].getrawmempool()) + assert (tx3.hash in self.nodes[0].getrawmempool()) self.nodes[0].generate(1) - assert(tx3.hash not in self.nodes[0].getrawmempool()) + assert (tx3.hash not in self.nodes[0].getrawmempool()) # One more test, this time using height locks tx4 = test_nonzero_locks(tx3, self.nodes[0], self.relayfee, use_height_lock=True) - assert(tx4.hash in self.nodes[0].getrawmempool()) + assert (tx4.hash in self.nodes[0].getrawmempool()) # Now try combining confirmed and unconfirmed inputs tx5 = test_nonzero_locks(tx4, self.nodes[0], self.relayfee, use_height_lock=True) - assert(tx5.hash not in self.nodes[0].getrawmempool()) + assert (tx5.hash not in self.nodes[0].getrawmempool()) utxos = self.nodes[0].listunspent() tx5.vin.append(CTxIn(COutPoint(int(utxos[0]["txid"], 16), utxos[0]["vout"]), n_sequence=1)) - tx5.vout[0].nValue += int(utxos[0]["amount"]*COIN) + tx5.vout[0].nValue += int(utxos[0]["amount"] * COIN) raw_tx5 = self.nodes[0].signrawtransaction(to_hex(tx5))["hex"] assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, raw_tx5) @@ -281,32 +284,32 @@ def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock): # If we invalidate the tip, tx3 should get added to the mempool, causing # tx4 to be removed (fails sequence-lock). self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) - assert(tx4.hash not in self.nodes[0].getrawmempool()) - assert(tx3.hash in self.nodes[0].getrawmempool()) + assert (tx4.hash not in self.nodes[0].getrawmempool()) + assert (tx3.hash in self.nodes[0].getrawmempool()) # Now mine 2 empty blocks to reorg out the current tip (labeled tip-1 in # diagram above). 
# This would cause tx2 to be added back to the mempool, which in turn causes # tx3 to be removed. - tip = int(self.nodes[0].getblockhash(self.nodes[0].getblockcount()-1), 16) + tip = int(self.nodes[0].getblockhash(self.nodes[0].getblockcount() - 1), 16) height = self.nodes[0].getblockcount() for _ in range(2): block = create_block(tip, create_coinbase(height), cur_time) block.nVersion = 3 block.rehash() block.solve() - tip = block.sha256 + tip = block.x16r height += 1 self.nodes[0].submitblock(to_hex(block)) cur_time += 1 mempool = self.nodes[0].getrawmempool() - assert(tx3.hash not in mempool) - assert(tx2.hash in mempool) + assert (tx3.hash not in mempool) + assert (tx2.hash in mempool) # Reset the chain and get rid of the mocktimed-blocks self.nodes[0].setmocktime(0) - self.nodes[0].invalidateblock(self.nodes[0].getblockhash(cur_height+1)) + self.nodes[0].invalidateblock(self.nodes[0].getblockhash(cur_height + 1)) self.nodes[0].generate(10) # Make sure that BIP68 isn't being used to validate blocks, prior to @@ -314,7 +317,7 @@ def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock): # being run, then it's possible the test has activated the soft fork, and # this test should be moved to run earlier, or deleted. 
def test_bip68_not_consensus(self): - assert(get_bip9_status(self.nodes[0], 'csv')['status'] != 'active') + assert (get_bip9_status(self.nodes[0], 'csv')['status'] != 'active') txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2) tx1 = from_hex(CTransaction(), self.nodes[0].getrawtransaction(txid)) @@ -323,8 +326,8 @@ def test_bip68_not_consensus(self): # Make an anyone-can-spend transaction tx2 = CTransaction() tx2.nVersion = 1 - tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), n_sequence=0)] - tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))] + tx2.vin = [CTxIn(COutPoint(tx1.x16r, 0), n_sequence=0)] + tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee * COIN), CScript([b'a']))] # sign tx2 tx2_raw = self.nodes[0].signrawtransaction(to_hex(tx2))["hex"] @@ -332,21 +335,21 @@ def test_bip68_not_consensus(self): tx2.rehash() self.nodes[0].sendrawtransaction(to_hex(tx2)) - + # Now make an invalid spend of tx2 according to BIP68 - sequence_value = 100 # 100 block relative locktime + sequence_value = 100 # 100 block relative locktime tx3 = CTransaction() tx3.nVersion = 2 - tx3.vin = [CTxIn(COutPoint(tx2.sha256, 0), n_sequence=sequence_value)] - tx3.vout = [CTxOut(int(tx2.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))] + tx3.vin = [CTxIn(COutPoint(tx2.x16r, 0), n_sequence=sequence_value)] + tx3.vout = [CTxOut(int(tx2.vout[0].nValue - self.relayfee * COIN), CScript([b'a']))] tx3.rehash() assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, to_hex(tx3)) # make a block that violates bip68; ensure that the tip updates tip = int(self.nodes[0].getbestblockhash(), 16) - block = create_block(tip, create_coinbase(self.nodes[0].getblockcount()+1)) + block = create_block(tip, create_coinbase(self.nodes[0].getblockcount() + 1)) block.nVersion = 3 block.vtx.extend([tx1, tx2, tx3]) block.hashMerkleRoot = block.calc_merkle_root() @@ -370,8 +373,8 @@ def activate_csv(self): # Use self.nodes[1] to test 
that version 2 transactions are standard. def test_version2_relay(self): - inputs = [ ] - outputs = { self.nodes[1].getnewaddress() : 1.0 } + inputs = [] + outputs = {self.nodes[1].getnewaddress(): 1.0} rawtx = self.nodes[1].createrawtransaction(inputs, outputs) rawtxfund = self.nodes[1].fundrawtransaction(rawtx)['hex'] tx = from_hex(CTransaction(), rawtxfund) @@ -379,5 +382,6 @@ def test_version2_relay(self): tx_signed = self.nodes[1].signrawtransaction(to_hex(tx))["hex"] self.nodes[1].sendrawtransaction(tx_signed) + if __name__ == '__main__': BIP68Test().main() diff --git a/test/functional/feature_bip_softforks.py b/test/functional/feature_bip_softforks.py deleted file mode 100755 index bf64a03004..0000000000 --- a/test/functional/feature_bip_softforks.py +++ /dev/null @@ -1,292 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers -# Distributed under the MIT software license, see the accompanying -# file COPYING or http://www.opensource.org/licenses/mit-license.php. - -""" -Test BIP 9 soft forks. - -Connect to a single node. 
-regtest lock-in with 108/144 block signalling -activation after a further 144 blocks -mine 2 block and save coinbases for later use -mine 141 blocks to transition from DEFINED to STARTED -mine 100 blocks signalling readiness and 44 not in order to fail to change state this period -mine 108 blocks signalling readiness and 36 blocks not signalling readiness (STARTED->LOCKED_IN) -mine a further 143 blocks (LOCKED_IN) -test that enforcement has not triggered (which triggers ACTIVE) -test that enforcement has triggered -""" - -from io import BytesIO -import shutil -import time -import itertools -from test_framework.test_framework import ComparisonTestFramework -from test_framework.util import hex_str_to_bytes, bytes_to_hex_str, assert_equal -from test_framework.mininode import CTransaction, NetworkThread -from test_framework.blocktools import create_coinbase, create_block -from test_framework.comptool import TestInstance, TestManager -from test_framework.script import CScript, OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP - -class BIP9SoftForksTest(ComparisonTestFramework): - def set_test_params(self): - self.num_nodes = 1 - self.extra_args = [['-whitelist=127.0.0.1']] - self.setup_clean_chain = True - - def run_test(self): - self.test = TestManager(self, self.options.tmpdir) - self.test.add_all_connections(self.nodes) - NetworkThread().start() # Start up network handling in another thread - self.test.run() - - @staticmethod - def create_transaction(node, coinbase, to_address, amount): - from_txid = node.getblock(coinbase)['tx'][0] - inputs = [{ "txid" : from_txid, "vout" : 0}] - outputs = { to_address : amount } - rawtx = node.createrawtransaction(inputs, outputs) - tx = CTransaction() - f = BytesIO(hex_str_to_bytes(rawtx)) - tx.deserialize(f) - tx.nVersion = 2 - return tx - - @staticmethod - def sign_transaction(node, tx): - signresult = node.signrawtransaction(bytes_to_hex_str(tx.serialize())) - tx = CTransaction() - f = BytesIO(hex_str_to_bytes(signresult['hex'])) - 
tx.deserialize(f) - return tx - - def generate_blocks(self, number, version, test_blocks=None): - if test_blocks is None: - test_blocks = [] - for _ in range(number): - block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1) - block.nVersion = version - block.rehash() - block.solve() - test_blocks.append([block, True]) - self.last_block_time += 1 - self.tip = block.sha256 - self.height += 1 - return test_blocks - - def get_bip9_status(self, key): - info = self.nodes[0].getblockchaininfo() - return info['bip9_softforks'][key] - - def test_bip(self, bip_name, activated_version, invalidate, invalidate_post_signature, bit_no): - assert_equal(self.get_bip9_status(bip_name)['status'], 'defined') - assert_equal(self.get_bip9_status(bip_name)['since'], 0) - - # generate some coins for later - self.coinbase_blocks = self.nodes[0].generate(2) - self.height = 3 # height of the next block to build - self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0) - self.nodeaddress = self.nodes[0].getnewaddress() - self.last_block_time = int(time.time()) - - assert_equal(self.get_bip9_status(bip_name)['status'], 'defined') - assert_equal(self.get_bip9_status(bip_name)['since'], 0) - tmpl = self.nodes[0].getblocktemplate({}) - assert(bip_name not in tmpl['rules']) - assert(bip_name not in tmpl['vbavailable']) - assert_equal(tmpl['vbrequired'], 0) - assert_equal(tmpl['version'], 0x20000000) - - # Test 1 - # Advance from DEFINED to STARTED - test_blocks = self.generate_blocks(141, 4) - yield TestInstance(test_blocks, sync_every_block=False) - - assert_equal(self.get_bip9_status(bip_name)['status'], 'started') - assert_equal(self.get_bip9_status(bip_name)['since'], 144) - assert_equal(self.get_bip9_status(bip_name)['statistics']['elapsed'], 0) - assert_equal(self.get_bip9_status(bip_name)['statistics']['count'], 0) - tmpl = self.nodes[0].getblocktemplate({}) - assert(bip_name not in tmpl['rules']) - assert_equal(tmpl['vbavailable'][bip_name], bit_no) - 
assert_equal(tmpl['vbrequired'], 0) - assert(tmpl['version'] & activated_version) - - # Test 1-A - # check stats after max number of "signalling not" blocks such that LOCKED_IN still possible this period - self.generate_blocks(36, 4, test_blocks) # 0x00000004 (signalling not) - test_blocks = self.generate_blocks(10, activated_version) # 0x20000001 (signalling ready) - yield TestInstance(test_blocks, sync_every_block=False) - - assert_equal(self.get_bip9_status(bip_name)['statistics']['elapsed'], 46) - assert_equal(self.get_bip9_status(bip_name)['statistics']['count'], 10) - assert_equal(self.get_bip9_status(bip_name)['statistics']['possible'], True) - - # Test 1-B - # check stats after one additional "signalling not" block -- LOCKED_IN no longer possible this period - test_blocks = self.generate_blocks(1, 4, test_blocks) # 0x00000004 (signalling not) - yield TestInstance(test_blocks, sync_every_block=False) - - assert_equal(self.get_bip9_status(bip_name)['statistics']['elapsed'], 47) - assert_equal(self.get_bip9_status(bip_name)['statistics']['count'], 10) - assert_equal(self.get_bip9_status(bip_name)['statistics']['possible'], False) - - # Test 1-C - # finish period with "ready" blocks, but soft fork will still fail to advance to LOCKED_IN - test_blocks = self.generate_blocks(97, activated_version) # 0x20000001 (signalling ready) - yield TestInstance(test_blocks, sync_every_block=False) - - assert_equal(self.get_bip9_status(bip_name)['statistics']['elapsed'], 0) - assert_equal(self.get_bip9_status(bip_name)['statistics']['count'], 0) - assert_equal(self.get_bip9_status(bip_name)['statistics']['possible'], True) - assert_equal(self.get_bip9_status(bip_name)['status'], 'started') - - # Test 2 - # Fail to achieve LOCKED_IN 100 out of 144 signal bit 1 - # using a variety of bits to simulate multiple parallel softforks - test_blocks = self.generate_blocks(50, activated_version) # 0x20000001 (signalling ready) - test_blocks = self.generate_blocks(20, 4, test_blocks) # 
0x00000004 (signalling not) - test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000101 (signalling ready) - test_blocks = self.generate_blocks(24, 4, test_blocks) # 0x20010000 (signalling not) - yield TestInstance(test_blocks, sync_every_block=False) - - assert_equal(self.get_bip9_status(bip_name)['status'], 'started') - assert_equal(self.get_bip9_status(bip_name)['since'], 144) - assert_equal(self.get_bip9_status(bip_name)['statistics']['elapsed'], 0) - assert_equal(self.get_bip9_status(bip_name)['statistics']['count'], 0) - tmpl = self.nodes[0].getblocktemplate({}) - assert(bip_name not in tmpl['rules']) - assert_equal(tmpl['vbavailable'][bip_name], bit_no) - assert_equal(tmpl['vbrequired'], 0) - assert(tmpl['version'] & activated_version) - - # Test 3 - # 108 out of 144 signal bit 1 to achieve LOCKED_IN - # using a variety of bits to simulate multiple parallel softforks - test_blocks = self.generate_blocks(57, activated_version) # 0x20000001 (signalling ready) - test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not) - test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000101 (signalling ready) - test_blocks = self.generate_blocks(10, 4, test_blocks) # 0x20010000 (signalling not) - yield TestInstance(test_blocks, sync_every_block=False) - - # check counting stats and "possible" flag before last block of this period achieves LOCKED_IN... 
- assert_equal(self.get_bip9_status(bip_name)['statistics']['elapsed'], 143) - assert_equal(self.get_bip9_status(bip_name)['statistics']['count'], 107) - assert_equal(self.get_bip9_status(bip_name)['statistics']['possible'], True) - assert_equal(self.get_bip9_status(bip_name)['status'], 'started') - - # ...continue with Test 3 - test_blocks = self.generate_blocks(1, activated_version) # 0x20000001 (signalling ready) - yield TestInstance(test_blocks, sync_every_block=False) - - assert_equal(self.get_bip9_status(bip_name)['status'], 'locked_in') - assert_equal(self.get_bip9_status(bip_name)['since'], 576) - tmpl = self.nodes[0].getblocktemplate({}) - assert(bip_name not in tmpl['rules']) - - # Test 4 - # 143 more version 536870913 blocks (waiting period-1) - test_blocks = self.generate_blocks(143, 4) - yield TestInstance(test_blocks, sync_every_block=False) - - assert_equal(self.get_bip9_status(bip_name)['status'], 'locked_in') - assert_equal(self.get_bip9_status(bip_name)['since'], 576) - tmpl = self.nodes[0].getblocktemplate({}) - assert(bip_name not in tmpl['rules']) - - # Test 5 - # Check that the new rule is enforced - spendtx = self.create_transaction(self.nodes[0], - self.coinbase_blocks[0], self.nodeaddress, 1.0) - invalidate(spendtx) - spendtx = self.sign_transaction(self.nodes[0], spendtx) - spendtx.rehash() - invalidate_post_signature(spendtx) - spendtx.rehash() - block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1) - block.nVersion = activated_version - block.vtx.append(spendtx) - block.hashMerkleRoot = block.calc_merkle_root() - block.rehash() - block.solve() - - self.last_block_time += 1 - self.tip = block.sha256 - self.height += 1 - yield TestInstance([[block, True]]) - - assert_equal(self.get_bip9_status(bip_name)['status'], 'active') - assert_equal(self.get_bip9_status(bip_name)['since'], 720) - tmpl = self.nodes[0].getblocktemplate({}) - assert(bip_name in tmpl['rules']) - assert(bip_name not in tmpl['vbavailable']) 
- assert_equal(tmpl['vbrequired'], 0) - assert(not (tmpl['version'] & (1 << bit_no))) - - # Test 6 - # Check that the new sequence lock rules are enforced - spendtx = self.create_transaction(self.nodes[0], - self.coinbase_blocks[1], self.nodeaddress, 1.0) - invalidate(spendtx) - spendtx = self.sign_transaction(self.nodes[0], spendtx) - spendtx.rehash() - invalidate_post_signature(spendtx) - spendtx.rehash() - - block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1) - block.nVersion = 5 - block.vtx.append(spendtx) - block.hashMerkleRoot = block.calc_merkle_root() - block.rehash() - block.solve() - self.last_block_time += 1 - yield TestInstance([[block, False]]) - - # Restart all - self.test.clear_all_connections() - self.stop_nodes() - self.nodes = [] - shutil.rmtree(self.options.tmpdir + "/node0") - self.setup_chain() - self.setup_network() - self.test.add_all_connections(self.nodes) - NetworkThread().start() - self.test.test_nodes[0].wait_for_verack() - - def get_tests(self): - for test in itertools.chain( - self.test_bip('csv', 0x20000001, self.sequence_lock_invalidate, self.donothing, 0), - self.test_bip('csv', 0x20000001, self.mtp_invalidate, self.donothing, 0), - self.test_bip('csv', 0x20000001, self.donothing, self.csv_invalidate, 0) - ): - yield test - - def donothing(self, tx): - return - - @staticmethod - def csv_invalidate(tx): - """Modify the signature in vin 0 of the tx to fail CSV - Prepends -1 CSV DROP in the scriptSig itself. - """ - tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP] + - list(CScript(tx.vin[0].scriptSig))) - - @staticmethod - def sequence_lock_invalidate(tx): - """Modify the nSequence to make it fails once sequence lock rule is - activated (high timespan). 
- """ - tx.vin[0].nSequence = 0x00FFFFFF - tx.nLockTime = 0 - - def mtp_invalidate(self, tx): - """Modify the nLockTime to make it fails once MTP rule is activated.""" - # Disable Sequence lock, Activate nLockTime - tx.vin[0].nSequence = 0x90FFFFFF - tx.nLockTime = self.last_block_time - -if __name__ == '__main__': - BIP9SoftForksTest().main() diff --git a/test/functional/feature_block.py b/test/functional/feature_block.py deleted file mode 100755 index 444e972cc2..0000000000 --- a/test/functional/feature_block.py +++ /dev/null @@ -1,1299 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers -# Distributed under the MIT software license, see the accompanying -# file COPYING or http://www.opensource.org/licenses/mit-license.php. - -""" -Test block processing. - -This reimplements tests from the ravenj/FullBlockTestGenerator used -by the pull-tester. - -We use the testing framework in which we expect a particular answer from -each test. 
-""" - -import time -import copy -import struct -from test_framework.test_framework import ComparisonTestFramework -from test_framework.util import assert_equal -from test_framework.comptool import TestManager, TestInstance, RejectResult -from test_framework.mininode import uint256_from_compact, NetworkThread, MAX_BLOCK_BASE_SIZE, CBlockHeader -from test_framework.blocktools import CBlock, create_transaction, create_coinbase, create_block, CTxIn, CTxOut, COutPoint, get_legacy_sigopcount_block, COIN -from test_framework.key import CECKey -from test_framework.script import (CTransaction, CScript, OP_TRUE, signature_hash, SIGHASH_ALL, OP_CHECKSIG, OP_CHECKMULTISIG, OP_CHECKMULTISIGVERIFY, OP_CHECKSIGVERIFY, OP_2DUP, - hash160, OP_HASH160, OP_EQUAL, ser_uint256, uint256_from_str, MAX_SCRIPT_ELEMENT_SIZE, OP_IF, OP_INVALIDOPCODE, OP_ELSE, OP_ENDIF, OP_FALSE, OP_RETURN) - -class PreviousSpendableOutput: - def __init__(self, tx = CTransaction(), n = -1): - self.tx = tx - self.n = n # the output we're spending - -# Use this class for tests that require behavior other than normal "mininode" behavior. -# For now, it is used to serialize a bloated variant (b64). -class CBrokenBlock(CBlock): - def __init__(self, header=None): - super(CBrokenBlock, self).__init__(header) - - def initialize(self, base_block): - self.vtx = copy.deepcopy(base_block.vtx) - self.hashMerkleRoot = self.calc_merkle_root() - - def serialize(self, **kwargs): - r = b"" - r += super(CBlock, self).serialize() - r += struct.pack(" b1 (0) -> b2 (1) - block(1, spend=out[0]) - save_spendable_output() - yield accepted() - - block(2, spend=out[1]) - yield accepted() - save_spendable_output() - - # so fork like this: - # - # genesis -> b1 (0) -> b2 (1) - # \-> b3 (1) - # - # Nothing should happen at this point. We saw b2 first so it takes priority. 
- tip(1) - b3 = block(3, spend=out[1]) - txout_b3 = PreviousSpendableOutput(b3.vtx[1], 0) - yield rejected() - - - # Now we add another block to make the alternative chain longer. - # - # genesis -> b1 (0) -> b2 (1) - # \-> b3 (1) -> b4 (2) - block(4, spend=out[2]) - yield accepted() - - - # ... and back to the first chain. - # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) - # \-> b3 (1) -> b4 (2) - tip(2) - block(5, spend=out[2]) - save_spendable_output() - yield rejected() - - block(6, spend=out[3]) - yield accepted() - - # Try to create a fork that double-spends - # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) - # \-> b7 (2) -> b8 (4) - # \-> b3 (1) -> b4 (2) - tip(5) - block(7, spend=out[2]) - yield rejected() - - block(8, spend=out[4]) - yield rejected() - - # Try to create a block that has too much fee - # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) - # \-> b9 (4) - # \-> b3 (1) -> b4 (2) - tip(6) - block(9, spend=out[4], additional_coinbase_value=1) - yield rejected(RejectResult(16, b'bad-cb-amount')) - - # Create a fork that ends in a block with too much fee (the one that causes the reorg) - # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) - # \-> b10 (3) -> b11 (4) - # \-> b3 (1) -> b4 (2) - tip(5) - block(10, spend=out[3]) - yield rejected() - - block(11, spend=out[4], additional_coinbase_value=1) - yield rejected(RejectResult(16, b'bad-cb-amount')) - - - # Try again, but with a valid fork first - # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) - # \-> b12 (3) -> b13 (4) -> b14 (5) - # (b12 added last) - # \-> b3 (1) -> b4 (2) - tip(5) - b12 = block(12, spend=out[3]) - save_spendable_output() - b13 = block(13, spend=out[4]) - # Deliver the block header for b12, and the block b13. - # b13 should be accepted but the tip won't advance until b12 is delivered. 
- yield TestInstance([[CBlockHeader(b12), None], [b13, False]]) - - save_spendable_output() - # b14 is invalid, but the node won't know that until it tries to connect - # Tip still can't advance because b12 is missing - block(14, spend=out[5], additional_coinbase_value=1) - yield rejected() - - yield TestInstance([[b12, True, b13.sha256]]) # New tip should be b13. - - # Add a block with max_block_sigops and one with one more sigop - # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) - # \-> b12 (3) -> b13 (4) -> b15 (5) -> b16 (6) - # \-> b3 (1) -> b4 (2) - - # Test that a block with a lot of checksigs is okay - lots_of_check_sigs = CScript([OP_CHECKSIG] * (max_block_sigops - 1)) - tip(13) - block(15, spend=out[5], script=lots_of_check_sigs) - yield accepted() - save_spendable_output() - - - # Test that a block with too many checksigs is rejected - too_many_checksigs = CScript([OP_CHECKSIG] * max_block_sigops) - block(16, spend=out[6], script=too_many_checksigs) - yield rejected(RejectResult(16, b'bad-blk-sigops')) - - - # Attempt to spend a transaction created on a different fork - # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) - # \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1]) - # \-> b3 (1) -> b4 (2) - tip(15) - block(17, spend=txout_b3) - yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) - - # Attempt to spend a transaction created on a different fork (on a fork this time) - # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) - # \-> b12 (3) -> b13 (4) -> b15 (5) - # \-> b18 (b3.vtx[1]) -> b19 (6) - # \-> b3 (1) -> b4 (2) - tip(13) - block(18, spend=txout_b3) - yield rejected() - - block(19, spend=out[6]) - yield rejected() - - # Attempt to spend a coinbase at depth too low - # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) - # \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7) - # \-> b3 (1) -> b4 (2) - tip(15) - block(20, spend=out[7]) - yield rejected(RejectResult(16, b'bad-txns-premature-spend-of-coinbase')) - - # Attempt to spend a 
coinbase at depth too low (on a fork this time) - # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) - # \-> b12 (3) -> b13 (4) -> b15 (5) - # \-> b21 (6) -> b22 (5) - # \-> b3 (1) -> b4 (2) - tip(13) - block(21, spend=out[6]) - yield rejected() - - block(22, spend=out[5]) - yield rejected() - - # Create a block on either side of MAX_BLOCK_BASE_SIZE and make sure its accepted/rejected - # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) - # \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) - # \-> b24 (6) -> b25 (7) - # \-> b3 (1) -> b4 (2) - tip(15) - b23 = block(23, spend=out[6]) - tx = CTransaction() - script_length = MAX_BLOCK_BASE_SIZE - len(b23.serialize()) - 69 - script_output = CScript([b'\x00' * script_length]) - tx.vout.append(CTxOut(0, script_output)) - tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 0))) - b23 = update_block(23, [tx]) - # Make sure the math above worked out to produce a max-sized block - assert_equal(len(b23.serialize()), MAX_BLOCK_BASE_SIZE) - yield accepted() - save_spendable_output() - - # Make the next block one byte bigger and check that it fails - tip(15) - b24 = block(24, spend=out[6]) - script_length = MAX_BLOCK_BASE_SIZE - len(b24.serialize()) - 69 - script_output = CScript([b'\x00' * (script_length+1)]) - tx.vout = [CTxOut(0, script_output)] - b24 = update_block(24, [tx]) - assert_equal(len(b24.serialize()), MAX_BLOCK_BASE_SIZE+1) - yield rejected(RejectResult(16, b'bad-blk-length')) - - block(25, spend=out[7]) - yield rejected() - - # Create blocks with a coinbase input script size out of range - # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) - # \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7) - # \-> ... (6) -> ... (7) - # \-> b3 (1) -> b4 (2) - tip(15) - b26 = block(26, spend=out[6]) - b26.vtx[0].vin[0].scriptSig = b'\x00' - b26.vtx[0].rehash() - # update_block causes the merkle root to get updated, even with no new - # transactions, and updates the required state. 
- update_block(26, []) - yield rejected(RejectResult(16, b'bad-cb-length')) - - # Extend the b26 chain to make sure ravend isn't accepting b26 - block(27, spend=out[7]) - yield rejected(False) - - # Now try a too-large-coinbase script - tip(15) - b28 = block(28, spend=out[6]) - b28.vtx[0].vin[0].scriptSig = b'\x00' * 101 - b28.vtx[0].rehash() - update_block(28, []) - yield rejected(RejectResult(16, b'bad-cb-length')) - - # Extend the b28 chain to make sure ravend isn't accepting b28 - block(29, spend=out[7]) - yield rejected(False) - - # b30 has a max-sized coinbase scriptSig. - tip(23) - b30 = block(30) - b30.vtx[0].vin[0].scriptSig = b'\x00' * 100 - b30.vtx[0].rehash() - update_block(30, []) - yield accepted() - save_spendable_output() - - # b31 - b35 - check sigops of OP_CHECKMULTISIG / OP_CHECKMULTISIGVERIFY / OP_CHECKSIGVERIFY - # - # genesis -> ... -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) - # \-> b36 (11) - # \-> b34 (10) - # \-> b32 (9) - # - - # MULTISIG: each op code counts as 20 sigops. To create the edge case, pack another 19 sigops at the end. 
- lots_of_multisigs = CScript([OP_CHECKMULTISIG] * ((max_block_sigops-1) // 20) + [OP_CHECKSIG] * 19) - b31 = block(31, spend=out[8], script=lots_of_multisigs) - assert_equal(get_legacy_sigopcount_block(b31), max_block_sigops) - yield accepted() - save_spendable_output() - - # this goes over the limit because the coinbase has one sigop - too_many_multisigs = CScript([OP_CHECKMULTISIG] * (max_block_sigops // 20)) - b32 = block(32, spend=out[9], script=too_many_multisigs) - assert_equal(get_legacy_sigopcount_block(b32), max_block_sigops + 1) - yield rejected(RejectResult(16, b'bad-blk-sigops')) - - - # CHECKMULTISIGVERIFY - tip(31) - lots_of_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * ((max_block_sigops-1) // 20) + [OP_CHECKSIG] * 19) - block(33, spend=out[9], script=lots_of_multisigs) - yield accepted() - save_spendable_output() - - too_many_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * (max_block_sigops // 20)) - block(34, spend=out[10], script=too_many_multisigs) - yield rejected(RejectResult(16, b'bad-blk-sigops')) - - - # CHECKSIGVERIFY - tip(33) - lots_of_check_sigs = CScript([OP_CHECKSIGVERIFY] * (max_block_sigops - 1)) - b35 = block(35, spend=out[10], script=lots_of_check_sigs) - yield accepted() - save_spendable_output() - - too_many_checksigs = CScript([OP_CHECKSIGVERIFY] * max_block_sigops) - block(36, spend=out[11], script=too_many_checksigs) - yield rejected(RejectResult(16, b'bad-blk-sigops')) - - - # Check spending of a transaction in a block which failed to connect - # - # b6 (3) - # b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) - # \-> b37 (11) - # \-> b38 (11/37) - # - - # save 37's spendable output, but then double-spend out11 to invalidate the block - tip(35) - b37 = block(37, spend=out[11]) - txout_b37 = PreviousSpendableOutput(b37.vtx[1], 0) - tx = create_and_sign_tx(out[11].tx, out[11].n, 0) - update_block(37, [tx]) - yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) - - # attempt to 
spend b37's first non-coinbase tx, at which point b37 was still considered valid - tip(35) - block(38, spend=txout_b37) - yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) - - # Check P2SH SigOp counting - # - # - # 13 (4) -> b15 (5) -> b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b41 (12) - # \-> b40 (12) - # - # b39 - create some P2SH outputs that will require 6 sigops to spend: - # - # redeem_script = COINBASE_PUBKEY, (OP_2DUP+OP_CHECKSIGVERIFY) * 5, OP_CHECKSIG - # p2sh_script = OP_HASH160, ripemd160(sha256(script)), OP_EQUAL - # - tip(35) - block(39) - b39_outputs = 0 - b39_sigops_per_output = 6 - - # Build the redeem script, hash it, use hash to create the p2sh script - redeem_script = CScript([self.coinbase_pubkey] + [OP_2DUP, OP_CHECKSIGVERIFY]*5 + [OP_CHECKSIG]) - redeem_script_hash = hash160(redeem_script) - p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL]) - - # Create a transaction that spends one satoshi to the p2sh_script, the rest to OP_TRUE - # This must be signed because it is spending a coinbase - spend = out[11] - tx = create_tx(spend.tx, spend.n, 1, p2sh_script) - tx.vout.append(CTxOut(spend.tx.vout[spend.n].nValue - 1, CScript([OP_TRUE]))) - self.sign_tx(tx, spend.tx, spend.n) - tx.rehash() - b39 = update_block(39, [tx]) - b39_outputs += 1 - - # Until block is full, add tx's with 1 satoshi to p2sh_script, the rest to OP_TRUE - tx_last = tx - total_size=len(b39.serialize()) - while total_size < MAX_BLOCK_BASE_SIZE: - tx_new = create_tx(tx_last, 1, 1, p2sh_script) - tx_new.vout.append(CTxOut(tx_last.vout[1].nValue - 1, CScript([OP_TRUE]))) - tx_new.rehash() - total_size += len(tx_new.serialize()) - if total_size >= MAX_BLOCK_BASE_SIZE: - break - b39.vtx.append(tx_new) # add tx to block - tx_last = tx_new - b39_outputs += 1 - - b39 = update_block(39, []) - yield accepted() - save_spendable_output() - - - # Test sigops in P2SH redeem scripts - # - # b40 creates 3333 tx's spending the 6-sigop 
P2SH outputs from b39 for a total of 19998 sigops. - # The first tx has one sigop and then at the end we add 2 more to put us just over the max. - # - # b41 does the same, less one, so it has the maximum sigops permitted. - # - tip(39) - b40 = block(40, spend=out[12]) - sigops = get_legacy_sigopcount_block(b40) - numTxes = (max_block_sigops - sigops) // b39_sigops_per_output - assert_equal(numTxes <= b39_outputs, True) - - lastOutpoint = COutPoint(b40.vtx[1].sha256, 0) - new_txs = [] - for i in range(1, numTxes+1): - tx = CTransaction() - tx.vout.append(CTxOut(1, CScript([OP_TRUE]))) - tx.vin.append(CTxIn(lastOutpoint, b'')) - # second input is corresponding P2SH output from b39 - tx.vin.append(CTxIn(COutPoint(b39.vtx[i].sha256, 0), b'')) - # Note: must pass the redeem_script (not p2sh_script) to the signature hash function - (sighash, _) = signature_hash(redeem_script, tx, 1, SIGHASH_ALL) - sig = self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL])) - scriptSig = CScript([sig, redeem_script]) - - tx.vin[1].scriptSig = scriptSig - tx.rehash() - new_txs.append(tx) - lastOutpoint = COutPoint(tx.x16r, 0) - - b40_sigops_to_fill = max_block_sigops - (numTxes * b39_sigops_per_output + sigops) + 1 - tx = CTransaction() - tx.vin.append(CTxIn(lastOutpoint, b'')) - tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b40_sigops_to_fill))) - tx.rehash() - new_txs.append(tx) - update_block(40, new_txs) - yield rejected(RejectResult(16, b'bad-blk-sigops')) - - # same as b40, but one less sigop - tip(39) - block(41, spend=None) - update_block(41, b40.vtx[1:-1]) - b41_sigops_to_fill = b40_sigops_to_fill - 1 - tx = CTransaction() - tx.vin.append(CTxIn(lastOutpoint, b'')) - tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b41_sigops_to_fill))) - tx.rehash() - update_block(41, [tx]) - yield accepted() - - # Fork off of b39 to create a constant base again - # - # b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) - # \-> b41 (12) - # - 
tip(39) - block(42, spend=out[12]) - yield rejected() - save_spendable_output() - - block(43, spend=out[13]) - yield accepted() - save_spendable_output() - - - # Test a number of really invalid scenarios - # - # -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b44 (14) - # \-> ??? (15) - - # The next few blocks are going to be created "by hand" since they'll do funky things, such as having - # the first transaction be non-coinbase, etc. The purpose of b44 is to make sure this works. - height = self.block_heights[self.tip.x16r] + 1 - coinbase = create_coinbase(height, self.coinbase_pubkey) - b44 = CBlock() - b44.nTime = self.tip.nTime + 1 - b44.hashPrevBlock = self.tip.x16r - b44.nBits = 0x207fffff - b44.vtx.append(coinbase) - b44.hashMerkleRoot = b44.calc_merkle_root() - b44.solve() - self.tip = b44 - self.block_heights[b44.x16r] = height - self.blocks[44] = b44 - yield accepted() - - # A block with a non-coinbase as the first tx - non_coinbase = create_tx(out[15].tx, out[15].n, 1) - b45 = CBlock() - b45.nTime = self.tip.nTime + 1 - b45.hashPrevBlock = self.tip.x16r - b45.nBits = 0x207fffff - b45.vtx.append(non_coinbase) - b45.hashMerkleRoot = b45.calc_merkle_root() - b45.calc_x16r() - b45.solve() - self.block_heights[b45.x16r] = self.block_heights[self.tip.x16r]+1 - self.tip = b45 - self.blocks[45] = b45 - yield rejected(RejectResult(16, b'bad-cb-missing')) - - # A block with no txns - tip(44) - b46 = CBlock() - b46.nTime = b44.nTime+1 - b46.hashPrevBlock = b44.x16r - b46.nBits = 0x207fffff - b46.vtx = [] - b46.hashMerkleRoot = 0 - b46.solve() - self.block_heights[b46.x16r] = self.block_heights[b44.x16r]+1 - self.tip = b46 - assert 46 not in self.blocks - self.blocks[46] = b46 - ser_uint256(b46.hashMerkleRoot) - yield rejected(RejectResult(16, b'bad-blk-length')) - - # A block with invalid work - tip(44) - b47 = block(47, solve=False) - target = uint256_from_compact(b47.nBits) - while b47.sha256 < target: #changed > to < - b47.nNonce += 1 - 
b47.rehash() - yield rejected(RejectResult(16, b'high-hash')) - - # A block with timestamp > 2 hrs in the future - tip(44) - b48 = block(48, solve=False) - b48.nTime = int(time.time()) + 60 * 60 * 3 - b48.solve() - yield rejected(RejectResult(16, b'time-too-new')) - - # A block with an invalid merkle hash - tip(44) - b49 = block(49) - b49.hashMerkleRoot += 1 - b49.solve() - yield rejected(RejectResult(16, b'bad-txnmrklroot')) - - # A block with an incorrect POW limit - tip(44) - b50 = block(50) - b50.nBits = b50.nBits - 1 - b50.solve() - yield rejected(RejectResult(16, b'bad-diffbits')) - - # A block with two coinbase txns - tip(44) - block(51) - cb2 = create_coinbase(51, self.coinbase_pubkey) - update_block(51, [cb2]) - yield rejected(RejectResult(16, b'bad-cb-multiple')) - - # A block w/ duplicate txns - # Note: txns have to be in the right position in the merkle tree to trigger this error - tip(44) - b52 = block(52, spend=out[15]) - tx = create_tx(b52.vtx[1], 0, 1) - update_block(52, [tx, tx]) - yield rejected(RejectResult(16, b'bad-txns-duplicate')) - - # Test block timestamps - # -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) - # \-> b54 (15) - # - tip(43) - block(53, spend=out[14]) - yield rejected() # rejected since b44 is at same height - save_spendable_output() - - # invalid timestamp (b35 is 5 blocks back, so its time is MedianTimePast) - b54 = block(54, spend=out[15]) - b54.nTime = b35.nTime - 1 - b54.solve() - yield rejected(RejectResult(16, b'time-too-old')) - - # valid timestamp - tip(53) - b55 = block(55, spend=out[15]) - b55.nTime = b35.nTime - update_block(55, []) - yield accepted() - save_spendable_output() - - - # Test CVE-2012-2459 - # - # -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57p2 (16) - # \-> b57 (16) - # \-> b56p2 (16) - # \-> b56 (16) - # - # Merkle tree malleability (CVE-2012-2459): repeating sequences of transactions in a block without - # affecting the merkle root of a block, 
while still invalidating it. - # See: src/consensus/merkle.h - # - # b57 has three txns: coinbase, tx, tx1. The merkle root computation will duplicate tx. - # Result: OK - # - # b56 copies b57 but duplicates tx1 and does not recalculate the block hash. So it has a valid merkle - # root but duplicate transactions. - # Result: Fails - # - # b57p2 has six transactions in its merkle tree: - # - coinbase, tx, tx1, tx2, tx3, tx4 - # Merkle root calculation will duplicate as necessary. - # Result: OK. - # - # b56p2 copies b57p2 but adds both tx3 and tx4. The purpose of the test is to make sure the code catches - # duplicate txns that are not next to one another with the "bad-txns-duplicate" error (which indicates - # that the error was caught early, avoiding a DOS vulnerability.) - - # b57 - a good block with 2 txs, don't submit until end - tip(55) - block(57) - tx = create_and_sign_tx(out[16].tx, out[16].n, 1) - tx1 = create_tx(tx, 0, 1) - b57 = update_block(57, [tx, tx1]) - - # b56 - copy b57, add a duplicate tx - tip(55) - b56 = copy.deepcopy(b57) - self.blocks[56] = b56 - assert_equal(len(b56.vtx),3) - b56 = update_block(56, [tx1]) - assert_equal(b56.hash, b57.hash) - yield rejected(RejectResult(16, b'bad-txns-duplicate')) - - # b57p2 - a good block with 6 tx'es, don't submit until end - tip(55) - block("57p2") - tx = create_and_sign_tx(out[16].tx, out[16].n, 1) - tx1 = create_tx(tx, 0, 1) - tx2 = create_tx(tx1, 0, 1) - tx3 = create_tx(tx2, 0, 1) - tx4 = create_tx(tx3, 0, 1) - b57p2 = update_block("57p2", [tx, tx1, tx2, tx3, tx4]) - - # b56p2 - copy b57p2, duplicate two non-consecutive tx's - tip(55) - b56p2 = copy.deepcopy(b57p2) - self.blocks["b56p2"] = b56p2 - assert_equal(b56p2.hash, b57p2.hash) - assert_equal(len(b56p2.vtx),6) - update_block("b56p2", [tx3, tx4]) - yield rejected(RejectResult(16, b'bad-txns-duplicate')) - - tip("57p2") - yield accepted() - - tip(57) - yield rejected() #rejected because 57p2 seen first - save_spendable_output() - - # Test a few 
invalid tx types - # - # -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) - # \-> ??? (17) - # - - # tx with prevout.n out of range - tip(57) - block(58, spend=out[17]) - tx = CTransaction() - assert(len(out[17].tx.vout) < 42) - tx.vin.append(CTxIn(COutPoint(out[17].tx.x16r, 42), CScript([OP_TRUE]), 0xffffffff)) - tx.vout.append(CTxOut(0, b"")) - tx.calc_x16r() - update_block(58, [tx]) - yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) - - # tx with output value > input value out of range - tip(57) - block(59) - tx = create_and_sign_tx(out[17].tx, out[17].n, 51*COIN) - update_block(59, [tx]) - yield rejected(RejectResult(16, b'bad-txns-in-belowout')) - - # reset to good chain - tip(57) - b60 = block(60, spend=out[17]) - yield accepted() - save_spendable_output() - - # Test BIP30 - # - # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) - # \-> b61 (18) - # - # Blocks are not allowed to contain a transaction whose id matches that of an earlier, - # not-fully-spent transaction in the same chain. To test, make identical coinbases; - # the second one should be rejected. 
- # - tip(60) - b61 = block(61, spend=out[18]) - b61.vtx[0].vin[0].scriptSig = b60.vtx[0].vin[0].scriptSig #equalize the coinbases - b61.vtx[0].rehash() - b61 = update_block(61, []) - assert_equal(b60.vtx[0].serialize(), b61.vtx[0].serialize()) - yield rejected(RejectResult(16, b'bad-txns-BIP30')) - - - # Test tx.isFinal is properly rejected (not an exhaustive tx.isFinal test, that should be in data-driven transaction tests) - # - # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) - # \-> b62 (18) - # - tip(60) - block(62) - tx = CTransaction() - tx.nLockTime = 0xffffffff #this locktime is non-final - assert(out[18].n < len(out[18].tx.vout)) - tx.vin.append(CTxIn(COutPoint(out[18].tx.x16r, out[18].n))) # don't set nSequence - tx.vout.append(CTxOut(0, CScript([OP_TRUE]))) - assert(tx.vin[0].nSequence < 0xffffffff) - tx.calc_x16r() - update_block(62, [tx]) - yield rejected(RejectResult(16, b'bad-txns-nonfinal')) - - - # Test a non-final coinbase is also rejected - # - # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) - # \-> b63 (-) - # - tip(60) - b63 = block(63) - b63.vtx[0].nLockTime = 0xffffffff - b63.vtx[0].vin[0].nSequence = 0xDEADBEEF - b63.vtx[0].rehash() - update_block(63, []) - yield rejected(RejectResult(16, b'bad-txns-nonfinal')) - - - # This checks that a block with a bloated VARINT between the block_header and the array of tx such that - # the block is > MAX_BLOCK_BASE_SIZE with the bloated varint, but <= MAX_BLOCK_BASE_SIZE without the bloated varint, - # does not cause a subsequent, identical block with canonical encoding to be rejected. The test does not - # care whether the bloated block is accepted or rejected; it only cares that the second block is accepted. - # - # What matters is that the receiving node should not reject the bloated block, and then reject the canonical - # block on the basis that it's the same as an already-rejected block (which would be a consensus failure.) 
- # - # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) - # \ - # b64a (18) - # b64a is a bloated block (non-canonical varint) - # b64 is a good block (same as b64 but w/ canonical varint) - # - tip(60) - regular_block = block("64a", spend=out[18]) - - # make it a "broken_block," with non-canonical serialization - b64a = CBrokenBlock(regular_block) - b64a.initialize(regular_block) - self.blocks["64a"] = b64a - self.tip = b64a - tx = CTransaction() - - # use canonical serialization to calculate size - script_length = MAX_BLOCK_BASE_SIZE - len(b64a.normal_serialize()) - 69 - script_output = CScript([b'\x00' * script_length]) - tx.vout.append(CTxOut(0, script_output)) - tx.vin.append(CTxIn(COutPoint(b64a.vtx[1].sha256, 0))) - b64a = update_block("64a", [tx]) - assert_equal(len(b64a.serialize()), MAX_BLOCK_BASE_SIZE + 8) - yield TestInstance([[self.tip, None]]) - - # comptool workaround: to make sure b64 is delivered, manually erase b64a from blockstore - self.test.block_store.erase(b64a.sha256) - - tip(60) - b64 = CBlock(b64a) - b64.vtx = copy.deepcopy(b64a.vtx) - assert_equal(b64.hash, b64a.hash) - assert_equal(len(b64.serialize()), MAX_BLOCK_BASE_SIZE) - self.blocks[64] = b64 - update_block(64, []) - yield accepted() - save_spendable_output() - - # Spend an output created in the block itself - # - # -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) - # - tip(64) - block(65) - tx1 = create_and_sign_tx(out[19].tx, out[19].n, out[19].tx.vout[0].nValue) - tx2 = create_and_sign_tx(tx1, 0, 0) - update_block(65, [tx1, tx2]) - yield accepted() - save_spendable_output() - - # Attempt to spend an output created later in the same block - # - # -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) - # \-> b66 (20) - tip(65) - block(66) - tx1 = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue) - tx2 = create_and_sign_tx(tx1, 0, 1) - 
update_block(66, [tx2, tx1]) - yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) - - # Attempt to double-spend a transaction created in a block - # - # -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) - # \-> b67 (20) - # - # - tip(65) - block(67) - tx1 = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue) - tx2 = create_and_sign_tx(tx1, 0, 1) - tx3 = create_and_sign_tx(tx1, 0, 2) - update_block(67, [tx1, tx2, tx3]) - yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) - - # More tests of block subsidy - # - # -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) - # \-> b68 (20) - # - # b68 - coinbase with an extra 10 satoshis, - # creates a tx that has 9 satoshis from out[20] go to fees - # this fails because the coinbase is trying to claim 1 satoshi too much in fees - # - # b69 - coinbase with extra 10 satoshis, and a tx that gives a 10 satoshi fee - # this succeeds - # - tip(65) - block(68, additional_coinbase_value=10) - tx = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue-9) - update_block(68, [tx]) - yield rejected(RejectResult(16, b'bad-cb-amount')) - - tip(65) - b69 = block(69, additional_coinbase_value=10) - tx = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue-10) - update_block(69, [tx]) - yield accepted() - save_spendable_output() - - # Test spending the outpoint of a non-existent transaction - # - # -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) - # \-> b70 (21) - # - tip(69) - block(70, spend=out[21]) - bogus_tx = CTransaction() - bogus_tx.sha256 = uint256_from_str(b"23c70ed7c0506e9178fc1a987f40a33946d4ad4c962b5ae3a52546da53af0c5c") - tx = CTransaction() - tx.vin.append(CTxIn(COutPoint(bogus_tx.sha256, 0), b"", 0xffffffff)) - tx.vout.append(CTxOut(1, b"")) - update_block(70, [tx]) - yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent')) 
- - - # Test accepting an invalid block which has the same hash as a valid one (via merkle tree tricks) - # - # -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) -> b72 (21) - # \-> b71 (21) - # - # b72 is a good block. - # b71 is a copy of 72, but re-adds one of its transactions. However, it has the same hash as b71. - # - tip(69) - block(72) - tx1 = create_and_sign_tx(out[21].tx, out[21].n, 2) - tx2 = create_and_sign_tx(tx1, 0, 1) - b72 = update_block(72, [tx1, tx2]) # now tip is 72 - b71 = copy.deepcopy(b72) - b71.vtx.append(tx2) # add duplicate tx2 - self.block_heights[b71.sha256] = self.block_heights[b69.sha256] + 1 # b71 builds off b69 - self.blocks[71] = b71 - - assert_equal(len(b71.vtx), 4) - assert_equal(len(b72.vtx), 3) - assert_equal(b72.sha256, b71.sha256) - - tip(71) - yield rejected(RejectResult(16, b'bad-txns-duplicate')) - tip(72) - yield accepted() - save_spendable_output() - - - # Test some invalid scripts and max_block_sigops - # - # -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) -> b72 (21) - # \-> b** (22) - # - - # b73 - tx with excessive sigops that are placed after an excessively large script element. - # The purpose of the test is to make sure those sigops are counted. 
- # - # script is a bytearray of size 20,526 - # - # bytearray[0-19,998] : OP_CHECKSIG - # bytearray[19,999] : OP_PUSHDATA4 - # bytearray[20,000-20,003]: 521 (max_script_element_size+1, in little-endian format) - # bytearray[20,004-20,525]: unread data (script_element) - # bytearray[20,526] : OP_CHECKSIG (this puts us over the limit) - # - tip(72) - block(73) - size = max_block_sigops - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5 + 1 - a = bytearray([OP_CHECKSIG] * size) - a[max_block_sigops - 1] = int("4e",16) # OP_PUSHDATA4 - - element_size = MAX_SCRIPT_ELEMENT_SIZE + 1 - a[max_block_sigops] = element_size % 256 - a[max_block_sigops+1] = element_size // 256 - a[max_block_sigops+2] = 0 - a[max_block_sigops+3] = 0 - - tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a)) - b73 = update_block(73, [tx]) - assert_equal(get_legacy_sigopcount_block(b73), max_block_sigops+1) - yield rejected(RejectResult(16, b'bad-blk-sigops')) - - # b74/75 - if we push an invalid script element, all previous sigops are counted, - # but sigops after the element are not counted. - # - # The invalid script element is that the push_data indicates that - # there will be a large amount of data (0xffffff bytes), but we only - # provide a much smaller number. These bytes are CHECKSIGS so they would - # cause b75 to fail for excessive sigops, if those bytes were counted. 
- # - # b74 fails because we put max_block_sigops+1 before the element - # b75 succeeds because we put max_block_sigops before the element - # - # - tip(72) - block(74) - size = max_block_sigops - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42 # total = 20,561 - a = bytearray([OP_CHECKSIG] * size) - a[max_block_sigops] = 0x4e - a[max_block_sigops+1] = 0xfe - a[max_block_sigops+2] = 0xff - a[max_block_sigops+3] = 0xff - a[max_block_sigops+4] = 0xff - tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a)) - update_block(74, [tx]) - yield rejected(RejectResult(16, b'bad-blk-sigops')) - - tip(72) - block(75) - size = max_block_sigops - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42 - a = bytearray([OP_CHECKSIG] * size) - a[max_block_sigops-1] = 0x4e - a[max_block_sigops] = 0xff - a[max_block_sigops+1] = 0xff - a[max_block_sigops+2] = 0xff - a[max_block_sigops+3] = 0xff - tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a)) - update_block(75, [tx]) - yield accepted() - save_spendable_output() - - # Check that if we push an element filled with CHECKSIGs, they are not counted - tip(75) - block(76) - size = max_block_sigops - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5 - a = bytearray([OP_CHECKSIG] * size) - a[max_block_sigops-1] = 0x4e # PUSHDATA4, but leave the following bytes as just checksigs - tx = create_and_sign_tx(out[23].tx, 0, 1, CScript(a)) - update_block(76, [tx]) - yield accepted() - save_spendable_output() - - # Test transaction resurrection - # - # -> b77 (24) -> b78 (25) -> b79 (26) - # \-> b80 (25) -> b81 (26) -> b82 (27) - # - # b78 creates a tx, which is spent in b79. After b82, both should be in mempool - # - # The tx'es must be unsigned and pass the node's mempool policy. 
It is unsigned for the - # rather obscure reason that the Python signature code does not distinguish between - # Low-S and High-S values (whereas the raven code has custom code which does so); - # as a result of which, the odds are 50% that the python code will use the right - # value and the transaction will be accepted into the mempool. Until we modify the - # test framework to support low-S signing, we are out of luck. - # - # To get around this issue, we construct transactions which are not signed and which - # spend to OP_TRUE. If the standard-ness rules change, this test would need to be - # updated. (Perhaps to spend to a P2SH OP_TRUE script) - # - tip(76) - block(77) - tx77 = create_and_sign_tx(out[24].tx, out[24].n, 10*COIN) - update_block(77, [tx77]) - yield accepted() - save_spendable_output() - - block(78) - tx78 = create_tx(tx77, 0, 9*COIN) - update_block(78, [tx78]) - yield accepted() - - block(79) - tx79 = create_tx(tx78, 0, 8*COIN) - update_block(79, [tx79]) - yield accepted() - - # mempool should be empty - assert_equal(len(self.nodes[0].getrawmempool()), 0) - - tip(77) - block(80, spend=out[25]) - yield rejected() - save_spendable_output() - - block(81, spend=out[26]) - yield rejected() # other chain is same length - save_spendable_output() - - block(82, spend=out[27]) - yield accepted() # now this chain is longer, triggers re-org - save_spendable_output() - - # now check that tx78 and tx79 have been put back into the peer's mempool - mempool = self.nodes[0].getrawmempool() - assert_equal(len(mempool), 2) - assert(tx78.hash in mempool) - assert(tx79.hash in mempool) - - - # Test invalid opcodes in dead execution paths. 
- # - # -> b81 (26) -> b82 (27) -> b83 (28) - # - block(83) - op_codes = [OP_IF, OP_INVALIDOPCODE, OP_ELSE, OP_TRUE, OP_ENDIF] - script = CScript(op_codes) - tx1 = create_and_sign_tx(out[28].tx, out[28].n, out[28].tx.vout[0].nValue, script) - - tx2 = create_and_sign_tx(tx1, 0, 0, CScript([OP_TRUE])) - tx2.vin[0].scriptSig = CScript([OP_FALSE]) - tx2.rehash() - - update_block(83, [tx1, tx2]) - yield accepted() - save_spendable_output() - - - # Reorg on/off blocks that have OP_RETURN in them (and try to spend them) - # - # -> b81 (26) -> b82 (27) -> b83 (28) -> b84 (29) -> b87 (30) -> b88 (31) - # \-> b85 (29) -> b86 (30) \-> b89a (32) - # - # - block(84) - tx1 = create_tx(out[29].tx, out[29].n, 0, CScript([OP_RETURN])) - tx1.vout.append(CTxOut(0, CScript([OP_TRUE]))) - tx1.vout.append(CTxOut(0, CScript([OP_TRUE]))) - tx1.vout.append(CTxOut(0, CScript([OP_TRUE]))) - tx1.vout.append(CTxOut(0, CScript([OP_TRUE]))) - tx1.calc_x16r() - self.sign_tx(tx1, out[29].tx, out[29].n) - tx1.rehash() - tx2 = create_tx(tx1, 1, 0, CScript([OP_RETURN])) - tx2.vout.append(CTxOut(0, CScript([OP_RETURN]))) - tx3 = create_tx(tx1, 2, 0, CScript([OP_RETURN])) - tx3.vout.append(CTxOut(0, CScript([OP_TRUE]))) - tx4 = create_tx(tx1, 3, 0, CScript([OP_TRUE])) - tx4.vout.append(CTxOut(0, CScript([OP_RETURN]))) - tx5 = create_tx(tx1, 4, 0, CScript([OP_RETURN])) - - update_block(84, [tx1,tx2,tx3,tx4,tx5]) - yield accepted() - save_spendable_output() - - tip(83) - block(85, spend=out[29]) - yield rejected() - - block(86, spend=out[30]) - yield accepted() - - tip(84) - block(87, spend=out[30]) - yield rejected() - save_spendable_output() - - block(88, spend=out[31]) - yield accepted() - save_spendable_output() - - # trying to spend the OP_RETURN output is rejected - block("89a", spend=out[32]) - tx = create_tx(tx1, 0, 0, CScript([OP_TRUE])) - update_block("89a", [tx]) - yield rejected() - - - # Test re-org of a week's worth of blocks (1088 blocks) - # This test takes a minute or two and can be 
accomplished in memory - # - if self.options.runbarelyexpensive: - tip(88) - large_reorg_size = 1088 - test1 = TestInstance(sync_every_block=False) - spend=out[32] - i = 0 - for i in range(89, large_reorg_size + 89): - b = block(i, spend) - tx = CTransaction() - script_length = MAX_BLOCK_BASE_SIZE - len(b.serialize()) - 69 - script_output = CScript([b'\x00' * script_length]) - tx.vout.append(CTxOut(0, script_output)) - tx.vin.append(CTxIn(COutPoint(b.vtx[1].sha256, 0))) - b = update_block(i, [tx]) - assert_equal(len(b.serialize()), MAX_BLOCK_BASE_SIZE) - test1.blocks_and_transactions.append([self.tip, True]) - save_spendable_output() - spend = get_spendable_output() - - yield test1 - chain1_tip = i - - # now create alt chain of same length - tip(88) - test2 = TestInstance(sync_every_block=False) - for i in range(89, large_reorg_size + 89): - block("alt"+str(i)) - test2.blocks_and_transactions.append([self.tip, False]) - yield test2 - - # extend alt chain to trigger re-org - block("alt" + str(chain1_tip + 1)) - yield accepted() - - # ... and re-org back to the first chain - tip(chain1_tip) - block(chain1_tip + 1) - yield rejected() - block(chain1_tip + 2) - yield accepted() - - chain1_tip += 2 - - - -if __name__ == '__main__': - FullBlockTest().main() diff --git a/test/functional/feature_cltv.py b/test/functional/feature_cltv.py index 754cfaea3d..3c19fd8bec 100755 --- a/test/functional/feature_cltv.py +++ b/test/functional/feature_cltv.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/feature_csv_activation.py b/test/functional/feature_csv_activation.py deleted file mode 100755 index 6237bc5172..0000000000 --- a/test/functional/feature_csv_activation.py +++ /dev/null @@ -1,549 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers -# Distributed under the MIT software license, see the accompanying -# file COPYING or http://www.opensource.org/licenses/mit-license.php. - -""" -Test activation of the first version bits soft fork. - -This soft fork will activate the following BIPS: -BIP 68 - nSequence relative lock times -BIP 112 - CHECKSEQUENCEVERIFY -BIP 113 - MedianTimePast semantics for nLockTime - -regtest lock-in with 108/144 block signalling -activation after a further 144 blocks - -mine 82 blocks whose coinbases will be used to generate inputs for our tests -mine 61 blocks to transition from DEFINED to STARTED -mine 144 blocks only 100 of which are signaling readiness in order to fail to change state this period -mine 144 blocks with 108 signaling and verify STARTED->LOCKED_IN -mine 140 blocks and seed block chain with the 82 inputs will use for our tests at height 572 -mine 3 blocks and verify still at LOCKED_IN and test that enforcement has not triggered -mine 1 block and test that enforcement has triggered (which triggers ACTIVE) -Test BIP 113 is enforced -Mine 4 blocks so next height is 580 and test BIP 68 is enforced for time and height -Mine 1 block so next height is 581 and test BIP 68 now passes time but not height -Mine 1 block so next height is 582 and test BIP 68 now passes time and height -Test that BIP 112 is enforced - -Various transactions will be used to test that the BIPs rules are not enforced before the soft fork activates -And that after the soft fork activates transactions pass and fail as they should according to the rules. -For each BIP, transactions of versions 1 and 2 will be tested. 
----------------- -BIP 113: -bip113tx - modify the nLocktime variable - -BIP 68: -bip68txs - 16 txs with nSequence relative locktime of 10 with various bits set as per the relative_locktimes below - -BIP 112: -bip112txs_vary_nSequence - 16 txs with nSequence relative_locktimes of 10 evaluated against 10 OP_CSV OP_DROP -bip112txs_vary_nSequence_9 - 16 txs with nSequence relative_locktimes of 9 evaluated against 10 OP_CSV OP_DROP -bip112txs_vary_OP_CSV - 16 txs with nSequence = 10 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP -bip112txs_vary_OP_CSV_9 - 16 txs with nSequence = 9 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP -bip112tx_special - test negative argument to OP_CSV -""" - -from io import BytesIO -import time -from test_framework.test_framework import ComparisonTestFramework -from test_framework.util import Decimal, hex_str_to_bytes, assert_equal, get_bip9_status -from test_framework.mininode import to_hex, CTransaction, NetworkThread -from test_framework.blocktools import create_coinbase, create_block -from test_framework.comptool import TestInstance, TestManager -from test_framework.script import OP_DROP, CScript, OP_CHECKSEQUENCEVERIFY - - -base_relative_locktime = 10 -seq_disable_flag = 1<<31 -seq_random_high_bit = 1<<25 -seq_type_flag = 1<<22 -seq_random_low_bit = 1<<18 - -# b31,b25,b22,b18 represent the 31st, 25th, 22nd and 18th bits respectively in the nSequence field -# relative_locktimes[b31][b25][b22][b18] is a base_relative_locktime with the indicated bits set if their indices are 1 -relative_locktimes = [] -for b31 in range(2): - b25times = [] - for b25 in range(2): - b22times = [] - for b22 in range(2): - b18times = [] - for b18 in range(2): - rlt = base_relative_locktime - if b31: - rlt = rlt | seq_disable_flag - if b25: - rlt = rlt | seq_random_high_bit - if b22: - rlt = rlt | seq_type_flag - if b18: - rlt = rlt | seq_random_low_bit - b18times.append(rlt) - b22times.append(b18times) - 
b25times.append(b22times) - relative_locktimes.append(b25times) - - -# noinspection PyShadowingNames -def all_rlt_txs(txarray): - txs = [] - for b31 in range(2): - for b25 in range(2): - for b22 in range(2): - for b18 in range(2): - txs.append(txarray[b31][b25][b22][b18]) - return txs - - -# noinspection PyPep8Naming -class BIP68_112_113Test(ComparisonTestFramework): - def set_test_params(self): - self.num_nodes = 1 - self.setup_clean_chain = True - self.extra_args = [['-whitelist=127.0.0.1', '-blockversion=4']] - - def run_test(self): - test = TestManager(self, self.options.tmpdir) - test.add_all_connections(self.nodes) - NetworkThread().start() # Start up network handling in another thread - test.run() - - def send_generic_input_tx(self, node, coinbases): - amount = Decimal("49.99") - return node.sendrawtransaction(to_hex(self.sign_transaction(node, self.create_transaction(node, node.getblock(coinbases.pop())['tx'][0], self.nodeaddress, amount)))) - - @staticmethod - def create_transaction(node, txid, to_address, amount): - inputs = [{ "txid" : txid, "vout" : 0}] - outputs = { to_address : amount } - rawtx = node.createrawtransaction(inputs, outputs) - tx = CTransaction() - f = BytesIO(hex_str_to_bytes(rawtx)) - tx.deserialize(f) - return tx - - @staticmethod - def sign_transaction(node, unsignedtx): - rawtx = to_hex(unsignedtx) - signresult = node.signrawtransaction(rawtx) - tx = CTransaction() - f = BytesIO(hex_str_to_bytes(signresult['hex'])) - tx.deserialize(f) - return tx - - def generate_blocks(self, number, version, test_blocks=None): - if test_blocks is None: - test_blocks = [] - for _ in range(number): - block = self.create_test_block([], version) - test_blocks.append([block, True]) - self.last_block_time += 600 - self.tip = block.sha256 - self.tipheight += 1 - return test_blocks - - def create_test_block(self, txs, version = 536870912): - block = create_block(self.tip, create_coinbase(self.tipheight + 1), self.last_block_time + 600) - block.nVersion = 
version - block.vtx.extend(txs) - block.hashMerkleRoot = block.calc_merkle_root() - block.rehash() - block.solve() - return block - - # noinspection PyShadowingNames - def create_bip68txs(self, bip68inputs, txversion, locktime_delta = 0): - txs = [] - assert(len(bip68inputs) >= 16) - i = 0 - for b31 in range(2): - b25txs = [] - for b25 in range(2): - b22txs = [] - for b22 in range(2): - b18txs = [] - for b18 in range(2): - tx = self.create_transaction(self.nodes[0], bip68inputs[i], self.nodeaddress, Decimal("49.98")) - i += 1 - tx.nVersion = txversion - tx.vin[0].nSequence = relative_locktimes[b31][b25][b22][b18] + locktime_delta - b18txs.append(self.sign_transaction(self.nodes[0], tx)) - b22txs.append(b18txs) - b25txs.append(b22txs) - txs.append(b25txs) - return txs - - def create_bip112special(self, input_data, txversion): - tx = self.create_transaction(self.nodes[0], input_data, self.nodeaddress, Decimal("49.98")) - tx.nVersion = txversion - signtx = self.sign_transaction(self.nodes[0], tx) - signtx.vin[0].scriptSig = CScript([-1, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig))) - return signtx - - # noinspection PyShadowingNames - def create_bip112txs(self, bip112inputs, vary_op_csv, txversion, locktime_delta = 0): - txs = [] - assert(len(bip112inputs) >= 16) - i = 0 - for b31 in range(2): - b25txs = [] - for b25 in range(2): - b22txs = [] - for b22 in range(2): - b18txs = [] - for b18 in range(2): - tx = self.create_transaction(self.nodes[0], bip112inputs[i], self.nodeaddress, Decimal("49.98")) - i += 1 - if vary_op_csv: # if varying OP_CSV, nSequence is fixed - tx.vin[0].nSequence = base_relative_locktime + locktime_delta - else: # vary nSequence instead, OP_CSV is fixed - tx.vin[0].nSequence = relative_locktimes[b31][b25][b22][b18] + locktime_delta - tx.nVersion = txversion - signtx = self.sign_transaction(self.nodes[0], tx) - if vary_op_csv: - signtx.vin[0].scriptSig = CScript([relative_locktimes[b31][b25][b22][b18], 
OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig))) - else: - signtx.vin[0].scriptSig = CScript([base_relative_locktime, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig))) - b18txs.append(signtx) - b22txs.append(b18txs) - b25txs.append(b22txs) - txs.append(b25txs) - return txs - - # noinspection PyShadowingNames - def get_tests(self): - long_past_time = int(time.time()) - 600 * 1000 # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future - self.nodes[0].setmocktime(long_past_time - 100) # enough so that the generated blocks will still all be before long_past_time - self.coinbase_blocks = self.nodes[0].generate(1 + 16 + 2*32 + 1) # 82 blocks generated for inputs - self.nodes[0].setmocktime(0) # set time back to present so yielded blocks aren't in the future as we advance last_block_time - self.tipheight = 82 # height of the next block to build - self.last_block_time = long_past_time - self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0) - self.nodeaddress = self.nodes[0].getnewaddress() - - assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'defined') - test_blocks = self.generate_blocks(61, 4) - yield TestInstance(test_blocks, sync_every_block=False) # 1 - # Advanced from DEFINED to STARTED, height = 143 - assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started') - - # Fail to achieve LOCKED_IN 100 out of 144 signal bit 0 - # using a variety of bits to simulate multiple parallel softforks - test_blocks = self.generate_blocks(50, 536870913) # 0x20000001 (signalling ready) - test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not) - test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready) - test_blocks = self.generate_blocks(24, 536936448, test_blocks) # 0x20010000 (signalling not) - yield TestInstance(test_blocks, sync_every_block=False) # 2 - # Failed to advance past STARTED, 
height = 287 - assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started') - - # 108 out of 144 signal bit 0 to achieve lock-in - # using a variety of bits to simulate multiple parallel softforks - test_blocks = self.generate_blocks(58, 536870913) # 0x20000001 (signalling ready) - test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not) - test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready) - test_blocks = self.generate_blocks(10, 536936448, test_blocks) # 0x20010000 (signalling not) - yield TestInstance(test_blocks, sync_every_block=False) # 3 - # Advanced from STARTED to LOCKED_IN, height = 431 - assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in') - - # 140 more version 4 blocks - test_blocks = self.generate_blocks(140, 4) - yield TestInstance(test_blocks, sync_every_block=False) # 4 - - ### Inputs at height = 572 - # Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block) - # Note we reuse inputs for v1 and v2 txs so must test these separately - # 16 normal inputs - bip68inputs = [] - for _ in range(16): - bip68inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)) - # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig) - bip112basicinputs = [] - for _ in range(2): - inputs = [] - for _ in range(16): - inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)) - bip112basicinputs.append(inputs) - # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig) - bip112diverseinputs = [] - for _ in range(2): - inputs = [] - for _ in range(16): - inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)) - bip112diverseinputs.append(inputs) - # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig) - bip112specialinput = 
self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks) - # 1 normal input - bip113input = self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks) - - self.nodes[0].setmocktime(self.last_block_time + 600) - inputblockhash = self.nodes[0].generate(1)[0] # 1 block generated for inputs to be in chain at height 572 - self.nodes[0].setmocktime(0) - self.tip = int("0x" + inputblockhash, 0) - self.tipheight += 1 - self.last_block_time += 600 - assert_equal(len(self.nodes[0].getblock(inputblockhash,True)["tx"]), 82+1) - - # 2 more version 4 blocks - test_blocks = self.generate_blocks(2, 4) - yield TestInstance(test_blocks, sync_every_block=False) # 5 - # Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575) - assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in') - - # Test both version 1 and version 2 transactions for all tests - # BIP113 test transaction will be modified before each use to put in appropriate block time - bip113tx_v1 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98")) - bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE - bip113tx_v1.nVersion = 1 - bip113tx_v2 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98")) - bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE - bip113tx_v2.nVersion = 2 - - # For BIP68 test all 16 relative sequence locktimes - bip68txs_v1 = self.create_bip68txs(bip68inputs, 1) - bip68txs_v2 = self.create_bip68txs(bip68inputs, 2) - - # For BIP112 test: - # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs - bip112txs_vary_nSequence_v1 = self.create_bip112txs(bip112basicinputs[0], False, 1) - bip112txs_vary_nSequence_v2 = self.create_bip112txs(bip112basicinputs[0], False, 2) - # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs - bip112txs_vary_nSequence_9_v1 = self.create_bip112txs(bip112basicinputs[1], False, 1, -1) - bip112txs_vary_nSequence_9_v2 = 
self.create_bip112txs(bip112basicinputs[1], False, 2, -1) - # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs - bip112txs_vary_OP_CSV_v1 = self.create_bip112txs(bip112diverseinputs[0], True, 1) - bip112txs_vary_OP_CSV_v2 = self.create_bip112txs(bip112diverseinputs[0], True, 2) - # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs - bip112txs_vary_OP_CSV_9_v1 = self.create_bip112txs(bip112diverseinputs[1], True, 1, -1) - bip112txs_vary_OP_CSV_9_v2 = self.create_bip112txs(bip112diverseinputs[1], True, 2, -1) - # -1 OP_CSV OP_DROP input - bip112tx_special_v1 = self.create_bip112special(bip112specialinput, 1) - bip112tx_special_v2 = self.create_bip112special(bip112specialinput, 2) - - - ### TESTING ### - ################################## - ### Before Soft Forks Activate ### - ################################## - # All txs should pass - ### Version 1 txs ### - success_txs = [] - # add BIP113 tx and -1 CSV tx - bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block - bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1) - success_txs.append(bip113signed1) - success_txs.append(bip112tx_special_v1) - # add BIP 68 txs - success_txs.extend(all_rlt_txs(bip68txs_v1)) - # add BIP 112 with seq=10 txs - success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1)) - success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1)) - # try BIP 112 with seq=9 txs - success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1)) - success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1)) - yield TestInstance([[self.create_test_block(success_txs), True]]) # 6 - self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) - - ### Version 2 txs ### - success_txs = [] - # add BIP113 tx and -1 CSV tx - bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block - bip113signed2 = 
self.sign_transaction(self.nodes[0], bip113tx_v2) - success_txs.append(bip113signed2) - success_txs.append(bip112tx_special_v2) - # add BIP 68 txs - success_txs.extend(all_rlt_txs(bip68txs_v2)) - # add BIP 112 with seq=10 txs - success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2)) - success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2)) - # try BIP 112 with seq=9 txs - success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2)) - success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2)) - yield TestInstance([[self.create_test_block(success_txs), True]]) # 7 - self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) - - - # 1 more version 4 block to get us to height 575 so the fork should now be active for the next block - test_blocks = self.generate_blocks(1, 4) - yield TestInstance(test_blocks, sync_every_block=False) # 8 - assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active') - - - ################################# - ### After Soft Forks Activate ### - ################################# - ### BIP 113 ### - # BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules - bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block - bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1) - bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block - bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2) - for bip113tx in [bip113signed1, bip113signed2]: - yield TestInstance([[self.create_test_block([bip113tx]), False]]) # 9,10 - # BIP 113 tests should now pass if the locktime is < MTP - bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block - bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1) - bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block - bip113signed2 = 
self.sign_transaction(self.nodes[0], bip113tx_v2) - for bip113tx in [bip113signed1, bip113signed2]: - yield TestInstance([[self.create_test_block([bip113tx]), True]]) # 11,12 - self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) - - # Next block height = 580 after 4 blocks of random version - test_blocks = self.generate_blocks(4, 1234) - yield TestInstance(test_blocks, sync_every_block=False) # 13 - - ### BIP 68 ### - ### Version 1 txs ### - # All still pass - success_txs = [] - success_txs.extend(all_rlt_txs(bip68txs_v1)) - yield TestInstance([[self.create_test_block(success_txs), True]]) # 14 - self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) - - ### Version 2 txs ### - bip68success_txs = [] - # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass - for b25 in range(2): - for b22 in range(2): - for b18 in range(2): - bip68success_txs.append(bip68txs_v2[1][b25][b22][b18]) - yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 15 - self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) - # All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512 - bip68timetxs = [] - for b25 in range(2): - for b18 in range(2): - bip68timetxs.append(bip68txs_v2[0][b25][1][b18]) - for tx in bip68timetxs: - yield TestInstance([[self.create_test_block([tx]), False]]) # 16 - 19 - bip68heighttxs = [] - for b25 in range(2): - for b18 in range(2): - bip68heighttxs.append(bip68txs_v2[0][b25][0][b18]) - for tx in bip68heighttxs: - yield TestInstance([[self.create_test_block([tx]), False]]) # 20 - 23 - - # Advance one block to 581 - test_blocks = self.generate_blocks(1, 1234) - yield TestInstance(test_blocks, sync_every_block=False) # 24 - - # Height txs should fail and time txs should now pass 9 * 600 > 10 * 512 - bip68success_txs.extend(bip68timetxs) - yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 25 - self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) - 
for tx in bip68heighttxs: - yield TestInstance([[self.create_test_block([tx]), False]]) # 26 - 29 - - # Advance one block to 582 - test_blocks = self.generate_blocks(1, 1234) - yield TestInstance(test_blocks, sync_every_block=False) # 30 - - # All BIP 68 txs should pass - bip68success_txs.extend(bip68heighttxs) - yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 31 - self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) - - - ### BIP 112 ### - ### Version 1 txs ### - # -1 OP_CSV tx should fail - yield TestInstance([[self.create_test_block([bip112tx_special_v1]), False]]) #32 - # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass - success_txs = [] - for b25 in range(2): - for b22 in range(2): - for b18 in range(2): - success_txs.append(bip112txs_vary_OP_CSV_v1[1][b25][b22][b18]) - success_txs.append(bip112txs_vary_OP_CSV_9_v1[1][b25][b22][b18]) - yield TestInstance([[self.create_test_block(success_txs), True]]) # 33 - self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) - - # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail - fail_txs = [] - fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1)) - fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1)) - for b25 in range(2): - for b22 in range(2): - for b18 in range(2): - fail_txs.append(bip112txs_vary_OP_CSV_v1[0][b25][b22][b18]) - fail_txs.append(bip112txs_vary_OP_CSV_9_v1[0][b25][b22][b18]) - - for tx in fail_txs: - yield TestInstance([[self.create_test_block([tx]), False]]) # 34 - 81 - - ### Version 2 txs ### - # -1 OP_CSV tx should fail - yield TestInstance([[self.create_test_block([bip112tx_special_v2]), False]]) #82 - - # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met) - success_txs = [] - for b25 in range(2): - for b22 in range(2): - for b18 in range(2): - 
success_txs.append(bip112txs_vary_OP_CSV_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV - success_txs.append(bip112txs_vary_OP_CSV_9_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV_9 - - yield TestInstance([[self.create_test_block(success_txs), True]]) # 83 - self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) - - ## SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ## - # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check - fail_txs = [] - fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2)) # 16/16 of vary_nSequence_9 - for b25 in range(2): - for b22 in range(2): - for b18 in range(2): - fail_txs.append(bip112txs_vary_OP_CSV_9_v2[0][b25][b22][b18]) # 16/16 of vary_OP_CSV_9 - - for tx in fail_txs: - yield TestInstance([[self.create_test_block([tx]), False]]) # 84 - 107 - - # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail - fail_txs = [] - for b25 in range(2): - for b22 in range(2): - for b18 in range(2): - fail_txs.append(bip112txs_vary_nSequence_v2[1][b25][b22][b18]) # 8/16 of vary_nSequence - for tx in fail_txs: - yield TestInstance([[self.create_test_block([tx]), False]]) # 108-115 - - # If sequencelock types mismatch, tx should fail - fail_txs = [] - for b25 in range(2): - for b18 in range(2): - fail_txs.append(bip112txs_vary_nSequence_v2[0][b25][1][b18]) # 12/16 of vary_nSequence - fail_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][1][b18]) # 12/16 of vary_OP_CSV - for tx in fail_txs: - yield TestInstance([[self.create_test_block([tx]), False]]) # 116-123 - - # Remaining txs should pass, just test masking works properly - success_txs = [] - for b25 in range(2): - for b18 in range(2): - success_txs.append(bip112txs_vary_nSequence_v2[0][b25][0][b18]) # 16/16 of vary_nSequence - success_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][0][b18]) # 16/16 of vary_OP_CSV - yield TestInstance([[self.create_test_block(success_txs), True]]) # 124 - 
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) - - # Additional test, of checking that comparison of two time types works properly - time_txs = [] - for b25 in range(2): - for b18 in range(2): - tx = bip112txs_vary_OP_CSV_v2[0][b25][1][b18] - tx.vin[0].nSequence = base_relative_locktime | seq_type_flag - signtx = self.sign_transaction(self.nodes[0], tx) - time_txs.append(signtx) - yield TestInstance([[self.create_test_block(time_txs), True]]) # 125 - self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) - - ### Missing aspects of test - ## Testing empty stack fails - - -if __name__ == '__main__': - BIP68_112_113Test().main() diff --git a/test/functional/feature_dbcrash.py b/test/functional/feature_dbcrash.py index 7c1cb51960..b50fae593a 100755 --- a/test/functional/feature_dbcrash.py +++ b/test/functional/feature_dbcrash.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/feature_dersig.py b/test/functional/feature_dersig.py index 9f18e52b5f..c6dd3bade1 100755 --- a/test/functional/feature_dersig.py +++ b/test/functional/feature_dersig.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/feature_fee_estimation.py b/test/functional/feature_fee_estimation.py index 819fa3a75a..00d7db7bec 100755 --- a/test/functional/feature_fee_estimation.py +++ b/test/functional/feature_fee_estimation.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/feature_listmyassets.py b/test/functional/feature_listmyassets.py index 0424e9c0c5..65b864b740 100755 --- a/test/functional/feature_listmyassets.py +++ b/test/functional/feature_listmyassets.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/feature_loadblock.py b/test/functional/feature_loadblock.py index c56bb01780..47427cb14e 100755 --- a/test/functional/feature_loadblock.py +++ b/test/functional/feature_loadblock.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017-2019 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/feature_maxreorgdepth.py b/test/functional/feature_maxreorgdepth.py index f1dc45aa99..95212bc0a0 100755 --- a/test/functional/feature_maxreorgdepth.py +++ b/test/functional/feature_maxreorgdepth.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/feature_maxuploadtarget.py b/test/functional/feature_maxuploadtarget.py index 5864d94df2..aee6a6ce63 100755 --- a/test/functional/feature_maxuploadtarget.py +++ b/test/functional/feature_maxuploadtarget.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/feature_messaging.py b/test/functional/feature_messaging.py index de3417055f..4dfbd1c4d9 100755 --- a/test/functional/feature_messaging.py +++ b/test/functional/feature_messaging.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/feature_minchainwork.py b/test/functional/feature_minchainwork.py index 6aaa4fe7bb..99be4e1c93 100755 --- a/test/functional/feature_minchainwork.py +++ b/test/functional/feature_minchainwork.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/feature_notifications.py b/test/functional/feature_notifications.py index dd3e5a5393..b954167ffe 100755 --- a/test/functional/feature_notifications.py +++ b/test/functional/feature_notifications.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/feature_nulldummy.py b/test/functional/feature_nulldummy.py index 8da1b78911..cd80f1a1b8 100755 --- a/test/functional/feature_nulldummy.py +++ b/test/functional/feature_nulldummy.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-mit-license.php. 
@@ -19,19 +19,20 @@ from io import BytesIO import time from test_framework.test_framework import RavenTestFramework -from test_framework.util import bytes_to_hex_str, assert_raises_rpc_error, hex_str_to_bytes, assert_equal +from test_framework.util import assert_raises_rpc_error, hex_str_to_bytes, assert_equal from test_framework.mininode import CTransaction, NetworkThread from test_framework.blocktools import create_coinbase, create_block, add_witness_commitment from test_framework.script import CScript NULLDUMMY_ERROR = "64: non-mandatory-script-verify-flag (Dummy CHECKMULTISIG argument must be zero)" + def true_dummy(tx): script_sig = CScript(tx.vin[0].scriptSig) newscript = [] for i in script_sig: if len(newscript) == 0: - assert(len(i) == 0) + assert (len(i) == 0) newscript.append(b'\x51') else: newscript.append(i) @@ -39,6 +40,7 @@ def true_dummy(tx): tx.vin[0].scriptSig = CScript(newscript) tx.rehash() + class NULLDUMMYTest(RavenTestFramework): def set_test_params(self): @@ -48,16 +50,16 @@ def set_test_params(self): def run_test(self): self.address = self.nodes[0].getnewaddress() - self.ms_address = self.nodes[0].addmultisigaddress(1,[self.address]) + self.ms_address = self.nodes[0].addmultisigaddress(1, [self.address]) self.wit_address = self.nodes[0].addwitnessaddress(self.address) self.wit_ms_address = self.nodes[0].addwitnessaddress(self.ms_address) - NetworkThread().start() # Start up network handling in another thread - self.coinbase_blocks = self.nodes[0].generate(2) # Block 2 + NetworkThread().start() # Start up network handling in another thread + self.coinbase_blocks = self.nodes[0].generate(2) # Block 2 coinbase_txid = [] for i in self.coinbase_blocks: coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0]) - self.nodes[0].generate(427) # Block 429 + self.nodes[0].generate(427) # Block 429 self.lastblockhash = self.nodes[0].getbestblockhash() self.tip = int("0x" + self.lastblockhash, 0) self.lastblockheight = 429 @@ -65,45 +67,44 @@ def 
run_test(self): self.log.info("Test 1: NULLDUMMY compliant base transactions should be accepted to mempool and mined before activation [430]") test1txs = [self.create_transaction(self.nodes[0], coinbase_txid[0], self.ms_address, 49)] - txid1 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[0].serialize_with_witness()), True) + txid1 = self.nodes[0].sendrawtransaction(test1txs[0].serialize_with_witness().hex(), True) test1txs.append(self.create_transaction(self.nodes[0], txid1, self.ms_address, 48)) - txid2 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[1].serialize_with_witness()), True) + txid2 = self.nodes[0].sendrawtransaction(test1txs[1].serialize_with_witness().hex(), True) test1txs.append(self.create_transaction(self.nodes[0], coinbase_txid[1], self.wit_ms_address, 49)) - txid3 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[2].serialize_with_witness()), True) + txid3 = self.nodes[0].sendrawtransaction(test1txs[2].serialize_with_witness().hex(), True) self.block_submit(self.nodes[0], test1txs, False, True) self.log.info("Test 2: Non-NULLDUMMY base multisig transaction should not be accepted to mempool before activation") test2tx = self.create_transaction(self.nodes[0], txid2, self.ms_address, 47) true_dummy(test2tx) - assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test2tx.serialize_with_witness()), True) + assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, test2tx.serialize_with_witness().hex(), True) self.log.info("Test 3: Non-NULLDUMMY base transactions should be accepted in a block before activation [431]") self.block_submit(self.nodes[0], [test2tx], False, True) self.log.info("Test 4: Non-NULLDUMMY base multisig transaction is invalid after activation") test4tx = self.create_transaction(self.nodes[0], test2tx.hash, self.address, 46) - test6txs=[CTransaction(test4tx)] + test6txs = [CTransaction(test4tx)] true_dummy(test4tx) - 
assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test4tx.serialize_with_witness()), True) + assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, test4tx.serialize_with_witness().hex(), True) self.block_submit(self.nodes[0], [test4tx]) self.log.info("Test 5: Non-NULLDUMMY P2WSH multisig transaction invalid after activation") test5tx = self.create_transaction(self.nodes[0], txid3, self.wit_address, 48) test6txs.append(CTransaction(test5tx)) test5tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01' - assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test5tx.serialize_with_witness()), True) + assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, test5tx.serialize_with_witness().hex(), True) self.block_submit(self.nodes[0], [test5tx], True) self.log.info("Test 6: NULLDUMMY compliant base/witness transactions should be accepted to mempool and in block after activation [432]") for i in test6txs: - self.nodes[0].sendrawtransaction(bytes_to_hex_str(i.serialize_with_witness()), True) + self.nodes[0].sendrawtransaction(i.serialize_with_witness().hex(), True) self.block_submit(self.nodes[0], test6txs, True, True) - @staticmethod def create_transaction(node, txid, to_address, amount): - inputs = [{ "txid" : txid, "vout" : 0}] - outputs = { to_address : amount } + inputs = [{"txid": txid, "vout": 0}] + outputs = {to_address: amount} rawtx = node.createrawtransaction(inputs, outputs) signresult = node.signrawtransaction(rawtx) tx = CTransaction() @@ -111,8 +112,7 @@ def create_transaction(node, txid, to_address, amount): tx.deserialize(f) return tx - - def block_submit(self, node, txs, witness = False, accept = False): + def block_submit(self, node, txs, witness=False, accept=False): block = create_block(self.tip, create_coinbase(self.lastblockheight + 1), self.lastblocktime + 1) block.nVersion = 4 for tx in txs: @@ -122,15 +122,16 @@ 
def block_submit(self, node, txs, witness = False, accept = False): witness and add_witness_commitment(block) block.rehash() block.solve() - node.submitblock(bytes_to_hex_str(block.serialize(True))) + node.submitblock(block.serialize(True).hex()) if accept: assert_equal(node.getbestblockhash(), block.hash) - self.tip = block.sha256 + self.tip = block.x16r self.lastblockhash = block.hash self.lastblocktime += 1 self.lastblockheight += 1 else: assert_equal(node.getbestblockhash(), self.lastblockhash) + if __name__ == '__main__': NULLDUMMYTest().main() diff --git a/test/functional/feature_proxy.py b/test/functional/feature_proxy.py index 48d61cd875..1fe7cacfc2 100755 --- a/test/functional/feature_proxy.py +++ b/test/functional/feature_proxy.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/feature_pruning.py b/test/functional/feature_pruning.py index e4434a7e9b..235b2e1630 100755 --- a/test/functional/feature_pruning.py +++ b/test/functional/feature_pruning.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/feature_raw_restricted_assets.py b/test/functional/feature_raw_restricted_assets.py index 427409eac0..f5bff1cbb9 100755 --- a/test/functional/feature_raw_restricted_assets.py +++ b/test/functional/feature_raw_restricted_assets.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2018 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. @@ -11,27 +11,29 @@ from test_framework.util import assert_equal BURN_ADDRESSES = { - 'issue_restricted': 'n1issueRestrictedXXXXXXXXXXXXZVT9V', + 'issue_restricted': 'n1issueRestrictedXXXXXXXXXXXXZVT9V', 'reissue_restricted': 'n1ReissueAssetXXXXXXXXXXXXXXWG9NLd', - 'issue_qualifier': 'n1issueQuaLifierXXXXXXXXXXXXUysLTj', + 'issue_qualifier': 'n1issueQuaLifierXXXXXXXXXXXXUysLTj', 'issue_subqualifier': 'n1issueSubQuaLifierXXXXXXXXXYffPLh', - 'tag_address': 'n1addTagBurnXXXXXXXXXXXXXXXXX5oLMH', + 'tag_address': 'n1addTagBurnXXXXXXXXXXXXXXXXX5oLMH', } BURN_AMOUNTS = { - 'issue_restricted': 1500, - 'reissue_restricted': 100, - 'issue_qualifier': 1000, - 'issue_subqualifier': 100, - 'tag_address': 0.1, + 'issue_restricted': 1500, + 'reissue_restricted': 100, + 'issue_qualifier': 1000, + 'issue_subqualifier': 100, + 'tag_address': 0.1, } FEE_AMOUNT = 0.01 -def truncate(number, digits = 8): + +def truncate(number, digits=8): stepper = pow(10.0, digits) return math.trunc(stepper * number) / stepper + def get_tx_issue_hex(node, to_address, asset_name, asset_quantity=1000, verifier_string="true", units=0, reissuable=1, has_ipfs=0, ipfs_hash="", owner_change_address=""): change_address = node.getnewaddress() @@ -47,12 +49,12 @@ def get_tx_issue_hex(node, to_address, asset_name, asset_quantity=1000, verifier change_address: truncate(float(rvn_unspent['amount']) - 
BURN_AMOUNTS['issue_restricted'] - FEE_AMOUNT), to_address: { 'issue_restricted': { - 'asset_name': asset_name, - 'asset_quantity': asset_quantity, - 'verifier_string': verifier_string, - 'units': units, - 'reissuable': reissuable, - 'has_ipfs': has_ipfs, + 'asset_name': asset_name, + 'asset_quantity': asset_quantity, + 'verifier_string': verifier_string, + 'units': units, + 'reissuable': reissuable, + 'has_ipfs': has_ipfs, } } } @@ -66,6 +68,7 @@ def get_tx_issue_hex(node, to_address, asset_name, asset_quantity=1000, verifier tx_issue_hex = tx_issue_signed['hex'] return tx_issue_hex + def get_tx_reissue_hex(node, to_address, asset_name, asset_quantity, reissuable=1, verifier_string="", ipfs_hash="", owner_change_address=""): change_address = node.getnewaddress() @@ -81,9 +84,9 @@ def get_tx_reissue_hex(node, to_address, asset_name, asset_quantity, reissuable= change_address: truncate(float(rvn_unspent['amount']) - BURN_AMOUNTS['reissue_restricted'] - FEE_AMOUNT), to_address: { 'reissue_restricted': { - 'asset_name': asset_name, - 'asset_quantity': asset_quantity, - 'reissuable': reissuable, + 'asset_name': asset_name, + 'asset_quantity': asset_quantity, + 'reissuable': reissuable, } } } @@ -99,6 +102,7 @@ def get_tx_reissue_hex(node, to_address, asset_name, asset_quantity, reissuable= tx_issue_hex = tx_issue_signed['hex'] return tx_issue_hex + def get_tx_issue_qualifier_hex(node, to_address, asset_name, asset_quantity=1, has_ipfs=0, ipfs_hash="", root_change_address="", change_qty=1): change_address = node.getnewaddress() @@ -120,9 +124,9 @@ def get_tx_issue_qualifier_hex(node, to_address, asset_name, asset_quantity=1, h change_address: truncate(float(rvn_unspent['amount']) - burn_amount - FEE_AMOUNT), to_address: { 'issue_qualifier': { - 'asset_name': asset_name, - 'asset_quantity': asset_quantity, - 'has_ipfs': has_ipfs, + 'asset_name': asset_name, + 'asset_quantity': asset_quantity, + 'has_ipfs': has_ipfs, } } } @@ -138,6 +142,7 @@ def 
get_tx_issue_qualifier_hex(node, to_address, asset_name, asset_quantity=1, h tx_issue_hex = tx_issue_signed['hex'] return tx_issue_hex + def get_tx_transfer_hex(node, to_address, asset_name, asset_quantity): change_address = node.getnewaddress() asset_change_address = node.getnewaddress() @@ -149,7 +154,6 @@ def get_tx_transfer_hex(node, to_address, asset_name, asset_quantity): asset_unspent_qty = asset_unspent['amount'] asset_inputs = [{k: asset_unspent[k] for k in ['txid', 'vout']}] - outputs = { change_address: truncate(float(rvn_unspent['amount']) - FEE_AMOUNT), to_address: { @@ -170,6 +174,7 @@ def get_tx_transfer_hex(node, to_address, asset_name, asset_quantity): tx_transfer_hex = tx_transfer_signed['hex'] return tx_transfer_hex + def get_tx_tag_address_hex(node, op, qualifier_name, tag_addresses, qualifier_change_address, change_qty=1): change_address = node.getnewaddress() @@ -199,6 +204,7 @@ def get_tx_tag_address_hex(node, op, qualifier_name, tag_addresses, qualifier_ch tx_tag_hex = tx_tag_signed['hex'] return tx_tag_hex + def get_tx_freeze_address_hex(node, op, asset_name, freeze_addresses, owner_change_address): change_address = node.getnewaddress() @@ -224,6 +230,7 @@ def get_tx_freeze_address_hex(node, op, asset_name, freeze_addresses, owner_chan tx_freeze_hex = tx_freeze_signed['hex'] return tx_freeze_hex + # get_tx_freeze_asset_hex(n0, "freeze", asset_name, owner_change_address) def get_tx_freeze_asset_hex(node, op, asset_name, owner_change_address): change_address = node.getnewaddress() @@ -249,6 +256,7 @@ def get_tx_freeze_asset_hex(node, op, asset_name, owner_change_address): tx_freeze_hex = tx_freeze_signed['hex'] return tx_freeze_hex + class RawRestrictedAssetsTest(RavenTestFramework): def set_test_params(self): self.setup_clean_chain = True @@ -288,7 +296,7 @@ def issue_restricted_test(self): txid = n0.sendrawtransaction(hex_data) n0.generate(1) - #verify + # verify assert_equal(64, len(txid)) assert_equal(qty, n0.listmyassets(asset_name, 
True)[asset_name]['balance']) asset_data = n0.getassetdata(asset_name) @@ -333,7 +341,7 @@ def reissue_restricted_test(self): txid = n0.sendrawtransaction(hex_data) n0.generate(1) - #verify + # verify assert_equal(64, len(txid)) assert_equal(qty + reissue_qty, n0.listmyassets(asset_name, True)[asset_name]['balance']) asset_data = n0.getassetdata(asset_name) @@ -354,12 +362,12 @@ def issue_qualifier_test(self): has_ipfs = 1 ipfs_hash = "QmcvyefkqQX3PpjpY5L8B2yMd47XrVwAipr6cxUt2zvYU8" - #### ROOT QUALIFIER + # ROOT QUALIFIER hex_data = get_tx_issue_qualifier_hex(n0, to_address, asset_name, qty, has_ipfs, ipfs_hash) txid = n0.sendrawtransaction(hex_data) n0.generate(1) - #verify + # verify assert_equal(64, len(txid)) assert_equal(qty, n0.listmyassets(asset_name, True)[asset_name]['balance']) asset_data = n0.getassetdata(asset_name) @@ -376,12 +384,12 @@ def issue_qualifier_test(self): sub_ipfs_hash = "QmcvyefkqQX3PpjpY5L8B2yMd47XrVwAipr6cxUt2zvYU8" root_change_address = n0.getnewaddress() - #### SUB-QUALIFIER + # SUB-QUALIFIER sub_hex = get_tx_issue_qualifier_hex(n0, sub_to_address, sub_asset_name, sub_qty, sub_has_ipfs, sub_ipfs_hash, root_change_address, qty) sub_txid = n0.sendrawtransaction(sub_hex) n0.generate(1) - #verify + # verify assert_equal(64, len(sub_txid)) assert_equal(sub_qty, n0.listmyassets(sub_asset_name, True)[sub_asset_name]['balance']) asset_data = n0.getassetdata(sub_asset_name) @@ -411,7 +419,7 @@ def transfer_qualifier_test(self): n0.generate(1) self.sync_all() - #verify + # verify assert_equal(qty - xfer_qty, n0.listmyassets(asset_name, True)[asset_name]['balance']) assert_equal(xfer_qty, n1.listassetbalancesbyaddress(n1_address)[asset_name]) @@ -429,32 +437,32 @@ def address_tagging_test(self): tag_addresses = [n0.getnewaddress(), n0.getnewaddress(), n0.getnewaddress()] - #verify + # verify for tag_address in tag_addresses: - assert(not n0.checkaddresstag(tag_address, qualifier_name)) + assert (not n0.checkaddresstag(tag_address, 
qualifier_name)) - #tag + # tag change_address = n0.getnewaddress() tag_hex = get_tx_tag_address_hex(n0, "tag", qualifier_name, tag_addresses, change_address, 2) tag_txid = n0.sendrawtransaction(tag_hex) n0.generate(1) - #verify + # verify assert_equal(64, len(tag_txid)) for tag_address in tag_addresses: - assert(n0.checkaddresstag(tag_address, qualifier_name)) + assert (n0.checkaddresstag(tag_address, qualifier_name)) assert_equal(qty, n0.listassetbalancesbyaddress(change_address)[qualifier_name]) - #untag + # untag change_address = n0.getnewaddress() tag_hex = get_tx_tag_address_hex(n0, "untag", qualifier_name, tag_addresses, change_address, 2) tag_txid = n0.sendrawtransaction(tag_hex) n0.generate(1) - #verify + # verify assert_equal(64, len(tag_txid)) for tag_address in tag_addresses: - assert(not n0.checkaddresstag(tag_address, qualifier_name)) + assert (not n0.checkaddresstag(tag_address, qualifier_name)) assert_equal(qty, n0.listassetbalancesbyaddress(change_address)[qualifier_name]) def address_freezing_test(self): @@ -475,32 +483,32 @@ def address_freezing_test(self): freeze_addresses = [n0.getnewaddress(), n0.getnewaddress(), n0.getnewaddress()] - #verify + # verify for freeze_address in freeze_addresses: - assert(not n0.checkaddressrestriction(freeze_address, asset_name)) + assert (not n0.checkaddressrestriction(freeze_address, asset_name)) - #freeze + # freeze owner_change_address = n0.getnewaddress() freeze_hex = get_tx_freeze_address_hex(n0, "freeze", asset_name, freeze_addresses, owner_change_address) freeze_txid = n0.sendrawtransaction(freeze_hex) n0.generate(1) - #verify + # verify assert_equal(64, len(freeze_txid)) for freeze_address in freeze_addresses: - assert(n0.checkaddressrestriction(freeze_address, asset_name)) + assert (n0.checkaddressrestriction(freeze_address, asset_name)) assert_equal(1, n0.listassetbalancesbyaddress(owner_change_address)[f"{base_asset_name}!"]) - #unfreeze + # unfreeze owner_change_address = n0.getnewaddress() 
freeze_hex = get_tx_freeze_address_hex(n0, "unfreeze", asset_name, freeze_addresses, owner_change_address) freeze_txid = n0.sendrawtransaction(freeze_hex) n0.generate(1) - #verify + # verify assert_equal(64, len(freeze_txid)) for freeze_address in freeze_addresses: - assert(not n0.checkaddressrestriction(freeze_address, asset_name)) + assert (not n0.checkaddressrestriction(freeze_address, asset_name)) assert_equal(1, n0.listassetbalancesbyaddress(owner_change_address)[f"{base_asset_name}!"]) def asset_freezing_test(self): @@ -519,29 +527,29 @@ def asset_freezing_test(self): n0.issuerestrictedasset(asset_name, qty, verifier, issue_address) n0.generate(1) - #verify - assert(not n0.checkglobalrestriction(asset_name)) + # verify + assert (not n0.checkglobalrestriction(asset_name)) - #freeze + # freeze owner_change_address = n0.getnewaddress() freeze_hex = get_tx_freeze_asset_hex(n0, "freeze", asset_name, owner_change_address) freeze_txid = n0.sendrawtransaction(freeze_hex) n0.generate(1) - #verify + # verify assert_equal(64, len(freeze_txid)) - assert(n0.checkglobalrestriction(asset_name)) + assert (n0.checkglobalrestriction(asset_name)) assert_equal(1, n0.listassetbalancesbyaddress(owner_change_address)[f"{base_asset_name}!"]) - #unfreeze + # unfreeze owner_change_address = n0.getnewaddress() freeze_hex = get_tx_freeze_asset_hex(n0, "unfreeze", asset_name, owner_change_address) freeze_txid = n0.sendrawtransaction(freeze_hex) n0.generate(1) - #verify + # verify assert_equal(64, len(freeze_txid)) - assert(not n0.checkglobalrestriction(asset_name)) + assert (not n0.checkglobalrestriction(asset_name)) assert_equal(1, n0.listassetbalancesbyaddress(owner_change_address)[f"{base_asset_name}!"]) def run_test(self): @@ -555,5 +563,6 @@ def run_test(self): self.address_freezing_test() self.asset_freezing_test() + if __name__ == '__main__': - RawRestrictedAssetsTest().main() \ No newline at end of file + RawRestrictedAssetsTest().main() diff --git 
a/test/functional/feature_rawassettransactions.py b/test/functional/feature_rawassettransactions.py index 8d75da42d9..c4827bb51a 100755 --- a/test/functional/feature_rawassettransactions.py +++ b/test/functional/feature_rawassettransactions.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/feature_rbf.py b/test/functional/feature_rbf.py index e622caf370..1a421cc6fa 100755 --- a/test/functional/feature_rbf.py +++ b/test/functional/feature_rbf.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. @@ -9,12 +9,14 @@ from test_framework.test_framework import RavenTestFramework from test_framework.util import satoshi_round, assert_raises_rpc_error, assert_equal, Decimal from test_framework.script import CScript -from test_framework.mininode import bytes_to_hex_str, COIN, CTransaction, CTxIn, COutPoint, CTxOut +from test_framework.mininode import COIN, CTransaction, CTxIn, COutPoint, CTxOut MAX_REPLACEMENT_LIMIT = 100 + def tx_to_hex(tx): - return bytes_to_hex_str(tx.serialize()) + return tx.serialize().hex() + def make_utxo(node, amount, confirmed=True, script_pub_key=CScript([1])): """Create a txout with a given amount and scriptPubKey @@ -24,12 +26,12 @@ def make_utxo(node, amount, confirmed=True, script_pub_key=CScript([1])): confirmed - txouts created will be confirmed in the blockchain; unconfirmed otherwise. 
""" - fee = 1*COIN - while node.getbalance() < satoshi_round((amount + fee)/COIN): + fee = 1 * COIN + while node.getbalance() < satoshi_round((amount + fee) / COIN): node.generate(100) new_addr = node.getnewaddress() - txid = node.sendtoaddress(new_addr, satoshi_round((amount+fee)/COIN)) + txid = node.sendtoaddress(new_addr, satoshi_round((amount + fee) / COIN)) tx1 = node.getrawtransaction(txid, 1) txid = int(txid, 16) i = None @@ -56,28 +58,29 @@ def make_utxo(node, amount, confirmed=True, script_pub_key=CScript([1])): new_size = len(node.getrawmempool()) # Error out if we have something stuck in the mempool, as this # would likely be a bug. - assert(new_size < mempool_size) + assert (new_size < mempool_size) mempool_size = new_size return COutPoint(int(txid, 16), 0) + class ReplaceByFeeTest(RavenTestFramework): def set_test_params(self): self.num_nodes = 2 - self.extra_args= [["-maxorphantx=1000", - "-whitelist=127.0.0.1", - "-limitancestorcount=50", - "-limitancestorsize=101", - "-limitdescendantcount=200", - "-limitdescendantsize=101"], + self.extra_args = [["-maxorphantx=1000", + "-whitelist=127.0.0.1", + "-limitancestorcount=50", + "-limitancestorsize=101", + "-limitdescendantcount=200", + "-limitdescendantsize=101"], ["-mempoolreplacement=0"]] def run_test(self): # Leave IBD self.nodes[0].generate(1) - make_utxo(self.nodes[0], 1*COIN) + make_utxo(self.nodes[0], 1 * COIN) # Ensure nodes are synced self.sync_all() @@ -116,7 +119,7 @@ def run_test(self): def test_simple_doublespend(self): """Simple doublespend""" - tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN)) + tx0_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN)) # make_utxo may have generated a bunch of blocks, so we need to sync # before we can spend the coins generated, or else the resulting @@ -125,7 +128,7 @@ def test_simple_doublespend(self): tx1a = CTransaction() tx1a.vin = [CTxIn(tx0_outpoint, n_sequence=0)] - tx1a.vout = [CTxOut(1*COIN, CScript([b'a']))] + tx1a.vout = [CTxOut(1 * COIN, 
CScript([b'a']))] tx1a_hex = tx_to_hex(tx1a) tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True) @@ -134,7 +137,7 @@ def test_simple_doublespend(self): # Should fail because we haven't changed the fee tx1b = CTransaction() tx1b.vin = [CTxIn(tx0_outpoint, n_sequence=0)] - tx1b.vout = [CTxOut(1*COIN, CScript([b'b']))] + tx1b.vout = [CTxOut(1 * COIN, CScript([b'b']))] tx1b_hex = tx_to_hex(tx1b) # This will raise an exception due to insufficient fee @@ -145,7 +148,7 @@ def test_simple_doublespend(self): # Extra 0.1 RVN fee tx1b = CTransaction() tx1b.vin = [CTxIn(tx0_outpoint, n_sequence=0)] - tx1b.vout = [CTxOut(int(0.9*COIN), CScript([b'b']))] + tx1b.vout = [CTxOut(int(0.9 * COIN), CScript([b'b']))] tx1b_hex = tx_to_hex(tx1b) # Replacement still disabled even with "enough fee" assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[1].sendrawtransaction, tx1b_hex, True) @@ -167,14 +170,14 @@ def test_simple_doublespend(self): def test_doublespend_chain(self): """Doublespend of a long chain""" - initial_n_value = 5000*COIN + initial_n_value = 5000 * COIN tx0_outpoint = make_utxo(self.nodes[0], initial_n_value) prevout = tx0_outpoint remaining_value = initial_n_value chain_txids = [] - while remaining_value > 1000*COIN: - remaining_value -= 100*COIN + while remaining_value > 1000 * COIN: + remaining_value -= 100 * COIN tx = CTransaction() tx.vin = [CTxIn(prevout, n_sequence=0)] tx.vout = [CTxOut(remaining_value, CScript([1]))] @@ -187,7 +190,7 @@ def test_doublespend_chain(self): # child fees - 40 RVN - so this attempt is rejected. 
dbl_tx = CTransaction() dbl_tx.vin = [CTxIn(tx0_outpoint, n_sequence=0)] - dbl_tx.vout = [CTxOut(initial_n_value - 30*COIN, CScript([1]))] + dbl_tx.vout = [CTxOut(initial_n_value - 30 * COIN, CScript([1]))] dbl_tx_hex = tx_to_hex(dbl_tx) # This will raise an exception due to insufficient fee @@ -196,18 +199,18 @@ def test_doublespend_chain(self): # Accepted with sufficient fee dbl_tx = CTransaction() dbl_tx.vin = [CTxIn(tx0_outpoint, n_sequence=0)] - dbl_tx.vout = [CTxOut(1*COIN, CScript([1]))] + dbl_tx.vout = [CTxOut(1 * COIN, CScript([1]))] dbl_tx_hex = tx_to_hex(dbl_tx) self.nodes[0].sendrawtransaction(dbl_tx_hex, True) mempool = self.nodes[0].getrawmempool() for doublespent_txid in chain_txids: - assert(doublespent_txid not in mempool) + assert (doublespent_txid not in mempool) def test_doublespend_tree(self): """Doublespend of a big tree of transactions""" - initial_n_value = 50*COIN + initial_n_value = 50 * COIN tx0_outpoint = make_utxo(self.nodes[0], initial_n_value) def branch(prevout, initial_value, max_txs, tree_width=5, fee_val=0.0001 * COIN, _total_txs=None): @@ -220,14 +223,14 @@ def branch(prevout, initial_value, max_txs, tree_width=5, fee_val=0.0001 * COIN, if txout_value < fee_val: return - vout = [CTxOut(txout_value, CScript([i+1])) + vout = [CTxOut(txout_value, CScript([i + 1])) for i in range(tree_width)] tx_data = CTransaction() tx_data.vin = [CTxIn(prevout, n_sequence=0)] tx_data.vout = vout tx_hex = tx_to_hex(tx_data) - assert(len(tx_data.serialize()) < 100000) + assert (len(tx_data.serialize()) < 100000) txid = self.nodes[0].sendrawtransaction(tx_hex, True) yield tx_data _total_txs[0] += 1 @@ -241,7 +244,7 @@ def branch(prevout, initial_value, max_txs, tree_width=5, fee_val=0.0001 * COIN, _total_txs=_total_txs): yield x - fee = int(0.0001*COIN) + fee = int(0.0001 * COIN) n = MAX_REPLACEMENT_LIMIT tree_txs = list(branch(tx0_outpoint, initial_n_value, n, fee_val=fee)) assert_equal(len(tree_txs), n) @@ -249,7 +252,7 @@ def branch(prevout, 
initial_value, max_txs, tree_width=5, fee_val=0.0001 * COIN, # Attempt double-spend, will fail because too little fee paid dbl_tx = CTransaction() dbl_tx.vin = [CTxIn(tx0_outpoint, n_sequence=0)] - dbl_tx.vout = [CTxOut(initial_n_value - fee*n, CScript([1]))] + dbl_tx.vout = [CTxOut(initial_n_value - fee * n, CScript([1]))] dbl_tx_hex = tx_to_hex(dbl_tx) # This will raise an exception due to insufficient fee assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, dbl_tx_hex, True) @@ -257,7 +260,7 @@ def branch(prevout, initial_value, max_txs, tree_width=5, fee_val=0.0001 * COIN, # 1 RVN fee is enough dbl_tx = CTransaction() dbl_tx.vin = [CTxIn(tx0_outpoint, n_sequence=0)] - dbl_tx.vout = [CTxOut(initial_n_value - fee*n - 1*COIN, CScript([1]))] + dbl_tx.vout = [CTxOut(initial_n_value - fee * n - 1 * COIN, CScript([1]))] dbl_tx_hex = tx_to_hex(dbl_tx) self.nodes[0].sendrawtransaction(dbl_tx_hex, True) @@ -269,15 +272,15 @@ def branch(prevout, initial_value, max_txs, tree_width=5, fee_val=0.0001 * COIN, # Try again, but with more total transactions than the "max txs # double-spent at once" anti-DoS limit. 
- for n in (MAX_REPLACEMENT_LIMIT+1, MAX_REPLACEMENT_LIMIT*2): - fee = int(0.0001*COIN) + for n in (MAX_REPLACEMENT_LIMIT + 1, MAX_REPLACEMENT_LIMIT * 2): + fee = int(0.0001 * COIN) tx0_outpoint = make_utxo(self.nodes[0], initial_n_value) tree_txs = list(branch(tx0_outpoint, initial_n_value, n, fee_val=fee)) assert_equal(len(tree_txs), n) dbl_tx = CTransaction() dbl_tx.vin = [CTxIn(tx0_outpoint, n_sequence=0)] - dbl_tx.vout = [CTxOut(initial_n_value - 2*fee*n, CScript([1]))] + dbl_tx.vout = [CTxOut(initial_n_value - 2 * fee * n, CScript([1]))] dbl_tx_hex = tx_to_hex(dbl_tx) # This will raise an exception assert_raises_rpc_error(-26, "too many potential replacements", self.nodes[0].sendrawtransaction, dbl_tx_hex, True) @@ -288,11 +291,11 @@ def branch(prevout, initial_value, max_txs, tree_width=5, fee_val=0.0001 * COIN, def test_replacement_fee_per_kb(self): """Replacement requires fee-per-KB to be higher""" - tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN)) + tx0_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN)) tx1a = CTransaction() tx1a.vin = [CTxIn(tx0_outpoint, n_sequence=0)] - tx1a.vout = [CTxOut(1*COIN, CScript([b'a']))] + tx1a.vout = [CTxOut(1 * COIN, CScript([b'a']))] tx1a_hex = tx_to_hex(tx1a) self.nodes[0].sendrawtransaction(tx1a_hex, True) @@ -300,7 +303,7 @@ def test_replacement_fee_per_kb(self): # rejected. 
tx1b = CTransaction() tx1b.vin = [CTxIn(tx0_outpoint, n_sequence=0)] - tx1b.vout = [CTxOut(int(0.001*COIN), CScript([b'a'*999000]))] + tx1b.vout = [CTxOut(int(0.001 * COIN), CScript([b'a' * 999000]))] tx1b_hex = tx_to_hex(tx1b) # This will raise an exception due to insufficient fee @@ -308,12 +311,12 @@ def test_replacement_fee_per_kb(self): def test_spends_of_conflicting_outputs(self): """Replacements that spend conflicting tx outputs are rejected""" - utxo1 = make_utxo(self.nodes[0], int(1.2*COIN)) - utxo2 = make_utxo(self.nodes[0], 3*COIN) + utxo1 = make_utxo(self.nodes[0], int(1.2 * COIN)) + utxo2 = make_utxo(self.nodes[0], 3 * COIN) tx1a = CTransaction() tx1a.vin = [CTxIn(utxo1, n_sequence=0)] - tx1a.vout = [CTxOut(int(1.1*COIN), CScript([b'a']))] + tx1a.vout = [CTxOut(int(1.1 * COIN), CScript([b'a']))] tx1a_hex = tx_to_hex(tx1a) tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True) @@ -332,7 +335,7 @@ def test_spends_of_conflicting_outputs(self): # Spend tx1a's output to test the indirect case. 
tx1b = CTransaction() tx1b.vin = [CTxIn(COutPoint(tx1a_txid, 0), n_sequence=0)] - tx1b.vout = [CTxOut(1*COIN, CScript([b'a']))] + tx1b.vout = [CTxOut(1 * COIN, CScript([b'a']))] tx1b_hex = tx_to_hex(tx1b) tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True) tx1b_txid = int(tx1b_txid, 16) @@ -348,12 +351,12 @@ def test_spends_of_conflicting_outputs(self): def test_new_unconfirmed_inputs(self): """Replacements that add new unconfirmed inputs are rejected""" - confirmed_utxo = make_utxo(self.nodes[0], int(1.1*COIN)) - unconfirmed_utxo = make_utxo(self.nodes[0], int(0.1*COIN), False) + confirmed_utxo = make_utxo(self.nodes[0], int(1.1 * COIN)) + unconfirmed_utxo = make_utxo(self.nodes[0], int(0.1 * COIN), False) tx1 = CTransaction() tx1.vin = [CTxIn(confirmed_utxo)] - tx1.vout = [CTxOut(1*COIN, CScript([b'a']))] + tx1.vout = [CTxOut(1 * COIN, CScript([b'a']))] tx1_hex = tx_to_hex(tx1) self.nodes[0].sendrawtransaction(tx1_hex, True) @@ -373,11 +376,11 @@ def test_too_many_replacements(self): # Start by creating a single transaction with many outputs initial_n_value = 10 * COIN utxo = make_utxo(self.nodes[0], initial_n_value) - fee = int(0.0001*COIN) + fee = int(0.0001 * COIN) split_value = int((initial_n_value - fee) / (MAX_REPLACEMENT_LIMIT + 1)) outputs = [] - for i in range(MAX_REPLACEMENT_LIMIT+1): + for i in range(MAX_REPLACEMENT_LIMIT + 1): outputs.append(CTxOut(split_value, CScript([1]))) splitting_tx = CTransaction() @@ -389,19 +392,19 @@ def test_too_many_replacements(self): txid = int(txid, 16) # Now spend each of those outputs individually - for i in range(MAX_REPLACEMENT_LIMIT+1): + for i in range(MAX_REPLACEMENT_LIMIT + 1): tx_i = CTransaction() tx_i.vin = [CTxIn(COutPoint(txid, i), n_sequence=0)] - tx_i.vout = [CTxOut(split_value-fee, CScript([b'a']))] + tx_i.vout = [CTxOut(split_value - fee, CScript([b'a']))] tx_i_hex = tx_to_hex(tx_i) self.nodes[0].sendrawtransaction(tx_i_hex, True) # Now create doublespend of the whole lot; should fail. 
# Need a big enough fee to cover all spending transactions and have # a higher fee rate - double_spend_value = (split_value-100*fee)*(MAX_REPLACEMENT_LIMIT+1) + double_spend_value = (split_value - 100 * fee) * (MAX_REPLACEMENT_LIMIT + 1) inputs = [] - for i in range(MAX_REPLACEMENT_LIMIT+1): + for i in range(MAX_REPLACEMENT_LIMIT + 1): inputs.append(CTxIn(COutPoint(txid, i), n_sequence=0)) double_tx = CTransaction() double_tx.vin = inputs @@ -420,37 +423,37 @@ def test_too_many_replacements(self): def test_opt_in(self): """Replacing should only work if orig tx opted in""" - tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN)) + tx0_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN)) # Create a non-opting in transaction tx1a = CTransaction() tx1a.vin = [CTxIn(tx0_outpoint, n_sequence=0xffffffff)] - tx1a.vout = [CTxOut(1*COIN, CScript([b'a']))] + tx1a.vout = [CTxOut(1 * COIN, CScript([b'a']))] tx1a_hex = tx_to_hex(tx1a) tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True) # Shouldn't be able to double-spend tx1b = CTransaction() tx1b.vin = [CTxIn(tx0_outpoint, n_sequence=0)] - tx1b.vout = [CTxOut(int(0.9*COIN), CScript([b'b']))] + tx1b.vout = [CTxOut(int(0.9 * COIN), CScript([b'b']))] tx1b_hex = tx_to_hex(tx1b) # This will raise an exception assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[0].sendrawtransaction, tx1b_hex, True) - tx1_outpoint = make_utxo(self.nodes[0], int(1.1*COIN)) + tx1_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN)) # Create a different non-opting in transaction tx2a = CTransaction() tx2a.vin = [CTxIn(tx1_outpoint, n_sequence=0xfffffffe)] - tx2a.vout = [CTxOut(1*COIN, CScript([b'a']))] + tx2a.vout = [CTxOut(1 * COIN, CScript([b'a']))] tx2a_hex = tx_to_hex(tx2a) tx2a_txid = self.nodes[0].sendrawtransaction(tx2a_hex, True) # Still shouldn't be able to double-spend tx2b = CTransaction() tx2b.vin = [CTxIn(tx1_outpoint, n_sequence=0)] - tx2b.vout = [CTxOut(int(0.9*COIN), CScript([b'b']))] + tx2b.vout = [CTxOut(int(0.9 * 
COIN), CScript([b'b']))] tx2b_hex = tx_to_hex(tx2b) # This will raise an exception @@ -466,19 +469,19 @@ def test_opt_in(self): tx3a = CTransaction() tx3a.vin = [CTxIn(COutPoint(tx1a_txid, 0), n_sequence=0xffffffff), CTxIn(COutPoint(tx2a_txid, 0), n_sequence=0xfffffffd)] - tx3a.vout = [CTxOut(int(0.9*COIN), CScript([b'c'])), CTxOut(int(0.9*COIN), CScript([b'd']))] + tx3a.vout = [CTxOut(int(0.9 * COIN), CScript([b'c'])), CTxOut(int(0.9 * COIN), CScript([b'd']))] tx3a_hex = tx_to_hex(tx3a) self.nodes[0].sendrawtransaction(tx3a_hex, True) tx3b = CTransaction() tx3b.vin = [CTxIn(COutPoint(tx1a_txid, 0), n_sequence=0)] - tx3b.vout = [CTxOut(int(0.5*COIN), CScript([b'e']))] + tx3b.vout = [CTxOut(int(0.5 * COIN), CScript([b'e']))] tx3b_hex = tx_to_hex(tx3b) tx3c = CTransaction() tx3c.vin = [CTxIn(COutPoint(tx2a_txid, 0), n_sequence=0)] - tx3c.vout = [CTxOut(int(0.5*COIN), CScript([b'f']))] + tx3c.vout = [CTxOut(int(0.5 * COIN), CScript([b'f']))] tx3c_hex = tx_to_hex(tx3c) self.nodes[0].sendrawtransaction(tx3b_hex, True) @@ -491,44 +494,44 @@ def test_prioritised_transactions(self): # correctly used by replacement logic # 1. Check that feeperkb uses modified fees - tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN)) + tx0_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN)) tx1a = CTransaction() tx1a.vin = [CTxIn(tx0_outpoint, n_sequence=0)] - tx1a.vout = [CTxOut(1*COIN, CScript([b'a']))] + tx1a.vout = [CTxOut(1 * COIN, CScript([b'a']))] tx1a_hex = tx_to_hex(tx1a) tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True) # Higher fee, but the actual fee per KB is much lower. tx1b = CTransaction() tx1b.vin = [CTxIn(tx0_outpoint, n_sequence=0)] - tx1b.vout = [CTxOut(int(0.001*COIN), CScript([b'a'*740000]))] + tx1b.vout = [CTxOut(int(0.001 * COIN), CScript([b'a' * 740000]))] tx1b_hex = tx_to_hex(tx1b) # Verify tx1b cannot replace tx1a. 
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, True) # Use prioritisetransaction to set tx1a's fee to 0. - self.nodes[0].prioritisetransaction(txid=tx1a_txid, fee_delta=int(-0.1*COIN)) + self.nodes[0].prioritisetransaction(txid=tx1a_txid, fee_delta=int(-0.1 * COIN)) # Now tx1b should be able to replace tx1a tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True) - assert(tx1b_txid in self.nodes[0].getrawmempool()) + assert (tx1b_txid in self.nodes[0].getrawmempool()) # 2. Check that absolute fee checks use modified fee. - tx1_outpoint = make_utxo(self.nodes[0], int(1.1*COIN)) + tx1_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN)) tx2a = CTransaction() tx2a.vin = [CTxIn(tx1_outpoint, n_sequence=0)] - tx2a.vout = [CTxOut(1*COIN, CScript([b'a']))] + tx2a.vout = [CTxOut(1 * COIN, CScript([b'a']))] tx2a_hex = tx_to_hex(tx2a) self.nodes[0].sendrawtransaction(tx2a_hex, True) # Lower fee, but we'll prioritise it tx2b = CTransaction() tx2b.vin = [CTxIn(tx1_outpoint, n_sequence=0)] - tx2b.vout = [CTxOut(int(1.01*COIN), CScript([b'a']))] + tx2b.vout = [CTxOut(int(1.01 * COIN), CScript([b'a']))] tx2b.rehash() tx2b_hex = tx_to_hex(tx2b) @@ -536,21 +539,21 @@ def test_prioritised_transactions(self): assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx2b_hex, True) # Now prioritise tx2b to have a higher modified fee - self.nodes[0].prioritisetransaction(txid=tx2b.hash, fee_delta=int(0.1*COIN)) + self.nodes[0].prioritisetransaction(txid=tx2b.hash, fee_delta=int(0.1 * COIN)) # tx2b should now be accepted tx2b_txid = self.nodes[0].sendrawtransaction(tx2b_hex, True) - assert(tx2b_txid in self.nodes[0].getrawmempool()) + assert (tx2b_txid in self.nodes[0].getrawmempool()) def test_rpc(self): us0 = self.nodes[0].listunspent()[0] ins = [us0] - outs = {self.nodes[0].getnewaddress() : Decimal(1.0000000)} + outs = {self.nodes[0].getnewaddress(): Decimal(1.0000000)} rawtx0 = 
self.nodes[0].createrawtransaction(ins, outs, 0, True) rawtx1 = self.nodes[0].createrawtransaction(ins, outs, 0, False) - json0 = self.nodes[0].decoderawtransaction(rawtx0) - json1 = self.nodes[0].decoderawtransaction(rawtx1) + json0 = self.nodes[0].decoderawtransaction(rawtx0) + json1 = self.nodes[0].decoderawtransaction(rawtx1) assert_equal(json0["vin"][0]["sequence"], 4294967293) assert_equal(json1["vin"][0]["sequence"], 4294967295) @@ -558,10 +561,11 @@ def test_rpc(self): f_raw_tx2a = self.nodes[0].fundrawtransaction(rawtx2, {"replaceable": True}) f_raw_tx2b = self.nodes[0].fundrawtransaction(rawtx2, {"replaceable": False}) - json0 = self.nodes[0].decoderawtransaction(f_raw_tx2a['hex']) - json1 = self.nodes[0].decoderawtransaction(f_raw_tx2b['hex']) + json0 = self.nodes[0].decoderawtransaction(f_raw_tx2a['hex']) + json1 = self.nodes[0].decoderawtransaction(f_raw_tx2b['hex']) assert_equal(json0["vin"][0]["sequence"], 4294967293) assert_equal(json1["vin"][0]["sequence"], 4294967294) + if __name__ == '__main__': ReplaceByFeeTest().main() diff --git a/test/functional/feature_reindex.py b/test/functional/feature_reindex.py index c50370e760..ad30f7bbbd 100755 --- a/test/functional/feature_reindex.py +++ b/test/functional/feature_reindex.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/feature_restricted_assets.py b/test/functional/feature_restricted_assets.py index 84fb65e085..63b0ee2586 100755 --- a/test/functional/feature_restricted_assets.py +++ b/test/functional/feature_restricted_assets.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2018 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. @@ -310,7 +310,6 @@ def viewmytaggedaddresses(): assert_raises_rpc_error(None, "Invalid Raven change address", n0.addtagtoaddress, tag, address, "garbagechangeaddress") n0.addtagtoaddress(tag, address, change_address) - n0.addtagtoaddress(tag, address, change_address) # redundant tagging ok if consistent n0.generate(1) assert_raises_rpc_error(-32600, "add-qualifier-when-already-assigned", n0.addtagtoaddress, tag, address, change_address) @@ -361,7 +360,6 @@ def viewmytaggedaddresses(): assert_raises_rpc_error(None, "Invalid Raven change address", n0.removetagfromaddress, tag, address, "garbagechangeaddress") n0.removetagfromaddress(tag, address, change_address) - n0.removetagfromaddress(tag, address, change_address) # redundant untagging ok if consistent n0.generate(1) assert_raises_rpc_error(-32600, "removing-qualifier-when-not-assigned", n0.removetagfromaddress, tag, address, change_address) @@ -435,7 +433,6 @@ def viewmyrestrictedaddresses(): assert_raises_rpc_error(None, "Invalid Raven change address", n0.freezeaddress, asset_name, address, "garbagechangeaddress") n0.freezeaddress(asset_name, address, rvn_change_address) - n0.freezeaddress(asset_name, address, rvn_change_address) # redundant freezing ok if consistent n0.generate(1) assert_raises_rpc_error(-32600, "freeze-address-when-already-frozen", n0.freezeaddress, asset_name, address, rvn_change_address) @@ -458,7 +455,6 @@ def 
viewmyrestrictedaddresses(): assert_raises_rpc_error(None, "Invalid Raven change address", n0.unfreezeaddress, asset_name, address, "garbagechangeaddress") n0.unfreezeaddress(asset_name, address, rvn_change_address) - n0.unfreezeaddress(asset_name, address, rvn_change_address) # redundant unfreezing ok if consistent n0.generate(1) assert_raises_rpc_error(-32600, "unfreeze-address-when-not-frozen", n0.unfreezeaddress, asset_name, address, rvn_change_address) @@ -517,20 +513,17 @@ def global_freezing(self): n0.freezerestrictedasset(asset_name, rvn_change_address) # Can only freeze once! assert_raises_rpc_error(-26, "Freezing transaction already in mempool", n0.freezerestrictedasset, asset_name, rvn_change_address) n0.generate(1) - assert_raises_rpc_error(None, "global-freeze-when-already-frozen", n0.freezerestrictedasset, asset_name, rvn_change_address) # post-freeze validation assert_contains(asset_name, n0.listglobalrestrictions()) assert n0.checkglobalrestriction(asset_name) assert_raises_rpc_error(-8, "restricted asset has been globally frozen", n0.transferfromaddress, asset_name, address, 1000, n1.getnewaddress()) - assert_raises_rpc_error(None, "Invalid Raven change address", n0.unfreezerestrictedasset, asset_name, "garbagechangeaddress") n0.unfreezerestrictedasset(asset_name, rvn_change_address) # Can only un-freeze once! 
assert_raises_rpc_error(-26, "Unfreezing transaction already in mempool", n0.unfreezerestrictedasset, asset_name, rvn_change_address) n0.generate(1) - assert_raises_rpc_error(None, "global-unfreeze-when-not-frozen", n0.unfreezerestrictedasset, asset_name, rvn_change_address) # post-unfreeze validation diff --git a/test/functional/feature_rewards.py b/test/functional/feature_rewards.py index 49bb5ed598..a39b95aed4 100755 --- a/test/functional/feature_rewards.py +++ b/test/functional/feature_rewards.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/feature_segwit.py b/test/functional/feature_segwit.py index 8978e57f5e..a5c0dcef96 100755 --- a/test/functional/feature_segwit.py +++ b/test/functional/feature_segwit.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
@@ -8,7 +8,7 @@ from io import BytesIO from test_framework.test_framework import RavenTestFramework -from test_framework.util import hex_str_to_bytes, bytes_to_hex_str, connect_nodes, Decimal, assert_equal, sync_blocks, assert_raises_rpc_error, try_rpc +from test_framework.util import hex_str_to_bytes, connect_nodes, Decimal, assert_equal, sync_blocks, assert_raises_rpc_error, try_rpc from test_framework.mininode import sha256, CTransaction, CTxIn, COutPoint, CTxOut, COIN, to_hex, from_hex from test_framework.address import script_to_p2sh, key_to_p2pkh from test_framework.script import CScript, OP_HASH160, OP_CHECKSIG, hash160, OP_EQUAL, OP_DUP, OP_EQUALVERIFY, OP_0, OP_1, OP_2, OP_CHECKMULTISIG, OP_TRUE @@ -18,6 +18,7 @@ WIT_V0 = 0 WIT_V1 = 1 + # Create a scriptPubKey corresponding to either a P2WPKH output for the # given pubkey, or a P2WSH output of a 1-of-1 multisig for the given # pubkey. Returns the hex encoding of the scriptPubKey. @@ -31,30 +32,32 @@ def witness_script(use_p2wsh, pubkey): witness_program = CScript([OP_1, hex_str_to_bytes(pubkey), OP_1, OP_CHECKMULTISIG]) scripthash = sha256(witness_program) pkscript = CScript([OP_0, scripthash]) - return bytes_to_hex_str(pkscript) + return pkscript.hex() + # Return a transaction (in hex) that spends the given utxo to a segwit output, # optionally wrapping the segwit output using P2SH. 
-def create_witnessprogram(use_p2wsh, utxo, pubkey, encode_p2sh, amount): +def create_witness_program(use_p2wsh, utxo, pubkey, encode_p2sh, amount): pkscript = hex_str_to_bytes(witness_script(use_p2wsh, pubkey)) if encode_p2sh: p2sh_hash = hash160(pkscript) pkscript = CScript([OP_HASH160, p2sh_hash, OP_EQUAL]) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(int(utxo["txid"], 16), utxo["vout"]), b"")) - tx.vout.append(CTxOut(int(amount*COIN), pkscript)) + tx.vout.append(CTxOut(int(amount * COIN), pkscript)) return to_hex(tx) + # Create a transaction spending a given utxo to a segwit output corresponding # to the given pubkey: use_p2wsh determines whether to use P2WPKH or P2WSH # encode_p2sh determines whether to wrap in P2SH. # sign=True will have the given node sign the transaction. # insert_redeem_script will be added to the scriptSig, if given. def send_to_witness(use_p2wsh, node, utxo, pubkey, encode_p2sh, amount, sign=True, insert_redeem_script=""): - tx_to_witness = create_witnessprogram(use_p2wsh, utxo, pubkey, encode_p2sh, amount) + tx_to_witness = create_witness_program(use_p2wsh, utxo, pubkey, encode_p2sh, amount) if sign: signed = node.signrawtransaction(tx_to_witness) - assert("errors" not in signed or len(["errors"]) == 0) + assert ("errors" not in signed or len(["errors"]) == 0) return node.sendrawtransaction(signed["hex"]) else: if insert_redeem_script: @@ -64,15 +67,18 @@ def send_to_witness(use_p2wsh, node, utxo, pubkey, encode_p2sh, amount, sign=Tru return node.sendrawtransaction(tx_to_witness) + def getutxo(txid): utxo = {"vout": 0, "txid": txid} return utxo + def find_unspent(node, min_value): for utxo in node.listunspent(): if utxo['amount'] >= min_value: return utxo + class SegWitTest(RavenTestFramework): def set_test_params(self): self.setup_clean_chain = True @@ -107,28 +113,28 @@ def fail_mine(self, node, txid, sign, redeem_script=""): sync_blocks(self.nodes) def run_test(self): - self.nodes[0].generate(161) #block 161 - -# 
self.log.info("Verify sigops are counted in GBT with pre-BIP141 rules before the fork") -# txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) -# tmpl = self.nodes[0].getblocktemplate({}) -# assert(tmpl['sizelimit'] == 1000000) -# assert('weightlimit' not in tmpl) -# assert(tmpl['sigoplimit'] == 20000) -# assert(tmpl['transactions'][0]['hash'] == txid) -# assert(tmpl['transactions'][0]['sigops'] == 2) -# tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']}) -# assert(tmpl['sizelimit'] == 1000000) -# assert('weightlimit' not in tmpl) -# assert(tmpl['sigoplimit'] == 20000) -# assert(tmpl['transactions'][0]['hash'] == txid) -# assert(tmpl['transactions'][0]['sigops'] == 2) - self.nodes[0].generate(1) #block 162 + self.nodes[0].generate(161) # block 161 + + # self.log.info("Verify sigops are counted in GBT with pre-BIP141 rules before the fork") + # txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) + # tmpl = self.nodes[0].getblocktemplate({}) + # assert(tmpl['sizelimit'] == 1000000) + # assert('weightlimit' not in tmpl) + # assert(tmpl['sigoplimit'] == 20000) + # assert(tmpl['transactions'][0]['hash'] == txid) + # assert(tmpl['transactions'][0]['sigops'] == 2) + # tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']}) + # assert(tmpl['sizelimit'] == 1000000) + # assert('weightlimit' not in tmpl) + # assert(tmpl['sigoplimit'] == 20000) + # assert(tmpl['transactions'][0]['hash'] == txid) + # assert(tmpl['transactions'][0]['sigops'] == 2) + self.nodes[0].generate(1) # block 162 balance_presetup = self.nodes[0].getbalance() self.pubkey = [] - p2sh_ids = [] # p2sh_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE embedded in p2sh - wit_ids = [] # wit_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE via bare witness + p2sh_ids = [] # p2sh_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to 
an address for NODE embedded in p2sh + wit_ids = [] # wit_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE via bare witness for i in range(3): newaddress = self.nodes[i].getnewaddress() self.pubkey.append(self.nodes[i].validateaddress(newaddress)["pubkey"]) @@ -147,15 +153,15 @@ def run_test(self): wit_ids[n][v].append(send_to_witness(v, self.nodes[0], find_unspent(self.nodes[0], 5000), self.pubkey[n], False, Decimal("4999.9"))) p2sh_ids[n][v].append(send_to_witness(v, self.nodes[0], find_unspent(self.nodes[0], 5000), self.pubkey[n], True, Decimal("4999.9"))) - self.nodes[0].generate(1) #block 163 + self.nodes[0].generate(1) # block 163 sync_blocks(self.nodes) # Make sure all nodes recognize the transactions as theirs - assert_equal(self.nodes[0].getbalance(), balance_presetup - 60*5000 + 20*Decimal("4999.9") + 5000) - assert_equal(self.nodes[1].getbalance(), 20*Decimal("4999.9")) - assert_equal(self.nodes[2].getbalance(), 20*Decimal("4999.9")) + assert_equal(self.nodes[0].getbalance(), balance_presetup - 60 * 5000 + 20 * Decimal("4999.9") + 5000) + assert_equal(self.nodes[1].getbalance(), 20 * Decimal("4999.9")) + assert_equal(self.nodes[2].getbalance(), 20 * Decimal("4999.9")) - self.nodes[0].generate(260) #block 423 + self.nodes[0].generate(260) # block 423 sync_blocks(self.nodes) # unsigned, no scriptsig @@ -167,49 +173,49 @@ def run_test(self): self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False, witness_script(False, self.pubkey[0])) self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False, witness_script(True, self.pubkey[0])) # signed -# self.fail_accept(self.nodes[0], "no-witness-yet", wit_ids[NODE_0][WIT_V0][0], True) -# self.fail_accept(self.nodes[0], "no-witness-yet", wit_ids[NODE_0][WIT_V1][0], True) -# self.fail_accept(self.nodes[0], "no-witness-yet", p2sh_ids[NODE_0][WIT_V0][0], True) -# 
self.fail_accept(self.nodes[0], "no-witness-yet", p2sh_ids[NODE_0][WIT_V1][0], True) + # self.fail_accept(self.nodes[0], "no-witness-yet", wit_ids[NODE_0][WIT_V0][0], True) + # self.fail_accept(self.nodes[0], "no-witness-yet", wit_ids[NODE_0][WIT_V1][0], True) + # self.fail_accept(self.nodes[0], "no-witness-yet", p2sh_ids[NODE_0][WIT_V0][0], True) + # self.fail_accept(self.nodes[0], "no-witness-yet", p2sh_ids[NODE_0][WIT_V1][0], True) -# self.log.info("Verify witness txs are skipped for mining before the fork") -# self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][0], True) #block 424 -# self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][0], True) #block 425 -# self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][0], True) #block 426 -# self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][0], True) #block 427 + # self.log.info("Verify witness txs are skipped for mining before the fork") + # self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][0], True) #block 424 + # self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][0], True) #block 425 + # self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][0], True) #block 426 + # self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][0], True) #block 427 # TODO: An old node would see these txs without witnesses and be able to mine them self.log.info("Verify unsigned bare witness txs in versionbits-setting blocks are valid before the fork") - self.success_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][1], False) #block 428 - self.success_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][1], False) #block 429 + self.success_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][1], False) # block 428 + self.success_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][1], False) # block 429 self.log.info("Verify unsigned p2sh witness txs without a redeem script are invalid") self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V0][1], False) self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", 
p2sh_ids[NODE_2][WIT_V1][1], False) self.log.info("Verify unsigned p2sh witness txs with a redeem script in versionbits-settings blocks are valid before the fork") - self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][1], False, witness_script(False, self.pubkey[2])) #block 430 - self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][1], False, witness_script(True, self.pubkey[2])) #block 431 + self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][1], False, witness_script(False, self.pubkey[2])) # block 430 + self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][1], False, witness_script(True, self.pubkey[2])) # block 431 self.log.info("Verify previous witness txs skipped for mining can now be mined") assert_equal(len(self.nodes[2].getrawmempool()), 4) - block = self.nodes[2].generate(1) #block 432 (first block with new rules: 432 = 144 * 3) + block = self.nodes[2].generate(1) # block 432 (first block with new rules: 432 = 144 * 3) sync_blocks(self.nodes) assert_equal(len(self.nodes[2].getrawmempool()), 0) segwit_tx_list = self.nodes[2].getblock(block[0])["tx"] assert_equal(len(segwit_tx_list), 5) self.log.info("Verify block and transaction serialization rpcs return differing serializations depending on rpc serialization flag") - assert(self.nodes[2].getblock(block[0], False) != self.nodes[0].getblock(block[0], False)) - assert(self.nodes[1].getblock(block[0], False) == self.nodes[2].getblock(block[0], False)) + assert (self.nodes[2].getblock(block[0], False) != self.nodes[0].getblock(block[0], False)) + assert (self.nodes[1].getblock(block[0], False) == self.nodes[2].getblock(block[0], False)) for i in range(len(segwit_tx_list)): tx = from_hex(CTransaction(), self.nodes[2].gettransaction(segwit_tx_list[i])["hex"]) - assert(self.nodes[2].getrawtransaction(segwit_tx_list[i]) != self.nodes[0].getrawtransaction(segwit_tx_list[i])) - assert(self.nodes[1].getrawtransaction(segwit_tx_list[i], 0) == self.nodes[2].getrawtransaction(segwit_tx_list[i])) - 
assert(self.nodes[0].getrawtransaction(segwit_tx_list[i]) != self.nodes[2].gettransaction(segwit_tx_list[i])["hex"]) - assert(self.nodes[1].getrawtransaction(segwit_tx_list[i]) == self.nodes[2].gettransaction(segwit_tx_list[i])["hex"]) - assert(self.nodes[0].getrawtransaction(segwit_tx_list[i]) == bytes_to_hex_str(tx.serialize_without_witness())) + assert (self.nodes[2].getrawtransaction(segwit_tx_list[i]) != self.nodes[0].getrawtransaction(segwit_tx_list[i])) + assert (self.nodes[1].getrawtransaction(segwit_tx_list[i], 0) == self.nodes[2].getrawtransaction(segwit_tx_list[i])) + assert (self.nodes[0].getrawtransaction(segwit_tx_list[i]) != self.nodes[2].gettransaction(segwit_tx_list[i])["hex"]) + assert (self.nodes[1].getrawtransaction(segwit_tx_list[i]) == self.nodes[2].gettransaction(segwit_tx_list[i])["hex"]) + assert (self.nodes[0].getrawtransaction(segwit_tx_list[i]) == tx.serialize_without_witness().hex()) self.log.info("Verify witness txs without witness data are invalid after the fork") self.fail_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][2], False) @@ -218,21 +224,21 @@ def run_test(self): self.fail_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][2], False, witness_script(True, self.pubkey[2])) self.log.info("Verify default node can now use witness txs") - self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V0][0], True) #block 432 - self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V1][0], True) #block 433 - self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], True) #block 434 - self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], True) #block 435 + self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V0][0], True) # block 432 + self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V1][0], True) # block 433 + self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], True) # block 434 + self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], True) # block 435 self.log.info("Verify sigops are counted in GBT with BIP141 rules 
after the fork") txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) - tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']}) - assert(tmpl['sizelimit'] >= 3999577) # actual maximum size is lower due to minimum mandatory non-witness data - assert(tmpl['weightlimit'] == 4000000) - assert(tmpl['sigoplimit'] == 80000) - assert(tmpl['transactions'][0]['txid'] == txid) - assert(tmpl['transactions'][0]['sigops'] == 8) + tmpl = self.nodes[0].getblocktemplate({'rules': ['segwit']}) + assert (tmpl['sizelimit'] >= 3999577) # actual maximum size is lower due to minimum mandatory non-witness data + assert (tmpl['weightlimit'] == 4000000) + assert (tmpl['sigoplimit'] == 80000) + assert (tmpl['transactions'][0]['txid'] == txid) + assert (tmpl['transactions'][0]['sigops'] == 8) - self.nodes[0].generate(1) # Mine a block to clear the gbt cache + self.nodes[0].generate(1) # Mine a block to clear the gbt cache self.log.info("Non-segwit miners are able to use GBT response after activation.") # Create a 3-tx chain: tx1 (non-segwit input, paying to a segwit output) -> @@ -242,41 +248,41 @@ def run_test(self): txid1 = send_to_witness(1, self.nodes[0], find_unspent(self.nodes[0], 50), self.pubkey[0], False, Decimal("49.996")) hex_tx = self.nodes[0].gettransaction(txid)['hex'] tx = from_hex(CTransaction(), hex_tx) - assert(tx.wit.is_null()) # This should not be a segwit input - assert(txid1 in self.nodes[0].getrawmempool()) + assert (tx.wit.is_null()) # This should not be a segwit input + assert (txid1 in self.nodes[0].getrawmempool()) # Now create tx2, which will spend from txid1. 
tx = CTransaction() tx.vin.append(CTxIn(COutPoint(int(txid1, 16), 0), b'')) - tx.vout.append(CTxOut(int(49.99*COIN), CScript([OP_TRUE]))) + tx.vout.append(CTxOut(int(49.99 * COIN), CScript([OP_TRUE]))) tx2_hex = self.nodes[0].signrawtransaction(to_hex(tx))['hex'] txid2 = self.nodes[0].sendrawtransaction(tx2_hex) tx = from_hex(CTransaction(), tx2_hex) - assert(not tx.wit.is_null()) + assert (not tx.wit.is_null()) # Now create tx3, which will spend from txid2 tx = CTransaction() tx.vin.append(CTxIn(COutPoint(int(txid2, 16), 0), b"")) - tx.vout.append(CTxOut(int(49.95*COIN), CScript([OP_TRUE]))) # Huge fee + tx.vout.append(CTxOut(int(49.95 * COIN), CScript([OP_TRUE]))) # Huge fee tx.calc_x16r() txid3 = self.nodes[0].sendrawtransaction(to_hex(tx)) - assert(tx.wit.is_null()) - assert(txid3 in self.nodes[0].getrawmempool()) + assert (tx.wit.is_null()) + assert (txid3 in self.nodes[0].getrawmempool()) # Now try calling getblocktemplate() without segwit support. template = self.nodes[0].getblocktemplate() # Check that tx1 is the only transaction of the 3 in the template. - template_txids = [ t['txid'] for t in template['transactions'] ] - assert(txid2 not in template_txids and txid3 not in template_txids) - assert(txid1 in template_txids) + template_txids = [t['txid'] for t in template['transactions']] + assert (txid2 not in template_txids and txid3 not in template_txids) + assert (txid1 in template_txids) # Check that running with segwit support results in all 3 being included. 
template = self.nodes[0].getblocktemplate({"rules": ["segwit"]}) - template_txids = [ t['txid'] for t in template['transactions'] ] - assert(txid1 in template_txids) - assert(txid2 in template_txids) - assert(txid3 in template_txids) + template_txids = [t['txid'] for t in template['transactions']] + assert (txid1 in template_txids) + assert (txid2 in template_txids) + assert (txid3 in template_txids) # Check that wtxid is properly reported in mempool entry assert_equal(int(self.nodes[0].getmempoolentry(txid3)["wtxid"], 16), tx.calc_x16r(True)) @@ -288,13 +294,13 @@ def run_test(self): # Some public keys to be used later pubkeys = [ - "0363D44AABD0F1699138239DF2F042C3282C0671CC7A76826A55C8203D90E39242", # cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb - "02D3E626B3E616FC8662B489C123349FECBFC611E778E5BE739B257EAE4721E5BF", # cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97 - "04A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538A62F5BD8EC85C2477F39650BD391EA6250207065B2A81DA8B009FC891E898F0E", # 91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV - "02A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538", # cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd - "036722F784214129FEB9E8129D626324F3F6716555B603FFE8300BBCB882151228", # cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66 - "0266A8396EE936BF6D99D17920DB21C6C7B1AB14C639D5CD72B300297E416FD2EC", # cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K - "0450A38BD7F0AC212FEBA77354A9B036A32E0F7C81FC4E0C5ADCA7C549C4505D2522458C2D9AE3CEFD684E039194B72C8A10F9CB9D4764AB26FCC2718D421D3B84", # 92h2XPssjBpsJN5CqSP7v9a7cf2kgDunBC6PDFwJHMACM1rrVBJ + "0363D44AABD0F1699138239DF2F042C3282C0671CC7A76826A55C8203D90E39242", # cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb + "02D3E626B3E616FC8662B489C123349FECBFC611E778E5BE739B257EAE4721E5BF", # cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97 + 
"04A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538A62F5BD8EC85C2477F39650BD391EA6250207065B2A81DA8B009FC891E898F0E", # 91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV + "02A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538", # cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd + "036722F784214129FEB9E8129D626324F3F6716555B603FFE8300BBCB882151228", # cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66 + "0266A8396EE936BF6D99D17920DB21C6C7B1AB14C639D5CD72B300297E416FD2EC", # cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K + "0450A38BD7F0AC212FEBA77354A9B036A32E0F7C81FC4E0C5ADCA7C549C4505D2522458C2D9AE3CEFD684E039194B72C8A10F9CB9D4764AB26FCC2718D421D3B84", # 92h2XPssjBpsJN5CqSP7v9a7cf2kgDunBC6PDFwJHMACM1rrVBJ ] # Import a compressed key and an uncompressed key, generate some multisig addresses @@ -312,12 +318,12 @@ def run_test(self): self.nodes[0].importpubkey(pubkeys[2]) uncompressed_solvable_address = [key_to_p2pkh(pubkeys[2])] - spendable_anytime = [] # These outputs should be seen anytime after importprivkey and addmultisigaddress - spendable_after_importaddress = [] # These outputs should be seen after importaddress - solvable_after_importaddress = [] # These outputs should be seen after importaddress but not spendable - unsolvable_after_importaddress = [] # These outputs should be unsolvable after importaddress - solvable_anytime = [] # These outputs should be solvable after importpubkey - unseen_anytime = [] # These outputs should never be seen + spendable_anytime = [] # These outputs should be seen anytime after importprivkey and addmultisigaddress + spendable_after_importaddress = [] # These outputs should be seen after importaddress + solvable_after_importaddress = [] # These outputs should be seen after importaddress but not spendable + unsolvable_after_importaddress = [] # These outputs should be unsolvable after importaddress + solvable_anytime = [] # These outputs should be solvable after importpubkey + 
unseen_anytime = [] # These outputs should never be seen uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])) uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])) @@ -408,9 +414,9 @@ def run_test(self): p2wshop1 = CScript([OP_0, sha256(op1)]) unsolvable_after_importaddress.append(unsolvablep2pkh) unsolvable_after_importaddress.append(unsolvablep2wshp2pkh) - unsolvable_after_importaddress.append(op1) # OP_1 will be imported as script + unsolvable_after_importaddress.append(op1) # OP_1 will be imported as script unsolvable_after_importaddress.append(p2wshop1) - unseen_anytime.append(op0) # OP_0 will be imported as P2SH address with no script provided + unseen_anytime.append(op0) # OP_0 will be imported as P2SH address with no script provided unsolvable_after_importaddress.append(p2shop0) spendable_txid = [] @@ -424,30 +430,30 @@ def run_test(self): v = self.nodes[0].validateaddress(i) if v['isscript']: bare = hex_str_to_bytes(v['hex']) - importlist.append(bytes_to_hex_str(bare)) - importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(bare)]))) + importlist.append(bare.hex()) + importlist.append(CScript([OP_0, sha256(bare)]).hex()) else: pubkey = hex_str_to_bytes(v['pubkey']) p2pk = CScript([pubkey, OP_CHECKSIG]) p2pkh = CScript([OP_DUP, OP_HASH160, hash160(pubkey), OP_EQUALVERIFY, OP_CHECKSIG]) - importlist.append(bytes_to_hex_str(p2pk)) - importlist.append(bytes_to_hex_str(p2pkh)) - importlist.append(bytes_to_hex_str(CScript([OP_0, hash160(pubkey)]))) - importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pk)]))) - importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pkh)]))) + importlist.append(p2pk.hex()) + importlist.append(p2pkh.hex()) + importlist.append(CScript([OP_0, hash160(pubkey)]).hex()) + importlist.append(CScript([OP_0, sha256(p2pk)]).hex()) + 
importlist.append(CScript([OP_0, sha256(p2pkh)]).hex()) - importlist.append(bytes_to_hex_str(unsolvablep2pkh)) - importlist.append(bytes_to_hex_str(unsolvablep2wshp2pkh)) - importlist.append(bytes_to_hex_str(op1)) - importlist.append(bytes_to_hex_str(p2wshop1)) + importlist.append(unsolvablep2pkh.hex()) + importlist.append(unsolvablep2wshp2pkh.hex()) + importlist.append(op1.hex()) + importlist.append(p2wshop1.hex()) for i in importlist: # import all generated addresses. The wallet already has the private keys for some of these, so catch JSON RPC # exceptions and continue. try_rpc(-4, "The wallet already contains the private key for this address or script", self.nodes[0].importaddress, i, "", False, True) - self.nodes[0].importaddress(script_to_p2sh(op0)) # import OP_0 as address only - self.nodes[0].importaddress(multisig_without_privkey_address) # Test multisig_without_privkey + self.nodes[0].importaddress(script_to_p2sh(op0)) # import OP_0 as address only + self.nodes[0].importaddress(multisig_without_privkey_address) # Test multisig_without_privkey spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2)) solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1)) @@ -484,9 +490,9 @@ def run_test(self): self.nodes[0].importpubkey(pubkeys[6]) uncompressed_solvable_address = [key_to_p2pkh(pubkeys[6])] - spendable_after_addwitnessaddress = [] # These outputs should be seen after importaddress - solvable_after_addwitnessaddress=[] # These outputs should be seen after importaddress but not spendable - unseen_anytime = [] # These outputs should never be seen + spendable_after_addwitnessaddress = [] # These outputs should be seen after importaddress + solvable_after_addwitnessaddress = [] # These outputs should be seen after importaddress but not spendable + unseen_anytime = [] # These outputs should never be seen 
uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])) uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])) @@ -544,7 +550,7 @@ def run_test(self): # after importaddress it should pass addwitnessaddress v = self.nodes[0].validateaddress(compressed_solvable_address[1]) - self.nodes[0].importaddress(v['hex'],"",False,True) + self.nodes[0].importaddress(v['hex'], "", False, True) for i in compressed_spendable_address + compressed_solvable_address + premature_witaddress: witaddress = self.nodes[0].addwitnessaddress(i) assert_equal(witaddress, self.nodes[0].addwitnessaddress(witaddress)) @@ -568,11 +574,11 @@ def run_test(self): def mine_and_test_listunspent(self, script_list, ismine): utxo = find_unspent(self.nodes[0], 50) tx = CTransaction() - tx.vin.append(CTxIn(COutPoint(int('0x'+utxo['txid'],0), utxo['vout']))) + tx.vin.append(CTxIn(COutPoint(int('0x' + utxo['txid'], 0), utxo['vout']))) for i in script_list: tx.vout.append(CTxOut(10000000, i)) tx.rehash() - signresults = self.nodes[0].signrawtransaction(bytes_to_hex_str(tx.serialize_without_witness()))['hex'] + signresults = self.nodes[0].signrawtransaction(tx.serialize_without_witness().hex())['hex'] txid = self.nodes[0].sendrawtransaction(signresults, True) self.nodes[0].generate(1) sync_blocks(self.nodes) @@ -623,10 +629,10 @@ def create_and_mine_tx_from_txids(self, txids): f = BytesIO(hex_str_to_bytes(txraw)) txtmp.deserialize(f) for j in range(len(txtmp.vout)): - tx.vin.append(CTxIn(COutPoint(int('0x'+i,0), j))) + tx.vin.append(CTxIn(COutPoint(int('0x' + i, 0), j))) tx.vout.append(CTxOut(0, CScript())) tx.rehash() - signresults = self.nodes[0].signrawtransaction(bytes_to_hex_str(tx.serialize_without_witness()))['hex'] + signresults = self.nodes[0].signrawtransaction(tx.serialize_without_witness().hex())['hex'] 
self.nodes[0].sendrawtransaction(signresults, True) self.nodes[0].generate(1) sync_blocks(self.nodes) diff --git a/test/functional/feature_shutdown.py b/test/functional/feature_shutdown.py index d085515be7..566001b781 100755 --- a/test/functional/feature_shutdown.py +++ b/test/functional/feature_shutdown.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2018 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. @@ -11,7 +11,7 @@ from test_framework.util import assert_equal, get_rpc_proxy, wait_until def test_long_call(node): - block = node.waitfornewblock() + block = node.waitfornewblock(5000) assert_equal(block['height'], 0) class ShutdownTest(RavenTestFramework): diff --git a/test/functional/feature_uacomment.py b/test/functional/feature_uacomment.py index 1f91eb3de1..85aa72d2f0 100755 --- a/test/functional/feature_uacomment.py +++ b/test/functional/feature_uacomment.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/feature_unique_assets.py b/test/functional/feature_unique_assets.py index de8a42569f..bde2faa931 100755 --- a/test/functional/feature_unique_assets.py +++ b/test/functional/feature_unique_assets.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2018 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
@@ -10,23 +10,26 @@ from test_framework.test_framework import RavenTestFramework from test_framework.util import assert_contains, assert_does_not_contain_key, assert_equal, assert_raises_rpc_error + def gen_root_asset_name(): size = random.randint(3, 14) name = "" - for _ in range(1, size+1): - ch = random.randint(65, 65+25) + for _ in range(1, size + 1): + ch = random.randint(65, 65 + 25) name += chr(ch) return name + def gen_unique_asset_name(root): tag_ab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789@$%&*()[]{}_.?-:" name = root + "#" tag_size = random.randint(1, 15) - for _ in range(1, tag_size+1): + for _ in range(1, tag_size + 1): tag_c = tag_ab[random.randint(0, len(tag_ab) - 1)] name += tag_c return name + class UniqueAssetTest(RavenTestFramework): def set_test_params(self): self.setup_clean_chain = True @@ -58,7 +61,6 @@ def issue_invalid(self): self.sync_all() root = gen_root_asset_name() asset_name = gen_unique_asset_name(root) - # no root assert_raises_rpc_error(-32600, f"Wallet doesn't have asset: {root}!", n0.issue, asset_name) @@ -91,7 +93,6 @@ def issue_invalid(self): self.sync_all() assert_raises_rpc_error(-8, f"Invalid parameter: asset_name '{asset_name}' has already been used", n0.issue, asset_name) - def issue_unique_test(self): self.log.info("Testing issueunique RPC...") n0, n1 = self.nodes[0], self.nodes[1] @@ -113,7 +114,7 @@ def issue_unique_test(self): # invalidate n0.invalidateblock(block_hash) - assert(root in n0.listmyassets()) + assert (root in n0.listmyassets()) assert_does_not_contain_key(asset_name, n0.listmyassets(asset="*", verbose=False, count=100000, start=0, confs=1)) # reconsider diff --git a/test/functional/feature_versionbits_warning.py b/test/functional/feature_versionbits_warning.py index 9ed2e3384d..51d294af52 100755 --- a/test/functional/feature_versionbits_warning.py +++ b/test/functional/feature_versionbits_warning.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core 
developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/interface_http.py b/test/functional/interface_http.py index ecf21fbcf0..391cdb72a0 100755 --- a/test/functional/interface_http.py +++ b/test/functional/interface_http.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/interface_raven_cli.py b/test/functional/interface_raven_cli.py index 1b70b5c1a7..5c42f67635 100755 --- a/test/functional/interface_raven_cli.py +++ b/test/functional/interface_raven_cli.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test raven-cli""" diff --git a/test/functional/interface_rest.py b/test/functional/interface_rest.py index ddf5b678ad..773542310a 100755 --- a/test/functional/interface_rest.py +++ b/test/functional/interface_rest.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/interface_rpc.py b/test/functional/interface_rpc.py index f1506aa579..2c97c679b2 100755 --- a/test/functional/interface_rpc.py +++ b/test/functional/interface_rpc.py @@ -1,12 +1,11 @@ #!/usr/bin/env python3 # Copyright (c) 2018-2019 The Bitcoin Core developers -# Copyright (c) 2017-2018 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Tests some generic aspects of the RPC interface.""" -import os from test_framework.authproxy import JSONRPCException from test_framework.test_framework import RavenTestFramework from test_framework.util import assert_equal, assert_greater_than_or_equal @@ -26,7 +25,6 @@ def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True - def test_getrpcinfo(self): self.log.info("Testing getrpcinfo...") @@ -37,7 +35,6 @@ def test_getrpcinfo(self): assert_equal(command['method'], 'getrpcinfo') assert_greater_than_or_equal(command['duration'], 0) - def test_batch_request(self): self.log.info("Testing basic JSON-RPC batch request...") diff --git a/test/functional/interface_zmq.py b/test/functional/interface_zmq.py index baac2387f8..10462871df 100755 --- a/test/functional/interface_zmq.py +++ b/test/functional/interface_zmq.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
@@ -11,7 +11,7 @@ import os import struct from test_framework.test_framework import RavenTestFramework, SkipTest -from test_framework.util import assert_equal, bytes_to_hex_str, hash256, x16_hash_block +from test_framework.util import assert_equal, hash256, x16_hash_block # noinspection PyUnresolvedReferences @@ -35,7 +35,7 @@ def receive(self): # noinspection PyUnresolvedReferences -class ZMQTest (RavenTestFramework): +class ZMQTest(RavenTestFramework): def set_test_params(self): self.num_nodes = 2 @@ -96,18 +96,17 @@ def _zmq_test(self): # Should receive the coinbase raw transaction. hex_data = self.rawtx.receive() - assert_equal(bytes_to_hex_str(hash256(hex_data)), - self.nodes[1].getrawtransaction(bytes_to_hex_str(txid), True)["hash"]) + assert_equal(hash256(hex_data).hex(), self.nodes[1].getrawtransaction(txid.hex(), True)["hash"]) # Should receive the generated block hash. - hash_data = bytes_to_hex_str(self.hashblock.receive()) + hash_data = self.hashblock.receive().hex() assert_equal(genhashes[x], hash_data) # The block should only have the coinbase txid. - assert_equal([bytes_to_hex_str(txid)], self.nodes[1].getblock(hash_data)["tx"]) + assert_equal([txid.hex()], self.nodes[1].getblock(hash_data)["tx"]) # Should receive the generated raw block. block = self.rawblock.receive() - assert_equal(genhashes[x], x16_hash_block(bytes_to_hex_str(block[:80]), "2")) + assert_equal(genhashes[x], x16_hash_block(block[:80].hex(), "2")) self.log.info("Wait for tx from second node") payment_txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.0) @@ -115,11 +114,12 @@ def _zmq_test(self): # Should receive the broadcasted txid. txid = self.hashtx.receive() - assert_equal(payment_txid, bytes_to_hex_str(txid)) + assert_equal(payment_txid, txid.hex()) # Should receive the broadcasted raw transaction. 
hex_data = self.rawtx.receive() - assert_equal(payment_txid, bytes_to_hex_str(hash256(hex_data))) + assert_equal(payment_txid, hash256(hex_data).hex()) + if __name__ == '__main__': ZMQTest().main() diff --git a/test/functional/mempool_limit.py b/test/functional/mempool_limit.py index 075fda459b..341b0ae410 100755 --- a/test/functional/mempool_limit.py +++ b/test/functional/mempool_limit.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/mempool_packages.py b/test/functional/mempool_packages.py index 0c05695987..2e00d3c0d1 100755 --- a/test/functional/mempool_packages.py +++ b/test/functional/mempool_packages.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/mempool_persist.py b/test/functional/mempool_persist.py index c53f44db1e..de4ff01002 100755 --- a/test/functional/mempool_persist.py +++ b/test/functional/mempool_persist.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/mempool_reorg.py b/test/functional/mempool_reorg.py index d18c41cc93..35d7bed256 100755 --- a/test/functional/mempool_reorg.py +++ b/test/functional/mempool_reorg.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test mempool re-org scenarios. diff --git a/test/functional/mempool_resurrect.py b/test/functional/mempool_resurrect.py index 79bc2068ec..c683b7bab2 100755 --- a/test/functional/mempool_resurrect.py +++ b/test/functional/mempool_resurrect.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/mempool_spend_coinbase.py b/test/functional/mempool_spend_coinbase.py index 6c79b6bac8..df5e32f1f1 100755 --- a/test/functional/mempool_spend_coinbase.py +++ b/test/functional/mempool_spend_coinbase.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test spending coinbase transactions. 
diff --git a/test/functional/mining_basic.py b/test/functional/mining_basic.py index 2619e862ff..659f578af5 100755 --- a/test/functional/mining_basic.py +++ b/test/functional/mining_basic.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test mining RPCs diff --git a/test/functional/mining_getblocktemplate_longpoll.py b/test/functional/mining_getblocktemplate_longpoll.py index c5272a62c8..5c5374d8c8 100755 --- a/test/functional/mining_getblocktemplate_longpoll.py +++ b/test/functional/mining_getblocktemplate_longpoll.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/mining_prioritisetransaction.py b/test/functional/mining_prioritisetransaction.py index f2165540b3..264673b516 100755 --- a/test/functional/mining_prioritisetransaction.py +++ b/test/functional/mining_prioritisetransaction.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2018 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/p2p_compactblocks.py b/test/functional/p2p_compactblocks.py index e4dfe5290e..9de2d5c89d 100755 --- a/test/functional/p2p_compactblocks.py +++ b/test/functional/p2p_compactblocks.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/p2p_disconnect_ban.py b/test/functional/p2p_disconnect_ban.py index 6a3e3161a0..0b4ecb88a2 100755 --- a/test/functional/p2p_disconnect_ban.py +++ b/test/functional/p2p_disconnect_ban.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/p2p_feefilter.py b/test/functional/p2p_feefilter.py index 2863dda09c..280f155323 100755 --- a/test/functional/p2p_feefilter.py +++ b/test/functional/p2p_feefilter.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/p2p_fingerprint.py b/test/functional/p2p_fingerprint.py index 73aaa10f25..300aa05465 100755 --- a/test/functional/p2p_fingerprint.py +++ b/test/functional/p2p_fingerprint.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/p2p_invalid_block.py b/test/functional/p2p_invalid_block.py deleted file mode 100755 index c48150a94f..0000000000 --- a/test/functional/p2p_invalid_block.py +++ /dev/null @@ -1,120 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers -# Distributed under the MIT software license, see the accompanying -# file COPYING or http://www.opensource.org/licenses/mit-license.php. - -""" -Test node responses to invalid blocks. - -In this test we connect to one node over p2p, and test block requests: -1) Valid blocks should be requested and become chain tip. -2) Invalid block with duplicated transaction should be re-requested. -3) Invalid block with bad coinbase value should be rejected and not -re-requested. -""" - -import copy -import time -from test_framework.test_framework import ComparisonTestFramework -from test_framework.util import assert_equal -from test_framework.comptool import TestManager, TestInstance, RejectResult -from test_framework.blocktools import create_block, create_coinbase, create_transaction, COIN -from test_framework.mininode import NetworkThread - -# Use the ComparisonTestFramework with 1 node: only use --testbinary. -class InvalidBlockRequestTest(ComparisonTestFramework): - - """ Can either run this test as 1 node with expected answers, or two and compare them. 
- Change the "outcome" variable from each TestInstance object to only do the comparison. """ - def set_test_params(self): - self.num_nodes = 1 - self.setup_clean_chain = True - - def run_test(self): - test = TestManager(self, self.options.tmpdir) - test.add_all_connections(self.nodes) - self.tip = None - self.block_time = None - NetworkThread().start() # Start up network handling in another thread - test.run() - - def get_tests(self): - if self.tip is None: - self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0) - self.block_time = int(time.time())+1 - - ''' - Create a new block with an anyone-can-spend coinbase - ''' - height = 1 - block = create_block(self.tip, create_coinbase(height), self.block_time) - self.block_time += 1 - block.solve() - # Save the coinbase for later - self.block1 = block - self.tip = block.sha256 - height += 1 - yield TestInstance([[block, True]]) - - ''' - Now we need that block to mature so we can spend the coinbase. - ''' - test = TestInstance(sync_every_block=False) - for _ in range(100): - block = create_block(self.tip, create_coinbase(height), self.block_time) - block.solve() - self.tip = block.sha256 - self.block_time += 1 - test.blocks_and_transactions.append([block, True]) - height += 1 - yield test - - ''' - Now we use merkle-root malleability to generate an invalid block with - same blockheader. - Manufacture a block with 3 transactions (coinbase, spend of prior - coinbase, spend of that spend). Duplicate the 3rd transaction to - leave merkle root and blockheader unchanged but invalidate the block. 
- ''' - block2 = create_block(self.tip, create_coinbase(height), self.block_time) - self.block_time += 1 - - # b'0x51' is OP_TRUE - tx1 = create_transaction(self.block1.vtx[0], 0, b'\x51', 5000 * COIN) - tx2 = create_transaction(tx1, 0, b'\x51', 5000 * COIN) - - block2.vtx.extend([tx1, tx2]) - block2.hashMerkleRoot = block2.calc_merkle_root() - block2.rehash() - block2.solve() - orig_hash = block2.sha256 - block2_orig = copy.deepcopy(block2) - - # Mutate block 2 - block2.vtx.append(tx2) - assert_equal(block2.hashMerkleRoot, block2.calc_merkle_root()) - assert_equal(orig_hash, block2.rehash()) - assert(block2_orig.vtx != block2.vtx) - - self.tip = block2.sha256 - yield TestInstance([[block2, RejectResult(16, b'bad-txns-duplicate')], [block2_orig, True]]) - height += 1 - - ''' - Make sure that a totally screwed up block is not valid. - ''' - block3 = create_block(self.tip, create_coinbase(height), self.block_time) - self.block_time += 1 - block3.vtx[0].vout[0].nValue = 100 * COIN # Too high! - block3.vtx[0].sha256=None - block3.vtx[0].calc_x16r() - block3.hashMerkleRoot = block3.calc_merkle_root() - block3.rehash() - block3.solve() - - yield TestInstance([[block3, RejectResult(16, b'bad-cb-amount')]]) - - -if __name__ == '__main__': - InvalidBlockRequestTest().main() diff --git a/test/functional/p2p_invalid_tx.py b/test/functional/p2p_invalid_tx.py deleted file mode 100755 index 75a065172c..0000000000 --- a/test/functional/p2p_invalid_tx.py +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers -# Distributed under the MIT software license, see the accompanying -# file COPYING or http://www.opensource.org/licenses/mit-license.php. - -""" -Test node responses to invalid transactions. - -In this test we connect to one node over p2p, and test tx requests. 
-""" - -import time -from test_framework.test_framework import ComparisonTestFramework -from test_framework.comptool import TestManager, TestInstance, RejectResult -from test_framework.blocktools import create_block, create_coinbase, create_transaction, COIN -from test_framework.mininode import NetworkThread - -# Use the ComparisonTestFramework with 1 node: only use --testbinary. -class InvalidTxRequestTest(ComparisonTestFramework): - - """ Can either run this test as 1 node with expected answers, or two and compare them. - Change the "outcome" variable from each TestInstance object to only do the comparison. """ - def set_test_params(self): - self.num_nodes = 1 - self.setup_clean_chain = True - - def run_test(self): - test = TestManager(self, self.options.tmpdir) - test.add_all_connections(self.nodes) - self.tip = None - self.block_time = None - NetworkThread().start() # Start up network handling in another thread - test.run() - - def get_tests(self): - if self.tip is None: - self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0) - self.block_time = int(time.time())+1 - - ''' - Create a new block with an anyone-can-spend coinbase - ''' - height = 1 - block = create_block(self.tip, create_coinbase(height), self.block_time) - self.block_time += 1 - block.solve() - # Save the coinbase for later - self.block1 = block - self.tip = block.sha256 - height += 1 - yield TestInstance([[block, True]]) - - ''' - Now we need that block to mature so we can spend the coinbase. 
- ''' - test = TestInstance(sync_every_block=False) - for _ in range(100): - block = create_block(self.tip, create_coinbase(height), self.block_time) - block.solve() - self.tip = block.sha256 - self.block_time += 1 - test.blocks_and_transactions.append([block, True]) - height += 1 - yield test - - # b'\x64' is OP_NOTIF - # Transaction will be rejected with code 16 (REJECT_INVALID) - tx1 = create_transaction(self.block1.vtx[0], 0, b'\x64', 5000 * COIN - 12000) - yield TestInstance([[tx1, RejectResult(16, b'mandatory-script-verify-flag-failed')]]) - - # TODO: test further transactions... - -if __name__ == '__main__': - InvalidTxRequestTest().main() diff --git a/test/functional/p2p_leak.py b/test/functional/p2p_leak.py index 12e2f17761..df72c312dd 100755 --- a/test/functional/p2p_leak.py +++ b/test/functional/p2p_leak.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/p2p_leak_tx.py b/test/functional/p2p_leak_tx.py index 99bf1bdba6..139471a0fc 100755 --- a/test/functional/p2p_leak_tx.py +++ b/test/functional/p2p_leak_tx.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017-2018 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/p2p_mempool.py b/test/functional/p2p_mempool.py index d8ae7e2563..dd4182f2b8 100755 --- a/test/functional/p2p_mempool.py +++ b/test/functional/p2p_mempool.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/p2p_segwit.py b/test/functional/p2p_segwit.py index 3999b1ca18..2707240411 100755 --- a/test/functional/p2p_segwit.py +++ b/test/functional/p2p_segwit.py @@ -1,10 +1,12 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. -"""Test segwit transactions and blocks on P2P network.""" +""" +Test segwit transactions and blocks on P2P network. 
+""" import time import random @@ -17,25 +19,106 @@ hash160, OP_EQUAL, sha256, OP_0, OP_RETURN, ser_uint256, OP_2DROP, uint256_from_str, OP_DROP, struct, OP_1, OP_16, SIGHASH_ANYONECANPAY, SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE, OP_IF, OP_ELSE, OP_ENDIF, signature_hash) from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment, get_witness_script, WITNESS_COMMITMENT_HEADER -from test_framework.key import CECKey, CPubKey +from test_framework.key import ECKey # The versionbit bit used to signal activation of SegWit VB_WITNESS_BIT = 1 VB_PERIOD = 144 VB_TOP_BITS = 0x20000000 - MAX_SIGOP_COST = 80000 -# Calculate the virtual size of a witness block: -# (base + witness/4) +class UTXO: + """Used to keep track of anyone-can-spend outputs that we can use inside the tests""" + def __init__(self, x16r, n, n_value): + self.x16r = x16r + self.n = n + self.nValue = n_value + + + +def get_p2pkh_script(pubkeyhash): + """ + Helper for getting the script associated with a P2PKH + :param pubkeyhash: + :return: CScript + """ + return CScript([CScriptOp(OP_DUP), CScriptOp(OP_HASH160), pubkeyhash, CScriptOp(OP_EQUALVERIFY), CScriptOp(OP_CHECKSIG)]) + + +def sign_p2pk_witness_input(script, tx_to, in_idx, hashtype, value, key): + """ + Add signature for a P2PK witness program. 
+ :param script: + :param tx_to: + :param in_idx: + :param hashtype: + :param value: + :param key: + :return: + """ + tx_hash = segwit_version1_signature_hash(script, tx_to, in_idx, hashtype, value) + signature = key.sign_ecdsa(tx_hash) + chr(hashtype).encode('latin-1') + tx_to.wit.vtxinwit[in_idx].scriptWitness.stack = [signature, script] + tx_to.rehash() + + def get_virtual_size(witness_block): + """ + Calculate the virtual size of a witness block: (base + witness/4) + :param witness_block: + :return: size of the supplied witness block + """ base_size = len(witness_block.serialize()) total_size = len(witness_block.serialize(with_witness=True)) # the "+3" is so we round up - vsize = int((3*base_size + total_size + 3)/4) + vsize = int((3 * base_size + total_size + 3) / 4) return vsize + +def test_transaction_acceptance(self, tx, with_witness, accepted, reason=None): + """ + + :param self: + :param tx: + :param with_witness: + :param accepted: + :param reason: + :return: + """ + tx_message = MsgTx(tx) + if with_witness: + tx_message = MsgWitnessTx(tx) + self.send_message(tx_message) + self.sync_with_ping() + assert_equal(tx.hash in self.connection.rpc.getrawmempool(), accepted) + if reason is not None and not accepted: + # Check the rejection reason as well. + with mininode_lock: + assert_equal(self.last_message["reject"].reason, reason) + + +def test_witness_block(node, p2p, block, accepted, with_witness=True, reason=None): + """ + Send a block to the node and check that it's accepted + - Submit the block over the p2p interface + - use the getbestblockhash rpc to check for acceptance. 
+ :param node: + :param p2p: + :param block: + :param accepted: + :param with_witness: + :param reason: + :return: + """ + reason = [reason] if reason else [] + with node.assert_debug_log(expected_msgs=reason): + p2p.send_message(msg_block(block) if with_witness else msg_no_witness_block(block)) + p2p.sync_with_ping() + assert_equal(node.getbestblockhash() == block.hash, accepted) + + + class TestNode(NodeConnCB): def __init__(self): super().__init__() @@ -56,7 +139,7 @@ def announce_block_and_wait_for_getdata(self, block, use_header): self.last_message.pop("getdata", None) self.last_message.pop("getheaders", None) msg = MsgHeaders() - msg.headers = [ CBlockHeader(block) ] + msg.headers = [CBlockHeader(block)] if use_header: self.send_message(msg) else: @@ -72,46 +155,19 @@ def request_block(self, blockhash, inv_type, timeout=60): self.wait_for_block(blockhash, timeout) return self.last_message["block"].block - def test_transaction_acceptance(self, tx, with_witness, accepted, reason=None): - tx_message = MsgTx(tx) - if with_witness: - tx_message = MsgWitnessTx(tx) - self.send_message(tx_message) - self.sync_with_ping() - assert_equal(tx.hash in self.connection.rpc.getrawmempool(), accepted) - if reason is not None and not accepted: - # Check the rejection reason as well. 
- with mininode_lock: - assert_equal(self.last_message["reject"].reason, reason) - - # Test whether a witness block had the correct effect on the tip - def test_witness_block(self, block, accepted, with_witness=True): - if with_witness: - self.send_message(MsgWitnessBlock(block)) - else: - self.send_message(MsgBlock(block)) - self.sync_with_ping() - assert_equal(self.connection.rpc.getbestblockhash() == block.hash, accepted) -# Used to keep track of anyone-can-spend outputs that we can use in the tests -class UTXO: - def __init__(self, x16r, n, n_value): - self.x16r = x16r - self.n = n - self.nValue = n_value -# Helper for getting the script associated with a P2PKH -def get_p2pkh_script(pubkeyhash): - return CScript([CScriptOp(OP_DUP), CScriptOp(OP_HASH160), pubkeyhash, CScriptOp(OP_EQUALVERIFY), CScriptOp(OP_CHECKSIG)]) -# Add signature for a P2PK witness program. -def sign_p2pk_witness_input(script, tx_to, in_idx, hashtype, value, key): - tx_hash = segwit_version1_signature_hash(script, tx_to, in_idx, hashtype, value) - signature = key.sign(tx_hash) + chr(hashtype).encode('latin-1') - tx_to.wit.vtxinwit[in_idx].scriptWitness.stack = [signature, script] - tx_to.rehash() + + + + + + + +# noinspection PyPep8Naming class SegWitTest(RavenTestFramework): def set_test_params(self): self.setup_clean_chain = True @@ -125,6 +181,7 @@ def setup_network(self): self.sync_all() ''' Helpers ''' + # Build a block on top of node0's tip. def build_next_block(self, n_version=4): tip = self.nodes[0].getbestblockhash() @@ -143,11 +200,90 @@ def update_witness_block_with_transactions(block, tx_list, nonce=0): block.solve() return + def run_test(self): + # Setup the p2p connections and start up the network thread. 
+ self.test_node = TestNode() # sets NODE_WITNESS|NODE_NETWORK + self.old_node = TestNode() # only NODE_NETWORK + self.std_node = TestNode() # for testing node1 (fRequireStandard=true) + + self.p2p_connections = [self.test_node, self.old_node] + + self.connections = [] + self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.test_node, services=NODE_NETWORK | NODE_WITNESS)) + self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.old_node, services=NODE_NETWORK)) + self.connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], self.std_node, services=NODE_NETWORK | NODE_WITNESS)) + self.test_node.add_connection(self.connections[0]) + self.old_node.add_connection(self.connections[1]) + self.std_node.add_connection(self.connections[2]) + + NetworkThread().start() # Start up network handling in another thread + + # Keep a place to store utxo's that can be used in later tests + self.utxo = [] + + # Test logic begins here + self.test_node.wait_for_verack() + + self.log.info("Starting tests before segwit lock in:") + + self.test_witness_services() # Verifies NODE_WITNESS + self.test_non_witness_transaction() # non-witness tx's are accepted + # self.test_unnecessary_witness_before_segwit_activation() + # self.test_block_relay(segwit_activated=False) + + # Advance to segwit being 'started' + # self.advance_to_segwit_started() + sync_blocks(self.nodes) + self.test_getblocktemplate_before_lockin() + + sync_blocks(self.nodes) + + # At lockin, nothing should change. 
+ self.log.info("Testing behavior post lockin, pre-activation") + self.advance_to_segwit_lockin() + + # Retest unnecessary witnesses + self.test_unnecessary_witness_before_segwit_activation() + self.test_witness_tx_relay_before_segwit_activation() + self.test_block_relay(segwit_activated=False) + self.test_p2sh_witness(segwit_activated=False) + self.test_standardness_v0(segwit_activated=False) + + sync_blocks(self.nodes) + + # Now activate segwit + self.log.info("Testing behavior after segwit activation") + self.advance_to_segwit_active() + + sync_blocks(self.nodes) + + # Test P2SH witness handling again + self.test_p2sh_witness(segwit_activated=True) + self.test_witness_commitments() + self.test_block_malleability() + self.test_witness_block_size() + self.test_submit_block() + self.test_extra_witness_data() + self.test_max_witness_push_length() + self.test_max_witness_program_length() + self.test_witness_input_length() + self.test_block_relay(segwit_activated=True) + self.test_tx_relay_after_segwit_activation() + self.test_standardness_v0(segwit_activated=True) + self.test_segwit_versions() + self.test_premature_coinbase_witness_spend() + self.test_uncompressed_pubkey() + self.test_signature_version_1() + self.test_non_standard_witness() + sync_blocks(self.nodes) + self.test_upgrade_after_activation(node_id=2) + self.test_witness_sigops() + ''' Individual tests ''' + def test_witness_services(self): self.log.info("Verifying NODE_WITNESS service bit") - assert((self.test_node.connection.nServices & NODE_WITNESS) != 0) - + assert ((self.test_node.connection.nServices & NODE_WITNESS) != 0) # See if sending a regular transaction works, and create a utxo # to use in later tests. 
@@ -158,15 +294,15 @@ def test_non_witness_transaction(self): block = self.build_next_block(n_version=1) block.solve() self.test_node.send_message(MsgBlock(block)) - self.test_node.sync_with_ping() # make sure the block was processed + self.test_node.sync_with_ping() # make sure the block was processed txid = block.vtx[0].x16r - self.nodes[0].generate(99) # let the block mature + self.nodes[0].generate(99) # let the block mature # Create a transaction that spends the coinbase tx = CTransaction() tx.vin.append(CTxIn(COutPoint(txid, 0), b"")) - tx.vout.append(CTxOut(49*100000000, CScript([OP_TRUE]))) + tx.vout.append(CTxOut(49 * 100000000, CScript([OP_TRUE]))) tx.calc_x16r() # Check that serializing it with or without witness is the same @@ -174,31 +310,30 @@ def test_non_witness_transaction(self): assert_equal(MsgTx(tx).serialize(), MsgWitnessTx(tx).serialize()) self.test_node.send_message(MsgWitnessTx(tx)) - self.test_node.sync_with_ping() # make sure the tx was processed - assert(tx.hash in self.nodes[0].getrawmempool()) + self.test_node.sync_with_ping() # make sure the tx was processed + assert (tx.hash in self.nodes[0].getrawmempool()) # Save this transaction for later - self.utxo.append(UTXO(tx.x16r, 0, 49*100000000)) + self.utxo.append(UTXO(tx.x16r, 0, 49 * 100000000)) self.nodes[0].generate(1) - # Verify that blocks with witnesses are rejected before activation. 
def test_unnecessary_witness_before_segwit_activation(self): self.log.info("Testing behavior of unnecessary witnesses") # For now, rely on earlier tests to have created at least one utxo for # us to use - assert(len(self.utxo) > 0) - assert(get_bip9_status(self.nodes[0], 'segwit')['status'] != 'active') + assert (len(self.utxo) > 0) + assert (get_bip9_status(self.nodes[0], 'segwit')['status'] != 'active') tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].x16r, self.utxo[0].n), b"")) - tx.vout.append(CTxOut(self.utxo[0].nValue-1000, CScript([OP_TRUE]))) + tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, CScript([OP_TRUE]))) tx.wit.vtxinwit.append(CTxInWitness()) tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)])] # Verify the hash with witness differs from the txid # (otherwise our testing framework must be broken!) tx.rehash() - assert(tx.x16r != tx.calc_x16r(with_witness=True)) + assert (tx.x16r != tx.calc_x16r(with_witness=True)) # Construct a segwit-signaling block that includes the transaction. block = self.build_next_block(n_version=(VB_TOP_BITS | (1 << VB_WITNESS_BIT))) @@ -209,7 +344,7 @@ def test_unnecessary_witness_before_segwit_activation(self): # TODO: fix synchronization so we can test reject reason # Right now, ravend delays sending reject messages for blocks # until the future, making synchronization here difficult. - #assert_equal(self.test_node.last_message["reject"].reason, "unexpected-witness") + # assert_equal(self.test_node.last_message["reject"].reason, "unexpected-witness") # But it should not be permanently marked bad... # Resend without witness information. @@ -230,7 +365,7 @@ def test_unnecessary_witness_before_segwit_activation(self): # to a transaction, eg by violating standard-ness checks. 
tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx.x16r, 0), b"")) - tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, scriptPubKey)) + tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, scriptPubKey)) tx2.rehash() self.test_node.test_transaction_acceptance(tx2, False, True) self.nodes[0].generate(1) @@ -243,9 +378,9 @@ def test_unnecessary_witness_before_segwit_activation(self): # to the rejection cache. tx3 = CTransaction() tx3.vin.append(CTxIn(COutPoint(tx2.x16r, 0), CScript([p2sh_program]))) - tx3.vout.append(CTxOut(tx2.vout[0].nValue-1000, scriptPubKey)) + tx3.vout.append(CTxOut(tx2.vout[0].nValue - 1000, scriptPubKey)) tx3.wit.vtxinwit.append(CTxInWitness()) - tx3.wit.vtxinwit[0].scriptWitness.stack = [b'a'*400000] + tx3.wit.vtxinwit[0].scriptWitness.stack = [b'a' * 400000] tx3.rehash() # Note that this should be rejected for the premature witness reason, # rather than a policy check, since segwit hasn't activated yet. @@ -257,7 +392,7 @@ def test_unnecessary_witness_before_segwit_activation(self): # Now create a new anyone-can-spend utxo for the next test. tx4 = CTransaction() tx4.vin.append(CTxIn(COutPoint(tx3.x16r, 0), CScript([p2sh_program]))) - tx4.vout.append(CTxOut(tx3.vout[0].nValue-1000, CScript([OP_TRUE]))) + tx4.vout.append(CTxOut(tx3.vout[0].nValue - 1000, CScript([OP_TRUE]))) tx4.rehash() self.test_node.test_transaction_acceptance(tx3, False, True) self.test_node.test_transaction_acceptance(tx4, False, True) @@ -269,16 +404,15 @@ def test_unnecessary_witness_before_segwit_activation(self): self.utxo.pop(0) self.utxo.append(UTXO(tx4.x16r, 0, tx4.vout[0].nValue)) - # Mine enough blocks for segwit's vb state to be 'started'. def advance_to_segwit_started(self): height = self.nodes[0].getblockcount() # Will need to rewrite the tests here if we are past the first period - assert(height < VB_PERIOD - 1) + assert (height < VB_PERIOD - 1) # Genesis block is 'defined'. 
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'defined') # Advance to end of period, status should now be 'started' - self.nodes[0].generate(VB_PERIOD-height-1) + self.nodes[0].generate(VB_PERIOD - height - 1) assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started') # Mine enough blocks to lock in segwit, but don't activate. @@ -287,26 +421,24 @@ def advance_to_segwit_started(self): def advance_to_segwit_lockin(self): assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started') # Advance to end of period, and verify lock-in happens at the end - self.nodes[0].generate(VB_PERIOD-1) + self.nodes[0].generate(VB_PERIOD - 1) height = self.nodes[0].getblockcount() - assert((height % VB_PERIOD) == VB_PERIOD - 2) + assert ((height % VB_PERIOD) == VB_PERIOD - 2) assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started') self.nodes[0].generate(1) assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in') - # Mine enough blocks to activate segwit. # TODO: we could verify that activation only happens at the right threshold # of signalling blocks, rather than just at the right period boundary. 
def advance_to_segwit_active(self): assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in') height = self.nodes[0].getblockcount() - self.nodes[0].generate(VB_PERIOD - (height%VB_PERIOD) - 2) + self.nodes[0].generate(VB_PERIOD - (height % VB_PERIOD) - 2) assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in') self.nodes[0].generate(1) assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'active') - # This test can only be run after segwit has activated def test_witness_commitments(self): self.log.info("Testing witness commitments") @@ -317,7 +449,7 @@ def test_witness_commitments(self): block.solve() # Test the test -- witness serialization should be different - assert(MsgWitnessBlock(block).serialize() != MsgBlock(block).serialize()) + assert (MsgWitnessBlock(block).serialize() != MsgBlock(block).serialize()) # This empty block should be valid. self.test_node.test_witness_block(block, accepted=True) @@ -328,7 +460,7 @@ def test_witness_commitments(self): block_2.solve() # The commitment should have changed! - assert(block_2.vtx[0].vout[-1] != block.vtx[0].vout[-1]) + assert (block_2.vtx[0].vout[-1] != block.vtx[0].vout[-1]) # This should also be valid. 
self.test_node.test_witness_block(block_2, accepted=True) @@ -342,13 +474,13 @@ def test_witness_commitments(self): witness_program = CScript([OP_TRUE]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) - tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey)) + tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, scriptPubKey)) tx.rehash() # tx2 will spend tx1, and send back to a regular anyone-can-spend address tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx.x16r, 0), b"")) - tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, witness_program)) + tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, witness_program)) tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program] tx2.rehash() @@ -376,7 +508,7 @@ def test_witness_commitments(self): block_3.vtx[0].rehash() block_3.hashMerkleRoot = block_3.calc_merkle_root() block_3.rehash() - assert(len(block_3.vtx[0].vout) == 4) # 3 OP_returns + assert (len(block_3.vtx[0].vout) == 4) # 3 OP_returns block_3.solve() self.test_node.test_witness_block(block_3, accepted=True) @@ -385,7 +517,7 @@ def test_witness_commitments(self): block_4 = self.build_next_block() tx3 = CTransaction() tx3.vin.append(CTxIn(COutPoint(tx2.x16r, 0), b"")) - tx3.vout.append(CTxOut(tx.vout[0].nValue-1000, witness_program)) + tx3.vout.append(CTxOut(tx.vout[0].nValue - 1000, witness_program)) tx3.rehash() block_4.vtx.append(tx3) block_4.hashMerkleRoot = block_4.calc_merkle_root() @@ -396,7 +528,6 @@ def test_witness_commitments(self): self.utxo.pop(0) self.utxo.append(UTXO(tx3.x16r, 0, tx3.vout[0].nValue)) - def test_block_malleability(self): self.log.info("Testing witness block malleability") @@ -407,20 +538,20 @@ def test_block_malleability(self): add_witness_commitment(block) block.solve() - block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.append(b'a'*5000000) - assert(get_virtual_size(block) > MAX_BLOCK_BASE_SIZE) + 
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.append(b'a' * 5000000) + assert (get_virtual_size(block) > MAX_BLOCK_BASE_SIZE) # We can't send over the p2p network, because this is too big to relay # TODO: repeat this test with a block that can be relayed self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) - assert(self.nodes[0].getbestblockhash() != block.hash) + assert (self.nodes[0].getbestblockhash() != block.hash) block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.pop() - assert(get_virtual_size(block) < MAX_BLOCK_BASE_SIZE) + assert (get_virtual_size(block) < MAX_BLOCK_BASE_SIZE) self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) - assert(self.nodes[0].getbestblockhash() == block.hash) + assert (self.nodes[0].getbestblockhash() == block.hash) # Now make sure that malleating the witness nonce doesn't # result in a block permanently marked bad. @@ -430,14 +561,13 @@ def test_block_malleability(self): # Change the nonce -- should not cause the block to be permanently # failed - block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(1) ] + block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(1)] self.test_node.test_witness_block(block, accepted=False) # Changing the witness nonce doesn't change the block hash - block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(0) ] + block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(0)] self.test_node.test_witness_block(block, accepted=True) - def test_witness_block_size(self): self.log.info("Testing witness block size limit") # TODO: Test that non-witness carrying blocks can't exceed 1MB @@ -446,16 +576,16 @@ def test_witness_block_size(self): # Test that witness-bearing blocks are limited at ceil(base + wit/4) <= 1MB. block = self.build_next_block() - assert(len(self.utxo) > 0) - + assert (len(self.utxo) > 0) + # Create a P2WSH transaction. # The witness program will be a bunch of OP_2DROP's, followed by OP_TRUE. 
# This should give us plenty of room to tweak the spending tx's # virtual size. - NUM_DROPS = 200 # 201 max ops per script! + NUM_DROPS = 200 # 201 max ops per script! NUM_OUTPUTS = 50 - witness_program = CScript([OP_2DROP]*NUM_DROPS + [OP_TRUE]) + witness_program = CScript([OP_2DROP] * NUM_DROPS + [OP_TRUE]) witness_hash = uint256_from_str(sha256(witness_program)) scriptPubKey = CScript([OP_0, ser_uint256(witness_hash)]) @@ -464,11 +594,11 @@ def test_witness_block_size(self): parent_tx = CTransaction() parent_tx.vin.append(CTxIn(prevout, b"")) - child_value = int(value/NUM_OUTPUTS) + child_value = int(value / NUM_OUTPUTS) for i in range(NUM_OUTPUTS): parent_tx.vout.append(CTxOut(child_value, scriptPubKey)) parent_tx.vout[0].nValue -= 50000 - assert(parent_tx.vout[0].nValue > 0) + assert (parent_tx.vout[0].nValue > 0) parent_tx.rehash() child_tx = CTransaction() @@ -477,17 +607,17 @@ def test_witness_block_size(self): child_tx.vout = [CTxOut(value - 100000, CScript([OP_TRUE]))] for i in range(NUM_OUTPUTS): child_tx.wit.vtxinwit.append(CTxInWitness()) - child_tx.wit.vtxinwit[-1].scriptWitness.stack = [b'a'*195]*(2*NUM_DROPS) + [witness_program] + child_tx.wit.vtxinwit[-1].scriptWitness.stack = [b'a' * 195] * (2 * NUM_DROPS) + [witness_program] child_tx.rehash() self.update_witness_block_with_transactions(block, [parent_tx, child_tx]) vsize = get_virtual_size(block) - additional_bytes = (MAX_BLOCK_BASE_SIZE - vsize)*4 + additional_bytes = (MAX_BLOCK_BASE_SIZE - vsize) * 4 i = 0 while additional_bytes > 0: # Add some more bytes to each input until we hit MAX_BLOCK_BASE_SIZE+1 - extra_bytes = min(additional_bytes+1, 55) - block.vtx[-1].wit.vtxinwit[int(i/(2*NUM_DROPS))].scriptWitness.stack[i%(2*NUM_DROPS)] = b'a'*(195+extra_bytes) + extra_bytes = min(additional_bytes + 1, 55) + block.vtx[-1].wit.vtxinwit[int(i / (2 * NUM_DROPS))].scriptWitness.stack[i % (2 * NUM_DROPS)] = b'a' * (195 + extra_bytes) additional_bytes -= extra_bytes i += 1 @@ -498,17 +628,17 @@ def 
test_witness_block_size(self): assert_equal(vsize, MAX_BLOCK_BASE_SIZE + 1) # Make sure that our test case would exceed the old max-network-message # limit - assert(len(block.serialize(True)) > 2*1024*1024) + assert (len(block.serialize(True)) > 2 * 1024 * 1024) self.test_node.test_witness_block(block, accepted=False) # Now resize the second transaction to make the block fit. cur_length = len(block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0]) - block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0] = b'a'*(cur_length-1) + block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0] = b'a' * (cur_length - 1) block.vtx[0].vout.pop() add_witness_commitment(block) block.solve() - assert(get_virtual_size(block) == MAX_BLOCK_BASE_SIZE) + assert (get_virtual_size(block) == MAX_BLOCK_BASE_SIZE) self.test_node.test_witness_block(block, accepted=True) @@ -516,7 +646,6 @@ def test_witness_block_size(self): self.utxo.pop(0) self.utxo.append(UTXO(block.vtx[-1].x16r, 0, block.vtx[-1].vout[0].nValue)) - # submitblock will try to add the nonce automatically, so that mining # software doesn't need to worry about doing so itself. def test_submit_block(self): @@ -525,10 +654,10 @@ def test_submit_block(self): # Try using a custom nonce and then don't supply it. # This shouldn't possibly work. add_witness_commitment(block, nonce=1) - block.vtx[0].wit = CTxWitness() # drop the nonce + block.vtx[0].wit = CTxWitness() # drop the nonce block.solve() self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) - assert(self.nodes[0].getbestblockhash() != block.hash) + assert (self.nodes[0].getbestblockhash() != block.hash) # Now redo commitment with the standard nonce, but let ravend fill it in. add_witness_commitment(block, nonce=0) @@ -551,15 +680,14 @@ def test_submit_block(self): self.nodes[0].submitblock(bytes_to_hex_str(block_2.serialize(True))) # Tip should not advance! 
- assert(self.nodes[0].getbestblockhash() != block_2.hash) - + assert (self.nodes[0].getbestblockhash() != block_2.hash) # Consensus tests of extra witness data in a transaction. def test_extra_witness_data(self): self.log.info("Testing extra witness data in tx") - assert(len(self.utxo) > 0) - + assert (len(self.utxo) > 0) + block = self.build_next_block() witness_program = CScript([OP_DROP, OP_TRUE]) @@ -569,8 +697,8 @@ def test_extra_witness_data(self): # First try extra witness data on a tx that doesn't require a witness tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].x16r, self.utxo[0].n), b"")) - tx.vout.append(CTxOut(self.utxo[0].nValue-2000, scriptPubKey)) - tx.vout.append(CTxOut(1000, CScript([OP_TRUE]))) # non-witness output + tx.vout.append(CTxOut(self.utxo[0].nValue - 2000, scriptPubKey)) + tx.vout.append(CTxOut(1000, CScript([OP_TRUE]))) # non-witness output tx.wit.vtxinwit.append(CTxInWitness()) tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([])] tx.rehash() @@ -591,12 +719,12 @@ def test_extra_witness_data(self): # Now try extra witness/signature data on an input that DOES require a # witness tx2 = CTransaction() - tx2.vin.append(CTxIn(COutPoint(tx.x16r, 0), b"")) # witness output - tx2.vin.append(CTxIn(COutPoint(tx.x16r, 1), b"")) # non-witness + tx2.vin.append(CTxIn(COutPoint(tx.x16r, 0), b"")) # witness output + tx2.vin.append(CTxIn(COutPoint(tx.x16r, 1), b"")) # non-witness tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE]))) tx2.wit.vtxinwit.extend([CTxInWitness(), CTxInWitness()]) - tx2.wit.vtxinwit[0].scriptWitness.stack = [ CScript([CScriptNum(1)]), CScript([CScriptNum(1)]), witness_program ] - tx2.wit.vtxinwit[1].scriptWitness.stack = [ CScript([OP_TRUE]) ] + tx2.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)]), CScript([CScriptNum(1)]), witness_program] + tx2.wit.vtxinwit[1].scriptWitness.stack = [CScript([OP_TRUE])] block = self.build_next_block() self.update_witness_block_with_transactions(block, 
[tx2]) @@ -629,12 +757,11 @@ def test_extra_witness_data(self): self.utxo.pop(0) self.utxo.append(UTXO(tx2.x16r, 0, tx2.vout[0].nValue)) - def test_max_witness_push_length(self): """ Should only allow up to 520 byte pushes in witness stack """ self.log.info("Testing maximum witness push size") MAX_SCRIPT_ELEMENT_SIZE = 520 - assert(len(self.utxo)) + assert (len(self.utxo)) block = self.build_next_block() @@ -644,15 +771,15 @@ def test_max_witness_push_length(self): tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].x16r, self.utxo[0].n), b"")) - tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey)) + tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, scriptPubKey)) tx.rehash() tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx.x16r, 0), b"")) - tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE]))) + tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, CScript([OP_TRUE]))) tx2.wit.vtxinwit.append(CTxInWitness()) # First try a 521-byte stack element - tx2.wit.vtxinwit[0].scriptWitness.stack = [ b'a'*(MAX_SCRIPT_ELEMENT_SIZE+1), witness_program ] + tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a' * (MAX_SCRIPT_ELEMENT_SIZE + 1), witness_program] tx2.rehash() self.update_witness_block_with_transactions(block, [tx, tx2]) @@ -673,12 +800,12 @@ def test_max_witness_program_length(self): # Can create witness outputs that are long, but can't be greater than # 10k bytes to successfully spend self.log.info("Testing maximum witness program length") - assert(len(self.utxo)) + assert (len(self.utxo)) MAX_PROGRAM_LENGTH = 10000 # This program is 19 max pushes (9937 bytes), then 64 more opcode-bytes. 
- long_witness_program = CScript([b'a'*520]*19 + [OP_DROP]*63 + [OP_TRUE]) - assert(len(long_witness_program) == MAX_PROGRAM_LENGTH+1) + long_witness_program = CScript([b'a' * 520] * 19 + [OP_DROP] * 63 + [OP_TRUE]) + assert (len(long_witness_program) == MAX_PROGRAM_LENGTH + 1) long_witness_hash = sha256(long_witness_program) long_scriptPubKey = CScript([OP_0, long_witness_hash]) @@ -686,14 +813,14 @@ def test_max_witness_program_length(self): tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].x16r, self.utxo[0].n), b"")) - tx.vout.append(CTxOut(self.utxo[0].nValue-1000, long_scriptPubKey)) + tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, long_scriptPubKey)) tx.rehash() tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx.x16r, 0), b"")) - tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE]))) + tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, CScript([OP_TRUE]))) tx2.wit.vtxinwit.append(CTxInWitness()) - tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a']*44 + [long_witness_program] + tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a'] * 44 + [long_witness_program] tx2.rehash() self.update_witness_block_with_transactions(block, [tx, tx2]) @@ -701,15 +828,15 @@ def test_max_witness_program_length(self): self.test_node.test_witness_block(block, accepted=False) # Try again with one less byte in the witness program - witness_program = CScript([b'a'*520]*19 + [OP_DROP]*62 + [OP_TRUE]) - assert(len(witness_program) == MAX_PROGRAM_LENGTH) + witness_program = CScript([b'a' * 520] * 19 + [OP_DROP] * 62 + [OP_TRUE]) + assert (len(witness_program) == MAX_PROGRAM_LENGTH) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) tx.vout[0] = CTxOut(tx.vout[0].nValue, scriptPubKey) tx.rehash() tx2.vin[0].prevout.hash = tx.x16r - tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a']*43 + [witness_program] + tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a'] * 43 + [witness_program] tx2.rehash() block.vtx = [block.vtx[0]] 
self.update_witness_block_with_transactions(block, [tx, tx2]) @@ -718,24 +845,23 @@ def test_max_witness_program_length(self): self.utxo.pop() self.utxo.append(UTXO(tx2.x16r, 0, tx2.vout[0].nValue)) - def test_witness_input_length(self): """ Ensure that vin length must match vtxinwit length """ self.log.info("Testing witness input length") - assert(len(self.utxo)) + assert (len(self.utxo)) witness_program = CScript([OP_DROP, OP_TRUE]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) - + # Create a transaction that splits our utxo into many outputs tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].x16r, self.utxo[0].n), b"")) nValue = self.utxo[0].nValue for i in range(10): - tx.vout.append(CTxOut(int(nValue/10), scriptPubKey)) + tx.vout.append(CTxOut(int(nValue / 10), scriptPubKey)) tx.vout[0].nValue -= 1000 - assert(tx.vout[0].nValue >= 0) + assert (tx.vout[0].nValue >= 0) block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx]) @@ -765,7 +891,7 @@ def serialize_with_witness(self): tx2 = BrokenCTransaction() for i in range(10): tx2.vin.append(CTxIn(COutPoint(tx.x16r, i), b"")) - tx2.vout.append(CTxOut(nValue-3000, CScript([OP_TRUE]))) + tx2.vout.append(CTxOut(nValue - 3000, CScript([OP_TRUE]))) # First try using a too long vtxinwit for i in range(11): @@ -787,7 +913,7 @@ def serialize_with_witness(self): # Now make one of the intermediate witnesses be incorrect tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[-1].scriptWitness.stack = [b'a', witness_program] - tx2.wit.vtxinwit[5].scriptWitness.stack = [ witness_program ] + tx2.wit.vtxinwit[5].scriptWitness.stack = [witness_program] block.vtx = [block.vtx[0]] self.update_witness_block_with_transactions(block, [tx2]) @@ -802,18 +928,17 @@ def serialize_with_witness(self): self.utxo.pop() self.utxo.append(UTXO(tx2.x16r, 0, tx2.vout[0].nValue)) - def test_witness_tx_relay_before_segwit_activation(self): self.log.info("Testing relay 
of witness transactions") # Generate a transaction that doesn't require a witness, but send it # with a witness. Should be rejected for premature-witness, but should # not be added to recently rejected list. - assert(len(self.utxo)) + assert (len(self.utxo)) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].x16r, self.utxo[0].n), b"")) - tx.vout.append(CTxOut(self.utxo[0].nValue-1000, CScript([OP_TRUE]))) + tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, CScript([OP_TRUE]))) tx.wit.vtxinwit.append(CTxInWitness()) - tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a' ] + tx.wit.vtxinwit[0].scriptWitness.stack = [b'a'] tx.rehash() tx_hash = tx.x16r @@ -822,7 +947,7 @@ def test_witness_tx_relay_before_segwit_activation(self): # Verify that if a peer doesn't set nServices to include NODE_WITNESS, # the getdata is just for the non-witness portion. self.old_node.announce_tx_and_wait_for_getdata(tx) - assert(self.old_node.last_message["getdata"].inv[0].type == 1) + assert (self.old_node.last_message["getdata"].inv[0].type == 1) # Since we haven't delivered the tx yet, inv'ing the same tx from # a witness transaction ought not result in a getdata. @@ -845,12 +970,11 @@ def test_witness_tx_relay_before_segwit_activation(self): # Cleanup: mine the first transaction and update utxo self.nodes[0].generate(1) - assert_equal(len(self.nodes[0].getrawmempool()), 0) + assert_equal(len(self.nodes[0].getrawmempool()), 0) self.utxo.pop(0) self.utxo.append(UTXO(tx_hash, 0, tx_value)) - # After segwit activates, verify that mempool: # - rejects transactions with unnecessary/extra witnesses # - accepts transactions with valid witnesses @@ -860,12 +984,12 @@ def test_tx_relay_after_segwit_activation(self): # Generate a transaction that doesn't require a witness, but send it # with a witness. Should be rejected because we can't use a witness # when spending a non-witness output. 
- assert(len(self.utxo)) + assert (len(self.utxo)) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].x16r, self.utxo[0].n), b"")) - tx.vout.append(CTxOut(self.utxo[0].nValue-1000, CScript([OP_TRUE]))) + tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, CScript([OP_TRUE]))) tx.wit.vtxinwit.append(CTxInWitness()) - tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a' ] + tx.wit.vtxinwit[0].scriptWitness.stack = [b'a'] tx.rehash() tx_hash = tx.x16r @@ -885,7 +1009,7 @@ def test_tx_relay_after_segwit_activation(self): scriptPubKey = CScript([OP_0, witness_hash]) tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx_hash, 0), b"")) - tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, scriptPubKey)) + tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, scriptPubKey)) tx2.rehash() tx3 = CTransaction() @@ -895,8 +1019,8 @@ def test_tx_relay_after_segwit_activation(self): # Add too-large for IsStandard witness and check that it does not enter reject filter p2sh_program = CScript([OP_TRUE]) p2sh_pubkey = hash160(p2sh_program) - witness_program2 = CScript([b'a'*400000]) - tx3.vout.append(CTxOut(tx2.vout[0].nValue-1000, CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL]))) + witness_program2 = CScript([b'a' * 400000]) + tx3.vout.append(CTxOut(tx2.vout[0].nValue - 1000, CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL]))) tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program2] tx3.rehash() @@ -907,18 +1031,18 @@ def test_tx_relay_after_segwit_activation(self): self.std_node.test_transaction_acceptance(tx3, True, False, b'tx-size') # Remove witness stuffing, instead add extra witness push on stack - tx3.vout[0] = CTxOut(tx2.vout[0].nValue-1000, CScript([OP_TRUE])) - tx3.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)]), witness_program ] + tx3.vout[0] = CTxOut(tx2.vout[0].nValue - 1000, CScript([OP_TRUE])) + tx3.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)]), witness_program] tx3.rehash() self.test_node.test_transaction_acceptance(tx2, with_witness=True, 
accepted=True) self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=False) # Get rid of the extra witness, and verify acceptance. - tx3.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ] + tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program] # Also check that old_node gets a tx announcement, even though this is # a witness transaction. - self.old_node.wait_for_inv([CInv(1, tx2.x16r)]) # wait until tx2 was inv'ed + self.old_node.wait_for_inv([CInv(1, tx2.x16r)]) # wait until tx2 was inv'ed self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True) self.old_node.wait_for_inv([CInv(1, tx3.x16r)]) @@ -927,27 +1051,26 @@ def test_tx_relay_after_segwit_activation(self): raw_tx = self.nodes[0].getrawtransaction(tx3.hash, 1) assert_equal(int(raw_tx["hash"], 16), tx3.calc_x16r(True)) assert_equal(raw_tx["size"], len(tx3.serialize_with_witness())) - vsize = (len(tx3.serialize_with_witness()) + 3*len(tx3.serialize_without_witness()) + 3) / 4 + vsize = (len(tx3.serialize_with_witness()) + 3 * len(tx3.serialize_without_witness()) + 3) / 4 assert_equal(raw_tx["vsize"], vsize) assert_equal(len(raw_tx["vin"][0]["txinwitness"]), 1) assert_equal(raw_tx["vin"][0]["txinwitness"][0], hexlify(witness_program).decode('ascii')) - assert(vsize != raw_tx["size"]) + assert (vsize != raw_tx["size"]) # Cleanup: mine the transactions and update utxo for next test self.nodes[0].generate(1) - assert_equal(len(self.nodes[0].getrawmempool()), 0) + assert_equal(len(self.nodes[0].getrawmempool()), 0) self.utxo.pop(0) self.utxo.append(UTXO(tx3.x16r, 0, tx3.vout[0].nValue)) - # Test that block requests to NODE_WITNESS peer are with MSG_WITNESS_FLAG # This is true regardless of segwit activation. 
# Also test that we don't ask for blocks from unupgraded peers def test_block_relay(self, segwit_activated): self.log.info("Testing block relay") - blocktype = 2|MSG_WITNESS_FLAG + blocktype = 2 | MSG_WITNESS_FLAG # test_node has set NODE_WITNESS, so all getdata requests should be for # witness blocks. @@ -957,20 +1080,20 @@ def test_block_relay(self, segwit_activated): block1.solve() self.test_node.announce_block_and_wait_for_getdata(block1, use_header=False) - assert(self.test_node.last_message["getdata"].inv[0].type == blocktype) + assert (self.test_node.last_message["getdata"].inv[0].type == blocktype) self.test_node.test_witness_block(block1, True) block2 = self.build_next_block(n_version=4) block2.solve() self.test_node.announce_block_and_wait_for_getdata(block2, use_header=True) - assert(self.test_node.last_message["getdata"].inv[0].type == blocktype) + assert (self.test_node.last_message["getdata"].inv[0].type == blocktype) self.test_node.test_witness_block(block2, True) block3 = self.build_next_block(n_version=(VB_TOP_BITS | (1 << 15))) block3.solve() self.test_node.announce_block_and_wait_for_getdata(block3, use_header=True) - assert(self.test_node.last_message["getdata"].inv[0].type == blocktype) + assert (self.test_node.last_message["getdata"].inv[0].type == blocktype) self.test_node.test_witness_block(block3, True) # Check that we can getdata for witness blocks or regular blocks, @@ -981,7 +1104,7 @@ def test_block_relay(self, segwit_activated): chain_height = self.nodes[0].getblockcount() # Pick 10 random blocks on main chain, and verify that getdata's # for MSG_BLOCK, MSG_WITNESS_BLOCK, and rpc getblock() are equal. 
- all_heights = list(range(chain_height+1)) + all_heights = list(range(chain_height + 1)) random.shuffle(all_heights) all_heights = all_heights[0:10] for height in all_heights: @@ -989,7 +1112,7 @@ def test_block_relay(self, segwit_activated): rpc_block = self.nodes[0].getblock(block_hash, False) block_hash = int(block_hash, 16) block = self.test_node.request_block(block_hash, 2) - wit_block = self.test_node.request_block(block_hash, 2|MSG_WITNESS_FLAG) + wit_block = self.test_node.request_block(block_hash, 2 | MSG_WITNESS_FLAG) assert_equal(block.serialize(True), wit_block.serialize(True)) assert_equal(block.serialize(), hex_str_to_bytes(rpc_block)) else: @@ -999,13 +1122,13 @@ def test_block_relay(self, segwit_activated): block = self.build_next_block() self.update_witness_block_with_transactions(block, []) # This gives us a witness commitment. - assert(len(block.vtx[0].wit.vtxinwit) == 1) - assert(len(block.vtx[0].wit.vtxinwit[0].scriptWitness.stack) == 1) + assert (len(block.vtx[0].wit.vtxinwit) == 1) + assert (len(block.vtx[0].wit.vtxinwit[0].scriptWitness.stack) == 1) self.test_node.test_witness_block(block, accepted=True) # Now try to retrieve it... 
rpc_block = self.nodes[0].getblock(block.hash, False) non_wit_block = self.test_node.request_block(block.x16r, 2) - wit_block = self.test_node.request_block(block.x16r, 2|MSG_WITNESS_FLAG) + wit_block = self.test_node.request_block(block.x16r, 2 | MSG_WITNESS_FLAG) assert_equal(wit_block.serialize(True), hex_str_to_bytes(rpc_block)) assert_equal(wit_block.serialize(False), non_wit_block.serialize()) assert_equal(wit_block.serialize(True), block.serialize(True)) @@ -1014,7 +1137,7 @@ def test_block_relay(self, segwit_activated): rpc_details = self.nodes[0].getblock(block.hash, True) assert_equal(rpc_details["size"], len(block.serialize(True))) assert_equal(rpc_details["strippedsize"], len(block.serialize(False))) - weight = 3*len(block.serialize(False)) + len(block.serialize(True)) + weight = 3 * len(block.serialize(False)) + len(block.serialize(True)) assert_equal(rpc_details["weight"], weight) # Upgraded node should not ask for blocks from unupgraded @@ -1031,15 +1154,15 @@ def test_block_relay(self, segwit_activated): # Since 0.14, inv's will only be responded to with a getheaders, so send a header # to announce this block. msg = MsgHeaders() - msg.headers = [ CBlockHeader(block4) ] + msg.headers = [CBlockHeader(block4)] self.old_node.send_message(msg) self.old_node.announce_tx_and_wait_for_getdata(block4.vtx[0]) - assert(block4.x16r not in self.old_node.getdataset) + assert (block4.x16r not in self.old_node.getdataset) # V0 segwit outputs should be standard after activation, but not before. 
def test_standardness_v0(self, segwit_activated): self.log.info("Testing standardness of v0 outputs (%s activation)" % ("after" if segwit_activated else "before")) - assert(len(self.utxo)) + assert (len(self.utxo)) witness_program = CScript([OP_TRUE]) witness_hash = sha256(witness_program) @@ -1051,7 +1174,7 @@ def test_standardness_v0(self, segwit_activated): # First prepare a p2sh output (so that spending it will pass standardness) p2sh_tx = CTransaction() p2sh_tx.vin = [CTxIn(COutPoint(self.utxo[0].x16r, self.utxo[0].n), b"")] - p2sh_tx.vout = [CTxOut(self.utxo[0].nValue-1000, p2sh_scriptPubKey)] + p2sh_tx.vout = [CTxOut(self.utxo[0].nValue - 1000, p2sh_scriptPubKey)] p2sh_tx.rehash() # Mine it on test_node to create the confirmed output. @@ -1063,8 +1186,8 @@ def test_standardness_v0(self, segwit_activated): # Start by creating a transaction with two outputs. tx = CTransaction() tx.vin = [CTxIn(COutPoint(p2sh_tx.x16r, 0), CScript([witness_program]))] - tx.vout = [CTxOut(p2sh_tx.vout[0].nValue-10000, scriptPubKey)] - tx.vout.append(CTxOut(8000, scriptPubKey)) # Might burn this later + tx.vout = [CTxOut(p2sh_tx.vout[0].nValue - 10000, scriptPubKey)] + tx.vout.append(CTxOut(8000, scriptPubKey)) # Might burn this later tx.rehash() self.std_node.test_transaction_acceptance(tx, with_witness=True, accepted=segwit_activated) @@ -1081,7 +1204,7 @@ def test_standardness_v0(self, segwit_activated): else: # if tx wasn't accepted, we just re-spend the p2sh output we started with. tx2.vin = [CTxIn(COutPoint(p2sh_tx.x16r, 0), CScript([witness_program]))] - tx2.vout = [CTxOut(p2sh_tx.vout[0].nValue-1000, scriptPubKey)] + tx2.vout = [CTxOut(p2sh_tx.vout[0].nValue - 1000, scriptPubKey)] tx2.rehash() self.std_node.test_transaction_acceptance(tx2, with_witness=True, accepted=segwit_activated) @@ -1093,7 +1216,7 @@ def test_standardness_v0(self, segwit_activated): # P2PKH output; just send tx's first output back to an anyone-can-spend. 
sync_mempools([self.nodes[0], self.nodes[1]]) tx3.vin = [CTxIn(COutPoint(tx.x16r, 0), b"")] - tx3.vout = [CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE]))] + tx3.vout = [CTxOut(tx.vout[0].nValue - 1000, CScript([OP_TRUE]))] tx3.wit.vtxinwit.append(CTxInWitness()) tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program] tx3.rehash() @@ -1101,7 +1224,7 @@ def test_standardness_v0(self, segwit_activated): else: # tx and tx2 didn't go anywhere; just clean up the p2sh_tx output. tx3.vin = [CTxIn(COutPoint(p2sh_tx.x16r, 0), CScript([witness_program]))] - tx3.vout = [CTxOut(p2sh_tx.vout[0].nValue-1000, witness_program)] + tx3.vout = [CTxOut(p2sh_tx.vout[0].nValue - 1000, witness_program)] tx3.rehash() self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True) @@ -1111,13 +1234,12 @@ def test_standardness_v0(self, segwit_activated): self.utxo.append(UTXO(tx3.x16r, 0, tx3.vout[0].nValue)) assert_equal(len(self.nodes[1].getrawmempool()), 0) - # Verify that future segwit upgraded transactions are non-standard, # but valid in blocks. Can run this before and after segwit activation. def test_segwit_versions(self): self.log.info("Testing standardness/consensus for segwit versions (0-16)") - assert(len(self.utxo)) - NUM_TESTS = 17 # will test OP_0, OP1, ..., OP_16 + assert (len(self.utxo)) + NUM_TESTS = 17 # will test OP_0, OP1, ..., OP_16 if len(self.utxo) < NUM_TESTS: tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].x16r, self.utxo[0].n), b"")) @@ -1139,36 +1261,36 @@ def test_segwit_versions(self): witness_program = CScript([OP_TRUE]) witness_hash = sha256(witness_program) assert_equal(len(self.nodes[1].getrawmempool()), 0) - for version in list(range(OP_1, OP_16+1)) + [OP_0]: + for version in list(range(OP_1, OP_16 + 1)) + [OP_0]: count += 1 # First try to spend to a future version segwit scriptPubKey. 
scriptPubKey = CScript([CScriptOp(version), witness_hash]) tx.vin = [CTxIn(COutPoint(self.utxo[0].x16r, self.utxo[0].n), b"")] - tx.vout = [CTxOut(self.utxo[0].nValue-1000, scriptPubKey)] + tx.vout = [CTxOut(self.utxo[0].nValue - 1000, scriptPubKey)] tx.rehash() self.std_node.test_transaction_acceptance(tx, with_witness=True, accepted=False) self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=True) self.utxo.pop(0) temp_utxo.append(UTXO(tx.x16r, 0, tx.vout[0].nValue)) - self.nodes[0].generate(1) # Mine all the transactions + self.nodes[0].generate(1) # Mine all the transactions sync_blocks(self.nodes) - assert(len(self.nodes[0].getrawmempool()) == 0) + assert (len(self.nodes[0].getrawmempool()) == 0) # Finally, verify that version 0 -> version 1 transactions # are non-standard scriptPubKey = CScript([CScriptOp(OP_1), witness_hash]) tx2 = CTransaction() tx2.vin = [CTxIn(COutPoint(tx.x16r, 0), b"")] - tx2.vout = [CTxOut(tx.vout[0].nValue-1000, scriptPubKey)] + tx2.vout = [CTxOut(tx.vout[0].nValue - 1000, scriptPubKey)] tx2.wit.vtxinwit.append(CTxInWitness()) - tx2.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ] + tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program] tx2.rehash() # Gets accepted to test_node, because standardness of outputs isn't # checked with fRequireStandard self.test_node.test_transaction_acceptance(tx2, with_witness=True, accepted=True) self.std_node.test_transaction_acceptance(tx2, with_witness=True, accepted=False) - temp_utxo.pop() # last entry in temp_utxo was the output we just spent + temp_utxo.pop() # last entry in temp_utxo was the output we just spent temp_utxo.append(UTXO(tx2.x16r, 0, tx2.vout[0].nValue)) # Spend everything in temp_utxo back to an OP_TRUE output. 
@@ -1186,7 +1308,7 @@ def test_segwit_versions(self): self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=False) self.test_node.sync_with_ping() with mininode_lock: - assert(b"reserved for soft-fork upgrades" in self.test_node.last_message["reject"].reason) + assert (b"reserved for soft-fork upgrades" in self.test_node.last_message["reject"].reason) # Building a block with the transaction must be valid, however. block = self.build_next_block() @@ -1197,7 +1319,6 @@ def test_segwit_versions(self): # Add utxo to our list self.utxo.append(UTXO(tx3.x16r, 0, tx3.vout[0].nValue)) - def test_premature_coinbase_witness_spend(self): self.log.info("Testing premature coinbase witness spend") block = self.build_next_block() @@ -1215,7 +1336,7 @@ def test_premature_coinbase_witness_spend(self): spend_tx.vin = [CTxIn(COutPoint(block.vtx[0].x16r, 0), b"")] spend_tx.vout = [CTxOut(block.vtx[0].vout[0].nValue, witness_program)] spend_tx.wit.vtxinwit.append(CTxInWitness()) - spend_tx.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ] + spend_tx.wit.vtxinwit[0].scriptWitness.stack = [witness_program] spend_tx.rehash() # Now test a premature spend. @@ -1232,22 +1353,21 @@ def test_premature_coinbase_witness_spend(self): self.test_node.test_witness_block(block2, accepted=True) sync_blocks(self.nodes) - def test_signature_version_1(self): self.log.info("Testing segwit signature hash version 1") - key = CECKey() - key.set_secretbytes(b"9") - pubkey = CPubKey(key.get_pubkey()) + key = ECKey() + key.generate() + pubkey = key.get_pubkey().get_bytes() witness_program = CScript([pubkey, CScriptOp(OP_CHECKSIG)]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) # First create a witness output for use in the tests. 
- assert(len(self.utxo)) + assert (len(self.utxo)) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].x16r, self.utxo[0].n), b"")) - tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey)) + tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, scriptPubKey)) tx.rehash() self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=True) @@ -1260,7 +1380,7 @@ def test_signature_version_1(self): # Test each hashtype prev_utxo = UTXO(tx.x16r, 0, tx.vout[0].nValue) - for sigflag in [ 0, SIGHASH_ANYONECANPAY ]: + for sigflag in [0, SIGHASH_ANYONECANPAY]: for hashtype in [SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE]: hashtype |= sigflag block = self.build_next_block() @@ -1275,7 +1395,7 @@ def test_signature_version_1(self): # Too-small input value sign_p2pk_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue - 1, key) - block.vtx.pop() # remove last tx + block.vtx.pop() # remove last tx self.update_witness_block_with_transactions(block, [tx]) self.test_node.test_witness_block(block, accepted=False) @@ -1320,7 +1440,7 @@ def test_signature_version_1(self): # Create a slight bias for producing more utxos num_outputs = random.randint(1, 11) random.shuffle(temp_utxos) - assert(len(temp_utxos) > num_inputs) + assert (len(temp_utxos) > num_inputs) tx = CTransaction() total_value = 0 for j in range(num_inputs): @@ -1340,8 +1460,8 @@ def test_signature_version_1(self): if hashtype == SIGHASH_SINGLE and k >= num_outputs: used_sighash_single_out_of_bounds = True tx.rehash() - for l in range(num_outputs): - temp_utxos.append(UTXO(tx.x16r, l, split_value)) + for idx in range(num_outputs): + temp_utxos.append(UTXO(tx.x16r, idx, split_value)) temp_utxos = temp_utxos[num_inputs:] block.vtx.append(tx) @@ -1373,7 +1493,7 @@ def test_signature_version_1(self): script = get_p2pkh_script(pubkeyhash) sig_hash = segwit_version1_signature_hash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue) - signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL 
+ signature = key.sign_ecdsa(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL # Check that we can't have a scriptSig tx2.vin[0].scriptSig = CScript([signature, pubkey]) @@ -1413,12 +1533,11 @@ def test_signature_version_1(self): for i in range(len(tx.vout)): self.utxo.append(UTXO(tx.x16r, i, tx.vout[i].nValue)) - # Test P2SH wrapped witness programs. def test_p2sh_witness(self, segwit_activated): self.log.info("Testing P2SH witness transactions") - assert(len(self.utxo)) + assert (len(self.utxo)) # Prepare the p2sh-wrapped witness output witness_program = CScript([OP_DROP, OP_TRUE]) @@ -1426,12 +1545,12 @@ def test_p2sh_witness(self, segwit_activated): p2wsh_pubkey = CScript([OP_0, witness_hash]) p2sh_witness_hash = hash160(p2wsh_pubkey) scriptPubKey = CScript([OP_HASH160, p2sh_witness_hash, OP_EQUAL]) - scriptSig = CScript([p2wsh_pubkey]) # a push of the redeem script + scriptSig = CScript([p2wsh_pubkey]) # a push of the redeem script # Fund the P2SH output tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].x16r, self.utxo[0].n), b"")) - tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey)) + tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, scriptPubKey)) tx.rehash() # Verify mempool acceptance and block validity @@ -1444,7 +1563,7 @@ def test_p2sh_witness(self, segwit_activated): # Now test attempts to spend the output. 
spend_tx = CTransaction() spend_tx.vin.append(CTxIn(COutPoint(tx.x16r, 0), scriptSig)) - spend_tx.vout.append(CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE]))) + spend_tx.vout.append(CTxOut(tx.vout[0].nValue - 1000, CScript([OP_TRUE]))) spend_tx.rehash() # This transaction should not be accepted into the mempool pre- or @@ -1464,7 +1583,7 @@ def test_p2sh_witness(self, segwit_activated): spend_tx.vin[0].scriptSig = scriptSig spend_tx.rehash() spend_tx.wit.vtxinwit.append(CTxInWitness()) - spend_tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a', witness_program ] + spend_tx.wit.vtxinwit[0].scriptWitness.stack = [b'a', witness_program] # Verify mempool acceptance self.test_node.test_transaction_acceptance(spend_tx, with_witness=True, accepted=segwit_activated) @@ -1491,7 +1610,7 @@ def test_p2sh_witness(self, segwit_activated): def test_upgrade_after_activation(self, node_id): self.log.info("Testing software upgrade after softfork activation") - assert(node_id != 0) # node0 is assumed to be a segwit-active ravend + assert (node_id != 0) # node0 is assumed to be a segwit-active ravend # Make sure the nodes are all up sync_blocks(self.nodes) @@ -1504,7 +1623,7 @@ def test_upgrade_after_activation(self, node_id): sync_blocks(self.nodes) # Make sure that this peer thinks segwit has activated. - assert(get_bip9_status(self.nodes[node_id], 'segwit')['status'] == "active") + assert (get_bip9_status(self.nodes[node_id], 'segwit')['status'] == "active") # Make sure this peers blocks match those of node0. 
height = self.nodes[node_id].getblockcount() @@ -1514,19 +1633,18 @@ def test_upgrade_after_activation(self, node_id): assert_equal(self.nodes[0].getblock(block_hash), self.nodes[node_id].getblock(block_hash)) height -= 1 - def test_witness_sigops(self): """Ensure sigop counting is correct inside witnesses.""" self.log.info("Testing sigops limit") - assert(len(self.utxo)) + assert (len(self.utxo)) # Keep this under MAX_OPS_PER_SCRIPT (201) - witness_program = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKMULTISIG]*5 + [OP_CHECKSIG]*193 + [OP_ENDIF]) + witness_program = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKMULTISIG] * 5 + [OP_CHECKSIG] * 193 + [OP_ENDIF]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) - sigops_per_script = 20*5 + 193*1 + sigops_per_script = 20 * 5 + 193 * 1 # We'll produce 2 extra outputs, one with a program that would take us # over max sig ops, and one with a program that would exactly reach max # sig ops @@ -1534,12 +1652,12 @@ def test_witness_sigops(self): extra_sigops_available = MAX_SIGOP_COST % sigops_per_script # We chose the number of checkmultisigs/checksigs to make this work: - assert(extra_sigops_available < 100) # steer clear of MAX_OPS_PER_SCRIPT + assert (extra_sigops_available < 100) # steer clear of MAX_OPS_PER_SCRIPT # This script, when spent with the first # N(=MAX_SIGOP_COST//sigops_per_script) outputs of our transaction, # would push us just over the block sigop limit. 
- witness_program_toomany = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG]*(extra_sigops_available + 1) + [OP_ENDIF]) + witness_program_toomany = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG] * (extra_sigops_available + 1) + [OP_ENDIF]) witness_hash_toomany = sha256(witness_program_toomany) scriptPubKey_toomany = CScript([OP_0, witness_hash_toomany]) @@ -1567,12 +1685,12 @@ def test_witness_sigops(self): # If we try to spend the first n-1 outputs from tx, that should be # too many sigops. total_value = 0 - for i in range(outputs-1): + for i in range(outputs - 1): tx2.vin.append(CTxIn(COutPoint(tx.x16r, i), b"")) tx2.wit.vtxinwit.append(CTxInWitness()) - tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program ] + tx2.wit.vtxinwit[-1].scriptWitness.stack = [witness_program] total_value += tx.vout[i].nValue - tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program_toomany ] + tx2.wit.vtxinwit[-1].scriptWitness.stack = [witness_program_toomany] tx2.vout.append(CTxOut(total_value, CScript([OP_TRUE]))) tx2.rehash() @@ -1583,7 +1701,7 @@ def test_witness_sigops(self): # Try dropping the last input in tx2, and add an output that has # too many sigops (contributing to legacy sigop count). checksig_count = (extra_sigops_available // 4) + 1 - scriptPubKey_checksigs = CScript([OP_CHECKSIG]*checksig_count) + scriptPubKey_checksigs = CScript([OP_CHECKSIG] * checksig_count) tx2.vout.append(CTxOut(0, scriptPubKey_checksigs)) tx2.vin.pop() tx2.wit.vtxinwit.pop() @@ -1595,7 +1713,7 @@ def test_witness_sigops(self): # If we drop the last checksig in this output, the tx should succeed. 
block_4 = self.build_next_block() - tx2.vout[-1].scriptPubKey = CScript([OP_CHECKSIG]*(checksig_count-1)) + tx2.vout[-1].scriptPubKey = CScript([OP_CHECKSIG] * (checksig_count - 1)) tx2.rehash() self.update_witness_block_with_transactions(block_4, [tx2]) self.test_node.test_witness_block(block_4, accepted=True) @@ -1609,9 +1727,9 @@ def test_witness_sigops(self): # output of tx block_5 = self.build_next_block() tx2.vout.pop() - tx2.vin.append(CTxIn(COutPoint(tx.x16r, outputs-1), b"")) + tx2.vin.append(CTxIn(COutPoint(tx.x16r, outputs - 1), b"")) tx2.wit.vtxinwit.append(CTxInWitness()) - tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program_justright ] + tx2.wit.vtxinwit[-1].scriptWitness.stack = [witness_program_justright] tx2.rehash() self.update_witness_block_with_transactions(block_5, [tx2]) self.test_node.test_witness_block(block_5, accepted=True) @@ -1629,7 +1747,7 @@ def test_getblocktemplate_before_lockin(self): assert_equal((block_version & (1 << VB_WITNESS_BIT) != 0), node == self.nodes[0]) # If we don't specify the segwit rule, then we won't get a default # commitment. 
- assert('default_witness_commitment' not in gbt_results) + assert ('default_witness_commitment' not in gbt_results) # Workaround: # Can either change the tip, or change the mempool and wait 5 seconds @@ -1637,22 +1755,22 @@ def test_getblocktemplate_before_lockin(self): txid = int(self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1), 16) # Using mocktime lets us avoid sleep() sync_mempools(self.nodes) - self.nodes[0].setmocktime(int(time.time())+10) - self.nodes[2].setmocktime(int(time.time())+10) + self.nodes[0].setmocktime(int(time.time()) + 10) + self.nodes[2].setmocktime(int(time.time()) + 10) for node in [self.nodes[0], self.nodes[2]]: - gbt_results = node.getblocktemplate({"rules" : ["segwit"]}) + gbt_results = node.getblocktemplate({"rules": ["segwit"]}) block_version = gbt_results['version'] if node == self.nodes[2]: # If this is a non-segwit node, we should still not get a witness # commitment, nor a version bit signalling segwit. assert_equal(block_version & (1 << VB_WITNESS_BIT), 0) - assert('default_witness_commitment' not in gbt_results) + assert ('default_witness_commitment' not in gbt_results) else: # For segwit-aware nodes, check the version bit and the witness # commitment are correct. - assert(block_version & (1 << VB_WITNESS_BIT) != 0) - assert('default_witness_commitment' in gbt_results) + assert (block_version & (1 << VB_WITNESS_BIT) != 0) + assert ('default_witness_commitment' in gbt_results) witness_commitment = gbt_results['default_witness_commitment'] # Check that default_witness_commitment is present. @@ -1671,13 +1789,12 @@ def test_uncompressed_pubkey(self): self.log.info("Testing uncompressed pubkeys") # Segwit transactions using uncompressed pubkeys are not accepted # under default policy, but should still pass consensus. 
- key = CECKey() - key.set_secretbytes(b"9") - key.set_compressed(False) - pubkey = CPubKey(key.get_pubkey()) - assert_equal(len(pubkey), 65) # This should be an uncompressed pubkey + key = ECKey() + key.generate(False) + pubkey = key.get_pubkey().get_bytes() + assert_equal((len(pubkey)), 65) # This should be an uncompressed pubkey - assert(len(self.utxo) > 0) + assert (len(self.utxo) > 0) utxo = self.utxo.pop(0) # Test 1: P2WPKH @@ -1686,7 +1803,7 @@ def test_uncompressed_pubkey(self): scriptPKH = CScript([OP_0, pubkeyhash]) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(utxo.x16r, utxo.n), b"")) - tx.vout.append(CTxOut(utxo.nValue-1000, scriptPKH)) + tx.vout.append(CTxOut(utxo.nValue - 1000, scriptPKH)) tx.rehash() # Confirm it in a block. @@ -1702,12 +1819,12 @@ def test_uncompressed_pubkey(self): tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx.x16r, 0), b"")) - tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, scriptWSH)) + tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, scriptWSH)) script = get_p2pkh_script(pubkeyhash) sig_hash = segwit_version1_signature_hash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue) - signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL + signature = key.sign_ecdsa(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL tx2.wit.vtxinwit.append(CTxInWitness()) - tx2.wit.vtxinwit[0].scriptWitness.stack = [ signature, pubkey ] + tx2.wit.vtxinwit[0].scriptWitness.stack = [signature, pubkey] tx2.rehash() # Should fail policy test. 
@@ -1726,7 +1843,7 @@ def test_uncompressed_pubkey(self): tx3 = CTransaction() tx3.vin.append(CTxIn(COutPoint(tx2.x16r, 0), b"")) - tx3.vout.append(CTxOut(tx2.vout[0].nValue-1000, scriptP2SH)) + tx3.vout.append(CTxOut(tx2.vout[0].nValue - 1000, scriptP2SH)) tx3.wit.vtxinwit.append(CTxInWitness()) sign_p2pk_witness_input(witness_program, tx3, 0, SIGHASH_ALL, tx2.vout[0].nValue, key) @@ -1743,7 +1860,7 @@ def test_uncompressed_pubkey(self): scriptPubKey = get_p2pkh_script(pubkeyhash) tx4 = CTransaction() tx4.vin.append(CTxIn(COutPoint(tx3.x16r, 0), scriptSig)) - tx4.vout.append(CTxOut(tx3.vout[0].nValue-1000, scriptPubKey)) + tx4.vout.append(CTxOut(tx3.vout[0].nValue - 1000, scriptPubKey)) tx4.wit.vtxinwit.append(CTxInWitness()) sign_p2pk_witness_input(witness_program, tx4, 0, SIGHASH_ALL, tx3.vout[0].nValue, key) @@ -1757,9 +1874,9 @@ def test_uncompressed_pubkey(self): # transactions. tx5 = CTransaction() tx5.vin.append(CTxIn(COutPoint(tx4.x16r, 0), b"")) - tx5.vout.append(CTxOut(tx4.vout[0].nValue-1000, CScript([OP_TRUE]))) + tx5.vout.append(CTxOut(tx4.vout[0].nValue - 1000, CScript([OP_TRUE]))) (sig_hash, _) = signature_hash(scriptPubKey, tx5, 0, SIGHASH_ALL) - signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL + signature = key.sign_ecdsa(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL tx5.vin[0].scriptSig = CScript([signature, pubkey]) tx5.rehash() # Should pass policy and consensus. 
@@ -1779,7 +1896,7 @@ def test_non_standard_witness(self): p2wsh_scripts = [] - assert(len(self.utxo)) + assert (len(self.utxo)) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].x16r, self.utxo[0].n), b"")) @@ -1803,13 +1920,13 @@ def test_non_standard_witness(self): p2sh_txs = [] for i in range(len(scripts)): p2wsh_tx = CTransaction() - p2wsh_tx.vin.append(CTxIn(COutPoint(txid,i*2))) + p2wsh_tx.vin.append(CTxIn(COutPoint(txid, i * 2))) p2wsh_tx.vout.append(CTxOut(outputvalue - 5000, CScript([OP_0, hash160(hex_str_to_bytes(""))]))) p2wsh_tx.wit.vtxinwit.append(CTxInWitness()) p2wsh_tx.rehash() p2wsh_txs.append(p2wsh_tx) p2sh_tx = CTransaction() - p2sh_tx.vin.append(CTxIn(COutPoint(txid,i*2+1), CScript([p2wsh_scripts[i]]))) + p2sh_tx.vin.append(CTxIn(COutPoint(txid, i * 2 + 1), CScript([p2wsh_scripts[i]]))) p2sh_tx.vout.append(CTxOut(outputvalue - 5000, CScript([OP_0, hash160(hex_str_to_bytes(""))]))) p2sh_tx.wit.vtxinwit.append(CTxInWitness()) p2sh_tx.rehash() @@ -1867,85 +1984,5 @@ def test_non_standard_witness(self): self.utxo.pop(0) - def run_test(self): - # Setup the p2p connections and start up the network thread. 
- self.test_node = TestNode() # sets NODE_WITNESS|NODE_NETWORK - self.old_node = TestNode() # only NODE_NETWORK - self.std_node = TestNode() # for testing node1 (fRequireStandard=true) - - self.p2p_connections = [self.test_node, self.old_node] - - self.connections = [] - self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.test_node, services=NODE_NETWORK|NODE_WITNESS)) - self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.old_node, services=NODE_NETWORK)) - self.connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], self.std_node, services=NODE_NETWORK|NODE_WITNESS)) - self.test_node.add_connection(self.connections[0]) - self.old_node.add_connection(self.connections[1]) - self.std_node.add_connection(self.connections[2]) - - NetworkThread().start() # Start up network handling in another thread - - # Keep a place to store utxo's that can be used in later tests - self.utxo = [] - - # Test logic begins here - self.test_node.wait_for_verack() - - self.log.info("Starting tests before segwit lock in:") - - self.test_witness_services() # Verifies NODE_WITNESS - self.test_non_witness_transaction() # non-witness tx's are accepted - self.test_unnecessary_witness_before_segwit_activation() - self.test_block_relay(segwit_activated=False) - - # Advance to segwit being 'started' - self.advance_to_segwit_started() - sync_blocks(self.nodes) - self.test_getblocktemplate_before_lockin() - - sync_blocks(self.nodes) - - # At lockin, nothing should change. 
- self.log.info("Testing behavior post lockin, pre-activation") - self.advance_to_segwit_lockin() - - # Retest unnecessary witnesses - self.test_unnecessary_witness_before_segwit_activation() - self.test_witness_tx_relay_before_segwit_activation() - self.test_block_relay(segwit_activated=False) - self.test_p2sh_witness(segwit_activated=False) - self.test_standardness_v0(segwit_activated=False) - - sync_blocks(self.nodes) - - # Now activate segwit - self.log.info("Testing behavior after segwit activation") - self.advance_to_segwit_active() - - sync_blocks(self.nodes) - - # Test P2SH witness handling again - self.test_p2sh_witness(segwit_activated=True) - self.test_witness_commitments() - self.test_block_malleability() - self.test_witness_block_size() - self.test_submit_block() - self.test_extra_witness_data() - self.test_max_witness_push_length() - self.test_max_witness_program_length() - self.test_witness_input_length() - self.test_block_relay(segwit_activated=True) - self.test_tx_relay_after_segwit_activation() - self.test_standardness_v0(segwit_activated=True) - self.test_segwit_versions() - self.test_premature_coinbase_witness_spend() - self.test_uncompressed_pubkey() - self.test_signature_version_1() - self.test_non_standard_witness() - sync_blocks(self.nodes) - self.test_upgrade_after_activation(node_id=2) - self.test_witness_sigops() - - if __name__ == '__main__': SegWitTest().main() diff --git a/test/functional/p2p_sendheaders.py b/test/functional/p2p_sendheaders.py index 6c2b71b1e7..3b0eafae45 100755 --- a/test/functional/p2p_sendheaders.py +++ b/test/functional/p2p_sendheaders.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/p2p_timeouts.py b/test/functional/p2p_timeouts.py index 578594f36b..5fa9371393 100755 --- a/test/functional/p2p_timeouts.py +++ b/test/functional/p2p_timeouts.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/p2p_unrequested_blocks.py b/test/functional/p2p_unrequested_blocks.py index e3f1f45f4b..037a661985 100755 --- a/test/functional/p2p_unrequested_blocks.py +++ b/test/functional/p2p_unrequested_blocks.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/rpc_addressindex.py b/test/functional/rpc_addressindex.py index ab2b2c9f19..21b4373350 100755 --- a/test/functional/rpc_addressindex.py +++ b/test/functional/rpc_addressindex.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2015 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/rpc_assettransfer.py b/test/functional/rpc_assettransfer.py index cd9268aff1..77030e36f1 100755 --- a/test/functional/rpc_assettransfer.py +++ b/test/functional/rpc_assettransfer.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2015 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/rpc_bind.py b/test/functional/rpc_bind.py index 77edd91855..4049da0527 100755 --- a/test/functional/rpc_bind.py +++ b/test/functional/rpc_bind.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/rpc_blockchain.py b/test/functional/rpc_blockchain.py index 67a37eeb04..0987759690 100755 --- a/test/functional/rpc_blockchain.py +++ b/test/functional/rpc_blockchain.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/rpc_decodescript.py b/test/functional/rpc_decodescript.py index 003877fdc6..0af92b9648 100755 --- a/test/functional/rpc_decodescript.py +++ b/test/functional/rpc_decodescript.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/rpc_deprecated.py b/test/functional/rpc_deprecated.py index af6c784232..ecd9480888 100755 --- a/test/functional/rpc_deprecated.py +++ b/test/functional/rpc_deprecated.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/rpc_fundrawtransaction.py b/test/functional/rpc_fundrawtransaction.py index 43dcc19995..7573a13152 100755 --- a/test/functional/rpc_fundrawtransaction.py +++ b/test/functional/rpc_fundrawtransaction.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
@@ -463,12 +463,9 @@ def run_test(self): # drain the keypool self.nodes[1].getnewaddress() self.nodes[1].getrawchangeaddress() - inputs = [] - outputs = {self.nodes[0].getnewaddress():1.1} - rawtx = self.nodes[1].createrawtransaction(inputs, outputs) # fund a transaction that requires a new key for the change output # creating the key must be impossible because the wallet is locked - assert_raises_rpc_error(-4, "Keypool ran out, please call keypoolrefill first", self.nodes[1].fundrawtransaction, rawtx) + assert_raises_rpc_error(-12, "Keypool ran out, please call keypoolrefill first", self.nodes[1].getnewaddress) #refill the keypool self.nodes[1].walletpassphrase("test", 100) diff --git a/test/functional/rpc_getchaintips.py b/test/functional/rpc_getchaintips.py index 30700ec41c..bd5c20e27e 100755 --- a/test/functional/rpc_getchaintips.py +++ b/test/functional/rpc_getchaintips.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/rpc_invalidateblock.py b/test/functional/rpc_invalidateblock.py index 53003689dd..a13f96d618 100755 --- a/test/functional/rpc_invalidateblock.py +++ b/test/functional/rpc_invalidateblock.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/rpc_misc.py b/test/functional/rpc_misc.py index 13907ac25a..69846c6ab0 100755 --- a/test/functional/rpc_misc.py +++ b/test/functional/rpc_misc.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2019 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/rpc_named_arguments.py b/test/functional/rpc_named_arguments.py index 5c9f203724..876b23a602 100755 --- a/test/functional/rpc_named_arguments.py +++ b/test/functional/rpc_named_arguments.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/rpc_net.py b/test/functional/rpc_net.py index 6652a97583..6083ab53f1 100755 --- a/test/functional/rpc_net.py +++ b/test/functional/rpc_net.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/rpc_preciousblock.py b/test/functional/rpc_preciousblock.py index 6979b46166..9c102efa04 100755 --- a/test/functional/rpc_preciousblock.py +++ b/test/functional/rpc_preciousblock.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/rpc_rawtransaction.py b/test/functional/rpc_rawtransaction.py index d07645c461..7067181328 100755 --- a/test/functional/rpc_rawtransaction.py +++ b/test/functional/rpc_rawtransaction.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/rpc_setban.py b/test/functional/rpc_setban.py index d3634e93a1..d904bed7e2 100755 --- a/test/functional/rpc_setban.py +++ b/test/functional/rpc_setban.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2019 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/rpc_signmessage.py b/test/functional/rpc_signmessage.py index 3a213310ed..20b0bdc16b 100755 --- a/test/functional/rpc_signmessage.py +++ b/test/functional/rpc_signmessage.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/rpc_signrawtransaction.py b/test/functional/rpc_signrawtransaction.py index 93cf7537df..1f5e022d5f 100755 --- a/test/functional/rpc_signrawtransaction.py +++ b/test/functional/rpc_signrawtransaction.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/rpc_spentindex.py b/test/functional/rpc_spentindex.py index bae5db6ea5..059fa8434e 100755 --- a/test/functional/rpc_spentindex.py +++ b/test/functional/rpc_spentindex.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2015 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/rpc_timestampindex.py b/test/functional/rpc_timestampindex.py index 2cce0d3bcd..eaee7ca4d2 100755 --- a/test/functional/rpc_timestampindex.py +++ b/test/functional/rpc_timestampindex.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2015 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/rpc_txindex.py b/test/functional/rpc_txindex.py index 4267fc2dff..b4e162d803 100755 --- a/test/functional/rpc_txindex.py +++ b/test/functional/rpc_txindex.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2015 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/rpc_txoutproof.py b/test/functional/rpc_txoutproof.py index 1d0171d1e0..04f1950a95 100755 --- a/test/functional/rpc_txoutproof.py +++ b/test/functional/rpc_txoutproof.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/rpc_uptime.py b/test/functional/rpc_uptime.py index 4c0b041528..82cc40c9fb 100755 --- a/test/functional/rpc_uptime.py +++ b/test/functional/rpc_uptime.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the RPC call related to the uptime command. diff --git a/test/functional/rpc_users.py b/test/functional/rpc_users.py index 264b7e5f95..74152fe29b 100755 --- a/test/functional/rpc_users.py +++ b/test/functional/rpc_users.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/test_framework/address.py b/test/functional/test_framework/address.py index 189f5fa030..93c1df309e 100644 --- a/test/functional/test_framework/address.py +++ b/test/functional/test_framework/address.py @@ -1,23 +1,23 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Encode and decode BASE58, P2PKH and P2SH addresses.""" from .script import hash256, hash160, sha256, CScript, OP_0 -from .util import bytes_to_hex_str, hex_str_to_bytes +from .util import hex_str_to_bytes ADDRESS_BCRT1_UNSPENDABLE = 'n1BurnXXXXXXXXXXXXXXXXXXXXXXU1qejP' - chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' + def byte_to_base58(b, version): result = '' - hex_str = bytes_to_hex_str(b) - hex_str = bytes_to_hex_str(chr(version).encode('latin-1')) + hex_str - checksum = bytes_to_hex_str(hash256(hex_str_to_bytes(hex_str))) + hex_str = b.hex() + hex_str = chr(version).encode('latin-1').hex() + hex_str + checksum = hash256(hex_str_to_bytes(hex_str)).hex() hex_str += checksum[:8] value = int('0x' + hex_str, 0) while value > 0: @@ -28,46 +28,54 @@ def byte_to_base58(b, version): hex_str = hex_str[2:] return result + # TODO: def base58_decode -def keyhash_to_p2pkh(hash_input, main = False): +def keyhash_to_p2pkh(hash_input, main=False): assert (len(hash_input) == 20) version = 0 if main else 111 return byte_to_base58(hash_input, version) -def scripthash_to_p2sh(hash_in, main = False): + +def scripthash_to_p2sh(hash_in, main=False): assert (len(hash_in) == 20) version = 5 if main else 196 return byte_to_base58(hash_in, version) -def key_to_p2pkh(key, main = False): + +def key_to_p2pkh(key, main=False): key = check_key(key) return keyhash_to_p2pkh(hash160(key), main) -def script_to_p2sh(script, main = False): + +def script_to_p2sh(script, main=False): script = check_script(script) return scripthash_to_p2sh(hash160(script), main) -def key_to_p2sh_p2wpkh(key, main = False): + +def key_to_p2sh_p2wpkh(key, main=False): key = check_key(key) p2shscript = CScript([OP_0, hash160(key)]) return script_to_p2sh(p2shscript, main) -def script_to_p2sh_p2wsh(script, main = False): + +def script_to_p2sh_p2wsh(script, main=False): script = check_script(script) p2shscript = CScript([OP_0, sha256(script)]) return script_to_p2sh(p2shscript, main) + def 
check_key(key): if type(key) is str: - key = hex_str_to_bytes(key) # Assuming this is hex string + key = hex_str_to_bytes(key) # Assuming this is hex string if type(key) is bytes and (len(key) == 33 or len(key) == 65): return key assert False + def check_script(script): if type(script) is str: - script = hex_str_to_bytes(script) # Assuming this is hex string + script = hex_str_to_bytes(script) # Assuming this is hex string if type(script) is bytes or type(script) is CScript: return script assert False diff --git a/test/functional/test_framework/authproxy.py b/test/functional/test_framework/authproxy.py index 2a42b540dd..81d163a311 100644 --- a/test/functional/test_framework/authproxy.py +++ b/test/functional/test_framework/authproxy.py @@ -1,8 +1,7 @@ # Copyright (c) 2011 Jeff Garzik -# # Previous copyright, from python-jsonrpc/jsonrpc/proxy.py: -# # Copyright (c) 2007 Jan-Klaas Kollhof +# Copyright (c) 2017-2020 The Raven Core developers # # This file is part of jsonrpc. # @@ -41,6 +40,7 @@ import http.client import json import logging +import os import socket import time import urllib.parse @@ -77,19 +77,12 @@ def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connect self._service_name = service_name self.ensure_ascii = ensure_ascii # can be toggled on the fly by tests self.__url = urllib.parse.urlparse(service_url) - port = 80 if self.__url.port is None else self.__url.port user = None if self.__url.username is None else self.__url.username.encode('utf8') passwd = None if self.__url.password is None else self.__url.password.encode('utf8') auth_pair = user + b':' + passwd self.__auth_header = b'Basic ' + base64.b64encode(auth_pair) - - if connection: - # Callables re-use the connection of the original proxy - self.__conn = connection - elif self.__url.scheme == 'https': - self.__conn = http.client.HTTPSConnection(self.__url.hostname, port, timeout=timeout) - else: - self.__conn = http.client.HTTPConnection(self.__url.hostname, port, 
timeout=timeout) + self.timeout = timeout + self._set_conn(connection) def __getattr__(self, name): if name.startswith('__') and name.endswith('__'): @@ -108,6 +101,10 @@ def _request(self, method, path, post_data): 'User-Agent': USER_AGENT, 'Authorization': self.__auth_header, 'Content-type': 'application/json'} + if os.name == 'nt': + # Windows somehow does not like to re-use connections + # TODO: Find out why the connection would disconnect occasionally and make it reusable on Windows + self._set_conn() try: self.__conn.request(method, path, post_data, headers) return self._get_response() @@ -120,12 +117,6 @@ def _request(self, method, path, post_data): return self._get_response() else: raise - except http.client.UnknownProtocol as e: - self.__conn.close() - self.__conn.request(method, path, post_data, headers) - print("~~~~~~~~~~~~~~~~~ Protocol Exception ~~~~~~~~~~~~~~~~~~~~~~~~~~") - print(e) - return self._get_response() except (BrokenPipeError, ConnectionResetError) as e: # Python 3.5+ raises BrokenPipeError instead of BadStatusLine when the connection was reset # ConnectionResetError happens on FreeBSD with Python 3.4 @@ -138,8 +129,7 @@ def _request(self, method, path, post_data): def get_request(self, *args, **argsn): AuthServiceProxy.__id_count += 1 - log.debug("-%s-> %s %s" % (AuthServiceProxy.__id_count, self._service_name, - json.dumps(args, default=encode_decimal, ensure_ascii=self.ensure_ascii))) + log.debug("-{}-> {} {}".format(AuthServiceProxy.__id_count, self._service_name, json.dumps(args or argsn, default=encode_decimal, ensure_ascii=self.ensure_ascii),)) if args and argsn: raise ValueError('Cannot handle both named and positional arguments') return {'version': '1.1', @@ -157,11 +147,9 @@ def __call__(self, *args, **argsn): log.debug("-----------------------------------------------------") raise JSONRPCException(response['error'], status) elif 'result' not in response: - raise JSONRPCException({ - 'code': -343, 'message': 'missing JSON-RPC 
result'}, status) + raise JSONRPCException({'code': -343, 'message': 'missing JSON-RPC result'}, status) elif status != HTTPStatus.OK: - raise JSONRPCException({ - 'code': -342, 'message': 'non-200 HTTP status code but no JSON-RPC error'}, status) + raise JSONRPCException({'code': -342, 'message': 'non-200 HTTP status code but no JSON-RPC error'}, status) else: return response['result'] @@ -170,8 +158,7 @@ def batch(self, rpc_call_list): log.debug("--> " + postdata) response, status = self._request('POST', self.__url.path, postdata.encode('utf-8')) if status != HTTPStatus.OK: - raise JSONRPCException({ - 'code': -342, 'message': 'non-200 HTTP status code but no JSON-RPC error'}, status) + raise JSONRPCException({'code': -342, 'message': 'non-200 HTTP status code but no JSON-RPC error'}, status) return response def _get_response(self): @@ -183,22 +170,13 @@ def _get_response(self): 'code': -344, 'message': '%r RPC took longer than %f seconds. Consider ' 'using larger timeout for calls that take ' - 'longer to return.' % (self._service_name, - self.__conn.timeout)}) - except http.client.RemoteDisconnected as e: - log.debug("~~~~~~~ _get_response Remote Disconnected Exception: %s ~~~~~~~~~~~", e) - raise - except http.client.UnknownProtocol as e: - log.debug("~~~~~~~ _get_response Unknown Protocol Exception: %s ~~~~~~~~~~~", e) - raise + 'longer to return.' 
% (self._service_name, self.__conn.timeout)}) if http_response is None: - raise JSONRPCException({ - 'code': -342, 'message': 'missing HTTP response from server'}) + raise JSONRPCException({'code': -342, 'message': 'missing HTTP response from server'}) content_type = http_response.getheader('Content-Type') if content_type != 'application/json': - raise JSONRPCException({'code': -342, 'message': 'non-JSON HTTP response with \'%i %s\' from server' % (http_response.status, http_response.reason)}, - http_response.status) + raise JSONRPCException({'code': -342, 'message': 'non-JSON HTTP response with \'%i %s\' from server' % (http_response.status, http_response.reason)}, http_response.status) response_data = http_response.read().decode('utf8') response = json.loads(response_data, parse_float=decimal.Decimal) @@ -211,3 +189,13 @@ def _get_response(self): def __truediv__(self, relative_uri): return AuthServiceProxy("{}/{}".format(self.__service_url, relative_uri), self._service_name, connection=self.__conn) + + def _set_conn(self, connection=None): + port = 80 if self.__url.port is None else self.__url.port + if connection: + self.__conn = connection + self.timeout = connection.timeout + elif self.__url.scheme == 'https': + self.__conn = http.client.HTTPSConnection(self.__url.hostname, port, timeout=self.timeout) + else: + self.__conn = http.client.HTTPConnection(self.__url.hostname, port, timeout=self.timeout) diff --git a/test/functional/test_framework/bignum.py b/test/functional/test_framework/bignum.py index f3ce681e7a..56ee0ba133 100644 --- a/test/functional/test_framework/bignum.py +++ b/test/functional/test_framework/bignum.py @@ -1,11 +1,10 @@ #!/usr/bin/env python3 -# +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """ Big number routines. - This file is copied from python-ravenlib. 
""" diff --git a/test/functional/test_framework/blockstore.py b/test/functional/test_framework/blockstore.py deleted file mode 100644 index 3eaa1feab1..0000000000 --- a/test/functional/test_framework/blockstore.py +++ /dev/null @@ -1,160 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers -# Distributed under the MIT software license, see the accompanying -# file COPYING or http://www.opensource.org/licenses/mit-license.php. - -"""BlockStore and TxStore helper classes.""" - -from .mininode import logging, CBlock, MsgHeaders, CBlockHeader, MsgGeneric, CBlockLocator -from io import BytesIO -import dbm.dumb as dbmd - -logger = logging.getLogger("TestFramework.blockstore") - -class BlockStore: - """BlockStore helper class. - - BlockStore keeps a map of blocks and implements helper functions for - responding to getheaders and getdata, and for constructing a getheaders - message. - """ - - def __init__(self, datadir): - self.blockDB = dbmd.open(datadir + "/blocks", 'c') - self.currentBlock = 0 - self.headers_map = dict() - - def close(self): - self.blockDB.close() - - def erase(self, blockhash): - del self.blockDB[repr(blockhash)] - - # lookup an entry and return the item as raw bytes - def get(self, blockhash): - try: - value = self.blockDB[repr(blockhash)] - except KeyError: - return None - return value - - # lookup an entry and return it as a CBlock - def get_block(self, blockhash): - ret = None - serialized_block = self.get(blockhash) - if serialized_block is not None: - f = BytesIO(serialized_block) - ret = CBlock() - ret.deserialize(f) - ret.calc_x16r() - return ret - - def get_header(self, blockhash): - try: - return self.headers_map[blockhash] - except KeyError: - return None - - # Note: this pulls full blocks out of the database just to retrieve - # the headers -- perhaps we could keep a separate data structure - # to avoid this overhead. 
- def headers_for(self, locator, hash_stop, current_tip=None): - if current_tip is None: - current_tip = self.currentBlock - current_block_header = self.get_header(current_tip) - if current_block_header is None: - return None - - response = MsgHeaders() - headers_list = [ current_block_header ] - max_headers = 2000 - while headers_list[0].sha256 not in locator.vHave: - prev_block_hash = headers_list[0].hashPrevBlock - prev_block_header = self.get_header(prev_block_hash) - if prev_block_header is not None: - headers_list.insert(0, prev_block_header) - else: - break - headers_list = headers_list[:max_headers] # truncate if we have too many - hash_list = [x.sha256 for x in headers_list] - index = len(headers_list) - if hash_stop in hash_list: - index = hash_list.index(hash_stop)+1 - response.headers = headers_list[:index] - return response - - def add_block(self, block): - block.calc_x16r() - try: - self.blockDB[repr(block.sha256)] = bytes(block.serialize()) - except TypeError: - logger.exception("Unexpected Add Block Error") - self.currentBlock = block.sha256 - self.headers_map[block.sha256] = CBlockHeader(block) - - def add_header(self, header): - self.headers_map[header.sha256] = header - - # lookup the hashes in "inv", and return p2p messages for delivering - # blocks found. 
- def get_blocks(self, inv): - responses = [] - for i in inv: - if i.type == 2: # MSG_BLOCK - data = self.get(i.hash) - if data is not None: - # Use msg_generic to avoid re-serialization - responses.append(MsgGeneric(b"block", data)) - return responses - - def get_locator(self, current_tip=None): - if current_tip is None: - current_tip = self.currentBlock - r = [] - counter = 0 - step = 1 - last_block = self.get_block(current_tip) - while last_block is not None: - r.append(last_block.hashPrevBlock) - for _ in range(step): - last_block = self.get_block(last_block.hashPrevBlock) - if last_block is None: - break - counter += 1 - if counter > 10: - step *= 2 - locator = CBlockLocator() - locator.vHave = r - return locator - -class TxStore: - def __init__(self, datadir): - self.txDB = dbmd.open(datadir + "/transactions", 'c') - - def close(self): - self.txDB.close() - - # lookup an entry and return the item as raw bytes - def get(self, txhash): - try: - value = self.txDB[repr(txhash)] - except KeyError: - return None - return value - - def add_transaction(self, tx): - tx.calc_x16r() - try: - self.txDB[repr(tx.sha256)] = bytes(tx.serialize()) - except TypeError: - logger.exception("Unexpected Add Transaction Error") - - def get_transactions(self, inv): - responses = [] - for i in inv: - if i.type == 1: # MSG_TX - tx = self.get(i.hash) - if tx is not None: - responses.append(MsgGeneric(b"tx", tx)) - return responses diff --git a/test/functional/test_framework/blocktools.py b/test/functional/test_framework/blocktools.py index 70b72b72ce..ad1c943a5c 100644 --- a/test/functional/test_framework/blocktools.py +++ b/test/functional/test_framework/blocktools.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or 
http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/test_framework/comptool.py b/test/functional/test_framework/comptool.py deleted file mode 100755 index 50e9b07f09..0000000000 --- a/test/functional/test_framework/comptool.py +++ /dev/null @@ -1,409 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers -# Distributed under the MIT software license, see the accompanying -# file COPYING or http://www.opensource.org/licenses/mit-license.php. - -""" -Compare two or more ravends to each other. - -To use, create a class that implements get_tests(), and pass it in -as the test generator to TestManager. get_tests() should be a python -generator that returns TestInstance objects. See below for definition. - -TestNode behaves as follows: - Configure with a BlockStore and TxStore - on_inv: log the message but don't request - on_headers: log the chain tip - on_pong: update ping response map (for synchronization) - on_getheaders: provide headers via BlockStore - on_getdata: provide blocks via BlockStore -""" - -from .mininode import (NodeConn, CBlock, MsgInv, CInv, MsgHeaders, MsgMempool, mininode_lock, NodeConnCB, - MsgGetHeaders, MsgPing, MsgBlock, CBlockHeader, MAX_INV_SZ, CTransaction) -from .blockstore import BlockStore, TxStore -from .util import p2p_port, wait_until - -import logging - -logger=logging.getLogger("TestFramework.comptool") - -#global mininode_lock - -class RejectResult: - """Outcome that expects rejection of a transaction or block.""" - def __init__(self, code, reason=b''): - self.code = code - self.reason = reason - def match(self, other): - if self.code != other.code: - return False - return other.reason.startswith(self.reason) - def __repr__(self): - return '%i:%s' % (self.code,self.reason or '*') - -class TestNode(NodeConnCB): - - def __init__(self, block_store, tx_store): - super().__init__() - self.conn = None - self.bestblockhash = None 
- self.block_store = block_store - self.block_request_map = {} - self.tx_store = tx_store - self.tx_request_map = {} - self.block_reject_map = {} - self.tx_reject_map = {} - - # When the pingmap is non-empty we're waiting for - # a response - self.pingMap = {} - self.lastInv = [] - self.closed = False - - def on_close(self, conn): - self.closed = True - - def add_connection(self, conn): - self.conn = conn - - def on_headers(self, conn, message): - if len(message.headers) > 0: - best_header = message.headers[-1] - best_header.calc_x16r() - self.bestblockhash = best_header.sha256 - - def on_getheaders(self, conn, message): - response = self.block_store.headers_for(message.locator, message.hashstop) - if response is not None: - conn.send_message(response) - - def on_getdata(self, conn, message): - [conn.send_message(r) for r in self.block_store.get_blocks(message.inv)] - [conn.send_message(r) for r in self.tx_store.get_transactions(message.inv)] - - for i in message.inv: - if i.type == 1: - self.tx_request_map[i.hash] = True - elif i.type == 2: - self.block_request_map[i.hash] = True - - def on_inv(self, conn, message): - self.lastInv = [x.hash for x in message.inv] - - def on_pong(self, conn, message): - try: - del self.pingMap[message.nonce] - except KeyError: - raise AssertionError("Got pong for unknown ping [%s]" % repr(message)) - - def on_reject(self, conn, message): - if message.message == b'tx': - self.tx_reject_map[message.data] = RejectResult(message.code, message.reason) - if message.message == b'block': - self.block_reject_map[message.data] = RejectResult(message.code, message.reason) - - def send_inv(self, obj): - mtype = 2 if isinstance(obj, CBlock) else 1 - self.conn.send_message(MsgInv([CInv(mtype, obj.sha256)])) - - def send_getheaders(self): - # We ask for headers from their last tip. 
- m = MsgGetHeaders() - m.locator = self.block_store.get_locator(self.bestblockhash) - self.conn.send_message(m) - - def send_header(self, header): - m = MsgHeaders() - m.headers.append(header) - self.conn.send_message(m) - - # This assumes BIP31 - def send_ping(self, nonce): - self.pingMap[nonce] = True - self.conn.send_message(MsgPing(nonce)) - - def received_ping_response(self, nonce): - return nonce not in self.pingMap - - def send_mempool(self): - self.lastInv = [] - self.conn.send_message(MsgMempool()) - -# TestInstance: -# -# Instances of these are generated by the test generator, and fed into the -# comptool. -# -# "blocks_and_transactions" should be an array of -# [obj, True/False/None, hash/None]: -# - obj is either a CBlock, CBlockHeader, or a CTransaction, and -# - the second value indicates whether the object should be accepted -# into the blockchain or mempool (for tests where we expect a certain -# answer), or "None" if we don't expect a certain answer and are just -# comparing the behavior of the nodes being tested. -# - the third value is the hash to test the tip against (if None or omitted, -# use the hash of the block) -# - NOTE: if a block header, no test is performed; instead the header is -# just added to the block_store. This is to facilitate block delivery -# when communicating with headers-first clients (when withholding an -# intermediate block). -# sync_every_block: if True, then each block will be inv'ed, synced, and -# nodes will be tested based on the outcome for the block. If False, -# then inv's accumulate until all blocks are processed (or max inv size -# is reached) and then sent out in one inv message. Then the final block -# will be synced across all connections, and the outcome of the final -# block will be tested. -# sync_every_tx: analogous to behavior for sync_every_block, except if outcome -# on the final tx is None, then contents of entire mempool are compared -# across all connections. 
(If outcome of final tx is specified as true -# or false, then only the last tx is tested against outcome.) - -class TestInstance: - def __init__(self, objects=None, sync_every_block=True, sync_every_tx=False): - self.blocks_and_transactions = objects if objects else [] - self.sync_every_block = sync_every_block - self.sync_every_tx = sync_every_tx - -class TestManager: - - def __init__(self, testgen, datadir): - self.test_generator = testgen - self.connections = [] - self.test_nodes = [] - self.block_store = BlockStore(datadir) - self.tx_store = TxStore(datadir) - self.ping_counter = 1 - - def add_all_connections(self, nodes): - for i in range(len(nodes)): - # Create a p2p connection to each node - test_node = TestNode(self.block_store, self.tx_store) - self.test_nodes.append(test_node) - self.connections.append(NodeConn('127.0.0.1', p2p_port(i), nodes[i], test_node)) - # Make sure the TestNode (callback class) has a reference to its - # associated NodeConn - test_node.add_connection(self.connections[-1]) - - def clear_all_connections(self): - self.connections = [] - self.test_nodes = [] - - def wait_for_disconnections(self): - def disconnected(): - return all(node.closed for node in self.test_nodes) - wait_until(disconnected, timeout=10, lock=mininode_lock, err_msg="wait_for_disconnections") - - def wait_for_verack(self): - return all(node.wait_for_verack() for node in self.test_nodes) - - def wait_for_pings(self, counter): - def received_pongs(): - return all(node.received_ping_response(counter) for node in self.test_nodes) - wait_until(received_pongs, lock=mininode_lock, err_msg="wait_for_pings") - - # sync_blocks: Wait for all connections to request the blockhash given - # then send get_headers to find out the tip of each node, and synchronize - # the response by using a ping (and waiting for pong with same nonce). 
- def sync_blocks(self, blockhash, num_blocks): - def blocks_requested(): - return all( - blockhash in node.block_request_map and node.block_request_map[blockhash] - for node in self.test_nodes - ) - - # --> error if not requested - wait_until(blocks_requested, attempts=20*num_blocks, lock=mininode_lock, err_msg="sync_blocks") - - # Send getheaders message - [ c.cb.send_getheaders() for c in self.connections ] - - # Send ping and wait for response -- synchronization hack - [ c.cb.send_ping(self.ping_counter) for c in self.connections ] - self.wait_for_pings(self.ping_counter) - self.ping_counter += 1 - - # Analogous to sync_block (see above) - def sync_transaction(self, txhash, num_events): - # Wait for nodes to request transaction (50ms sleep * 20 tries * num_events) - def transaction_requested(): - return all( - txhash in node.tx_request_map and node.tx_request_map[txhash] - for node in self.test_nodes - ) - - # --> error if not requested - wait_until(transaction_requested, attempts=20*num_events, lock=mininode_lock, err_msg="sync_transaction") - - # Get the mempool - [ c.cb.send_mempool() for c in self.connections ] - - # Send ping and wait for response -- synchronization hack - [ c.cb.send_ping(self.ping_counter) for c in self.connections ] - self.wait_for_pings(self.ping_counter) - self.ping_counter += 1 - - # Sort inv responses from each node - with mininode_lock: - [ c.cb.lastInv.sort() for c in self.connections ] - - # Verify that the tip of each connection all agree with each other, and - # with the expected outcome (if given) - def check_results(self, blockhash, outcome): - with mininode_lock: - for c in self.connections: - if outcome is None: - if c.cb.bestblockhash != self.connections[0].cb.bestblockhash: - return False - elif isinstance(outcome, RejectResult): # Check that block was rejected w/ code - if c.cb.bestblockhash == blockhash: - return False - if blockhash not in c.cb.block_reject_map: - logger.error('Block not in reject map: %064x' % 
blockhash) - return False - if not outcome.match(c.cb.block_reject_map[blockhash]): - logger.error('Block rejected with %s instead of expected %s: %064x' % (c.cb.block_reject_map[blockhash], outcome, blockhash)) - return False - elif (c.cb.bestblockhash == blockhash) != outcome: - return False - return True - - # Either check that the mempools all agree with each other, or that - # txhash's presence in the mempool matches the outcome specified. - # This is somewhat of a strange comparison, in that we're either comparing - # a particular tx to an outcome, or the entire mempools altogether; - # perhaps it would be useful to add the ability to check explicitly that - # a particular tx's existence in the mempool is the same across all nodes. - def check_mempool(self, txhash, outcome): - with mininode_lock: - for c in self.connections: - if outcome is None: - # Make sure the mempools agree with each other - if c.cb.lastInv != self.connections[0].cb.lastInv: - return False - elif isinstance(outcome, RejectResult): # Check that tx was rejected w/ code - if txhash in c.cb.lastInv: - return False - if txhash not in c.cb.tx_reject_map: - logger.error('Tx not in reject map: %064x' % txhash) - return False - if not outcome.match(c.cb.tx_reject_map[txhash]): - logger.error('Tx rejected with %s instead of expected %s: %064x' % (c.cb.tx_reject_map[txhash], outcome, txhash)) - return False - elif (txhash in c.cb.lastInv) != outcome: - return False - return True - - def run(self): - # Wait until verack is received - self.wait_for_verack() - - test_number = 1 - for test_instance in self.test_generator.get_tests(): - # We use these variables to keep track of the last block - # and last transaction in the tests, which are used - # if we're not syncing on every block or every tx. 
- [ block, block_outcome, tip ] = [ None, None, None ] - [ tx, tx_outcome ] = [ None, None ] - inv_queue = [] - - for test_obj in test_instance.blocks_and_transactions: - b_or_t = test_obj[0] - outcome = test_obj[1] - # Determine if we're dealing with a block or tx - if isinstance(b_or_t, CBlock): # Block test runner - block = b_or_t - block_outcome = outcome - tip = block.x16r - # each test_obj can have an optional third argument - # to specify the tip we should compare with - # (default is to use the block being tested) - if len(test_obj) >= 3: - tip = test_obj[2] - - # Add to shared block_store, set as current block - # If there was an open getdata request for the block - # previously, and we didn't have an entry in the - # block_store, then immediately deliver, because the - # node wouldn't send another getdata request while - # the earlier one is outstanding. - first_block_with_hash = True - if self.block_store.get(block.x16r) is not None: - first_block_with_hash = False - with mininode_lock: - self.block_store.add_block(block) - for c in self.connections: - if first_block_with_hash and block.x16r in c.cb.block_request_map and c.cb.block_request_map[block.x16r] == True: - # There was a previous request for this block hash - # Most likely, we delivered a header for this block - # but never had the block to respond to the getdata - c.send_message(MsgBlock(block)) - else: - c.cb.block_request_map[block.x16r] = False - # Either send inv's to each node and sync, or add - # to inv_queue for later inv'ing. - if test_instance.sync_every_block: - # if we expect success, send inv and sync every block - # if we expect failure, just push the block and see what happens. 
- if outcome: - [ c.cb.send_inv(block) for c in self.connections ] - self.sync_blocks(block.x16r, 1) - else: - [c.send_message(MsgBlock(block)) for c in self.connections] - [ c.cb.send_ping(self.ping_counter) for c in self.connections ] - self.wait_for_pings(self.ping_counter) - self.ping_counter += 1 - if not self.check_results(tip, outcome): - raise AssertionError("Test failed at test %d" % test_number) - else: - inv_queue.append(CInv(2, block.x16r)) - elif isinstance(b_or_t, CBlockHeader): - block_header = b_or_t - self.block_store.add_header(block_header) - [ c.cb.send_header(block_header) for c in self.connections ] - - else: # Tx test runner - assert(isinstance(b_or_t, CTransaction)) - tx = b_or_t - tx_outcome = outcome - # Add to shared tx store and clear map entry - with mininode_lock: - self.tx_store.add_transaction(tx) - for c in self.connections: - c.cb.tx_request_map[tx.x16r] = False - # Again, either inv to all nodes or save for later - if test_instance.sync_every_tx: - [ c.cb.send_inv(tx) for c in self.connections ] - self.sync_transaction(tx.x16r, 1) - if not self.check_mempool(tx.x16r, outcome): - raise AssertionError("Test failed at test %d" % test_number) - else: - inv_queue.append(CInv(1, tx.x16r)) - # Ensure we're not overflowing the inv queue - if len(inv_queue) == MAX_INV_SZ: - [c.send_message(MsgInv(inv_queue)) for c in self.connections] - inv_queue = [] - - # Do final sync if we weren't syncing on every block or every tx. 
- if not test_instance.sync_every_block and block is not None: - if len(inv_queue) > 0: - [c.send_message(MsgInv(inv_queue)) for c in self.connections] - inv_queue = [] - self.sync_blocks(block.x16r, len(test_instance.blocks_and_transactions)) - if not self.check_results(tip, block_outcome): - raise AssertionError("Block test failed at test %d" % test_number) - if not test_instance.sync_every_tx and tx is not None: - if len(inv_queue) > 0: - [c.send_message(MsgInv(inv_queue)) for c in self.connections] - self.sync_transaction(tx.x16r, len(test_instance.blocks_and_transactions)) - if not self.check_mempool(tx.x16r, tx_outcome): - raise AssertionError("Mempool test failed at test %d" % test_number) - - logger.info("Test %d: PASS" % test_number) - test_number += 1 - - [ c.disconnect_node() for c in self.connections ] - self.wait_for_disconnections() - self.block_store.close() - self.tx_store.close() diff --git a/test/functional/test_framework/coverage.py b/test/functional/test_framework/coverage.py index ba8d1731d6..90b35fe593 100644 --- a/test/functional/test_framework/coverage.py +++ b/test/functional/test_framework/coverage.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/test_framework/key.py b/test/functional/test_framework/key.py index a6f2667bca..7422f154cb 100644 --- a/test/functional/test_framework/key.py +++ b/test/functional/test_framework/key.py @@ -1,234 +1,389 @@ -# Copyright (c) 2011 Sam Rushing +# Copyright (c) 2019 Pieter Wuille +# Copyright (c) 2017-2020 The Raven Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
""" -ECC secp256k1 OpenSSL wrapper. - -WARNING: This module does not mlock() secrets; your private keys may end up on -disk in swap! Use with caution! - -This file is modified from python-ravenlib. +Test-only secp256k1 elliptic curve implementation +WARNING: This code is slow, uses bad randomness, does not properly protect +keys, and is trivially vulnerable to side channel attacks. Do not use for +anything but tests. """ +import random -import ctypes -import ctypes.util -import hashlib -import sys - -ssl = ctypes.cdll.LoadLibrary(ctypes.util.find_library ('libeay32')) - -ssl.BN_new.restype = ctypes.c_void_p -ssl.BN_new.argtypes = [] - -ssl.BN_bin2bn.restype = ctypes.c_void_p -ssl.BN_bin2bn.argtypes = [ctypes.c_char_p, ctypes.c_int, ctypes.c_void_p] - -ssl.BN_CTX_free.restype = None -ssl.BN_CTX_free.argtypes = [ctypes.c_void_p] - -ssl.BN_CTX_new.restype = ctypes.c_void_p -ssl.BN_CTX_new.argtypes = [] - -ssl.ECDH_compute_key.restype = ctypes.c_int -ssl.ECDH_compute_key.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p] - -ssl.ECDSA_sign.restype = ctypes.c_int -ssl.ECDSA_sign.argtypes = [ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] - -ssl.ECDSA_verify.restype = ctypes.c_int -ssl.ECDSA_verify.argtypes = [ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p] - -ssl.EC_KEY_free.restype = None -ssl.EC_KEY_free.argtypes = [ctypes.c_void_p] - -ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p -ssl.EC_KEY_new_by_curve_name.argtypes = [ctypes.c_int] - -ssl.EC_KEY_get0_group.restype = ctypes.c_void_p -ssl.EC_KEY_get0_group.argtypes = [ctypes.c_void_p] - -ssl.EC_KEY_get0_public_key.restype = ctypes.c_void_p -ssl.EC_KEY_get0_public_key.argtypes = [ctypes.c_void_p] - -ssl.EC_KEY_set_private_key.restype = ctypes.c_int -ssl.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p, ctypes.c_void_p] - -ssl.EC_KEY_set_conv_form.restype = None 
-ssl.EC_KEY_set_conv_form.argtypes = [ctypes.c_void_p, ctypes.c_int] - -ssl.EC_KEY_set_public_key.restype = ctypes.c_int -ssl.EC_KEY_set_public_key.argtypes = [ctypes.c_void_p, ctypes.c_void_p] - -ssl.i2o_ECPublicKey.restype = ctypes.c_void_p -ssl.i2o_ECPublicKey.argtypes = [ctypes.c_void_p, ctypes.c_void_p] - -ssl.EC_POINT_new.restype = ctypes.c_void_p -ssl.EC_POINT_new.argtypes = [ctypes.c_void_p] - -ssl.EC_POINT_free.restype = None -ssl.EC_POINT_free.argtypes = [ctypes.c_void_p] - -ssl.EC_POINT_mul.restype = ctypes.c_int -ssl.EC_POINT_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] - -# this specifies the curve used with ECDSA. -NID_secp256k1 = 714 # from openssl/obj_mac.h +def modinv(a, n): + """Compute the modular inverse of a modulo n + See https://en.wikipedia.org/wiki/Extended_Euclidean_algorithm#Modular_integers. + """ + t1, t2 = 0, 1 + r1, r2 = n, a + while r2 != 0: + q = r1 // r2 + t1, t2 = t2, t1 - q * t2 + r1, r2 = r2, r1 - q * r2 + if r1 > 1: + return None + if t1 < 0: + t1 += n + return t1 + +def jacobi_symbol(n, k): + """Compute the Jacobi symbol of n modulo k + + See http://en.wikipedia.org/wiki/Jacobi_symbol + + For our application k is always prime, so this is the same as the Legendre symbol.""" + assert k > 0 and k & 1, "jacobi symbol is only defined for positive odd k" + n %= k + t = 0 + while n != 0: + while n & 1 == 0: + n >>= 1 + r = k & 7 + t ^= (r == 3 or r == 5) + n, k = k, n + t ^= (n & k & 3 == 3) + n = n % k + if k == 1: + return -1 if t else 1 + return 0 + +def modsqrt(a, p): + """Compute the square root of a modulo p when p % 4 = 3. + + The Tonelli-Shanks algorithm can be used. See https://en.wikipedia.org/wiki/Tonelli-Shanks_algorithm + + Limiting this function to only work for p % 4 = 3 means we don't need to + iterate through the loop. The highest n such that p - 1 = 2^n Q with Q odd + is n = 1. 
Therefore Q = (p-1)/2 and sqrt = a^((Q+1)/2) = a^((p+1)/4) + + secp256k1's is defined over field of size 2**256 - 2**32 - 977, which is 3 mod 4. + """ + if p % 4 != 3: + raise NotImplementedError("modsqrt only implemented for p % 4 = 3") + sqrt = pow(a, (p + 1)//4, p) + if pow(sqrt, 2, p) == a % p: + return sqrt + return None + +class EllipticCurve: + def __init__(self, p, a, b): + """Initialize elliptic curve y^2 = x^3 + a*x + b over GF(p).""" + self.p = p + self.a = a % p + self.b = b % p + + def affine(self, p1): + """Convert a Jacobian point tuple p1 to affine form, or None if at infinity. + + An affine point is represented as the Jacobian (x, y, 1)""" + x1, y1, z1 = p1 + if z1 == 0: + return None + inv = modinv(z1, self.p) + inv_2 = (inv**2) % self.p + inv_3 = (inv_2 * inv) % self.p + return ((inv_2 * x1) % self.p, (inv_3 * y1) % self.p, 1) + + def negate(self, p1): + """Negate a Jacobian point tuple p1.""" + x1, y1, z1 = p1 + return (x1, (self.p - y1) % self.p, z1) + + def on_curve(self, p1): + """Determine whether a Jacobian tuple p is on the curve (and not infinity)""" + x1, y1, z1 = p1 + z2 = pow(z1, 2, self.p) + z4 = pow(z2, 2, self.p) + return z1 != 0 and (pow(x1, 3, self.p) + self.a * x1 * z4 + self.b * z2 * z4 - pow(y1, 2, self.p)) % self.p == 0 + + def is_x_coord(self, x): + """Test whether x is a valid X coordinate on the curve.""" + x_3 = pow(x, 3, self.p) + return jacobi_symbol(x_3 + self.a * x + self.b, self.p) != -1 + + def lift_x(self, x): + """Given an X coordinate on the curve, return a corresponding affine point.""" + x_3 = pow(x, 3, self.p) + v = x_3 + self.a * x + self.b + y = modsqrt(v, self.p) + if y is None: + return None + return (x, y, 1) + + def double(self, p1): + """Double a Jacobian tuple p1 + + See https://en.wikibooks.org/wiki/Cryptography/Prime_Curve/Jacobian_Coordinates - Point Doubling""" + x1, y1, z1 = p1 + if z1 == 0: + return (0, 1, 0) + y1_2 = (y1**2) % self.p + y1_4 = (y1_2**2) % self.p + x1_2 = (x1**2) % self.p + s = 
(4*x1*y1_2) % self.p + m = 3*x1_2 + if self.a: + m += self.a * pow(z1, 4, self.p) + m = m % self.p + x2 = (m**2 - 2*s) % self.p + y2 = (m*(s - x2) - 8*y1_4) % self.p + z2 = (2*y1*z1) % self.p + return (x2, y2, z2) + + def add_mixed(self, p1, p2): + """Add a Jacobian tuple p1 and an affine tuple p2 + + See https://en.wikibooks.org/wiki/Cryptography/Prime_Curve/Jacobian_Coordinates - Point Addition (with affine point)""" + x1, y1, z1 = p1 + x2, y2, z2 = p2 + assert(z2 == 1) + # Adding to the point at infinity is a no-op + if z1 == 0: + return p2 + z1_2 = (z1**2) % self.p + z1_3 = (z1_2 * z1) % self.p + u2 = (x2 * z1_2) % self.p + s2 = (y2 * z1_3) % self.p + if x1 == u2: + if (y1 != s2): + # p1 and p2 are inverses. Return the point at infinity. + return (0, 1, 0) + # p1 == p2. The formulas below fail when the two points are equal. + return self.double(p1) + h = u2 - x1 + r = s2 - y1 + h_2 = (h**2) % self.p + h_3 = (h_2 * h) % self.p + u1_h_2 = (x1 * h_2) % self.p + x3 = (r**2 - h_3 - 2*u1_h_2) % self.p + y3 = (r*(u1_h_2 - x3) - y1*h_3) % self.p + z3 = (h*z1) % self.p + return (x3, y3, z3) + + def add(self, p1, p2): + """Add two Jacobian tuples p1 and p2 + + See https://en.wikibooks.org/wiki/Cryptography/Prime_Curve/Jacobian_Coordinates - Point Addition""" + x1, y1, z1 = p1 + x2, y2, z2 = p2 + # Adding the point at infinity is a no-op + if z1 == 0: + return p2 + if z2 == 0: + return p1 + # Adding an Affine to a Jacobian is more efficient since we save field multiplications and squarings when z = 1 + if z1 == 1: + return self.add_mixed(p2, p1) + if z2 == 1: + return self.add_mixed(p1, p2) + z1_2 = (z1**2) % self.p + z1_3 = (z1_2 * z1) % self.p + z2_2 = (z2**2) % self.p + z2_3 = (z2_2 * z2) % self.p + u1 = (x1 * z2_2) % self.p + u2 = (x2 * z1_2) % self.p + s1 = (y1 * z2_3) % self.p + s2 = (y2 * z1_3) % self.p + if u1 == u2: + if (s1 != s2): + # p1 and p2 are inverses. Return the point at infinity. + return (0, 1, 0) + # p1 == p2. 
The formulas below fail when the two points are equal. + return self.double(p1) + h = u2 - u1 + r = s2 - s1 + h_2 = (h**2) % self.p + h_3 = (h_2 * h) % self.p + u1_h_2 = (u1 * h_2) % self.p + x3 = (r**2 - h_3 - 2*u1_h_2) % self.p + y3 = (r*(u1_h_2 - x3) - s1*h_3) % self.p + z3 = (h*z1*z2) % self.p + return (x3, y3, z3) + + def mul(self, ps): + """Compute a (multi) point multiplication + + ps is a list of (Jacobian tuple, scalar) pairs. + """ + r = (0, 1, 0) + for i in range(255, -1, -1): + r = self.double(r) + for (p, n) in ps: + if ((n >> i) & 1): + r = self.add(r, p) + return r + +SECP256K1 = EllipticCurve(2**256 - 2**32 - 977, 0, 7) +SECP256K1_G = (0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798, 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8, 1) SECP256K1_ORDER = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141 SECP256K1_ORDER_HALF = SECP256K1_ORDER // 2 -# Thx to Sam Devlin for the ctypes magic 64-bit fix. -def _check_result(val): - if val == 0: - raise ValueError - else: - return ctypes.c_void_p (val) - -ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p -ssl.EC_KEY_new_by_curve_name.errcheck = _check_result - -class CECKey: - """Wrapper around OpenSSL's EC_KEY""" - - POINT_CONVERSION_COMPRESSED = 2 - POINT_CONVERSION_UNCOMPRESSED = 4 +class ECPubKey(): + """A secp256k1 public key""" def __init__(self): - self.k = ssl.EC_KEY_new_by_curve_name(NID_secp256k1) - - def __del__(self): - if ssl: - ssl.EC_KEY_free(self.k) - self.k = None - - def set_secretbytes(self, secret): - priv_key = ssl.BN_bin2bn(secret, 32, ssl.BN_new()) - group = ssl.EC_KEY_get0_group(self.k) - pub_key = ssl.EC_POINT_new(group) - ctx = ssl.BN_CTX_new() - if not ssl.EC_POINT_mul(group, pub_key, priv_key, None, None, ctx): - raise ValueError("Could not derive public key from the supplied secret.") - ssl.EC_POINT_mul(group, pub_key, priv_key, None, None, ctx) - ssl.EC_KEY_set_private_key(self.k, priv_key) - 
ssl.EC_KEY_set_public_key(self.k, pub_key) - ssl.EC_POINT_free(pub_key) - ssl.BN_CTX_free(ctx) - return self.k - - def set_privkey(self, key): - self.mb = ctypes.create_string_buffer(key) - return ssl.d2i_ECPrivateKey(ctypes.byref(self.k), ctypes.byref(ctypes.pointer(self.mb)), len(key)) - - def set_pubkey(self, key): - self.mb = ctypes.create_string_buffer(key) - return ssl.o2i_ECPublicKey(ctypes.byref(self.k), ctypes.byref(ctypes.pointer(self.mb)), len(key)) - - def get_privkey(self): - size = ssl.i2d_ECPrivateKey(self.k, 0) - mb_pri = ctypes.create_string_buffer(size) - ssl.i2d_ECPrivateKey(self.k, ctypes.byref(ctypes.pointer(mb_pri))) - return mb_pri.raw - - def get_pubkey(self): - size = ssl.i2o_ECPublicKey(self.k, 0) - mb = ctypes.create_string_buffer(size) - ssl.i2o_ECPublicKey(self.k, ctypes.byref(ctypes.pointer(mb))) - return mb.raw - - def get_raw_ecdh_key(self, other_pubkey): - ecdh_keybuffer = ctypes.create_string_buffer(32) - r = ssl.ECDH_compute_key(ctypes.pointer(ecdh_keybuffer), 32, - ssl.EC_KEY_get0_public_key(other_pubkey.k), - self.k, 0) - if r != 32: - raise Exception('CKey.get_ecdh_key(): ECDH_compute_key() failed') - return ecdh_keybuffer.raw - - def get_ecdh_key(self, other_pubkey, kdf=lambda k: hashlib.sha256(k).digest()): - # FIXME: be warned it's not clear what the kdf should be as a default - r = self.get_raw_ecdh_key(other_pubkey) - return kdf(r) - - def sign(self, hash_in, low_s = True): - # FIXME: need unit tests for below cases - if not isinstance(hash_in, bytes): - raise TypeError('Hash must be bytes instance; got %r' % hash_in.__class__) - if len(hash_in) != 32: - raise ValueError('Hash must be exactly 32 bytes long') - - sig_size0 = ctypes.c_uint32() - sig_size0.value = ssl.ECDSA_size(self.k) - mb_sig = ctypes.create_string_buffer(sig_size0.value) - result = ssl.ECDSA_sign(0, hash_in, len(hash_in), mb_sig, ctypes.byref(sig_size0), self.k) - assert 1 == result - assert mb_sig.raw[0] == 0x30 - assert mb_sig.raw[1] == sig_size0.value 
- 2 - total_size = mb_sig.raw[1] - assert mb_sig.raw[2] == 2 - r_size = mb_sig.raw[3] - assert mb_sig.raw[4 + r_size] == 2 - s_size = mb_sig.raw[5 + r_size] - s_value = int.from_bytes(mb_sig.raw[6+r_size:6+r_size+s_size], byteorder='big') - if (not low_s) or s_value <= SECP256K1_ORDER_HALF: - return mb_sig.raw[:sig_size0.value] - else: - low_s_value = SECP256K1_ORDER - s_value - low_s_bytes = low_s_value.to_bytes(33, byteorder='big') - while len(low_s_bytes) > 1 and low_s_bytes[0] == 0 and low_s_bytes[1] < 0x80: - low_s_bytes = low_s_bytes[1:] - new_s_size = len(low_s_bytes) - new_total_size_byte = (total_size + new_s_size - s_size).to_bytes(1,byteorder='big') - new_s_size_byte = new_s_size.to_bytes(1, byteorder='big') - return b'\x30' + new_total_size_byte + mb_sig.raw[2:5+r_size] + new_s_size_byte + low_s_bytes - - def verify(self, hash_in, sig): - """Verify a DER signature""" - return ssl.ECDSA_verify(0, hash_in, len(hash_in), sig, len(sig), self.k) == 1 - - def set_compressed(self, compressed): - if compressed: - form = self.POINT_CONVERSION_COMPRESSED + """Construct an uninitialized public key""" + self.valid = False + + def set(self, data): + """Construct a public key from a serialization in compressed or uncompressed format""" + if (len(data) == 65 and data[0] == 0x04): + p = (int.from_bytes(data[1:33], 'big'), int.from_bytes(data[33:65], 'big'), 1) + self.valid = SECP256K1.on_curve(p) + if self.valid: + self.p = p + self.compressed = False + elif (len(data) == 33 and (data[0] == 0x02 or data[0] == 0x03)): + x = int.from_bytes(data[1:33], 'big') + if SECP256K1.is_x_coord(x): + p = SECP256K1.lift_x(x) + # if the oddness of the y co-ord isn't correct, find the other + # valid y + if (p[1] & 1) != (data[0] & 1): + p = SECP256K1.negate(p) + self.p = p + self.valid = True + self.compressed = True + else: + self.valid = False else: - form = self.POINT_CONVERSION_UNCOMPRESSED - ssl.EC_KEY_set_conv_form(self.k, form) - + self.valid = False -class CPubKey(bytes): - 
"""An encapsulated public key - - Attributes: + @property + def is_compressed(self): + return self.compressed - is_valid - Corresponds to CPubKey.IsValid() - is_fullyvalid - Corresponds to CPubKey.IsFullyValid() - is_compressed - Corresponds to CPubKey.IsCompressed() - """ + @property + def is_valid(self): + return self.valid + + def get_bytes(self): + assert(self.valid) + p = SECP256K1.affine(self.p) + if p is None: + return None + if self.compressed: + return bytes([0x02 + (p[1] & 1)]) + p[0].to_bytes(32, 'big') + else: + return bytes([0x04]) + p[0].to_bytes(32, 'big') + p[1].to_bytes(32, 'big') + + def verify_ecdsa(self, sig, msg, low_s=True): + """Verify a strictly DER-encoded ECDSA signature against this pubkey. + + See https://en.wikipedia.org/wiki/Elliptic_Curve_Digital_Signature_Algorithm for the + ECDSA verifier algorithm""" + assert(self.valid) + + # Extract r and s from the DER formatted signature. Return false for + # any DER encoding errors. + if (sig[1] + 2 != len(sig)): + return False + if (len(sig) < 4): + return False + if (sig[0] != 0x30): + return False + if (sig[2] != 0x02): + return False + rlen = sig[3] + if (len(sig) < 6 + rlen): + return False + if rlen < 1 or rlen > 33: + return False + if sig[4] >= 0x80: + return False + if (rlen > 1 and (sig[4] == 0) and not (sig[5] & 0x80)): + return False + r = int.from_bytes(sig[4:4+rlen], 'big') + if (sig[4+rlen] != 0x02): + return False + slen = sig[5+rlen] + if slen < 1 or slen > 33: + return False + if (len(sig) != 6 + rlen + slen): + return False + if sig[6+rlen] >= 0x80: + return False + if (slen > 1 and (sig[6+rlen] == 0) and not (sig[7+rlen] & 0x80)): + return False + s = int.from_bytes(sig[6+rlen:6+rlen+slen], 'big') + + # Verify that r and s are within the group order + if r < 1 or s < 1 or r >= SECP256K1_ORDER or s >= SECP256K1_ORDER: + return False + if low_s and s >= SECP256K1_ORDER_HALF: + return False + z = int.from_bytes(msg, 'big') + + # Run verifier algorithm on r, s + w = modinv(s, 
SECP256K1_ORDER) + u1 = z*w % SECP256K1_ORDER + u2 = r*w % SECP256K1_ORDER + R = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, u1), (self.p, u2)])) + if R is None or R[0] != r: + return False + return True + +class ECKey(): + """A secp256k1 private key""" - def __new__(cls, buf, _cec_key=None): - self = super(CPubKey, cls).__new__(cls, buf) - if _cec_key is None: - _cec_key = CECKey() - self._cec_key = _cec_key - self.is_fullyvalid = _cec_key.set_pubkey(self) != 0 - return self + def __init__(self): + self.valid = False + + def set(self, secret, compressed): + """Construct a private key object with given 32-byte secret and compressed flag.""" + assert(len(secret) == 32) + secret = int.from_bytes(secret, 'big') + self.valid = (secret > 0 and secret < SECP256K1_ORDER) + if self.valid: + self.secret = secret + self.compressed = compressed + + def generate(self, compressed=True): + """Generate a random private key (compressed or uncompressed).""" + self.set(random.randrange(1, SECP256K1_ORDER).to_bytes(32, 'big'), compressed) + + def get_bytes(self): + """Retrieve the 32-byte representation of this key.""" + assert(self.valid) + return self.secret.to_bytes(32, 'big') @property def is_valid(self): - return len(self) > 0 + return self.valid @property def is_compressed(self): - return len(self) == 33 - - def verify(self, hash_in, sig): - return self._cec_key.verify(hash_in, sig) - - def __str__(self): - return repr(self) - - def __repr__(self): - # Always have represent as b'' so test cases don't have to - # change for py2/3 - if sys.version > '3': - return '%s(%s)' % (self.__class__.__name__, super(CPubKey, self).__repr__()) - else: - return '%s(b%s)' % (self.__class__.__name__, super(CPubKey, self).__repr__()) + return self.compressed + def get_pubkey(self): + """Compute an ECPubKey object for this secret key.""" + assert(self.valid) + ret = ECPubKey() + p = SECP256K1.mul([(SECP256K1_G, self.secret)]) + ret.p = p + ret.valid = True + ret.compressed = self.compressed + 
return ret + + def sign_ecdsa(self, msg, low_s=True): + """Construct a DER-encoded ECDSA signature with this key. + + See https://en.wikipedia.org/wiki/Elliptic_Curve_Digital_Signature_Algorithm for the + ECDSA signer algorithm.""" + assert(self.valid) + z = int.from_bytes(msg, 'big') + # Note: no RFC6979, but a simple random nonce (some tests rely on distinct transactions for the same operation) + k = random.randrange(1, SECP256K1_ORDER) + R = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, k)])) + r = R[0] % SECP256K1_ORDER + s = (modinv(k, SECP256K1_ORDER) * (z + self.secret * r)) % SECP256K1_ORDER + if low_s and s > SECP256K1_ORDER_HALF: + s = SECP256K1_ORDER - s + # Represent in DER format. The byte representations of r and s have + # length rounded up (255 bits becomes 32 bytes and 256 bits becomes 33 + # bytes). + rb = r.to_bytes((r.bit_length() + 8) // 8, 'big') + sb = s.to_bytes((s.bit_length() + 8) // 8, 'big') + return b'\x30' + bytes([4 + len(rb) + len(sb), 2, len(rb)]) + rb + bytes([2, len(sb)]) + sb diff --git a/test/functional/test_framework/messages.py b/test/functional/test_framework/messages.py new file mode 100644 index 0000000000..17a0411381 --- /dev/null +++ b/test/functional/test_framework/messages.py @@ -0,0 +1,1648 @@ +#!/usr/bin/env python3 +# Copyright (c) 2010 ArtForz -- public domain half-a-node +# Copyright (c) 2012 Jeff Garzik +# Copyright (c) 2010-2017 The Bitcoin Core developers +# Copyright (c) 2017-2020 The Raven Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. + +""" +Raven test framework primitive and message structures. + +CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....: + data structures that should map to corresponding structures in + src/primitives + +msg_block, msg_tx, msg_headers, etc.: + data structures that represent network messages + +ser_*, deser_*: functions that handle serialization/deserialization. 
+Classes use __slots__ to ensure extraneous attributes aren't accidentally added +by tests, compromising their intended effect. +""" + +from codecs import encode +import copy +import hashlib +from io import BytesIO +import random +import socket +import struct +import time + +from test_framework.siphash import siphash256 +from test_framework.util import hex_str_to_bytes, bytes_to_hex_str, x16_hash_block + +BIP0031_VERSION = 60000 +MY_VERSION = 70025 # This needs to match the ASSETDATA_VERSION in version.h! +MY_SUBVERSION = b"/python-mininode-tester:0.0.3/" +MY_RELAY = 1 # from version 70001 onwards, fRelay should be appended to version messages (BIP37) + +MAX_INV_SZ = 50000 +MAX_BLOCK_BASE_SIZE = 1000000 + +COIN = 100000000 # 1 rvn in Corbies +BIP125_SEQUENCE_NUMBER = 0xfffffffd # Sequence number that is BIP 125 opt-in and BIP 68-opt-out + +NODE_NETWORK = (1 << 0) +NODE_WITNESS = (1 << 3) +NODE_UNSUPPORTED_SERVICE_BIT_5 = (1 << 5) +NODE_UNSUPPORTED_SERVICE_BIT_7 = (1 << 7) +MSG_WITNESS_FLAG = 1 << 30 + + +# =================================================== +# Serialization/deserialization tools +# =================================================== +def sha256(s): + return hashlib.new('sha256', s).digest() + + +def ripemd160(s): + return hashlib.new('ripemd160', s).digest() + + +def hash256(s): + return sha256(sha256(s)) + + +def ser_compact_size(v): + if v < 253: + r = struct.pack("B", v) + elif v < 0x10000: + r = struct.pack(">= 32 + return rs + + +def uint256_from_str(s): + r = 0 + t = struct.unpack("> 24) & 0xFF + v = (c & 0xFFFFFF) << (8 * (nbytes - 3)) + return v + + +def deser_vector(f, c): + nit = deser_compact_size(f) + r = [] + for _ in range(nit): + t = c() + t.deserialize(f) + r.append(t) + return r + + +# ser_function_name: Allow for an alternate serialization function on the +# entries in the vector (we use this for serializing the vector of transactions +# for a witness block). 
+def ser_vector(v, ser_function_name=None): + r = ser_compact_size(len(v)) + for i in v: + if ser_function_name: + r += getattr(i, ser_function_name)() + else: + r += i.serialize() + return r + + +def deser_uint256_vector(f): + nit = deser_compact_size(f) + r = [] + for _ in range(nit): + t = deser_uint256(f) + r.append(t) + return r + + +def ser_uint256_vector(v): + r = ser_compact_size(len(v)) + for i in v: + r += ser_uint256(i) + return r + + +def deser_string_vector(f): + nit = deser_compact_size(f) + r = [] + for _ in range(nit): + t = deser_string(f) + r.append(t) + return r + + +def ser_string_vector(v): + r = ser_compact_size(len(v)) + for sv in v: + r += ser_string(sv) + return r + + +def deser_int_vector(f): + nit = deser_compact_size(f) + r = [] + for _ in range(nit): + t = struct.unpack("H", f.read(2))[0] + + def serialize(self): + r = b"" + r += struct.pack("H", self.port) + return r + + def __repr__(self): + return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices, + self.ip, self.port) + + +class CInv: + __slots__ = ("hash", "type") + + typemap = { + 0: "Error", + 1: "TX", + 2: "Block", + 1 | MSG_WITNESS_FLAG: "WitnessTx", + 2 | MSG_WITNESS_FLAG: "WitnessBlock", + 4: "CompactBlock" + } + + def __init__(self, t=0, h=0): + self.type = t + self.hash = h + + def deserialize(self, f): + self.type = struct.unpack(" 21000000 * COIN: + return False + return True + + def __repr__(self): + return "CTransaction(nVersion=%i vin=%s vout=%s wit=%s nLockTime=%i)" \ + % (self.nVersion, repr(self.vin), repr(self.vout), repr(self.wit), self.nLockTime) + + +class CBlockHeader: + __slots__ = ("nVersion", "hashPrevBlock", "hashMerkleRoot", "nTime", "nBits", "nNonce", "x16r", "hash") + + def __init__(self, header=None): + if header is None: + self.set_null() + else: + self.nVersion = header.nVersion + self.hashPrevBlock = header.hashPrevBlock + self.hashMerkleRoot = header.hashMerkleRoot + self.nTime = header.nTime + self.nBits = header.nBits + self.nNonce = 
header.nNonce + self.x16r = header.x16r + self.hash = header.hash + self.calc_x16r() + + def set_null(self): + self.nVersion = 1 + self.hashPrevBlock = 0 + self.hashMerkleRoot = 0 + self.nTime = 0 + self.nBits = 0 + self.nNonce = 0 + self.x16r = None + self.hash = None + + def deserialize(self, f): + self.nVersion = struct.unpack(" 1: + newhashes = [] + for i in range(0, len(hashes), 2): + i2 = min(i + 1, len(hashes) - 1) + newhashes.append(hash256(hashes[i] + hashes[i2])) + hashes = newhashes + return uint256_from_str(hashes[0]) + + def calc_merkle_root(self): + hashes = [] + for tx in self.vtx: + tx.calc_x16r() + hashes.append(ser_uint256(tx.x16r)) + return self.get_merkle_root(hashes) + + def calc_witness_merkle_root(self): + # For witness root purposes, the hash of the + # coinbase, with witness, is defined to be 0...0 + hashes = [ser_uint256(0)] + + for tx in self.vtx[1:]: + # Calculate the hashes with witness data + hashes.append(ser_uint256(tx.calc_x16r(True))) + + return self.get_merkle_root(hashes) + + def is_valid(self): + self.calc_x16r() + target = uint256_from_compact(self.nBits) + if self.x16r > target: + return False + for tx in self.vtx: + if not tx.is_valid(): + return False + if self.calc_merkle_root() != self.hashMerkleRoot: + return False + return True + + def solve(self): + self.rehash() + target = uint256_from_compact(self.nBits) + while self.x16r > target: + self.nNonce += 1 + self.rehash() + + def __repr__(self): + return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \ + % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot, + time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx)) + + +class CUnsignedAlert: + __slots__ = ("nVersion", "nRelayUntil", "nExpiration", "nID", "nCancel", "setCancel", "nMinVer", + "nMaxVer", "setSubVer", "nPriority", "strComment", "strStatusBar", "strReserved") + + def __init__(self): + self.nVersion = 1 + self.nRelayUntil = 0 + self.nExpiration = 
0 + self.nID = 0 + self.nCancel = 0 + self.setCancel = [] + self.nMinVer = 0 + self.nMaxVer = 0 + self.setSubVer = [] + self.nPriority = 0 + self.strComment = b"" + self.strStatusBar = b"" + self.strReserved = b"" + + def deserialize(self, f): + self.nVersion = struct.unpack("= 106: + self.addrFrom = CAddress() + self.addrFrom.deserialize(f) + self.nNonce = struct.unpack("= 209: + self.nStartingHeight = struct.unpack("= 70001: + # Relay field is optional for version 70001 onwards + try: + self.nRelay = struct.unpack(" +class MsgHeaders: + __slots__ = "headers" + command = b"headers" + + def __init__(self, headers=None): + self.headers = headers if headers is not None else [] + + def deserialize(self, f): + # comment in ravend indicates these should be deserialized as blocks + blocks = deser_vector(f, CBlock) + for x in blocks: + self.headers.append(CBlockHeader(x)) + + def serialize(self): + blocks = [CBlock(x) for x in self.headers] + return ser_vector(blocks) + + def __repr__(self): + return "msg_headers(headers=%s)" % repr(self.headers) + + +class MsgReject: + __slots__ = ("message", "code", "reason", "data") + command = b"reject" + REJECT_MALFORMED = 1 + + def __init__(self): + self.message = b"" + self.code = 0 + self.reason = b"" + self.data = 0 + + def deserialize(self, f): + self.message = deser_string(f) + self.code = struct.unpack(">= 32 - return rs - - -def uint256_from_str(s): - r = 0 - t = struct.unpack("> 24) & 0xFF - v = (c & 0xFFFFFF) << (8 * (nbytes - 3)) - return v - - -def deser_vector(f, c): - nit = deser_compact_size(f) - r = [] - for _ in range(nit): - t = c() - t.deserialize(f) - r.append(t) - return r - - -# ser_function_name: Allow for an alternate serialization function on the -# entries in the vector (we use this for serializing the vector of transactions -# for a witness block). 
-def ser_vector(l, ser_function_name=None): - r = ser_compact_size(len(l)) - for i in l: - if ser_function_name: - r += getattr(i, ser_function_name)() - else: - r += i.serialize() - return r - - -def deser_uint256_vector(f): - nit = deser_compact_size(f) - r = [] - for _ in range(nit): - t = deser_uint256(f) - r.append(t) - return r - - -def ser_uint256_vector(l): - r = ser_compact_size(len(l)) - for i in l: - r += ser_uint256(i) - return r - - -def deser_string_vector(f): - nit = deser_compact_size(f) - r = [] - for _ in range(nit): - t = deser_string(f) - r.append(t) - return r - - -def ser_string_vector(l): - r = ser_compact_size(len(l)) - for sv in l: - r += ser_string(sv) - return r - - -def deser_int_vector(f): - nit = deser_compact_size(f) - r = [] - for _ in range(nit): - t = struct.unpack("H", f.read(2))[0] - - def serialize(self): - r = b"" - r += struct.pack("H", self.port) - return r - - def __repr__(self): - return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices, - self.ip, self.port) - - -MSG_WITNESS_FLAG = 1 << 30 - - -class CInv: - typemap = { - 0: "Error", - 1: "TX", - 2: "Block", - 1 | MSG_WITNESS_FLAG: "WitnessTx", - 2 | MSG_WITNESS_FLAG: "WitnessBlock", - 4: "CompactBlock" - } - - def __init__(self, t=0, h=0): - self.type = t - self.hash = h - - def deserialize(self, f): - self.type = struct.unpack(" 21000000 * COIN: - return False - return True - - def __repr__(self): - return "CTransaction(nVersion=%i vin=%s vout=%s wit=%s nLockTime=%i)" \ - % (self.nVersion, repr(self.vin), repr(self.vout), repr(self.wit), self.nLockTime) - - -class CBlockHeader: - def __init__(self, header=None): - if header is None: - self.set_null() - else: - self.nVersion = header.nVersion - self.hashPrevBlock = header.hashPrevBlock - self.hashMerkleRoot = header.hashMerkleRoot - self.nTime = header.nTime - self.nBits = header.nBits - self.nNonce = header.nNonce - self.x16r = header.sha256 - self.hash = header.hash - self.calc_x16r() - - def set_null(self): - 
self.nVersion = 1 - self.hashPrevBlock = 0 - self.hashMerkleRoot = 0 - self.nTime = 0 - self.nBits = 0 - self.nNonce = 0 - self.x16r = None - self.hash = None - - def deserialize(self, f): - self.nVersion = struct.unpack(" 1: - newhashes = [] - for i in range(0, len(hashes), 2): - i2 = min(i + 1, len(hashes) - 1) - newhashes.append(hash256(hashes[i] + hashes[i2])) - hashes = newhashes - return uint256_from_str(hashes[0]) - - def calc_merkle_root(self): - hashes = [] - for tx in self.vtx: - tx.calc_x16r() - hashes.append(ser_uint256(tx.x16r)) - return self.get_merkle_root(hashes) - - def calc_witness_merkle_root(self): - # For witness root purposes, the hash of the - # coinbase, with witness, is defined to be 0...0 - hashes = [ser_uint256(0)] - - for tx in self.vtx[1:]: - # Calculate the hashes with witness data - hashes.append(ser_uint256(tx.calc_x16r(True))) - - return self.get_merkle_root(hashes) - - def is_valid(self): - self.calc_x16r() - target = uint256_from_compact(self.nBits) - if self.x16r > target: - return False - for tx in self.vtx: - if not tx.is_valid(): - return False - if self.calc_merkle_root() != self.hashMerkleRoot: - return False - return True - - def solve(self): - self.rehash() - target = uint256_from_compact(self.nBits) - while self.x16r > target: - self.nNonce += 1 - self.rehash() - - def __repr__(self): - return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \ - % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot, - time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx)) - - -class CUnsignedAlert: - def __init__(self): - self.nVersion = 1 - self.nRelayUntil = 0 - self.nExpiration = 0 - self.nID = 0 - self.nCancel = 0 - self.setCancel = [] - self.nMinVer = 0 - self.nMaxVer = 0 - self.setSubVer = [] - self.nPriority = 0 - self.strComment = b"" - self.strStatusBar = b"" - self.strReserved = b"" - - def deserialize(self, f): - self.nVersion = struct.unpack("= 106: - 
self.addrFrom = CAddress() - self.addrFrom.deserialize(f) - self.nNonce = struct.unpack("= 209: - self.nStartingHeight = struct.unpack("= 70001: - # Relay field is optional for version 70001 onwards - try: - self.nRelay = struct.unpack(" -class MsgHeaders: - command = b"headers" - - def __init__(self, headers=None): - self.headers = headers if headers is not None else [] - - def deserialize(self, f): - # comment in ravend indicates these should be deserialized as blocks - blocks = deser_vector(f, CBlock) - for x in blocks: - self.headers.append(CBlockHeader(x)) - - def serialize(self): - blocks = [CBlock(x) for x in self.headers] - return ser_vector(blocks) - - def __repr__(self): - return "msg_headers(headers=%s)" % repr(self.headers) - - -class MsgReject: - command = b"reject" - REJECT_MALFORMED = 1 - - def __init__(self): - self.message = b"" - self.code = 0 - self.reason = b"" - self.data = 0 - - def deserialize(self, f): - self.message = deser_string(f) - self.code = struct.unpack(" 1: raise NotImplementedError("wait_for_inv() will only verify the first inv object") test_function = lambda: self.last_message.get("inv") and \ - self.last_message["inv"].inv[0].type == expected_inv[0].type and \ - self.last_message["inv"].inv[0].hash == expected_inv[0].hash + self.last_message["inv"].inv[0].type == expected_inv[0].type \ + and self.last_message["inv"].inv[0].hash == expected_inv[0].hash wait_until(test_function, timeout=timeout, lock=mininode_lock, err_msg="wait_for_inv") def wait_for_verack(self, timeout=60): @@ -1927,7 +400,7 @@ def got_data(self): self.got_message(t) else: logger.warning("Received unknown command from %s:%d: '%s' %s" % ( - self.dstaddr, self.dstport, command, repr(msg))) + self.dstaddr, self.dstport, command, repr(msg))) raise ValueError(f"Unknown command: '{command}'") except Exception as e: logger.exception('got_data: %s', repr(e)) diff --git a/test/functional/test_framework/netutil.py b/test/functional/test_framework/netutil.py index 
855abd79c0..bec3aa5df3 100644 --- a/test/functional/test_framework/netutil.py +++ b/test/functional/test_framework/netutil.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. @@ -16,7 +16,7 @@ import struct import array import os -from binascii import unhexlify, hexlify +from binascii import unhexlify # STATE_ESTABLISHED = '01' # STATE_SYN_SENT = '02' @@ -27,8 +27,9 @@ # STATE_CLOSE = '07' # STATE_CLOSE_WAIT = '08' # STATE_LAST_ACK = '09' -STATE_LISTEN = '0A' # STATE_CLOSING = '0B' +STATE_LISTEN = '0A' + def get_socket_inodes(pid): """ @@ -42,19 +43,22 @@ def get_socket_inodes(pid): inodes.append(int(target[8:-1])) return inodes + def _remove_empty(array_in): return [x for x in array_in if x != ''] + def _convert_ip_port(array_in): - host,port = array_in.split(':') + host, port = array_in.split(':') # convert host from mangled-per-four-bytes form as used by kernel host = unhexlify(host) host_out = '' for x in range(0, len(host) // 4): - (val,) = struct.unpack('=I', host[x*4:(x+1)*4]) + (val,) = struct.unpack('=I', host[x * 4:(x + 1) * 4]) host_out += '%08x' % val - return host_out,int(port,16) + return host_out, int(port, 16) + def netstat(typ='tcp'): """ @@ -62,21 +66,22 @@ def netstat(typ='tcp'): To get pid of all network process running on system, you must run this script as superuser """ - with open('/proc/net/'+typ,'r',encoding='utf8') as f: + with open('/proc/net/' + typ, 'r', encoding='utf8') as f: content = f.readlines() content.pop(0) result = [] for line in content: - line_array = _remove_empty(line.split(' ')) # Split lines and remove empty spaces. + line_array = _remove_empty(line.split(' ')) # Split lines and remove empty spaces. 
tcp_id = line_array[0] l_addr = _convert_ip_port(line_array[1]) r_addr = _convert_ip_port(line_array[2]) state = line_array[3] - inode = int(line_array[9]) # Need the inode to match with process pid. + inode = int(line_array[9]) # Need the inode to match with process pid. nline = [tcp_id, l_addr, r_addr, state, inode] result.append(nline) return result + def get_bind_addrs(pid): """ Get bind addresses as (host,port) tuples for process pid. @@ -88,15 +93,16 @@ def get_bind_addrs(pid): bind_addrs.append(conn[1]) return bind_addrs + # from: http://code.activestate.com/recipes/439093/ def all_interfaces(): """ Return all interfaces that are up """ - is_64bits = sys.maxsize > 2**32 + is_64bits = sys.maxsize > 2 ** 32 struct_size = 40 if is_64bits else 32 s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - max_possible = 8 # initial value + max_possible = 8 # initial value while True: byte_data = max_possible * struct_size names = array.array('B', b'\0' * byte_data) @@ -109,39 +115,41 @@ def all_interfaces(): max_possible *= 2 else: break - namestr = names.tostring() - return [(namestr[i:i+16].split(b'\0', 1)[0], - socket.inet_ntoa(namestr[i+20:i+24])) + namestr = names.tobytes() + return [(namestr[i:i + 16].split(b'\0', 1)[0], + socket.inet_ntoa(namestr[i + 20:i + 24])) for i in range(0, outbytes, struct_size)] + def addr_to_hex(addr): """ Convert string IPv4 or IPv6 address to binary address as returned by get_bind_addrs. Very naive implementation that certainly doesn't work for all IPv6 variants. """ - if '.' in addr: # IPv4 + if '.' 
in addr: # IPv4 addr = [int(x) for x in addr.split('.')] - elif ':' in addr: # IPv6 - sub = [[], []] # prefix, suffix + elif ':' in addr: # IPv6 + sub = [[], []] # prefix, suffix x = 0 addr = addr.split(':') - for i,comp in enumerate(addr): + for i, comp in enumerate(addr): if comp == '': - if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end + if i == 0 or i == (len(addr) - 1): # skip empty component at beginning or end continue - x += 1 # :: skips to suffix - assert(x < 2) - else: # two bytes per component + x += 1 # :: skips to suffix + assert (x < 2) + else: # two bytes per component val = int(comp, 16) sub[x].append(val >> 8) sub[x].append(val & 0xff) nullbytes = 16 - len(sub[0]) - len(sub[1]) - assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0)) + assert (x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0) addr = sub[0] + ([0] * nullbytes) + sub[1] else: raise ValueError('Could not parse address %s' % addr) - return hexlify(bytearray(addr)).decode('ascii') + return bytearray(addr).hex() + def test_ipv6_local(): """ diff --git a/test/functional/test_framework/script.py b/test/functional/test_framework/script.py index dcf137b107..024de68cbd 100644 --- a/test/functional/test_framework/script.py +++ b/test/functional/test_framework/script.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
@@ -12,7 +12,9 @@ from .mininode import CTransaction, CTxOut, sha256, hash256, uint256_from_str, ser_uint256, ser_string from binascii import hexlify +from .bignum import bn2vch import hashlib +import struct import sys @@ -23,34 +25,30 @@ bchr = lambda x: bytes([x]) bord = lambda x: x -import struct - -from .bignum import bn2vch - MAX_SCRIPT_ELEMENT_SIZE = 520 - OPCODE_NAMES = {} +_opcode_instances = [] + def hash160(s): return hashlib.new('ripemd160', sha256(s)).digest() -_opcode_instances = [] class CScriptOp(int): """A single script opcode""" - __slots__ = [] + __slots__ = () @staticmethod def encode_op_pushdata(d): """Encode a PUSHDATA op, returning bytes""" if len(d) < 0x4c: - return b'' + bchr(len(d)) + d # OP_PUSHDATA + return b'' + bchr(len(d)) + d # OP_PUSHDATA elif len(d) <= 0xff: - return b'\x4c' + bchr(len(d)) + d # OP_PUSHDATA1 + return b'\x4c' + bchr(len(d)) + d # OP_PUSHDATA1 elif len(d) <= 0xffff: - return b'\x4d' + struct.pack(b' OP_PUSHDATA4: - yield (opcode, None, sop_idx) + yield opcode, None, sop_idx else: if opcode < OP_PUSHDATA1: pushdata_type = 'PUSHDATA(%d)' % opcode @@ -486,21 +493,20 @@ def raw_iter(self): pushdata_type = 'PUSHDATA2' if i + 1 >= len(self): raise CScriptInvalidError('PUSHDATA2: missing data length') - data_size = bord(self[i]) + (bord(self[i+1]) << 8) + data_size = bord(self[i]) + (bord(self[i + 1]) << 8) i += 2 elif opcode == OP_PUSHDATA4: pushdata_type = 'PUSHDATA4' if i + 3 >= len(self): raise CScriptInvalidError('PUSHDATA4: missing data length') - data_size = bord(self[i]) + (bord(self[i+1]) << 8) + (bord(self[i+2]) << 16) + (bord(self[i+3]) << 24) + data_size = bord(self[i]) + (bord(self[i + 1]) << 8) + (bord(self[i + 2]) << 16) + (bord(self[i + 3]) << 24) i += 4 else: - assert False # shouldn't happen - + assert False # shouldn't happen - data = bytes(self[i:i+data_size]) + data = bytes(self[i:i + data_size]) # Check for truncation if len(data) < data_size: @@ -508,7 +514,7 @@ def raw_iter(self): i += data_size - 
yield (opcode, data, sop_idx) + yield opcode, data, sop_idx def __iter__(self): """'Cooked' iteration @@ -655,12 +661,12 @@ def signature_hash(script, tx_to, in_idx, hash_type): return hash_data, None + # TODO: Allow cached hashPrevouts/hashSequence/hashOutputs to be provided. # Performance optimization probably not necessary for python tests, however. # Note that this corresponds to sigversion == 1 in EvalScript, which is used # for version 0 witnesses. def segwit_version1_signature_hash(script, tx_to, in_idx, hash_type, amount): - hash_prevouts = 0 hash_sequence = 0 hash_outputs = 0 diff --git a/test/functional/test_framework/siphash.py b/test/functional/test_framework/siphash.py index 0c227aff4c..193d43eb6c 100644 --- a/test/functional/test_framework/siphash.py +++ b/test/functional/test_framework/siphash.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Specialized SipHash-2-4 implementations. diff --git a/test/functional/test_framework/socks5.py b/test/functional/test_framework/socks5.py index dbe9c0483b..d827c868b2 100644 --- a/test/functional/test_framework/socks5.py +++ b/test/functional/test_framework/socks5.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py index 8bcc94cef4..00718081c2 100755 --- a/test/functional/test_framework/test_framework.py +++ b/test/functional/test_framework/test_framework.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. @@ -53,6 +53,7 @@ class RavenTestFramework: def __init__(self): """Sets test framework defaults. Do not override this method. Instead, override the set_test_params() method""" + self.num_nodes = None self.setup_clean_chain = False self.nodes = [] self.mocktime = 0 @@ -95,8 +96,8 @@ def main(self): self.options.tmpdir = os.path.abspath(self.options.tmpdir) try: os.makedirs(self.options.tmpdir, exist_ok=False) - except OSError as e: - self.options.tmpdir = os.path.abspath(self.options.tmpdir + str(randint(0,48))) + except OSError: + self.options.tmpdir = os.path.abspath(self.options.tmpdir + str(randint(0, 48))) os.makedirs(self.options.tmpdir, exist_ok=False) else: self.options.tmpdir = tempfile.mkdtemp(prefix="test") @@ -233,7 +234,7 @@ def start_nodes(self, extra_args=None): node.start(extra_args[i]) for node in self.nodes: node.wait_for_rpc_connection() - except: + except Exception: # If one node failed to start, stop the others self.stop_nodes() raise @@ -440,39 +441,6 @@ def _initialize_chain_clean(self): initialize_data_dir(self.options.tmpdir, i) -class ComparisonTestFramework(RavenTestFramework): - """Test framework for doing p2p comparison testing - - Sets up some ravend binaries: - - 1 binary: test binary - - 2 binaries: 1 test binary, 1 ref binary - - n>2 binaries: 1 test binary, n-1 ref binaries""" - - def run_test(self): - pass - - def set_test_params(self): - 
self.num_nodes = 2 - self.setup_clean_chain = True - - def add_options(self, parser): - parser.add_option("--testbinary", dest="testbinary", - default=os.getenv("RAVEND", "ravend"), - help="ravend binary to test") - parser.add_option("--refbinary", dest="refbinary", - default=os.getenv("RAVEND", "ravend"), - help="ravend binary to use for reference nodes (if any)") - - def setup_network(self): - extra_args = [['-whitelist=127.0.0.1']] * self.num_nodes - if hasattr(self, "extra_args"): - extra_args = self.extra_args - self.add_nodes(self.num_nodes, extra_args, - binary=[self.options.testbinary] + - [self.options.refbinary] * (self.num_nodes - 1)) - self.start_nodes() - - class SkipTest(Exception): """This exception is raised to skip a test""" diff --git a/test/functional/test_framework/test_node.py b/test/functional/test_framework/test_node.py index 5351213739..71385f7b66 100755 --- a/test/functional/test_framework/test_node.py +++ b/test/functional/test_framework/test_node.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. @@ -17,10 +17,11 @@ import time from .util import assert_equal, get_rpc_proxy, rpc_url, wait_until -from .authproxy import JSONRPCException +from .authproxy import JSONRPCException, AuthServiceProxy RAVEND_PROC_WAIT_TIMEOUT = 60 + class TestNode: """A class for representing a ravend node under test. @@ -50,17 +51,18 @@ def __init__(self, i, dirname, extra_args, rpchost, timewait, binary, stderr, mo self.coverage_dir = coverage_dir # Most callers will just need to add extra args to the standard list below. For those callers that need more flexibility, they can just set the args property directly. 
self.extra_args = extra_args - self.args = [self.binary, "-datadir=" + self.datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(mocktime), "-uacomment=testnode%d" % i] + self.args = [self.binary, "-datadir=" + self.datadir, "-server", "-keypool=2", "-discover=0", "-rest", "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-bip44=1", "-mocktime=" + str(mocktime), "-uacomment=testnode%d" % i] self.cli = TestNodeCLI(os.getenv("RAVENCLI", "raven-cli"), self.datadir) self.running = False + AuthServiceProxy.running = False self.process = None self.rpc_connected = False self.rpc = None self.url = None self.log = logging.getLogger('TestFramework.node%d' % i) - self.cleanup_on_exit = True # Whether to kill the node when this object goes away + self.cleanup_on_exit = True # Whether to kill the node when this object goes away self.p2ps = [] def __del__(self): @@ -86,6 +88,7 @@ def start(self, extra_args=None, stderr=None): stderr = self.stderr self.process = subprocess.Popen(self.args + extra_args, stderr=stderr) self.running = True + AuthServiceProxy.running = True self.log.debug("ravend started, waiting for RPC to come up") def wait_for_rpc_connection(self): @@ -144,6 +147,7 @@ def is_node_stopped(self): # process has stopped. Assert that it didn't return an error code. 
assert_equal(return_code, 0) self.running = False + AuthServiceProxy.running = False self.process = None self.rpc_connected = False self.rpc = None @@ -186,6 +190,7 @@ def node_encrypt_wallet(self, passphrase): self.encryptwallet(passphrase) self.wait_until_stopped() + class TestNodeCLI: """Interface to raven-cli for an individual node""" @@ -204,6 +209,7 @@ def __call__(self, *args, input_data=None): def __getattr__(self, command): def dispatcher(*args, **kwargs): return self.send_cli(command, *args, **kwargs) + return dispatcher def send_cli(self, command, *args, **kwargs): diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py index e6981db4a9..43f25910b1 100644 --- a/test/functional/test_framework/util.py +++ b/test/functional/test_framework/util.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. @@ -26,6 +26,7 @@ logger = logging.getLogger("TestFramework.utils") + ########################################################################################## # Assert functions ########################################################################################## @@ -53,7 +54,7 @@ def assert_contains_pair(key, val, dict_data): def assert_contains_key(key, dict_data): - if not key in dict_data: + if key not in dict_data: raise AssertionError("key %s is not in dict" % key) @@ -216,6 +217,7 @@ def assert_array_result(object_array, to_match, expected, should_not_find=False) if num_matched > 0 and should_not_find: raise AssertionError("Objects were found %s" % (str(to_match))) + def assert_happening(date_str, within_secs=120): """ Make sure date_str happened withing within_secs seconds of now. Assumes date_str is in rpc results cust_format e.g. 
'2019-11-07 17:50:06' and assumed to represent UTC. @@ -228,6 +230,7 @@ def assert_happening(date_str, within_secs=120): if abs(diff_secs) > within_secs: raise AssertionError("More than expected %s second difference between %s and now(%s) (%ss)" % (within_secs, date_str, now, diff_secs)) + ########################################################################################## # Utility functions ########################################################################################## @@ -304,6 +307,7 @@ def wait_until(predicate, *, err_msg, attempts=float('inf'), timeout=float('inf' assert_greater_than(time.ctime(timeout), time.ctime(), err_msg + " ~~ Exceeded Timeout") raise RuntimeError('Unreachable') + ########################################################################################## # RPC/P2P connection constants and functions ########################################################################################## @@ -319,6 +323,7 @@ def wait_until(predicate, *, err_msg, attempts=float('inf'), timeout=float('inf' # List to store RPC ports rpc_ports = [-1, -1, -1, -1, -1, -1, -1, -1] + class PortSeed: # Must be initialized with a unique integer for each process n = None @@ -358,7 +363,6 @@ def get_rpc_proxy(url, node_number, timeout=None, coverage_dir=None): proxy_kwargs = {} if timeout is not None: proxy_kwargs['timeout'] = timeout - proxy = AuthServiceProxy(url, **proxy_kwargs) proxy.url = url # store URL on proxy for info @@ -398,6 +402,7 @@ def rpc_url(data_dir, i, rpchost=None): host = rpchost return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port)) + ########################################################################################## # Node functions ########################################################################################## @@ -561,6 +566,7 @@ def sync_mempools(rpc_connections, *, wait=1, timeout=60): timeout -= wait raise AssertionError("Mempool sync failed") + 
########################################################################################## # Transaction/Block functions ########################################################################################## diff --git a/test/functional/test_framework/wallet_util.py b/test/functional/test_framework/wallet_util.py index aa6bb32884..b9e4168c3e 100755 --- a/test/functional/test_framework/wallet_util.py +++ b/test/functional/test_framework/wallet_util.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2018 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. @@ -28,6 +28,135 @@ 'p2sh_p2wsh_script', 'p2sh_p2wsh_addr']) +bip39_english = ["abandon", "ability", "able", "about", "above", "absent", "absorb", "abstract", "absurd", "abuse", "access", "accident", "account", + "accuse", "achieve", "acid", "acoustic", "acquire", "across", "act", "action", "actor", "actress", "actual", "adapt", "add", "addict", + "address", "adjust", "admit", "adult", "advance", "advice", "aerobic", "affair", "afford", "afraid", "again", "age", "agent", "agree", + "ahead", "aim", "air", "airport", "aisle", "alarm", "album", "alcohol", "alert", "alien", "all", "alley", "allow", "almost", "alone", + "alpha", "already", "also", "alter", "always", "amateur", "amazing", "among", "amount", "amused", "analyst", "anchor", "ancient", "anger", + "angle", "angry", "animal", "ankle", "announce", "annual", "another", "answer", "antenna", "antique", "anxiety", "any", "apart", "apology", + "appear", "apple", "approve", "april", "arch", "arctic", "area", "arena", "argue", "arm", "armed", "armor", "army", "around", "arrange", + "arrest", "arrive", "arrow", "art", "artefact", "artist", "artwork", "ask", "aspect", "assault", "asset", "assist", "assume", "asthma", + "athlete", "atom", 
"attack", "attend", "attitude", "attract", "auction", "audit", "august", "aunt", "author", "auto", "autumn", "average", + "avocado", "avoid", "awake", "aware", "away", "awesome", "awful", "awkward", "axis", "baby", "bachelor", "bacon", "badge", "bag", "balance", + "balcony", "ball", "bamboo", "banana", "banner", "bar", "barely", "bargain", "barrel", "base", "basic", "basket", "battle", "beach", "bean", + "beauty", "because", "become", "beef", "before", "begin", "behave", "behind", "believe", "below", "belt", "bench", "benefit", "best", "betray", + "better", "between", "beyond", "bicycle", "bid", "bike", "bind", "biology", "bird", "birth", "bitter", "black", "blade", "blame", "blanket", + "blast", "bleak", "bless", "blind", "blood", "blossom", "blouse", "blue", "blur", "blush", "board", "boat", "body", "boil", "bomb", "bone", + "bonus", "book", "boost", "border", "boring", "borrow", "boss", "bottom", "bounce", "box", "boy", "bracket", "brain", "brand", "brass", "brave", + "bread", "breeze", "brick", "bridge", "brief", "bright", "bring", "brisk", "broccoli", "broken", "bronze", "broom", "brother", "brown", "brush", + "bubble", "buddy", "budget", "buffalo", "build", "bulb", "bulk", "bullet", "bundle", "bunker", "burden", "burger", "burst", "bus", "business", + "busy", "butter", "buyer", "buzz", "cabbage", "cabin", "cable", "cactus", "cage", "cake", "call", "calm", "camera", "camp", "can", "canal", + "cancel", "candy", "cannon", "canoe", "canvas", "canyon", "capable", "capital", "captain", "car", "carbon", "card", "cargo", "carpet", "carry", + "cart", "case", "cash", "casino", "castle", "casual", "cat", "catalog", "catch", "category", "cattle", "caught", "cause", "caution", "cave", + "ceiling", "celery", "cement", "census", "century", "cereal", "certain", "chair", "chalk", "champion", "change", "chaos", "chapter", "charge", + "chase", "chat", "cheap", "check", "cheese", "chef", "cherry", "chest", "chicken", "chief", "child", "chimney", "choice", "choose", "chronic", + 
"chuckle", "chunk", "churn", "cigar", "cinnamon", "circle", "citizen", "city", "civil", "claim", "clap", "clarify", "claw", "clay", "clean", + "clerk", "clever", "click", "client", "cliff", "climb", "clinic", "clip", "clock", "clog", "close", "cloth", "cloud", "clown", "club", "clump", + "cluster", "clutch", "coach", "coast", "coconut", "code", "coffee", "coil", "coin", "collect", "color", "column", "combine", "come", "comfort", + "comic", "common", "company", "concert", "conduct", "confirm", "congress", "connect", "consider", "control", "convince", "cook", "cool", "copper", + "copy", "coral", "core", "corn", "correct", "cost", "cotton", "couch", "country", "couple", "course", "cousin", "cover", "coyote", "crack", "cradle", + "craft", "cram", "crane", "crash", "crater", "crawl", "crazy", "cream", "credit", "creek", "crew", "cricket", "crime", "crisp", "critic", "crop", + "cross", "crouch", "crowd", "crucial", "cruel", "cruise", "crumble", "crunch", "crush", "cry", "crystal", "cube", "culture", "cup", "cupboard", + "curious", "current", "curtain", "curve", "cushion", "custom", "cute", "cycle", "dad", "damage", "damp", "dance", "danger", "daring", "dash", + "daughter", "dawn", "day", "deal", "debate", "debris", "decade", "december", "decide", "decline", "decorate", "decrease", "deer", "defense", + "define", "defy", "degree", "delay", "deliver", "demand", "demise", "denial", "dentist", "deny", "depart", "depend", "deposit", "depth", "deputy", + "derive", "describe", "desert", "design", "desk", "despair", "destroy", "detail", "detect", "develop", "device", "devote", "diagram", "dial", + "diamond", "diary", "dice", "diesel", "diet", "differ", "digital", "dignity", "dilemma", "dinner", "dinosaur", "direct", "dirt", "disagree", + "discover", "disease", "dish", "dismiss", "disorder", "display", "distance", "divert", "divide", "divorce", "dizzy", "doctor", "document", "dog", + "doll", "dolphin", "domain", "donate", "donkey", "donor", "door", "dose", "double", "dove", 
"draft", "dragon", "drama", "drastic", "draw", "dream", + "dress", "drift", "drill", "drink", "drip", "drive", "drop", "drum", "dry", "duck", "dumb", "dune", "during", "dust", "dutch", "duty", "dwarf", + "dynamic", "eager", "eagle", "early", "earn", "earth", "easily", "east", "easy", "echo", "ecology", "economy", "edge", "edit", "educate", "effort", + "egg", "eight", "either", "elbow", "elder", "electric", "elegant", "element", "elephant", "elevator", "elite", "else", "embark", "embody", "embrace", + "emerge", "emotion", "employ", "empower", "empty", "enable", "enact", "end", "endless", "endorse", "enemy", "energy", "enforce", "engage", "engine", + "enhance", "enjoy", "enlist", "enough", "enrich", "enroll", "ensure", "enter", "entire", "entry", "envelope", "episode", "equal", "equip", "era", + "erase", "erode", "erosion", "error", "erupt", "escape", "essay", "essence", "estate", "eternal", "ethics", "evidence", "evil", "evoke", "evolve", + "exact", "example", "excess", "exchange", "excite", "exclude", "excuse", "execute", "exercise", "exhaust", "exhibit", "exile", "exist", "exit", + "exotic", "expand", "expect", "expire", "explain", "expose", "express", "extend", "extra", "eye", "eyebrow", "fabric", "face", "faculty", "fade", + "faint", "faith", "fall", "false", "fame", "family", "famous", "fan", "fancy", "fantasy", "farm", "fashion", "fat", "fatal", "father", "fatigue", + "fault", "favorite", "feature", "february", "federal", "fee", "feed", "feel", "female", "fence", "festival", "fetch", "fever", "few", "fiber", + "fiction", "field", "figure", "file", "film", "filter", "final", "find", "fine", "finger", "finish", "fire", "firm", "first", "fiscal", "fish", + "fit", "fitness", "fix", "flag", "flame", "flash", "flat", "flavor", "flee", "flight", "flip", "float", "flock", "floor", "flower", "fluid", "flush", + "fly", "foam", "focus", "fog", "foil", "fold", "follow", "food", "foot", "force", "forest", "forget", "fork", "fortune", "forum", "forward", "fossil", + 
"foster", "found", "fox", "fragile", "frame", "frequent", "fresh", "friend", "fringe", "frog", "front", "frost", "frown", "frozen", "fruit", "fuel", + "fun", "funny", "furnace", "fury", "future", "gadget", "gain", "galaxy", "gallery", "game", "gap", "garage", "garbage", "garden", "garlic", "garment", + "gas", "gasp", "gate", "gather", "gauge", "gaze", "general", "genius", "genre", "gentle", "genuine", "gesture", "ghost", "giant", "gift", "giggle", + "ginger", "giraffe", "girl", "give", "glad", "glance", "glare", "glass", "glide", "glimpse", "globe", "gloom", "glory", "glove", "glow", "glue", "goat", + "goddess", "gold", "good", "goose", "gorilla", "gospel", "gossip", "govern", "gown", "grab", "grace", "grain", "grant", "grape", "grass", "gravity", + "great", "green", "grid", "grief", "grit", "grocery", "group", "grow", "grunt", "guard", "guess", "guide", "guilt", "guitar", "gun", "gym", "habit", + "hair", "half", "hammer", "hamster", "hand", "happy", "harbor", "hard", "harsh", "harvest", "hat", "have", "hawk", "hazard", "head", "health", "heart", + "heavy", "hedgehog", "height", "hello", "helmet", "help", "hen", "hero", "hidden", "high", "hill", "hint", "hip", "hire", "history", "hobby", "hockey", + "hold", "hole", "holiday", "hollow", "home", "honey", "hood", "hope", "horn", "horror", "horse", "hospital", "host", "hotel", "hour", "hover", "hub", + "huge", "human", "humble", "humor", "hundred", "hungry", "hunt", "hurdle", "hurry", "hurt", "husband", "hybrid", "ice", "icon", "idea", "identify", + "idle", "ignore", "ill", "illegal", "illness", "image", "imitate", "immense", "immune", "impact", "impose", "improve", "impulse", "inch", "include", + "income", "increase", "index", "indicate", "indoor", "industry", "infant", "inflict", "inform", "inhale", "inherit", "initial", "inject", "injury", + "inmate", "inner", "innocent", "input", "inquiry", "insane", "insect", "inside", "inspire", "install", "intact", "interest", "into", "invest", "invite", + "involve", "iron", 
"island", "isolate", "issue", "item", "ivory", "jacket", "jaguar", "jar", "jazz", "jealous", "jeans", "jelly", "jewel", "job", "join", + "joke", "journey", "joy", "judge", "juice", "jump", "jungle", "junior", "junk", "just", "kangaroo", "keen", "keep", "ketchup", "key", "kick", "kid", + "kidney", "kind", "kingdom", "kiss", "kit", "kitchen", "kite", "kitten", "kiwi", "knee", "knife", "knock", "know", "lab", "label", "labor", "ladder", + "lady", "lake", "lamp", "language", "laptop", "large", "later", "latin", "laugh", "laundry", "lava", "law", "lawn", "lawsuit", "layer", "lazy", "leader", + "leaf", "learn", "leave", "lecture", "left", "leg", "legal", "legend", "leisure", "lemon", "lend", "length", "lens", "leopard", "lesson", "letter", "level", + "liar", "liberty", "library", "license", "life", "lift", "light", "like", "limb", "limit", "link", "lion", "liquid", "list", "little", "live", "lizard", + "load", "loan", "lobster", "local", "lock", "logic", "lonely", "long", "loop", "lottery", "loud", "lounge", "love", "loyal", "lucky", "luggage", "lumber", + "lunar", "lunch", "luxury", "lyrics", "machine", "mad", "magic", "magnet", "maid", "mail", "main", "major", "make", "mammal", "man", "manage", "mandate", + "mango", "mansion", "manual", "maple", "marble", "march", "margin", "marine", "market", "marriage", "mask", "mass", "master", "match", "material", "math", + "matrix", "matter", "maximum", "maze", "meadow", "mean", "measure", "meat", "mechanic", "medal", "media", "melody", "melt", "member", "memory", "mention", + "menu", "mercy", "merge", "merit", "merry", "mesh", "message", "metal", "method", "middle", "midnight", "milk", "million", "mimic", "mind", "minimum", + "minor", "minute", "miracle", "mirror", "misery", "miss", "mistake", "mix", "mixed", "mixture", "mobile", "model", "modify", "mom", "moment", "monitor", + "monkey", "monster", "month", "moon", "moral", "more", "morning", "mosquito", "mother", "motion", "motor", "mountain", "mouse", "move", "movie", "much", + 
"muffin", "mule", "multiply", "muscle", "museum", "mushroom", "music", "must", "mutual", "myself", "mystery", "myth", "naive", "name", "napkin", "narrow", + "nasty", "nation", "nature", "near", "neck", "need", "negative", "neglect", "neither", "nephew", "nerve", "nest", "net", "network", "neutral", "never", + "news", "next", "nice", "night", "noble", "noise", "nominee", "noodle", "normal", "north", "nose", "notable", "note", "nothing", "notice", "novel", "now", + "nuclear", "number", "nurse", "nut", "oak", "obey", "object", "oblige", "obscure", "observe", "obtain", "obvious", "occur", "ocean", "october", "odor", "off", + "offer", "office", "often", "oil", "okay", "old", "olive", "olympic", "omit", "once", "one", "onion", "online", "only", "open", "opera", "opinion", "oppose", + "option", "orange", "orbit", "orchard", "order", "ordinary", "organ", "orient", "original", "orphan", "ostrich", "other", "outdoor", "outer", "output", + "outside", "oval", "oven", "over", "own", "owner", "oxygen", "oyster", "ozone", "pact", "paddle", "page", "pair", "palace", "palm", "panda", "panel", "panic", + "panther", "paper", "parade", "parent", "park", "parrot", "party", "pass", "patch", "path", "patient", "patrol", "pattern", "pause", "pave", "payment", + "peace", "peanut", "pear", "peasant", "pelican", "pen", "penalty", "pencil", "people", "pepper", "perfect", "permit", "person", "pet", "phone", "photo", + "phrase", "physical", "piano", "picnic", "picture", "piece", "pig", "pigeon", "pill", "pilot", "pink", "pioneer", "pipe", "pistol", "pitch", "pizza", "place", + "planet", "plastic", "plate", "play", "please", "pledge", "pluck", "plug", "plunge", "poem", "poet", "point", "polar", "pole", "police", "pond", "pony", "pool", + "popular", "portion", "position", "possible", "post", "potato", "pottery", "poverty", "powder", "power", "practice", "praise", "predict", "prefer", "prepare", + "present", "pretty", "prevent", "price", "pride", "primary", "print", "priority", "prison", 
"private", "prize", "problem", "process", "produce", "profit", + "program", "project", "promote", "proof", "property", "prosper", "protect", "proud", "provide", "public", "pudding", "pull", "pulp", "pulse", "pumpkin", + "punch", "pupil", "puppy", "purchase", "purity", "purpose", "purse", "push", "put", "puzzle", "pyramid", "quality", "quantum", "quarter", "question", "quick", + "quit", "quiz", "quote", "rabbit", "raccoon", "race", "rack", "radar", "radio", "rail", "rain", "raise", "rally", "ramp", "ranch", "random", "range", "rapid", + "rare", "rate", "rather", "raven", "raw", "razor", "ready", "real", "reason", "rebel", "rebuild", "recall", "receive", "recipe", "record", "recycle", "reduce", + "reflect", "reform", "refuse", "region", "regret", "regular", "reject", "relax", "release", "relief", "rely", "remain", "remember", "remind", "remove", "render", + "renew", "rent", "reopen", "repair", "repeat", "replace", "report", "require", "rescue", "resemble", "resist", "resource", "response", "result", "retire", + "retreat", "return", "reunion", "reveal", "review", "reward", "rhythm", "rib", "ribbon", "rice", "rich", "ride", "ridge", "rifle", "right", "rigid", "ring", + "riot", "ripple", "risk", "ritual", "rival", "river", "road", "roast", "robot", "robust", "rocket", "romance", "roof", "rookie", "room", "rose", "rotate", "rough", + "round", "route", "royal", "rubber", "rude", "rug", "rule", "run", "runway", "rural", "sad", "saddle", "sadness", "safe", "sail", "salad", "salmon", "salon", + "salt", "salute", "same", "sample", "sand", "satisfy", "satoshi", "sauce", "sausage", "save", "say", "scale", "scan", "scare", "scatter", "scene", "scheme", + "school", "science", "scissors", "scorpion", "scout", "scrap", "screen", "script", "scrub", "sea", "search", "season", "seat", "second", "secret", "section", + "security", "seed", "seek", "segment", "select", "sell", "seminar", "senior", "sense", "sentence", "series", "service", "session", "settle", "setup", "seven", + "shadow", 
"shaft", "shallow", "share", "shed", "shell", "sheriff", "shield", "shift", "shine", "ship", "shiver", "shock", "shoe", "shoot", "shop", "short", + "shoulder", "shove", "shrimp", "shrug", "shuffle", "shy", "sibling", "sick", "side", "siege", "sight", "sign", "silent", "silk", "silly", "silver", "similar", + "simple", "since", "sing", "siren", "sister", "situate", "six", "size", "skate", "sketch", "ski", "skill", "skin", "skirt", "skull", "slab", "slam", "sleep", + "slender", "slice", "slide", "slight", "slim", "slogan", "slot", "slow", "slush", "small", "smart", "smile", "smoke", "smooth", "snack", "snake", "snap", "sniff", + "snow", "soap", "soccer", "social", "sock", "soda", "soft", "solar", "soldier", "solid", "solution", "solve", "someone", "song", "soon", "sorry", "sort", "soul", + "sound", "soup", "source", "south", "space", "spare", "spatial", "spawn", "speak", "special", "speed", "spell", "spend", "sphere", "spice", "spider", "spike", + "spin", "spirit", "split", "spoil", "sponsor", "spoon", "sport", "spot", "spray", "spread", "spring", "spy", "square", "squeeze", "squirrel", "stable", "stadium", + "staff", "stage", "stairs", "stamp", "stand", "start", "state", "stay", "steak", "steel", "stem", "step", "stereo", "stick", "still", "sting", "stock", "stomach", + "stone", "stool", "story", "stove", "strategy", "street", "strike", "strong", "struggle", "student", "stuff", "stumble", "style", "subject", "submit", "subway", + "success", "such", "sudden", "suffer", "sugar", "suggest", "suit", "summer", "sun", "sunny", "sunset", "super", "supply", "supreme", "sure", "surface", "surge", + "surprise", "surround", "survey", "suspect", "sustain", "swallow", "swamp", "swap", "swarm", "swear", "sweet", "swift", "swim", "swing", "switch", "sword", + "symbol", "symptom", "syrup", "system", "table", "tackle", "tag", "tail", "talent", "talk", "tank", "tape", "target", "task", "taste", "tattoo", "taxi", "teach", + "team", "tell", "ten", "tenant", "tennis", "tent", "term", 
"test", "text", "thank", "that", "theme", "then", "theory", "there", "they", "thing", "this", "thought", + "three", "thrive", "throw", "thumb", "thunder", "ticket", "tide", "tiger", "tilt", "timber", "time", "tiny", "tip", "tired", "tissue", "title", "toast", "tobacco", + "today", "toddler", "toe", "together", "toilet", "token", "tomato", "tomorrow", "tone", "tongue", "tonight", "tool", "tooth", "top", "topic", "topple", "torch", + "tornado", "tortoise", "toss", "total", "tourist", "toward", "tower", "town", "toy", "track", "trade", "traffic", "tragic", "train", "transfer", "trap", "trash", + "travel", "tray", "treat", "tree", "trend", "trial", "tribe", "trick", "trigger", "trim", "trip", "trophy", "trouble", "truck", "true", "truly", "trumpet", + "trust", "truth", "try", "tube", "tuition", "tumble", "tuna", "tunnel", "turkey", "turn", "turtle", "twelve", "twenty", "twice", "twin", "twist", "two", "type", + "typical", "ugly", "umbrella", "unable", "unaware", "uncle", "uncover", "under", "undo", "unfair", "unfold", "unhappy", "uniform", "unique", "unit", "universe", + "unknown", "unlock", "until", "unusual", "unveil", "update", "upgrade", "uphold", "upon", "upper", "upset", "urban", "urge", "usage", "use", "used", "useful", + "useless", "usual", "utility", "vacant", "vacuum", "vague", "valid", "valley", "valve", "van", "vanish", "vapor", "various", "vast", "vault", "vehicle", "velvet", + "vendor", "venture", "venue", "verb", "verify", "version", "very", "vessel", "veteran", "viable", "vibrant", "vicious", "victory", "video", "view", "village", + "vintage", "violin", "virtual", "virus", "visa", "visit", "visual", "vital", "vivid", "vocal", "voice", "void", "volcano", "volume", "vote", "voyage", "wage", + "wagon", "wait", "walk", "wall", "walnut", "want", "warfare", "warm", "warrior", "wash", "wasp", "waste", "water", "wave", "way", "wealth", "weapon", "wear", + "weasel", "weather", "web", "wedding", "weekend", "weird", "welcome", "west", "wet", "whale", "what", 
"wheat", "wheel", "when", "where", "whip", "whisper", "wide", + "width", "wife", "wild", "will", "win", "window", "wine", "wing", "wink", "winner", "winter", "wire", "wisdom", "wise", "wish", "witness", "wolf", "woman", + "wonder", "wood", "wool", "word", "work", "world", "worry", "worth", "wrap", "wreck", "wrestle", "wrist", "write", "wrong", "yard", "year", "yellow", "you", + "young", "youth", "zebra", "zero", "zone", "zoo"] + def test_address(node, address, **kwargs): """Get address info for `address` and test whether the returned values are as expected.""" diff --git a/test/functional/test_runner.py b/test/functional/test_runner.py index 7ec2c4c228..b80bade463 100755 --- a/test/functional/test_runner.py +++ b/test/functional/test_runner.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
@@ -90,6 +90,7 @@ 'feature_assets_mempool.py', 'feature_restricted_assets.py', 'feature_raw_restricted_assets.py', + 'wallet_bip44.py', 'mining_prioritisetransaction.py', 'feature_maxreorgdepth.py 4 --height=60 --tip_age=0 --should_reorg=0', # Don't Reorg 'feature_maxreorgdepth.py 3 --height=60 --tip_age=0 --should_reorg=1', # Reorg (low peer count) @@ -178,10 +179,7 @@ # List of tests that are not going to be run (usually means test is broken) 'example_test.py', 'feature_assumevalid.py', - 'feature_bip_softforks.py', # use this for future soft fork testing - 'feature_block.py', #TODO - fix comptool.TestInstance timeout 'feature_cltv.py', #TODO - fix mininode rehash methods to use X16R - 'feature_csv_activation.py', #TODO - currently testing softfork activations, we need to test the features 'feature_dersig.py', #TODO - fix mininode rehash methods to use X16R 'feature_nulldummy.py', #TODO - fix mininode rehash methods to use X16R 'feature_pruning.py', @@ -191,8 +189,6 @@ 'mining_basic.py', #TODO - fix mininode rehash methods to use X16R 'p2p_compactblocks.py', #TODO - refactor to assume segwit is always active 'p2p_fingerprint.py', #TODO - fix mininode rehash methods to use X16R - 'p2p_invalid_block.py', #TODO - fix mininode rehash methods to use X16R - 'p2p_invalid_tx.py', #TODO - fix mininode rehash methods to use X16R 'p2p_segwit.py', #TODO - refactor to assume segwit is always active 'p2p_sendheaders.py', #TODO - fix mininode rehash methods to use X16R 'p2p_unrequested_blocks.py', @@ -385,6 +381,10 @@ def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, use_term_control, j tests_dir = src_dir + '/test/functional/' + # limit number of jobs to 13 + if jobs > 13: + jobs = 13 + print("Jobs limited to 13 threads max.") print("Using: ", jobs, " threads") flags = ["--srcdir={}/src".format(build_dir)] + args diff --git a/test/functional/wallet_abandonconflict.py b/test/functional/wallet_abandonconflict.py index dd2817e457..8468b4ca91 100755 --- 
a/test/functional/wallet_abandonconflict.py +++ b/test/functional/wallet_abandonconflict.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/wallet_backup.py b/test/functional/wallet_backup.py index 98eb8aacf9..b511f9c12c 100755 --- a/test/functional/wallet_backup.py +++ b/test/functional/wallet_backup.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/wallet_basic.py b/test/functional/wallet_basic.py index ad2dbab5b6..514f5930ce 100755 --- a/test/functional/wallet_basic.py +++ b/test/functional/wallet_basic.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/wallet_bip44.py b/test/functional/wallet_bip44.py new file mode 100755 index 0000000000..19732eb448 --- /dev/null +++ b/test/functional/wallet_bip44.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2016 The Bitcoin Core developers +# Copyright (c) 2017-2020 The Raven Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+ +"""Test the Wallet BIP44 12 words implementation and supporting RPC""" + +import os +from test_framework.test_framework import RavenTestFramework +from test_framework.util import assert_equal, assert_does_not_contain, assert_contains, assert_raises_rpc_error +from test_framework.wallet_util import bip39_english + +MNEMONIC_0 = 'climb imitate repair vacant moral analyst barely night enemy fault report funny' +MNEMONIC_PASS_0 = 'test0' +MNEMONIC_1 = 'glass random such ginger media want pink comfort portion large ability spare' +MNEMONIC_PASS_2 = 'test2' + + +class Bip44Test(RavenTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 6 + self.extra_args = [['-bip44=1', '-mnemonic=' + MNEMONIC_0, '-mnemonicpassphrase=' + MNEMONIC_PASS_0], # BIP44 wallet with user-generated 12-words and passphrase + ['-bip44=1', '-mnemonic=' + MNEMONIC_1], # BIP44 wallet with user-generated 12-words, but no passphrase + ['-bip44=1', '-mnemonicpassphrase=' + MNEMONIC_PASS_2], # BIP44 wallet with auto-generated 12-words but user-generated passphrase + ['-bip44=1'], # BIP44 wallet with auto-generated 12-words and no passphrase + ['-bip44=0'], # BIP44 wallet disabled but supplied with words and passphrase + ['-bip44=0']] # BIP44 wallet disabled + + + def run_test(self): + nodes = self.nodes + + # BIP39 list should contain 2048 words + assert_equal(len(bip39_english), 2048) + + # Node 0 has 12 words and a passphrase + self.log.info("Testing BIP-44 Word-Lists and passphrases") + assert_equal(len(nodes[0].getmywords()['word_list'].split(' ')), 12) # Contains 12 words + assert_equal(nodes[0].getmywords()['word_list'], MNEMONIC_0) # Word list matches + assert_equal(nodes[0].getmywords()['passphrase'], MNEMONIC_PASS_0) # Passphrase matches + + # Node 1 has 12 words but no passphrase + assert_equal(len(nodes[1].getmywords()['word_list'].split(' ')), 12) # Contains 12 words + assert_equal(nodes[1].getmywords()['word_list'], MNEMONIC_1) # Word list matches 
+ assert_does_not_contain(str(nodes[1].getmywords()), 'passphrase') # Passphrase does not exist + + # Node 2 was not created with specific 12 words (using random-auto-generated), and a passphrase + assert_equal(len(nodes[2].getmywords()['word_list'].split(' ')), 12) # Contains 12 words + assert(nodes[2].getmywords()['word_list'] != nodes[3].getmywords()['word_list']) # auto-generated word-lists should not match + assert_equal(nodes[2].getmywords()['passphrase'], MNEMONIC_PASS_2) + + # Node 3 was not created with specific 12 words (using random-auto-generated), and no passphrase + assert_equal(len(nodes[3].getmywords()['word_list'].split(' ')), 12) # Contains 12 words + assert_does_not_contain(str(nodes[3].getmywords()), 'passphrase') # Passphrase does not exist + + # Nodes 4 & 5 are not BIP44 and should not have 12-words or passphrase + assert_raises_rpc_error(-4, "Wallet doesn't have 12 words.", nodes[4].getmywords) + assert_raises_rpc_error(-4, "Wallet doesn't have 12 words.", nodes[5].getmywords) + + # Cannot change from a non-BIP44 to a BIP44 wallet + self.log.info("Testing that BIP-44 wallets are intransigent") + self.stop_node(4) + self.stop_node(5) + self.start_node(4, extra_args=['-bip44=1', '-mnemonicpassphrase=test4']) + self.start_node(5, extra_args=['-bip44=1', '-mnemonic=' + MNEMONIC_0, '-mnemonicpassphrase=' + MNEMONIC_PASS_0]) + assert_raises_rpc_error(-4, "Wallet doesn't have 12 words.", nodes[4].getmywords) + assert_raises_rpc_error(-4, "Wallet doesn't have 12 words.", nodes[5].getmywords) + + # Try to add a passphrase to an existing bip44 wallet (should not add passphrase) + self.stop_node(3) + self.start_node(3, extra_args=['-mnemonicpassphrase=test3']) + assert_does_not_contain(str(nodes[3].getmywords()), 'passphrase') # Passphrase does not exist + + # Cannot change an already created bip44 wallet to a non-bip44 wallet + word_list_3 = nodes[3].getmywords()['word_list'] + self.stop_node(3) + self.start_node(3, extra_args=['-bip44=0']) + 
assert_equal(nodes[3].getmywords()['word_list'], word_list_3) # Word list matches + + # All 4 bip44 enabled wallets word-lists are in the bip39 word-list + self.log.info("Testing that BIP-44 wallet words are valid") + word_list_0 = MNEMONIC_0.split(' ') + word_list_1 = MNEMONIC_1.split(' ') + word_list_2 = nodes[2].getmywords()['word_list'].split(' ') + word_list_3 = word_list_3.split(' ') + for i in range(0, 12): + assert_contains(word_list_0[i], bip39_english) + assert_contains(word_list_1[i], bip39_english) + assert_contains(word_list_2[i], bip39_english) + assert_contains(word_list_3[i], bip39_english) + + # None of the words should be text-readable in the log files + self.log.info("Testing that BIP-44 words aren't text readable") + mnemonic_2 = nodes[2].getmywords()['word_list'] + mnemonic_3 = nodes[3].getmywords()['word_list'] + self.stop_nodes() + with open(os.path.join(self.options.tmpdir+"/node0/regtest/", "debug.log"), 'r', encoding='utf8') as f: + assert_does_not_contain(MNEMONIC_0, f.read()) + with open(os.path.join(self.options.tmpdir+"/node1/regtest/", "debug.log"), 'r', encoding='utf8') as f: + assert_does_not_contain(MNEMONIC_1, f.read()) + with open(os.path.join(self.options.tmpdir+"/node2/regtest/", "debug.log"), 'r', encoding='utf8') as f: + assert_does_not_contain(mnemonic_2, f.read()) + with open(os.path.join(self.options.tmpdir+"/node3/regtest/", "debug.log"), 'r', encoding='utf8') as f: + assert_does_not_contain(mnemonic_3, f.read()) + + # But words are readable in a non-encrypted wallet + with open(os.path.join(self.options.tmpdir+"/node0/regtest/", "wallet.dat"), 'rb') as f: + assert_contains(MNEMONIC_0, str(f.read())) + with open(os.path.join(self.options.tmpdir+"/node1/regtest/", "wallet.dat"), 'rb') as f: + assert_contains(MNEMONIC_1, str(f.read())) + with open(os.path.join(self.options.tmpdir+"/node2/regtest/", "wallet.dat"), 'rb') as f: + assert_contains(mnemonic_2, str(f.read())) + with
open(os.path.join(self.options.tmpdir+"/node3/regtest/", "wallet.dat"), 'rb') as f: + assert_contains(mnemonic_3, str(f.read())) + + # After encryption the words are no longer readable + self.log.info("Testing that BIP-44 wallet encryption") + self.start_nodes() + nodes[0].node_encrypt_wallet("password0") + nodes[1].node_encrypt_wallet("password1") + nodes[2].node_encrypt_wallet("password2") + nodes[3].node_encrypt_wallet("password3") + self.stop_nodes() + # But words are not readable in an encrypted wallet + with open(os.path.join(self.options.tmpdir+"/node0/regtest/", "wallet.dat"), 'rb') as f: + assert_does_not_contain(MNEMONIC_0, str(f.read())) + with open(os.path.join(self.options.tmpdir+"/node1/regtest/", "wallet.dat"), 'rb') as f: + assert_does_not_contain(MNEMONIC_1, str(f.read())) + with open(os.path.join(self.options.tmpdir+"/node2/regtest/", "wallet.dat"), 'rb') as f: + assert_does_not_contain(mnemonic_2, str(f.read())) + with open(os.path.join(self.options.tmpdir+"/node3/regtest/", "wallet.dat"), 'rb') as f: + assert_does_not_contain(mnemonic_3, str(f.read())) + + # But the words are still available using getmywords after entering the passphrase + self.start_nodes() + assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first.", nodes[0].getmywords) + nodes[0].walletpassphrase("password0", 48) + assert_equal(nodes[0].getmywords()['word_list'], MNEMONIC_0) # Word list matches + + # Words can also be retrieved from the dumpwallet command + nodes[0].dumpwallet(os.path.join(self.options.tmpdir+"/node0/regtest/", "dump_wallet_0.txt")) + with open(os.path.join(self.options.tmpdir+"/node0/regtest/", "dump_wallet_0.txt"), 'r') as f: + assert_contains(MNEMONIC_0, str(f.read())) + +if __name__ == '__main__': + Bip44Test().main() \ No newline at end of file diff --git a/test/functional/wallet_bumpfee.py b/test/functional/wallet_bumpfee.py index 6cdaa3db16..b6b502694e 100755 --- a/test/functional/wallet_bumpfee.py +++ 
b/test/functional/wallet_bumpfee.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/wallet_coinbase_category.py b/test/functional/wallet_coinbase_category.py index 57ee9fef59..c28b713434 100755 --- a/test/functional/wallet_coinbase_category.py +++ b/test/functional/wallet_coinbase_category.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2018 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/wallet_create_tx.py b/test/functional/wallet_create_tx.py index cf4a99da7a..6c9f1304cd 100755 --- a/test/functional/wallet_create_tx.py +++ b/test/functional/wallet_create_tx.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2018-2019 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/wallet_disable.py b/test/functional/wallet_disable.py index a31b306f58..d6edec3620 100755 --- a/test/functional/wallet_disable.py +++ b/test/functional/wallet_disable.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/wallet_dump.py b/test/functional/wallet_dump.py index b53f29a176..3584b97e69 100755 --- a/test/functional/wallet_dump.py +++ b/test/functional/wallet_dump.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
@@ -100,7 +100,7 @@ def run_test (self): found_addr, found_addr_chg, found_addr_rsv, _ = read_dump(tmpdir + "/node0/wallet.encrypted.dump", addrs, hd_master_addr_unenc) assert_equal(found_addr, test_addr_count) - assert_equal(found_addr_chg, 90*2 + 50) # old reserve keys are marked as change now + assert_equal(found_addr_chg, 50) # old reserve keys are marked as change now assert_equal(found_addr_rsv, 90*2) # Overwriting should fail diff --git a/test/functional/wallet_encryption.py b/test/functional/wallet_encryption.py index 1b54a0eba1..9feaf942a6 100755 --- a/test/functional/wallet_encryption.py +++ b/test/functional/wallet_encryption.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/wallet_groups.py b/test/functional/wallet_groups.py index 399abb80c6..92e2f76345 100755 --- a/test/functional/wallet_groups.py +++ b/test/functional/wallet_groups.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2018 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/wallet_hd.py b/test/functional/wallet_hd.py index e362678fff..0288965a21 100755 --- a/test/functional/wallet_hd.py +++ b/test/functional/wallet_hd.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. @@ -34,7 +34,7 @@ def run_test (self): # create an internal key change_addr = self.nodes[1].getrawchangeaddress() change_addrV= self.nodes[1].validateaddress(change_addr) - assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'") #first internal child key + assert_equal(change_addrV["hdkeypath"], "m/44'/1'/0'/1/0") #first internal child key # Import a non-HD private key in the HD wallet non_hd_add = self.nodes[0].getnewaddress() @@ -52,7 +52,7 @@ def run_test (self): for i in range(num_hd_adds): hd_add = self.nodes[1].getnewaddress() hd_info = self.nodes[1].validateaddress(hd_add) - assert_equal(hd_info["hdkeypath"], "m/0'/0'/"+str(i)+"'") + assert_equal(hd_info["hdkeypath"], "m/44'/1'/0'/0/"+str(i)) assert_equal(hd_info["hdseedid"], masterkeyid) self.nodes[0].sendtoaddress(hd_add, 1) self.nodes[0].generate(1) @@ -62,7 +62,7 @@ def run_test (self): # create an internal key (again) change_addr = self.nodes[1].getrawchangeaddress() change_addrV= self.nodes[1].validateaddress(change_addr) - assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'") #second internal child key + assert_equal(change_addrV["hdkeypath"], "m/44'/1'/0'/1/1") #second internal child key self.sync_all() assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1) @@ -81,7 +81,7 @@ def run_test (self): for _ in range(num_hd_adds): hd_add_2 = self.nodes[1].getnewaddress() hd_info_2 = self.nodes[1].validateaddress(hd_add_2) - assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/"+str(_)+"'") + 
assert_equal(hd_info_2["hdkeypath"], "m/44'/1'/0'/0/"+str(_)) assert_equal(hd_info_2["hdseedid"], masterkeyid) assert_equal(hd_add, hd_add_2) connect_nodes_bi(self.nodes, 0, 1) @@ -116,7 +116,7 @@ def run_test (self): if out['value'] != 1: keypath = self.nodes[1].validateaddress(out['scriptPubKey']['addresses'][0])['hdkeypath'] - assert_equal(keypath[0:7], "m/0'/1'") + assert_equal(keypath[0:7], "m/44'/1") if __name__ == '__main__': WalletHDTest().main () diff --git a/test/functional/wallet_import_rescan.py b/test/functional/wallet_import_rescan.py index 93e0c9eea0..7f0de45425 100755 --- a/test/functional/wallet_import_rescan.py +++ b/test/functional/wallet_import_rescan.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/wallet_import_with_label.py b/test/functional/wallet_import_with_label.py index 29c3a21389..1b89955c8b 100755 --- a/test/functional/wallet_import_with_label.py +++ b/test/functional/wallet_import_with_label.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2018 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/wallet_importmulti.py b/test/functional/wallet_importmulti.py index 667e6a76c9..021dd35e1a 100755 --- a/test/functional/wallet_importmulti.py +++ b/test/functional/wallet_importmulti.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/wallet_importprunedfunds.py b/test/functional/wallet_importprunedfunds.py index 933395b7d6..b318d36197 100755 --- a/test/functional/wallet_importprunedfunds.py +++ b/test/functional/wallet_importprunedfunds.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/wallet_keypool.py b/test/functional/wallet_keypool.py index e7ed86f9fe..296fd34522 100755 --- a/test/functional/wallet_keypool.py +++ b/test/functional/wallet_keypool.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
@@ -29,8 +29,6 @@ def run_test(self): addr = nodes[0].getnewaddress() addr_data = nodes[0].validateaddress(addr) wallet_info = nodes[0].getwalletinfo() - assert_equal(wallet_info['hdseedid'], wallet_info['hdmasterkeyid']) - assert(addr_before_encrypting_data['hdseedid'] != wallet_info['hdseedid']) assert(addr_data['hdseedid'] == wallet_info['hdseedid']) assert_raises_rpc_error(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress) diff --git a/test/functional/wallet_keypool_topup.py b/test/functional/wallet_keypool_topup.py index 88c5fb0edb..e956aaff2f 100755 --- a/test/functional/wallet_keypool_topup.py +++ b/test/functional/wallet_keypool_topup.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
@@ -69,7 +69,7 @@ def run_test(self): assert_equal(self.nodes[1].listtransactions()[0]['category'], "receive") # Check that we have marked all keys up to the used keypool key as used - assert_equal(self.nodes[1].validateaddress(self.nodes[1].getnewaddress())['hdkeypath'], "m/0'/0'/110'") + assert_equal(self.nodes[1].validateaddress(self.nodes[1].getnewaddress())['hdkeypath'], "m/44'/1'/0'/0/110") if __name__ == '__main__': KeypoolRestoreTest().main() diff --git a/test/functional/wallet_labels.py b/test/functional/wallet_labels.py index 64683b529c..f53d294829 100755 --- a/test/functional/wallet_labels.py +++ b/test/functional/wallet_labels.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/wallet_listreceivedby.py b/test/functional/wallet_listreceivedby.py index 2d16bc9f99..adedf354c5 100755 --- a/test/functional/wallet_listreceivedby.py +++ b/test/functional/wallet_listreceivedby.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/wallet_listsinceblock.py b/test/functional/wallet_listsinceblock.py index d36eab030f..57b7834793 100755 --- a/test/functional/wallet_listsinceblock.py +++ b/test/functional/wallet_listsinceblock.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/wallet_listtransactions.py b/test/functional/wallet_listtransactions.py index 0c701c79c9..54b881ff34 100755 --- a/test/functional/wallet_listtransactions.py +++ b/test/functional/wallet_listtransactions.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/wallet_multiwallet.py b/test/functional/wallet_multiwallet.py index 3781820f37..15592bc467 100755 --- a/test/functional/wallet_multiwallet.py +++ b/test/functional/wallet_multiwallet.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test multiwallet. 
diff --git a/test/functional/wallet_reorgsrestore.py b/test/functional/wallet_reorgsrestore.py index b953a741c6..f36845d9f2 100755 --- a/test/functional/wallet_reorgsrestore.py +++ b/test/functional/wallet_reorgsrestore.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2019 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/wallet_resendtransactions.py b/test/functional/wallet_resendtransactions.py index e5ba78688b..f090b83563 100755 --- a/test/functional/wallet_resendtransactions.py +++ b/test/functional/wallet_resendtransactions.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/wallet_txn_clone.py b/test/functional/wallet_txn_clone.py index cb00db3cd9..0b360ab525 100755 --- a/test/functional/wallet_txn_clone.py +++ b/test/functional/wallet_txn_clone.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
diff --git a/test/functional/wallet_txn_doublespend.py b/test/functional/wallet_txn_doublespend.py index 3020df720e..a73bc0fc32 100755 --- a/test/functional/wallet_txn_doublespend.py +++ b/test/functional/wallet_txn_doublespend.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/functional/wallet_zapwallettxes.py b/test/functional/wallet_zapwallettxes.py index 3be55c1df5..e35a3f7c8c 100755 --- a/test/functional/wallet_zapwallettxes.py +++ b/test/functional/wallet_zapwallettxes.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2016 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. diff --git a/test/util/raven-util-test.py b/test/util/raven-util-test.py index e5b56fde3d..8b0e5a858c 100755 --- a/test/util/raven-util-test.py +++ b/test/util/raven-util-test.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # Copyright 2014 BitPay Inc. # Copyright 2016-2017 The Bitcoin Core developers -# Copyright (c) 2017-2019 The Raven Core developers +# Copyright (c) 2017-2020 The Raven Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test framework for raven utils.